ripple-binary-codec refactor (#88)
Refactored all components in ripple-binary-codec/src.
@@ -14,12 +14,10 @@
   "dependencies": {
     "create-hash": "^1.2.0",
     "decimal.js": "^10.2.0",
-    "inherits": "^2.0.4",
-    "lodash": "^4.17.15",
     "ripple-address-codec": "^4.1.1"
   },
   "devDependencies": {
-    "@types/jest": "^26.0.0",
+    "@types/jest": "^26.0.7",
     "@types/lodash": "^4.14.155",
     "@types/node": "^14.0.10",
     "@typescript-eslint/eslint-plugin": "^3.2.0",
@@ -1 +1,3 @@
 # ripple-binary-codec
+
+Serialize and deserialize transactions according to the XRP Ledger protocol.
@@ -1,36 +1,107 @@
 /* eslint-disable func-style */
 
 import { coreTypes } from "./types";
-const { HashPrefix } = require("./hash-prefixes");
-const { BinaryParser } = require("./serdes/binary-parser");
-const { BinarySerializer, BytesList } = require("./serdes/binary-serializer");
-const { bytesToHex, slice, parseBytes } = require("./utils/bytes-utils");
+import { BinaryParser } from "./serdes/binary-parser";
+import { AccountID } from "./types/account-id";
+import { HashPrefix } from "./hash-prefixes";
+import { BinarySerializer, BytesList } from "./serdes/binary-serializer";
+import { sha512Half, transactionID } from "./hashes";
+import { FieldInstance } from "./enums";
+import { STObject } from "./types/st-object";
+import { JsonObject } from "./types/serialized-type";
 
-const { sha512Half, transactionID } = require("./hashes");
-
-const makeParser = (bytes) => new BinaryParser(bytes);
-const readJSON = (parser) => parser.readType(coreTypes.STObject).toJSON();
-const binaryToJSON = (bytes) => readJSON(makeParser(bytes));
-
-function serializeObject(object, opts = <any>{}) {
+/**
+ * Construct a BinaryParser
+ *
+ * @param bytes hex-string to construct BinaryParser from
+ * @returns A BinaryParser
+ */
+const makeParser = (bytes: string): BinaryParser => new BinaryParser(bytes);
+
+/**
+ * Parse BinaryParser into JSON
+ *
+ * @param parser BinaryParser object
+ * @returns JSON for the bytes in the BinaryParser
+ */
+const readJSON = (parser: BinaryParser): JsonObject =>
+  (parser.readType(coreTypes.STObject) as STObject).toJSON();
+
+/**
+ * Parse a hex-string into its JSON interpretation
+ *
+ * @param bytes hex-string to parse into JSON
+ * @returns JSON
+ */
+const binaryToJSON = (bytes: string): JsonObject => readJSON(makeParser(bytes));
+
+/**
+ * Interface for passing parameters to SerializeObject
+ *
+ * @field set signingFieldOnly to true if you want to serialize only signing fields
+ */
+interface OptionObject {
+  prefix?: Buffer;
+  suffix?: Buffer;
+  signingFieldsOnly?: boolean;
+}
+
+/**
+ * Function to serialize JSON object representing a transaction
+ *
+ * @param object JSON object to serialize
+ * @param opts options for serializing, including optional prefix, suffix, and signingFieldOnly
+ * @returns A Buffer containing the serialized object
+ */
+function serializeObject(object: JsonObject, opts: OptionObject = {}): Buffer {
   const { prefix, suffix, signingFieldsOnly = false } = opts;
   const bytesList = new BytesList();
+
   if (prefix) {
     bytesList.put(prefix);
   }
-  const filter = signingFieldsOnly ? (f) => f.isSigningField : undefined;
+
+  const filter = signingFieldsOnly
+    ? (f: FieldInstance): boolean => f.isSigningField
+    : undefined;
   coreTypes.STObject.from(object, filter).toBytesSink(bytesList);
+
   if (suffix) {
     bytesList.put(suffix);
   }
+
   return bytesList.toBytes();
 }
 
-function signingData(tx, prefix = HashPrefix.transactionSig) {
-  return serializeObject(tx, { prefix, signingFieldsOnly: true });
+/**
+ * Serialize an object for signing
+ *
+ * @param transaction Transaction to serialize
+ * @param prefix Prefix bytes to put before the serialized object
+ * @returns A Buffer with the serialized object
+ */
+function signingData(
+  transaction: JsonObject,
+  prefix: Buffer = HashPrefix.transactionSig
+): Buffer {
+  return serializeObject(transaction, { prefix, signingFieldsOnly: true });
 }
 
-function signingClaimData(claim) {
+/**
+ * Interface describing fields required for a Claim
+ */
+interface ClaimObject extends JsonObject {
+  channel: string;
+  amount: string | number;
+}
+
+/**
+ * Serialize a signingClaim
+ *
+ * @param claim A claim object to serialize
+ * @returns the serialized object with appropriate prefix
+ */
+function signingClaimData(claim: ClaimObject): Buffer {
   const prefix = HashPrefix.paymentChannelClaim;
   const channel = coreTypes.Hash256.from(claim.channel).toBytes();
   const amount = coreTypes.UInt64.from(BigInt(claim.amount)).toBytes();
@@ -43,26 +114,38 @@ function signingClaimData(claim) {
   return bytesList.toBytes();
 }
 
-function multiSigningData(tx, signingAccount) {
+/**
+ * Serialize a transaction object for multiSigning
+ *
+ * @param transaction transaction to serialize
+ * @param signingAccount Account to sign the transaction with
+ * @returns serialized transaction with appropriate prefix and suffix
+ */
+function multiSigningData(
+  transaction: JsonObject,
+  signingAccount: string | AccountID
+): Buffer {
   const prefix = HashPrefix.transactionMultiSig;
   const suffix = coreTypes.AccountID.from(signingAccount).toBytes();
-  return serializeObject(tx, { prefix, suffix, signingFieldsOnly: true });
+  return serializeObject(transaction, {
+    prefix,
+    suffix,
+    signingFieldsOnly: true,
+  });
 }
 
 export {
   BinaryParser,
   BinarySerializer,
   BytesList,
+  ClaimObject,
   makeParser,
   serializeObject,
   readJSON,
-  bytesToHex,
-  parseBytes,
   multiSigningData,
   signingData,
   signingClaimData,
   binaryToJSON,
   sha512Half,
   transactionID,
-  slice,
 };
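As a usage sketch (not part of the diff): the refactored helpers above round-trip a JSON object through the canonical binary form. The relative module path and the single-field object are assumptions for illustration; real transactions carry many more fields.

```ts
import { serializeObject, makeParser, readJSON } from "./binary"; // path assumed

// Serialize a minimal object, then parse it back from hex.
const blob = serializeObject({ Sequence: 1 });
console.log(blob.toString("hex").toUpperCase());          // "2400000001" – field header 0x24 + UInt32 value
console.log(readJSON(makeParser(blob.toString("hex"))));  // { Sequence: 1 }
```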
@@ -5,13 +5,13 @@ import {
   Type,
   TransactionResult,
 } from "./enums";
-const types = require("./types");
-const binary = require("./binary");
-const { ShaMap } = require("./shamap");
-const ledgerHashes = require("./ledger-hashes");
-const hashes = require("./hashes");
-const quality = require("./quality");
-const { HashPrefix } = require("./hash-prefixes");
+import * as types from "./types";
+import * as binary from "./binary";
+import { ShaMap } from "./shamap";
+import * as ledgerHashes from "./ledger-hashes";
+import * as hashes from "./hashes";
+import { quality } from "./quality";
+import { HashPrefix } from "./hash-prefixes";
 
 export {
   hashes,
@@ -1,4 +1,3 @@
-import { serializeUIntN } from "../utils/bytes-utils";
 import * as enums from "./definitions.json";
 import { SerializedType } from "../types/serialized-type";
 
@@ -37,7 +36,10 @@ class Bytes {
     readonly ordinal: number,
     readonly ordinalWidth: number
   ) {
-    this.bytes = serializeUIntN(ordinal, ordinalWidth);
+    this.bytes = Buffer.alloc(ordinalWidth);
+    for (let i = 0; i < ordinalWidth; i++) {
+      this.bytes[ordinalWidth - i - 1] = (ordinal >>> (i * 8)) & 0xff;
+    }
   }
 
   toJSON(): string {
@@ -57,7 +59,7 @@ class Bytes {
  * @brief: Collection of Bytes objects, mapping bidirectionally
  */
 class BytesLookup {
-  constructor(types: { [key: string]: number }, readonly ordinalWidth: number) {
+  constructor(types: Record<string, number>, readonly ordinalWidth: number) {
     Object.entries(types).forEach(([k, v]) => {
       this[k] = new Bytes(k, v, ordinalWidth);
       this[v.toString()] = this[k];
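Not part of the diff: the constructor change above replaces the removed serializeUIntN helper with a manual big-endian write. A standalone sketch of the same loop, showing the byte order it produces:

```ts
// Big-endian encoding of an ordinal into a fixed-width Buffer,
// mirroring the loop in the new Bytes constructor.
function toBigEndian(ordinal: number, ordinalWidth: number): Buffer {
  const bytes = Buffer.alloc(ordinalWidth);
  for (let i = 0; i < ordinalWidth; i++) {
    bytes[ordinalWidth - i - 1] = (ordinal >>> (i * 8)) & 0xff;
  }
  return bytes;
}

console.log(toBigEndian(2, 2));          // <Buffer 00 02>
console.log(toBigEndian(0x54584e00, 4)); // <Buffer 54 58 4e 00>
```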
@@ -1,10 +1,19 @@
-import { serializeUIntN } from "./utils/bytes-utils";
-
-function bytes(uint32) {
-  return serializeUIntN(uint32, 4);
+/**
+ * Write a 32 bit integer to a Buffer
+ *
+ * @param uint32 32 bit integer to write to buffer
+ * @returns a buffer with the bytes representation of uint32
+ */
+function bytes(uint32: number): Buffer {
+  const result = Buffer.alloc(4);
+  result.writeUInt32BE(uint32);
+  return result;
 }
 
-const HashPrefix = {
+/**
+ * Maps HashPrefix names to their byte representation
+ */
+const HashPrefix: Record<string, Buffer> = {
   transactionID: bytes(0x54584e00),
   // transaction plus metadata
   transaction: bytes(0x534e4400),
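Not part of the diff: the new bytes() helper simply writes the 32-bit prefix constant big-endian, so the transaction-ID prefix 0x54584e00 is the ASCII sequence "TXN" followed by a zero byte. A quick check with plain Node Buffers:

```ts
const prefix = Buffer.alloc(4);
prefix.writeUInt32BE(0x54584e00, 0);
console.log(prefix.toString("hex"));                // "54584e00"
console.log(prefix.slice(0, 3).toString("ascii"));  // "TXN"
```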
@@ -1,44 +1,76 @@
-import { makeClass } from "./utils/make-class";
 import { HashPrefix } from "./hash-prefixes";
-import { coreTypes } from "./types";
-import { parseBytes } from "./utils/bytes-utils";
 import * as createHash from "create-hash";
+import { Hash256 } from "./types/hash-256";
+import { BytesList } from "./serdes/binary-serializer";
 
-const Sha512Half = makeClass(
-  {
-    Sha512Half() {
-      this.hash = createHash("sha512");
-    },
-    statics: {
-      put(bytes) {
-        return new this().put(bytes);
-      },
-    },
-    put(bytes) {
-      this.hash.update(parseBytes(bytes, Buffer));
-      return this;
-    },
-    finish256() {
-      const bytes = this.hash.digest();
-      return bytes.slice(0, 32);
-    },
-    finish() {
-      return new coreTypes.Hash256(this.finish256());
-    },
-  },
-  undefined
-);
-
-function sha512Half(...args) {
-  const hash = new Sha512Half();
-  args.forEach((a) => hash.put(a));
-  return parseBytes(hash.finish256(), Uint8Array);
+/**
+ * Class for hashing with SHA512
+ * @extends BytesList So SerializedTypes can write bytes to a Sha512Half
+ */
+class Sha512Half extends BytesList {
+  private hash: createHash = createHash("sha512");
+
+  /**
+   * Construct a new Sha512Hash and write bytes this.hash
+   *
+   * @param bytes bytes to write to this.hash
+   * @returns the new Sha512Hash object
+   */
+  static put(bytes: Buffer): Sha512Half {
+    return new Sha512Half().put(bytes);
+  }
+
+  /**
+   * Write bytes to an existing Sha512Hash
+   *
+   * @param bytes bytes to write to object
+   * @returns the Sha512 object
+   */
+  put(bytes: Buffer): Sha512Half {
+    this.hash.update(bytes);
+    return this;
+  }
+
+  /**
+   * Compute SHA512 hash and slice in half
+   *
+   * @returns half of a SHA512 hash
+   */
+  finish256(): Buffer {
+    const bytes: Buffer = this.hash.digest();
+    return bytes.slice(0, 32);
+  }
+
+  /**
+   * Constructs a Hash256 from the Sha512Half object
+   *
+   * @returns a Hash256 object
+   */
+  finish(): Hash256 {
+    return new Hash256(this.finish256());
+  }
 }
 
-function transactionID(serialized) {
-  return new coreTypes.Hash256(
-    sha512Half(HashPrefix.transactionID, serialized)
-  );
+/**
+ * compute SHA512 hash of a list of bytes
+ *
+ * @param args zero or more arguments to hash
+ * @returns the sha512half hash of the arguments.
+ */
+function sha512Half(...args: Buffer[]): Buffer {
+  const hash = new Sha512Half();
+  args.forEach((a) => hash.put(a));
+  return hash.finish256();
+}
+
+/**
+ * Construct a transactionID from a Serialized Transaction
+ *
+ * @param serialized bytes to hash
+ * @returns a Hash256 object
+ */
+function transactionID(serialized: Buffer): Hash256 {
+  return new Hash256(sha512Half(HashPrefix.transactionID, serialized));
 }
 
 export { Sha512Half, sha512Half, transactionID };
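Not part of the diff: a small check of the refactored hashing helpers (module path assumed). sha512Half is just the first 32 bytes of a standard SHA-512 digest over the concatenated arguments, so it can be verified against Node's crypto module:

```ts
import { createHash } from "crypto";
import { sha512Half } from "./hashes"; // path assumed

const a = Buffer.from("dead", "hex");
const b = Buffer.from("beef", "hex");
const expected = createHash("sha512")
  .update(Buffer.concat([a, b]))
  .digest()
  .slice(0, 32);
console.log(sha512Half(a, b).equals(expected)); // true
```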
@@ -1,69 +1,98 @@
-import { strict as assert } from "assert";
+import * as assert from "assert";
 import { quality, binary } from "./coretypes";
-import { coreTypes } from "./types";
+import { decodeLedgerData } from "./ledger-hashes";
+import { ClaimObject } from "./binary";
+import { JsonObject } from "./types/serialized-type";
 const {
-  bytesToHex,
   signingData,
   signingClaimData,
   multiSigningData,
   binaryToJSON,
   serializeObject,
-  BinaryParser,
 } = binary;
 
-function decodeLedgerData(binary) {
-  assert(typeof binary === "string", "binary must be a hex string");
-  const parser = new BinaryParser(binary);
-  return {
-    ledger_index: parser.readUInt32(),
-    total_coins: parser.readType(coreTypes.UInt64).valueOf().toString(),
-    parent_hash: parser.readType(coreTypes.Hash256).toHex(),
-    transaction_hash: parser.readType(coreTypes.Hash256).toHex(),
-    account_hash: parser.readType(coreTypes.Hash256).toHex(),
-    parent_close_time: parser.readUInt32(),
-    close_time: parser.readUInt32(),
-    close_time_resolution: parser.readUInt8(),
-    close_flags: parser.readUInt8(),
-  };
-}
-
-function decode(binary) {
+/**
+ * Decode a transaction
+ *
+ * @param binary hex-string of the encoded transaction
+ * @returns the JSON representation of the transaction
+ */
+function decode(binary: string): JsonObject {
   assert(typeof binary === "string", "binary must be a hex string");
   return binaryToJSON(binary);
 }
 
-function encode(json) {
+/**
+ * Encode a transaction
+ *
+ * @param json The JSON representation of a transaction
+ * @returns A hex-string of the encoded transaction
+ */
+function encode(json: JsonObject): string {
   assert(typeof json === "object");
-  return bytesToHex(serializeObject(json));
+  return serializeObject(json).toString("hex").toUpperCase();
 }
 
-function encodeForSigning(json) {
+/**
+ * Encode a transaction and prepare for signing
+ *
+ * @param json JSON object representing the transaction
+ * @param signer string representing the account to sign the transaction with
+ * @returns a hex string of the encoded transaction
+ */
+function encodeForSigning(json: JsonObject): string {
   assert(typeof json === "object");
-  return bytesToHex(signingData(json));
+  return signingData(json).toString("hex").toUpperCase();
 }
 
-function encodeForSigningClaim(json) {
+/**
+ * Encode a transaction and prepare for signing with a claim
+ *
+ * @param json JSON object representing the transaction
+ * @param signer string representing the account to sign the transaction with
+ * @returns a hex string of the encoded transaction
+ */
+function encodeForSigningClaim(json: ClaimObject): string {
   assert(typeof json === "object");
-  return bytesToHex(signingClaimData(json));
+  return signingClaimData(json).toString("hex").toUpperCase();
 }
 
-function encodeForMultisigning(json, signer) {
+/**
+ * Encode a transaction and prepare for multi-signing
+ *
+ * @param json JSON object representing the transaction
+ * @param signer string representing the account to sign the transaction with
+ * @returns a hex string of the encoded transaction
+ */
+function encodeForMultisigning(json: JsonObject, signer: string): string {
   assert(typeof json === "object");
   assert.equal(json.SigningPubKey, "");
-  return bytesToHex(multiSigningData(json, signer));
+  return multiSigningData(json, signer).toString("hex").toUpperCase();
 }
 
-function encodeQuality(value) {
+/**
+ * Encode a quality value
+ *
+ * @param value string representation of a number
+ * @returns a hex-string representing the quality
+ */
+function encodeQuality(value: string): string {
   assert(typeof value === "string");
-  return bytesToHex(quality.encode(value));
+  return quality.encode(value).toString("hex").toUpperCase();
 }
 
-function decodeQuality(value) {
+/**
+ * Decode a quality value
+ *
+ * @param value hex-string of a quality
+ * @returns a string representing the quality
+ */
+function decodeQuality(value: string): string {
   assert(typeof value === "string");
   return quality.decode(value).toString();
 }
 
-module.exports = {
+export {
   decode,
   encode,
   encodeForSigning,
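Not part of the diff: a hedged round-trip through the re-exported top-level API (package name taken from this PR's title; the one-field object is purely illustrative):

```ts
import { encode, decode, encodeForSigning } from "ripple-binary-codec";

const json = { Sequence: 1 };
console.log(encode(json));           // "2400000001"
console.log(encodeForSigning(json)); // "535458002400000001" – "STX\0" signing prefix + signing fields
console.log(decode("2400000001"));   // { Sequence: 1 }
```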
@@ -1,19 +1,49 @@
-import * as _ from "lodash";
-import { strict as assert } from "assert";
-import { coreTypes } from "./types";
-const { STObject, Hash256 } = coreTypes;
-import { ShaMap } from "./shamap";
+import * as assert from "assert";
+import { ShaMap, ShaMapNode, ShaMapLeaf } from "./shamap";
 import { HashPrefix } from "./hash-prefixes";
 import { Sha512Half } from "./hashes";
 import { BinarySerializer, serializeObject } from "./binary";
+import { Hash256 } from "./types/hash-256";
+import { STObject } from "./types/st-object";
+import { UInt64 } from "./types/uint-64";
+import { UInt32 } from "./types/uint-32";
+import { UInt8 } from "./types/uint-8";
+import { BinaryParser } from "./serdes/binary-parser";
+import { JsonObject } from "./types/serialized-type";
 
-function computeHash(itemizer, itemsJson) {
+/**
+ * Computes the hash of a list of objects
+ *
+ * @param itemizer Converts an item into a format that can be added to SHAMap
+ * @param itemsJson Array of items to add to a SHAMap
+ * @returns the hash of the SHAMap
+ */
+function computeHash(
+  itemizer: (item: JsonObject) => [Hash256?, ShaMapNode?, ShaMapLeaf?],
+  itemsJson: Array<JsonObject>
+): Hash256 {
   const map = new ShaMap();
   itemsJson.forEach((item) => map.addItem(...itemizer(item)));
   return map.hash();
 }
 
-function transactionItem(json) {
+/**
+ * Interface describing a transaction item
+ */
+interface transactionItemObject extends JsonObject {
+  hash: string;
+  metaData: JsonObject;
+}
+
+/**
+ * Convert a transaction into an index and an item
+ *
+ * @param json transaction with metadata
+ * @returns a tuple of index and item to be added to SHAMap
+ */
+function transactionItemizer(
+  json: transactionItemObject
+): [Hash256, ShaMapNode, undefined] {
   assert(json.hash);
   const index = Hash256.from(json.hash);
   const item = {
@@ -25,11 +55,26 @@ function transactionItem(json) {
       serializer.writeLengthEncoded(STObject.from(json));
       serializer.writeLengthEncoded(STObject.from(json.metaData));
     },
-  };
-  return [index, item];
+  } as ShaMapNode;
+  return [index, item, undefined];
 }
 
-function entryItem(json) {
+/**
+ * Interface describing an entry item
+ */
+interface entryItemObject extends JsonObject {
+  index: string;
+}
+
+/**
+ * Convert an entry to a pair Hash256 and ShaMapNode
+ *
+ * @param json JSON describing a ledger entry item
+ * @returns a tuple of index and item to be added to SHAMap
+ */
+function entryItemizer(
+  json: entryItemObject
+): [Hash256, ShaMapNode, undefined] {
   const index = Hash256.from(json.index);
   const bytes = serializeObject(json);
   const item = {
@@ -39,29 +84,95 @@ function entryItem(json) {
     toBytesSink(sink) {
       sink.put(bytes);
     },
-  };
-  return [index, item];
+  } as ShaMapNode;
+  return [index, item, undefined];
 }
 
-const transactionTreeHash = _.partial(computeHash, transactionItem);
-const accountStateHash = _.partial(computeHash, entryItem);
-
-function ledgerHash(header) {
+/**
+ * Function computing the hash of a transaction tree
+ *
+ * @param param An array of transaction objects to hash
+ * @returns A Hash256 object
+ */
+function transactionTreeHash(param: Array<JsonObject>): Hash256 {
+  const itemizer = transactionItemizer as (
+    json: JsonObject
+  ) => [Hash256, ShaMapNode, undefined];
+  return computeHash(itemizer, param);
+}
+
+/**
+ * Function computing the hash of accountState
+ *
+ * @param param A list of accountStates hash
+ * @returns A Hash256 object
+ */
+function accountStateHash(param: Array<JsonObject>): Hash256 {
+  const itemizer = entryItemizer as (
+    json: JsonObject
+  ) => [Hash256, ShaMapNode, undefined];
+  return computeHash(itemizer, param);
+}
+
+/**
+ * Interface describing a ledger header
+ */
+interface ledgerObject {
+  ledger_index: number;
+  total_coins: string | number | bigint;
+  parent_hash: string;
+  transaction_hash: string;
+  account_hash: string;
+  parent_close_time: number;
+  close_time: number;
+  close_time_resolution: number;
+  close_flags: number;
+}
+
+/**
+ * Serialize and hash a ledger header
+ *
+ * @param header a ledger header
+ * @returns the hash of header
+ */
+function ledgerHash(header: ledgerObject): Hash256 {
   const hash = new Sha512Half();
   hash.put(HashPrefix.ledgerHeader);
   assert(header.parent_close_time !== undefined);
   assert(header.close_flags !== undefined);
 
-  coreTypes.UInt32.from(header.ledger_index).toBytesSink(hash);
-  coreTypes.UInt64.from(BigInt(header.total_coins)).toBytesSink(hash);
-  coreTypes.Hash256.from(header.parent_hash).toBytesSink(hash);
-  coreTypes.Hash256.from(header.transaction_hash).toBytesSink(hash);
-  coreTypes.Hash256.from(header.account_hash).toBytesSink(hash);
-  coreTypes.UInt32.from(header.parent_close_time).toBytesSink(hash);
-  coreTypes.UInt32.from(header.close_time).toBytesSink(hash);
-  coreTypes.UInt8.from(header.close_time_resolution).toBytesSink(hash);
-  coreTypes.UInt8.from(header.close_flags).toBytesSink(hash);
+  UInt32.from<number>(header.ledger_index).toBytesSink(hash);
+  UInt64.from<bigint>(BigInt(header.total_coins)).toBytesSink(hash);
+  Hash256.from(header.parent_hash).toBytesSink(hash);
+  Hash256.from(header.transaction_hash).toBytesSink(hash);
+  Hash256.from(header.account_hash).toBytesSink(hash);
+  UInt32.from<number>(header.parent_close_time).toBytesSink(hash);
+  UInt32.from<number>(header.close_time).toBytesSink(hash);
+  UInt8.from<number>(header.close_time_resolution).toBytesSink(hash);
+  UInt8.from<number>(header.close_flags).toBytesSink(hash);
   return hash.finish();
 }
 
-export { accountStateHash, transactionTreeHash, ledgerHash };
+/**
+ * Decodes a serialized ledger header
+ *
+ * @param binary A serialized ledger header
+ * @returns A JSON object describing a ledger header
+ */
+function decodeLedgerData(binary: string): ledgerObject {
+  assert(typeof binary === "string", "binary must be a hex string");
+  const parser = new BinaryParser(binary);
+  return {
+    ledger_index: parser.readUInt32(),
+    total_coins: parser.readType(UInt64).valueOf().toString(),
+    parent_hash: parser.readType(Hash256).toHex(),
+    transaction_hash: parser.readType(Hash256).toHex(),
+    account_hash: parser.readType(Hash256).toHex(),
+    parent_close_time: parser.readUInt32(),
+    close_time: parser.readUInt32(),
+    close_time_resolution: parser.readUInt8(),
+    close_flags: parser.readUInt8(),
+  };
+}
+
+export { accountStateHash, transactionTreeHash, ledgerHash, decodeLedgerData };
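Not part of the diff: a sketch of hashing a ledger header with the new typed ledgerHash (module path assumed; the all-zero hashes and small numbers are placeholders, any well-formed header object works):

```ts
import { ledgerHash } from "./ledger-hashes"; // path assumed

const header = {
  ledger_index: 1,
  total_coins: "100000000000000000",
  parent_hash: "00".repeat(32),
  transaction_hash: "00".repeat(32),
  account_hash: "00".repeat(32),
  parent_close_time: 0,
  close_time: 0,
  close_time_resolution: 10,
  close_flags: 0,
};

// Deterministic Hash256 over the ledger-header-prefixed, canonically serialized fields.
console.log(ledgerHash(header).toHex());
```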
@@ -1,23 +1,37 @@
-const Decimal = require("decimal.js");
-import { bytesToHex, slice, parseBytes } from "./utils/bytes-utils";
 import { coreTypes } from "./types";
+import { Decimal } from "decimal.js";
 
-module.exports = {
-  encode(arg) {
-    const quality = arg instanceof Decimal ? arg : new Decimal(arg);
-    const exponent = quality.e - 15;
-    const qualityString = quality
-      .times("1e" + -exponent)
-      .abs()
-      .toString();
+/**
+ * class for encoding and decoding quality
+ */
+class quality {
+  /**
+   * Encode quality amount
+   *
+   * @param arg string representation of an amount
+   * @returns Serialized quality
+   */
+  static encode(quality: string): Buffer {
+    const decimal = new Decimal(quality);
+    const exponent = decimal.e - 15;
+    const qualityString = decimal.times(`1e${-exponent}`).abs().toString();
     const bytes = coreTypes.UInt64.from(BigInt(qualityString)).toBytes();
     bytes[0] = exponent + 100;
     return bytes;
-  },
-  decode(arg) {
-    const bytes = slice(parseBytes(arg), -8);
+  }
+
+  /**
+   * Decode quality amount
+   *
+   * @param arg hex-string denoting serialized quality
+   * @returns deserialized quality
+   */
+  static decode(quality: string): Decimal {
+    const bytes = Buffer.from(quality, "hex").slice(-8);
     const exponent = bytes[0] - 100;
-    const mantissa = new Decimal("0x" + bytesToHex(slice(bytes, 1)));
-    return mantissa.times("1e" + exponent);
-  },
-};
+    const mantissa = new Decimal(`0x${bytes.slice(1).toString("hex")}`);
+    return mantissa.times(`1e${exponent}`);
+  }
+}
+
+export { quality };
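Not part of the diff: the decode path above can be followed by hand. The last eight bytes are an exponent byte (offset by 100) followed by a 7-byte mantissa. A standalone sketch with a hypothetical serialized quality:

```ts
import { Decimal } from "decimal.js";

const serialized = "5D038D7EA4C68000"; // hypothetical quality: exponent 0x5D - 100 = -7
const bytes = Buffer.from(serialized, "hex").slice(-8);
const exponent = bytes[0] - 100;
const mantissa = new Decimal(`0x${bytes.slice(1).toString("hex")}`); // 10^15
console.log(mantissa.times(`1e${exponent}`).toString()); // "100000000"
```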
@@ -127,7 +127,8 @@ class BinarySerializer {
   */
   writeFieldAndValue(field: FieldInstance, value: SerializedType): void {
     const associatedValue = field.associatedType.from(value);
-    assert(associatedValue.toBytesSink, field.name);
+    assert(associatedValue.toBytesSink !== undefined);
+    assert(field.name !== undefined);
 
     this.sink.put(field.header);
 
@@ -1,118 +1,181 @@
 import { strict as assert } from "assert";
-import { makeClass } from "./utils/make-class";
 import { coreTypes } from "./types";
 import { HashPrefix } from "./hash-prefixes";
 import { Sha512Half } from "./hashes";
+import { Hash256 } from "./types/hash-256";
+import { BytesList } from "./serdes/binary-serializer";
 
-const ShaMapNode = makeClass(
-  {
-    virtuals: {
-      hashPrefix() {},
-      isLeaf() {},
-      isInner() {},
-    },
-    cached: {
-      hash() {
-        const hasher = Sha512Half.put(this.hashPrefix());
-        this.toBytesSink(hasher);
-        return hasher.finish();
-      },
-    },
-  },
-  undefined
-);
-
-const ShaMapLeaf = makeClass(
-  {
-    inherits: ShaMapNode,
-    ShaMapLeaf(index, item) {
-      ShaMapNode.call(this);
-      this.index = index;
-      this.item = item;
-    },
-    isLeaf() {
-      return true;
-    },
-    isInner() {
-      return false;
-    },
-    hashPrefix() {
-      return this.item.hashPrefix();
-    },
-    toBytesSink(sink) {
-      this.item.toBytesSink(sink);
-      this.index.toBytesSink(sink);
-    },
-  },
-  undefined
-);
-
-const $uper = ShaMapNode.prototype;
-
-const ShaMapInner = makeClass(
-  {
-    inherits: ShaMapNode,
-    ShaMapInner(depth = 0) {
-      ShaMapNode.call(this);
-      this.depth = depth;
-      this.slotBits = 0;
-      this.branches = Array(16);
-    },
-    isInner() {
-      return true;
-    },
-    isLeaf() {
-      return false;
-    },
-    hashPrefix() {
-      return HashPrefix.innerNode;
-    },
-    setBranch(slot, branch) {
-      this.slotBits = this.slotBits | (1 << slot);
-      this.branches[slot] = branch;
-    },
-    empty() {
-      return this.slotBits === 0;
-    },
-    hash() {
-      if (this.empty()) {
-        return coreTypes.Hash256.ZERO_256;
-      }
-      return $uper.hash.call(this);
-    },
-    toBytesSink(sink) {
-      for (let i = 0; i < this.branches.length; i++) {
-        const branch = this.branches[i];
-        const hash = branch ? branch.hash() : coreTypes.Hash256.ZERO_256;
-        hash.toBytesSink(sink);
-      }
-    },
-    addItem(index, item, leaf) {
-      assert(index instanceof coreTypes.Hash256);
-      const nibble = index.nibblet(this.depth);
-      const existing = this.branches[nibble];
-      if (!existing) {
-        this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
-      } else if (existing.isLeaf()) {
-        const newInner = new ShaMapInner(this.depth + 1);
-        newInner.addItem(existing.index, null, existing);
-        newInner.addItem(index, item, leaf);
-        this.setBranch(nibble, newInner);
-      } else if (existing.isInner()) {
-        existing.addItem(index, item, leaf);
-      } else {
-        assert(false);
-      }
-    },
-  },
-  undefined
-);
-
-const ShaMap = makeClass(
-  {
-    inherits: ShaMapInner,
-  },
-  undefined
-);
-
-export { ShaMap };
+/**
+ * Abstract class describing a SHAMapNode
+ */
+abstract class ShaMapNode {
+  abstract hashPrefix(): Buffer;
+  abstract isLeaf(): boolean;
+  abstract isInner(): boolean;
+  abstract toBytesSink(list: BytesList): void;
+  abstract hash(): Hash256;
+}
+
+/**
+ * Class describing a Leaf of SHAMap
+ */
+class ShaMapLeaf extends ShaMapNode {
+  constructor(public index: Hash256, public item?: ShaMapNode) {
+    super();
+  }
+
+  /**
+   * @returns true as ShaMapLeaf is a leaf node
+   */
+  isLeaf(): boolean {
+    return true;
+  }
+
+  /**
+   * @returns false as ShaMapLeaf is not an inner node
+   */
+  isInner(): boolean {
+    return false;
+  }
+
+  /**
+   * Get the prefix of the this.item
+   *
+   * @returns The hash prefix, unless this.item is undefined, then it returns an empty Buffer
+   */
+  hashPrefix(): Buffer {
+    return this.item === undefined ? Buffer.alloc(0) : this.item.hashPrefix();
+  }
+
+  /**
+   * Hash the bytes representation of this
+   *
+   * @returns hash of this.item concatenated with this.index
+   */
+  hash(): Hash256 {
+    const hash = Sha512Half.put(this.hashPrefix());
+    this.toBytesSink(hash);
+    return hash.finish();
+  }
+
+  /**
+   * Write the bytes representation of this to a BytesList
+   * @param list BytesList to write bytes to
+   */
+  toBytesSink(list: BytesList): void {
+    if (this.item !== undefined) {
+      this.item.toBytesSink(list);
+    }
+    this.index.toBytesSink(list);
+  }
+}
+
+/**
+ * Class defining an Inner Node of a SHAMap
+ */
+class ShaMapInner extends ShaMapNode {
+  private slotBits = 0;
+  private branches: Array<ShaMapNode> = Array(16);
+
+  constructor(private depth: number = 0) {
+    super();
+  }
+
+  /**
+   * @returns true as ShaMapInner is an inner node
+   */
+  isInner(): boolean {
+    return true;
+  }
+
+  /**
+   * @returns false as ShaMapInner is not a leaf node
+   */
+  isLeaf(): boolean {
+    return false;
+  }
+
+  /**
+   * Get the hash prefix for this node
+   *
+   * @returns hash prefix describing an inner node
+   */
+  hashPrefix(): Buffer {
+    return HashPrefix.innerNode;
+  }
+
+  /**
+   * Set a branch of this node to be another node
+   *
+   * @param slot Slot to add branch to this.branches
+   * @param branch Branch to add
+   */
+  setBranch(slot: number, branch: ShaMapNode): void {
+    this.slotBits = this.slotBits | (1 << slot);
+    this.branches[slot] = branch;
+  }
+
+  /**
+   * @returns true if node is empty
+   */
+  empty(): boolean {
+    return this.slotBits === 0;
+  }
+
+  /**
+   * Compute the hash of this node
+   *
+   * @returns The hash of this node
+   */
+  hash(): Hash256 {
+    if (this.empty()) {
+      return coreTypes.Hash256.ZERO_256;
+    }
+    const hash = Sha512Half.put(this.hashPrefix());
+    this.toBytesSink(hash);
+    return hash.finish();
+  }
+
+  /**
+   * Writes the bytes representation of this node to a BytesList
+   *
+   * @param list BytesList to write bytes to
+   */
+  toBytesSink(list: BytesList): void {
+    for (let i = 0; i < this.branches.length; i++) {
+      const branch = this.branches[i];
+      const hash = branch ? branch.hash() : coreTypes.Hash256.ZERO_256;
+      hash.toBytesSink(list);
+    }
+  }
+
+  /**
+   * Add item to the SHAMap
+   *
+   * @param index Hash of the index of the item being inserted
+   * @param item Item to insert in the map
+   * @param leaf Leaf node to insert when branch doesn't exist
+   */
+  addItem(index?: Hash256, item?: ShaMapNode, leaf?: ShaMapLeaf): void {
+    assert(index !== undefined);
+    const nibble = index.nibblet(this.depth);
+    const existing = this.branches[nibble];
+
+    if (existing === undefined) {
+      this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
+    } else if (existing instanceof ShaMapLeaf) {
+      const newInner = new ShaMapInner(this.depth + 1);
+      newInner.addItem(existing.index, undefined, existing);
+      newInner.addItem(index, item, leaf);
+      this.setBranch(nibble, newInner);
+    } else if (existing instanceof ShaMapInner) {
+      existing.addItem(index, item, leaf);
+    } else {
+      throw new Error("invalid ShaMap.addItem call");
+    }
+  }
+}
+
+class ShaMap extends ShaMapInner {}
+
+export { ShaMap, ShaMapNode, ShaMapLeaf };
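Not part of the diff: a minimal check of the rewritten class hierarchy (module paths assumed). An empty ShaMap still hashes to the zero hash, exactly as the old makeClass version did:

```ts
import { ShaMap } from "./shamap";    // paths assumed
import { coreTypes } from "./types";

const map = new ShaMap();
console.log(map.hash().toHex() === coreTypes.Hash256.ZERO_256.toHex()); // true
```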
@@ -17,13 +17,18 @@ class AccountID extends Hash160 {
    * @param value either an existing AccountID, a hex-string, or a base58 r-Address
    * @returns an AccountID object
    */
-  static from(value: AccountID | string): AccountID {
-    if (value instanceof this) {
+  static from<T extends Hash160 | string>(value: T): AccountID {
+    if (value instanceof AccountID) {
       return value;
     }
-    return /^r/.test(value)
-      ? this.fromBase58(value)
-      : new AccountID(Buffer.from(value, "hex"));
+
+    if (typeof value === "string") {
+      return /^r/.test(value)
+        ? this.fromBase58(value)
+        : new AccountID(Buffer.from(value, "hex"));
+    }
+
+    throw new Error("Cannot construct AccountID from value given");
   }
 
   /**
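Not part of the diff: the widened from() signature still accepts both encodings. A sketch (the address is the well-known XRP Ledger genesis account, used purely as an example; module path assumed):

```ts
import { AccountID } from "./types/account-id"; // path assumed

const fromBase58 = AccountID.from("rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh");
const fromHex = AccountID.from(fromBase58.toHex());
console.log(fromBase58.toHex() === fromHex.toHex()); // true – the same 20-byte account ID either way
```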
@@ -1,8 +1,10 @@
 import { Decimal } from "decimal.js";
-import { SerializedType } from "./serialized-type";
 import { BinaryParser } from "../serdes/binary-parser";
-import { Currency } from "./currency";
 import { AccountID } from "./account-id";
+import { Currency } from "./currency";
+import { JsonObject, SerializedType } from "./serialized-type";
 
 /**
  * Constants for validating amounts
@@ -24,12 +26,25 @@ Decimal.config({
 /**
  * Interface for JSON objects that represent amounts
  */
-interface AmountObject {
+interface AmountObject extends JsonObject {
   value: string;
   currency: string;
   issuer: string;
 }
 
+/**
+ * Type guard for AmountObject
+ */
+function isAmountObject(arg): arg is AmountObject {
+  const keys = Object.keys(arg).sort();
+  return (
+    keys.length === 3 &&
+    keys[0] === "currency" &&
+    keys[1] === "issuer" &&
+    keys[2] === "value"
+  );
+}
+
 /**
  * Class for serializing/Deserializing Amounts
  */
@@ -45,10 +60,11 @@ class Amount extends SerializedType {
   /**
    * Construct an amount from an IOU or string amount
    *
-   * @param value An Amount, object representing an IOU, or a string representing an integer amount
+   * @param value An Amount, object representing an IOU, or a string
+   * representing an integer amount
    * @returns An Amount object
    */
-  static from(value: Amount | AmountObject | string): Amount {
+  static from<T extends Amount | AmountObject | string>(value: T): Amount {
     if (value instanceof Amount) {
       return value;
     }
@@ -63,7 +79,9 @@ class Amount extends SerializedType {
       amount[0] |= 0x40;
 
       return new Amount(amount);
-    } else if (typeof value === "object") {
+    }
+
+    if (isAmountObject(value)) {
       const number = new Decimal(value.value);
       Amount.assertIouIsValid(number);
 
@@ -92,6 +110,7 @@ class Amount extends SerializedType {
       const issuer = AccountID.from(value.issuer).toBytes();
       return new Amount(Buffer.concat([amount, currency, issuer]));
     }
+
     throw new Error("Invalid type to construct an Amount");
   }
 
@@ -123,8 +142,8 @@ class Amount extends SerializedType {
     } else {
       const parser = new BinaryParser(this.toString());
       const mantissa = parser.read(8);
-      const currency = Currency.fromParser(parser);
-      const issuer = AccountID.fromParser(parser);
+      const currency = Currency.fromParser(parser) as Currency;
+      const issuer = AccountID.fromParser(parser) as AccountID;
 
       const b1 = mantissa[0];
       const b2 = mantissa[1];
@@ -189,7 +208,8 @@ class Amount extends SerializedType {
   }
 
   /**
-   * Ensure that the value after being multiplied by the exponent does not contain a decimal.
+   * Ensure that the value after being multiplied by the exponent does not
+   * contain a decimal.
    *
    * @param decimal a Decimal object
    * @returns a string of the object without a decimal
@@ -215,4 +235,4 @@ class Amount extends SerializedType {
   }
 }
 
-export { Amount };
+export { Amount, AmountObject };
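Not part of the diff: with the new type guard, Amount.from accepts either a drops string or a three-key IOU object and rejects anything else. A short sketch (module path assumed; issuer address reused from the example above):

```ts
import { Amount } from "./types/amount"; // path assumed

const native = Amount.from("1000000"); // 1 XRP expressed in drops
const iou = Amount.from({
  value: "1.5",
  currency: "USD",
  issuer: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
});
console.log(native.toBytes().length, iou.toBytes().length); // 8 48
```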
@@ -26,8 +26,16 @@ class Blob extends SerializedType {
    * @param value existing Blob object or a hex-string
    * @returns A Blob object
    */
-  static from(value: Blob | string): Blob {
-    return value instanceof Blob ? value : new Blob(Buffer.from(value, "hex"));
+  static from<T extends Blob | string>(value: T): Blob {
+    if (value instanceof Blob) {
+      return value;
+    }
+
+    if (typeof value === "string") {
+      return new Blob(Buffer.from(value, "hex"));
+    }
+
+    throw new Error("Cannot construct Blob from value given");
   }
 }
 
@@ -113,10 +113,16 @@ class Currency extends Hash160 {
    *
    * @param val Currency object or a string representation of a currency
    */
-  static from(val: Currency | string): Currency {
-    return val instanceof this
-      ? val
-      : new Currency(bytesFromRepresentation(val));
+  static from<T extends Hash160 | string>(value: T): Currency {
+    if (value instanceof Currency) {
+      return value;
+    }
+
+    if (typeof value === "string") {
+      return new Currency(bytesFromRepresentation(value));
+    }
+
+    throw new Error("Cannot construct Currency from value given");
   }
 
   /**
@@ -15,9 +15,18 @@ class Hash extends Comparable {
    * Construct a Hash object from an existing Hash object or a hex-string
    *
    * @param value A hash object or hex-string of a hash
    */
-  static from(value: Hash | string): Hash {
-    return value instanceof this ? value : new this(Buffer.from(value, "hex"));
+  static from<T extends Hash | string>(value: T): Hash {
+    if(value instanceof this) {
+      return value
+    }
+
+    if(typeof value === "string") {
+      return new this(Buffer.from(value, "hex"));
+    }
+
+    throw new Error("Cannot construct Hash from given value");
+
   }
 
   /**
@@ -1,7 +1,7 @@
 import { AccountID } from "./account-id";
 import { Currency } from "./currency";
 import { BinaryParser } from "../serdes/binary-parser";
-import { SerializedType } from "./serialized-type";
+import { SerializedType, JsonObject } from "./serialized-type";
 
 /**
  * Constants for separating Paths in a PathSet
@@ -19,12 +19,33 @@ const TYPE_ISSUER = 0x20;
 /**
  * The object representation of a Hop, an issuer AccountID, an account AccountID, and a Currency
  */
-interface HopObject {
+interface HopObject extends JsonObject {
   issuer?: string;
   account?: string;
   currency?: string;
 }
 
+/**
+ * TypeGuard for HopObject
+ */
+function isHopObject(arg): arg is HopObject {
+  return (arg.issuer !== undefined ||
+    arg.account !== undefined ||
+    arg.currency !== undefined
+  );
+}
+
+/**
+ * TypeGuard for PathSet
+ */
+function isPathSet(arg): arg is Array<Array<HopObject>> {
+  return (
+    Array.isArray(arg) && arg.length === 0 ||
+    Array.isArray(arg) && Array.isArray(arg[0]) && arg[0].length === 0 ||
+    Array.isArray(arg) && Array.isArray(arg[0]) && isHopObject(arg[0][0])
+  );
+}
+
 /**
  * Serialize and Deserialize a Hop
  */
@@ -96,15 +117,15 @@ class Hop extends SerializedType {
 
     const result: HopObject = {};
     if (type & TYPE_ACCOUNT) {
-      result.account = AccountID.fromParser(hopParser).toJSON();
+      result.account = (AccountID.fromParser(hopParser) as AccountID).toJSON();
     }
 
     if (type & TYPE_CURRENCY) {
-      result.currency = Currency.fromParser(hopParser).toJSON();
+      result.currency = (Currency.fromParser(hopParser) as Currency).toJSON();
    }
 
     if (type & TYPE_ISSUER) {
-      result.issuer = AccountID.fromParser(hopParser).toJSON();
+      result.issuer = (AccountID.fromParser(hopParser) as AccountID).toJSON();
     }
 
     return result;
@@ -169,7 +190,7 @@ class Path extends SerializedType {
    *
    * @returns an Array of HopObject constructed from this.bytes
    */
-  toJSON() {
+  toJSON(): Array<HopObject> {
     const json: Array<HopObject> = [];
     const pathParser = new BinaryParser(this.toString());
 
@@ -191,21 +212,25 @@ class PathSet extends SerializedType {
    * @param value A PathSet or Array of Array of HopObjects
    * @returns the PathSet constructed from value
    */
-  static from(value: PathSet | Array<Array<HopObject>>): PathSet {
+  static from<T extends PathSet | Array<Array<HopObject>>>(value: T): PathSet {
     if (value instanceof PathSet) {
       return value;
     }
 
-    const bytes: Array<Buffer> = [];
+    if (isPathSet(value)) {
+      const bytes: Array<Buffer> = [];
 
-    value.forEach((path: Array<HopObject>) => {
-      bytes.push(Path.from(path).toBytes());
-      bytes.push(Buffer.from([PATH_SEPARATOR_BYTE]));
-    });
+      value.forEach((path: Array<HopObject>) => {
+        bytes.push(Path.from(path).toBytes());
+        bytes.push(Buffer.from([PATH_SEPARATOR_BYTE]));
+      });
 
-    bytes[bytes.length - 1] = Buffer.from([PATHSET_END_BYTE]);
+      bytes[bytes.length - 1] = Buffer.from([PATHSET_END_BYTE]);
 
-    return new PathSet(Buffer.concat(bytes));
+      return new PathSet(Buffer.concat(bytes));
+    }
+
+    throw new Error("Cannot construct PathSet from given value");
   }
 
   /**
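Not part of the diff: the new isPathSet/isHopObject guards accept the usual nested-array JSON form. A sketch of building and re-reading a one-hop path set (module path and the account value are illustrative):

```ts
import { PathSet } from "./types/path-set"; // path assumed

const paths = [[{ account: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh" }]];
const pathset = PathSet.from(paths);
console.log(pathset.toJSON()); // round-trips back to hop objects carrying the same account
```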
@@ -1,6 +1,10 @@
 import { BytesList } from "../serdes/binary-serializer";
 import { BinaryParser } from "../serdes/binary-parser";
 
+type JSON = string | number | boolean | null | undefined | JSON[] | JsonObject;
+
+type JsonObject = { [key: string]: JSON };
+
 /**
  * The base class for all binary-codec types
  */
@@ -16,7 +20,7 @@ class SerializedType {
     return this.fromParser(parser, hint);
   }
 
-  static from(value: any): SerializedType {
+  static from(value: SerializedType | JSON | bigint): SerializedType {
     throw new Error("from not implemented");
     return this.from(value);
   }
@@ -58,7 +62,7 @@ class SerializedType {
    *
    * @returns any type, if not overloaded returns hexString representation of bytes
    */
-  toJSON(): any {
+  toJSON(): JSON {
     return this.toHex();
   }
 
@@ -101,8 +105,10 @@ class Comparable extends SerializedType {
    * @returns A number denoting the relationship of this and other
    */
   compareTo(other: Comparable): number {
-    throw new Error(`cannot compare ${this} and ${other}`);
+    throw new Error(
+      `cannot compare ${this.toString()} and ${other.toString()}`
+    );
   }
 }
 
-export { SerializedType, Comparable };
+export { SerializedType, Comparable, JSON, JsonObject };
@@ -1,4 +1,4 @@
-import { SerializedType } from "./serialized-type";
+import { SerializedType, JsonObject } from "./serialized-type";
 import { STObject } from "./st-object";
 import { BinaryParser } from "../serdes/binary-parser";
@@ -7,6 +7,15 @@ const ARRAY_END_MARKER_NAME = "ArrayEndMarker";

 const OBJECT_END_MARKER = Buffer.from([0xe1]);

+/**
+ * TypeGuard for Array<JsonObject>
+ */
+function isObjects(args): args is Array<JsonObject> {
+  return (
+    Array.isArray(args) && (args.length === 0 || typeof args[0] === "object")
+  );
+}
+
 /**
  * Class for serializing and deserializing Arrays of Objects
  */
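The isObjects helper above is a user-defined type guard; a minimal standalone sketch of the narrowing it enables (the names below are illustrative, not the library's API):

type JsonObject = { [key: string]: unknown };

// When the guard returns true, TypeScript narrows `arg` to Array<JsonObject>.
function isObjects(arg: unknown): arg is Array<JsonObject> {
  return (
    Array.isArray(arg) && (arg.length === 0 || typeof arg[0] === "object")
  );
}

function describeValue(value: string | Array<JsonObject>): string {
  if (isObjects(value)) {
    // Narrowed: `value` is Array<JsonObject> in this branch.
    return `array of ${value.length} object(s)`;
  }
  // The only remaining union member is `string`.
  return `hex string of length ${value.length}`;
}

console.log(describeValue("E1F1"));          // hex string of length 4
console.log(describeValue([{ a: 1 }, {}]));  // array of 2 object(s)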
@@ -43,18 +52,22 @@ class STArray extends SerializedType {
    * @param value STArray or Array of Objects to parse into an STArray
    * @returns An STArray object
    */
-  static from(value: STArray | Array<object>): STArray {
+  static from<T extends STArray | Array<JsonObject>>(value: T): STArray {
     if (value instanceof STArray) {
       return value;
     }

-    const bytes: Array<Buffer> = [];
-    value.forEach((obj) => {
-      bytes.push(STObject.from(obj).toBytes());
-    });
+    if (isObjects(value)) {
+      const bytes: Array<Buffer> = [];
+      value.forEach((obj) => {
+        bytes.push(STObject.from(obj).toBytes());
+      });

       bytes.push(ARRAY_END_MARKER);
       return new STArray(Buffer.concat(bytes));
+    }
+
+    throw new Error("Cannot construct Currency from value given");
   }

   /**
@@ -62,8 +75,8 @@ class STArray extends SerializedType {
    *
    * @returns An Array of JSON objects
    */
-  toJSON(): Array<object> {
-    const result: Array<object> = [];
+  toJSON(): Array<JsonObject> {
+    const result: Array<JsonObject> = [];

     const arrayParser = new BinaryParser(this.toString());
@@ -1,5 +1,5 @@
-import { Field } from "../enums";
-import { SerializedType } from "./serialized-type";
+import { Field, FieldInstance } from "../enums";
+import { SerializedType, JsonObject } from "./serialized-type";
 import { BinaryParser } from "../serdes/binary-parser";
 import { BinarySerializer, BytesList } from "../serdes/binary-serializer";
@@ -45,8 +45,8 @@ class STObject extends SerializedType {
    * @param filter optional, denote which field to include in serialized object
    * @returns a STObject object
    */
-  static from(
-    value: STObject | object,
+  static from<T extends STObject | JsonObject>(
+    value: T,
     filter?: (...any) => boolean
   ): STObject {
     if (value instanceof STObject) {
@@ -57,8 +57,8 @@ class STObject extends SerializedType {
     const bytes: BinarySerializer = new BinarySerializer(list);

     let sorted = Object.keys(value)
-      .map((f) => Field[f])
-      .filter((f) => f !== undefined && f.isSerialized)
+      .map((f: string): FieldInstance => Field[f] as FieldInstance)
+      .filter((f: FieldInstance): boolean => f !== undefined && f.isSerialized)
       .sort((a, b) => {
         return a.ordinal - b.ordinal;
       });
@@ -84,7 +84,7 @@ class STObject extends SerializedType {
    *
    * @returns a JSON object
    */
-  toJSON(): object {
+  toJSON(): JsonObject {
     const objectParser = new BinaryParser(this.toString());
     const accumulator = {};
@@ -23,14 +23,18 @@ class UInt16 extends UInt {
    *
    * @param val UInt16 object or number
    */
-  static from(val: UInt16 | number): UInt16 {
+  static from<T extends UInt16 | number>(val: T): UInt16 {
     if (val instanceof UInt16) {
       return val;
     }

-    const buf = Buffer.alloc(UInt16.width);
-    buf.writeUInt16BE(val);
-    return new UInt16(buf);
+    if (typeof val === "number") {
+      const buf = Buffer.alloc(UInt16.width);
+      buf.writeUInt16BE(val);
+      return new UInt16(buf);
+    }
+
+    throw new Error("Can not construct UInt16 with given value");
   }

   /**
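The number branch above is plain Node Buffer arithmetic; a tiny standalone sketch of the same steps (assuming UInt16.width is 2, as the class name implies):

// Big-endian write into a 2-byte buffer, as the UInt16 number branch does.
const buf = Buffer.alloc(2);                     // <Buffer 00 00>
buf.writeUInt16BE(5);                            // <Buffer 00 05>
console.log(buf.toString("hex").toUpperCase());  // "0005"

// writeUInt16BE validates its argument, so out-of-range input throws:
// buf.writeUInt16BE(70000); // RangeError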
@@ -23,14 +23,26 @@ class UInt32 extends UInt {
    *
    * @param val UInt32 object or number
    */
-  static from(val: UInt32 | number): UInt32 {
+  static from<T extends UInt32 | number | string>(val: T): UInt32 {
     if (val instanceof UInt32) {
       return val;
     }

     const buf = Buffer.alloc(UInt32.width);
-    buf.writeUInt32BE(val);
-    return new UInt32(buf);
+
+    if (typeof val === "string") {
+      const num = Number.parseInt(val);
+      buf.writeUInt32BE(num);
+      return new UInt32(buf);
+    }
+
+    if (typeof val === "number") {
+      buf.writeUInt32BE(val);
+      return new UInt32(buf);
+    }
+
+    console.log(typeof val);
+    throw new Error("Cannot construct UInt32 from given value");
   }

   /**
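The new string branch simply parses the string before writing it; a standalone sketch of that path (assuming UInt32.width is 4). Note that Number.parseInt is called without a radix, so plain decimal strings are the expected input.

// The string branch in isolation: parse, then write big-endian into 4 bytes.
function uint32BytesFromString(val: string): Buffer {
  const buf = Buffer.alloc(4);
  buf.writeUInt32BE(Number.parseInt(val));
  return buf;
}

console.log(uint32BytesFromString("1").toString("hex"));          // "00000001"
console.log(uint32BytesFromString("4294967295").toString("hex")); // "ffffffff"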
@@ -26,7 +26,7 @@ class UInt64 extends UInt {
    * @param val A UInt64, hex-string, bigint, or number
    * @returns A UInt64 object
    */
-  static from(val: UInt64 | string | bigint | number): UInt64 {
+  static from<T extends UInt64 | string | bigint | number>(val: T): UInt64 {
     if (val instanceof UInt64) {
       return val;
     }
@@ -38,17 +38,23 @@ class UInt64 extends UInt {
         throw new Error("value must be an unsigned integer");
       }
       buf.writeBigUInt64BE(BigInt(val));
-    } else if (typeof val === "string") {
-      if (!HEX_REGEX.test(val)) {
-        throw new Error(val + "is not a valid hex-string");
-      }
-      buf = Buffer.from(val, "hex");
-    } else {
-      // typeof val === bigint
-      buf.writeBigUInt64BE(val);
+      return new UInt64(buf);
     }

-    return new UInt64(buf);
+    if (typeof val === "string") {
+      if (!HEX_REGEX.test(val)) {
+        throw new Error(`${val} is not a valid hex-string`);
+      }
+      buf = Buffer.from(val, "hex");
+      return new UInt64(buf);
+    }
+
+    if (typeof val === "bigint") {
+      buf.writeBigUInt64BE(val);
+      return new UInt64(buf);
+    }
+
+    throw new Error("Cannot construct UInt64 from given value");
   }

   /**
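The bigint and hex-string branches above rest on two Node Buffer facilities; a standalone sketch of both (the real method additionally validates strings against HEX_REGEX and rejects negative numbers):

// bigint path: write the 64-bit value big-endian into an 8-byte buffer.
function uint64BytesFromBigInt(val: bigint): Buffer {
  const buf = Buffer.alloc(8);
  buf.writeBigUInt64BE(val);
  return buf;
}

// hex-string path: the string already is the big-endian byte representation.
function uint64BytesFromHex(val: string): Buffer {
  return Buffer.from(val, "hex");
}

console.log(uint64BytesFromBigInt(1337n).toString("hex"));             // "0000000000000539"
console.log(uint64BytesFromHex("0000000000000539").readBigUInt64BE()); // 1337n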
@@ -21,14 +21,18 @@ class UInt8 extends UInt {
    *
    * @param val UInt8 object or number
    */
-  static from(val: UInt8 | number): UInt8 {
+  static from<T extends UInt8 | number>(val: T): UInt8 {
     if (val instanceof UInt8) {
       return val;
     }

-    const buf = Buffer.alloc(UInt8.width);
-    buf.writeUInt8(val);
-    return new UInt8(buf);
+    if (typeof val === "number") {
+      const buf = Buffer.alloc(UInt8.width);
+      buf.writeUInt8(val);
+      return new UInt8(buf);
+    }
+
+    throw new Error("Cannot construct UInt8 from given value");
   }

   /**
@@ -3,6 +3,13 @@ import { BinaryParser } from "../serdes/binary-parser";
 import { Hash256 } from "./hash-256";
 import { BytesList } from "../serdes/binary-serializer";

+/**
+ * TypeGuard for Array<string>
+ */
+function isStrings(arg): arg is Array<string> {
+  return Array.isArray(arg) && (arg.length === 0 || typeof arg[0] === "string");
+}
+
 /**
  * Class for serializing and deserializing vectors of Hash256
  */
@@ -34,16 +41,20 @@ class Vector256 extends SerializedType {
    * @param value A Vector256 object or array of hex-strings representing Hash256's
    * @returns a Vector256 object
    */
-  static from(value: Vector256 | Array<string>): Vector256 {
+  static from<T extends Vector256 | Array<string>>(value: T): Vector256 {
     if (value instanceof Vector256) {
       return value;
     }

-    const bytesList = new BytesList();
-    value.forEach((hash) => {
-      Hash256.from(hash).toBytesSink(bytesList);
-    });
-    return new Vector256(bytesList.toBytes());
+    if (isStrings(value)) {
+      const bytesList = new BytesList();
+      value.forEach((hash) => {
+        Hash256.from(hash).toBytesSink(bytesList);
+      });
+      return new Vector256(bytesList.toBytes());
+    }
+
+    throw new Error("Cannot construct Vector256 from given value");
   }

   /**
@@ -1,107 +0,0 @@
-import { strict as assert } from "assert";
-
-function signum(a, b) {
-  return a < b ? -1 : a === b ? 0 : 1;
-}
-
-const hexLookup = (function () {
-  const res = <any>{};
-  const reverse = (res.reverse = new Array(256));
-  for (let i = 0; i < 16; i++) {
-    const char = i.toString(16).toUpperCase();
-    res[char] = i;
-
-    for (let j = 0; j < 16; j++) {
-      const char2 = j.toString(16).toUpperCase();
-      const byte = (i << 4) + j;
-      const byteHex = char + char2;
-      res[byteHex] = byte;
-      reverse[byte] = byteHex;
-    }
-  }
-  return res;
-})();
-
-const reverseHexLookup = hexLookup.reverse;
-
-function bytesToHex(sequence) {
-  const buf = Array(sequence.length);
-  for (let i = sequence.length - 1; i >= 0; i--) {
-    buf[i] = reverseHexLookup[sequence[i]];
-  }
-  return buf.join("");
-}
-
-function byteForHex(hex) {
-  const byte = hexLookup[hex];
-  if (byte === undefined) {
-    throw new Error(`\`${hex}\` is not a valid hex representation of a byte`);
-  }
-  return byte;
-}
-
-function parseBytes(val, Output = <any>Array) {
-  if (!val || val.length === undefined) {
-    throw new Error(`${val} is not a sequence`);
-  }
-
-  if (typeof val === "string") {
-    const start = val.length % 2;
-    const res = new Output((val.length + start) / 2);
-    for (let i = val.length, to = res.length - 1; to >= start; i -= 2, to--) {
-      res[to] = byteForHex(val.slice(i - 2, i));
-    }
-    if (start === 1) {
-      res[0] = byteForHex(val[0]);
-    }
-    return res;
-  } else if (val instanceof Output) {
-    return val;
-  } else if (Output === Uint8Array) {
-    return new Output(val);
-  }
-  const res = new Output(val.length);
-  for (let i = val.length - 1; i >= 0; i--) {
-    res[i] = val[i];
-  }
-  return res;
-}
-
-function serializeUIntN(val, width) {
-  const newBytes = new Uint8Array(width);
-  const lastIx = width - 1;
-  for (let i = 0; i < width; i++) {
-    newBytes[lastIx - i] = (val >>> (i * 8)) & 0xff;
-  }
-  return newBytes;
-}
-
-function compareBytes(a, b) {
-  assert(a.length === b.length);
-  for (let i = 0; i < a.length; i++) {
-    const cmp = signum(a[i], b[i]);
-    if (cmp !== 0) {
-      return cmp;
-    }
-  }
-  return 0;
-}
-
-function slice(val, startIx = 0, endIx = val.length, Output = val.constructor) {
-  /* eslint-disable no-param-reassign */
-  if (startIx < 0) {
-    startIx += val.length;
-  }
-  if (endIx < 0) {
-    endIx += val.length;
-  }
-  /* eslint-enable no-param-reassign */
-  const len = endIx - startIx;
-  const res = new Output(len);
-  for (let i = endIx - 1; i >= startIx; i--) {
-    res[i - startIx] = val[i];
-  }
-  return res;
-}
-
-export { parseBytes, bytesToHex, slice, compareBytes, serializeUIntN };
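With this file gone, the codec leans on Node's Buffer for the same operations; the updated tests later in this diff already use the toString('hex') form. A rough mapping (a sketch, not the library's API; Buffer-only, whereas the old helpers also accepted plain arrays and Uint8Arrays):

// parseBytes("00AA")          ->  Buffer.from("00AA", "hex")
// bytesToHex(bytes)           ->  buf.toString("hex").toUpperCase()
// slice(bytes, 2, 4)          ->  buf.slice(2, 4)
// compareBytes(a, b)          ->  Buffer.compare(a, b)   (no equal-length assert)
// serializeUIntN(val, width)  ->  buf.writeUIntBE(val, 0, width)

const buf = Buffer.from("00AA", "hex");
console.log(buf.toString("hex").toUpperCase());                               // "00AA"
console.log(Buffer.compare(Buffer.from([0, 1, 2]), Buffer.from([1, 2, 3])));  // -1

const n = Buffer.alloc(4);
n.writeUIntBE(0x1234, 0, 4);
console.log(n.toString("hex")); // "00001234"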
@@ -1,85 +0,0 @@
-import _ = require("lodash");
-const inherits = require("inherits");
-
-function forEach(obj, func) {
-  Object.keys(obj || {}).forEach((k) => {
-    func(obj[k], k);
-  });
-}
-
-function ensureArray(val) {
-  return Array.isArray(val) ? val : [val];
-}
-
-export function makeClass(klass_, definition_) {
-  const definition = definition_ || klass_;
-  let klass = typeof klass_ === "function" ? klass_ : null;
-  if (klass === null) {
-    for (const k in definition) {
-      if (k[0].match(/[A-Z]/)) {
-        klass = definition[k];
-        break;
-      }
-    }
-  }
-  const parent = definition.inherits;
-  if (parent) {
-    if (klass === null) {
-      klass = function () {
-        parent.apply(this, arguments);
-      };
-    }
-    inherits(klass, parent);
-    _.defaults(klass, parent);
-  }
-  if (klass === null) {
-    klass = function () {};
-  }
-  const proto = klass.prototype;
-  function addFunc(original, name, wrapper) {
-    proto[name] = wrapper || original;
-  }
-  (definition.getters || []).forEach((k) => {
-    const key = "_" + k;
-    proto[k] = function () {
-      return this[key];
-    };
-  });
-  forEach(definition.virtuals, (f, n) => {
-    addFunc(f, n, function () {
-      throw new Error("unimplemented");
-    });
-  });
-  forEach(definition.methods, addFunc);
-  forEach(definition, (f, n) => {
-    if (_.isFunction(f) && f !== klass) {
-      addFunc(f, n, undefined);
-    }
-  });
-  _.assign(klass, definition.statics);
-  if (typeof klass.init === "function") {
-    klass.init();
-  }
-  forEach(definition.cached, (f, n) => {
-    const key = "_" + n;
-    addFunc(f, n, function () {
-      let value = this[key];
-      if (value === undefined) {
-        value = this[key] = f.call(this);
-      }
-      return value;
-    });
-  });
-  if (definition.mixins) {
-    const mixins = {};
-    // Right-most in the list win
-    ensureArray(definition.mixins)
-      .reverse()
-      .forEach((o) => {
-        _.defaults(mixins, o);
-      });
-    _.defaults(proto, mixins);
-  }
-
-  return klass;
-}
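makeClass assembled prototypes at runtime (inherits, getters, cached members, mixins); the refactored codec uses ordinary ES classes instead. A sketch, with hypothetical names, of how two of those features translate:

// Hypothetical class showing the manual equivalents of two makeClass features.
class Example {
  private readonly _width: number;
  private _hex?: string;

  constructor(private readonly bytes: Buffer) {
    this._width = bytes.length;
  }

  // makeClass({ getters: ["width"] }) generated a width() method that returned
  // this._width; an accessor covers the same ground.
  get width(): number {
    return this._width;
  }

  // makeClass({ cached: { hex() { ... } } }) memoized the result on first call;
  // here the memoization is written out by hand.
  hex(): string {
    if (this._hex === undefined) {
      this._hex = this.bytes.toString("hex").toUpperCase();
    }
    return this._hex;
  }
}

const e = new Example(Buffer.from([0xca, 0xfe]));
console.log(e.width);  // 2
console.log(e.hex());  // "CAFE"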
@@ -5,7 +5,7 @@ const { Amount } = coreTypes
 const fixtures = loadFixture('data-driven-tests.json')

 function amountErrorTests () {
-  _.filter(fixtures.values_tests, { type: 'Amount' }).forEach(f => {
+  fixtures.values_tests.filter(obj => obj.type === 'Amount').forEach(f => {
     // We only want these with errors
     if (!f.error) {
       return
@@ -10,7 +10,6 @@ const { Amount, Hash160 } = coreTypes
 const { makeParser, readJSON } = binary
 const { Field, TransactionType } = require('./../dist/enums')
 const { parseHexOnly, hexOnly, loadFixture } = require('./utils')
-const { bytesToHex } = require('../dist/utils/bytes-utils')
 const fixtures = loadFixture('data-driven-tests.json')
 const { BytesList } = require('../dist/serdes/binary-serializer')
@@ -107,9 +106,9 @@ function transactionParsingTests () {
     expect(parser.read(8)).not.toEqual([])
     expect(parser.readField()).toEqual(Field.SigningPubKey)
     expect(parser.readVariableLengthLength()).toBe(33)
-    expect(bytesToHex(parser.read(33))).toEqual(tx_json.SigningPubKey)
+    expect(parser.read(33).toString('hex').toUpperCase()).toEqual(tx_json.SigningPubKey)
     expect(parser.readField()).toEqual(Field.TxnSignature)
-    expect(bytesToHex(parser.readVariableLength())).toEqual(tx_json.TxnSignature)
+    expect(parser.readVariableLength().toString('hex').toUpperCase()).toEqual(tx_json.TxnSignature)
     expect(parser.readField()).toEqual(Field.Account)
     expect(encodeAccountID(parser.readVariableLength())).toEqual(tx_json.Account)
     expect(parser.end()).toBe(true)
@@ -180,12 +179,13 @@ function transactionParsingTests () {
     const parser = makeParser(transaction.binary)
     const jsonFromBinary = readJSON(parser)
     expect(jsonFromBinary instanceof coreTypes.STObject).toBe(false)
-    expect(_.isPlainObject(jsonFromBinary)).toBe(true)
+    expect(jsonFromBinary instanceof Object).toBe(true);
+    expect(jsonFromBinary.prototype).toBe(undefined)
   })
 }

 function amountParsingTests () {
-  _.filter(fixtures.values_tests, { type: 'Amount' }).forEach((f, i) => {
+  fixtures.values_tests.filter(obj => obj.type === 'Amount').forEach((f, i) => {
     if (f.error) {
       return
     }
@@ -246,7 +246,7 @@ function assertRecyclable (json, forField) {
 function nestedObjectTests () {
   function disabled (i) {
     unused(i)
-    return false // !_.includes([2], i);
+    return false
   }

   fixtures.whole_objects.forEach((f, i) => {
@@ -1,70 +0,0 @@
-const { slice, compareBytes, parseBytes, bytesToHex } = require('../dist/utils/bytes-utils')
-
-describe('bytes-utils', function () {
-  describe('parseBytes', function () {
-    test('can decode hex', function () {
-      expect(parseBytes('0012')).toEqual([0x00, 0x12])
-      expect(parseBytes('0012')).toEqual([0x00, 0x12])
-      expect(parseBytes('00AA')).toEqual([0x00, 0xaa])
-    })
-    test('can decode hex to a Uint8Array', function () {
-      const result = parseBytes('0012', Uint8Array)
-      expect(result instanceof Uint8Array).toBe(true)
-      expect(result).toEqual(Uint8Array.from([0x00, 0x12]))
-    })
-    test('can convert a list to a Uint8Array', function () {
-      const result = parseBytes([0x00, 0x12], Uint8Array)
-      expect(result instanceof Uint8Array).toBe(true)
-      expect(result).toEqual(Uint8Array.from([0x00, 0x12]))
-    })
-    test('can decode hex to a Buffer', function () {
-      const result = parseBytes('0012', Buffer)
-      expect(result instanceof Buffer).toBe(true)
-      expect(result.toJSON().data).toEqual([0x00, 0x12])
-    })
-  })
-
-  describe('bytesToHex', function () {
-    test('can encode an array as hex', function () {
-      expect(bytesToHex([0x00, 0xaa])).toBe('00AA')
-      expect(bytesToHex([0xaa])).toBe('AA')
-    })
-    test('can encode Uint8Array as hex', function () {
-      expect(bytesToHex(new Uint8Array([0x00, 0xaa]))).toBe('00AA')
-      expect(bytesToHex(new Uint8Array([0xaa]))).toBe('AA')
-    })
-  })
-
-  describe('compareBytes', function () {
-    test('compares the bytes sequence as big endian number', function () {
-      expect(compareBytes([0, 1, 2], [1, 2, 3])).toBe(-1)
-    })
-    test('throws when the bytes sequences are of unlike length', function () {
-      expect(() => compareBytes([0, 1], [1])).toThrow()
-    })
-  })
-
-  describe('slice', function () {
-    const val = [1, 2, 3, 4, 5]
-    test('creates a slice of the same type as first arg', function () {
-      expect(Array.isArray(slice(val))).toBe(true)
-    })
-    test('the 2nd arg is the start position [2:]', function () {
-      expect(val.slice(2)).toEqual([3, 4, 5])
-      expect(slice(val, 2)).toEqual([3, 4, 5])
-    })
-    test('the 3rd arg is the end position [2:4]', function () {
-      expect(slice(val, 2, 4)).toEqual([3, 4])
-    })
-    test('can slice using negative numbers [-3:]', function () {
-      expect(slice(val, -3)).toEqual([3, 4, 5])
-    })
-    test('can slice using negative numbers [-3:-1]', function () {
-      expect(slice(val, -3, -1)).toEqual([3, 4])
-    })
-    test('the 4th arg is the output class type', function () {
-      expect(slice(val, 2, 4, Buffer).toJSON().data).toEqual([3, 4])
-      expect(slice(val, 2, 4, Uint8Array)).toEqual(Uint8Array.from([3, 4]))
-    })
-  })
-})
@@ -1,4 +1,4 @@
-const { quality, binary } = require('../dist/coretypes')
+const { quality } = require('../dist/coretypes')

 describe('Quality encode/decode', function () {
   const bookDirectory =
@@ -10,6 +10,6 @@ describe('Quality encode/decode', function () {
   })
   test('can encode', function () {
     const bytes = quality.encode(expectedQuality)
-    expect(binary.bytesToHex(bytes)).toBe(bookDirectory.slice(-16))
+    expect(bytes.toString('hex').toUpperCase()).toBe(bookDirectory.slice(-16))
   })
 })
@@ -21,7 +21,7 @@ function makeItem (indexArg) {
       index.toBytesSink(sink)
     },
     hashPrefix () {
-      return [1, 3, 3, 7]
+      return Buffer.from([1, 3, 3, 7])
     }
   }
   return [index, item]
@@ -60,7 +60,7 @@ describe('Signing data', function () {
   })
   test('can create multi signing blobs', function () {
     const signingAccount = 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN'
-    const signingJson = _.assign({}, tx_json, { SigningPubKey: '' })
+    const signingJson = Object.assign({}, tx_json, { SigningPubKey: '' })
     const actual = encodeForMultisigning(signingJson, signingAccount)
     expect(actual).toBe(
       ['534D5400', // signingPrefix
@@ -3,7 +3,7 @@ const { coreTypes } = require('../dist/types')
 const { SerializedType } = require('../dist/types/serialized-type')

 describe('SerializedType interfaces', () => {
-  _.forOwn(coreTypes, (Value, name) => {
+  Object.entries(coreTypes).forEach(([name, Value]) => {
     test(`${name} has a \`from\` static constructor`, () => {
       expect(Value.from && Value.from !== Array.from).toBe(true)
     })
@@ -27,7 +27,7 @@ describe('SerializedType interfaces', () => {
       expect(Value.from(newJSON).toJSON()).toEqual(newJSON)
     })
     describe(`${name} supports all methods of the SerializedType mixin`, () => {
-      _.keys(SerializedType.prototype).forEach(k => {
+      Object.keys(SerializedType.prototype).forEach(k => {
        test(`new ${name}.prototype.${k} !== undefined`, () => {
          expect(Value.prototype[k]).not.toBe(undefined)
        })
@@ -1,5 +1,4 @@
 const fs = require('fs')
-const { parseBytes } = require('../dist/utils/bytes-utils')

 function hexOnly (hex) {
   return hex.replace(/[^a-fA-F0-9]/g, '')
@@ -7,8 +6,8 @@ function hexOnly (hex) {

 function unused () {}

-function parseHexOnly (hex, to) {
-  return parseBytes(hexOnly(hex), to)
+function parseHexOnly (hex) {
+  return Buffer.from(hexOnly(hex), 'hex');
 }

 function loadFixture (relativePath) {
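The new parseHexOnly is small enough to read in isolation; combined with hexOnly it tolerates whitespace and other separators in fixture strings:

// parseHexOnly after this change, stripped down: remove non-hex characters,
// then decode the remainder straight into a Buffer.
const hexOnly = (hex: string): string => hex.replace(/[^a-fA-F0-9]/g, "");
const parseHexOnly = (hex: string): Buffer => Buffer.from(hexOnly(hex), "hex");

console.log(parseHexOnly("00 AA\nff").toString("hex")); // "00aaff"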
File diff suppressed because it is too large