run prettier on all packages

This commit is contained in:
Greg Weisbrod
2021-11-12 02:41:28 -05:00
parent 3b523d7e37
commit afd6aadaf1
51 changed files with 2086 additions and 2062 deletions

View File

@@ -63,7 +63,6 @@ module.exports = {
'tsdoc/syntax': 'off',
'import/no-commonjs': 'off',
'import/order': 'off',
'prettier/prettier': 'off',
'no-restricted-syntax': 'off',
'guard-for-in': 'off',
'object-shorthand': 'off',

View File

@@ -8,21 +8,29 @@ import {
decodeNodePublic,
encodeAccountPublic,
decodeAccountPublic,
isValidClassicAddress
isValidClassicAddress,
} from './xrp-codec'
import * as assert from 'assert'
const PREFIX_BYTES = {
MAIN: Buffer.from([0x05, 0x44]), // 5, 68
TEST: Buffer.from([0x04, 0x93]) // 4, 147
TEST: Buffer.from([0x04, 0x93]), // 4, 147
}
function classicAddressToXAddress(classicAddress: string, tag: number | false, test: boolean): string {
function classicAddressToXAddress(
classicAddress: string,
tag: number | false,
test: boolean,
): string {
const accountId = decodeAccountID(classicAddress)
return encodeXAddress(accountId, tag, test)
}
function encodeXAddress(accountId: Buffer, tag: number | false, test: boolean): string {
function encodeXAddress(
accountId: Buffer,
tag: number | false,
test: boolean,
): string {
if (accountId.length !== 20) {
// RIPEMD160 is 160 bits = 20 bytes
throw new Error('Account ID must be 20 bytes')
@@ -35,41 +43,44 @@ function encodeXAddress(accountId: Buffer, tag: number | false, test: boolean):
if (tag === false) {
tag = 0
}
const bytes = Buffer.concat(
[
test ? PREFIX_BYTES.TEST : PREFIX_BYTES.MAIN,
accountId,
Buffer.from(
[
flag, // 0x00 if no tag, 0x01 if 32-bit tag
tag & 0xff, // first byte
(tag >> 8) & 0xff, // second byte
(tag >> 16) & 0xff, // third byte
(tag >> 24) & 0xff, // fourth byte
0, 0, 0, 0 // four zero bytes (reserved for 64-bit tags)
]
)
]
)
const bytes = Buffer.concat([
test ? PREFIX_BYTES.TEST : PREFIX_BYTES.MAIN,
accountId,
Buffer.from([
flag, // 0x00 if no tag, 0x01 if 32-bit tag
tag & 0xff, // first byte
(tag >> 8) & 0xff, // second byte
(tag >> 16) & 0xff, // third byte
(tag >> 24) & 0xff, // fourth byte
0,
0,
0,
0, // four zero bytes (reserved for 64-bit tags)
]),
])
const xAddress = codec.encodeChecked(bytes)
return xAddress
}
function xAddressToClassicAddress(xAddress: string): {classicAddress: string, tag: number | false, test: boolean} {
const {
accountId,
tag,
test
} = decodeXAddress(xAddress)
function xAddressToClassicAddress(xAddress: string): {
classicAddress: string
tag: number | false
test: boolean
} {
const { accountId, tag, test } = decodeXAddress(xAddress)
const classicAddress = encodeAccountID(accountId)
return {
classicAddress,
tag,
test
test,
}
}
function decodeXAddress(xAddress: string): {accountId: Buffer, tag: number | false, test: boolean} {
function decodeXAddress(xAddress: string): {
accountId: Buffer
tag: number | false
test: boolean
} {
const decoded = codec.decodeChecked(xAddress)
const test = isBufferForTestAddress(decoded)
const accountId = decoded.slice(2, 22)
@@ -77,7 +88,7 @@ function decodeXAddress(xAddress: string): {accountId: Buffer, tag: number | fal
return {
accountId,
tag,
test
test,
}
}
@@ -103,8 +114,10 @@ function tagFromBuffer(buf: Buffer): number | false {
return buf[23] + buf[24] * 0x100 + buf[25] * 0x10000 + buf[26] * 0x1000000
}
assert.strictEqual(flag, 0, 'flag must be zero to indicate no tag')
assert.ok(Buffer.from('0000000000000000', 'hex').equals(buf.slice(23, 23 + 8)),
'remaining bytes must be zero')
assert.ok(
Buffer.from('0000000000000000', 'hex').equals(buf.slice(23, 23 + 8)),
'remaining bytes must be zero',
)
return false
}
@@ -132,5 +145,5 @@ export {
encodeXAddress, // Encode account ID, tag, and network ID to X-address
xAddressToClassicAddress, // Decode X-address to account ID, tag, and network ID
decodeXAddress, // Convert X-address to classic address, tag, and network ID
isValidXAddress // Check whether an X-address (X...) is valid
isValidXAddress, // Check whether an X-address (X...) is valid
}

View File

@@ -29,15 +29,15 @@ function isSequence(val: Sequence | number): val is Sequence {
}
/**
* Concatenate all `arguments` into a single array. Each argument can be either
* a single element or a sequence, which has a `length` property and supports
* element retrieval via sequence[ix].
*
* > concatArgs(1, [2, 3], Buffer.from([4,5]), new Uint8Array([6, 7]));
* [1,2,3,4,5,6,7]
*
* @returns {number[]} Array of concatenated arguments
*/
* Concatenate all `arguments` into a single array. Each argument can be either
* a single element or a sequence, which has a `length` property and supports
* element retrieval via sequence[ix].
*
* > concatArgs(1, [2, 3], Buffer.from([4,5]), new Uint8Array([6, 7]));
* [1,2,3,4,5,6,7]
*
* @returns {number[]} Array of concatenated arguments
*/
export function concatArgs(...args: (number | Sequence)[]): number[] {
const ret: number[] = []

View File

@@ -3,7 +3,7 @@
*/
import * as baseCodec from 'base-x'
import {seqEqual, concatArgs} from './utils'
import { seqEqual, concatArgs } from './utils'
class Codec {
sha256: (bytes: Uint8Array) => Buffer
@@ -12,7 +12,7 @@ class Codec {
base: number
constructor(options: {
sha256: (bytes: Uint8Array) => Buffer,
sha256: (bytes: Uint8Array) => Buffer
alphabet: string
}) {
this.sha256 = options.sha256
@@ -27,18 +27,27 @@ class Codec {
* @param bytes Buffer of data to encode.
* @param opts Options object including the version bytes and the expected length of the data to encode.
*/
encode(bytes: Buffer, opts: {
versions: number[],
expectedLength: number
}): string {
encode(
bytes: Buffer,
opts: {
versions: number[]
expectedLength: number
},
): string {
const versions = opts.versions
return this.encodeVersioned(bytes, versions, opts.expectedLength)
}
encodeVersioned(bytes: Buffer, versions: number[], expectedLength: number): string {
encodeVersioned(
bytes: Buffer,
versions: number[],
expectedLength: number,
): string {
if (expectedLength && bytes.length !== expectedLength) {
throw new Error('unexpected_payload_length: bytes.length does not match expectedLength.' +
' Ensure that the bytes are a Buffer.')
throw new Error(
'unexpected_payload_length: bytes.length does not match expectedLength.' +
' Ensure that the bytes are a Buffer.',
)
}
return this.encodeChecked(Buffer.from(concatArgs(versions, bytes)))
}
@@ -58,13 +67,16 @@ class Codec {
* @param base58string Base58Check-encoded string to decode.
* @param opts Options object including the version byte(s) and the expected length of the data after decoding.
*/
decode(base58string: string, opts: {
versions: (number | number[])[],
expectedLength?: number,
versionTypes?: ['ed25519', 'secp256k1']
}): {
version: number[],
bytes: Buffer,
decode(
base58string: string,
opts: {
versions: (number | number[])[]
expectedLength?: number
versionTypes?: ['ed25519', 'secp256k1']
},
): {
version: number[]
bytes: Buffer
type: string | null
} {
const versions = opts.versions
@@ -73,25 +85,33 @@ class Codec {
const withoutSum = this.decodeChecked(base58string)
if (versions.length > 1 && !opts.expectedLength) {
throw new Error('expectedLength is required because there are >= 2 possible versions')
throw new Error(
'expectedLength is required because there are >= 2 possible versions',
)
}
const versionLengthGuess = typeof versions[0] === 'number' ? 1 : (versions[0] as number[]).length
const payloadLength = opts.expectedLength || withoutSum.length - versionLengthGuess
const versionLengthGuess =
typeof versions[0] === 'number' ? 1 : (versions[0] as number[]).length
const payloadLength =
opts.expectedLength || withoutSum.length - versionLengthGuess
const versionBytes = withoutSum.slice(0, -payloadLength)
const payload = withoutSum.slice(-payloadLength)
for (let i = 0; i < versions.length; i++) {
const version: number[] = Array.isArray(versions[i]) ? versions[i] as number[] : [versions[i] as number]
const version: number[] = Array.isArray(versions[i])
? (versions[i] as number[])
: [versions[i] as number]
if (seqEqual(versionBytes, version)) {
return {
version,
bytes: payload,
type: types ? types[i] : null
type: types ? types[i] : null,
}
}
}
throw new Error('version_invalid: version bytes do not match any of the provided version(s)')
throw new Error(
'version_invalid: version bytes do not match any of the provided version(s)',
)
}
decodeChecked(base58string: string): Buffer {
@@ -127,15 +147,15 @@ const createHash = require('create-hash')
const ACCOUNT_ID = 0 // Account address (20 bytes)
const ACCOUNT_PUBLIC_KEY = 0x23 // Account public key (33 bytes)
const FAMILY_SEED = 0x21 // 33; Seed value (for secret keys) (16 bytes)
const NODE_PUBLIC = 0x1C // 28; Validation public key (33 bytes)
const NODE_PUBLIC = 0x1c // 28; Validation public key (33 bytes)
const ED25519_SEED = [0x01, 0xE1, 0x4B] // [1, 225, 75]
const ED25519_SEED = [0x01, 0xe1, 0x4b] // [1, 225, 75]
const codecOptions = {
sha256: function(bytes: Uint8Array) {
sha256: function (bytes: Uint8Array) {
return createHash('sha256').update(Buffer.from(bytes)).digest()
},
alphabet: 'rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz'
alphabet: 'rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz',
}
const codecWithXrpAlphabet = new Codec(codecOptions)
@@ -144,7 +164,10 @@ export const codec = codecWithXrpAlphabet
// entropy is a Buffer of size 16
// type is 'ed25519' or 'secp256k1'
export function encodeSeed(entropy: Buffer, type: 'ed25519' | 'secp256k1'): string {
export function encodeSeed(
entropy: Buffer,
type: 'ed25519' | 'secp256k1',
): string {
if (entropy.length !== 16) {
throw new Error('entropy must have length 16')
}
@@ -152,56 +175,59 @@ export function encodeSeed(entropy: Buffer, type: 'ed25519' | 'secp256k1'): stri
expectedLength: 16,
// for secp256k1, use `FAMILY_SEED`
versions: type === 'ed25519' ? ED25519_SEED : [FAMILY_SEED]
versions: type === 'ed25519' ? ED25519_SEED : [FAMILY_SEED],
}
// prefixes entropy with version bytes
return codecWithXrpAlphabet.encode(entropy, opts)
}
export function decodeSeed(seed: string, opts: {
versionTypes: ['ed25519', 'secp256k1'],
versions: (number | number[])[]
expectedLength: number
} = {
versionTypes: ['ed25519', 'secp256k1'],
versions: [ED25519_SEED, FAMILY_SEED],
expectedLength: 16
}) {
export function decodeSeed(
seed: string,
opts: {
versionTypes: ['ed25519', 'secp256k1']
versions: (number | number[])[]
expectedLength: number
} = {
versionTypes: ['ed25519', 'secp256k1'],
versions: [ED25519_SEED, FAMILY_SEED],
expectedLength: 16,
},
) {
return codecWithXrpAlphabet.decode(seed, opts)
}
export function encodeAccountID(bytes: Buffer): string {
const opts = {versions: [ACCOUNT_ID], expectedLength: 20}
const opts = { versions: [ACCOUNT_ID], expectedLength: 20 }
return codecWithXrpAlphabet.encode(bytes, opts)
}
export const encodeAddress = encodeAccountID
export function decodeAccountID(accountId: string): Buffer {
const opts = {versions: [ACCOUNT_ID], expectedLength: 20}
const opts = { versions: [ACCOUNT_ID], expectedLength: 20 }
return codecWithXrpAlphabet.decode(accountId, opts).bytes
}
export const decodeAddress = decodeAccountID
export function decodeNodePublic(base58string: string): Buffer {
const opts = {versions: [NODE_PUBLIC], expectedLength: 33}
const opts = { versions: [NODE_PUBLIC], expectedLength: 33 }
return codecWithXrpAlphabet.decode(base58string, opts).bytes
}
export function encodeNodePublic(bytes: Buffer): string {
const opts = {versions: [NODE_PUBLIC], expectedLength: 33}
const opts = { versions: [NODE_PUBLIC], expectedLength: 33 }
return codecWithXrpAlphabet.encode(bytes, opts)
}
export function encodeAccountPublic(bytes: Buffer): string {
const opts = {versions: [ACCOUNT_PUBLIC_KEY], expectedLength: 33}
const opts = { versions: [ACCOUNT_PUBLIC_KEY], expectedLength: 33 }
return codecWithXrpAlphabet.encode(bytes, opts)
}
export function decodeAccountPublic(base58string: string): Buffer {
const opts = {versions: [ACCOUNT_PUBLIC_KEY], expectedLength: 33}
const opts = { versions: [ACCOUNT_PUBLIC_KEY], expectedLength: 33 }
return codecWithXrpAlphabet.decode(base58string, opts).bytes
}

View File

@@ -74,7 +74,6 @@ module.exports = {
'import/newline-after-import': 'off',
'node/global-require': 'off',
'consistent-default-export-name/default-import-match-filename': 'off',
'prettier/prettier': 'off',
'jsdoc/require-throws': 'off',
'jsdoc/require-description-complete-sentence': 'off',
'jsdoc/require-jsdoc': 'off',

View File

@@ -1,16 +1,16 @@
/* eslint-disable func-style */
import { coreTypes } from "./types";
import { BinaryParser } from "./serdes/binary-parser";
import { AccountID } from "./types/account-id";
import { HashPrefix } from "./hash-prefixes";
import { BinarySerializer, BytesList } from "./serdes/binary-serializer";
import { sha512Half, transactionID } from "./hashes";
import { FieldInstance } from "./enums";
import { STObject } from "./types/st-object";
import { JsonObject } from "./types/serialized-type";
import { Buffer } from "buffer/";
import * as bigInt from "big-integer";
import { coreTypes } from './types'
import { BinaryParser } from './serdes/binary-parser'
import { AccountID } from './types/account-id'
import { HashPrefix } from './hash-prefixes'
import { BinarySerializer, BytesList } from './serdes/binary-serializer'
import { sha512Half, transactionID } from './hashes'
import { FieldInstance } from './enums'
import { STObject } from './types/st-object'
import { JsonObject } from './types/serialized-type'
import { Buffer } from 'buffer/'
import * as bigInt from 'big-integer'
/**
* Construct a BinaryParser
@@ -18,7 +18,7 @@ import * as bigInt from "big-integer";
* @param bytes hex-string to construct BinaryParser from
* @returns A BinaryParser
*/
const makeParser = (bytes: string): BinaryParser => new BinaryParser(bytes);
const makeParser = (bytes: string): BinaryParser => new BinaryParser(bytes)
/**
* Parse BinaryParser into JSON
@@ -27,7 +27,7 @@ const makeParser = (bytes: string): BinaryParser => new BinaryParser(bytes);
* @returns JSON for the bytes in the BinaryParser
*/
const readJSON = (parser: BinaryParser): JsonObject =>
(parser.readType(coreTypes.STObject) as STObject).toJSON();
(parser.readType(coreTypes.STObject) as STObject).toJSON()
/**
* Parse a hex-string into its JSON interpretation
@@ -35,7 +35,7 @@ const readJSON = (parser: BinaryParser): JsonObject =>
* @param bytes hex-string to parse into JSON
* @returns JSON
*/
const binaryToJSON = (bytes: string): JsonObject => readJSON(makeParser(bytes));
const binaryToJSON = (bytes: string): JsonObject => readJSON(makeParser(bytes))
/**
* Interface for passing parameters to SerializeObject
@@ -43,9 +43,9 @@ const binaryToJSON = (bytes: string): JsonObject => readJSON(makeParser(bytes));
* @field set signingFieldOnly to true if you want to serialize only signing fields
*/
interface OptionObject {
prefix?: Buffer;
suffix?: Buffer;
signingFieldsOnly?: boolean;
prefix?: Buffer
suffix?: Buffer
signingFieldsOnly?: boolean
}
/**
@@ -56,23 +56,23 @@ interface OptionObject {
* @returns A Buffer containing the serialized object
*/
function serializeObject(object: JsonObject, opts: OptionObject = {}): Buffer {
const { prefix, suffix, signingFieldsOnly = false } = opts;
const bytesList = new BytesList();
const { prefix, suffix, signingFieldsOnly = false } = opts
const bytesList = new BytesList()
if (prefix) {
bytesList.put(prefix);
bytesList.put(prefix)
}
const filter = signingFieldsOnly
? (f: FieldInstance): boolean => f.isSigningField
: undefined;
coreTypes.STObject.from(object, filter).toBytesSink(bytesList);
: undefined
coreTypes.STObject.from(object, filter).toBytesSink(bytesList)
if (suffix) {
bytesList.put(suffix);
bytesList.put(suffix)
}
return bytesList.toBytes();
return bytesList.toBytes()
}
/**
@@ -84,17 +84,17 @@ function serializeObject(object: JsonObject, opts: OptionObject = {}): Buffer {
*/
function signingData(
transaction: JsonObject,
prefix: Buffer = HashPrefix.transactionSig
prefix: Buffer = HashPrefix.transactionSig,
): Buffer {
return serializeObject(transaction, { prefix, signingFieldsOnly: true });
return serializeObject(transaction, { prefix, signingFieldsOnly: true })
}
/**
* Interface describing fields required for a Claim
*/
interface ClaimObject extends JsonObject {
channel: string;
amount: string | number;
channel: string
amount: string | number
}
/**
@@ -104,17 +104,17 @@ interface ClaimObject extends JsonObject {
* @returns the serialized object with appropriate prefix
*/
function signingClaimData(claim: ClaimObject): Buffer {
const num = bigInt(String(claim.amount));
const prefix = HashPrefix.paymentChannelClaim;
const channel = coreTypes.Hash256.from(claim.channel).toBytes();
const amount = coreTypes.UInt64.from(num).toBytes();
const num = bigInt(String(claim.amount))
const prefix = HashPrefix.paymentChannelClaim
const channel = coreTypes.Hash256.from(claim.channel).toBytes()
const amount = coreTypes.UInt64.from(num).toBytes()
const bytesList = new BytesList();
const bytesList = new BytesList()
bytesList.put(prefix);
bytesList.put(channel);
bytesList.put(amount);
return bytesList.toBytes();
bytesList.put(prefix)
bytesList.put(channel)
bytesList.put(amount)
return bytesList.toBytes()
}
/**
@@ -126,15 +126,15 @@ function signingClaimData(claim: ClaimObject): Buffer {
*/
function multiSigningData(
transaction: JsonObject,
signingAccount: string | AccountID
signingAccount: string | AccountID,
): Buffer {
const prefix = HashPrefix.transactionMultiSig;
const suffix = coreTypes.AccountID.from(signingAccount).toBytes();
const prefix = HashPrefix.transactionMultiSig
const suffix = coreTypes.AccountID.from(signingAccount).toBytes()
return serializeObject(transaction, {
prefix,
suffix,
signingFieldsOnly: true,
});
})
}
export {
@@ -151,4 +151,4 @@ export {
binaryToJSON,
sha512Half,
transactionID,
};
}

View File

@@ -4,14 +4,14 @@ import {
LedgerEntryType,
Type,
TransactionResult,
} from "./enums";
import * as types from "./types";
import * as binary from "./binary";
import { ShaMap } from "./shamap";
import * as ledgerHashes from "./ledger-hashes";
import * as hashes from "./hashes";
import { quality } from "./quality";
import { HashPrefix } from "./hash-prefixes";
} from './enums'
import * as types from './types'
import * as binary from './binary'
import { ShaMap } from './shamap'
import * as ledgerHashes from './ledger-hashes'
import * as hashes from './hashes'
import { quality } from './quality'
import { HashPrefix } from './hash-prefixes'
export {
hashes,
@@ -26,4 +26,4 @@ export {
HashPrefix,
ShaMap,
types,
};
}

View File

@@ -1,58 +1,58 @@
import * as enums from "./definitions.json";
import { SerializedType } from "../types/serialized-type";
import { Buffer } from "buffer/";
import * as enums from './definitions.json'
import { SerializedType } from '../types/serialized-type'
import { Buffer } from 'buffer/'
const TYPE_WIDTH = 2;
const LEDGER_ENTRY_WIDTH = 2;
const TRANSACTION_TYPE_WIDTH = 2;
const TRANSACTION_RESULT_WIDTH = 1;
const TYPE_WIDTH = 2
const LEDGER_ENTRY_WIDTH = 2
const TRANSACTION_TYPE_WIDTH = 2
const TRANSACTION_RESULT_WIDTH = 1
/*
* @brief: Serialize a field based on type_code and Field.nth
*/
function fieldHeader(type: number, nth: number): Buffer {
const header: Array<number> = [];
const header: Array<number> = []
if (type < 16) {
if (nth < 16) {
header.push((type << 4) | nth);
header.push((type << 4) | nth)
} else {
header.push(type << 4, nth);
header.push(type << 4, nth)
}
} else if (nth < 16) {
header.push(nth, type);
header.push(nth, type)
} else {
header.push(0, type, nth);
header.push(0, type, nth)
}
return Buffer.from(header);
return Buffer.from(header)
}
/*
* @brief: Bytes, name, and ordinal representing one type, ledger_type, transaction type, or result
*/
class Bytes {
readonly bytes: Uint8Array;
readonly bytes: Uint8Array
constructor(
readonly name: string,
readonly ordinal: number,
readonly ordinalWidth: number
readonly ordinalWidth: number,
) {
this.bytes = Buffer.alloc(ordinalWidth);
this.bytes = Buffer.alloc(ordinalWidth)
for (let i = 0; i < ordinalWidth; i++) {
this.bytes[ordinalWidth - i - 1] = (ordinal >>> (i * 8)) & 0xff;
this.bytes[ordinalWidth - i - 1] = (ordinal >>> (i * 8)) & 0xff
}
}
toJSON(): string {
return this.name;
return this.name
}
toBytesSink(sink): void {
sink.put(this.bytes);
sink.put(this.bytes)
}
toBytes(): Uint8Array {
return this.bytes;
return this.bytes
}
}
@@ -62,17 +62,17 @@ class Bytes {
class BytesLookup {
constructor(types: Record<string, number>, readonly ordinalWidth: number) {
Object.entries(types).forEach(([k, v]) => {
this[k] = new Bytes(k, v, ordinalWidth);
this[v.toString()] = this[k];
});
this[k] = new Bytes(k, v, ordinalWidth)
this[v.toString()] = this[k]
})
}
from(value: Bytes | string): Bytes {
return value instanceof Bytes ? value : (this[value] as Bytes);
return value instanceof Bytes ? value : (this[value] as Bytes)
}
fromParser(parser): Bytes {
return this.from(parser.readUIntN(this.ordinalWidth).toString());
return this.from(parser.readUIntN(this.ordinalWidth).toString())
}
}
@@ -80,28 +80,28 @@ class BytesLookup {
* type FieldInfo is the type of the objects containing information about each field in definitions.json
*/
interface FieldInfo {
nth: number;
isVLEncoded: boolean;
isSerialized: boolean;
isSigningField: boolean;
type: string;
nth: number
isVLEncoded: boolean
isSerialized: boolean
isSigningField: boolean
type: string
}
interface FieldInstance {
readonly nth: number;
readonly isVariableLengthEncoded: boolean;
readonly isSerialized: boolean;
readonly isSigningField: boolean;
readonly type: Bytes;
readonly ordinal: number;
readonly name: string;
readonly header: Buffer;
readonly associatedType: typeof SerializedType;
readonly nth: number
readonly isVariableLengthEncoded: boolean
readonly isSerialized: boolean
readonly isSigningField: boolean
readonly type: Bytes
readonly ordinal: number
readonly name: string
readonly header: Buffer
readonly associatedType: typeof SerializedType
}
function buildField([name, info]: [string, FieldInfo]): FieldInstance {
const typeOrdinal = enums.TYPES[info.type];
const field = fieldHeader(typeOrdinal, info.nth);
const typeOrdinal = enums.TYPES[info.type]
const field = fieldHeader(typeOrdinal, info.nth)
return {
name: name,
nth: info.nth,
@@ -112,7 +112,7 @@ function buildField([name, info]: [string, FieldInfo]): FieldInstance {
type: new Bytes(info.type, typeOrdinal, TYPE_WIDTH),
header: field,
associatedType: SerializedType, // For later assignment in ./types/index.js
};
}
}
/*
@@ -121,30 +121,30 @@ function buildField([name, info]: [string, FieldInfo]): FieldInstance {
class FieldLookup {
constructor(fields: Array<[string, FieldInfo]>) {
fields.forEach(([k, v]) => {
this[k] = buildField([k, v]);
this[this[k].ordinal.toString()] = this[k];
});
this[k] = buildField([k, v])
this[this[k].ordinal.toString()] = this[k]
})
}
fromString(value: string): FieldInstance {
return this[value] as FieldInstance;
return this[value] as FieldInstance
}
}
const Type = new BytesLookup(enums.TYPES, TYPE_WIDTH);
const Type = new BytesLookup(enums.TYPES, TYPE_WIDTH)
const LedgerEntryType = new BytesLookup(
enums.LEDGER_ENTRY_TYPES,
LEDGER_ENTRY_WIDTH
);
LEDGER_ENTRY_WIDTH,
)
const TransactionType = new BytesLookup(
enums.TRANSACTION_TYPES,
TRANSACTION_TYPE_WIDTH
);
TRANSACTION_TYPE_WIDTH,
)
const TransactionResult = new BytesLookup(
enums.TRANSACTION_RESULTS,
TRANSACTION_RESULT_WIDTH
);
const Field = new FieldLookup(enums.FIELDS as Array<[string, FieldInfo]>);
TRANSACTION_RESULT_WIDTH,
)
const Field = new FieldLookup(enums.FIELDS as Array<[string, FieldInfo]>)
export {
Field,
@@ -153,4 +153,4 @@ export {
LedgerEntryType,
TransactionResult,
TransactionType,
};
}

View File

@@ -97,38 +97,38 @@ const input = {
tecKILLED: 150,
tecHAS_OBLIGATIONS: 151,
tecTOO_SOON: 152,
};
}
let startingFromTemBADSENDXRPPATHS = -284;
let startingFromTemBADSENDXRPPATHS = -284
let startingFromTefFAILURE = -199;
let startingFromTefFAILURE = -199
let startingFromTerRETRY = -99;
let startingFromTerRETRY = -99
const tesSUCCESS = 0;
const tesSUCCESS = 0
let startingFromTecCLAIM = 100;
let startingFromTecCLAIM = 100
const startingFromTecDIRFULL = 121;
const startingFromTecDIRFULL = 121
let previousKey = "tem";
let previousKey = 'tem'
Object.keys(input).forEach((key) => {
if (key.substring(0, 3) !== previousKey.substring(0, 3)) {
console.log();
previousKey = key;
console.log()
previousKey = key
}
if (key.substring(0, 3) === "tem") {
console.log(` "${key}": ${startingFromTemBADSENDXRPPATHS++},`);
} else if (key.substring(0, 3) === "tef") {
console.log(` "${key}": ${startingFromTefFAILURE++},`);
} else if (key.substring(0, 3) === "ter") {
console.log(` "${key}": ${startingFromTerRETRY++},`);
} else if (key.substring(0, 3) === "tes") {
console.log(` "${key}": ${tesSUCCESS},`);
} else if (key.substring(0, 3) === "tec") {
if (key === "tecDIR_FULL") {
startingFromTecCLAIM = startingFromTecDIRFULL;
if (key.substring(0, 3) === 'tem') {
console.log(` "${key}": ${startingFromTemBADSENDXRPPATHS++},`)
} else if (key.substring(0, 3) === 'tef') {
console.log(` "${key}": ${startingFromTefFAILURE++},`)
} else if (key.substring(0, 3) === 'ter') {
console.log(` "${key}": ${startingFromTerRETRY++},`)
} else if (key.substring(0, 3) === 'tes') {
console.log(` "${key}": ${tesSUCCESS},`)
} else if (key.substring(0, 3) === 'tec') {
if (key === 'tecDIR_FULL') {
startingFromTecCLAIM = startingFromTecDIRFULL
}
console.log(` "${key}": ${startingFromTecCLAIM++},`);
console.log(` "${key}": ${startingFromTecCLAIM++},`)
}
});
})

View File

@@ -1,4 +1,4 @@
import { Buffer } from "buffer/";
import { Buffer } from 'buffer/'
/**
* Write a 32 bit integer to a Buffer
@@ -7,9 +7,9 @@ import { Buffer } from "buffer/";
* @returns a buffer with the bytes representation of uint32
*/
function bytes(uint32: number): Buffer {
const result = Buffer.alloc(4);
result.writeUInt32BE(uint32, 0);
return result;
const result = Buffer.alloc(4)
result.writeUInt32BE(uint32, 0)
return result
}
/**
@@ -35,6 +35,6 @@ const HashPrefix: Record<string, Buffer> = {
proposal: bytes(0x50525000),
// payment channel claim
paymentChannelClaim: bytes(0x434c4d00),
};
}
export { HashPrefix };
export { HashPrefix }

View File

@@ -1,15 +1,15 @@
import { HashPrefix } from "./hash-prefixes";
import * as createHash from "create-hash";
import { Hash256 } from "./types/hash-256";
import { BytesList } from "./serdes/binary-serializer";
import { Buffer } from "buffer/";
import { HashPrefix } from './hash-prefixes'
import * as createHash from 'create-hash'
import { Hash256 } from './types/hash-256'
import { BytesList } from './serdes/binary-serializer'
import { Buffer } from 'buffer/'
/**
* Class for hashing with SHA512
* @extends BytesList So SerializedTypes can write bytes to a Sha512Half
*/
class Sha512Half extends BytesList {
private hash: createHash = createHash("sha512");
private hash: createHash = createHash('sha512')
/**
* Construct a new Sha512Hash and write bytes this.hash
@@ -18,7 +18,7 @@ class Sha512Half extends BytesList {
* @returns the new Sha512Hash object
*/
static put(bytes: Buffer): Sha512Half {
return new Sha512Half().put(bytes);
return new Sha512Half().put(bytes)
}
/**
@@ -28,8 +28,8 @@ class Sha512Half extends BytesList {
* @returns the Sha512 object
*/
put(bytes: Buffer): Sha512Half {
this.hash.update(bytes);
return this;
this.hash.update(bytes)
return this
}
/**
@@ -38,8 +38,8 @@ class Sha512Half extends BytesList {
* @returns half of a SHA512 hash
*/
finish256(): Buffer {
const bytes: Buffer = this.hash.digest();
return bytes.slice(0, 32);
const bytes: Buffer = this.hash.digest()
return bytes.slice(0, 32)
}
/**
@@ -48,7 +48,7 @@ class Sha512Half extends BytesList {
* @returns a Hash256 object
*/
finish(): Hash256 {
return new Hash256(this.finish256());
return new Hash256(this.finish256())
}
}
@@ -59,9 +59,9 @@ class Sha512Half extends BytesList {
* @returns the sha512half hash of the arguments.
*/
function sha512Half(...args: Buffer[]): Buffer {
const hash = new Sha512Half();
args.forEach((a) => hash.put(a));
return hash.finish256();
const hash = new Sha512Half()
args.forEach((a) => hash.put(a))
return hash.finish256()
}
/**
@@ -71,7 +71,7 @@ function sha512Half(...args: Buffer[]): Buffer {
* @returns a Hash256 object
*/
function transactionID(serialized: Buffer): Hash256 {
return new Hash256(sha512Half(HashPrefix.transactionID, serialized));
return new Hash256(sha512Half(HashPrefix.transactionID, serialized))
}
export { Sha512Half, sha512Half, transactionID };
export { Sha512Half, sha512Half, transactionID }

View File

@@ -1,15 +1,15 @@
import * as assert from "assert";
import { quality, binary } from "./coretypes";
import { decodeLedgerData } from "./ledger-hashes";
import { ClaimObject } from "./binary";
import { JsonObject } from "./types/serialized-type";
import * as assert from 'assert'
import { quality, binary } from './coretypes'
import { decodeLedgerData } from './ledger-hashes'
import { ClaimObject } from './binary'
import { JsonObject } from './types/serialized-type'
const {
signingData,
signingClaimData,
multiSigningData,
binaryToJSON,
serializeObject,
} = binary;
} = binary
/**
* Decode a transaction
@@ -18,8 +18,8 @@ const {
* @returns the JSON representation of the transaction
*/
function decode(binary: string): JsonObject {
assert.ok(typeof binary === "string", "binary must be a hex string");
return binaryToJSON(binary);
assert.ok(typeof binary === 'string', 'binary must be a hex string')
return binaryToJSON(binary)
}
/**
@@ -29,10 +29,10 @@ function decode(binary: string): JsonObject {
* @returns A hex-string of the encoded transaction
*/
function encode(json: object): string {
assert.ok(typeof json === "object");
assert.ok(typeof json === 'object')
return serializeObject(json as JsonObject)
.toString("hex")
.toUpperCase();
.toString('hex')
.toUpperCase()
}
/**
@@ -43,10 +43,10 @@ function encode(json: object): string {
* @returns a hex string of the encoded transaction
*/
function encodeForSigning(json: object): string {
assert.ok(typeof json === "object");
assert.ok(typeof json === 'object')
return signingData(json as JsonObject)
.toString("hex")
.toUpperCase();
.toString('hex')
.toUpperCase()
}
/**
@@ -57,10 +57,10 @@ function encodeForSigning(json: object): string {
* @returns a hex string of the encoded transaction
*/
function encodeForSigningClaim(json: object): string {
assert.ok(typeof json === "object");
assert.ok(typeof json === 'object')
return signingClaimData(json as ClaimObject)
.toString("hex")
.toUpperCase();
.toString('hex')
.toUpperCase()
}
/**
@@ -71,11 +71,11 @@ function encodeForSigningClaim(json: object): string {
* @returns a hex string of the encoded transaction
*/
function encodeForMultisigning(json: object, signer: string): string {
assert.ok(typeof json === "object");
assert.equal(json["SigningPubKey"], "");
assert.ok(typeof json === 'object')
assert.equal(json['SigningPubKey'], '')
return multiSigningData(json as JsonObject, signer)
.toString("hex")
.toUpperCase();
.toString('hex')
.toUpperCase()
}
/**
@@ -85,8 +85,8 @@ function encodeForMultisigning(json: object, signer: string): string {
* @returns a hex-string representing the quality
*/
function encodeQuality(value: string): string {
assert.ok(typeof value === "string");
return quality.encode(value).toString("hex").toUpperCase();
assert.ok(typeof value === 'string')
return quality.encode(value).toString('hex').toUpperCase()
}
/**
@@ -96,8 +96,8 @@ function encodeQuality(value: string): string {
* @returns a string representing the quality
*/
function decodeQuality(value: string): string {
assert.ok(typeof value === "string");
return quality.decode(value).toString();
assert.ok(typeof value === 'string')
return quality.decode(value).toString()
}
export = {
@@ -109,4 +109,4 @@ export = {
encodeQuality,
decodeQuality,
decodeLedgerData,
};
}

View File

@@ -1,16 +1,16 @@
import * as assert from "assert";
import { ShaMap, ShaMapNode, ShaMapLeaf } from "./shamap";
import { HashPrefix } from "./hash-prefixes";
import { Sha512Half } from "./hashes";
import { BinarySerializer, serializeObject } from "./binary";
import { Hash256 } from "./types/hash-256";
import { STObject } from "./types/st-object";
import { UInt64 } from "./types/uint-64";
import { UInt32 } from "./types/uint-32";
import { UInt8 } from "./types/uint-8";
import { BinaryParser } from "./serdes/binary-parser";
import { JsonObject } from "./types/serialized-type";
import * as bigInt from "big-integer";
import * as assert from 'assert'
import { ShaMap, ShaMapNode, ShaMapLeaf } from './shamap'
import { HashPrefix } from './hash-prefixes'
import { Sha512Half } from './hashes'
import { BinarySerializer, serializeObject } from './binary'
import { Hash256 } from './types/hash-256'
import { STObject } from './types/st-object'
import { UInt64 } from './types/uint-64'
import { UInt32 } from './types/uint-32'
import { UInt8 } from './types/uint-8'
import { BinaryParser } from './serdes/binary-parser'
import { JsonObject } from './types/serialized-type'
import * as bigInt from 'big-integer'
/**
* Computes the hash of a list of objects
@@ -21,19 +21,19 @@ import * as bigInt from "big-integer";
*/
function computeHash(
itemizer: (item: JsonObject) => [Hash256?, ShaMapNode?, ShaMapLeaf?],
itemsJson: Array<JsonObject>
itemsJson: Array<JsonObject>,
): Hash256 {
const map = new ShaMap();
itemsJson.forEach((item) => map.addItem(...itemizer(item)));
return map.hash();
const map = new ShaMap()
itemsJson.forEach((item) => map.addItem(...itemizer(item)))
return map.hash()
}
/**
* Interface describing a transaction item
*/
interface transactionItemObject extends JsonObject {
hash: string;
metaData: JsonObject;
hash: string
metaData: JsonObject
}
/**
@@ -43,28 +43,28 @@ interface transactionItemObject extends JsonObject {
* @returns a tuple of index and item to be added to SHAMap
*/
function transactionItemizer(
json: transactionItemObject
json: transactionItemObject,
): [Hash256, ShaMapNode, undefined] {
assert.ok(json.hash);
const index = Hash256.from(json.hash);
assert.ok(json.hash)
const index = Hash256.from(json.hash)
const item = {
hashPrefix() {
return HashPrefix.transaction;
return HashPrefix.transaction
},
toBytesSink(sink) {
const serializer = new BinarySerializer(sink);
serializer.writeLengthEncoded(STObject.from(json));
serializer.writeLengthEncoded(STObject.from(json.metaData));
const serializer = new BinarySerializer(sink)
serializer.writeLengthEncoded(STObject.from(json))
serializer.writeLengthEncoded(STObject.from(json.metaData))
},
} as ShaMapNode;
return [index, item, undefined];
} as ShaMapNode
return [index, item, undefined]
}
/**
* Interface describing an entry item
*/
interface entryItemObject extends JsonObject {
index: string;
index: string
}
/**
@@ -74,19 +74,19 @@ interface entryItemObject extends JsonObject {
* @returns a tuple of index and item to be added to SHAMap
*/
function entryItemizer(
json: entryItemObject
json: entryItemObject,
): [Hash256, ShaMapNode, undefined] {
const index = Hash256.from(json.index);
const bytes = serializeObject(json);
const index = Hash256.from(json.index)
const bytes = serializeObject(json)
const item = {
hashPrefix() {
return HashPrefix.accountStateEntry;
return HashPrefix.accountStateEntry
},
toBytesSink(sink) {
sink.put(bytes);
sink.put(bytes)
},
} as ShaMapNode;
return [index, item, undefined];
} as ShaMapNode
return [index, item, undefined]
}
/**
@@ -97,9 +97,9 @@ function entryItemizer(
*/
function transactionTreeHash(param: Array<JsonObject>): Hash256 {
const itemizer = transactionItemizer as (
json: JsonObject
) => [Hash256, ShaMapNode, undefined];
return computeHash(itemizer, param);
json: JsonObject,
) => [Hash256, ShaMapNode, undefined]
return computeHash(itemizer, param)
}
/**
@@ -110,24 +110,24 @@ function transactionTreeHash(param: Array<JsonObject>): Hash256 {
*/
function accountStateHash(param: Array<JsonObject>): Hash256 {
const itemizer = entryItemizer as (
json: JsonObject
) => [Hash256, ShaMapNode, undefined];
return computeHash(itemizer, param);
json: JsonObject,
) => [Hash256, ShaMapNode, undefined]
return computeHash(itemizer, param)
}
/**
* Interface describing a ledger header
*/
interface ledgerObject {
ledger_index: number;
total_coins: string | number | bigInt.BigInteger;
parent_hash: string;
transaction_hash: string;
account_hash: string;
parent_close_time: number;
close_time: number;
close_time_resolution: number;
close_flags: number;
ledger_index: number
total_coins: string | number | bigInt.BigInteger
parent_hash: string
transaction_hash: string
account_hash: string
parent_close_time: number
close_time: number
close_time_resolution: number
close_flags: number
}
/**
@@ -137,23 +137,23 @@ interface ledgerObject {
* @returns the hash of header
*/
function ledgerHash(header: ledgerObject): Hash256 {
const hash = new Sha512Half();
hash.put(HashPrefix.ledgerHeader);
assert.ok(header.parent_close_time !== undefined);
assert.ok(header.close_flags !== undefined);
const hash = new Sha512Half()
hash.put(HashPrefix.ledgerHeader)
assert.ok(header.parent_close_time !== undefined)
assert.ok(header.close_flags !== undefined)
UInt32.from<number>(header.ledger_index).toBytesSink(hash);
UInt32.from<number>(header.ledger_index).toBytesSink(hash)
UInt64.from<bigInt.BigInteger>(
bigInt(String(header.total_coins))
).toBytesSink(hash);
Hash256.from<string>(header.parent_hash).toBytesSink(hash);
Hash256.from<string>(header.transaction_hash).toBytesSink(hash);
Hash256.from<string>(header.account_hash).toBytesSink(hash);
UInt32.from<number>(header.parent_close_time).toBytesSink(hash);
UInt32.from<number>(header.close_time).toBytesSink(hash);
UInt8.from<number>(header.close_time_resolution).toBytesSink(hash);
UInt8.from<number>(header.close_flags).toBytesSink(hash);
return hash.finish();
bigInt(String(header.total_coins)),
).toBytesSink(hash)
Hash256.from<string>(header.parent_hash).toBytesSink(hash)
Hash256.from<string>(header.transaction_hash).toBytesSink(hash)
Hash256.from<string>(header.account_hash).toBytesSink(hash)
UInt32.from<number>(header.parent_close_time).toBytesSink(hash)
UInt32.from<number>(header.close_time).toBytesSink(hash)
UInt8.from<number>(header.close_time_resolution).toBytesSink(hash)
UInt8.from<number>(header.close_flags).toBytesSink(hash)
return hash.finish()
}
/**
@@ -163,8 +163,8 @@ function ledgerHash(header: ledgerObject): Hash256 {
* @returns A JSON object describing a ledger header
*/
function decodeLedgerData(binary: string): object {
assert.ok(typeof binary === "string", "binary must be a hex string");
const parser = new BinaryParser(binary);
assert.ok(typeof binary === 'string', 'binary must be a hex string')
const parser = new BinaryParser(binary)
return {
ledger_index: parser.readUInt32(),
total_coins: parser.readType(UInt64).valueOf().toString(),
@@ -175,7 +175,7 @@ function decodeLedgerData(binary: string): object {
close_time: parser.readUInt32(),
close_time_resolution: parser.readUInt8(),
close_flags: parser.readUInt8(),
};
}
}
export { accountStateHash, transactionTreeHash, ledgerHash, decodeLedgerData };
export { accountStateHash, transactionTreeHash, ledgerHash, decodeLedgerData }

View File

@@ -1,7 +1,7 @@
import { coreTypes } from "./types";
import { Decimal } from "decimal.js";
import * as bigInt from "big-integer";
import { Buffer } from "buffer/";
import { coreTypes } from './types'
import { Decimal } from 'decimal.js'
import * as bigInt from 'big-integer'
import { Buffer } from 'buffer/'
/**
* class for encoding and decoding quality
@@ -14,12 +14,12 @@ class quality {
* @returns Serialized quality
*/
static encode(quality: string): Buffer {
const decimal = new Decimal(quality);
const exponent = decimal.e - 15;
const qualityString = decimal.times(`1e${-exponent}`).abs().toString();
const bytes = coreTypes.UInt64.from(bigInt(qualityString)).toBytes();
bytes[0] = exponent + 100;
return bytes;
const decimal = new Decimal(quality)
const exponent = decimal.e - 15
const qualityString = decimal.times(`1e${-exponent}`).abs().toString()
const bytes = coreTypes.UInt64.from(bigInt(qualityString)).toBytes()
bytes[0] = exponent + 100
return bytes
}
/**
@@ -29,11 +29,11 @@ class quality {
* @returns deserialized quality
*/
static decode(quality: string): Decimal {
const bytes = Buffer.from(quality, "hex").slice(-8);
const exponent = bytes[0] - 100;
const mantissa = new Decimal(`0x${bytes.slice(1).toString("hex")}`);
return mantissa.times(`1e${exponent}`);
const bytes = Buffer.from(quality, 'hex').slice(-8)
const exponent = bytes[0] - 100
const mantissa = new Decimal(`0x${bytes.slice(1).toString('hex')}`)
return mantissa.times(`1e${exponent}`)
}
}
export { quality };
export { quality }

View File

@@ -1,13 +1,13 @@
import * as assert from "assert";
import { Field, FieldInstance } from "../enums";
import { SerializedType } from "../types/serialized-type";
import { Buffer } from "buffer/";
import * as assert from 'assert'
import { Field, FieldInstance } from '../enums'
import { SerializedType } from '../types/serialized-type'
import { Buffer } from 'buffer/'
/**
* BinaryParser is used to compute fields and values from a HexString
*/
class BinaryParser {
private bytes: Buffer;
private bytes: Buffer
/**
* Initialize bytes to a hex string
@@ -15,7 +15,7 @@ class BinaryParser {
* @param hexBytes a hex string
*/
constructor(hexBytes: string) {
this.bytes = Buffer.from(hexBytes, "hex");
this.bytes = Buffer.from(hexBytes, 'hex')
}
/**
@@ -24,8 +24,8 @@ class BinaryParser {
* @returns The first byte of the BinaryParser
*/
peek(): number {
assert.ok(this.bytes.byteLength !== 0);
return this.bytes[0];
assert.ok(this.bytes.byteLength !== 0)
return this.bytes[0]
}
/**
@@ -34,8 +34,8 @@ class BinaryParser {
* @param n the number of bytes to skip
*/
skip(n: number): void {
assert.ok(n <= this.bytes.byteLength);
this.bytes = this.bytes.slice(n);
assert.ok(n <= this.bytes.byteLength)
this.bytes = this.bytes.slice(n)
}
/**
@@ -45,11 +45,11 @@ class BinaryParser {
* @return The bytes
*/
read(n: number): Buffer {
assert.ok(n <= this.bytes.byteLength);
assert.ok(n <= this.bytes.byteLength)
const slice = this.bytes.slice(0, n);
this.skip(n);
return slice;
const slice = this.bytes.slice(0, n)
this.skip(n)
return slice
}
/**
@@ -59,29 +59,29 @@ class BinaryParser {
* @return The number represented by those bytes
*/
readUIntN(n: number): number {
assert.ok(0 < n && n <= 4, "invalid n");
return this.read(n).reduce((a, b) => (a << 8) | b) >>> 0;
assert.ok(0 < n && n <= 4, 'invalid n')
return this.read(n).reduce((a, b) => (a << 8) | b) >>> 0
}
readUInt8(): number {
return this.readUIntN(1);
return this.readUIntN(1)
}
readUInt16(): number {
return this.readUIntN(2);
return this.readUIntN(2)
}
readUInt32(): number {
return this.readUIntN(4);
return this.readUIntN(4)
}
size(): number {
return this.bytes.byteLength;
return this.bytes.byteLength
}
end(customEnd?: number): boolean {
const length = this.bytes.byteLength;
return length === 0 || (customEnd !== undefined && length <= customEnd);
const length = this.bytes.byteLength
return length === 0 || (customEnd !== undefined && length <= customEnd)
}
/**
@@ -90,7 +90,7 @@ class BinaryParser {
* @return The variable length bytes
*/
readVariableLength(): Buffer {
return this.read(this.readVariableLengthLength());
return this.read(this.readVariableLengthLength())
}
/**
@@ -99,18 +99,18 @@ class BinaryParser {
* @return The length of the variable length encoded bytes
*/
readVariableLengthLength(): number {
const b1 = this.readUInt8();
const b1 = this.readUInt8()
if (b1 <= 192) {
return b1;
return b1
} else if (b1 <= 240) {
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
const b2 = this.readUInt8()
return 193 + (b1 - 193) * 256 + b2
} else if (b1 <= 254) {
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
const b2 = this.readUInt8()
const b3 = this.readUInt8()
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3
}
throw new Error("Invalid variable length indicator");
throw new Error('Invalid variable length indicator')
}
/**
@@ -119,25 +119,25 @@ class BinaryParser {
* @return Field ordinal
*/
readFieldOrdinal(): number {
let type = this.readUInt8();
let nth = type & 15;
type >>= 4;
let type = this.readUInt8()
let nth = type & 15
type >>= 4
if (type === 0) {
type = this.readUInt8();
type = this.readUInt8()
if (type === 0 || type < 16) {
throw new Error("Cannot read FieldOrdinal, type_code out of range");
throw new Error('Cannot read FieldOrdinal, type_code out of range')
}
}
if (nth === 0) {
nth = this.readUInt8();
nth = this.readUInt8()
if (nth === 0 || nth < 16) {
throw new Error("Cannot read FieldOrdinal, field_code out of range");
throw new Error('Cannot read FieldOrdinal, field_code out of range')
}
}
return (type << 16) | nth;
return (type << 16) | nth
}
/**
@@ -146,7 +146,7 @@ class BinaryParser {
* @return The field represented by the bytes at the head of the BinaryParser
*/
readField(): FieldInstance {
return Field.fromString(this.readFieldOrdinal().toString());
return Field.fromString(this.readFieldOrdinal().toString())
}
/**
@@ -156,7 +156,7 @@ class BinaryParser {
* @return The instance of that type read from the BinaryParser
*/
readType(type: typeof SerializedType): SerializedType {
return type.fromParser(this);
return type.fromParser(this)
}
/**
@@ -166,7 +166,7 @@ class BinaryParser {
* @return The type associated with the given field
*/
typeForField(field: FieldInstance): typeof SerializedType {
return field.associatedType;
return field.associatedType
}
/**
@@ -176,20 +176,20 @@ class BinaryParser {
* @return The value associated with the given field
*/
readFieldValue(field: FieldInstance): SerializedType {
const type = this.typeForField(field);
const type = this.typeForField(field)
if (!type) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
throw new Error(`unsupported: (${field.name}, ${field.type.name})`)
}
const sizeHint = field.isVariableLengthEncoded
? this.readVariableLengthLength()
: undefined;
const value = type.fromParser(this, sizeHint);
: undefined
const value = type.fromParser(this, sizeHint)
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `
);
`fromParser for (${field.name}, ${field.type.name}) -> undefined `,
)
}
return value;
return value
}
/**
@@ -198,9 +198,9 @@ class BinaryParser {
* @return The field and value
*/
readFieldAndValue(): [FieldInstance, SerializedType] {
const field = this.readField();
return [field, this.readFieldValue(field)];
const field = this.readField()
return [field, this.readFieldValue(field)]
}
}
export { BinaryParser };
export { BinaryParser }

View File

@@ -1,13 +1,13 @@
import * as assert from "assert";
import { FieldInstance } from "../enums";
import { SerializedType } from "../types/serialized-type";
import { Buffer } from "buffer/";
import * as assert from 'assert'
import { FieldInstance } from '../enums'
import { SerializedType } from '../types/serialized-type'
import { Buffer } from 'buffer/'
/**
* Bytes list is a collection of buffer objects
*/
class BytesList {
private bytesArray: Array<Buffer> = [];
private bytesArray: Array<Buffer> = []
/**
* Get the total number of bytes in the BytesList
@@ -15,7 +15,7 @@ class BytesList {
* @return the number of bytes
*/
public getLength(): number {
return Buffer.concat(this.bytesArray).byteLength;
return Buffer.concat(this.bytesArray).byteLength
}
/**
@@ -25,9 +25,9 @@ class BytesList {
* @return this BytesList
*/
public put(bytesArg: Buffer): BytesList {
const bytes = Buffer.from(bytesArg); // Temporary, to catch instances of Uint8Array being passed in
this.bytesArray.push(bytes);
return this;
const bytes = Buffer.from(bytesArg) // Temporary, to catch instances of Uint8Array being passed in
this.bytesArray.push(bytes)
return this
}
/**
@@ -36,15 +36,15 @@ class BytesList {
* @param list The BytesList to write to
*/
public toBytesSink(list: BytesList): void {
list.put(this.toBytes());
list.put(this.toBytes())
}
public toBytes(): Buffer {
return Buffer.concat(this.bytesArray);
return Buffer.concat(this.bytesArray)
}
toHex(): string {
return this.toBytes().toString("hex").toUpperCase();
return this.toBytes().toString('hex').toUpperCase()
}
}
@@ -52,10 +52,10 @@ class BytesList {
* BinarySerializer is used to write fields and values to buffers
*/
class BinarySerializer {
private sink: BytesList = new BytesList();
private sink: BytesList = new BytesList()
constructor(sink: BytesList) {
this.sink = sink;
this.sink = sink
}
/**
@@ -64,7 +64,7 @@ class BinarySerializer {
* @param value a SerializedType value
*/
write(value: SerializedType): void {
value.toBytesSink(this.sink);
value.toBytesSink(this.sink)
}
/**
@@ -73,7 +73,7 @@ class BinarySerializer {
* @param bytes the bytes to write
*/
put(bytes: Buffer): void {
this.sink.put(bytes);
this.sink.put(bytes)
}
/**
@@ -83,7 +83,7 @@ class BinarySerializer {
* @param value a value of that type
*/
writeType(type: typeof SerializedType, value: SerializedType): void {
this.write(type.from(value));
this.write(type.from(value))
}
/**
@@ -92,7 +92,7 @@ class BinarySerializer {
* @param bl BytesList to write to BinarySerializer
*/
writeBytesList(bl: BytesList): void {
bl.toBytesSink(this.sink);
bl.toBytesSink(this.sink)
}
/**
@@ -101,23 +101,23 @@ class BinarySerializer {
* @param length the length of the bytes
*/
private encodeVariableLength(length: number): Buffer {
const lenBytes = Buffer.alloc(3);
const lenBytes = Buffer.alloc(3)
if (length <= 192) {
lenBytes[0] = length;
return lenBytes.slice(0, 1);
lenBytes[0] = length
return lenBytes.slice(0, 1)
} else if (length <= 12480) {
length -= 193;
lenBytes[0] = 193 + (length >>> 8);
lenBytes[1] = length & 0xff;
return lenBytes.slice(0, 2);
length -= 193
lenBytes[0] = 193 + (length >>> 8)
lenBytes[1] = length & 0xff
return lenBytes.slice(0, 2)
} else if (length <= 918744) {
length -= 12481;
lenBytes[0] = 241 + (length >>> 16);
lenBytes[1] = (length >> 8) & 0xff;
lenBytes[2] = length & 0xff;
return lenBytes.slice(0, 3);
length -= 12481
lenBytes[0] = 241 + (length >>> 16)
lenBytes[1] = (length >> 8) & 0xff
lenBytes[2] = length & 0xff
return lenBytes.slice(0, 3)
}
throw new Error("Overflow error");
throw new Error('Overflow error')
}
/**
@@ -127,16 +127,16 @@ class BinarySerializer {
* @param value value to write to BinarySerializer
*/
writeFieldAndValue(field: FieldInstance, value: SerializedType): void {
const associatedValue = field.associatedType.from(value);
assert.ok(associatedValue.toBytesSink !== undefined);
assert.ok(field.name !== undefined);
const associatedValue = field.associatedType.from(value)
assert.ok(associatedValue.toBytesSink !== undefined)
assert.ok(field.name !== undefined)
this.sink.put(field.header);
this.sink.put(field.header)
if (field.isVariableLengthEncoded) {
this.writeLengthEncoded(associatedValue);
this.writeLengthEncoded(associatedValue)
} else {
associatedValue.toBytesSink(this.sink);
associatedValue.toBytesSink(this.sink)
}
}
@@ -146,11 +146,11 @@ class BinarySerializer {
* @param value length encoded value to write to BytesList
*/
public writeLengthEncoded(value: SerializedType): void {
const bytes = new BytesList();
value.toBytesSink(bytes);
this.put(this.encodeVariableLength(bytes.getLength()));
this.writeBytesList(bytes);
const bytes = new BytesList()
value.toBytesSink(bytes)
this.put(this.encodeVariableLength(bytes.getLength()))
this.writeBytesList(bytes)
}
}
export { BytesList, BinarySerializer };
export { BytesList, BinarySerializer }

View File

@@ -1,20 +1,20 @@
import { strict as assert } from "assert";
import { coreTypes } from "./types";
import { HashPrefix } from "./hash-prefixes";
import { Sha512Half } from "./hashes";
import { Hash256 } from "./types/hash-256";
import { BytesList } from "./serdes/binary-serializer";
import { Buffer } from "buffer/";
import { strict as assert } from 'assert'
import { coreTypes } from './types'
import { HashPrefix } from './hash-prefixes'
import { Sha512Half } from './hashes'
import { Hash256 } from './types/hash-256'
import { BytesList } from './serdes/binary-serializer'
import { Buffer } from 'buffer/'
/**
* Abstract class describing a SHAMapNode
*/
abstract class ShaMapNode {
abstract hashPrefix(): Buffer;
abstract isLeaf(): boolean;
abstract isInner(): boolean;
abstract toBytesSink(list: BytesList): void;
abstract hash(): Hash256;
abstract hashPrefix(): Buffer
abstract isLeaf(): boolean
abstract isInner(): boolean
abstract toBytesSink(list: BytesList): void
abstract hash(): Hash256
}
/**
@@ -22,21 +22,21 @@ abstract class ShaMapNode {
*/
class ShaMapLeaf extends ShaMapNode {
constructor(public index: Hash256, public item?: ShaMapNode) {
super();
super()
}
/**
* @returns true as ShaMapLeaf is a leaf node
*/
isLeaf(): boolean {
return true;
return true
}
/**
* @returns false as ShaMapLeaf is not an inner node
*/
isInner(): boolean {
return false;
return false
}
/**
@@ -45,7 +45,7 @@ class ShaMapLeaf extends ShaMapNode {
* @returns The hash prefix, unless this.item is undefined, then it returns an empty Buffer
*/
hashPrefix(): Buffer {
return this.item === undefined ? Buffer.alloc(0) : this.item.hashPrefix();
return this.item === undefined ? Buffer.alloc(0) : this.item.hashPrefix()
}
/**
@@ -54,9 +54,9 @@ class ShaMapLeaf extends ShaMapNode {
* @returns hash of this.item concatenated with this.index
*/
hash(): Hash256 {
const hash = Sha512Half.put(this.hashPrefix());
this.toBytesSink(hash);
return hash.finish();
const hash = Sha512Half.put(this.hashPrefix())
this.toBytesSink(hash)
return hash.finish()
}
/**
@@ -65,9 +65,9 @@ class ShaMapLeaf extends ShaMapNode {
*/
toBytesSink(list: BytesList): void {
if (this.item !== undefined) {
this.item.toBytesSink(list);
this.item.toBytesSink(list)
}
this.index.toBytesSink(list);
this.index.toBytesSink(list)
}
}
@@ -75,25 +75,25 @@ class ShaMapLeaf extends ShaMapNode {
* Class defining an Inner Node of a SHAMap
*/
class ShaMapInner extends ShaMapNode {
private slotBits = 0;
private branches: Array<ShaMapNode> = Array(16);
private slotBits = 0
private branches: Array<ShaMapNode> = Array(16)
constructor(private depth: number = 0) {
super();
super()
}
/**
* @returns true as ShaMapInner is an inner node
*/
isInner(): boolean {
return true;
return true
}
/**
* @returns false as ShaMapInner is not a leaf node
*/
isLeaf(): boolean {
return false;
return false
}
/**
@@ -102,7 +102,7 @@ class ShaMapInner extends ShaMapNode {
* @returns hash prefix describing an inner node
*/
hashPrefix(): Buffer {
return HashPrefix.innerNode;
return HashPrefix.innerNode
}
/**
@@ -112,15 +112,15 @@ class ShaMapInner extends ShaMapNode {
* @param branch Branch to add
*/
setBranch(slot: number, branch: ShaMapNode): void {
this.slotBits = this.slotBits | (1 << slot);
this.branches[slot] = branch;
this.slotBits = this.slotBits | (1 << slot)
this.branches[slot] = branch
}
/**
* @returns true if node is empty
*/
empty(): boolean {
return this.slotBits === 0;
return this.slotBits === 0
}
/**
@@ -130,11 +130,11 @@ class ShaMapInner extends ShaMapNode {
*/
hash(): Hash256 {
if (this.empty()) {
return coreTypes.Hash256.ZERO_256;
return coreTypes.Hash256.ZERO_256
}
const hash = Sha512Half.put(this.hashPrefix());
this.toBytesSink(hash);
return hash.finish();
const hash = Sha512Half.put(this.hashPrefix())
this.toBytesSink(hash)
return hash.finish()
}
/**
@@ -144,9 +144,9 @@ class ShaMapInner extends ShaMapNode {
*/
toBytesSink(list: BytesList): void {
for (let i = 0; i < this.branches.length; i++) {
const branch = this.branches[i];
const hash = branch ? branch.hash() : coreTypes.Hash256.ZERO_256;
hash.toBytesSink(list);
const branch = this.branches[i]
const hash = branch ? branch.hash() : coreTypes.Hash256.ZERO_256
hash.toBytesSink(list)
}
}
@@ -158,25 +158,25 @@ class ShaMapInner extends ShaMapNode {
* @param leaf Leaf node to insert when branch doesn't exist
*/
addItem(index?: Hash256, item?: ShaMapNode, leaf?: ShaMapLeaf): void {
assert.ok(index !== undefined);
const nibble = index.nibblet(this.depth);
const existing = this.branches[nibble];
assert.ok(index !== undefined)
const nibble = index.nibblet(this.depth)
const existing = this.branches[nibble]
if (existing === undefined) {
this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
this.setBranch(nibble, leaf || new ShaMapLeaf(index, item))
} else if (existing instanceof ShaMapLeaf) {
const newInner = new ShaMapInner(this.depth + 1);
newInner.addItem(existing.index, undefined, existing);
newInner.addItem(index, item, leaf);
this.setBranch(nibble, newInner);
const newInner = new ShaMapInner(this.depth + 1)
newInner.addItem(existing.index, undefined, existing)
newInner.addItem(index, item, leaf)
this.setBranch(nibble, newInner)
} else if (existing instanceof ShaMapInner) {
existing.addItem(index, item, leaf);
existing.addItem(index, item, leaf)
} else {
throw new Error("invalid ShaMap.addItem call");
throw new Error('invalid ShaMap.addItem call')
}
}
}
class ShaMap extends ShaMapInner {}
export { ShaMap, ShaMapNode, ShaMapLeaf };
export { ShaMap, ShaMapNode, ShaMapLeaf }

View File

@@ -3,20 +3,20 @@ import {
encodeAccountID,
isValidXAddress,
xAddressToClassicAddress,
} from "ripple-address-codec";
import { Hash160 } from "./hash-160";
import { Buffer } from "buffer/";
} from 'ripple-address-codec'
import { Hash160 } from './hash-160'
import { Buffer } from 'buffer/'
const HEX_REGEX = /^[A-F0-9]{40}$/;
const HEX_REGEX = /^[A-F0-9]{40}$/
/**
* Class defining how to encode and decode an AccountID
*/
class AccountID extends Hash160 {
static readonly defaultAccountID: AccountID = new AccountID(Buffer.alloc(20));
static readonly defaultAccountID: AccountID = new AccountID(Buffer.alloc(20))
constructor(bytes?: Buffer) {
super(bytes ?? AccountID.defaultAccountID.bytes);
super(bytes ?? AccountID.defaultAccountID.bytes)
}
/**
@@ -27,20 +27,20 @@ class AccountID extends Hash160 {
*/
static from<T extends Hash160 | string>(value: T): AccountID {
if (value instanceof AccountID) {
return value;
return value
}
if (typeof value === "string") {
if (value === "") {
return new AccountID();
if (typeof value === 'string') {
if (value === '') {
return new AccountID()
}
return HEX_REGEX.test(value)
? new AccountID(Buffer.from(value, "hex"))
: this.fromBase58(value);
? new AccountID(Buffer.from(value, 'hex'))
: this.fromBase58(value)
}
throw new Error("Cannot construct AccountID from value given");
throw new Error('Cannot construct AccountID from value given')
}
/**
@@ -51,15 +51,15 @@ class AccountID extends Hash160 {
*/
static fromBase58(value: string): AccountID {
if (isValidXAddress(value)) {
const classic = xAddressToClassicAddress(value);
const classic = xAddressToClassicAddress(value)
if (classic.tag !== false)
throw new Error("Only allowed to have tag on Account or Destination");
throw new Error('Only allowed to have tag on Account or Destination')
value = classic.classicAddress;
value = classic.classicAddress
}
return new AccountID(Buffer.from(decodeAccountID(value)));
return new AccountID(Buffer.from(decodeAccountID(value)))
}
/**
@@ -68,7 +68,7 @@ class AccountID extends Hash160 {
* @returns the base58 string for this AccountID
*/
toJSON(): string {
return this.toBase58();
return this.toBase58()
}
/**
@@ -78,9 +78,9 @@ class AccountID extends Hash160 {
*/
toBase58(): string {
/* eslint-disable @typescript-eslint/no-explicit-any */
return encodeAccountID(this.bytes as any);
return encodeAccountID(this.bytes as any)
/* eslint-enable @typescript-eslint/no-explicit-any */
}
}
export { AccountID };
export { AccountID }

View File

@@ -1,22 +1,22 @@
import { Decimal } from "decimal.js";
import { Decimal } from 'decimal.js'
import { BinaryParser } from "../serdes/binary-parser";
import { BinaryParser } from '../serdes/binary-parser'
import { AccountID } from "./account-id";
import { Currency } from "./currency";
import { JsonObject, SerializedType } from "./serialized-type";
import * as bigInt from "big-integer";
import { Buffer } from "buffer/";
import { AccountID } from './account-id'
import { Currency } from './currency'
import { JsonObject, SerializedType } from './serialized-type'
import * as bigInt from 'big-integer'
import { Buffer } from 'buffer/'
/**
* Constants for validating amounts
*/
const MIN_IOU_EXPONENT = -96;
const MAX_IOU_EXPONENT = 80;
const MAX_IOU_PRECISION = 16;
const MAX_DROPS = new Decimal("1e17");
const MIN_XRP = new Decimal("1e-6");
const mask = bigInt(0x00000000ffffffff);
const MIN_IOU_EXPONENT = -96
const MAX_IOU_EXPONENT = 80
const MAX_IOU_PRECISION = 16
const MAX_DROPS = new Decimal('1e17')
const MIN_XRP = new Decimal('1e-6')
const mask = bigInt(0x00000000ffffffff)
/**
* decimal.js configuration for Amount IOUs
@@ -24,28 +24,28 @@ const mask = bigInt(0x00000000ffffffff);
Decimal.config({
toExpPos: MAX_IOU_EXPONENT + MAX_IOU_PRECISION,
toExpNeg: MIN_IOU_EXPONENT - MAX_IOU_PRECISION,
});
})
/**
* Interface for JSON objects that represent amounts
*/
interface AmountObject extends JsonObject {
value: string;
currency: string;
issuer: string;
value: string
currency: string
issuer: string
}
/**
* Type guard for AmountObject
*/
function isAmountObject(arg): arg is AmountObject {
const keys = Object.keys(arg).sort();
const keys = Object.keys(arg).sort()
return (
keys.length === 3 &&
keys[0] === "currency" &&
keys[1] === "issuer" &&
keys[2] === "value"
);
keys[0] === 'currency' &&
keys[1] === 'issuer' &&
keys[2] === 'value'
)
}
/**
@@ -53,11 +53,11 @@ function isAmountObject(arg): arg is AmountObject {
*/
class Amount extends SerializedType {
static defaultAmount: Amount = new Amount(
Buffer.from("4000000000000000", "hex")
);
Buffer.from('4000000000000000', 'hex'),
)
constructor(bytes: Buffer) {
super(bytes ?? Amount.defaultAmount.bytes);
super(bytes ?? Amount.defaultAmount.bytes)
}
/**
@@ -69,63 +69,63 @@ class Amount extends SerializedType {
*/
static from<T extends Amount | AmountObject | string>(value: T): Amount {
if (value instanceof Amount) {
return value;
return value
}
let amount = Buffer.alloc(8);
if (typeof value === "string") {
Amount.assertXrpIsValid(value);
let amount = Buffer.alloc(8)
if (typeof value === 'string') {
Amount.assertXrpIsValid(value)
const number = bigInt(value);
const number = bigInt(value)
const intBuf = [Buffer.alloc(4), Buffer.alloc(4)];
intBuf[0].writeUInt32BE(Number(number.shiftRight(32)), 0);
intBuf[1].writeUInt32BE(Number(number.and(mask)), 0);
const intBuf = [Buffer.alloc(4), Buffer.alloc(4)]
intBuf[0].writeUInt32BE(Number(number.shiftRight(32)), 0)
intBuf[1].writeUInt32BE(Number(number.and(mask)), 0)
amount = Buffer.concat(intBuf);
amount = Buffer.concat(intBuf)
amount[0] |= 0x40;
amount[0] |= 0x40
return new Amount(amount);
return new Amount(amount)
}
if (isAmountObject(value)) {
const number = new Decimal(value.value);
Amount.assertIouIsValid(number);
const number = new Decimal(value.value)
Amount.assertIouIsValid(number)
if (number.isZero()) {
amount[0] |= 0x80;
amount[0] |= 0x80
} else {
const integerNumberString = number
.times(`1e${-(number.e - 15)}`)
.abs()
.toString();
.toString()
const num = bigInt(integerNumberString);
const intBuf = [Buffer.alloc(4), Buffer.alloc(4)];
intBuf[0].writeUInt32BE(Number(num.shiftRight(32)), 0);
intBuf[1].writeUInt32BE(Number(num.and(mask)), 0);
const num = bigInt(integerNumberString)
const intBuf = [Buffer.alloc(4), Buffer.alloc(4)]
intBuf[0].writeUInt32BE(Number(num.shiftRight(32)), 0)
intBuf[1].writeUInt32BE(Number(num.and(mask)), 0)
amount = Buffer.concat(intBuf);
amount = Buffer.concat(intBuf)
amount[0] |= 0x80;
amount[0] |= 0x80
if (number.gt(new Decimal(0))) {
amount[0] |= 0x40;
amount[0] |= 0x40
}
const exponent = number.e - 15;
const exponentByte = 97 + exponent;
amount[0] |= exponentByte >>> 2;
amount[1] |= (exponentByte & 0x03) << 6;
const exponent = number.e - 15
const exponentByte = 97 + exponent
amount[0] |= exponentByte >>> 2
amount[1] |= (exponentByte & 0x03) << 6
}
const currency = Currency.from(value.currency).toBytes();
const issuer = AccountID.from(value.issuer).toBytes();
return new Amount(Buffer.concat([amount, currency, issuer]));
const currency = Currency.from(value.currency).toBytes()
const issuer = AccountID.from(value.issuer).toBytes()
return new Amount(Buffer.concat([amount, currency, issuer]))
}
throw new Error("Invalid type to construct an Amount");
throw new Error('Invalid type to construct an Amount')
}
/**
@@ -135,9 +135,9 @@ class Amount extends SerializedType {
* @returns An Amount object
*/
static fromParser(parser: BinaryParser): Amount {
const isXRP = parser.peek() & 0x80;
const numBytes = isXRP ? 48 : 8;
return new Amount(parser.read(numBytes));
const isXRP = parser.peek() & 0x80
const numBytes = isXRP ? 48 : 8
return new Amount(parser.read(numBytes))
}
/**
@@ -147,41 +147,41 @@ class Amount extends SerializedType {
*/
toJSON(): AmountObject | string {
if (this.isNative()) {
const bytes = this.bytes;
const isPositive = bytes[0] & 0x40;
const sign = isPositive ? "" : "-";
bytes[0] &= 0x3f;
const bytes = this.bytes
const isPositive = bytes[0] & 0x40
const sign = isPositive ? '' : '-'
bytes[0] &= 0x3f
const msb = bigInt(bytes.slice(0, 4).readUInt32BE(0));
const lsb = bigInt(bytes.slice(4).readUInt32BE(0));
const num = msb.shiftLeft(32).or(lsb);
const msb = bigInt(bytes.slice(0, 4).readUInt32BE(0))
const lsb = bigInt(bytes.slice(4).readUInt32BE(0))
const num = msb.shiftLeft(32).or(lsb)
return `${sign}${num.toString()}`;
return `${sign}${num.toString()}`
} else {
const parser = new BinaryParser(this.toString());
const mantissa = parser.read(8);
const currency = Currency.fromParser(parser) as Currency;
const issuer = AccountID.fromParser(parser) as AccountID;
const parser = new BinaryParser(this.toString())
const mantissa = parser.read(8)
const currency = Currency.fromParser(parser) as Currency
const issuer = AccountID.fromParser(parser) as AccountID
const b1 = mantissa[0];
const b2 = mantissa[1];
const b1 = mantissa[0]
const b2 = mantissa[1]
const isPositive = b1 & 0x40;
const sign = isPositive ? "" : "-";
const exponent = ((b1 & 0x3f) << 2) + ((b2 & 0xff) >> 6) - 97;
const isPositive = b1 & 0x40
const sign = isPositive ? '' : '-'
const exponent = ((b1 & 0x3f) << 2) + ((b2 & 0xff) >> 6) - 97
mantissa[0] = 0;
mantissa[1] &= 0x3f;
const value = new Decimal(`${sign}0x${mantissa.toString("hex")}`).times(
`1e${exponent}`
);
Amount.assertIouIsValid(value);
mantissa[0] = 0
mantissa[1] &= 0x3f
const value = new Decimal(`${sign}0x${mantissa.toString('hex')}`).times(
`1e${exponent}`,
)
Amount.assertIouIsValid(value)
return {
value: value.toString(),
currency: currency.toJSON(),
issuer: issuer.toJSON(),
};
}
}
}
@@ -192,14 +192,14 @@ class Amount extends SerializedType {
* @returns void, but will throw if invalid amount
*/
private static assertXrpIsValid(amount: string): void {
if (amount.indexOf(".") !== -1) {
throw new Error(`${amount.toString()} is an illegal amount`);
if (amount.indexOf('.') !== -1) {
throw new Error(`${amount.toString()} is an illegal amount`)
}
const decimal = new Decimal(amount);
const decimal = new Decimal(amount)
if (!decimal.isZero()) {
if (decimal.lt(MIN_XRP) || decimal.gt(MAX_DROPS)) {
throw new Error(`${amount.toString()} is an illegal amount`);
throw new Error(`${amount.toString()} is an illegal amount`)
}
}
}
@@ -212,16 +212,16 @@ class Amount extends SerializedType {
*/
private static assertIouIsValid(decimal: Decimal): void {
if (!decimal.isZero()) {
const p = decimal.precision();
const e = decimal.e - 15;
const p = decimal.precision()
const e = decimal.e - 15
if (
p > MAX_IOU_PRECISION ||
e > MAX_IOU_EXPONENT ||
e < MIN_IOU_EXPONENT
) {
throw new Error("Decimal precision out of range");
throw new Error('Decimal precision out of range')
}
this.verifyNoDecimal(decimal);
this.verifyNoDecimal(decimal)
}
}
@@ -236,10 +236,10 @@ class Amount extends SerializedType {
const integerNumberString = decimal
.times(`1e${-(decimal.e - 15)}`)
.abs()
.toString();
.toString()
if (integerNumberString.indexOf(".") !== -1) {
throw new Error("Decimal place found in integerNumberString");
if (integerNumberString.indexOf('.') !== -1) {
throw new Error('Decimal place found in integerNumberString')
}
}
@@ -249,8 +249,8 @@ class Amount extends SerializedType {
* @returns true if Native (XRP)
*/
private isNative(): boolean {
return (this.bytes[0] & 0x80) === 0;
return (this.bytes[0] & 0x80) === 0
}
}
export { Amount, AmountObject };
export { Amount, AmountObject }

View File

@@ -1,13 +1,13 @@
import { SerializedType } from "./serialized-type";
import { BinaryParser } from "../serdes/binary-parser";
import { Buffer } from "buffer/";
import { SerializedType } from './serialized-type'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Variable length encoded type
*/
class Blob extends SerializedType {
constructor(bytes: Buffer) {
super(bytes);
super(bytes)
}
/**
@@ -18,7 +18,7 @@ class Blob extends SerializedType {
* @returns A Blob object
*/
static fromParser(parser: BinaryParser, hint: number): Blob {
return new Blob(parser.read(hint));
return new Blob(parser.read(hint))
}
/**
@@ -29,15 +29,15 @@ class Blob extends SerializedType {
*/
static from<T extends Blob | string>(value: T): Blob {
if (value instanceof Blob) {
return value;
return value
}
if (typeof value === "string") {
return new Blob(Buffer.from(value, "hex"));
if (typeof value === 'string') {
return new Blob(Buffer.from(value, 'hex'))
}
throw new Error("Cannot construct Blob from value given");
throw new Error('Cannot construct Blob from value given')
}
}
export { Blob };
export { Blob }

View File

@@ -1,60 +1,60 @@
import { Hash160 } from "./hash-160";
import { Buffer } from "buffer/";
import { Hash160 } from './hash-160'
import { Buffer } from 'buffer/'
const ISO_REGEX = /^[A-Z0-9]{3}$/;
const HEX_REGEX = /^[A-F0-9]{40}$/;
const ISO_REGEX = /^[A-Z0-9]{3}$/
const HEX_REGEX = /^[A-F0-9]{40}$/
/**
* Convert an ISO code to a currency bytes representation
*/
function isoToBytes(iso: string): Buffer {
const bytes = Buffer.alloc(20);
if (iso !== "XRP") {
const isoBytes = iso.split("").map((c) => c.charCodeAt(0));
bytes.set(isoBytes, 12);
const bytes = Buffer.alloc(20)
if (iso !== 'XRP') {
const isoBytes = iso.split('').map((c) => c.charCodeAt(0))
bytes.set(isoBytes, 12)
}
return bytes;
return bytes
}
/**
* Tests if ISO is a valid iso code
*/
function isIsoCode(iso: string): boolean {
return ISO_REGEX.test(iso);
return ISO_REGEX.test(iso)
}
function isoCodeFromHex(code: Buffer): string | null {
const iso = code.toString();
if (iso === "XRP") {
const iso = code.toString()
if (iso === 'XRP') {
throw new Error(
"Disallowed currency code: to indicate the currency XRP you must use 20 bytes of 0s"
);
'Disallowed currency code: to indicate the currency XRP you must use 20 bytes of 0s',
)
}
if (isIsoCode(iso)) {
return iso;
return iso
}
return null;
return null
}
/**
* Tests if hex is a valid hex-string
*/
function isHex(hex: string): boolean {
return HEX_REGEX.test(hex);
return HEX_REGEX.test(hex)
}
/**
* Tests if a string is a valid representation of a currency
*/
function isStringRepresentation(input: string): boolean {
return input.length === 3 || isHex(input);
return input.length === 3 || isHex(input)
}
/**
* Tests if a Buffer is a valid representation of a currency
*/
function isBytesArray(bytes: Buffer): boolean {
return bytes.byteLength === 20;
return bytes.byteLength === 20
}
/**
@@ -63,7 +63,7 @@ function isBytesArray(bytes: Buffer): boolean {
function isValidRepresentation(input: Buffer | string): boolean {
return input instanceof Buffer
? isBytesArray(input)
: isStringRepresentation(input);
: isStringRepresentation(input)
}
/**
@@ -71,28 +71,28 @@ function isValidRepresentation(input: Buffer | string): boolean {
*/
function bytesFromRepresentation(input: string): Buffer {
if (!isValidRepresentation(input)) {
throw new Error(`Unsupported Currency representation: ${input}`);
throw new Error(`Unsupported Currency representation: ${input}`)
}
return input.length === 3 ? isoToBytes(input) : Buffer.from(input, "hex");
return input.length === 3 ? isoToBytes(input) : Buffer.from(input, 'hex')
}
/**
* Class defining how to encode and decode Currencies
*/
class Currency extends Hash160 {
static readonly XRP = new Currency(Buffer.alloc(20));
private readonly _iso: string | null;
static readonly XRP = new Currency(Buffer.alloc(20))
private readonly _iso: string | null
constructor(byteBuf: Buffer) {
super(byteBuf ?? Currency.XRP.bytes);
const code = this.bytes.slice(12, 15);
super(byteBuf ?? Currency.XRP.bytes)
const code = this.bytes.slice(12, 15)
if (this.bytes[0] !== 0) {
this._iso = null;
} else if (code.toString("hex") === "000000") {
this._iso = "XRP";
this._iso = null
} else if (code.toString('hex') === '000000') {
this._iso = 'XRP'
} else {
this._iso = isoCodeFromHex(code);
this._iso = isoCodeFromHex(code)
}
}
@@ -102,7 +102,7 @@ class Currency extends Hash160 {
* @returns ISO code if it exists, else null
*/
iso(): string | null {
return this._iso;
return this._iso
}
/**
@@ -112,14 +112,14 @@ class Currency extends Hash160 {
*/
static from<T extends Hash160 | string>(value: T): Currency {
if (value instanceof Currency) {
return value;
return value
}
if (typeof value === "string") {
return new Currency(bytesFromRepresentation(value));
if (typeof value === 'string') {
return new Currency(bytesFromRepresentation(value))
}
throw new Error("Cannot construct Currency from value given");
throw new Error('Cannot construct Currency from value given')
}
/**
@@ -128,12 +128,12 @@ class Currency extends Hash160 {
* @returns JSON representation
*/
toJSON(): string {
const iso = this.iso();
const iso = this.iso()
if (iso !== null) {
return iso;
return iso
}
return this.bytes.toString("hex").toUpperCase();
return this.bytes.toString('hex').toUpperCase()
}
}
export { Currency };
export { Currency }

View File

@@ -1,16 +1,16 @@
import { Hash } from "./hash";
import { Buffer } from "buffer/";
import { Hash } from './hash'
import { Buffer } from 'buffer/'
/**
* Hash with a width of 128 bits
*/
class Hash128 extends Hash {
static readonly width = 16;
static readonly ZERO_128: Hash128 = new Hash128(Buffer.alloc(Hash128.width));
static readonly width = 16
static readonly ZERO_128: Hash128 = new Hash128(Buffer.alloc(Hash128.width))
constructor(bytes: Buffer) {
super(bytes ?? Hash128.ZERO_128.bytes);
super(bytes ?? Hash128.ZERO_128.bytes)
}
}
export { Hash128 };
export { Hash128 }

View File

@@ -1,20 +1,20 @@
import { Hash } from "./hash";
import { Buffer } from "buffer/";
import { Hash } from './hash'
import { Buffer } from 'buffer/'
/**
* Hash with a width of 160 bits
*/
class Hash160 extends Hash {
static readonly width = 20;
static readonly ZERO_160: Hash160 = new Hash160(Buffer.alloc(Hash160.width));
static readonly width = 20
static readonly ZERO_160: Hash160 = new Hash160(Buffer.alloc(Hash160.width))
constructor(bytes?: Buffer) {
if (bytes && bytes.byteLength === 0) {
bytes = Hash160.ZERO_160.bytes;
bytes = Hash160.ZERO_160.bytes
}
super(bytes ?? Hash160.ZERO_160.bytes);
super(bytes ?? Hash160.ZERO_160.bytes)
}
}
export { Hash160 };
export { Hash160 }

View File

@@ -1,16 +1,16 @@
import { Hash } from "./hash";
import { Buffer } from "buffer/";
import { Hash } from './hash'
import { Buffer } from 'buffer/'
/**
* Hash with a width of 256 bits
*/
class Hash256 extends Hash {
static readonly width = 32;
static readonly ZERO_256 = new Hash256(Buffer.alloc(Hash256.width));
static readonly width = 32
static readonly ZERO_256 = new Hash256(Buffer.alloc(Hash256.width))
constructor(bytes: Buffer) {
super(bytes ?? Hash256.ZERO_256.bytes);
super(bytes ?? Hash256.ZERO_256.bytes)
}
}
export { Hash256 };
export { Hash256 }

View File

@@ -1,17 +1,17 @@
import { Comparable } from "./serialized-type";
import { BinaryParser } from "../serdes/binary-parser";
import { Buffer } from "buffer/";
import { Comparable } from './serialized-type'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Base class defining how to encode and decode hashes
*/
class Hash extends Comparable {
static readonly width: number;
static readonly width: number
constructor(bytes: Buffer) {
super(bytes);
super(bytes)
if (this.bytes.byteLength !== (this.constructor as typeof Hash).width) {
throw new Error(`Invalid Hash length ${this.bytes.byteLength}`);
throw new Error(`Invalid Hash length ${this.bytes.byteLength}`)
}
}
@@ -22,14 +22,14 @@ class Hash extends Comparable {
*/
static from<T extends Hash | string>(value: T): Hash {
if (value instanceof this) {
return value;
return value
}
if (typeof value === "string") {
return new this(Buffer.from(value, "hex"));
if (typeof value === 'string') {
return new this(Buffer.from(value, 'hex'))
}
throw new Error("Cannot construct Hash from given value");
throw new Error('Cannot construct Hash from given value')
}
/**
@@ -39,7 +39,7 @@ class Hash extends Comparable {
* @param hint length of the bytes to read, optional
*/
static fromParser(parser: BinaryParser, hint?: number): Hash {
return new this(parser.read(hint ?? this.width));
return new this(parser.read(hint ?? this.width))
}
/**
@@ -49,15 +49,15 @@ class Hash extends Comparable {
*/
compareTo(other: Hash): number {
return this.bytes.compare(
(this.constructor as typeof Hash).from(other).bytes
);
(this.constructor as typeof Hash).from(other).bytes,
)
}
/**
* @returns the hex-string representation of this Hash
*/
toString(): string {
return this.toHex();
return this.toHex()
}
/**
@@ -67,15 +67,15 @@ class Hash extends Comparable {
* @returns The number represented by the four bits
*/
nibblet(depth: number): number {
const byteIx = depth > 0 ? (depth / 2) | 0 : 0;
let b = this.bytes[byteIx];
const byteIx = depth > 0 ? (depth / 2) | 0 : 0
let b = this.bytes[byteIx]
if (depth % 2 === 0) {
b = (b & 0xf0) >>> 4;
b = (b & 0xf0) >>> 4
} else {
b = b & 0x0f;
b = b & 0x0f
}
return b;
return b
}
}
export { Hash };
export { Hash }

View File

@@ -3,22 +3,22 @@ import {
TransactionResult,
TransactionType,
LedgerEntryType,
} from "../enums";
import { AccountID } from "./account-id";
import { Amount } from "./amount";
import { Blob } from "./blob";
import { Currency } from "./currency";
import { Hash128 } from "./hash-128";
import { Hash160 } from "./hash-160";
import { Hash256 } from "./hash-256";
import { PathSet } from "./path-set";
import { STArray } from "./st-array";
import { STObject } from "./st-object";
import { UInt16 } from "./uint-16";
import { UInt32 } from "./uint-32";
import { UInt64 } from "./uint-64";
import { UInt8 } from "./uint-8";
import { Vector256 } from "./vector-256";
} from '../enums'
import { AccountID } from './account-id'
import { Amount } from './amount'
import { Blob } from './blob'
import { Currency } from './currency'
import { Hash128 } from './hash-128'
import { Hash160 } from './hash-160'
import { Hash256 } from './hash-256'
import { PathSet } from './path-set'
import { STArray } from './st-array'
import { STObject } from './st-object'
import { UInt16 } from './uint-16'
import { UInt32 } from './uint-32'
import { UInt64 } from './uint-64'
import { UInt8 } from './uint-8'
import { Vector256 } from './vector-256'
const coreTypes = {
AccountID,
@@ -36,14 +36,14 @@ const coreTypes = {
UInt32,
UInt64,
Vector256,
};
}
Object.values(Field).forEach((field) => {
field.associatedType = coreTypes[field.type.name];
});
field.associatedType = coreTypes[field.type.name]
})
Field["TransactionType"].associatedType = TransactionType;
Field["TransactionResult"].associatedType = TransactionResult;
Field["LedgerEntryType"].associatedType = LedgerEntryType;
Field['TransactionType'].associatedType = TransactionType
Field['TransactionResult'].associatedType = TransactionResult
Field['LedgerEntryType'].associatedType = LedgerEntryType
export { coreTypes };
export { coreTypes }

View File

@@ -1,29 +1,29 @@
import { AccountID } from "./account-id";
import { Currency } from "./currency";
import { BinaryParser } from "../serdes/binary-parser";
import { SerializedType, JsonObject } from "./serialized-type";
import { Buffer } from "buffer/";
import { AccountID } from './account-id'
import { Currency } from './currency'
import { BinaryParser } from '../serdes/binary-parser'
import { SerializedType, JsonObject } from './serialized-type'
import { Buffer } from 'buffer/'
/**
* Constants for separating Paths in a PathSet
*/
const PATHSET_END_BYTE = 0x00;
const PATH_SEPARATOR_BYTE = 0xff;
const PATHSET_END_BYTE = 0x00
const PATH_SEPARATOR_BYTE = 0xff
/**
* Constant for masking types of a Hop
*/
const TYPE_ACCOUNT = 0x01;
const TYPE_CURRENCY = 0x10;
const TYPE_ISSUER = 0x20;
const TYPE_ACCOUNT = 0x01
const TYPE_CURRENCY = 0x10
const TYPE_ISSUER = 0x20
/**
* The object representation of a Hop, an issuer AccountID, an account AccountID, and a Currency
*/
interface HopObject extends JsonObject {
issuer?: string;
account?: string;
currency?: string;
issuer?: string
account?: string
currency?: string
}
/**
@@ -34,7 +34,7 @@ function isHopObject(arg): arg is HopObject {
arg.issuer !== undefined ||
arg.account !== undefined ||
arg.currency !== undefined
);
)
}
/**
@@ -45,7 +45,7 @@ function isPathSet(arg): arg is Array<Array<HopObject>> {
(Array.isArray(arg) && arg.length === 0) ||
(Array.isArray(arg) && Array.isArray(arg[0]) && arg[0].length === 0) ||
(Array.isArray(arg) && Array.isArray(arg[0]) && isHopObject(arg[0][0]))
);
)
}
/**
@@ -60,27 +60,27 @@ class Hop extends SerializedType {
*/
static from(value: Hop | HopObject): Hop {
if (value instanceof Hop) {
return value;
return value
}
const bytes: Array<Buffer> = [Buffer.from([0])];
const bytes: Array<Buffer> = [Buffer.from([0])]
if (value.account) {
bytes.push(AccountID.from(value.account).toBytes());
bytes[0][0] |= TYPE_ACCOUNT;
bytes.push(AccountID.from(value.account).toBytes())
bytes[0][0] |= TYPE_ACCOUNT
}
if (value.currency) {
bytes.push(Currency.from(value.currency).toBytes());
bytes[0][0] |= TYPE_CURRENCY;
bytes.push(Currency.from(value.currency).toBytes())
bytes[0][0] |= TYPE_CURRENCY
}
if (value.issuer) {
bytes.push(AccountID.from(value.issuer).toBytes());
bytes[0][0] |= TYPE_ISSUER;
bytes.push(AccountID.from(value.issuer).toBytes())
bytes[0][0] |= TYPE_ISSUER
}
return new Hop(Buffer.concat(bytes));
return new Hop(Buffer.concat(bytes))
}
/**
@@ -90,22 +90,22 @@ class Hop extends SerializedType {
* @returns a Hop
*/
static fromParser(parser: BinaryParser): Hop {
const type = parser.readUInt8();
const bytes: Array<Buffer> = [Buffer.from([type])];
const type = parser.readUInt8()
const bytes: Array<Buffer> = [Buffer.from([type])]
if (type & TYPE_ACCOUNT) {
bytes.push(parser.read(AccountID.width));
bytes.push(parser.read(AccountID.width))
}
if (type & TYPE_CURRENCY) {
bytes.push(parser.read(Currency.width));
bytes.push(parser.read(Currency.width))
}
if (type & TYPE_ISSUER) {
bytes.push(parser.read(AccountID.width));
bytes.push(parser.read(AccountID.width))
}
return new Hop(Buffer.concat(bytes));
return new Hop(Buffer.concat(bytes))
}
/**
@@ -114,36 +114,36 @@ class Hop extends SerializedType {
* @returns a HopObject, an JS object with optional account, issuer, and currency
*/
toJSON(): HopObject {
const hopParser = new BinaryParser(this.bytes.toString("hex"));
const type = hopParser.readUInt8();
const hopParser = new BinaryParser(this.bytes.toString('hex'))
const type = hopParser.readUInt8()
let account, currency, issuer;
let account, currency, issuer
if (type & TYPE_ACCOUNT) {
account = (AccountID.fromParser(hopParser) as AccountID).toJSON();
account = (AccountID.fromParser(hopParser) as AccountID).toJSON()
}
if (type & TYPE_CURRENCY) {
currency = (Currency.fromParser(hopParser) as Currency).toJSON();
currency = (Currency.fromParser(hopParser) as Currency).toJSON()
}
if (type & TYPE_ISSUER) {
issuer = (AccountID.fromParser(hopParser) as AccountID).toJSON();
issuer = (AccountID.fromParser(hopParser) as AccountID).toJSON()
}
const result: HopObject = {};
const result: HopObject = {}
if (account) {
result.account = account;
result.account = account
}
if (issuer) {
result.issuer = issuer;
result.issuer = issuer
}
if (currency) {
result.currency = currency;
result.currency = currency
}
return result;
return result
}
/**
@@ -152,7 +152,7 @@ class Hop extends SerializedType {
* @returns a number to be bitwise and-ed with TYPE_ constants to describe the types in the hop
*/
type(): number {
return this.bytes[0];
return this.bytes[0]
}
}
@@ -168,15 +168,15 @@ class Path extends SerializedType {
*/
static from(value: Path | Array<HopObject>): Path {
if (value instanceof Path) {
return value;
return value
}
const bytes: Array<Buffer> = [];
const bytes: Array<Buffer> = []
value.forEach((hop: HopObject) => {
bytes.push(Hop.from(hop).toBytes());
});
bytes.push(Hop.from(hop).toBytes())
})
return new Path(Buffer.concat(bytes));
return new Path(Buffer.concat(bytes))
}
/**
@@ -186,18 +186,18 @@ class Path extends SerializedType {
* @returns the Path represented by the bytes read from the BinaryParser
*/
static fromParser(parser: BinaryParser): Path {
const bytes: Array<Buffer> = [];
const bytes: Array<Buffer> = []
while (!parser.end()) {
bytes.push(Hop.fromParser(parser).toBytes());
bytes.push(Hop.fromParser(parser).toBytes())
if (
parser.peek() === PATHSET_END_BYTE ||
parser.peek() === PATH_SEPARATOR_BYTE
) {
break;
break
}
}
return new Path(Buffer.concat(bytes));
return new Path(Buffer.concat(bytes))
}
/**
@@ -206,14 +206,14 @@ class Path extends SerializedType {
* @returns an Array of HopObject constructed from this.bytes
*/
toJSON(): Array<HopObject> {
const json: Array<HopObject> = [];
const pathParser = new BinaryParser(this.toString());
const json: Array<HopObject> = []
const pathParser = new BinaryParser(this.toString())
while (!pathParser.end()) {
json.push(Hop.fromParser(pathParser).toJSON());
json.push(Hop.fromParser(pathParser).toJSON())
}
return json;
return json
}
}
@@ -229,23 +229,23 @@ class PathSet extends SerializedType {
*/
static from<T extends PathSet | Array<Array<HopObject>>>(value: T): PathSet {
if (value instanceof PathSet) {
return value;
return value
}
if (isPathSet(value)) {
const bytes: Array<Buffer> = [];
const bytes: Array<Buffer> = []
value.forEach((path: Array<HopObject>) => {
bytes.push(Path.from(path).toBytes());
bytes.push(Buffer.from([PATH_SEPARATOR_BYTE]));
});
bytes.push(Path.from(path).toBytes())
bytes.push(Buffer.from([PATH_SEPARATOR_BYTE]))
})
bytes[bytes.length - 1] = Buffer.from([PATHSET_END_BYTE]);
bytes[bytes.length - 1] = Buffer.from([PATHSET_END_BYTE])
return new PathSet(Buffer.concat(bytes));
return new PathSet(Buffer.concat(bytes))
}
throw new Error("Cannot construct PathSet from given value");
throw new Error('Cannot construct PathSet from given value')
}
/**
@@ -255,18 +255,18 @@ class PathSet extends SerializedType {
* @returns the PathSet read from parser
*/
static fromParser(parser: BinaryParser): PathSet {
const bytes: Array<Buffer> = [];
const bytes: Array<Buffer> = []
while (!parser.end()) {
bytes.push(Path.fromParser(parser).toBytes());
bytes.push(parser.read(1));
bytes.push(Path.fromParser(parser).toBytes())
bytes.push(parser.read(1))
if (bytes[bytes.length - 1][0] == PATHSET_END_BYTE) {
break;
break
}
}
return new PathSet(Buffer.concat(bytes));
return new PathSet(Buffer.concat(bytes))
}
/**
@@ -275,16 +275,16 @@ class PathSet extends SerializedType {
* @returns an Array of Array of HopObjects, representing this PathSet
*/
toJSON(): Array<Array<HopObject>> {
const json: Array<Array<HopObject>> = [];
const pathParser = new BinaryParser(this.toString());
const json: Array<Array<HopObject>> = []
const pathParser = new BinaryParser(this.toString())
while (!pathParser.end()) {
json.push(Path.fromParser(pathParser).toJSON());
pathParser.skip(1);
json.push(Path.fromParser(pathParser).toJSON())
pathParser.skip(1)
}
return json;
return json
}
}
export { PathSet };
export { PathSet }

View File

@@ -1,32 +1,32 @@
import { BytesList } from "../serdes/binary-serializer";
import { BinaryParser } from "../serdes/binary-parser";
import * as bigInt from "big-integer";
import { Buffer } from "buffer/";
import { BytesList } from '../serdes/binary-serializer'
import { BinaryParser } from '../serdes/binary-parser'
import * as bigInt from 'big-integer'
import { Buffer } from 'buffer/'
type JSON = string | number | boolean | null | undefined | JSON[] | JsonObject;
type JSON = string | number | boolean | null | undefined | JSON[] | JsonObject
type JsonObject = { [key: string]: JSON };
type JsonObject = { [key: string]: JSON }
/**
* The base class for all binary-codec types
*/
class SerializedType {
protected readonly bytes: Buffer = Buffer.alloc(0);
protected readonly bytes: Buffer = Buffer.alloc(0)
constructor(bytes: Buffer) {
this.bytes = bytes ?? Buffer.alloc(0);
this.bytes = bytes ?? Buffer.alloc(0)
}
static fromParser(parser: BinaryParser, hint?: number): SerializedType {
throw new Error("fromParser not implemented");
return this.fromParser(parser, hint);
throw new Error('fromParser not implemented')
return this.fromParser(parser, hint)
}
static from(
value: SerializedType | JSON | bigInt.BigInteger
value: SerializedType | JSON | bigInt.BigInteger,
): SerializedType {
throw new Error("from not implemented");
return this.from(value);
throw new Error('from not implemented')
return this.from(value)
}
/**
@@ -35,7 +35,7 @@ class SerializedType {
* @param list The BytesList to write SerializedType bytes to
*/
toBytesSink(list: BytesList): void {
list.put(this.bytes);
list.put(this.bytes)
}
/**
@@ -44,7 +44,7 @@ class SerializedType {
* @returns hex String of this.bytes
*/
toHex(): string {
return this.toBytes().toString("hex").toUpperCase();
return this.toBytes().toString('hex').toUpperCase()
}
/**
@@ -54,11 +54,11 @@ class SerializedType {
*/
toBytes(): Buffer {
if (this.bytes) {
return this.bytes;
return this.bytes
}
const bytes = new BytesList();
this.toBytesSink(bytes);
return bytes.toBytes();
const bytes = new BytesList()
this.toBytesSink(bytes)
return bytes.toBytes()
}
/**
@@ -67,14 +67,14 @@ class SerializedType {
* @returns any type, if not overloaded returns hexString representation of bytes
*/
toJSON(): JSON {
return this.toHex();
return this.toHex()
}
/**
* @returns hexString representation of this.bytes
*/
toString(): string {
return this.toHex();
return this.toHex()
}
}
@@ -83,23 +83,23 @@ class SerializedType {
*/
class Comparable extends SerializedType {
lt(other: Comparable): boolean {
return this.compareTo(other) < 0;
return this.compareTo(other) < 0
}
eq(other: Comparable): boolean {
return this.compareTo(other) === 0;
return this.compareTo(other) === 0
}
gt(other: Comparable): boolean {
return this.compareTo(other) > 0;
return this.compareTo(other) > 0
}
gte(other: Comparable): boolean {
return this.compareTo(other) > -1;
return this.compareTo(other) > -1
}
lte(other: Comparable): boolean {
return this.compareTo(other) < 1;
return this.compareTo(other) < 1
}
/**
@@ -109,10 +109,8 @@ class Comparable extends SerializedType {
* @returns A number denoting the relationship of this and other
*/
compareTo(other: Comparable): number {
throw new Error(
`cannot compare ${this.toString()} and ${other.toString()}`
);
throw new Error(`cannot compare ${this.toString()} and ${other.toString()}`)
}
}
export { SerializedType, Comparable, JSON, JsonObject };
export { SerializedType, Comparable, JSON, JsonObject }

View File

@@ -1,20 +1,20 @@
import { SerializedType, JsonObject } from "./serialized-type";
import { STObject } from "./st-object";
import { BinaryParser } from "../serdes/binary-parser";
import { Buffer } from "buffer/";
import { SerializedType, JsonObject } from './serialized-type'
import { STObject } from './st-object'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
const ARRAY_END_MARKER = Buffer.from([0xf1]);
const ARRAY_END_MARKER_NAME = "ArrayEndMarker";
const ARRAY_END_MARKER = Buffer.from([0xf1])
const ARRAY_END_MARKER_NAME = 'ArrayEndMarker'
const OBJECT_END_MARKER = Buffer.from([0xe1]);
const OBJECT_END_MARKER = Buffer.from([0xe1])
/**
* TypeGuard for Array<JsonObject>
*/
function isObjects(args): args is Array<JsonObject> {
return (
Array.isArray(args) && (args.length === 0 || typeof args[0] === "object")
);
Array.isArray(args) && (args.length === 0 || typeof args[0] === 'object')
)
}
/**
@@ -28,23 +28,23 @@ class STArray extends SerializedType {
* @returns An STArray Object
*/
static fromParser(parser: BinaryParser): STArray {
const bytes: Array<Buffer> = [];
const bytes: Array<Buffer> = []
while (!parser.end()) {
const field = parser.readField();
const field = parser.readField()
if (field.name === ARRAY_END_MARKER_NAME) {
break;
break
}
bytes.push(
field.header,
parser.readFieldValue(field).toBytes(),
OBJECT_END_MARKER
);
OBJECT_END_MARKER,
)
}
bytes.push(ARRAY_END_MARKER);
return new STArray(Buffer.concat(bytes));
bytes.push(ARRAY_END_MARKER)
return new STArray(Buffer.concat(bytes))
}
/**
@@ -55,20 +55,20 @@ class STArray extends SerializedType {
*/
static from<T extends STArray | Array<JsonObject>>(value: T): STArray {
if (value instanceof STArray) {
return value;
return value
}
if (isObjects(value)) {
const bytes: Array<Buffer> = [];
const bytes: Array<Buffer> = []
value.forEach((obj) => {
bytes.push(STObject.from(obj).toBytes());
});
bytes.push(STObject.from(obj).toBytes())
})
bytes.push(ARRAY_END_MARKER);
return new STArray(Buffer.concat(bytes));
bytes.push(ARRAY_END_MARKER)
return new STArray(Buffer.concat(bytes))
}
throw new Error("Cannot construct STArray from value given");
throw new Error('Cannot construct STArray from value given')
}
/**
@@ -77,23 +77,23 @@ class STArray extends SerializedType {
* @returns An Array of JSON objects
*/
toJSON(): Array<JsonObject> {
const result: Array<JsonObject> = [];
const result: Array<JsonObject> = []
const arrayParser = new BinaryParser(this.toString());
const arrayParser = new BinaryParser(this.toString())
while (!arrayParser.end()) {
const field = arrayParser.readField();
const field = arrayParser.readField()
if (field.name === ARRAY_END_MARKER_NAME) {
break;
break
}
const outer = {};
outer[field.name] = STObject.fromParser(arrayParser).toJSON();
result.push(outer);
const outer = {}
outer[field.name] = STObject.fromParser(arrayParser).toJSON()
result.push(outer)
}
return result;
return result
}
}
export { STArray };
export { STArray }

View File

@@ -1,20 +1,17 @@
import { Field, FieldInstance } from "../enums";
import { SerializedType, JsonObject } from "./serialized-type";
import {
xAddressToClassicAddress,
isValidXAddress,
} from "ripple-address-codec";
import { BinaryParser } from "../serdes/binary-parser";
import { BinarySerializer, BytesList } from "../serdes/binary-serializer";
import { Buffer } from "buffer/";
import { Field, FieldInstance } from '../enums'
import { SerializedType, JsonObject } from './serialized-type'
import { xAddressToClassicAddress, isValidXAddress } from 'ripple-address-codec'
import { BinaryParser } from '../serdes/binary-parser'
import { BinarySerializer, BytesList } from '../serdes/binary-serializer'
import { Buffer } from 'buffer/'
const OBJECT_END_MARKER_BYTE = Buffer.from([0xe1]);
const OBJECT_END_MARKER = "ObjectEndMarker";
const ST_OBJECT = "STObject";
const DESTINATION = "Destination";
const ACCOUNT = "Account";
const SOURCE_TAG = "SourceTag";
const DEST_TAG = "DestinationTag";
const OBJECT_END_MARKER_BYTE = Buffer.from([0xe1])
const OBJECT_END_MARKER = 'ObjectEndMarker'
const ST_OBJECT = 'STObject'
const DESTINATION = 'Destination'
const ACCOUNT = 'Account'
const SOURCE_TAG = 'SourceTag'
const DEST_TAG = 'DestinationTag'
/**
* Break down an X-Address into an account and a tag
@@ -23,17 +20,17 @@ const DEST_TAG = "DestinationTag";
* @param xAddress X-Address corresponding to the field
*/
function handleXAddress(field: string, xAddress: string): JsonObject {
const decoded = xAddressToClassicAddress(xAddress);
const decoded = xAddressToClassicAddress(xAddress)
let tagName;
if (field === DESTINATION) tagName = DEST_TAG;
else if (field === ACCOUNT) tagName = SOURCE_TAG;
let tagName
if (field === DESTINATION) tagName = DEST_TAG
else if (field === ACCOUNT) tagName = SOURCE_TAG
else if (decoded.tag !== false)
throw new Error(`${field} cannot have an associated tag`);
throw new Error(`${field} cannot have an associated tag`)
return decoded.tag !== false
? { [field]: decoded.classicAddress, [tagName]: decoded.tag }
: { [field]: decoded.classicAddress };
: { [field]: decoded.classicAddress }
}
/**
@@ -45,9 +42,9 @@ function handleXAddress(field: string, xAddress: string): JsonObject {
*/
function checkForDuplicateTags(obj1: JsonObject, obj2: JsonObject): void {
if (!(obj1[SOURCE_TAG] === undefined || obj2[SOURCE_TAG] === undefined))
throw new Error("Cannot have Account X-Address and SourceTag");
throw new Error('Cannot have Account X-Address and SourceTag')
if (!(obj1[DEST_TAG] === undefined || obj2[DEST_TAG] === undefined))
throw new Error("Cannot have Destination X-Address and DestinationTag");
throw new Error('Cannot have Destination X-Address and DestinationTag')
}
/**
@@ -61,24 +58,24 @@ class STObject extends SerializedType {
* @returns A STObject object
*/
static fromParser(parser: BinaryParser): STObject {
const list: BytesList = new BytesList();
const bytes: BinarySerializer = new BinarySerializer(list);
const list: BytesList = new BytesList()
const bytes: BinarySerializer = new BinarySerializer(list)
while (!parser.end()) {
const field = parser.readField();
const field = parser.readField()
if (field.name === OBJECT_END_MARKER) {
break;
break
}
const associatedValue = parser.readFieldValue(field);
const associatedValue = parser.readFieldValue(field)
bytes.writeFieldAndValue(field, associatedValue);
bytes.writeFieldAndValue(field, associatedValue)
if (field.type.name === ST_OBJECT) {
bytes.put(OBJECT_END_MARKER_BYTE);
bytes.put(OBJECT_END_MARKER_BYTE)
}
}
return new STObject(list.toBytes());
return new STObject(list.toBytes())
}
/**
@@ -90,23 +87,23 @@ class STObject extends SerializedType {
*/
static from<T extends STObject | JsonObject>(
value: T,
filter?: (...any) => boolean
filter?: (...any) => boolean,
): STObject {
if (value instanceof STObject) {
return value;
return value
}
const list: BytesList = new BytesList();
const bytes: BinarySerializer = new BinarySerializer(list);
const list: BytesList = new BytesList()
const bytes: BinarySerializer = new BinarySerializer(list)
const xAddressDecoded = Object.entries(value).reduce((acc, [key, val]) => {
let handled: JsonObject | undefined = undefined;
let handled: JsonObject | undefined = undefined
if (val && isValidXAddress(val.toString())) {
handled = handleXAddress(key, val.toString());
checkForDuplicateTags(handled, value);
handled = handleXAddress(key, val.toString())
checkForDuplicateTags(handled, value)
}
return Object.assign(acc, handled ?? { [key]: val });
}, {});
return Object.assign(acc, handled ?? { [key]: val })
}, {})
let sorted = Object.keys(xAddressDecoded)
.map((f: string): FieldInstance => Field[f] as FieldInstance)
@@ -114,28 +111,28 @@ class STObject extends SerializedType {
(f: FieldInstance): boolean =>
f !== undefined &&
xAddressDecoded[f.name] !== undefined &&
f.isSerialized
f.isSerialized,
)
.sort((a, b) => {
return a.ordinal - b.ordinal;
});
return a.ordinal - b.ordinal
})
if (filter !== undefined) {
sorted = sorted.filter(filter);
sorted = sorted.filter(filter)
}
sorted.forEach((field) => {
const associatedValue = field.associatedType.from(
xAddressDecoded[field.name]
);
xAddressDecoded[field.name],
)
bytes.writeFieldAndValue(field, associatedValue);
bytes.writeFieldAndValue(field, associatedValue)
if (field.type.name === ST_OBJECT) {
bytes.put(OBJECT_END_MARKER_BYTE);
bytes.put(OBJECT_END_MARKER_BYTE)
}
});
})
return new STObject(list.toBytes());
return new STObject(list.toBytes())
}
/**
@@ -144,19 +141,19 @@ class STObject extends SerializedType {
* @returns a JSON object
*/
toJSON(): JsonObject {
const objectParser = new BinaryParser(this.toString());
const accumulator = {};
const objectParser = new BinaryParser(this.toString())
const accumulator = {}
while (!objectParser.end()) {
const field = objectParser.readField();
const field = objectParser.readField()
if (field.name === OBJECT_END_MARKER) {
break;
break
}
accumulator[field.name] = objectParser.readFieldValue(field).toJSON();
accumulator[field.name] = objectParser.readFieldValue(field).toJSON()
}
return accumulator;
return accumulator
}
}
export { STObject };
export { STObject }

View File

@@ -1,22 +1,20 @@
import { UInt } from "./uint";
import { BinaryParser } from "../serdes/binary-parser";
import { Buffer } from "buffer/";
import { UInt } from './uint'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Derived UInt class for serializing/deserializing 16 bit UInt
*/
class UInt16 extends UInt {
protected static readonly width: number = 16 / 8; // 2
static readonly defaultUInt16: UInt16 = new UInt16(
Buffer.alloc(UInt16.width)
);
protected static readonly width: number = 16 / 8 // 2
static readonly defaultUInt16: UInt16 = new UInt16(Buffer.alloc(UInt16.width))
constructor(bytes: Buffer) {
super(bytes ?? UInt16.defaultUInt16.bytes);
super(bytes ?? UInt16.defaultUInt16.bytes)
}
static fromParser(parser: BinaryParser): UInt {
return new UInt16(parser.read(UInt16.width));
return new UInt16(parser.read(UInt16.width))
}
/**
@@ -26,16 +24,16 @@ class UInt16 extends UInt {
*/
static from<T extends UInt16 | number>(val: T): UInt16 {
if (val instanceof UInt16) {
return val;
return val
}
if (typeof val === "number") {
const buf = Buffer.alloc(UInt16.width);
buf.writeUInt16BE(val, 0);
return new UInt16(buf);
if (typeof val === 'number') {
const buf = Buffer.alloc(UInt16.width)
buf.writeUInt16BE(val, 0)
return new UInt16(buf)
}
throw new Error("Can not construct UInt16 with given value");
throw new Error('Can not construct UInt16 with given value')
}
/**
@@ -44,8 +42,8 @@ class UInt16 extends UInt {
* @returns the number represented by this.bytes
*/
valueOf(): number {
return this.bytes.readUInt16BE(0);
return this.bytes.readUInt16BE(0)
}
}
export { UInt16 };
export { UInt16 }

View File

@@ -1,22 +1,20 @@
import { UInt } from "./uint";
import { BinaryParser } from "../serdes/binary-parser";
import { Buffer } from "buffer/";
import { UInt } from './uint'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Derived UInt class for serializing/deserializing 32 bit UInt
*/
class UInt32 extends UInt {
protected static readonly width: number = 32 / 8; // 4
static readonly defaultUInt32: UInt32 = new UInt32(
Buffer.alloc(UInt32.width)
);
protected static readonly width: number = 32 / 8 // 4
static readonly defaultUInt32: UInt32 = new UInt32(Buffer.alloc(UInt32.width))
constructor(bytes: Buffer) {
super(bytes ?? UInt32.defaultUInt32.bytes);
super(bytes ?? UInt32.defaultUInt32.bytes)
}
static fromParser(parser: BinaryParser): UInt {
return new UInt32(parser.read(UInt32.width));
return new UInt32(parser.read(UInt32.width))
}
/**
@@ -26,23 +24,23 @@ class UInt32 extends UInt {
*/
static from<T extends UInt32 | number | string>(val: T): UInt32 {
if (val instanceof UInt32) {
return val;
return val
}
const buf = Buffer.alloc(UInt32.width);
const buf = Buffer.alloc(UInt32.width)
if (typeof val === "string") {
const num = Number.parseInt(val);
buf.writeUInt32BE(num, 0);
return new UInt32(buf);
if (typeof val === 'string') {
const num = Number.parseInt(val)
buf.writeUInt32BE(num, 0)
return new UInt32(buf)
}
if (typeof val === "number") {
buf.writeUInt32BE(val, 0);
return new UInt32(buf);
if (typeof val === 'number') {
buf.writeUInt32BE(val, 0)
return new UInt32(buf)
}
throw new Error("Cannot construct UInt32 from given value");
throw new Error('Cannot construct UInt32 from given value')
}
/**
@@ -51,8 +49,8 @@ class UInt32 extends UInt {
* @returns the number represented by this.bytes
*/
valueOf(): number {
return this.bytes.readUInt32BE(0);
return this.bytes.readUInt32BE(0)
}
}
export { UInt32 };
export { UInt32 }

View File

@@ -1,27 +1,25 @@
import { UInt } from "./uint";
import { BinaryParser } from "../serdes/binary-parser";
import * as bigInt from "big-integer";
import { isInstance } from "big-integer";
import { Buffer } from "buffer/";
import { UInt } from './uint'
import { BinaryParser } from '../serdes/binary-parser'
import * as bigInt from 'big-integer'
import { isInstance } from 'big-integer'
import { Buffer } from 'buffer/'
const HEX_REGEX = /^[a-fA-F0-9]{1,16}$/;
const mask = bigInt(0x00000000ffffffff);
const HEX_REGEX = /^[a-fA-F0-9]{1,16}$/
const mask = bigInt(0x00000000ffffffff)
/**
* Derived UInt class for serializing/deserializing 64 bit UInt
*/
class UInt64 extends UInt {
protected static readonly width: number = 64 / 8; // 8
static readonly defaultUInt64: UInt64 = new UInt64(
Buffer.alloc(UInt64.width)
);
protected static readonly width: number = 64 / 8 // 8
static readonly defaultUInt64: UInt64 = new UInt64(Buffer.alloc(UInt64.width))
constructor(bytes: Buffer) {
super(bytes ?? UInt64.defaultUInt64.bytes);
super(bytes ?? UInt64.defaultUInt64.bytes)
}
static fromParser(parser: BinaryParser): UInt {
return new UInt64(parser.read(UInt64.width));
return new UInt64(parser.read(UInt64.width))
}
/**
@@ -31,47 +29,47 @@ class UInt64 extends UInt {
* @returns A UInt64 object
*/
static from<T extends UInt64 | string | bigInt.BigInteger | number>(
val: T
val: T,
): UInt64 {
if (val instanceof UInt64) {
return val;
return val
}
let buf = Buffer.alloc(UInt64.width);
let buf = Buffer.alloc(UInt64.width)
if (typeof val === "number") {
if (typeof val === 'number') {
if (val < 0) {
throw new Error("value must be an unsigned integer");
throw new Error('value must be an unsigned integer')
}
const number = bigInt(val);
const number = bigInt(val)
const intBuf = [Buffer.alloc(4), Buffer.alloc(4)];
intBuf[0].writeUInt32BE(Number(number.shiftRight(32)), 0);
intBuf[1].writeUInt32BE(Number(number.and(mask)), 0);
const intBuf = [Buffer.alloc(4), Buffer.alloc(4)]
intBuf[0].writeUInt32BE(Number(number.shiftRight(32)), 0)
intBuf[1].writeUInt32BE(Number(number.and(mask)), 0)
return new UInt64(Buffer.concat(intBuf));
return new UInt64(Buffer.concat(intBuf))
}
if (typeof val === "string") {
if (typeof val === 'string') {
if (!HEX_REGEX.test(val)) {
throw new Error(`${val} is not a valid hex-string`);
throw new Error(`${val} is not a valid hex-string`)
}
const strBuf = val.padStart(16, "0");
buf = Buffer.from(strBuf, "hex");
return new UInt64(buf);
const strBuf = val.padStart(16, '0')
buf = Buffer.from(strBuf, 'hex')
return new UInt64(buf)
}
if (isInstance(val)) {
const intBuf = [Buffer.alloc(4), Buffer.alloc(4)];
intBuf[0].writeUInt32BE(Number(val.shiftRight(bigInt(32))), 0);
intBuf[1].writeUInt32BE(Number(val.and(mask)), 0);
const intBuf = [Buffer.alloc(4), Buffer.alloc(4)]
intBuf[0].writeUInt32BE(Number(val.shiftRight(bigInt(32))), 0)
intBuf[1].writeUInt32BE(Number(val.and(mask)), 0)
return new UInt64(Buffer.concat(intBuf));
return new UInt64(Buffer.concat(intBuf))
}
throw new Error("Cannot construct UInt64 from given value");
throw new Error('Cannot construct UInt64 from given value')
}
/**
@@ -80,7 +78,7 @@ class UInt64 extends UInt {
* @returns a hex-string
*/
toJSON(): string {
return this.bytes.toString("hex").toUpperCase();
return this.bytes.toString('hex').toUpperCase()
}
/**
@@ -89,9 +87,9 @@ class UInt64 extends UInt {
* @returns the number represented buy this.bytes
*/
valueOf(): bigInt.BigInteger {
const msb = bigInt(this.bytes.slice(0, 4).readUInt32BE(0));
const lsb = bigInt(this.bytes.slice(4).readUInt32BE(0));
return msb.shiftLeft(bigInt(32)).or(lsb);
const msb = bigInt(this.bytes.slice(0, 4).readUInt32BE(0))
const lsb = bigInt(this.bytes.slice(4).readUInt32BE(0))
return msb.shiftLeft(bigInt(32)).or(lsb)
}
/**
@@ -100,8 +98,8 @@ class UInt64 extends UInt {
* @returns 8 bytes representing the UInt64
*/
toBytes(): Buffer {
return this.bytes;
return this.bytes
}
}
export { UInt64 };
export { UInt64 }

View File

@@ -1,20 +1,20 @@
import { UInt } from "./uint";
import { BinaryParser } from "../serdes/binary-parser";
import { Buffer } from "buffer/";
import { UInt } from './uint'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Derived UInt class for serializing/deserializing 8 bit UInt
*/
class UInt8 extends UInt {
protected static readonly width: number = 8 / 8; // 1
static readonly defaultUInt8: UInt8 = new UInt8(Buffer.alloc(UInt8.width));
protected static readonly width: number = 8 / 8 // 1
static readonly defaultUInt8: UInt8 = new UInt8(Buffer.alloc(UInt8.width))
constructor(bytes: Buffer) {
super(bytes ?? UInt8.defaultUInt8.bytes);
super(bytes ?? UInt8.defaultUInt8.bytes)
}
static fromParser(parser: BinaryParser): UInt {
return new UInt8(parser.read(UInt8.width));
return new UInt8(parser.read(UInt8.width))
}
/**
@@ -24,16 +24,16 @@ class UInt8 extends UInt {
*/
static from<T extends UInt8 | number>(val: T): UInt8 {
if (val instanceof UInt8) {
return val;
return val
}
if (typeof val === "number") {
const buf = Buffer.alloc(UInt8.width);
buf.writeUInt8(val, 0);
return new UInt8(buf);
if (typeof val === 'number') {
const buf = Buffer.alloc(UInt8.width)
buf.writeUInt8(val, 0)
return new UInt8(buf)
}
throw new Error("Cannot construct UInt8 from given value");
throw new Error('Cannot construct UInt8 from given value')
}
/**
@@ -42,8 +42,8 @@ class UInt8 extends UInt {
* @returns the number represented by this.bytes
*/
valueOf(): number {
return this.bytes.readUInt8(0);
return this.bytes.readUInt8(0)
}
}
export { UInt8 };
export { UInt8 }

View File

@@ -1,6 +1,6 @@
import * as bigInt from "big-integer";
import { Comparable } from "./serialized-type";
import { Buffer } from "buffer/";
import * as bigInt from 'big-integer'
import { Comparable } from './serialized-type'
import { Buffer } from 'buffer/'
/**
* Compare numbers and bigInts n1 and n2
@@ -11,19 +11,19 @@ import { Buffer } from "buffer/";
*/
function compare(
n1: number | bigInt.BigInteger,
n2: number | bigInt.BigInteger
n2: number | bigInt.BigInteger,
): number {
return n1 < n2 ? -1 : n1 == n2 ? 0 : 1;
return n1 < n2 ? -1 : n1 == n2 ? 0 : 1
}
/**
* Base class for serializing and deserializing unsigned integers.
*/
abstract class UInt extends Comparable {
protected static width: number;
protected static width: number
constructor(bytes: Buffer) {
super(bytes);
super(bytes)
}
/**
@@ -33,7 +33,7 @@ abstract class UInt extends Comparable {
* @returns -1, 0, or 1 depending on how the objects relate to each other
*/
compareTo(other: UInt): number {
return compare(this.valueOf(), other.valueOf());
return compare(this.valueOf(), other.valueOf())
}
/**
@@ -42,8 +42,8 @@ abstract class UInt extends Comparable {
* @returns number or string represented by this.bytes
*/
toJSON(): number | string {
const val = this.valueOf();
return typeof val === "number" ? val : val.toString();
const val = this.valueOf()
return typeof val === 'number' ? val : val.toString()
}
/**
@@ -51,7 +51,7 @@ abstract class UInt extends Comparable {
*
* @returns the value
*/
abstract valueOf(): number | bigInt.BigInteger;
abstract valueOf(): number | bigInt.BigInteger
}
export { UInt };
export { UInt }

View File

@@ -1,14 +1,14 @@
import { SerializedType } from "./serialized-type";
import { BinaryParser } from "../serdes/binary-parser";
import { Hash256 } from "./hash-256";
import { BytesList } from "../serdes/binary-serializer";
import { Buffer } from "buffer/";
import { SerializedType } from './serialized-type'
import { BinaryParser } from '../serdes/binary-parser'
import { Hash256 } from './hash-256'
import { BytesList } from '../serdes/binary-serializer'
import { Buffer } from 'buffer/'
/**
* TypeGuard for Array<string>
*/
function isStrings(arg): arg is Array<string> {
return Array.isArray(arg) && (arg.length === 0 || typeof arg[0] === "string");
return Array.isArray(arg) && (arg.length === 0 || typeof arg[0] === 'string')
}
/**
@@ -16,7 +16,7 @@ function isStrings(arg): arg is Array<string> {
*/
class Vector256 extends SerializedType {
constructor(bytes: Buffer) {
super(bytes);
super(bytes)
}
/**
@@ -27,13 +27,13 @@ class Vector256 extends SerializedType {
* @returns a Vector256 object
*/
static fromParser(parser: BinaryParser, hint?: number): Vector256 {
const bytesList = new BytesList();
const bytes = hint ?? parser.size();
const hashes = bytes / 32;
const bytesList = new BytesList()
const bytes = hint ?? parser.size()
const hashes = bytes / 32
for (let i = 0; i < hashes; i++) {
Hash256.fromParser(parser).toBytesSink(bytesList);
Hash256.fromParser(parser).toBytesSink(bytesList)
}
return new Vector256(bytesList.toBytes());
return new Vector256(bytesList.toBytes())
}
/**
@@ -44,18 +44,18 @@ class Vector256 extends SerializedType {
*/
static from<T extends Vector256 | Array<string>>(value: T): Vector256 {
if (value instanceof Vector256) {
return value;
return value
}
if (isStrings(value)) {
const bytesList = new BytesList();
const bytesList = new BytesList()
value.forEach((hash) => {
Hash256.from(hash).toBytesSink(bytesList);
});
return new Vector256(bytesList.toBytes());
Hash256.from(hash).toBytesSink(bytesList)
})
return new Vector256(bytesList.toBytes())
}
throw new Error("Cannot construct Vector256 from given value");
throw new Error('Cannot construct Vector256 from given value')
}
/**
@@ -65,20 +65,20 @@ class Vector256 extends SerializedType {
*/
toJSON(): Array<string> {
if (this.bytes.byteLength % 32 !== 0) {
throw new Error("Invalid bytes for Vector256");
throw new Error('Invalid bytes for Vector256')
}
const result: Array<string> = [];
const result: Array<string> = []
for (let i = 0; i < this.bytes.byteLength; i += 32) {
result.push(
this.bytes
.slice(i, i + 32)
.toString("hex")
.toUpperCase()
);
.toString('hex')
.toUpperCase(),
)
}
return result;
return result
}
}
export { Vector256 };
export { Vector256 }

View File

@@ -1,43 +1,43 @@
const { loadFixture } = require("./utils");
const { coreTypes } = require("../dist/types");
const { Amount } = coreTypes;
const fixtures = loadFixture("data-driven-tests.json");
const { loadFixture } = require('./utils')
const { coreTypes } = require('../dist/types')
const { Amount } = coreTypes
const fixtures = loadFixture('data-driven-tests.json')
function amountErrorTests() {
fixtures.values_tests
.filter((obj) => obj.type === "Amount")
.filter((obj) => obj.type === 'Amount')
.forEach((f) => {
// We only want these with errors
if (!f.error) {
return;
return
}
const testName =
`${JSON.stringify(f.test_json)}\n\tis invalid ` + `because: ${f.error}`;
`${JSON.stringify(f.test_json)}\n\tis invalid ` + `because: ${f.error}`
it(testName, () => {
expect(() => {
Amount.from(f.test_json);
JSON.stringify(f.test_json);
}).toThrow();
});
});
Amount.from(f.test_json)
JSON.stringify(f.test_json)
}).toThrow()
})
})
}
describe("Amount", function () {
it("can be parsed from", function () {
expect(Amount.from("1000000") instanceof Amount).toBe(true);
expect(Amount.from("1000000").toJSON()).toEqual("1000000");
describe('Amount', function () {
it('can be parsed from', function () {
expect(Amount.from('1000000') instanceof Amount).toBe(true)
expect(Amount.from('1000000').toJSON()).toEqual('1000000')
const fixture = {
value: "1",
issuer: "0000000000000000000000000000000000000000",
currency: "USD",
};
const amt = Amount.from(fixture);
value: '1',
issuer: '0000000000000000000000000000000000000000',
currency: 'USD',
}
const amt = Amount.from(fixture)
const rewritten = {
value: "1",
issuer: "rrrrrrrrrrrrrrrrrrrrrhoLvTp",
currency: "USD",
};
expect(amt.toJSON()).toEqual(rewritten);
});
amountErrorTests();
});
value: '1',
issuer: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp',
currency: 'USD',
}
expect(amt.toJSON()).toEqual(rewritten)
})
amountErrorTests()
})

View File

@@ -1,45 +1,45 @@
const fixtures = require("./fixtures/codec-fixtures.json");
const { decode, encode, decodeLedgerData } = require("../dist");
const fixtures = require('./fixtures/codec-fixtures.json')
const { decode, encode, decodeLedgerData } = require('../dist')
function json(object) {
return JSON.stringify(object);
return JSON.stringify(object)
}
function truncateForDisplay(longStr) {
return `${longStr.slice(0, 10)} ... ${longStr.slice(-10)}`;
return `${longStr.slice(0, 10)} ... ${longStr.slice(-10)}`
}
describe("ripple-binary-codec", function () {
describe('ripple-binary-codec', function () {
function makeSuite(name, entries) {
describe(name, function () {
entries.forEach((t, testN) => {
test(`${name}[${testN}] can encode ${truncateForDisplay(
json(t.json)
json(t.json),
)} to ${truncateForDisplay(t.binary)}`, () => {
expect(encode(t.json)).toEqual(t.binary);
});
expect(encode(t.json)).toEqual(t.binary)
})
test(`${name}[${testN}] can decode ${truncateForDisplay(
t.binary
t.binary,
)} to ${truncateForDisplay(json(t.json))}`, () => {
const decoded = decode(t.binary);
expect(decoded).toEqual(t.json);
});
});
});
const decoded = decode(t.binary)
expect(decoded).toEqual(t.json)
})
})
})
}
makeSuite("transactions", fixtures.transactions);
makeSuite("accountState", fixtures.accountState);
makeSuite('transactions', fixtures.transactions)
makeSuite('accountState', fixtures.accountState)
describe("ledgerData", function () {
describe('ledgerData', function () {
if (fixtures.ledgerData) {
fixtures.ledgerData.forEach((t, testN) => {
test(`ledgerData[${testN}] can decode ${t.binary} to ${json(
t.json
t.json,
)}`, () => {
const decoded = decodeLedgerData(t.binary);
expect(t.json).toEqual(decoded);
});
});
const decoded = decodeLedgerData(t.binary)
expect(t.json).toEqual(decoded)
})
})
}
});
});
})
})

View File

@@ -1,69 +1,69 @@
const { coreTypes } = require("../dist/types");
const Decimal = require("decimal.js");
const { coreTypes } = require('../dist/types')
const Decimal = require('decimal.js')
const { encodeAccountID } = require("ripple-address-codec");
const { binary } = require("../dist/coretypes");
const { Amount, Hash160 } = coreTypes;
const { makeParser, readJSON } = binary;
const { Field, TransactionType } = require("./../dist/enums");
const { parseHexOnly, hexOnly, loadFixture } = require("./utils");
const fixtures = loadFixture("data-driven-tests.json");
const { BytesList } = require("../dist/serdes/binary-serializer");
const { Buffer } = require("buffer/");
const { encodeAccountID } = require('ripple-address-codec')
const { binary } = require('../dist/coretypes')
const { Amount, Hash160 } = coreTypes
const { makeParser, readJSON } = binary
const { Field, TransactionType } = require('./../dist/enums')
const { parseHexOnly, hexOnly, loadFixture } = require('./utils')
const fixtures = loadFixture('data-driven-tests.json')
const { BytesList } = require('../dist/serdes/binary-serializer')
const { Buffer } = require('buffer/')
const __ = hexOnly;
const __ = hexOnly
function toJSON(v) {
return v.toJSON ? v.toJSON() : v;
return v.toJSON ? v.toJSON() : v
}
function assertEqualAmountJSON(actual, expected) {
expect(typeof actual === typeof expected).toBe(true);
if (typeof actual === "string") {
expect(actual).toEqual(expected);
return;
expect(typeof actual === typeof expected).toBe(true)
if (typeof actual === 'string') {
expect(actual).toEqual(expected)
return
}
expect(actual.currency).toEqual(expected.currency);
expect(actual.issuer).toEqual(expected.issuer);
expect(actual.currency).toEqual(expected.currency)
expect(actual.issuer).toEqual(expected.issuer)
expect(
actual.value === expected.value ||
new Decimal(actual.value).equals(new Decimal(expected.value))
).toBe(true);
new Decimal(actual.value).equals(new Decimal(expected.value)),
).toBe(true)
}
function basicApiTests() {
const bytes = parseHexOnly("00,01020304,0506", Uint8Array);
test("can read slices of bytes", () => {
const parser = makeParser(bytes);
expect(parser.bytes instanceof Buffer).toBe(true);
const read1 = parser.read(1);
expect(read1 instanceof Buffer).toBe(true);
expect(read1).toEqual(Buffer.from([0]));
expect(parser.read(4)).toEqual(Buffer.from([1, 2, 3, 4]));
expect(parser.read(2)).toEqual(Buffer.from([5, 6]));
expect(() => parser.read(1)).toThrow();
});
test("can read a Uint32 at full", () => {
const parser = makeParser("FFFFFFFF");
expect(parser.readUInt32()).toEqual(0xffffffff);
});
const bytes = parseHexOnly('00,01020304,0506', Uint8Array)
test('can read slices of bytes', () => {
const parser = makeParser(bytes)
expect(parser.bytes instanceof Buffer).toBe(true)
const read1 = parser.read(1)
expect(read1 instanceof Buffer).toBe(true)
expect(read1).toEqual(Buffer.from([0]))
expect(parser.read(4)).toEqual(Buffer.from([1, 2, 3, 4]))
expect(parser.read(2)).toEqual(Buffer.from([5, 6]))
expect(() => parser.read(1)).toThrow()
})
test('can read a Uint32 at full', () => {
const parser = makeParser('FFFFFFFF')
expect(parser.readUInt32()).toEqual(0xffffffff)
})
}
function transactionParsingTests() {
const transaction = {
json: {
Account: "raD5qJMAShLeHZXf9wjUmo6vRK4arj9cF3",
Fee: "10",
Account: 'raD5qJMAShLeHZXf9wjUmo6vRK4arj9cF3',
Fee: '10',
Flags: 0,
Sequence: 103929,
SigningPubKey:
"028472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F418D6A7166",
'028472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F418D6A7166',
TakerGets: {
currency: "ILS",
issuer: "rNPRNzBB92BVpAhhZr4iXDTveCgV5Pofm9",
value: "1694.768",
currency: 'ILS',
issuer: 'rNPRNzBB92BVpAhhZr4iXDTveCgV5Pofm9',
value: '1694.768',
},
TakerPays: "98957503520",
TransactionType: "OfferCreate",
TakerPays: '98957503520',
TransactionType: 'OfferCreate',
TxnSignature: __(`
304502202ABE08D5E78D1E74A4C18F2714F64E87B8BD57444AF
A5733109EB3C077077520022100DB335EE97386E4C0591CAC02
@@ -78,208 +78,208 @@ function transactionParsingTests() {
8BD57444AFA5733109EB3C077077520022100DB335EE97386E4C059
1CAC024D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C811439408
A69F0895E62149CFCC006FB89FA7D1E6E5D`),
};
}
const tx_json = transaction.json;
const tx_json = transaction.json
// These tests are basically development logs
test("can be done with low level apis", () => {
const parser = makeParser(transaction.binary);
test('can be done with low level apis', () => {
const parser = makeParser(transaction.binary)
expect(parser.readField()).toEqual(Field.TransactionType);
expect(parser.readUInt16()).toEqual(7);
expect(parser.readField()).toEqual(Field.Flags);
expect(parser.readUInt32()).toEqual(0);
expect(parser.readField()).toEqual(Field.Sequence);
expect(parser.readUInt32()).toEqual(103929);
expect(parser.readField()).toEqual(Field.TakerPays);
parser.read(8);
expect(parser.readField()).toEqual(Field.TakerGets);
expect(parser.readField()).toEqual(Field.TransactionType)
expect(parser.readUInt16()).toEqual(7)
expect(parser.readField()).toEqual(Field.Flags)
expect(parser.readUInt32()).toEqual(0)
expect(parser.readField()).toEqual(Field.Sequence)
expect(parser.readUInt32()).toEqual(103929)
expect(parser.readField()).toEqual(Field.TakerPays)
parser.read(8)
expect(parser.readField()).toEqual(Field.TakerGets)
// amount value
expect(parser.read(8)).not.toBe([]);
expect(parser.read(8)).not.toBe([])
// amount currency
expect(Hash160.fromParser(parser)).not.toBe([]);
expect(encodeAccountID(parser.read(20))).toEqual(tx_json.TakerGets.issuer);
expect(parser.readField()).toEqual(Field.Fee);
expect(parser.read(8)).not.toEqual([]);
expect(parser.readField()).toEqual(Field.SigningPubKey);
expect(parser.readVariableLengthLength()).toBe(33);
expect(parser.read(33).toString("hex").toUpperCase()).toEqual(
tx_json.SigningPubKey
);
expect(parser.readField()).toEqual(Field.TxnSignature);
expect(parser.readVariableLength().toString("hex").toUpperCase()).toEqual(
tx_json.TxnSignature
);
expect(parser.readField()).toEqual(Field.Account);
expect(Hash160.fromParser(parser)).not.toBe([])
expect(encodeAccountID(parser.read(20))).toEqual(tx_json.TakerGets.issuer)
expect(parser.readField()).toEqual(Field.Fee)
expect(parser.read(8)).not.toEqual([])
expect(parser.readField()).toEqual(Field.SigningPubKey)
expect(parser.readVariableLengthLength()).toBe(33)
expect(parser.read(33).toString('hex').toUpperCase()).toEqual(
tx_json.SigningPubKey,
)
expect(parser.readField()).toEqual(Field.TxnSignature)
expect(parser.readVariableLength().toString('hex').toUpperCase()).toEqual(
tx_json.TxnSignature,
)
expect(parser.readField()).toEqual(Field.Account)
expect(encodeAccountID(parser.readVariableLength())).toEqual(
tx_json.Account
);
expect(parser.end()).toBe(true);
});
tx_json.Account,
)
expect(parser.end()).toBe(true)
})
test("can be done with high level apis", () => {
const parser = makeParser(transaction.binary);
test('can be done with high level apis', () => {
const parser = makeParser(transaction.binary)
function readField() {
return parser.readFieldAndValue();
return parser.readFieldAndValue()
}
{
const [field, value] = readField();
expect(field).toEqual(Field.TransactionType);
expect(value).toEqual(TransactionType.OfferCreate);
const [field, value] = readField()
expect(field).toEqual(Field.TransactionType)
expect(value).toEqual(TransactionType.OfferCreate)
}
{
const [field, value] = readField();
expect(field).toEqual(Field.Flags);
expect(value.valueOf()).toEqual(0);
const [field, value] = readField()
expect(field).toEqual(Field.Flags)
expect(value.valueOf()).toEqual(0)
}
{
const [field, value] = readField();
expect(field).toEqual(Field.Sequence);
expect(value.valueOf()).toEqual(103929);
const [field, value] = readField()
expect(field).toEqual(Field.Sequence)
expect(value.valueOf()).toEqual(103929)
}
{
const [field, value] = readField();
expect(field).toEqual(Field.TakerPays);
expect(value.isNative()).toEqual(true);
expect(value.toJSON()).toEqual("98957503520");
const [field, value] = readField()
expect(field).toEqual(Field.TakerPays)
expect(value.isNative()).toEqual(true)
expect(value.toJSON()).toEqual('98957503520')
}
{
const [field, value] = readField();
expect(field).toEqual(Field.TakerGets);
expect(value.isNative()).toEqual(false);
expect(value.toJSON().issuer).toEqual(tx_json.TakerGets.issuer);
const [field, value] = readField()
expect(field).toEqual(Field.TakerGets)
expect(value.isNative()).toEqual(false)
expect(value.toJSON().issuer).toEqual(tx_json.TakerGets.issuer)
}
{
const [field, value] = readField();
expect(field).toEqual(Field.Fee);
expect(value.isNative()).toEqual(true);
const [field, value] = readField()
expect(field).toEqual(Field.Fee)
expect(value.isNative()).toEqual(true)
}
{
const [field, value] = readField();
expect(field).toEqual(Field.SigningPubKey);
expect(value.toJSON()).toEqual(tx_json.SigningPubKey);
const [field, value] = readField()
expect(field).toEqual(Field.SigningPubKey)
expect(value.toJSON()).toEqual(tx_json.SigningPubKey)
}
{
const [field, value] = readField();
expect(field).toEqual(Field.TxnSignature);
expect(value.toJSON()).toEqual(tx_json.TxnSignature);
const [field, value] = readField()
expect(field).toEqual(Field.TxnSignature)
expect(value.toJSON()).toEqual(tx_json.TxnSignature)
}
{
const [field, value] = readField();
expect(field).toEqual(Field.Account);
expect(value.toJSON()).toEqual(tx_json.Account);
const [field, value] = readField()
expect(field).toEqual(Field.Account)
expect(value.toJSON()).toEqual(tx_json.Account)
}
expect(parser.end()).toBe(true);
});
expect(parser.end()).toBe(true)
})
test("can be done with higher level apis", () => {
const parser = makeParser(transaction.binary);
const jsonFromBinary = readJSON(parser);
expect(jsonFromBinary).toEqual(tx_json);
});
test('can be done with higher level apis', () => {
const parser = makeParser(transaction.binary)
const jsonFromBinary = readJSON(parser)
expect(jsonFromBinary).toEqual(tx_json)
})
test("readJSON (binary.decode) does not return STObject ", () => {
const parser = makeParser(transaction.binary);
const jsonFromBinary = readJSON(parser);
expect(jsonFromBinary instanceof coreTypes.STObject).toBe(false);
expect(jsonFromBinary instanceof Object).toBe(true);
expect(jsonFromBinary.prototype).toBe(undefined);
});
test('readJSON (binary.decode) does not return STObject ', () => {
const parser = makeParser(transaction.binary)
const jsonFromBinary = readJSON(parser)
expect(jsonFromBinary instanceof coreTypes.STObject).toBe(false)
expect(jsonFromBinary instanceof Object).toBe(true)
expect(jsonFromBinary.prototype).toBe(undefined)
})
}
function amountParsingTests() {
fixtures.values_tests
.filter((obj) => obj.type === "Amount")
.filter((obj) => obj.type === 'Amount')
.forEach((f, i) => {
if (f.error) {
return;
return
}
const parser = makeParser(f.expected_hex);
const parser = makeParser(f.expected_hex)
const testName = `values_tests[${i}] parses ${f.expected_hex.slice(
0,
16
16,
)}...
as ${JSON.stringify(f.test_json)}`;
as ${JSON.stringify(f.test_json)}`
test(testName, () => {
const value = parser.readType(Amount);
const value = parser.readType(Amount)
// May not actually be in canonical form. The fixtures are to be used
// also for json -> binary;
const json = toJSON(value);
assertEqualAmountJSON(json, f.test_json);
const json = toJSON(value)
assertEqualAmountJSON(json, f.test_json)
if (f.exponent) {
const exponent = new Decimal(json.value);
expect(exponent.e - 15).toEqual(f.exponent);
const exponent = new Decimal(json.value)
expect(exponent.e - 15).toEqual(f.exponent)
}
});
});
})
})
}
function fieldParsingTests() {
fixtures.fields_tests.forEach((f, i) => {
const parser = makeParser(f.expected_hex);
const parser = makeParser(f.expected_hex)
test(`fields[${i}]: parses ${f.expected_hex} as ${f.name}`, () => {
const field = parser.readField();
expect(field.name).toEqual(f.name);
expect(field.type.name).toEqual(f.type_name);
});
});
test("Field throws when type code out of range", () => {
const parser = makeParser("0101");
const field = parser.readField()
expect(field.name).toEqual(f.name)
expect(field.type.name).toEqual(f.type_name)
})
})
test('Field throws when type code out of range', () => {
const parser = makeParser('0101')
expect(() => parser.readField()).toThrow(
new Error("Cannot read FieldOrdinal, type_code out of range")
);
});
test("Field throws when field code out of range", () => {
const parser = makeParser("1001");
new Error('Cannot read FieldOrdinal, type_code out of range'),
)
})
test('Field throws when field code out of range', () => {
const parser = makeParser('1001')
expect(() => parser.readFieldOrdinal()).toThrowError(
new Error("Cannot read FieldOrdinal, field_code out of range")
);
});
test("Field throws when both type and field code out of range", () => {
const parser = makeParser("000101");
new Error('Cannot read FieldOrdinal, field_code out of range'),
)
})
test('Field throws when both type and field code out of range', () => {
const parser = makeParser('000101')
expect(() => parser.readFieldOrdinal()).toThrowError(
new Error("Cannot read FieldOrdinal, type_code out of range")
);
});
new Error('Cannot read FieldOrdinal, type_code out of range'),
)
})
}
function assertRecyclable(json, forField) {
const Type = forField.associatedType;
const recycled = Type.from(json).toJSON();
expect(recycled).toEqual(json);
const sink = new BytesList();
Type.from(recycled).toBytesSink(sink);
const recycledAgain = makeParser(sink.toHex()).readType(Type).toJSON();
expect(recycledAgain).toEqual(json);
const Type = forField.associatedType
const recycled = Type.from(json).toJSON()
expect(recycled).toEqual(json)
const sink = new BytesList()
Type.from(recycled).toBytesSink(sink)
const recycledAgain = makeParser(sink.toHex()).readType(Type).toJSON()
expect(recycledAgain).toEqual(json)
}
function nestedObjectTests() {
fixtures.whole_objects.forEach((f, i) => {
test(`whole_objects[${i}]: can parse blob into
${JSON.stringify(
f.tx_json
f.tx_json,
)}`, /* */ () => {
const parser = makeParser(f.blob_with_no_signing);
let ix = 0;
const parser = makeParser(f.blob_with_no_signing)
let ix = 0
while (!parser.end()) {
const [field, value] = parser.readFieldAndValue();
const expected = f.fields[ix];
const expectedJSON = expected[1].json;
const expectedField = expected[0];
const actual = toJSON(value);
const [field, value] = parser.readFieldAndValue()
const expected = f.fields[ix]
const expectedJSON = expected[1].json
const expectedField = expected[0]
const actual = toJSON(value)
try {
expect(actual).toEqual(expectedJSON);
expect(actual).toEqual(expectedJSON)
} catch (e) {
throw new Error(`${e} ${field} a: ${actual} e: ${expectedJSON}`);
throw new Error(`${e} ${field} a: ${actual} e: ${expectedJSON}`)
}
expect(field.name).toEqual(expectedField);
assertRecyclable(actual, field);
ix++;
expect(field.name).toEqual(expectedField)
assertRecyclable(actual, field)
ix++
}
});
});
})
})
}
function pathSetBinaryTests() {
@@ -312,85 +312,85 @@ function pathSetBinaryTests() {
69E6DCC940CA48D82337AD000000000000000000000000425443000000000057
180C769B66D942EE69E6DCC940CA48D82337AD10000000000000000000000000
00000000000000003000000000000000000000000055534400000000000A20B3
C85F482532A9578DBB3950B85CA06594D100`
);
C85F482532A9578DBB3950B85CA06594D100`,
)
const expectedJSON = [
[
{
account: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
currency: "BTC",
issuer: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
currency: 'BTC',
issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
},
{
account: "rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo",
currency: "BTC",
issuer: "rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo",
account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
currency: 'BTC',
issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
},
{
account: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
currency: "BTC",
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
account: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
currency: 'BTC',
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
},
{
currency: "USD",
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
currency: 'USD',
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
},
],
[
{
account: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
currency: "BTC",
issuer: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
currency: 'BTC',
issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
},
{
account: "rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo",
currency: "BTC",
issuer: "rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo",
account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
currency: 'BTC',
issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
},
{
account: "rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi",
currency: "BTC",
issuer: "rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi",
account: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi',
currency: 'BTC',
issuer: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi',
},
{
currency: "USD",
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
currency: 'USD',
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
},
],
[
{
account: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
currency: "BTC",
issuer: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
currency: 'BTC',
issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
},
{
account: "r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn",
currency: "BTC",
issuer: "r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn",
account: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn',
currency: 'BTC',
issuer: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn',
},
{ currency: "XRP" },
{ currency: 'XRP' },
{
currency: "USD",
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
currency: 'USD',
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
},
],
];
]
test("works with long paths", () => {
const parser = makeParser(bytes);
const txn = readJSON(parser);
expect(txn.Paths).toEqual(expectedJSON);
test('works with long paths', () => {
const parser = makeParser(bytes)
const txn = readJSON(parser)
expect(txn.Paths).toEqual(expectedJSON)
// TODO: this should go elsewhere
expect(coreTypes.PathSet.from(txn.Paths).toJSON()).toEqual(expectedJSON);
});
expect(coreTypes.PathSet.from(txn.Paths).toJSON()).toEqual(expectedJSON)
})
}
describe("Binary Parser", function () {
describe("pathSetBinaryTests", () => pathSetBinaryTests());
describe("nestedObjectTests", () => nestedObjectTests());
describe("fieldParsingTests", () => fieldParsingTests());
describe("amountParsingTests", () => amountParsingTests());
describe("transactionParsingTests", () => transactionParsingTests());
describe("basicApiTests", () => basicApiTests());
});
describe('Binary Parser', function () {
describe('pathSetBinaryTests', () => pathSetBinaryTests())
describe('nestedObjectTests', () => nestedObjectTests())
describe('fieldParsingTests', () => fieldParsingTests())
describe('amountParsingTests', () => amountParsingTests())
describe('transactionParsingTests', () => transactionParsingTests())
describe('basicApiTests', () => basicApiTests())
})

View File

@@ -1,260 +1,258 @@
const { binary } = require("../dist/coretypes");
const { encode, decode } = require("../dist");
const { makeParser, BytesList, BinarySerializer } = binary;
const { coreTypes } = require("../dist/types");
const { UInt8, UInt16, UInt32, UInt64, STObject } = coreTypes;
const bigInt = require("big-integer");
const { Buffer } = require("buffer/");
const { binary } = require('../dist/coretypes')
const { encode, decode } = require('../dist')
const { makeParser, BytesList, BinarySerializer } = binary
const { coreTypes } = require('../dist/types')
const { UInt8, UInt16, UInt32, UInt64, STObject } = coreTypes
const bigInt = require('big-integer')
const { Buffer } = require('buffer/')
const { loadFixture } = require("./utils");
const fixtures = loadFixture("data-driven-tests.json");
const deliverMinTx = require("./fixtures/delivermin-tx.json");
const deliverMinTxBinary = require("./fixtures/delivermin-tx-binary.json");
const { loadFixture } = require('./utils')
const fixtures = loadFixture('data-driven-tests.json')
const deliverMinTx = require('./fixtures/delivermin-tx.json')
const deliverMinTxBinary = require('./fixtures/delivermin-tx-binary.json')
const SignerListSet = {
tx: require("./fixtures/signerlistset-tx.json"),
binary: require("./fixtures/signerlistset-tx-binary.json"),
meta: require("./fixtures/signerlistset-tx-meta-binary.json"),
};
tx: require('./fixtures/signerlistset-tx.json'),
binary: require('./fixtures/signerlistset-tx-binary.json'),
meta: require('./fixtures/signerlistset-tx-meta-binary.json'),
}
const DepositPreauth = {
tx: require("./fixtures/deposit-preauth-tx.json"),
binary: require("./fixtures/deposit-preauth-tx-binary.json"),
meta: require("./fixtures/deposit-preauth-tx-meta-binary.json"),
};
tx: require('./fixtures/deposit-preauth-tx.json'),
binary: require('./fixtures/deposit-preauth-tx-binary.json'),
meta: require('./fixtures/deposit-preauth-tx-meta-binary.json'),
}
const Escrow = {
create: {
tx: require("./fixtures/escrow-create-tx.json"),
binary: require("./fixtures/escrow-create-binary.json"),
tx: require('./fixtures/escrow-create-tx.json'),
binary: require('./fixtures/escrow-create-binary.json'),
},
finish: {
tx: require("./fixtures/escrow-finish-tx.json"),
binary: require("./fixtures/escrow-finish-binary.json"),
meta: require("./fixtures/escrow-finish-meta-binary.json"),
tx: require('./fixtures/escrow-finish-tx.json'),
binary: require('./fixtures/escrow-finish-binary.json'),
meta: require('./fixtures/escrow-finish-meta-binary.json'),
},
cancel: {
tx: require("./fixtures/escrow-cancel-tx.json"),
binary: require("./fixtures/escrow-cancel-binary.json"),
tx: require('./fixtures/escrow-cancel-tx.json'),
binary: require('./fixtures/escrow-cancel-binary.json'),
},
};
}
const PaymentChannel = {
create: {
tx: require("./fixtures/payment-channel-create-tx.json"),
binary: require("./fixtures/payment-channel-create-binary.json"),
tx: require('./fixtures/payment-channel-create-tx.json'),
binary: require('./fixtures/payment-channel-create-binary.json'),
},
fund: {
tx: require("./fixtures/payment-channel-fund-tx.json"),
binary: require("./fixtures/payment-channel-fund-binary.json"),
tx: require('./fixtures/payment-channel-fund-tx.json'),
binary: require('./fixtures/payment-channel-fund-binary.json'),
},
claim: {
tx: require("./fixtures/payment-channel-claim-tx.json"),
binary: require("./fixtures/payment-channel-claim-binary.json"),
tx: require('./fixtures/payment-channel-claim-tx.json'),
binary: require('./fixtures/payment-channel-claim-binary.json'),
},
};
}
const Ticket = {
create: {
tx: require("./fixtures/ticket-create-tx.json"),
binary: require("./fixtures/ticket-create-binary.json"),
tx: require('./fixtures/ticket-create-tx.json'),
binary: require('./fixtures/ticket-create-binary.json'),
},
};
}
let json_undefined = {
TakerPays: "223174650",
Account: "rPk2dXr27rMw9G5Ej9ad2Tt7RJzGy8ycBp",
TransactionType: "OfferCreate",
TakerPays: '223174650',
Account: 'rPk2dXr27rMw9G5Ej9ad2Tt7RJzGy8ycBp',
TransactionType: 'OfferCreate',
Memos: [
{
Memo: {
MemoType: "584D4D2076616C7565",
MemoData: "322E3230393635",
MemoType: '584D4D2076616C7565',
MemoData: '322E3230393635',
MemoFormat: undefined,
},
},
],
Fee: "15",
Fee: '15',
OfferSequence: undefined,
TakerGets: {
currency: "XMM",
value: "100",
issuer: "rExAPEZvbkZqYPuNcZ7XEBLENEshsWDQc8",
currency: 'XMM',
value: '100',
issuer: 'rExAPEZvbkZqYPuNcZ7XEBLENEshsWDQc8',
},
Flags: 524288,
Sequence: undefined,
LastLedgerSequence: 6220135,
};
}
let json_omitted = {
TakerPays: "223174650",
Account: "rPk2dXr27rMw9G5Ej9ad2Tt7RJzGy8ycBp",
TransactionType: "OfferCreate",
TakerPays: '223174650',
Account: 'rPk2dXr27rMw9G5Ej9ad2Tt7RJzGy8ycBp',
TransactionType: 'OfferCreate',
Memos: [
{
Memo: {
MemoType: "584D4D2076616C7565",
MemoData: "322E3230393635",
MemoType: '584D4D2076616C7565',
MemoData: '322E3230393635',
},
},
],
Fee: "15",
Fee: '15',
TakerGets: {
currency: "XMM",
value: "100",
issuer: "rExAPEZvbkZqYPuNcZ7XEBLENEshsWDQc8",
currency: 'XMM',
value: '100',
issuer: 'rExAPEZvbkZqYPuNcZ7XEBLENEshsWDQc8',
},
Flags: 524288,
LastLedgerSequence: 6220135,
};
}
const NegativeUNL = require("./fixtures/negative-unl.json");
const NegativeUNL = require('./fixtures/negative-unl.json')
function bytesListTest() {
const list = new BytesList()
.put(Buffer.from([0]))
.put(Buffer.from([2, 3]))
.put(Buffer.from([4, 5]));
test("is an Array<Buffer>", function () {
expect(Array.isArray(list.bytesArray)).toBe(true);
expect(list.bytesArray[0] instanceof Buffer).toBe(true);
});
test("keeps track of the length itself", function () {
expect(list.getLength()).toBe(5);
});
test("can join all arrays into one via toBytes", function () {
const joined = list.toBytes();
expect(joined).toHaveLength(5);
expect(joined).toEqual(Buffer.from([0, 2, 3, 4, 5]));
});
.put(Buffer.from([4, 5]))
test('is an Array<Buffer>', function () {
expect(Array.isArray(list.bytesArray)).toBe(true)
expect(list.bytesArray[0] instanceof Buffer).toBe(true)
})
test('keeps track of the length itself', function () {
expect(list.getLength()).toBe(5)
})
test('can join all arrays into one via toBytes', function () {
const joined = list.toBytes()
expect(joined).toHaveLength(5)
expect(joined).toEqual(Buffer.from([0, 2, 3, 4, 5]))
})
}
function assertRecycles(blob) {
const parser = makeParser(blob);
const so = parser.readType(STObject);
const out = new BytesList();
so.toBytesSink(out);
const hex = out.toHex();
expect(hex).toEqual(blob);
expect(hex + ":").not.toEqual(blob);
const parser = makeParser(blob)
const so = parser.readType(STObject)
const out = new BytesList()
so.toBytesSink(out)
const hex = out.toHex()
expect(hex).toEqual(blob)
expect(hex + ':').not.toEqual(blob)
}
function nestedObjectTests() {
fixtures.whole_objects.forEach((f, i) => {
test(`whole_objects[${i}]: can parse blob and dump out same blob`, () => {
assertRecycles(f.blob_with_no_signing);
});
});
assertRecycles(f.blob_with_no_signing)
})
})
}
function check(type, n, expected) {
test(`Uint${type.width * 8} serializes ${n} as ${expected}`, function () {
const bl = new BytesList();
const serializer = new BinarySerializer(bl);
if (expected === "throws") {
expect(() => serializer.writeType(type, n)).toThrow();
return;
const bl = new BytesList()
const serializer = new BinarySerializer(bl)
if (expected === 'throws') {
expect(() => serializer.writeType(type, n)).toThrow()
return
}
serializer.writeType(type, n);
expect(bl.toBytes()).toEqual(Buffer.from(expected));
});
serializer.writeType(type, n)
expect(bl.toBytes()).toEqual(Buffer.from(expected))
})
}
check(UInt8, 5, [5]);
check(UInt16, 5, [0, 5]);
check(UInt32, 5, [0, 0, 0, 5]);
check(UInt32, 0xffffffff, [255, 255, 255, 255]);
check(UInt8, 0xfeffffff, "throws");
check(UInt16, 0xfeffffff, "throws");
check(UInt16, 0xfeffffff, "throws");
check(UInt64, 0xfeffffff, [0, 0, 0, 0, 254, 255, 255, 255]);
check(UInt64, -1, "throws");
check(UInt64, 0, [0, 0, 0, 0, 0, 0, 0, 0]);
check(UInt64, 1, [0, 0, 0, 0, 0, 0, 0, 1]);
check(UInt64, bigInt(1), [0, 0, 0, 0, 0, 0, 0, 1]);
check(UInt8, 5, [5])
check(UInt16, 5, [0, 5])
check(UInt32, 5, [0, 0, 0, 5])
check(UInt32, 0xffffffff, [255, 255, 255, 255])
check(UInt8, 0xfeffffff, 'throws')
check(UInt16, 0xfeffffff, 'throws')
check(UInt16, 0xfeffffff, 'throws')
check(UInt64, 0xfeffffff, [0, 0, 0, 0, 254, 255, 255, 255])
check(UInt64, -1, 'throws')
check(UInt64, 0, [0, 0, 0, 0, 0, 0, 0, 0])
check(UInt64, 1, [0, 0, 0, 0, 0, 0, 0, 1])
check(UInt64, bigInt(1), [0, 0, 0, 0, 0, 0, 0, 1])
function deliverMinTest() {
test("can serialize DeliverMin", () => {
expect(encode(deliverMinTx)).toEqual(deliverMinTxBinary);
});
test('can serialize DeliverMin', () => {
expect(encode(deliverMinTx)).toEqual(deliverMinTxBinary)
})
}
function SignerListSetTest() {
test("can serialize SignerListSet", () => {
expect(encode(SignerListSet.tx)).toEqual(SignerListSet.binary);
});
test("can serialize SignerListSet metadata", () => {
expect(encode(SignerListSet.tx.meta)).toEqual(SignerListSet.meta);
});
test('can serialize SignerListSet', () => {
expect(encode(SignerListSet.tx)).toEqual(SignerListSet.binary)
})
test('can serialize SignerListSet metadata', () => {
expect(encode(SignerListSet.tx.meta)).toEqual(SignerListSet.meta)
})
}
function DepositPreauthTest() {
test("can serialize DepositPreauth", () => {
expect(encode(DepositPreauth.tx)).toEqual(DepositPreauth.binary);
});
test("can serialize DepositPreauth metadata", () => {
expect(encode(DepositPreauth.tx.meta)).toEqual(DepositPreauth.meta);
});
test('can serialize DepositPreauth', () => {
expect(encode(DepositPreauth.tx)).toEqual(DepositPreauth.binary)
})
test('can serialize DepositPreauth metadata', () => {
expect(encode(DepositPreauth.tx.meta)).toEqual(DepositPreauth.meta)
})
}
function EscrowTest() {
test("can serialize EscrowCreate", () => {
expect(encode(Escrow.create.tx)).toEqual(Escrow.create.binary);
});
test("can serialize EscrowFinish", () => {
expect(encode(Escrow.finish.tx)).toEqual(Escrow.finish.binary);
expect(encode(Escrow.finish.tx.meta)).toEqual(Escrow.finish.meta);
});
test("can serialize EscrowCancel", () => {
expect(encode(Escrow.cancel.tx)).toEqual(Escrow.cancel.binary);
});
test('can serialize EscrowCreate', () => {
expect(encode(Escrow.create.tx)).toEqual(Escrow.create.binary)
})
test('can serialize EscrowFinish', () => {
expect(encode(Escrow.finish.tx)).toEqual(Escrow.finish.binary)
expect(encode(Escrow.finish.tx.meta)).toEqual(Escrow.finish.meta)
})
test('can serialize EscrowCancel', () => {
expect(encode(Escrow.cancel.tx)).toEqual(Escrow.cancel.binary)
})
}
function PaymentChannelTest() {
test("can serialize PaymentChannelCreate", () => {
test('can serialize PaymentChannelCreate', () => {
expect(encode(PaymentChannel.create.tx)).toEqual(
PaymentChannel.create.binary
);
});
test("can serialize PaymentChannelFund", () => {
expect(encode(PaymentChannel.fund.tx)).toEqual(PaymentChannel.fund.binary);
});
test("can serialize PaymentChannelClaim", () => {
expect(encode(PaymentChannel.claim.tx)).toEqual(
PaymentChannel.claim.binary
);
});
PaymentChannel.create.binary,
)
})
test('can serialize PaymentChannelFund', () => {
expect(encode(PaymentChannel.fund.tx)).toEqual(PaymentChannel.fund.binary)
})
test('can serialize PaymentChannelClaim', () => {
expect(encode(PaymentChannel.claim.tx)).toEqual(PaymentChannel.claim.binary)
})
}
function NegativeUNLTest() {
test("can serialize NegativeUNL", () => {
expect(encode(NegativeUNL.tx)).toEqual(NegativeUNL.binary);
});
test("can deserialize NegativeUNL", () => {
expect(decode(NegativeUNL.binary)).toEqual(NegativeUNL.tx);
});
test('can serialize NegativeUNL', () => {
expect(encode(NegativeUNL.tx)).toEqual(NegativeUNL.binary)
})
test('can deserialize NegativeUNL', () => {
expect(decode(NegativeUNL.binary)).toEqual(NegativeUNL.tx)
})
}
function omitUndefinedTest() {
test("omits fields with undefined value", () => {
let encodedOmitted = encode(json_omitted);
let encodedUndefined = encode(json_undefined);
expect(encodedOmitted).toEqual(encodedUndefined);
expect(decode(encodedOmitted)).toEqual(decode(encodedUndefined));
});
test('omits fields with undefined value', () => {
let encodedOmitted = encode(json_omitted)
let encodedUndefined = encode(json_undefined)
expect(encodedOmitted).toEqual(encodedUndefined)
expect(decode(encodedOmitted)).toEqual(decode(encodedUndefined))
})
}
function ticketTest() {
test("can serialize TicketCreate", () => {
expect(encode(Ticket.create.tx)).toEqual(Ticket.create.binary);
});
test('can serialize TicketCreate', () => {
expect(encode(Ticket.create.tx)).toEqual(Ticket.create.binary)
})
}
describe("Binary Serialization", function () {
describe("nestedObjectTests", () => nestedObjectTests());
describe("BytesList", () => bytesListTest());
describe("DeliverMin", () => deliverMinTest());
describe("DepositPreauth", () => DepositPreauthTest());
describe("SignerListSet", () => SignerListSetTest());
describe("Escrow", () => EscrowTest());
describe("PaymentChannel", () => PaymentChannelTest());
describe("NegativeUNLTest", () => NegativeUNLTest());
describe("OmitUndefined", () => omitUndefinedTest());
describe("TicketTest", () => ticketTest());
});
describe('Binary Serialization', function () {
describe('nestedObjectTests', () => nestedObjectTests())
describe('BytesList', () => bytesListTest())
describe('DeliverMin', () => deliverMinTest())
describe('DepositPreauth', () => DepositPreauthTest())
describe('SignerListSet', () => SignerListSetTest())
describe('Escrow', () => EscrowTest())
describe('PaymentChannel', () => PaymentChannelTest())
describe('NegativeUNLTest', () => NegativeUNLTest())
describe('OmitUndefined', () => omitUndefinedTest())
describe('TicketTest', () => ticketTest())
})

View File

@@ -1,87 +1,87 @@
const { coreTypes } = require("../dist/types");
const { Hash160, Hash256, AccountID, Currency } = coreTypes;
const { Buffer } = require("buffer/");
const { coreTypes } = require('../dist/types')
const { Hash160, Hash256, AccountID, Currency } = coreTypes
const { Buffer } = require('buffer/')
describe("Hash160", function () {
test("has a static width member", function () {
expect(Hash160.width).toBe(20);
});
test("inherited by subclasses", function () {
expect(AccountID.width).toBe(20);
expect(Currency.width).toBe(20);
});
test("can be compared against another", function () {
const h1 = Hash160.from("1000000000000000000000000000000000000000");
const h2 = Hash160.from("2000000000000000000000000000000000000000");
const h3 = Hash160.from("0000000000000000000000000000000000000003");
expect(h1.lt(h2)).toBe(true);
expect(h3.lt(h2)).toBe(true);
});
test("throws when constructed from invalid hash length", () => {
describe('Hash160', function () {
test('has a static width member', function () {
expect(Hash160.width).toBe(20)
})
test('inherited by subclasses', function () {
expect(AccountID.width).toBe(20)
expect(Currency.width).toBe(20)
})
test('can be compared against another', function () {
const h1 = Hash160.from('1000000000000000000000000000000000000000')
const h2 = Hash160.from('2000000000000000000000000000000000000000')
const h3 = Hash160.from('0000000000000000000000000000000000000003')
expect(h1.lt(h2)).toBe(true)
expect(h3.lt(h2)).toBe(true)
})
test('throws when constructed from invalid hash length', () => {
expect(() =>
Hash160.from("10000000000000000000000000000000000000")
).toThrow("Invalid Hash length 19");
Hash160.from('10000000000000000000000000000000000000'),
).toThrow('Invalid Hash length 19')
expect(() =>
Hash160.from("100000000000000000000000000000000000000000")
).toThrow("Invalid Hash length 21");
});
});
Hash160.from('100000000000000000000000000000000000000000'),
).toThrow('Invalid Hash length 21')
})
})
describe("Hash256", function () {
test("has a static width member", function () {
expect(Hash256.width).toBe(32);
});
test("has a ZERO_256 member", function () {
describe('Hash256', function () {
test('has a static width member', function () {
expect(Hash256.width).toBe(32)
})
test('has a ZERO_256 member', function () {
expect(Hash256.ZERO_256.toJSON()).toBe(
"0000000000000000000000000000000000000000000000000000000000000000"
);
});
test("supports getting the nibblet values at given positions", function () {
'0000000000000000000000000000000000000000000000000000000000000000',
)
})
test('supports getting the nibblet values at given positions', function () {
const h = Hash256.from(
"1359BD0000000000000000000000000000000000000000000000000000000000"
);
expect(h.nibblet(0)).toBe(0x1);
expect(h.nibblet(1)).toBe(0x3);
expect(h.nibblet(2)).toBe(0x5);
expect(h.nibblet(3)).toBe(0x9);
expect(h.nibblet(4)).toBe(0x0b);
expect(h.nibblet(5)).toBe(0xd);
});
});
'1359BD0000000000000000000000000000000000000000000000000000000000',
)
expect(h.nibblet(0)).toBe(0x1)
expect(h.nibblet(1)).toBe(0x3)
expect(h.nibblet(2)).toBe(0x5)
expect(h.nibblet(3)).toBe(0x9)
expect(h.nibblet(4)).toBe(0x0b)
expect(h.nibblet(5)).toBe(0xd)
})
})
describe("Currency", function () {
test("Will throw an error for dodgy XRP ", function () {
describe('Currency', function () {
test('Will throw an error for dodgy XRP ', function () {
expect(() =>
Currency.from("0000000000000000000000005852500000000000")
).toThrow();
});
test("Currency with lowercase letters decode to hex", () => {
expect(Currency.from("xRp").toJSON()).toBe(
"0000000000000000000000007852700000000000"
);
});
test("Currency codes with symbols decode to hex", () => {
expect(Currency.from("x|p").toJSON()).toBe(
"000000000000000000000000787C700000000000"
);
});
test("Currency codes with uppercase and 0-9 decode to ISO codes", () => {
expect(Currency.from("X8P").toJSON()).toBe("X8P");
expect(Currency.from("USD").toJSON()).toBe("USD");
});
test("can be constructed from a Buffer", function () {
const xrp = new Currency(Buffer.alloc(20));
expect(xrp.iso()).toBe("XRP");
});
test("Can handle non-standard currency codes", () => {
const currency = "015841551A748AD2C1F76FF6ECB0CCCD00000000";
expect(Currency.from(currency).toJSON()).toBe(currency);
});
test("throws on invalid reprs", function () {
expect(() => Currency.from(Buffer.alloc(19))).toThrow();
expect(() => Currency.from(1)).toThrow();
Currency.from('0000000000000000000000005852500000000000'),
).toThrow()
})
test('Currency with lowercase letters decode to hex', () => {
expect(Currency.from('xRp').toJSON()).toBe(
'0000000000000000000000007852700000000000',
)
})
test('Currency codes with symbols decode to hex', () => {
expect(Currency.from('x|p').toJSON()).toBe(
'000000000000000000000000787C700000000000',
)
})
test('Currency codes with uppercase and 0-9 decode to ISO codes', () => {
expect(Currency.from('X8P').toJSON()).toBe('X8P')
expect(Currency.from('USD').toJSON()).toBe('USD')
})
test('can be constructed from a Buffer', function () {
const xrp = new Currency(Buffer.alloc(20))
expect(xrp.iso()).toBe('XRP')
})
test('Can handle non-standard currency codes', () => {
const currency = '015841551A748AD2C1F76FF6ECB0CCCD00000000'
expect(Currency.from(currency).toJSON()).toBe(currency)
})
test('throws on invalid reprs', function () {
expect(() => Currency.from(Buffer.alloc(19))).toThrow()
expect(() => Currency.from(1)).toThrow()
expect(() =>
Currency.from("00000000000000000000000000000000000000m")
).toThrow();
});
});
Currency.from('00000000000000000000000000000000000000m'),
).toThrow()
})
})

View File

@@ -1,29 +1,29 @@
const { loadFixture } = require("./utils");
const { loadFixture } = require('./utils')
const {
transactionTreeHash,
ledgerHash,
accountStateHash,
} = require("../dist/ledger-hashes");
} = require('../dist/ledger-hashes')
describe("Ledger Hashes", function () {
describe('Ledger Hashes', function () {
function testFactory(ledgerFixture) {
describe(`can calculate hashes for ${ledgerFixture}`, function () {
const ledger = loadFixture(ledgerFixture);
test("computes correct account state hash", function () {
const ledger = loadFixture(ledgerFixture)
test('computes correct account state hash', function () {
expect(accountStateHash(ledger.accountState).toHex()).toBe(
ledger.account_hash
);
});
test("computes correct transaction tree hash", function () {
ledger.account_hash,
)
})
test('computes correct transaction tree hash', function () {
expect(transactionTreeHash(ledger.transactions).toHex()).toBe(
ledger.transaction_hash
);
});
test("computes correct ledger header hash", function () {
expect(ledgerHash(ledger).toHex()).toBe(ledger.hash);
});
});
ledger.transaction_hash,
)
})
test('computes correct ledger header hash', function () {
expect(ledgerHash(ledger).toHex()).toBe(ledger.hash)
})
})
}
testFactory("ledger-full-40000.json");
testFactory("ledger-full-38129.json");
});
testFactory('ledger-full-40000.json')
testFactory('ledger-full-38129.json')
})

View File

@@ -1,46 +1,46 @@
const { encode, decode } = require("../dist");
const { encode, decode } = require('../dist')
let str =
"1100612200000000240000000125000068652D0000000055B6632D6376A2D9319F20A1C6DCCB486432D1E4A79951229D4C3DE2946F51D56662400009184E72A00081140DD319918CD5AE792BF7EC80D63B0F01B4573BBC";
let lower = str.toLowerCase();
'1100612200000000240000000125000068652D0000000055B6632D6376A2D9319F20A1C6DCCB486432D1E4A79951229D4C3DE2946F51D56662400009184E72A00081140DD319918CD5AE792BF7EC80D63B0F01B4573BBC'
let lower = str.toLowerCase()
let bin =
"1100612200000000240000000125000000082D00000000550735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E56240000002540BE400811479927BAFFD3D04A26096C0C97B1B0D45B01AD3C0";
'1100612200000000240000000125000000082D00000000550735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E56240000002540BE400811479927BAFFD3D04A26096C0C97B1B0D45B01AD3C0'
let json = {
OwnerCount: 0,
Account: "rUnFEsHjxqTswbivzL2DNHBb34rhAgZZZK",
Account: 'rUnFEsHjxqTswbivzL2DNHBb34rhAgZZZK',
PreviousTxnLgrSeq: 8,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"0735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E5".toLowerCase(),
'0735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E5'.toLowerCase(),
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};
Balance: '10000000000',
}
let jsonUpper = {
OwnerCount: 0,
Account: "rUnFEsHjxqTswbivzL2DNHBb34rhAgZZZK",
Account: 'rUnFEsHjxqTswbivzL2DNHBb34rhAgZZZK',
PreviousTxnLgrSeq: 8,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"0735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E5",
'0735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E5',
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};
Balance: '10000000000',
}
describe("Lowercase hex test", () => {
test("Correctly decodes", () => {
expect(decode(lower)).toEqual(decode(str));
});
test("Re-encodes to uppercase hex", () => {
expect(encode(decode(lower))).toEqual(str);
});
test("Encode when hex field lowercase", () => {
expect(encode(json)).toBe(bin);
});
test("Re-decodes to uppercase hex", () => {
expect(decode(encode(json))).toEqual(jsonUpper);
});
});
describe('Lowercase hex test', () => {
test('Correctly decodes', () => {
expect(decode(lower)).toEqual(decode(str))
})
test('Re-encodes to uppercase hex', () => {
expect(encode(decode(lower))).toEqual(str)
})
test('Encode when hex field lowercase', () => {
expect(encode(json)).toBe(bin)
})
test('Re-decodes to uppercase hex', () => {
expect(decode(encode(json))).toEqual(jsonUpper)
})
})

View File

@@ -1,38 +1,38 @@
const { encode, decode } = require("../dist");
const { encode, decode } = require('../dist')
let json = {
Account: "rrrrrrrrrrrrrrrrrrrrrhoLvTp",
Account: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp',
Sequence: 0,
Fee: "0",
SigningPubKey: "",
Signature: "",
};
Fee: '0',
SigningPubKey: '',
Signature: '',
}
let json_blank_acct = {
Account: "",
Account: '',
Sequence: 0,
Fee: "0",
SigningPubKey: "",
Signature: "",
};
Fee: '0',
SigningPubKey: '',
Signature: '',
}
let binary =
"24000000006840000000000000007300760081140000000000000000000000000000000000000000";
'24000000006840000000000000007300760081140000000000000000000000000000000000000000'
describe("Can encode Pseudo Transactions", () => {
test("Correctly encodes Pseudo Transaciton", () => {
expect(encode(json)).toEqual(binary);
});
describe('Can encode Pseudo Transactions', () => {
test('Correctly encodes Pseudo Transaciton', () => {
expect(encode(json)).toEqual(binary)
})
test("Can decode account objects", () => {
expect(decode(encode(json))).toEqual(json);
});
test('Can decode account objects', () => {
expect(decode(encode(json))).toEqual(json)
})
test("Blank AccountID is ACCOUNT_ZERO", () => {
expect(encode(json_blank_acct)).toEqual(binary);
});
test('Blank AccountID is ACCOUNT_ZERO', () => {
expect(encode(json_blank_acct)).toEqual(binary)
})
test("Decodes Blank AccountID", () => {
expect(decode(encode(json_blank_acct))).toEqual(json);
});
});
test('Decodes Blank AccountID', () => {
expect(decode(encode(json_blank_acct))).toEqual(json)
})
})

View File

@@ -1,15 +1,15 @@
const { quality } = require("../dist/coretypes");
const { quality } = require('../dist/coretypes')
describe("Quality encode/decode", function () {
describe('Quality encode/decode', function () {
const bookDirectory =
"4627DFFCFF8B5A265EDBD8AE8C14A52325DBFEDAF4F5C32E5D06F4C3362FE1D0";
const expectedQuality = "195796912.5171664";
test("can decode", function () {
const decimal = quality.decode(bookDirectory);
expect(decimal.toString()).toBe(expectedQuality);
});
test("can encode", function () {
const bytes = quality.encode(expectedQuality);
expect(bytes.toString("hex").toUpperCase()).toBe(bookDirectory.slice(-16));
});
});
'4627DFFCFF8B5A265EDBD8AE8C14A52325DBFEDAF4F5C32E5D06F4C3362FE1D0'
const expectedQuality = '195796912.5171664'
test('can decode', function () {
const decimal = quality.decode(bookDirectory)
expect(decimal.toString()).toBe(expectedQuality)
})
test('can encode', function () {
const bytes = quality.encode(expectedQuality)
expect(bytes.toString('hex').toUpperCase()).toBe(bookDirectory.slice(-16))
})
})

View File

@@ -1,89 +1,89 @@
const { ShaMap } = require("../dist/shamap.js");
const { binary, HashPrefix } = require("../dist/coretypes");
const { coreTypes } = require("../dist/types");
const { loadFixture } = require("./utils");
const { Buffer } = require("buffer/");
const { ShaMap } = require('../dist/shamap.js')
const { binary, HashPrefix } = require('../dist/coretypes')
const { coreTypes } = require('../dist/types')
const { loadFixture } = require('./utils')
const { Buffer } = require('buffer/')
function now() {
return Number(Date.now()) / 1000;
return Number(Date.now()) / 1000
}
const ZERO = "0000000000000000000000000000000000000000000000000000000000000000";
const ZERO = '0000000000000000000000000000000000000000000000000000000000000000'
function makeItem(indexArg) {
let str = indexArg;
let str = indexArg
while (str.length < 64) {
str += "0";
str += '0'
}
const index = coreTypes.Hash256.from(str);
const index = coreTypes.Hash256.from(str)
const item = {
toBytesSink(sink) {
index.toBytesSink(sink);
index.toBytesSink(sink)
},
hashPrefix() {
return Buffer.from([1, 3, 3, 7]);
return Buffer.from([1, 3, 3, 7])
},
};
return [index, item];
}
return [index, item]
}
describe("ShaMap", () => {
now();
describe('ShaMap', () => {
now()
test("hashes to zero when empty", () => {
const map = new ShaMap();
expect(map.hash().toHex()).toBe(ZERO);
});
test("creates the same hash no matter which order items are added", () => {
let map = new ShaMap();
test('hashes to zero when empty', () => {
const map = new ShaMap()
expect(map.hash().toHex()).toBe(ZERO)
})
test('creates the same hash no matter which order items are added', () => {
let map = new ShaMap()
const items = [
"0",
"1",
"11",
"7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E20000000000000000",
"7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E21000000000000000",
"7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E22000000000000000",
"7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E23000000000000000",
"12",
"122",
];
items.forEach((i) => map.addItem(...makeItem(i)));
const h1 = map.hash();
expect(h1.eq(h1)).toBe(true);
map = new ShaMap();
items.reverse().forEach((i) => map.addItem(...makeItem(i)));
expect(map.hash()).toStrictEqual(h1);
});
'0',
'1',
'11',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E20000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E21000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E22000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E23000000000000000',
'12',
'122',
]
items.forEach((i) => map.addItem(...makeItem(i)))
const h1 = map.hash()
expect(h1.eq(h1)).toBe(true)
map = new ShaMap()
items.reverse().forEach((i) => map.addItem(...makeItem(i)))
expect(map.hash()).toStrictEqual(h1)
})
function factory(fixture) {
test(`recreate account state hash from ${fixture}`, () => {
const map = new ShaMap();
const ledger = loadFixture(fixture);
const map = new ShaMap()
const ledger = loadFixture(fixture)
// const t = now();
const leafNodePrefix = HashPrefix.accountStateEntry;
const leafNodePrefix = HashPrefix.accountStateEntry
ledger.accountState
.map((e, i) => {
if ((i > 1000) & (i % 1000 === 0)) {
console.log(e.index);
console.log(i);
console.log(e.index)
console.log(i)
}
const bytes = binary.serializeObject(e);
const bytes = binary.serializeObject(e)
return {
index: coreTypes.Hash256.from(e.index),
hashPrefix() {
return leafNodePrefix;
return leafNodePrefix
},
toBytesSink(sink) {
sink.put(bytes);
sink.put(bytes)
},
};
}
})
.forEach((so) => map.addItem(so.index, so));
expect(map.hash().toHex()).toBe(ledger.account_hash);
.forEach((so) => map.addItem(so.index, so))
expect(map.hash().toHex()).toBe(ledger.account_hash)
// console.log('took seconds: ', (now() - t));
});
})
}
factory("ledger-full-38129.json");
factory("ledger-full-40000.json");
factory('ledger-full-38129.json')
factory('ledger-full-40000.json')
// factory('ledger-4320277.json');
// factory('14280680.json');
});
})

View File

@@ -2,128 +2,128 @@ const {
encodeForSigning,
encodeForSigningClaim,
encodeForMultisigning,
} = require("../dist");
} = require('../dist')
const tx_json = {
Account: "r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ",
Amount: "1000",
Destination: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
Fee: "10",
Account: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',
Amount: '1000',
Destination: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
Fee: '10',
Flags: 2147483648,
Sequence: 1,
TransactionType: "Payment",
TransactionType: 'Payment',
TxnSignature:
"30440220718D264EF05CAED7C781FF6DE298DCAC68D002562C9BF3A07C1" +
"E721B420C0DAB02203A5A4779EF4D2CCC7BC3EF886676D803A9981B928D3B8ACA483B80" +
"ECA3CD7B9B",
'30440220718D264EF05CAED7C781FF6DE298DCAC68D002562C9BF3A07C1' +
'E721B420C0DAB02203A5A4779EF4D2CCC7BC3EF886676D803A9981B928D3B8ACA483B80' +
'ECA3CD7B9B',
Signature:
"30440220718D264EF05CAED7C781FF6DE298DCAC68D002562C9BF3A07C1E72" +
"1B420C0DAB02203A5A4779EF4D2CCC7BC3EF886676D803A9981B928D3B8ACA483B80ECA" +
"3CD7B9B",
'30440220718D264EF05CAED7C781FF6DE298DCAC68D002562C9BF3A07C1E72' +
'1B420C0DAB02203A5A4779EF4D2CCC7BC3EF886676D803A9981B928D3B8ACA483B80ECA' +
'3CD7B9B',
SigningPubKey:
"ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A",
};
'ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A',
}
describe("Signing data", function () {
test("can create single signing blobs", function () {
const actual = encodeForSigning(tx_json);
describe('Signing data', function () {
test('can create single signing blobs', function () {
const actual = encodeForSigning(tx_json)
expect(actual).toBe(
[
"53545800", // signingPrefix
'53545800', // signingPrefix
// TransactionType
"12",
"0000",
'12',
'0000',
// Flags
"22",
"80000000",
'22',
'80000000',
// Sequence
"24",
"00000001",
'24',
'00000001',
// Amount
"61",
'61',
// native amount
"40000000000003E8",
'40000000000003E8',
// Fee
"68",
'68',
// native amount
"400000000000000A",
'400000000000000A',
// SigningPubKey
"73",
'73',
// VLLength
"21",
"ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A",
'21',
'ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A',
// Account
"81",
'81',
// VLLength
"14",
"5B812C9D57731E27A2DA8B1830195F88EF32A3B6",
'14',
'5B812C9D57731E27A2DA8B1830195F88EF32A3B6',
// Destination
"83",
'83',
// VLLength
"14",
"B5F762798A53D543A014CAF8B297CFF8F2F937E8",
].join("")
);
});
test("can create multi signing blobs", function () {
const signingAccount = "rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN";
const signingJson = Object.assign({}, tx_json, { SigningPubKey: "" });
const actual = encodeForMultisigning(signingJson, signingAccount);
'14',
'B5F762798A53D543A014CAF8B297CFF8F2F937E8',
].join(''),
)
})
test('can create multi signing blobs', function () {
const signingAccount = 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN'
const signingJson = Object.assign({}, tx_json, { SigningPubKey: '' })
const actual = encodeForMultisigning(signingJson, signingAccount)
expect(actual).toBe(
[
"534D5400", // signingPrefix
'534D5400', // signingPrefix
// TransactionType
"12",
"0000",
'12',
'0000',
// Flags
"22",
"80000000",
'22',
'80000000',
// Sequence
"24",
"00000001",
'24',
'00000001',
// Amount
"61",
'61',
// native amount
"40000000000003E8",
'40000000000003E8',
// Fee
"68",
'68',
// native amount
"400000000000000A",
'400000000000000A',
// SigningPubKey
"73",
'73',
// VLLength
"00",
'00',
// '',
// Account
"81",
'81',
// VLLength
"14",
"5B812C9D57731E27A2DA8B1830195F88EF32A3B6",
'14',
'5B812C9D57731E27A2DA8B1830195F88EF32A3B6',
// Destination
"83",
'83',
// VLLength
"14",
"B5F762798A53D543A014CAF8B297CFF8F2F937E8",
'14',
'B5F762798A53D543A014CAF8B297CFF8F2F937E8',
// signingAccount suffix
"C0A5ABEF242802EFED4B041E8F2D4A8CC86AE3D1",
].join("")
);
});
test("can create claim blob", function () {
'C0A5ABEF242802EFED4B041E8F2D4A8CC86AE3D1',
].join(''),
)
})
test('can create claim blob', function () {
const channel =
"43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1";
const amount = "1000";
const json = { channel, amount };
const actual = encodeForSigningClaim(json);
'43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1'
const amount = '1000'
const json = { channel, amount }
const actual = encodeForSigningClaim(json)
expect(actual).toBe(
[
// hash prefix
"434C4D00",
'434C4D00',
// channel ID
"43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1",
'43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1',
// amount as a uint64
"00000000000003E8",
].join("")
);
});
});
'00000000000003E8',
].join(''),
)
})
})

View File

@@ -1,10 +1,10 @@
const { encode, decode } = require("../dist");
const { encode, decode } = require('../dist')
// Notice: no Amount or Fee
const tx_json = {
Account: "r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ",
Account: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',
// Amount: '1000',
Destination: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
Destination: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
// Fee: '10',
// JavaScript converts operands to 32-bit signed ints after doing bitwise
@@ -12,86 +12,86 @@ const tx_json = {
Flags: (1 << 31) >>> 0, // tfFullyCanonicalSig
Sequence: 1,
TransactionType: "Payment",
TransactionType: 'Payment',
// TxnSignature,
// Signature,
// SigningPubKey
};
}
describe("encoding and decoding tx_json", function () {
test("can encode tx_json without Amount or Fee", function () {
const encoded = encode(tx_json);
const decoded = decode(encoded);
expect(tx_json).toEqual(decoded);
});
test("can encode tx_json with Amount and Fee", function () {
describe('encoding and decoding tx_json', function () {
test('can encode tx_json without Amount or Fee', function () {
const encoded = encode(tx_json)
const decoded = decode(encoded)
expect(tx_json).toEqual(decoded)
})
test('can encode tx_json with Amount and Fee', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: "1000",
Fee: "10",
});
const encoded = encode(my_tx);
const decoded = decode(encoded);
expect(my_tx).toEqual(decoded);
});
test("can encode tx_json with TicketCount", function () {
Amount: '1000',
Fee: '10',
})
const encoded = encode(my_tx)
const decoded = decode(encoded)
expect(my_tx).toEqual(decoded)
})
test('can encode tx_json with TicketCount', function () {
const my_tx = Object.assign({}, tx_json, {
TicketCount: 2,
});
const encoded = encode(my_tx);
const decoded = decode(encoded);
expect(my_tx).toEqual(decoded);
});
test("can encode tx_json with TicketSequence", function () {
})
const encoded = encode(my_tx)
const decoded = decode(encoded)
expect(my_tx).toEqual(decoded)
})
test('can encode tx_json with TicketSequence', function () {
const my_tx = Object.assign({}, tx_json, {
Sequence: 0,
TicketSequence: 2,
});
const encoded = encode(my_tx);
const decoded = decode(encoded);
expect(my_tx).toEqual(decoded);
});
test("throws when Amount is invalid", function () {
})
const encoded = encode(my_tx)
const decoded = decode(encoded)
expect(my_tx).toEqual(decoded)
})
test('throws when Amount is invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: "1000.001",
Fee: "10",
});
Amount: '1000.001',
Fee: '10',
})
expect(() => {
encode(my_tx);
}).toThrow();
});
test("throws when Fee is invalid", function () {
encode(my_tx)
}).toThrow()
})
test('throws when Fee is invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: "1000",
Fee: "10.123",
});
Amount: '1000',
Fee: '10.123',
})
expect(() => {
encode(my_tx);
}).toThrow();
});
test("throws when Amount and Fee are invalid", function () {
encode(my_tx)
}).toThrow()
})
test('throws when Amount and Fee are invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: "1000.789",
Fee: "10.123",
});
Amount: '1000.789',
Fee: '10.123',
})
expect(() => {
encode(my_tx);
}).toThrow();
});
test("throws when Amount is a number instead of a string-encoded integer", function () {
encode(my_tx)
}).toThrow()
})
test('throws when Amount is a number instead of a string-encoded integer', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1000.789,
});
})
expect(() => {
encode(my_tx);
}).toThrow();
});
encode(my_tx)
}).toThrow()
})
test("throws when Fee is a number instead of a string-encoded integer", function () {
test('throws when Fee is a number instead of a string-encoded integer', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1234.56,
});
})
expect(() => {
encode(my_tx);
}).toThrow();
});
});
encode(my_tx)
}).toThrow()
})
})

View File

@@ -1,34 +1,34 @@
const { coreTypes } = require("../dist/types");
const { SerializedType } = require("../dist/types/serialized-type");
const { coreTypes } = require('../dist/types')
const { SerializedType } = require('../dist/types/serialized-type')
describe("SerializedType interfaces", () => {
describe('SerializedType interfaces', () => {
Object.entries(coreTypes).forEach(([name, Value]) => {
test(`${name} has a \`from\` static constructor`, () => {
expect(Value.from && Value.from !== Array.from).toBe(true);
});
expect(Value.from && Value.from !== Array.from).toBe(true)
})
test(`${name} has a default constructor`, () => {
expect(new Value()).not.toBe(undefined);
});
expect(new Value()).not.toBe(undefined)
})
test(`${name}.from will return the same object`, () => {
const instance = new Value();
expect(Value.from(instance) === instance).toBe(true);
});
const instance = new Value()
expect(Value.from(instance) === instance).toBe(true)
})
test(`${name} instances have toBytesSink`, () => {
expect(new Value().toBytesSink).not.toBe(undefined);
});
expect(new Value().toBytesSink).not.toBe(undefined)
})
test(`${name} instances have toJSON`, () => {
expect(new Value().toJSON).not.toBe(undefined);
});
expect(new Value().toJSON).not.toBe(undefined)
})
test(`${name}.from(json).toJSON() == json`, () => {
const newJSON = new Value().toJSON();
expect(Value.from(newJSON).toJSON()).toEqual(newJSON);
});
const newJSON = new Value().toJSON()
expect(Value.from(newJSON).toJSON()).toEqual(newJSON)
})
describe(`${name} supports all methods of the SerializedType mixin`, () => {
Object.keys(SerializedType.prototype).forEach((k) => {
test(`new ${name}.prototype.${k} !== undefined`, () => {
expect(Value.prototype[k]).not.toBe(undefined);
});
});
});
});
});
expect(Value.prototype[k]).not.toBe(undefined)
})
})
})
})
})

View File

@@ -1,148 +1,148 @@
const { coreTypes } = require("../dist/types");
const { UInt8, UInt64 } = coreTypes;
const { coreTypes } = require('../dist/types')
const { UInt8, UInt64 } = coreTypes
const { encode } = require("../dist");
const { encode } = require('../dist')
const binary =
"11007222000300003700000000000000003800000000000000006280000000000000000000000000000000000000005553440000000000000000000000000000000000000000000000000166D5438D7EA4C680000000000000000000000000005553440000000000AE123A8556F3CF91154711376AFB0F894F832B3D67D5438D7EA4C680000000000000000000000000005553440000000000F51DFC2A09D62CBBA1DFBDD4691DAC96AD98B90F";
'11007222000300003700000000000000003800000000000000006280000000000000000000000000000000000000005553440000000000000000000000000000000000000000000000000166D5438D7EA4C680000000000000000000000000005553440000000000AE123A8556F3CF91154711376AFB0F894F832B3D67D5438D7EA4C680000000000000000000000000005553440000000000F51DFC2A09D62CBBA1DFBDD4691DAC96AD98B90F'
const json = {
Balance: {
currency: "USD",
issuer: "rrrrrrrrrrrrrrrrrrrrBZbvji",
value: "0",
currency: 'USD',
issuer: 'rrrrrrrrrrrrrrrrrrrrBZbvji',
value: '0',
},
Flags: 196608,
HighLimit: {
currency: "USD",
issuer: "rPMh7Pi9ct699iZUTWaytJUoHcJ7cgyziK",
value: "1000",
currency: 'USD',
issuer: 'rPMh7Pi9ct699iZUTWaytJUoHcJ7cgyziK',
value: '1000',
},
HighNode: "0",
LedgerEntryType: "RippleState",
HighNode: '0',
LedgerEntryType: 'RippleState',
LowLimit: {
currency: "USD",
issuer: "rG1QQv2nh2gr7RCZ1P8YYcBUKCCN633jCn",
value: "1000",
currency: 'USD',
issuer: 'rG1QQv2nh2gr7RCZ1P8YYcBUKCCN633jCn',
value: '1000',
},
LowNode: "0",
};
LowNode: '0',
}
const binaryEntry0 =
"11007222001100002501EC24873700000000000000003800000000000000A35506FC7DE374089D50F81AAE13E7BBF3D0E694769331E14F55351B38D0148018EA62D44BF89AC2A40B800000000000000000000000004A50590000000000000000000000000000000000000000000000000166D6C38D7EA4C680000000000000000000000000004A5059000000000047C1258B4B79774B28176324068F759EDE226F686780000000000000000000000000000000000000004A505900000000005BBC0F22F61D9224A110650CFE21CC0C4BE13098";
'11007222001100002501EC24873700000000000000003800000000000000A35506FC7DE374089D50F81AAE13E7BBF3D0E694769331E14F55351B38D0148018EA62D44BF89AC2A40B800000000000000000000000004A50590000000000000000000000000000000000000000000000000166D6C38D7EA4C680000000000000000000000000004A5059000000000047C1258B4B79774B28176324068F759EDE226F686780000000000000000000000000000000000000004A505900000000005BBC0F22F61D9224A110650CFE21CC0C4BE13098'
const jsonEntry0 = {
Balance: {
currency: "JPY",
issuer: "rrrrrrrrrrrrrrrrrrrrBZbvji",
value: "0.3369568318",
currency: 'JPY',
issuer: 'rrrrrrrrrrrrrrrrrrrrBZbvji',
value: '0.3369568318',
},
Flags: 1114112,
HighLimit: {
currency: "JPY",
issuer: "r94s8px6kSw1uZ1MV98dhSRTvc6VMPoPcN",
value: "0",
currency: 'JPY',
issuer: 'r94s8px6kSw1uZ1MV98dhSRTvc6VMPoPcN',
value: '0',
},
HighNode: "a3",
LedgerEntryType: "RippleState",
HighNode: 'a3',
LedgerEntryType: 'RippleState',
LowLimit: {
currency: "JPY",
issuer: "rfYQMgj3g3Qp8VLoZNvvU35mEuuJC8nCmY",
value: "1000000000",
currency: 'JPY',
issuer: 'rfYQMgj3g3Qp8VLoZNvvU35mEuuJC8nCmY',
value: '1000000000',
},
LowNode: "0",
LowNode: '0',
PreviousTxnID:
"06FC7DE374089D50F81AAE13E7BBF3D0E694769331E14F55351B38D0148018EA",
'06FC7DE374089D50F81AAE13E7BBF3D0E694769331E14F55351B38D0148018EA',
PreviousTxnLgrSeq: 32253063,
index: "000319BAE0A618A7D3BB492F17E98E5D92EA0C6458AFEBED44206B5B4798A840",
};
index: '000319BAE0A618A7D3BB492F17E98E5D92EA0C6458AFEBED44206B5B4798A840',
}
const binaryEntry1 =
"1100642200000000320000000000000002580CB3C1AD2C371136AEA434246D971C5FCCD32CBF520667E131AB7B10D706E7528214BA53D10260FFCC968ACD16BA30F7CEABAD6E5D92011340A3454ACED87177146EABD5E4A256021D836D1E3617618B1EB362D10B0D1BAC6AE1ED9E8D280BBE0B6656748FD647231851C6C650794D5E6852DFA1E35E68630F";
'1100642200000000320000000000000002580CB3C1AD2C371136AEA434246D971C5FCCD32CBF520667E131AB7B10D706E7528214BA53D10260FFCC968ACD16BA30F7CEABAD6E5D92011340A3454ACED87177146EABD5E4A256021D836D1E3617618B1EB362D10B0D1BAC6AE1ED9E8D280BBE0B6656748FD647231851C6C650794D5E6852DFA1E35E68630F'
const jsonEntry1 = {
Flags: 0,
IndexPrevious: "2",
IndexPrevious: '2',
Indexes: [
"A3454ACED87177146EABD5E4A256021D836D1E3617618B1EB362D10B0D1BAC6A",
"E1ED9E8D280BBE0B6656748FD647231851C6C650794D5E6852DFA1E35E68630F",
'A3454ACED87177146EABD5E4A256021D836D1E3617618B1EB362D10B0D1BAC6A',
'E1ED9E8D280BBE0B6656748FD647231851C6C650794D5E6852DFA1E35E68630F',
],
LedgerEntryType: "DirectoryNode",
Owner: "rHzDaMNybxQppiE3uWyt2N265KvAKdiRdP",
RootIndex: "0CB3C1AD2C371136AEA434246D971C5FCCD32CBF520667E131AB7B10D706E752",
index: "0B4A2E68C111F7E42FAEEE405F7344560C8240840B151D9D04131EB79D080167",
};
LedgerEntryType: 'DirectoryNode',
Owner: 'rHzDaMNybxQppiE3uWyt2N265KvAKdiRdP',
RootIndex: '0CB3C1AD2C371136AEA434246D971C5FCCD32CBF520667E131AB7B10D706E752',
index: '0B4A2E68C111F7E42FAEEE405F7344560C8240840B151D9D04131EB79D080167',
}
const binaryEntry2 =
"1100722200210000250178D1CA37000000000000000038000000000000028355C0C37CE200B509E0A529880634F7841A9EF4CB65F03C12E6004CFAD9718D66946280000000000000000000000000000000000000004743420000000000000000000000000000000000000000000000000166D6071AFD498D000000000000000000000000000047434200000000002599D1D255BCA61189CA64C84528F2FCBE4BFC3867800000000000000000000000000000000000000047434200000000006EEBB1D1852CE667876A0B3630861FB6C6AB358E";
'1100722200210000250178D1CA37000000000000000038000000000000028355C0C37CE200B509E0A529880634F7841A9EF4CB65F03C12E6004CFAD9718D66946280000000000000000000000000000000000000004743420000000000000000000000000000000000000000000000000166D6071AFD498D000000000000000000000000000047434200000000002599D1D255BCA61189CA64C84528F2FCBE4BFC3867800000000000000000000000000000000000000047434200000000006EEBB1D1852CE667876A0B3630861FB6C6AB358E'
const jsonEntry2 = {
Balance: {
currency: "GCB",
issuer: "rrrrrrrrrrrrrrrrrrrrBZbvji",
value: "0",
currency: 'GCB',
issuer: 'rrrrrrrrrrrrrrrrrrrrBZbvji',
value: '0',
},
Flags: 2162688,
HighLimit: {
currency: "GCB",
issuer: "rBfVgTnsdh8ckC19RM8aVGNuMZnpwrMP6n",
value: "0",
currency: 'GCB',
issuer: 'rBfVgTnsdh8ckC19RM8aVGNuMZnpwrMP6n',
value: '0',
},
HighNode: "283",
LedgerEntryType: "RippleState",
HighNode: '283',
LedgerEntryType: 'RippleState',
LowLimit: {
currency: "GCB",
issuer: "rhRFGCy2RJTA8oxkjjtYTvofPVGqcgvXWj",
value: "2000000",
currency: 'GCB',
issuer: 'rhRFGCy2RJTA8oxkjjtYTvofPVGqcgvXWj',
value: '2000000',
},
LowNode: "0",
LowNode: '0',
PreviousTxnID:
"C0C37CE200B509E0A529880634F7841A9EF4CB65F03C12E6004CFAD9718D6694",
'C0C37CE200B509E0A529880634F7841A9EF4CB65F03C12E6004CFAD9718D6694',
PreviousTxnLgrSeq: 24695242,
index: "0000041EFD027808D3F78C8352F97E324CB816318E00B977C74ECDDC7CD975B2",
};
index: '0000041EFD027808D3F78C8352F97E324CB816318E00B977C74ECDDC7CD975B2',
}
test("compareToTests[0]", () => {
expect(UInt8.from(124).compareTo(UInt64.from(124))).toBe(0);
});
test('compareToTests[0]', () => {
expect(UInt8.from(124).compareTo(UInt64.from(124))).toBe(0)
})
test("compareToTest[1]", () => {
expect(UInt64.from(124).compareTo(UInt8.from(124))).toBe(0);
});
test('compareToTest[1]', () => {
expect(UInt64.from(124).compareTo(UInt8.from(124))).toBe(0)
})
test("compareToTest[2]", () => {
expect(UInt64.from(124).compareTo(UInt8.from(123))).toBe(1);
});
test('compareToTest[2]', () => {
expect(UInt64.from(124).compareTo(UInt8.from(123))).toBe(1)
})
test("compareToTest[3]", () => {
expect(UInt8.from(124).compareTo(UInt8.from(13))).toBe(1);
});
test('compareToTest[3]', () => {
expect(UInt8.from(124).compareTo(UInt8.from(13))).toBe(1)
})
test("compareToTest[4]", () => {
expect(UInt8.from(124).compareTo(124)).toBe(0);
});
test('compareToTest[4]', () => {
expect(UInt8.from(124).compareTo(124)).toBe(0)
})
test("compareToTest[5]", () => {
expect(UInt64.from(124).compareTo(124)).toBe(0);
});
test('compareToTest[5]', () => {
expect(UInt64.from(124).compareTo(124)).toBe(0)
})
test("compareToTest[6]", () => {
expect(UInt64.from(124).compareTo(123)).toBe(1);
});
test('compareToTest[6]', () => {
expect(UInt64.from(124).compareTo(123)).toBe(1)
})
test("compareToTest[7]", () => {
expect(UInt8.from(124).compareTo(13)).toBe(1);
});
test('compareToTest[7]', () => {
expect(UInt8.from(124).compareTo(13)).toBe(1)
})
test("UInt64 from string zero", () => {
expect(UInt64.from("0")).toEqual(UInt64.from(0));
expect(encode(json)).toEqual(binary);
});
test('UInt64 from string zero', () => {
expect(UInt64.from('0')).toEqual(UInt64.from(0))
expect(encode(json)).toEqual(binary)
})
test("UInt64 from non 16 length hex", () => {
expect(encode(jsonEntry0)).toEqual(binaryEntry0);
expect(encode(jsonEntry1)).toEqual(binaryEntry1);
expect(encode(jsonEntry2)).toEqual(binaryEntry2);
});
test('UInt64 from non 16 length hex', () => {
expect(encode(jsonEntry0)).toEqual(binaryEntry0)
expect(encode(jsonEntry1)).toEqual(binaryEntry1)
expect(encode(jsonEntry2)).toEqual(binaryEntry2)
})
test("valueOfTests", () => {
let val = UInt8.from(1);
val |= 0x2;
expect(val).toBe(3);
});
test('valueOfTests', () => {
let val = UInt8.from(1)
val |= 0x2
expect(val).toBe(3)
})

View File

@@ -1,181 +1,181 @@
const { encode, decode } = require("./../dist/index");
const fixtures = require("./fixtures/x-codec-fixtures.json");
const { encode, decode } = require('./../dist/index')
const fixtures = require('./fixtures/x-codec-fixtures.json')
let json_x1 = {
OwnerCount: 0,
Account: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT",
Account: 'XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT',
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};
Balance: '10000000000',
}
let json_r1 = {
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Account: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: "10000000000",
Balance: '10000000000',
SourceTag: 12345,
};
}
let json_null_x = {
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Destination: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Issuer: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P4GETfNyyXGaoqBj71",
Account: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
Destination: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
Issuer: 'XVXdn5wEVm5G4UhEHWDPqjvdeH361P4GETfNyyXGaoqBj71',
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};
Balance: '10000000000',
}
let json_invalid_x = {
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Destination: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Issuer: "XVXdn5wEVm5g4UhEHWDPqjvdeH361P4GETfNyyXGaoqBj71",
Account: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
Destination: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
Issuer: 'XVXdn5wEVm5g4UhEHWDPqjvdeH361P4GETfNyyXGaoqBj71',
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};
Balance: '10000000000',
}
let json_null_r = {
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Destination: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Issuer: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Account: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
Destination: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
Issuer: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};
Balance: '10000000000',
}
let invalid_json_issuer_tagged = {
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Destination: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Issuer: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT",
Account: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
Destination: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
Issuer: 'XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT',
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};
Balance: '10000000000',
}
let invalid_json_x_and_tagged = {
OwnerCount: 0,
Account: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT",
Account: 'XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT',
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
LedgerEntryType: 'AccountRoot',
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: "10000000000",
Balance: '10000000000',
SourceTag: 12345,
};
}
let json_issued_x = {
TakerPays: {
currency: "USD",
issuer: "X7WZKEeNVS2p9Tire9DtNFkzWBZbFtJHWxDjN9fCrBGqVA4",
value: "7072.8",
currency: 'USD',
issuer: 'X7WZKEeNVS2p9Tire9DtNFkzWBZbFtJHWxDjN9fCrBGqVA4',
value: '7072.8',
},
};
}
let json_issued_r = {
TakerPays: {
currency: "USD",
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
value: "7072.8",
currency: 'USD',
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
value: '7072.8',
},
};
}
let json_issued_with_tag = {
TakerPays: {
currency: "USD",
issuer: "X7WZKEeNVS2p9Tire9DtNFkzWBZbFtSiS2eDBib7svZXuc2",
value: "7072.8",
currency: 'USD',
issuer: 'X7WZKEeNVS2p9Tire9DtNFkzWBZbFtSiS2eDBib7svZXuc2',
value: '7072.8',
},
};
}
// Verifies that an X-address in Account/issuer fields is interchangeable with
// the equivalent classic address (+ tag): same encoded bytes, and decoding
// always yields the classic-address form.
// NOTE: diff residue removed — pre-prettier duplicate lines were interleaved here.
describe('X-Address Account is equivalent to a classic address w/ SourceTag', () => {
  let encoded_x = encode(json_x1)
  let encoded_r = encode(json_r1)
  test('Can encode with x-Address', () => {
    expect(encoded_x).toEqual(encoded_r)
  })

  test('decoded X-address is object w/ source and tag', () => {
    let decoded_x = decode(encoded_x)
    expect(decoded_x).toEqual(json_r1)
  })

  test('Encoding issuer X-Address w/ undefined destination tag', () => {
    expect(encode(json_null_x)).toEqual(encode(json_null_r))
  })

  test('Throws when X-Address is invalid', () => {
    expect(() => encode(json_invalid_x)).toThrow('checksum_invalid')
  })

  test('Encodes issued currency w/ x-address', () => {
    expect(encode(json_issued_x)).toEqual(encode(json_issued_r))
  })
})
describe("Invalid X-Address behavior", () => {
test("X-Address with tag throws value for invalid field", () => {
describe('Invalid X-Address behavior', () => {
test('X-Address with tag throws value for invalid field', () => {
expect(() => encode(invalid_json_issuer_tagged)).toThrow(
new Error("Issuer cannot have an associated tag")
);
});
new Error('Issuer cannot have an associated tag'),
)
})
test("Throws when Account has both X-Addr and Destination Tag", () => {
test('Throws when Account has both X-Addr and Destination Tag', () => {
expect(() => encode(invalid_json_x_and_tagged)).toThrow(
new Error("Cannot have Account X-Address and SourceTag")
);
});
new Error('Cannot have Account X-Address and SourceTag'),
)
})
test("Throws when issued currency has tag", () => {
test('Throws when issued currency has tag', () => {
expect(() => encode(json_issued_with_tag)).toThrow(
"Only allowed to have tag on Account or Destination"
);
});
});
'Only allowed to have tag on Account or Destination',
)
})
})
describe("ripple-binary-codec x-address test", function () {
describe('ripple-binary-codec x-address test', function () {
function makeSuite(name, entries) {
describe(name, function () {
entries.forEach((t, testN) => {
test(`${name}[${testN}] encodes X-address json equivalent to classic address json`, () => {
expect(encode(t.rjson)).toEqual(encode(t.xjson));
});
expect(encode(t.rjson)).toEqual(encode(t.xjson))
})
test(`${name}[${testN}] decodes X-address json equivalent to classic address json`, () => {
expect(decode(encode(t.xjson))).toEqual(t.rjson);
});
});
});
expect(decode(encode(t.xjson))).toEqual(t.rjson)
})
})
})
}
makeSuite("transactions", fixtures.transactions);
});
makeSuite('transactions', fixtures.transactions)
})