test: run binary-codec tests in the browser (#2566)

- Convert tests to typescript
- Update type definitions causing errors in tests
  - `makeParser` to accept a `Buffer` in addition to `string`
  - `SerializedType` constructor allows not passing in a byte array
  - `Comparable` is now a generic type so that it allows `compareTo`
methods to take more than the type itself. Example: `Uint64.compareTo`
can accept `number`
- Update tests to use jasmine compatible functions
  - Switching from `test` to `it`.
  - Updated test checking if coretypes all implement SerializedType
  - Import fixtures directly instead of using `loadFixture` utility
- Remove importing of `buffer/` explicitly.  It was throwing off type
checking in tests.  Buffer is going away in a future PR anyway.
- Fixed `npm run clean` not clearing `.tsbuildinfo` files for keypairs
- Remove unused account-tx-transactions.db. It was likely used in the
past to test historical ledgers.
This commit is contained in:
Caleb Kniffen
2023-11-14 19:13:19 -06:00
parent 4c7f46c111
commit e2433101cb
58 changed files with 382 additions and 367 deletions

View File

@@ -11,6 +11,12 @@
* Remove `assert` dependency. If you were catching `AssertionError` you need to change to `Error`.
* Remove `create-hash` in favor of `@noble/hashes`
### Changes
* Update type definitions which were causing errors in tests for behavior that the code already supported
* `makeParser` to accept a `Buffer` in addition to `string`
* `SerializedType` constructor allows not passing in a byte array
* `Comparable` is now a generic type so that it allows `compareTo` methods to take more than the type itself.
## 1.11.0 (2023-11-30)
### Added
- Support for the DID amendment (XLS-40).

View File

@@ -0,0 +1,15 @@
// Karma configuration for running the binary-codec test suite in a browser.
// Builds on the monorepo-wide base config, reusing the package's test webpack
// config so .ts test files are compiled before being served to the browser.
const baseKarmaConfig = require('../../karma.config')
const webpackConfig = require('./test/webpack.config')
// Drop the webpack entry point: karma supplies its own entries from `files`.
delete webpackConfig.entry
module.exports = function (config) {
  // Apply the shared base settings first, then override per-package values.
  baseKarmaConfig(config)
  config.set({
    base: '',
    webpack: webpackConfig,
    // list of files / patterns to load in the browser
    files: ['test/**/*.test.ts'],
  })
}

View File

@@ -17,10 +17,11 @@
"ripple-address-codec": "^5.0.0-beta.0"
},
"scripts": {
"build": "tsc -b && copyfiles ./src/enums/definitions.json ./dist/enums/",
"clean": "rm -rf ./dist ./coverage tsconfig.tsbuildinfo",
"build": "tsc --build tsconfig.build.json && copyfiles ./src/enums/definitions.json ./dist/enums/",
"clean": "rm -rf ./dist ./coverage ./test/testCompiledForWeb tsconfig.build.tsbuildinfo",
"prepublishOnly": "npm test",
"test": "npm run build && jest --verbose false --silent=false ./test/*.test.js",
"test": "npm run build && jest --verbose false --silent=false ./test/*.test.ts",
"test:browser": "npm run build && karma start ./karma.config.js",
"lint": "eslint . --ext .ts --ext .test.js"
},
"keywords": [

View File

@@ -13,20 +13,23 @@ import {
} from './enums'
import { STObject } from './types/st-object'
import { JsonObject } from './types/serialized-type'
import { Buffer } from 'buffer/'
/**
* Construct a BinaryParser
*
* @param bytes hex-string to construct BinaryParser from
* @param bytes hex-string or Buffer to construct BinaryParser from
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns A BinaryParser
* @returns BinaryParser
*/
const makeParser = (
bytes: string,
bytes: string | Buffer,
definitions?: XrplDefinitionsBase,
): BinaryParser => new BinaryParser(bytes, definitions)
): BinaryParser =>
new BinaryParser(
bytes instanceof Buffer ? bytes.toString('hex') : bytes,
definitions,
)
/**
* Parse BinaryParser into JSON

View File

@@ -1,5 +1,4 @@
import { BytesList, BinaryParser } from '../binary'
import { Buffer } from 'buffer/'
/*
* @brief: Bytes, name, and ordinal representing one type, ledger_type, transaction type, or result

View File

@@ -1,7 +1,6 @@
import { Bytes } from './bytes'
import { SerializedType } from '../types/serialized-type'
import { TYPE_WIDTH } from './constants'
import { Buffer } from 'buffer/'
/**
* Encoding information for a rippled field, often used in transactions.

View File

@@ -1,5 +1,3 @@
import { Buffer } from 'buffer/'
/**
* Write a 32 bit integer to a Buffer
*

View File

@@ -1,7 +1,7 @@
import { HashPrefix } from './hash-prefixes'
import { Hash256 } from './types'
import { BytesList } from './serdes/binary-serializer'
import { Buffer } from 'buffer/'
import { sha512 } from '@xrplf/isomorphic/sha512'
/**

View File

@@ -1,5 +1,5 @@
import { coreTypes } from './types'
import { Buffer } from 'buffer/'
import BigNumber from 'bignumber.js'
/**

View File

@@ -4,7 +4,6 @@ import {
FieldInstance,
} from '../enums'
import { type SerializedType } from '../types/serialized-type'
import { Buffer } from 'buffer/'
/**
* BinaryParser is used to compute fields and values from a HexString

View File

@@ -1,6 +1,5 @@
import { FieldInstance } from '../enums'
import { type SerializedType } from '../types/serialized-type'
import { Buffer } from 'buffer/'
/**
* Bytes list is a collection of buffer objects

View File

@@ -3,7 +3,6 @@ import { HashPrefix } from './hash-prefixes'
import { Sha512Half } from './hashes'
import { Hash256 } from './types/hash-256'
import { BytesList } from './serdes/binary-serializer'
import { Buffer } from 'buffer/'
/**
* Abstract class describing a SHAMapNode

View File

@@ -5,7 +5,6 @@ import {
xAddressToClassicAddress,
} from 'ripple-address-codec'
import { Hash160 } from './hash-160'
import { Buffer } from 'buffer/'
const HEX_REGEX = /^[A-F0-9]{40}$/

View File

@@ -3,7 +3,7 @@ import { BinaryParser } from '../serdes/binary-parser'
import { AccountID } from './account-id'
import { Currency } from './currency'
import { JsonObject, SerializedType } from './serialized-type'
import { Buffer } from 'buffer/'
import BigNumber from 'bignumber.js'
/**

View File

@@ -1,6 +1,5 @@
import { SerializedType } from './serialized-type'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Variable length encoded type

View File

@@ -1,5 +1,4 @@
import { Hash160 } from './hash-160'
import { Buffer } from 'buffer/'
const XRP_HEX_REGEX = /^0{40}$/
const ISO_REGEX = /^[A-Z0-9a-z?!@#$%^&*(){}[\]|]{3}$/

View File

@@ -1,5 +1,4 @@
import { Hash } from './hash'
import { Buffer } from 'buffer/'
/**
* Hash with a width of 128 bits

View File

@@ -1,5 +1,4 @@
import { Hash } from './hash'
import { Buffer } from 'buffer/'
/**
* Hash with a width of 160 bits

View File

@@ -1,5 +1,4 @@
import { Hash } from './hash'
import { Buffer } from 'buffer/'
/**
* Hash with a width of 256 bits

View File

@@ -1,11 +1,10 @@
import { Comparable } from './serialized-type'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Base class defining how to encode and decode hashes
*/
class Hash extends Comparable {
class Hash extends Comparable<Hash | string> {
static readonly width: number
constructor(bytes: Buffer) {

View File

@@ -3,7 +3,6 @@ import { BinaryParser } from '../serdes/binary-parser'
import { AccountID } from './account-id'
import { Currency } from './currency'
import { JsonObject, SerializedType } from './serialized-type'
import { Buffer } from 'buffer/'
/**
* Interface for JSON objects that represent amounts

View File

@@ -2,7 +2,6 @@ import { AccountID } from './account-id'
import { Currency } from './currency'
import { BinaryParser } from '../serdes/binary-parser'
import { SerializedType, JsonObject } from './serialized-type'
import { Buffer } from 'buffer/'
/**
* Constants for separating Paths in a PathSet

View File

@@ -1,6 +1,6 @@
import { BytesList } from '../serdes/binary-serializer'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
import { XrplDefinitionsBase } from '../enums'
type JSON = string | number | boolean | null | undefined | JSON[] | JsonObject
@@ -13,7 +13,7 @@ type JsonObject = { [key: string]: JSON }
class SerializedType {
protected readonly bytes: Buffer = Buffer.alloc(0)
constructor(bytes: Buffer) {
constructor(bytes?: Buffer) {
this.bytes = bytes ?? Buffer.alloc(0)
}
@@ -80,26 +80,31 @@ class SerializedType {
}
/**
* Base class for SerializedTypes that are comparable
* Base class for SerializedTypes that are comparable.
*
* @template T - What types you want to allow comparisons between. You must specify all types. Primarily used to allow
* comparisons between built-in types (like `string`) and SerializedType subclasses (like `Hash`).
*
* Ex. `class Hash extends Comparable<Hash | string>`
*/
class Comparable extends SerializedType {
lt(other: Comparable): boolean {
class Comparable<T extends Object> extends SerializedType {
lt(other: T): boolean {
return this.compareTo(other) < 0
}
eq(other: Comparable): boolean {
eq(other: T): boolean {
return this.compareTo(other) === 0
}
gt(other: Comparable): boolean {
gt(other: T): boolean {
return this.compareTo(other) > 0
}
gte(other: Comparable): boolean {
gte(other: T): boolean {
return this.compareTo(other) > -1
}
lte(other: Comparable): boolean {
lte(other: T): boolean {
return this.compareTo(other) < 1
}
@@ -109,7 +114,7 @@ class Comparable extends SerializedType {
* @param other The comparable object to compare this to
* @returns A number denoting the relationship of this and other
*/
compareTo(other: Comparable): number {
compareTo(other: T): number {
throw new Error(`cannot compare ${this.toString()} and ${other.toString()}`)
}
}

View File

@@ -2,7 +2,6 @@ import { DEFAULT_DEFINITIONS, XrplDefinitionsBase } from '../enums'
import { SerializedType, JsonObject } from './serialized-type'
import { STObject } from './st-object'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
const ARRAY_END_MARKER = Buffer.from([0xf1])
const ARRAY_END_MARKER_NAME = 'ArrayEndMarker'

View File

@@ -8,7 +8,7 @@ import { SerializedType, JsonObject } from './serialized-type'
import { xAddressToClassicAddress, isValidXAddress } from 'ripple-address-codec'
import { BinaryParser } from '../serdes/binary-parser'
import { BinarySerializer, BytesList } from '../serdes/binary-serializer'
import { Buffer } from 'buffer/'
import { STArray } from './st-array'
const OBJECT_END_MARKER_BYTE = Buffer.from([0xe1])

View File

@@ -1,6 +1,5 @@
import { UInt } from './uint'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Derived UInt class for serializing/deserializing 16 bit UInt

View File

@@ -1,6 +1,5 @@
import { UInt } from './uint'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Derived UInt class for serializing/deserializing 32 bit UInt

View File

@@ -1,6 +1,5 @@
import { UInt } from './uint'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
const HEX_REGEX = /^[a-fA-F0-9]{1,16}$/
const mask = BigInt(0x00000000ffffffff)

View File

@@ -1,6 +1,5 @@
import { UInt } from './uint'
import { BinaryParser } from '../serdes/binary-parser'
import { Buffer } from 'buffer/'
/**
* Derived UInt class for serializing/deserializing 8 bit UInt

View File

@@ -1,5 +1,4 @@
import { Comparable } from './serialized-type'
import { Buffer } from 'buffer/'
/**
* Compare numbers and bigInts n1 and n2
@@ -15,7 +14,7 @@ function compare(n1: number | bigint, n2: number | bigint): number {
/**
* Base class for serializing and deserializing unsigned integers.
*/
abstract class UInt extends Comparable {
abstract class UInt extends Comparable<UInt | number> {
protected static width: number
constructor(bytes: Buffer) {
@@ -28,7 +27,7 @@ abstract class UInt extends Comparable {
* @param other other UInt to compare this to
* @returns -1, 0, or 1 depending on how the objects relate to each other
*/
compareTo(other: UInt): number {
compareTo(other: UInt | number): number {
return compare(this.valueOf(), other.valueOf())
}

View File

@@ -2,7 +2,6 @@ import { SerializedType } from './serialized-type'
import { BinaryParser } from '../serdes/binary-parser'
import { Hash256 } from './hash-256'
import { BytesList } from '../serdes/binary-serializer'
import { Buffer } from 'buffer/'
/**
* TypeGuard for Array<string>

View File

@@ -2,7 +2,7 @@ import { BinaryParser } from '../serdes/binary-parser'
import { AccountID } from './account-id'
import { JsonObject, SerializedType } from './serialized-type'
import { Buffer } from 'buffer/'
import { Issue, IssueObject } from './issue'
/**

View File

@@ -1,7 +1,6 @@
const { loadFixture } = require('./utils')
const { coreTypes } = require('../src/types')
import { coreTypes } from '../src/types'
import fixtures from './fixtures/data-driven-tests.json'
const { Amount } = coreTypes
const fixtures = loadFixture('data-driven-tests.json')
function amountErrorTests() {
fixtures.values_tests

View File

@@ -1,5 +1,5 @@
const fixtures = require('./fixtures/codec-fixtures.json')
const { decode, encode, decodeLedgerData } = require('../src')
import fixtures from './fixtures/codec-fixtures.json'
import { decode, encode, decodeLedgerData } from '../src'
function json(object) {
return JSON.stringify(object)
@@ -13,12 +13,12 @@ describe('ripple-binary-codec', function () {
function makeSuite(name, entries) {
describe(name, function () {
entries.forEach((t, testN) => {
test(`${name}[${testN}] can encode ${truncateForDisplay(
it(`${name}[${testN}] can encode ${truncateForDisplay(
json(t.json),
)} to ${truncateForDisplay(t.binary)}`, () => {
expect(encode(t.json)).toEqual(t.binary)
})
test(`${name}[${testN}] can decode ${truncateForDisplay(
it(`${name}[${testN}] can decode ${truncateForDisplay(
t.binary,
)} to ${truncateForDisplay(json(t.json))}`, () => {
const decoded = decode(t.binary)
@@ -33,7 +33,7 @@ describe('ripple-binary-codec', function () {
describe('ledgerData', function () {
if (fixtures.ledgerData) {
fixtures.ledgerData.forEach((t, testN) => {
test(`ledgerData[${testN}] can decode ${t.binary} to ${json(
it(`ledgerData[${testN}] can decode ${t.binary} to ${json(
t.json,
)}`, () => {
const decoded = decodeLedgerData(t.binary)

View File

@@ -1,15 +1,12 @@
const { coreTypes } = require('../src/types')
const BigNumber = require('bignumber.js')
import { coreTypes, Amount, Hash160 } from '../src/types'
import BigNumber from 'bignumber.js'
const { encodeAccountID } = require('ripple-address-codec')
const { binary } = require('../src/coretypes')
const { Amount, Hash160 } = coreTypes
const { makeParser, readJSON } = binary
const { Field, TransactionType } = require('./../src/enums')
const { parseHexOnly, hexOnly, loadFixture } = require('./utils')
const fixtures = loadFixture('data-driven-tests.json')
const { BytesList } = require('../src/serdes/binary-serializer')
const { Buffer } = require('buffer/')
import { encodeAccountID } from 'ripple-address-codec'
import { Field, TransactionType } from '../src/enums'
import { makeParser, readJSON } from '../src/binary'
import { parseHexOnly, hexOnly } from './utils'
import { BytesList } from '../src/serdes/binary-serializer'
import fixtures from './fixtures/data-driven-tests.json'
const __ = hexOnly
function toJSON(v) {
@@ -31,9 +28,10 @@ function assertEqualAmountJSON(actual, expected) {
}
function basicApiTests() {
const bytes = parseHexOnly('00,01020304,0506', Uint8Array)
test('can read slices of bytes', () => {
const bytes = parseHexOnly('00,01020304,0506')
it('can read slices of bytes', () => {
const parser = makeParser(bytes)
// @ts-expect-error -- checking private variable type
expect(parser.bytes instanceof Buffer).toBe(true)
const read1 = parser.read(1)
expect(read1 instanceof Buffer).toBe(true)
@@ -42,7 +40,7 @@ function basicApiTests() {
expect(parser.read(2)).toEqual(Buffer.from([5, 6]))
expect(() => parser.read(1)).toThrow()
})
test('can read a Uint32 at full', () => {
it('can read a Uint32 at full', () => {
const parser = makeParser('FFFFFFFF')
expect(parser.readUInt32()).toEqual(0xffffffff)
})
@@ -83,103 +81,106 @@ function transactionParsingTests() {
const tx_json = transaction.json
// These tests are basically development logs
test('can be done with low level apis', () => {
it('can be done with low level apis', () => {
const parser = makeParser(transaction.binary)
expect(parser.readField()).toEqual(Field.TransactionType)
expect(parser.readField()).toEqual(Field['TransactionType'])
expect(parser.readUInt16()).toEqual(7)
expect(parser.readField()).toEqual(Field.Flags)
expect(parser.readField()).toEqual(Field['Flags'])
expect(parser.readUInt32()).toEqual(0)
expect(parser.readField()).toEqual(Field.Sequence)
expect(parser.readField()).toEqual(Field['Sequence'])
expect(parser.readUInt32()).toEqual(103929)
expect(parser.readField()).toEqual(Field.TakerPays)
expect(parser.readField()).toEqual(Field['TakerPays'])
parser.read(8)
expect(parser.readField()).toEqual(Field.TakerGets)
expect(parser.readField()).toEqual(Field['TakerGets'])
// amount value
expect(parser.read(8)).not.toBe([])
// amount currency
expect(Hash160.fromParser(parser)).not.toBe([])
expect(encodeAccountID(parser.read(20))).toEqual(tx_json.TakerGets.issuer)
expect(parser.readField()).toEqual(Field.Fee)
expect(parser.readField()).toEqual(Field['Fee'])
expect(parser.read(8)).not.toEqual([])
expect(parser.readField()).toEqual(Field.SigningPubKey)
expect(parser.readField()).toEqual(Field['SigningPubKey'])
expect(parser.readVariableLengthLength()).toBe(33)
expect(parser.read(33).toString('hex').toUpperCase()).toEqual(
tx_json.SigningPubKey,
)
expect(parser.readField()).toEqual(Field.TxnSignature)
expect(parser.readField()).toEqual(Field['TxnSignature'])
expect(parser.readVariableLength().toString('hex').toUpperCase()).toEqual(
tx_json.TxnSignature,
)
expect(parser.readField()).toEqual(Field.Account)
expect(parser.readField()).toEqual(Field['Account'])
expect(encodeAccountID(parser.readVariableLength())).toEqual(
tx_json.Account,
)
expect(parser.end()).toBe(true)
})
test('can be done with high level apis', () => {
it('can be done with high level apis', () => {
const parser = makeParser(transaction.binary)
function readField() {
return parser.readFieldAndValue()
}
{
const [field, value] = readField()
expect(field).toEqual(Field.TransactionType)
expect(value).toEqual(TransactionType.OfferCreate)
expect(field).toEqual(Field['TransactionType'])
expect(value).toEqual(TransactionType['OfferCreate'])
}
{
const [field, value] = readField()
expect(field).toEqual(Field.Flags)
expect(field).toEqual(Field['Flags'])
expect(value.valueOf()).toEqual(0)
}
{
const [field, value] = readField()
expect(field).toEqual(Field.Sequence)
expect(field).toEqual(Field['Sequence'])
expect(value.valueOf()).toEqual(103929)
}
{
const [field, value] = readField()
expect(field).toEqual(Field.TakerPays)
expect(value.isNative()).toEqual(true)
expect(field).toEqual(Field['TakerPays'])
// @ts-expect-error -- checking private variable type
expect((value as Amount).isNative()).toEqual(true)
expect(value.toJSON()).toEqual('98957503520')
}
{
const [field, value] = readField()
expect(field).toEqual(Field.TakerGets)
expect(value.isNative()).toEqual(false)
expect(value.toJSON().issuer).toEqual(tx_json.TakerGets.issuer)
expect(field).toEqual(Field['TakerGets'])
// @ts-expect-error -- checking private function
expect((value as Amount).isNative()).toEqual(false)
expect(value.toJSON()?.['issuer']).toEqual(tx_json.TakerGets.issuer)
}
{
const [field, value] = readField()
expect(field).toEqual(Field.Fee)
expect(value.isNative()).toEqual(true)
expect(field).toEqual(Field['Fee'])
// @ts-expect-error -- checking private function
expect((value as Amount).isNative()).toEqual(true)
}
{
const [field, value] = readField()
expect(field).toEqual(Field.SigningPubKey)
expect(field).toEqual(Field['SigningPubKey'])
expect(value.toJSON()).toEqual(tx_json.SigningPubKey)
}
{
const [field, value] = readField()
expect(field).toEqual(Field.TxnSignature)
expect(field).toEqual(Field['TxnSignature'])
expect(value.toJSON()).toEqual(tx_json.TxnSignature)
}
{
const [field, value] = readField()
expect(field).toEqual(Field.Account)
expect(field).toEqual(Field['Account'])
expect(value.toJSON()).toEqual(tx_json.Account)
}
expect(parser.end()).toBe(true)
})
test('can be done with higher level apis', () => {
it('can be done with higher level apis', () => {
const parser = makeParser(transaction.binary)
const jsonFromBinary = readJSON(parser)
expect(jsonFromBinary).toEqual(tx_json)
})
test('readJSON (binary.decode) does not return STObject ', () => {
it('readJSON (binary.decode) does not return STObject ', () => {
const parser = makeParser(transaction.binary)
const jsonFromBinary = readJSON(parser)
expect(jsonFromBinary instanceof coreTypes.STObject).toBe(false)
@@ -188,8 +189,20 @@ function transactionParsingTests() {
})
}
interface AmountTest {
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- it is json
test_json: any
type_id: number
is_native: boolean
type: string
expected_hex: string
is_negative?: boolean
exponent?: number
error?: string
}
function amountParsingTests() {
fixtures.values_tests
;(fixtures.values_tests as AmountTest[])
.filter((obj) => obj.type === 'Amount')
.forEach((f, i) => {
if (f.error) {
@@ -201,7 +214,7 @@ function amountParsingTests() {
16,
)}...
as ${JSON.stringify(f.test_json)}`
test(testName, () => {
it(testName, () => {
const value = parser.readType(Amount)
// May not actually be in canonical form. The fixtures are to be used
// also for json -> binary;
@@ -209,7 +222,7 @@ function amountParsingTests() {
assertEqualAmountJSON(json, f.test_json)
if (f.exponent) {
const exponent = new BigNumber(json.value)
expect(exponent.e - 15).toEqual(f.exponent)
expect((exponent.e ?? 0) - 15).toEqual(f?.exponent)
}
})
})
@@ -218,31 +231,31 @@ function amountParsingTests() {
function fieldParsingTests() {
fixtures.fields_tests.forEach((f, i) => {
const parser = makeParser(f.expected_hex)
test(`fields[${i}]: parses ${f.expected_hex} as ${f.name}`, () => {
it(`fields[${i}]: parses ${f.expected_hex} as ${f.name}`, () => {
const field = parser.readField()
expect(field.name).toEqual(f.name)
expect(field.type.name).toEqual(f.type_name)
})
})
test('Field throws when type code out of range', () => {
it('Field throws when type code out of range', () => {
const parser = makeParser('0101')
expect(() => parser.readField()).toThrow(
new Error('Cannot read FieldOrdinal, type_code out of range'),
)
})
test('Field throws when field code out of range', () => {
it('Field throws when field code out of range', () => {
const parser = makeParser('1001')
expect(() => parser.readFieldOrdinal()).toThrowError(
expect(() => parser.readFieldOrdinal()).toThrow(
new Error('Cannot read FieldOrdinal, field_code out of range'),
)
})
test('Field throws when both type and field code out of range', () => {
it('Field throws when both type and field code out of range', () => {
const parser = makeParser('000101')
expect(() => parser.readFieldOrdinal()).toThrowError(
expect(() => parser.readFieldOrdinal()).toThrow(
new Error('Cannot read FieldOrdinal, type_code out of range'),
)
})
test('readUIntN', () => {
it('readUIntN', () => {
const parser = makeParser('0009')
expect(parser.readUIntN(2)).toEqual(9)
expect(() => parser.readUIntN(-1)).toThrow(new Error('invalid n'))
@@ -262,7 +275,7 @@ function assertRecyclable(json, forField) {
function nestedObjectTests() {
fixtures.whole_objects.forEach((f, i) => {
test(`whole_objects[${i}]: can parse blob into
it(`whole_objects[${i}]: can parse blob into
${JSON.stringify(
f.tx_json,
)}`, /* */ () => {
@@ -270,7 +283,8 @@ function nestedObjectTests() {
let ix = 0
while (!parser.end()) {
const [field, value] = parser.readFieldAndValue()
const expected = f.fields[ix]
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- this is a json object
const expected: any = f.fields[ix]
const expectedJSON = expected[1].json
const expectedField = expected[0]
const actual = toJSON(value)
@@ -383,7 +397,7 @@ function pathSetBinaryTests() {
],
]
test('works with long paths', () => {
it('works with long paths', () => {
const parser = makeParser(bytes)
const txn = readJSON(parser)
expect(txn.Paths).toEqual(expectedJSON)

View File

@@ -1,12 +1,10 @@
import fixtures from './fixtures/data-driven-tests.json'
const { binary } = require('../src/coretypes')
const { encode, decode } = require('../src')
const { makeParser, BytesList, BinarySerializer } = binary
const { coreTypes } = require('../src/types')
const { UInt8, UInt16, UInt32, UInt64, STObject } = coreTypes
const { Buffer } = require('buffer/')
const { loadFixture } = require('./utils')
const fixtures = loadFixture('data-driven-tests.json')
const deliverMinTx = require('./fixtures/delivermin-tx.json')
const deliverMinTxBinary = require('./fixtures/delivermin-tx-binary.json')
const SignerListSet = {
@@ -110,16 +108,16 @@ function bytesListTest() {
.put(Buffer.from([0]))
.put(Buffer.from([2, 3]))
.put(Buffer.from([4, 5]))
test('is an Array<Buffer>', function () {
it('is an Array<Buffer>', function () {
expect(Array.isArray(list.bytesArray)).toBe(true)
expect(list.bytesArray[0] instanceof Buffer).toBe(true)
})
test('keeps track of the length itself', function () {
it('keeps track of the length itself', function () {
expect(list.getLength()).toBe(5)
})
test('can join all arrays into one via toBytes', function () {
it('can join all arrays into one via toBytes', function () {
const joined = list.toBytes()
expect(joined).toHaveLength(5)
expect(joined.length).toEqual(5)
expect(joined).toEqual(Buffer.from([0, 2, 3, 4, 5]))
})
}
@@ -136,14 +134,14 @@ function assertRecycles(blob) {
function nestedObjectTests() {
fixtures.whole_objects.forEach((f, i) => {
test(`whole_objects[${i}]: can parse blob and dump out same blob`, () => {
it(`whole_objects[${i}]: can parse blob and dump out same blob`, () => {
assertRecycles(f.blob_with_no_signing)
})
})
}
function check(type, n, expected) {
test(`Uint${type.width * 8} serializes ${n} as ${expected}`, function () {
it(`Uint${type.width * 8} serializes ${n} as ${expected}`, function () {
const bl = new BytesList()
const serializer = new BinarySerializer(bl)
if (expected === 'throws') {
@@ -169,67 +167,67 @@ check(UInt64, 1, [0, 0, 0, 0, 0, 0, 0, 1])
check(UInt64, BigInt(1), [0, 0, 0, 0, 0, 0, 0, 1])
function deliverMinTest() {
test('can serialize DeliverMin', () => {
it('can serialize DeliverMin', () => {
expect(encode(deliverMinTx)).toEqual(deliverMinTxBinary)
})
}
function SignerListSetTest() {
test('can serialize SignerListSet', () => {
it('can serialize SignerListSet', () => {
expect(encode(SignerListSet.tx)).toEqual(SignerListSet.binary)
})
test('can serialize SignerListSet metadata', () => {
it('can serialize SignerListSet metadata', () => {
expect(encode(SignerListSet.tx.meta)).toEqual(SignerListSet.meta)
})
}
function DepositPreauthTest() {
test('can serialize DepositPreauth', () => {
it('can serialize DepositPreauth', () => {
expect(encode(DepositPreauth.tx)).toEqual(DepositPreauth.binary)
})
test('can serialize DepositPreauth metadata', () => {
it('can serialize DepositPreauth metadata', () => {
expect(encode(DepositPreauth.tx.meta)).toEqual(DepositPreauth.meta)
})
}
function EscrowTest() {
test('can serialize EscrowCreate', () => {
it('can serialize EscrowCreate', () => {
expect(encode(Escrow.create.tx)).toEqual(Escrow.create.binary)
})
test('can serialize EscrowFinish', () => {
it('can serialize EscrowFinish', () => {
expect(encode(Escrow.finish.tx)).toEqual(Escrow.finish.binary)
expect(encode(Escrow.finish.tx.meta)).toEqual(Escrow.finish.meta)
})
test('can serialize EscrowCancel', () => {
it('can serialize EscrowCancel', () => {
expect(encode(Escrow.cancel.tx)).toEqual(Escrow.cancel.binary)
})
}
function PaymentChannelTest() {
test('can serialize PaymentChannelCreate', () => {
it('can serialize PaymentChannelCreate', () => {
expect(encode(PaymentChannel.create.tx)).toEqual(
PaymentChannel.create.binary,
)
})
test('can serialize PaymentChannelFund', () => {
it('can serialize PaymentChannelFund', () => {
expect(encode(PaymentChannel.fund.tx)).toEqual(PaymentChannel.fund.binary)
})
test('can serialize PaymentChannelClaim', () => {
it('can serialize PaymentChannelClaim', () => {
expect(encode(PaymentChannel.claim.tx)).toEqual(PaymentChannel.claim.binary)
})
}
function NegativeUNLTest() {
test('can serialize NegativeUNL', () => {
it('can serialize NegativeUNL', () => {
expect(encode(NegativeUNL.tx)).toEqual(NegativeUNL.binary)
})
test('can deserialize NegativeUNL', () => {
it('can deserialize NegativeUNL', () => {
expect(decode(NegativeUNL.binary)).toEqual(NegativeUNL.tx)
})
}
function omitUndefinedTest() {
test('omits fields with undefined value', () => {
it('omits fields with undefined value', () => {
let encodedOmitted = encode(json_omitted)
let encodedUndefined = encode(json_undefined)
expect(encodedOmitted).toEqual(encodedUndefined)
@@ -238,7 +236,7 @@ function omitUndefinedTest() {
}
function ticketTest() {
test('can serialize TicketCreate', () => {
it('can serialize TicketCreate', () => {
expect(encode(Ticket.create.tx)).toEqual(Ticket.create.binary)
})
}
@@ -247,25 +245,25 @@ function nfTokenTest() {
const fixtures = require('./fixtures/nf-token.json')
for (const txName of Object.keys(fixtures)) {
test(`can serialize transaction ${txName}`, () => {
it(`can serialize transaction ${txName}`, () => {
expect(encode(fixtures[txName].tx.json)).toEqual(
fixtures[txName].tx.binary,
)
})
test(`can deserialize transaction ${txName}`, () => {
it(`can deserialize transaction ${txName}`, () => {
expect(decode(fixtures[txName].tx.binary)).toEqual(
fixtures[txName].tx.json,
)
})
test(`can serialize meta ${txName}`, () => {
it(`can serialize meta ${txName}`, () => {
expect(encode(fixtures[txName].meta.json)).toEqual(
fixtures[txName].meta.binary,
)
})
test(`can deserialize meta ${txName}`, () => {
it(`can deserialize meta ${txName}`, () => {
expect(decode(fixtures[txName].meta.binary)).toEqual(
fixtures[txName].meta.json,
)

View File

@@ -1,6 +1,6 @@
const { encode, decode, XrplDefinitions } = require('../src')
const normalDefinitionsJson = require('../src/enums/definitions.json')
const { UInt32 } = require('../dist/types/uint-32')
import { encode, decode, XrplDefinitions } from '../src'
import normalDefinitionsJson from '../src/enums/definitions.json'
import { UInt32 } from '../src/types/uint-32'
const txJson = {
Account: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',
@@ -13,7 +13,7 @@ const txJson = {
}
describe('encode and decode using new types as a parameter', function () {
test('can encode and decode a new TransactionType', function () {
it('can encode and decode a new TransactionType', function () {
const tx = { ...txJson, TransactionType: 'NewTestTransaction' }
// Before updating the types, this should not be encodable
expect(() => encode(tx)).toThrow()
@@ -28,14 +28,14 @@ describe('encode and decode using new types as a parameter', function () {
const encoded = encode(tx, newDefs)
expect(() => decode(encoded)).toThrow()
const decoded = decode(encoded, newDefs)
expect(decoded).toStrictEqual(tx)
expect(decoded).toEqual(tx)
})
test('can encode and decode a new Field', function () {
it('can encode and decode a new Field', function () {
const tx = { ...txJson, NewFieldDefinition: 10 }
// Before updating the types, undefined fields will be ignored on encode
expect(decode(encode(tx))).not.toStrictEqual(tx)
expect(decode(encode(tx))).not.toEqual(tx)
// Normally this would be generated directly from rippled with something like `server_definitions`.
// Added here to make it easier to see what is actually changing in the definitions.json file.
@@ -57,10 +57,10 @@ describe('encode and decode using new types as a parameter', function () {
const encoded = encode(tx, newDefs)
expect(() => decode(encoded)).toThrow()
const decoded = decode(encoded, newDefs)
expect(decoded).toStrictEqual(tx)
expect(decoded).toEqual(tx)
})
test('can encode and decode a new Field nested in STObject in STArray in STObject', function () {
it('can encode and decode a new Field nested in STObject in STArray in STObject', function () {
const tx = {
...txJson,
NewFieldArray: [
@@ -73,7 +73,7 @@ describe('encode and decode using new types as a parameter', function () {
}
// Before updating the types, undefined fields will be ignored on encode
expect(decode(encode(tx))).not.toStrictEqual(tx)
expect(decode(encode(tx))).not.toEqual(tx)
// Normally this would be generated directly from rippled with something like `server_definitions`.
// Added here to make it easier to see what is actually changing in the definitions.json file.
@@ -117,10 +117,10 @@ describe('encode and decode using new types as a parameter', function () {
const encoded = encode(tx, newDefs)
expect(() => decode(encoded)).toThrow()
const decoded = decode(encoded, newDefs)
expect(decoded).toStrictEqual(tx)
expect(decoded).toEqual(tx)
})
test('can encode and decode a new Type', function () {
it('can encode and decode a new Type', function () {
const tx = {
...txJson,
TestField: 10, // Should work the same as a UInt32
@@ -142,7 +142,7 @@ describe('encode and decode using new types as a parameter', function () {
])
// Test that before updating the types this tx fails to decode correctly. Note that undefined fields are ignored on encode.
expect(decode(encode(tx))).not.toStrictEqual(tx)
expect(decode(encode(tx))).not.toEqual(tx)
class NewType extends UInt32 {
// Should be the same as UInt32
@@ -155,6 +155,6 @@ describe('encode and decode using new types as a parameter', function () {
const encoded = encode(tx, newDefs)
expect(() => decode(encoded)).toThrow()
const decoded = decode(encoded, newDefs)
expect(decoded).toStrictEqual(tx)
expect(decoded).toEqual(tx)
})
})

View File

@@ -1,16 +1,15 @@
const { coreTypes } = require('../src/types')
const { Hash128, Hash160, Hash256, AccountID, Currency } = coreTypes
const { Buffer } = require('buffer/')
describe('Hash128', function () {
test('has a static width member', function () {
it('has a static width member', function () {
expect(Hash128.width).toBe(16)
})
test('can be unset', function () {
it('can be unset', function () {
const h1 = Hash128.from('')
expect(h1.toJSON()).toBe('')
})
test('can be compared against another', function () {
it('can be compared against another', function () {
const h1 = Hash128.from('100000000000000000000000000000000')
const h2 = Hash128.from('200000000000000000000000000000000')
const h3 = Hash128.from('000000000000000000000000000000003')
@@ -19,50 +18,50 @@ describe('Hash128', function () {
expect(h2.gt(h1)).toBe(true)
expect(h1.gt(h3)).toBe(true)
})
test('throws when constructed from invalid hash length', () => {
it('throws when constructed from invalid hash length', () => {
expect(() => Hash128.from('1000000000000000000000000000000')).toThrow(
'Invalid Hash length 15',
new Error('Invalid Hash length 15'),
)
expect(() => Hash128.from('10000000000000000000000000000000000')).toThrow(
'Invalid Hash length 17',
new Error('Invalid Hash length 17'),
)
})
})
describe('Hash160', function () {
test('has a static width member', function () {
it('has a static width member', function () {
expect(Hash160.width).toBe(20)
})
test('inherited by subclasses', function () {
it('inherited by subclasses', function () {
expect(AccountID.width).toBe(20)
expect(Currency.width).toBe(20)
})
test('can be compared against another', function () {
it('can be compared against another', function () {
const h1 = Hash160.from('1000000000000000000000000000000000000000')
const h2 = Hash160.from('2000000000000000000000000000000000000000')
const h3 = Hash160.from('0000000000000000000000000000000000000003')
expect(h1.lt(h2)).toBe(true)
expect(h3.lt(h2)).toBe(true)
})
test('throws when constructed from invalid hash length', () => {
it('throws when constructed from invalid hash length', () => {
expect(() =>
Hash160.from('10000000000000000000000000000000000000'),
).toThrow('Invalid Hash length 19')
).toThrow(new Error('Invalid Hash length 19'))
expect(() =>
Hash160.from('100000000000000000000000000000000000000000'),
).toThrow('Invalid Hash length 21')
).toThrow(new Error('Invalid Hash length 21'))
})
})
describe('Hash256', function () {
test('has a static width member', function () {
it('has a static width member', function () {
expect(Hash256.width).toBe(32)
})
test('has a ZERO_256 member', function () {
it('has a ZERO_256 member', function () {
expect(Hash256.ZERO_256.toJSON()).toBe(
'0000000000000000000000000000000000000000000000000000000000000000',
)
})
test('supports getting the nibblet values at given positions', function () {
it('supports getting the nibblet values at given positions', function () {
const h = Hash256.from(
'1359BD0000000000000000000000000000000000000000000000000000000000',
)
@@ -76,56 +75,56 @@ describe('Hash256', function () {
})
describe('Currency', function () {
test('Decoding allows dodgy XRP without throwing', function () {
it('Decoding allows dodgy XRP without throwing', function () {
const currencyCode = '0000000000000000000000005852500000000000'
expect(Currency.from(currencyCode).toJSON()).toBe(currencyCode)
})
test('Currency code with lowercase letters decodes to ISO code', () => {
it('Currency code with lowercase letters decodes to ISO code', () => {
expect(Currency.from('xRp').toJSON()).toBe('xRp')
})
test('Currency codes with symbols decodes to ISO code', () => {
it('Currency codes with symbols decodes to ISO code', () => {
expect(Currency.from('x|p').toJSON()).toBe('x|p')
})
test('Currency code with non-standard symbols decodes to hex', () => {
it('Currency code with non-standard symbols decodes to hex', () => {
expect(Currency.from(':::').toJSON()).toBe(
'0000000000000000000000003A3A3A0000000000',
)
})
test('Currency codes can be exclusively standard symbols', () => {
it('Currency codes can be exclusively standard symbols', () => {
expect(Currency.from('![]').toJSON()).toBe('![]')
})
test('Currency codes with uppercase and 0-9 decode to ISO codes', () => {
it('Currency codes with uppercase and 0-9 decode to ISO codes', () => {
expect(Currency.from('X8P').toJSON()).toBe('X8P')
expect(Currency.from('USD').toJSON()).toBe('USD')
})
test('Currency codes with no contiguous zeroes in first 96 type code & reserved bits', function () {
it('Currency codes with no contiguous zeroes in first 96 type code & reserved bits', function () {
expect(
Currency.from('0000000023410000000000005852520000000000').iso(),
).toBe(null)
})
test('Currency codes with no contiguous zeroes in last 40 reserved bits', function () {
it('Currency codes with no contiguous zeroes in last 40 reserved bits', function () {
expect(
Currency.from('0000000000000000000000005852527570656500').iso(),
).toBe(null)
})
test('can be constructed from a Buffer', function () {
it('can be constructed from a Buffer', function () {
const xrp = new Currency(Buffer.alloc(20))
expect(xrp.iso()).toBe('XRP')
})
test('Can handle non-standard currency codes', () => {
it('Can handle non-standard currency codes', () => {
const currency = '015841551A748AD2C1F76FF6ECB0CCCD00000000'
expect(Currency.from(currency).toJSON()).toBe(currency)
})
test('Can handle other non-standard currency codes', () => {
it('Can handle other non-standard currency codes', () => {
const currency = '0000000000414C6F676F30330000000000000000'
expect(Currency.from(currency).toJSON()).toBe(currency)
})
test('throws on invalid reprs', function () {
it('throws on invalid reprs', function () {
expect(() => Currency.from(Buffer.alloc(19))).toThrow()
expect(() => Currency.from(1)).toThrow()
expect(() =>

View File

@@ -1,29 +0,0 @@
const { loadFixture } = require('./utils')
const {
transactionTreeHash,
ledgerHash,
accountStateHash,
} = require('../src/ledger-hashes')
describe('Ledger Hashes', function () {
function testFactory(ledgerFixture) {
describe(`can calculate hashes for ${ledgerFixture}`, function () {
const ledger = loadFixture(ledgerFixture)
test('computes correct account state hash', function () {
expect(accountStateHash(ledger.accountState).toHex()).toBe(
ledger.account_hash,
)
})
test('computes correct transaction tree hash', function () {
expect(transactionTreeHash(ledger.transactions).toHex()).toBe(
ledger.transaction_hash,
)
})
test('computes correct ledger header hash', function () {
expect(ledgerHash(ledger).toHex()).toBe(ledger.hash)
})
})
}
testFactory('ledger-full-40000.json')
testFactory('ledger-full-38129.json')
})

View File

@@ -0,0 +1,33 @@
import ledgerFull38129 from './fixtures/ledger-full-38129.json'
import ledgerFull40000 from './fixtures/ledger-full-40000.json'
const {
transactionTreeHash,
ledgerHash,
accountStateHash,
} = require('../src/ledger-hashes')
// Verifies the three ledger-hash computations (account state, transaction
// tree, ledger header) against two full-ledger JSON fixtures imported
// statically (replacing the old loadFixture() runtime loader).
describe('Ledger Hashes', function () {
// Builds one sub-suite per fixture ledger. `ledgerIndex` is only used in
// the suite title; `ledger` is the parsed fixture whose recorded hashes
// (account_hash / transaction_hash / hash) are the expected values.
function testFactory(
ledgerIndex: number,
ledger: typeof ledgerFull38129 | typeof ledgerFull40000,
) {
describe(`can calculate hashes for ledger ${ledgerIndex}`, function () {
it('computes correct account state hash', function () {
expect(accountStateHash(ledger.accountState).toHex()).toBe(
ledger.account_hash,
)
})
it('computes correct transaction tree hash', function () {
expect(transactionTreeHash(ledger.transactions).toHex()).toBe(
ledger.transaction_hash,
)
})
it('computes correct ledger header hash', function () {
expect(ledgerHash(ledger).toHex()).toBe(ledger.hash)
})
})
}
testFactory(38129, ledgerFull38129)
testFactory(40000, ledgerFull40000)
})

View File

@@ -1,4 +1,4 @@
const { encode, decode } = require('../src')
import { encode, decode } from '../src'
let str =
'1100612200000000240000000125000068652D0000000055B6632D6376A2D9319F20A1C6DCCB486432D1E4A79951229D4C3DE2946F51D56662400009184E72A00081140DD319918CD5AE792BF7EC80D63B0F01B4573BBC'
@@ -31,16 +31,16 @@ let jsonUpper = {
}
describe('Lowercase hex test', () => {
test('Correctly decodes', () => {
it('Correctly decodes', () => {
expect(decode(lower)).toEqual(decode(str))
})
test('Re-encodes to uppercase hex', () => {
it('Re-encodes to uppercase hex', () => {
expect(encode(decode(lower))).toEqual(str)
})
test('Encode when hex field lowercase', () => {
it('Encode when hex field lowercase', () => {
expect(encode(json)).toBe(bin)
})
test('Re-decodes to uppercase hex', () => {
it('Re-decodes to uppercase hex', () => {
expect(decode(encode(json))).toEqual(jsonUpper)
})
})

View File

@@ -1,4 +1,4 @@
const { encode, decode } = require('../src')
import { encode, decode } from '../src'
let json = {
Account: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp',
@@ -20,19 +20,19 @@ let binary =
'24000000006840000000000000007300760081140000000000000000000000000000000000000000'
describe('Can encode Pseudo Transactions', () => {
test('Correctly encodes Pseudo Transaciton', () => {
it('Correctly encodes Pseudo Transaciton', () => {
expect(encode(json)).toEqual(binary)
})
test('Can decode account objects', () => {
it('Can decode account objects', () => {
expect(decode(encode(json))).toEqual(json)
})
test('Blank AccountID is ACCOUNT_ZERO', () => {
it('Blank AccountID is ACCOUNT_ZERO', () => {
expect(encode(json_blank_acct)).toEqual(binary)
})
test('Decodes Blank AccountID', () => {
it('Decodes Blank AccountID', () => {
expect(decode(encode(json_blank_acct))).toEqual(json)
})
})

View File

@@ -4,11 +4,11 @@ describe('Quality encode/decode', function () {
const bookDirectory =
'4627DFFCFF8B5A265EDBD8AE8C14A52325DBFEDAF4F5C32E5D06F4C3362FE1D0'
const expectedQuality = '195796912.5171664'
test('can decode', function () {
it('can decode', function () {
const decimal = quality.decode(bookDirectory)
expect(decimal.toString()).toBe(expectedQuality)
})
test('can encode', function () {
it('can encode', function () {
const bytes = quality.encode(expectedQuality)
expect(bytes.toString('hex').toUpperCase()).toBe(bookDirectory.slice(-16))
})

View File

@@ -1,8 +1,10 @@
const { ShaMap } = require('../src/shamap')
const { binary, HashPrefix } = require('../src/coretypes')
const { coreTypes } = require('../src/types')
const { loadFixture } = require('./utils')
const { Buffer } = require('buffer/')
import ledgerFull38129 from './fixtures/ledger-full-38129.json'
import ledgerFull40000 from './fixtures/ledger-full-40000.json'
import { BytesList } from '../src/serdes/binary-serializer'
import { ShaMap, ShaMapLeaf, ShaMapNode } from '../src/shamap'
import { binary, HashPrefix } from '../src/coretypes'
import { coreTypes, Hash256 } from '../src/types'
function now() {
return Number(Date.now()) / 1000
@@ -10,14 +12,19 @@ function now() {
const ZERO = '0000000000000000000000000000000000000000000000000000000000000000'
function makeItem(indexArg) {
function makeItem(
indexArg: string,
): [
Hash256,
{ toBytesSink: (sink: BytesList) => void; hashPrefix: () => Buffer },
] {
let str = indexArg
while (str.length < 64) {
str += '0'
}
const index = coreTypes.Hash256.from(str)
const index = Hash256.from(str)
const item = {
toBytesSink(sink) {
toBytesSink(sink: BytesList) {
index.toBytesSink(sink)
},
hashPrefix() {
@@ -30,11 +37,11 @@ function makeItem(indexArg) {
describe('ShaMap', () => {
now()
test('hashes to zero when empty', () => {
it('hashes to zero when empty', () => {
const map = new ShaMap()
expect(map.hash().toHex()).toBe(ZERO)
})
test('creates the same hash no matter which order items are added', () => {
it('creates the same hash no matter which order items are added', () => {
let map = new ShaMap()
const items = [
'0',
@@ -47,22 +54,23 @@ describe('ShaMap', () => {
'12',
'122',
]
items.forEach((i) => map.addItem(...makeItem(i)))
// @ts-expect-error -- we are mocking nodes
items.forEach((i) => map.addItem(...(makeItem(i) as ShaMapNode)))
const h1 = map.hash()
expect(h1.eq(h1)).toBe(true)
map = new ShaMap()
items.reverse().forEach((i) => map.addItem(...makeItem(i)))
expect(map.hash()).toStrictEqual(h1)
// @ts-expect-error -- we are mocking nodes
items.reverse().forEach((i) => map.addItem(...(makeItem(i) as ShaMapNode)))
expect(map.hash()).toEqual(h1)
})
function factory(fixture) {
test(`recreate account state hash from ${fixture}`, () => {
function factory(ledger: typeof ledgerFull38129 | typeof ledgerFull40000) {
it(`recreate account state hash from ${ledger}`, () => {
const map = new ShaMap()
const ledger = loadFixture(fixture)
// const t = now();
const leafNodePrefix = HashPrefix.accountStateEntry
ledger.accountState
.map((e, i) => {
if ((i > 1000) & (i % 1000 === 0)) {
.map((e, i): ShaMapLeaf => {
if (i > 1000 && i % 1000 === 0) {
console.log(e.index)
console.log(i)
}
@@ -72,18 +80,16 @@ describe('ShaMap', () => {
hashPrefix() {
return leafNodePrefix
},
toBytesSink(sink) {
toBytesSink(sink: BytesList) {
sink.put(bytes)
},
}
} as ShaMapLeaf
})
.forEach((so) => map.addItem(so.index, so))
.forEach((so: ShaMapLeaf) => map.addItem(so.index, so))
expect(map.hash().toHex()).toBe(ledger.account_hash)
// console.log('took seconds: ', (now() - t));
})
}
factory('ledger-full-38129.json')
factory('ledger-full-40000.json')
// factory('ledger-4320277.json');
// factory('14280680.json');
factory(ledgerFull38129)
factory(ledgerFull40000)
})

View File

@@ -1,9 +1,9 @@
import { XrplDefinitions } from '../src/enums/xrpl-definitions'
const {
encodeForSigning,
encodeForSigningClaim,
encodeForMultisigning,
} = require('../src')
const { XrplDefinitions } = require('../src/enums/xrpl-definitions')
const normalDefinitions = require('../src/enums/definitions.json')
@@ -28,7 +28,7 @@ const tx_json = {
}
describe('Signing data', function () {
test('can create single signing blobs', function () {
it('can create single signing blobs', function () {
const actual = encodeForSigning(tx_json)
expect(actual).toBe(
[
@@ -69,7 +69,7 @@ describe('Signing data', function () {
)
})
test('can create single signing blobs with modified type', function () {
it('can create single signing blobs with modified type', function () {
const customPaymentDefinitions = JSON.parse(
JSON.stringify(normalDefinitions),
)
@@ -116,18 +116,18 @@ describe('Signing data', function () {
)
})
test('can fail gracefully for invalid TransactionType', function () {
it('can fail gracefully for invalid TransactionType', function () {
const invalidTransactionType = {
...tx_json,
TransactionType: 'NotAPayment',
}
expect(() => encodeForSigning(invalidTransactionType)).toThrow(
expect(() => encodeForSigning(invalidTransactionType)).toThrowError(
/NotAPayment/u,
)
})
test('can create multi signing blobs', function () {
it('can create multi signing blobs', function () {
const signingAccount = 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN'
const signingJson = { ...tx_json, SigningPubKey: '' }
const actual = encodeForMultisigning(signingJson, signingAccount)
@@ -172,7 +172,7 @@ describe('Signing data', function () {
)
})
test('can create multi signing blobs with custom definitions', function () {
it('can create multi signing blobs with custom definitions', function () {
const customPaymentDefinitions = JSON.parse(
JSON.stringify(normalDefinitions),
)
@@ -223,7 +223,7 @@ describe('Signing data', function () {
)
})
test('can create claim blob', function () {
it('can create claim blob', function () {
const channel =
'43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1'
const amount = '1000'

View File

@@ -1,4 +1,4 @@
const { encode, decode } = require('../src')
import { encode, decode } from '../src'
// Notice: no Amount or Fee
const tx_json = {
@@ -19,12 +19,12 @@ const tx_json = {
}
describe('encoding and decoding tx_json', function () {
test('can encode tx_json without Amount or Fee', function () {
it('can encode tx_json without Amount or Fee', function () {
const encoded = encode(tx_json)
const decoded = decode(encoded)
expect(tx_json).toEqual(decoded)
})
test('can encode tx_json with Amount and Fee', function () {
it('can encode tx_json with Amount and Fee', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000',
Fee: '10',
@@ -33,7 +33,7 @@ describe('encoding and decoding tx_json', function () {
const decoded = decode(encoded)
expect(my_tx).toEqual(decoded)
})
test('can encode tx_json with TicketCount', function () {
it('can encode tx_json with TicketCount', function () {
const my_tx = Object.assign({}, tx_json, {
TicketCount: 2,
})
@@ -41,7 +41,7 @@ describe('encoding and decoding tx_json', function () {
const decoded = decode(encoded)
expect(my_tx).toEqual(decoded)
})
test('can encode tx_json with TicketSequence', function () {
it('can encode tx_json with TicketSequence', function () {
const my_tx = Object.assign({}, tx_json, {
Sequence: 0,
TicketSequence: 2,
@@ -50,7 +50,7 @@ describe('encoding and decoding tx_json', function () {
const decoded = decode(encoded)
expect(my_tx).toEqual(decoded)
})
test('can decode a transaction with an issued currency that evaluates to XRP', function () {
it('can decode a transaction with an issued currency that evaluates to XRP', function () {
// Encoding is done prior, because this is disallowed during encoding with client libraries to avoid scam XRP tokens.
const expectedTx = {
TransactionType: 'TrustSet',
@@ -72,7 +72,7 @@ describe('encoding and decoding tx_json', function () {
const decoded = decode(encoded)
expect(expectedTx).toEqual(decoded)
})
test('throws when Amount is invalid', function () {
it('throws when Amount is invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000.001',
Fee: '10',
@@ -81,7 +81,7 @@ describe('encoding and decoding tx_json', function () {
encode(my_tx)
}).toThrow()
})
test('throws when Fee is invalid', function () {
it('throws when Fee is invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000',
Fee: '10.123',
@@ -90,7 +90,7 @@ describe('encoding and decoding tx_json', function () {
encode(my_tx)
}).toThrow()
})
test('throws when Amount and Fee are invalid', function () {
it('throws when Amount and Fee are invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000.789',
Fee: '10.123',
@@ -99,7 +99,7 @@ describe('encoding and decoding tx_json', function () {
encode(my_tx)
}).toThrow()
})
test('throws when Amount is a number instead of a string-encoded integer', function () {
it('throws when Amount is a number instead of a string-encoded integer', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1000.789,
})
@@ -108,7 +108,7 @@ describe('encoding and decoding tx_json', function () {
}).toThrow()
})
test('throws when Fee is a number instead of a string-encoded integer', function () {
it('throws when Fee is a number instead of a string-encoded integer', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1234.56,
})

View File

@@ -1,34 +0,0 @@
const { coreTypes } = require('../src/types')
const { SerializedType } = require('../src/types/serialized-type')
describe('SerializedType interfaces', () => {
Object.entries(coreTypes).forEach(([name, Value]) => {
test(`${name} has a \`from\` static constructor`, () => {
expect(Value.from && Value.from !== Array.from).toBe(true)
})
test(`${name} has a default constructor`, () => {
expect(new Value()).not.toBe(undefined)
})
test(`${name}.from will return the same object`, () => {
const instance = new Value()
expect(Value.from(instance) === instance).toBe(true)
})
test(`${name} instances have toBytesSink`, () => {
expect(new Value().toBytesSink).not.toBe(undefined)
})
test(`${name} instances have toJSON`, () => {
expect(new Value().toJSON).not.toBe(undefined)
})
test(`${name}.from(json).toJSON() == json`, () => {
const newJSON = new Value().toJSON()
expect(Value.from(newJSON).toJSON()).toEqual(newJSON)
})
describe(`${name} supports all methods of the SerializedType mixin`, () => {
Object.keys(SerializedType.prototype).forEach((k) => {
test(`new ${name}.prototype.${k} !== undefined`, () => {
expect(Value.prototype[k]).not.toBe(undefined)
})
})
})
})
})

View File

@@ -0,0 +1,32 @@
import { SerializedType } from '../src/types/serialized-type'
import { coreTypes } from '../src/types'
// Checks that every registered core type honors the SerializedType
// contract: a static `from` constructor, a no-argument constructor,
// identity-preserving `from`, the serialization hooks, a JSON round-trip,
// and actual inheritance from the SerializedType base class.
describe('SerializedType implementations', () => {
// coreTypes maps type names (e.g. 'Hash256') to their class constructors.
Object.entries(coreTypes).forEach(([name, Value]) => {
it(`${name} has a \`from\` static constructor`, () => {
expect(Value.from).toBeDefined()
// Guard against a class merely inheriting Array.from instead of
// defining its own factory.
expect(Value.from).not.toEqual(Array.from)
})
// SerializedType's constructor allows omitting the byte array, so every
// subtype must be constructible with no arguments.
it(`${name} has a default constructor`, () => {
expect(new Value()).not.toBe(undefined)
})
// `from` must return an existing instance unchanged (no copying).
it(`${name}.from will return the same object`, () => {
const instance = new Value()
expect(Value.from(instance) === instance).toBe(true)
})
it(`${name} instances have toBytesSink`, () => {
expect(new Value().toBytesSink).not.toBe(undefined)
})
it(`${name} instances have toJSON`, () => {
expect(new Value().toJSON).not.toBe(undefined)
})
// Round-trip: the default instance's JSON form must re-parse to the
// same JSON form.
it(`${name}.from(json).toJSON() == json`, () => {
const newJSON = new Value().toJSON()
expect(Value.from(newJSON).toJSON()).toEqual(newJSON)
})
// Instance check replaces the old prototype-key enumeration test; it is
// both stricter and jasmine-compatible.
it(`${name} extends SerializedType mixin`, () => {
const obj = new Value()
expect(obj).toBeInstanceOf(SerializedType)
})
})
})

View File

@@ -1,7 +1,5 @@
const { coreTypes } = require('../src/types')
const { UInt8, UInt64 } = coreTypes
const { encode } = require('../src')
import { UInt8, UInt64 } from '../src/types'
import { encode } from '../src'
const binary =
'11007222000300003700000000000000003800000000000000006280000000000000000000000000000000000000005553440000000000000000000000000000000000000000000000000166D5438D7EA4C680000000000000000000000000005553440000000000AE123A8556F3CF91154711376AFB0F894F832B3D67D5438D7EA4C680000000000000000000000000005553440000000000F51DFC2A09D62CBBA1DFBDD4691DAC96AD98B90F'
@@ -98,51 +96,51 @@ const jsonEntry2 = {
index: '0000041EFD027808D3F78C8352F97E324CB816318E00B977C74ECDDC7CD975B2',
}
test('compareToTests[0]', () => {
it('compareToTests[0]', () => {
expect(UInt8.from(124).compareTo(UInt64.from(124))).toBe(0)
})
test('compareToTest[1]', () => {
it('compareToTest[1]', () => {
expect(UInt64.from(124).compareTo(UInt8.from(124))).toBe(0)
})
test('compareToTest[2]', () => {
it('compareToTest[2]', () => {
expect(UInt64.from(124).compareTo(UInt8.from(123))).toBe(1)
})
test('compareToTest[3]', () => {
it('compareToTest[3]', () => {
expect(UInt8.from(124).compareTo(UInt8.from(13))).toBe(1)
})
test('compareToTest[4]', () => {
it('compareToTest[4]', () => {
expect(UInt8.from(124).compareTo(124)).toBe(0)
})
test('compareToTest[5]', () => {
it('compareToTest[5]', () => {
expect(UInt64.from(124).compareTo(124)).toBe(0)
})
test('compareToTest[6]', () => {
it('compareToTest[6]', () => {
expect(UInt64.from(124).compareTo(123)).toBe(1)
})
test('compareToTest[7]', () => {
it('compareToTest[7]', () => {
expect(UInt8.from(124).compareTo(13)).toBe(1)
})
test('UInt64 from string zero', () => {
it('UInt64 from string zero', () => {
expect(UInt64.from('0')).toEqual(UInt64.from(0))
expect(encode(json)).toEqual(binary)
})
test('UInt64 from non 16 length hex', () => {
it('UInt64 from non 16 length hex', () => {
expect(encode(jsonEntry0)).toEqual(binaryEntry0)
expect(encode(jsonEntry1)).toEqual(binaryEntry1)
expect(encode(jsonEntry2)).toEqual(binaryEntry2)
})
test('valueOfTests', () => {
it('valueOf tests', () => {
let val = UInt8.from(1)
val |= 0x2
expect(val).toBe(3)
expect(val.valueOf() | 0x2).toBe(3)
})

View File

@@ -1,30 +0,0 @@
const fs = require("fs");
const { Buffer } = require('buffer/')
function hexOnly(hex) {
return hex.replace(/[^a-fA-F0-9]/g, "");
}
function unused() {}
function parseHexOnly(hex) {
return Buffer.from(hexOnly(hex), "hex");
}
function loadFixture(relativePath) {
const fn = __dirname + "/fixtures/" + relativePath;
return require(fn);
}
function loadFixtureText(relativePath) {
const fn = __dirname + "/fixtures/" + relativePath;
return fs.readFileSync(fn).toString("utf8");
}
module.exports = {
hexOnly,
parseHexOnly,
loadFixture,
loadFixtureText,
unused,
};

View File

@@ -0,0 +1,7 @@
/**
 * Strip every character that is not a hexadecimal digit from a string.
 *
 * @param hex - Arbitrary text that may mix hex digits with separators,
 *   whitespace, or other noise.
 * @returns The input with all non-hex characters removed (case preserved).
 */
export function hexOnly(hex: string): string {
  const nonHexChars = /[^0-9a-fA-F]/g
  return hex.replace(nonHexChars, '')
}
/**
 * Decode the hex digits of a string into a Buffer, ignoring any
 * non-hex characters (whitespace, punctuation, etc.).
 *
 * @param hex - Text containing hex digits, possibly with separators.
 * @returns A Buffer decoded from the concatenated hex digits.
 */
export function parseHexOnly(hex: string): Buffer {
  // Filter to hex digits first (same normalization hexOnly applies),
  // then let Buffer perform the actual base-16 decoding.
  const digits = hex.replace(/[^a-fA-F0-9]/g, '')
  return Buffer.from(digits, 'hex')
}

View File

@@ -0,0 +1,9 @@
'use strict'
const { merge } = require('webpack-merge')
// NOTE(review): '../../../weback.test.config' — the 'weback' spelling looks
// like a typo, but the require path must match the actual filename at the
// monorepo root; confirm before renaming either side.
const { webpackForTest } = require('../../../weback.test.config')
const { getDefaultConfiguration } = require('../../../webpack.config')
// Browser-test bundle: the package's default webpack configuration merged
// with the shared test-harness configuration, using './test/index.ts'
// (resolved relative to this package) as the test entry point.
module.exports = merge(
getDefaultConfiguration(),
webpackForTest('./test/index.ts', __dirname),
)

View File

@@ -1,5 +1,5 @@
const { encode, decode } = require('./../src/index')
const fixtures = require('./fixtures/x-codec-fixtures.json')
import { encode, decode } from '../src'
import fixtures from './fixtures/x-codec-fixtures.json'
let json_x1 = {
OwnerCount: 0,
@@ -122,56 +122,56 @@ let json_issued_with_tag = {
describe('X-Address Account is equivalent to a classic address w/ SourceTag', () => {
let encoded_x = encode(json_x1)
let encoded_r = encode(json_r1)
test('Can encode with x-Address', () => {
it('Can encode with x-Address', () => {
expect(encoded_x).toEqual(encoded_r)
})
test('decoded X-address is object w/ source and tag', () => {
it('decoded X-address is object w/ source and tag', () => {
let decoded_x = decode(encoded_x)
expect(decoded_x).toEqual(json_r1)
})
test('Encoding issuer X-Address w/ undefined destination tag', () => {
it('Encoding issuer X-Address w/ undefined destination tag', () => {
expect(encode(json_null_x)).toEqual(encode(json_null_r))
})
test('Throws when X-Address is invalid', () => {
expect(() => encode(json_invalid_x)).toThrow('checksum_invalid')
it('Throws when X-Address is invalid', () => {
expect(() => encode(json_invalid_x)).toThrow(new Error('checksum_invalid'))
})
test('Encodes issued currency w/ x-address', () => {
it('Encodes issued currency w/ x-address', () => {
expect(encode(json_issued_x)).toEqual(encode(json_issued_r))
})
})
describe('Invalid X-Address behavior', () => {
test('X-Address with tag throws value for invalid field', () => {
it('X-Address with tag throws value for invalid field', () => {
expect(() => encode(invalid_json_issuer_tagged)).toThrow(
new Error('Issuer cannot have an associated tag'),
)
})
test('Throws when Account has both X-Addr and Destination Tag', () => {
it('Throws when Account has both X-Addr and Destination Tag', () => {
expect(() => encode(invalid_json_x_and_tagged)).toThrow(
new Error('Cannot have Account X-Address and SourceTag'),
)
})
test('Throws when issued currency has tag', () => {
it('Throws when issued currency has tag', () => {
expect(() => encode(json_issued_with_tag)).toThrow(
'Only allowed to have tag on Account or Destination',
new Error('Only allowed to have tag on Account or Destination'),
)
})
})
describe('ripple-binary-codec x-address test', function () {
function makeSuite(name, entries) {
function makeSuite(name: string, entries: typeof fixtures.transactions) {
describe(name, function () {
entries.forEach((t, testN) => {
test(`${name}[${testN}] encodes X-address json equivalent to classic address json`, () => {
it(`${name}[${testN}] encodes X-address json equivalent to classic address json`, () => {
expect(encode(t.rjson)).toEqual(encode(t.xjson))
})
test(`${name}[${testN}] decodes X-address json equivalent to classic address json`, () => {
it(`${name}[${testN}] decodes X-address json equivalent to classic address json`, () => {
expect(decode(encode(t.xjson))).toEqual(t.rjson)
})
})

View File

@@ -0,0 +1,7 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"rootDir": "./src",
},
"include": ["./src/**/*.ts", "./src/**/*.json"]
}

View File

@@ -3,5 +3,5 @@
{
// extend your base config so you don't have to redefine your compilerOptions
"extends": "./tsconfig.json",
"include": ["src/**/*.ts", "test/**/*.js"]
}
"include": ["src/**/*.ts", "test/**/*.ts"]
}

View File

@@ -5,7 +5,6 @@
"lib": [
"es2017"
],
"rootDir": "./src",
"outDir": "./dist",
"noUnusedLocals": true,
"noUnusedParameters": true,

View File

@@ -6,7 +6,7 @@
"build": "tsc --build tsconfig.build.json",
"test": "jest --verbose false --silent=false ./test/*.test.ts",
"test:browser": "npm run build && karma start ./karma.config.js",
"clean": "rm -rf ./dist ./coverage tsconfig.tsbuildinfo",
"clean": "rm -rf ./dist ./coverage ./test/testCompiledForWeb tsconfig.build.tsbuildinfo",
"lint": "eslint . --ext .ts",
"prepublish": "npm run lint && npm test"
},