Mirror of https://github.com/Xahau/xahau.js.git (synced 2025-11-20 12:15:51 +00:00)
Linter config lints test directory (#99)
Modify eslint config to lint ./test/
@@ -26,6 +26,7 @@ module.exports = {
    '@typescript-eslint', // Add some TypeScript specific rules, and disable rules covered by the typechecker
    'import', // Add rules that help validate proper imports
    'prettier', // Allows running prettier as an ESLint rule, and reporting differences as individual linting issues
    'jest'
  ],

  extends: [
@@ -58,16 +59,24 @@ module.exports = {
  overrides: [
    // Overrides for all test files
    {
      files: 'test/**/*.ts',
      files: 'test/**/*.test.js',
      extends: ["plugin:jest/recommended"],
      rules: {
        // For our Mocha test files, the pattern has been to have unnamed functions
        'func-names': 'off',
        // Using non-null assertions (obj!.property) cancels the benefits of the strict null-checking mode, but these are test files, so we don't care.
        '@typescript-eslint/no-non-null-assertion': 'off',
        // For some test files, we shadow testing constants with function parameter names
        'no-shadow': 'off',
        // Some of our test files declare helper classes with errors
        'max-classes-per-file': 'off',
        // Test files are in javascript, turn off TypeScript linting.
        '@typescript-eslint/no-var-requires': 'off',
        '@typescript-eslint/no-unsafe-call': 'off',
        '@typescript-eslint/no-unsafe-member-access': 'off',
        '@typescript-eslint/restrict-template-expressions': 'off',
        '@typescript-eslint/no-unsafe-assignment': 'off',
        '@typescript-eslint/restrict-template-expressions': 'off',
        '@typescript-eslint/no-unsafe-return': 'off',
        '@typescript-eslint/unbound-method': 'off'
      },
    },
    {
@@ -79,6 +88,8 @@ module.exports = {
      { argsIgnorePattern: '^_' },
    ],

    '@typescript-eslint/ban-types': 'off',

    // These rules are deprecated, but we have an old config that enables it
    '@typescript-eslint/camelcase': 'off',
    '@typescript-eslint/ban-ts-ignore': 'off',
@@ -87,7 +98,6 @@ module.exports = {
    '@typescript-eslint/no-unsafe-call': 'off',
    '@typescript-eslint/no-unsafe-member-access': 'off',
    '@typescript-eslint/no-unsafe-assignment': 'off',
    '@typescript-eslint/ban-types': 'off',
    "spaced-comment": ["error", "always"],
    },
  },

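Read together, the two config hunks above amount to the override entry sketched below for the plain-JavaScript Jest tests. This is a reconstruction from the diff view, not the committed file verbatim; the exact position of the entry inside the existing overrides array and all untouched parser and settings blocks are assumed.

// .eslintrc.js (sketch): override for the test files, assembled from the hunks above
module.exports = {
  plugins: ['@typescript-eslint', 'import', 'prettier', 'jest'],
  overrides: [
    {
      files: 'test/**/*.test.js',
      extends: ['plugin:jest/recommended'],
      rules: {
        'func-names': 'off',
        'no-shadow': 'off',
        'max-classes-per-file': 'off',
        '@typescript-eslint/no-non-null-assertion': 'off',
        // The test files are JavaScript, so the type-aware rules are switched off
        '@typescript-eslint/no-var-requires': 'off',
        '@typescript-eslint/no-unsafe-call': 'off',
        '@typescript-eslint/no-unsafe-member-access': 'off',
        '@typescript-eslint/no-unsafe-assignment': 'off',
        '@typescript-eslint/no-unsafe-return': 'off',
        '@typescript-eslint/restrict-template-expressions': 'off',
        '@typescript-eslint/unbound-method': 'off',
      },
    },
  ],
}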
@@ -22,20 +22,21 @@
    "@types/node": "^14.0.10",
    "@typescript-eslint/eslint-plugin": "^3.2.0",
    "@typescript-eslint/parser": "^3.2.0",
    "eslint": "^7.2.0",
    "eslint": "^7.7.0",
    "eslint-config-prettier": "^6.11.0",
    "eslint-plugin-import": "^2.21.1",
    "eslint-plugin-jest": "^23.20.0",
    "eslint-plugin-mocha": "^7.0.1",
    "eslint-plugin-prettier": "^3.1.3",
    "prettier": "^2.0.4",
    "jest": "^26.0.1",
    "prettier": "^2.0.4",
    "typescript": "^3.9.5"
  },
  "scripts": {
    "compile": "tsc && cp ./src/enums/definitions.json ./dist/enums",
    "prepare": "npm run compile && npm test",
    "test": "jest",
    "lint": "eslint . --ext .ts --fix"
    "lint": "eslint . --ext .ts --ext .test.js"
  },
  "repository": {
    "type": "git",

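For reference, the tooling-related blocks of package.json after this change would read roughly as below. The excerpt is assembled only from the hunk above: the block names (devDependencies, scripts) and every entry not visible in the hunk are assumptions, and the versions are copied from the diff as shown.

"devDependencies": {
  "eslint": "^7.7.0",
  "eslint-plugin-jest": "^23.20.0",
  "jest": "^26.0.1",
  "prettier": "^2.0.4",
  "typescript": "^3.9.5"
},
"scripts": {
  "compile": "tsc && cp ./src/enums/definitions.json ./dist/enums",
  "prepare": "npm run compile && npm test",
  "test": "jest",
  "lint": "eslint . --ext .ts --ext .test.js"
}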
@@ -1,42 +1,43 @@
const _ = require('lodash')
const { loadFixture } = require('./utils')
const { coreTypes } = require('../dist/types')
const { Amount } = coreTypes
const fixtures = loadFixture('data-driven-tests.json')
const { loadFixture } = require("./utils");
const { coreTypes } = require("../dist/types");
const { Amount } = coreTypes;
const fixtures = loadFixture("data-driven-tests.json");

function amountErrorTests () {
  fixtures.values_tests.filter(obj => obj.type === 'Amount').forEach(f => {
    // We only want these with errors
    if (!f.error) {
      return
    }
    const testName = `${JSON.stringify(f.test_json)}\n\tis invalid ` +
      `because: ${f.error}`
    it(testName, () => {
      expect(() => {
        Amount.from(f.test_json)
        JSON.stringify(f.test_json)
      }).toThrow()
    })
  })
function amountErrorTests() {
  fixtures.values_tests
    .filter((obj) => obj.type === "Amount")
    .forEach((f) => {
      // We only want these with errors
      if (!f.error) {
        return;
      }
      const testName =
        `${JSON.stringify(f.test_json)}\n\tis invalid ` + `because: ${f.error}`;
      it(testName, () => {
        expect(() => {
          Amount.from(f.test_json);
          JSON.stringify(f.test_json);
        }).toThrow();
      });
    });
}

describe('Amount', function () {
  it('can be parsed from', function () {
    expect(Amount.from('1000000') instanceof Amount).toBe(true)
    expect(Amount.from('1000000').toJSON()).toEqual('1000000')
describe("Amount", function () {
  it("can be parsed from", function () {
    expect(Amount.from("1000000") instanceof Amount).toBe(true);
    expect(Amount.from("1000000").toJSON()).toEqual("1000000");
    const fixture = {
      value: '1',
      issuer: '0000000000000000000000000000000000000000',
      currency: 'USD'
    }
    const amt = Amount.from(fixture)
      value: "1",
      issuer: "0000000000000000000000000000000000000000",
      currency: "USD",
    };
    const amt = Amount.from(fixture);
    const rewritten = {
      value: '1',
      issuer: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp',
      currency: 'USD'
    }
    expect(amt.toJSON()).toEqual(rewritten)
  })
  amountErrorTests()
})
      value: "1",
      issuer: "rrrrrrrrrrrrrrrrrrrrrhoLvTp",
      currency: "USD",
    };
    expect(amt.toJSON()).toEqual(rewritten);
  });
  amountErrorTests();
});

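Because these specs are plain .test.js files, they are now covered both by the new lint glob and by Jest's default test matcher. As a quick check, one could run a single spec directly with `npx jest test/amount.test.js` (the real file name is not visible in this view, so that path is only illustrative), or lint just the test tree with `npx eslint test --ext .ts --ext .test.js`.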
@@ -1,44 +1,47 @@
|
||||
const fixtures = require('./fixtures/codec-fixtures.json')
|
||||
const { decode, encode, decodeLedgerData } = require('../dist')
|
||||
const fixtures = require("./fixtures/codec-fixtures.json");
|
||||
const { decode, encode, decodeLedgerData } = require("../dist");
|
||||
|
||||
function json (object) {
|
||||
return JSON.stringify(object)
|
||||
function json(object) {
|
||||
return JSON.stringify(object);
|
||||
}
|
||||
|
||||
function truncateForDisplay (longStr) {
|
||||
return longStr.slice(0, 10) + '...' + longStr.slice(-10)
|
||||
function truncateForDisplay(longStr) {
|
||||
return `${longStr.slice(0, 10)} ... ${longStr.slice(-10)}`;
|
||||
}
|
||||
|
||||
describe('ripple-binary-codec', function () {
|
||||
function makeSuite (name, entries) {
|
||||
describe("ripple-binary-codec", function () {
|
||||
function makeSuite(name, entries) {
|
||||
describe(name, function () {
|
||||
entries.forEach((t, testN) => {
|
||||
// eslint-disable-next-line max-len
|
||||
test(`${name}[${testN}] can encode ${truncateForDisplay(json(t.json))} to ${truncateForDisplay(t.binary)}`,
|
||||
() => {
|
||||
expect(encode(t.json)).toEqual(t.binary)
|
||||
})
|
||||
test(`${name}[${testN}] can encode ${truncateForDisplay(
|
||||
json(t.json)
|
||||
)} to ${truncateForDisplay(t.binary)}`, () => {
|
||||
expect(encode(t.json)).toEqual(t.binary);
|
||||
});
|
||||
// eslint-disable-next-line max-len
|
||||
test(`${name}[${testN}] can decode ${truncateForDisplay(t.binary)} to ${truncateForDisplay(json(t.json))}`,
|
||||
() => {
|
||||
const decoded = decode(t.binary)
|
||||
expect(decoded).toEqual(t.json)
|
||||
})
|
||||
})
|
||||
})
|
||||
test(`${name}[${testN}] can decode ${truncateForDisplay(
|
||||
t.binary
|
||||
)} to ${truncateForDisplay(json(t.json))}`, () => {
|
||||
const decoded = decode(t.binary);
|
||||
expect(decoded).toEqual(t.json);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
makeSuite('transactions', fixtures.transactions)
|
||||
makeSuite('accountState', fixtures.accountState)
|
||||
makeSuite("transactions", fixtures.transactions);
|
||||
makeSuite("accountState", fixtures.accountState);
|
||||
|
||||
describe('ledgerData', function () {
|
||||
describe("ledgerData", function () {
|
||||
if (fixtures.ledgerData) {
|
||||
fixtures.ledgerData.forEach((t, testN) => {
|
||||
test(`ledgerData[${testN}] can decode ${t.binary} to ${json(t.json)}`,
|
||||
() => {
|
||||
const decoded = decodeLedgerData(t.binary)
|
||||
expect(t.json).toEqual(decoded)
|
||||
})
|
||||
})
|
||||
test(`ledgerData[${testN}] can decode ${t.binary} to ${json(
|
||||
t.json
|
||||
)}`, () => {
|
||||
const decoded = decodeLedgerData(t.binary);
|
||||
expect(t.json).toEqual(decoded);
|
||||
});
|
||||
});
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,75 +1,74 @@
|
||||
/* eslint-disable func-style */
|
||||
|
||||
const { coreTypes } = require('../dist/types')
|
||||
const Decimal = require('decimal.js')
|
||||
const { coreTypes } = require("../dist/types");
|
||||
const Decimal = require("decimal.js");
|
||||
|
||||
const _ = require('lodash')
|
||||
const { encodeAccountID } = require('ripple-address-codec')
|
||||
const { binary } = require('../dist/coretypes')
|
||||
const { Amount, Hash160 } = coreTypes
|
||||
const { makeParser, readJSON } = binary
|
||||
const { Field, TransactionType } = require('./../dist/enums')
|
||||
const { parseHexOnly, hexOnly, loadFixture } = require('./utils')
|
||||
const fixtures = loadFixture('data-driven-tests.json')
|
||||
const { BytesList } = require('../dist/serdes/binary-serializer')
|
||||
const { encodeAccountID } = require("ripple-address-codec");
|
||||
const { binary } = require("../dist/coretypes");
|
||||
const { Amount, Hash160 } = coreTypes;
|
||||
const { makeParser, readJSON } = binary;
|
||||
const { Field, TransactionType } = require("./../dist/enums");
|
||||
const { parseHexOnly, hexOnly, loadFixture } = require("./utils");
|
||||
const fixtures = loadFixture("data-driven-tests.json");
|
||||
const { BytesList } = require("../dist/serdes/binary-serializer");
|
||||
|
||||
const __ = hexOnly
|
||||
function unused () {}
|
||||
function toJSON (v) {
|
||||
return v.toJSON ? v.toJSON() : v
|
||||
const __ = hexOnly;
|
||||
function toJSON(v) {
|
||||
return v.toJSON ? v.toJSON() : v;
|
||||
}
|
||||
|
||||
function assertEqualAmountJSON (actual, expected) {
|
||||
expect((typeof actual) === (typeof expected)).toBe(true)
|
||||
if ((typeof actual) === 'string') {
|
||||
expect(actual).toEqual(expected)
|
||||
return
|
||||
function assertEqualAmountJSON(actual, expected) {
|
||||
expect(typeof actual === typeof expected).toBe(true);
|
||||
if (typeof actual === "string") {
|
||||
expect(actual).toEqual(expected);
|
||||
return;
|
||||
}
|
||||
expect(actual.currency).toEqual(expected.currency)
|
||||
expect(actual.issuer).toEqual(expected.issuer)
|
||||
expect(actual.value === expected.value ||
|
||||
new Decimal(actual.value).equals(
|
||||
new Decimal(expected.value))).toBe(true)
|
||||
expect(actual.currency).toEqual(expected.currency);
|
||||
expect(actual.issuer).toEqual(expected.issuer);
|
||||
expect(
|
||||
actual.value === expected.value ||
|
||||
new Decimal(actual.value).equals(new Decimal(expected.value))
|
||||
).toBe(true);
|
||||
}
|
||||
|
||||
function basicApiTests () {
|
||||
const bytes = parseHexOnly('00,01020304,0506', Uint8Array)
|
||||
test('can read slices of bytes', () => {
|
||||
const parser = makeParser(bytes)
|
||||
expect(parser.bytes instanceof Buffer).toBe(true)
|
||||
const read1 = parser.read(1)
|
||||
expect(read1 instanceof Buffer).toBe(true)
|
||||
expect(read1).toEqual(Buffer.from([0]))
|
||||
expect(parser.read(4)).toEqual(Buffer.from([1, 2, 3, 4]))
|
||||
expect(parser.read(2)).toEqual(Buffer.from([5, 6]))
|
||||
expect(() => parser.read(1)).toThrow()
|
||||
})
|
||||
test('can read a Uint32 at full', () => {
|
||||
const parser = makeParser('FFFFFFFF')
|
||||
expect(parser.readUInt32()).toEqual(0xFFFFFFFF)
|
||||
})
|
||||
function basicApiTests() {
|
||||
const bytes = parseHexOnly("00,01020304,0506", Uint8Array);
|
||||
test("can read slices of bytes", () => {
|
||||
const parser = makeParser(bytes);
|
||||
expect(parser.bytes instanceof Buffer).toBe(true);
|
||||
const read1 = parser.read(1);
|
||||
expect(read1 instanceof Buffer).toBe(true);
|
||||
expect(read1).toEqual(Buffer.from([0]));
|
||||
expect(parser.read(4)).toEqual(Buffer.from([1, 2, 3, 4]));
|
||||
expect(parser.read(2)).toEqual(Buffer.from([5, 6]));
|
||||
expect(() => parser.read(1)).toThrow();
|
||||
});
|
||||
test("can read a Uint32 at full", () => {
|
||||
const parser = makeParser("FFFFFFFF");
|
||||
expect(parser.readUInt32()).toEqual(0xffffffff);
|
||||
});
|
||||
}
|
||||
|
||||
function transactionParsingTests () {
|
||||
function transactionParsingTests() {
|
||||
const transaction = {
|
||||
json: {
|
||||
Account: 'raD5qJMAShLeHZXf9wjUmo6vRK4arj9cF3',
|
||||
Fee: '10',
|
||||
Account: "raD5qJMAShLeHZXf9wjUmo6vRK4arj9cF3",
|
||||
Fee: "10",
|
||||
Flags: 0,
|
||||
Sequence: 103929,
|
||||
SigningPubKey:
|
||||
'028472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F418D6A7166',
|
||||
"028472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F418D6A7166",
|
||||
TakerGets: {
|
||||
currency: 'ILS',
|
||||
issuer: 'rNPRNzBB92BVpAhhZr4iXDTveCgV5Pofm9',
|
||||
value: '1694.768'
|
||||
currency: "ILS",
|
||||
issuer: "rNPRNzBB92BVpAhhZr4iXDTveCgV5Pofm9",
|
||||
value: "1694.768",
|
||||
},
|
||||
TakerPays: '98957503520',
|
||||
TransactionType: 'OfferCreate',
|
||||
TakerPays: "98957503520",
|
||||
TransactionType: "OfferCreate",
|
||||
TxnSignature: __(`
|
||||
304502202ABE08D5E78D1E74A4C18F2714F64E87B8BD57444AF
|
||||
A5733109EB3C077077520022100DB335EE97386E4C0591CAC02
|
||||
4D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C`)
|
||||
4D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C`),
|
||||
},
|
||||
binary: __(`
|
||||
120007220000000024000195F964400000170A53AC2065D5460561E
|
||||
@@ -79,207 +78,212 @@ function transactionParsingTests () {
|
||||
418D6A71667447304502202ABE08D5E78D1E74A4C18F2714F64E87B
|
||||
8BD57444AFA5733109EB3C077077520022100DB335EE97386E4C059
|
||||
1CAC024D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C811439408
|
||||
A69F0895E62149CFCC006FB89FA7D1E6E5D`)
|
||||
}
|
||||
A69F0895E62149CFCC006FB89FA7D1E6E5D`),
|
||||
};
|
||||
|
||||
const tx_json = transaction.json
|
||||
const tx_json = transaction.json;
|
||||
// These tests are basically development logs
|
||||
|
||||
test('can be done with low level apis', () => {
|
||||
const parser = makeParser(transaction.binary)
|
||||
test("can be done with low level apis", () => {
|
||||
const parser = makeParser(transaction.binary);
|
||||
|
||||
expect(parser.readField()).toEqual(Field.TransactionType)
|
||||
expect(parser.readUInt16()).toEqual(7)
|
||||
expect(parser.readField()).toEqual(Field.Flags)
|
||||
expect(parser.readUInt32()).toEqual(0)
|
||||
expect(parser.readField()).toEqual(Field.Sequence)
|
||||
expect(parser.readUInt32()).toEqual(103929)
|
||||
expect(parser.readField()).toEqual(Field.TakerPays)
|
||||
parser.read(8)
|
||||
expect(parser.readField()).toEqual(Field.TakerGets)
|
||||
expect(parser.readField()).toEqual(Field.TransactionType);
|
||||
expect(parser.readUInt16()).toEqual(7);
|
||||
expect(parser.readField()).toEqual(Field.Flags);
|
||||
expect(parser.readUInt32()).toEqual(0);
|
||||
expect(parser.readField()).toEqual(Field.Sequence);
|
||||
expect(parser.readUInt32()).toEqual(103929);
|
||||
expect(parser.readField()).toEqual(Field.TakerPays);
|
||||
parser.read(8);
|
||||
expect(parser.readField()).toEqual(Field.TakerGets);
|
||||
// amount value
|
||||
expect(parser.read(8)).not.toBe([])
|
||||
expect(parser.read(8)).not.toBe([]);
|
||||
// amount currency
|
||||
expect(Hash160.fromParser(parser)).not.toBe([])
|
||||
expect(encodeAccountID(parser.read(20))).toEqual(tx_json.TakerGets.issuer)
|
||||
expect(parser.readField()).toEqual(Field.Fee)
|
||||
expect(parser.read(8)).not.toEqual([])
|
||||
expect(parser.readField()).toEqual(Field.SigningPubKey)
|
||||
expect(parser.readVariableLengthLength()).toBe(33)
|
||||
expect(parser.read(33).toString('hex').toUpperCase()).toEqual(tx_json.SigningPubKey)
|
||||
expect(parser.readField()).toEqual(Field.TxnSignature)
|
||||
expect(parser.readVariableLength().toString('hex').toUpperCase()).toEqual(tx_json.TxnSignature)
|
||||
expect(parser.readField()).toEqual(Field.Account)
|
||||
expect(encodeAccountID(parser.readVariableLength())).toEqual(tx_json.Account)
|
||||
expect(parser.end()).toBe(true)
|
||||
})
|
||||
expect(Hash160.fromParser(parser)).not.toBe([]);
|
||||
expect(encodeAccountID(parser.read(20))).toEqual(tx_json.TakerGets.issuer);
|
||||
expect(parser.readField()).toEqual(Field.Fee);
|
||||
expect(parser.read(8)).not.toEqual([]);
|
||||
expect(parser.readField()).toEqual(Field.SigningPubKey);
|
||||
expect(parser.readVariableLengthLength()).toBe(33);
|
||||
expect(parser.read(33).toString("hex").toUpperCase()).toEqual(
|
||||
tx_json.SigningPubKey
|
||||
);
|
||||
expect(parser.readField()).toEqual(Field.TxnSignature);
|
||||
expect(parser.readVariableLength().toString("hex").toUpperCase()).toEqual(
|
||||
tx_json.TxnSignature
|
||||
);
|
||||
expect(parser.readField()).toEqual(Field.Account);
|
||||
expect(encodeAccountID(parser.readVariableLength())).toEqual(
|
||||
tx_json.Account
|
||||
);
|
||||
expect(parser.end()).toBe(true);
|
||||
});
|
||||
|
||||
test('can be done with high level apis', () => {
|
||||
const parser = makeParser(transaction.binary)
|
||||
function readField () {
|
||||
return parser.readFieldAndValue()
|
||||
test("can be done with high level apis", () => {
|
||||
const parser = makeParser(transaction.binary);
|
||||
function readField() {
|
||||
return parser.readFieldAndValue();
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.TransactionType)
|
||||
expect(value).toEqual(TransactionType.OfferCreate)
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.TransactionType);
|
||||
expect(value).toEqual(TransactionType.OfferCreate);
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.Flags)
|
||||
expect(value.valueOf()).toEqual(0)
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.Flags);
|
||||
expect(value.valueOf()).toEqual(0);
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.Sequence)
|
||||
expect(value.valueOf()).toEqual(103929)
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.Sequence);
|
||||
expect(value.valueOf()).toEqual(103929);
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.TakerPays)
|
||||
expect(value.isNative()).toEqual(true)
|
||||
expect(value.toJSON()).toEqual('98957503520')
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.TakerPays);
|
||||
expect(value.isNative()).toEqual(true);
|
||||
expect(value.toJSON()).toEqual("98957503520");
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.TakerGets)
|
||||
expect(value.isNative()).toEqual(false)
|
||||
expect(value.toJSON().issuer).toEqual(tx_json.TakerGets.issuer)
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.TakerGets);
|
||||
expect(value.isNative()).toEqual(false);
|
||||
expect(value.toJSON().issuer).toEqual(tx_json.TakerGets.issuer);
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.Fee)
|
||||
expect(value.isNative()).toEqual(true)
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.Fee);
|
||||
expect(value.isNative()).toEqual(true);
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.SigningPubKey)
|
||||
expect(value.toJSON()).toEqual(tx_json.SigningPubKey)
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.SigningPubKey);
|
||||
expect(value.toJSON()).toEqual(tx_json.SigningPubKey);
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.TxnSignature)
|
||||
expect(value.toJSON()).toEqual(tx_json.TxnSignature)
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.TxnSignature);
|
||||
expect(value.toJSON()).toEqual(tx_json.TxnSignature);
|
||||
}
|
||||
{
|
||||
const [field, value] = readField()
|
||||
expect(field).toEqual(Field.Account)
|
||||
expect(value.toJSON()).toEqual(tx_json.Account)
|
||||
const [field, value] = readField();
|
||||
expect(field).toEqual(Field.Account);
|
||||
expect(value.toJSON()).toEqual(tx_json.Account);
|
||||
}
|
||||
expect(parser.end()).toBe(true)
|
||||
})
|
||||
expect(parser.end()).toBe(true);
|
||||
});
|
||||
|
||||
test('can be done with higher level apis', () => {
|
||||
const parser = makeParser(transaction.binary)
|
||||
const jsonFromBinary = readJSON(parser)
|
||||
expect(jsonFromBinary).toEqual(tx_json)
|
||||
})
|
||||
test("can be done with higher level apis", () => {
|
||||
const parser = makeParser(transaction.binary);
|
||||
const jsonFromBinary = readJSON(parser);
|
||||
expect(jsonFromBinary).toEqual(tx_json);
|
||||
});
|
||||
|
||||
test('readJSON (binary.decode) does not return STObject ', () => {
|
||||
const parser = makeParser(transaction.binary)
|
||||
const jsonFromBinary = readJSON(parser)
|
||||
expect(jsonFromBinary instanceof coreTypes.STObject).toBe(false)
|
||||
test("readJSON (binary.decode) does not return STObject ", () => {
|
||||
const parser = makeParser(transaction.binary);
|
||||
const jsonFromBinary = readJSON(parser);
|
||||
expect(jsonFromBinary instanceof coreTypes.STObject).toBe(false);
|
||||
expect(jsonFromBinary instanceof Object).toBe(true);
|
||||
expect(jsonFromBinary.prototype).toBe(undefined)
|
||||
})
|
||||
expect(jsonFromBinary.prototype).toBe(undefined);
|
||||
});
|
||||
}
|
||||
|
||||
function amountParsingTests () {
|
||||
fixtures.values_tests.filter(obj => obj.type === 'Amount').forEach((f, i) => {
|
||||
if (f.error) {
|
||||
return
|
||||
}
|
||||
const parser = makeParser(f.expected_hex)
|
||||
const testName =
|
||||
`values_tests[${i}] parses ${f.expected_hex.slice(0, 16)}...
|
||||
as ${JSON.stringify(f.test_json)}`
|
||||
test(testName, () => {
|
||||
const value = parser.readType(Amount)
|
||||
// May not actually be in canonical form. The fixtures are to be used
|
||||
// also for json -> binary;
|
||||
const json = toJSON(value)
|
||||
assertEqualAmountJSON(json, (f.test_json))
|
||||
if (f.exponent) {
|
||||
const exponent = new Decimal(json.value);
|
||||
expect(exponent.e-15).toEqual(f.exponent)
|
||||
function amountParsingTests() {
|
||||
fixtures.values_tests
|
||||
.filter((obj) => obj.type === "Amount")
|
||||
.forEach((f, i) => {
|
||||
if (f.error) {
|
||||
return;
|
||||
}
|
||||
})
|
||||
})
|
||||
const parser = makeParser(f.expected_hex);
|
||||
const testName = `values_tests[${i}] parses ${f.expected_hex.slice(
|
||||
0,
|
||||
16
|
||||
)}...
|
||||
as ${JSON.stringify(f.test_json)}`;
|
||||
test(testName, () => {
|
||||
const value = parser.readType(Amount);
|
||||
// May not actually be in canonical form. The fixtures are to be used
|
||||
// also for json -> binary;
|
||||
const json = toJSON(value);
|
||||
assertEqualAmountJSON(json, f.test_json);
|
||||
if (f.exponent) {
|
||||
const exponent = new Decimal(json.value);
|
||||
expect(exponent.e - 15).toEqual(f.exponent);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function fieldParsingTests () {
|
||||
function fieldParsingTests() {
|
||||
fixtures.fields_tests.forEach((f, i) => {
|
||||
const parser = makeParser(f.expected_hex)
|
||||
const parser = makeParser(f.expected_hex);
|
||||
test(`fields[${i}]: parses ${f.expected_hex} as ${f.name}`, () => {
|
||||
const field = parser.readField()
|
||||
expect(field.name).toEqual(f.name)
|
||||
expect(field.type.name).toEqual(f.type_name)
|
||||
})
|
||||
})
|
||||
const field = parser.readField();
|
||||
expect(field.name).toEqual(f.name);
|
||||
expect(field.type.name).toEqual(f.type_name);
|
||||
});
|
||||
});
|
||||
test("Field throws when type code out of range", () => {
|
||||
const parser = makeParser("0101");
|
||||
expect(() => parser.readField()).toThrow(new Error("Cannot read FieldOrdinal, type_code out of range"));
|
||||
})
|
||||
expect(() => parser.readField()).toThrow(
|
||||
new Error("Cannot read FieldOrdinal, type_code out of range")
|
||||
);
|
||||
});
|
||||
test("Field throws when field code out of range", () => {
|
||||
const parser = makeParser("1001");
|
||||
expect(() => parser.readFieldOrdinal()).toThrowError(new Error("Cannot read FieldOrdinal, field_code out of range"))
|
||||
})
|
||||
expect(() => parser.readFieldOrdinal()).toThrowError(
|
||||
new Error("Cannot read FieldOrdinal, field_code out of range")
|
||||
);
|
||||
});
|
||||
test("Field throws when both type and field code out of range", () => {
|
||||
const parser = makeParser("000101");
|
||||
expect(() => parser.readFieldOrdinal()).toThrowError(new Error("Cannot read FieldOrdinal, type_code out of range"))
|
||||
})
|
||||
expect(() => parser.readFieldOrdinal()).toThrowError(
|
||||
new Error("Cannot read FieldOrdinal, type_code out of range")
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
function assertRecyclable (json, forField) {
|
||||
const Type = forField.associatedType
|
||||
const recycled = Type.from(json).toJSON()
|
||||
expect(recycled).toEqual(json)
|
||||
const sink = new BytesList()
|
||||
Type.from(recycled).toBytesSink(sink)
|
||||
const recycledAgain = makeParser(sink.toHex())
|
||||
.readType(Type)
|
||||
.toJSON()
|
||||
expect(recycledAgain).toEqual(json)
|
||||
function assertRecyclable(json, forField) {
|
||||
const Type = forField.associatedType;
|
||||
const recycled = Type.from(json).toJSON();
|
||||
expect(recycled).toEqual(json);
|
||||
const sink = new BytesList();
|
||||
Type.from(recycled).toBytesSink(sink);
|
||||
const recycledAgain = makeParser(sink.toHex()).readType(Type).toJSON();
|
||||
expect(recycledAgain).toEqual(json);
|
||||
}
|
||||
|
||||
function nestedObjectTests () {
|
||||
function disabled (i) {
|
||||
unused(i)
|
||||
return false
|
||||
}
|
||||
|
||||
function nestedObjectTests() {
|
||||
fixtures.whole_objects.forEach((f, i) => {
|
||||
if (disabled(i)) {
|
||||
return
|
||||
}
|
||||
|
||||
test(`whole_objects[${i}]: can parse blob into
|
||||
${JSON.stringify(f.tx_json)}`,
|
||||
/* */ () => {
|
||||
const parser = makeParser(f.blob_with_no_signing)
|
||||
let ix = 0
|
||||
${JSON.stringify(
|
||||
f.tx_json
|
||||
)}`, /* */ () => {
|
||||
const parser = makeParser(f.blob_with_no_signing);
|
||||
let ix = 0;
|
||||
while (!parser.end()) {
|
||||
const [field, value] = parser.readFieldAndValue()
|
||||
const expected = f.fields[ix]
|
||||
const expectedJSON = expected[1].json
|
||||
const expectedField = expected[0]
|
||||
const actual = toJSON(value)
|
||||
const [field, value] = parser.readFieldAndValue();
|
||||
const expected = f.fields[ix];
|
||||
const expectedJSON = expected[1].json;
|
||||
const expectedField = expected[0];
|
||||
const actual = toJSON(value);
|
||||
|
||||
try {
|
||||
expect(actual).toEqual(expectedJSON)
|
||||
expect(actual).toEqual(expectedJSON);
|
||||
} catch (e) {
|
||||
throw new Error(`${e} ${field} a: ${actual} e: ${expectedJSON}`)
|
||||
throw new Error(`${e} ${field} a: ${actual} e: ${expectedJSON}`);
|
||||
}
|
||||
expect(field.name).toEqual(expectedField)
|
||||
assertRecyclable(actual, field)
|
||||
ix++
|
||||
expect(field.name).toEqual(expectedField);
|
||||
assertRecyclable(actual, field);
|
||||
ix++;
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function pathSetBinaryTests () {
|
||||
function pathSetBinaryTests() {
|
||||
const bytes = __(
|
||||
`1200002200000000240000002E2E00004BF161D4C71AFD498D00000000000000
|
||||
0000000000000055534400000000000A20B3C85F482532A9578DBB3950B85CA0
|
||||
@@ -309,80 +313,85 @@ function pathSetBinaryTests () {
|
||||
69E6DCC940CA48D82337AD000000000000000000000000425443000000000057
|
||||
180C769B66D942EE69E6DCC940CA48D82337AD10000000000000000000000000
|
||||
58525000000000003000000000000000000000000055534400000000000A20B3
|
||||
C85F482532A9578DBB3950B85CA06594D100`)
|
||||
C85F482532A9578DBB3950B85CA06594D100`
|
||||
);
|
||||
|
||||
const expectedJSON =
|
||||
[[{
|
||||
account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
|
||||
currency: 'BTC',
|
||||
issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'
|
||||
},
|
||||
{
|
||||
account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
|
||||
currency: 'BTC',
|
||||
issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo'
|
||||
},
|
||||
{
|
||||
account: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
|
||||
currency: 'BTC',
|
||||
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'
|
||||
},
|
||||
{
|
||||
currency: 'USD',
|
||||
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'
|
||||
}],
|
||||
[{
|
||||
account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
|
||||
currency: 'BTC',
|
||||
issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'
|
||||
},
|
||||
{
|
||||
account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
|
||||
currency: 'BTC',
|
||||
issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo'
|
||||
},
|
||||
{
|
||||
account: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi',
|
||||
currency: 'BTC',
|
||||
issuer: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi'
|
||||
},
|
||||
{
|
||||
currency: 'USD',
|
||||
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'
|
||||
}],
|
||||
[{
|
||||
account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
|
||||
currency: 'BTC',
|
||||
issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'
|
||||
},
|
||||
{
|
||||
account: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn',
|
||||
currency: 'BTC',
|
||||
issuer: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn'
|
||||
},
|
||||
{ currency: '0000000000000000000000005852500000000000' },
|
||||
{
|
||||
currency: 'USD',
|
||||
issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'
|
||||
}]]
|
||||
const expectedJSON = [
|
||||
[
|
||||
{
|
||||
account: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
|
||||
currency: "BTC",
|
||||
issuer: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
|
||||
},
|
||||
{
|
||||
account: "rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo",
|
||||
currency: "BTC",
|
||||
issuer: "rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo",
|
||||
},
|
||||
{
|
||||
account: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
|
||||
currency: "BTC",
|
||||
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
|
||||
},
|
||||
{
|
||||
currency: "USD",
|
||||
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
account: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
|
||||
currency: "BTC",
|
||||
issuer: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
|
||||
},
|
||||
{
|
||||
account: "rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo",
|
||||
currency: "BTC",
|
||||
issuer: "rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo",
|
||||
},
|
||||
{
|
||||
account: "rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi",
|
||||
currency: "BTC",
|
||||
issuer: "rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi",
|
||||
},
|
||||
{
|
||||
currency: "USD",
|
||||
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
account: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
|
||||
currency: "BTC",
|
||||
issuer: "r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K",
|
||||
},
|
||||
{
|
||||
account: "r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn",
|
||||
currency: "BTC",
|
||||
issuer: "r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn",
|
||||
},
|
||||
{ currency: "0000000000000000000000005852500000000000" },
|
||||
{
|
||||
currency: "USD",
|
||||
issuer: "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B",
|
||||
},
|
||||
],
|
||||
];
|
||||
|
||||
test('works with long paths', () => {
|
||||
const parser = makeParser(bytes)
|
||||
const txn = readJSON(parser)
|
||||
expect(txn.Paths).toEqual(expectedJSON)
|
||||
test("works with long paths", () => {
|
||||
const parser = makeParser(bytes);
|
||||
const txn = readJSON(parser);
|
||||
expect(txn.Paths).toEqual(expectedJSON);
|
||||
// TODO: this should go elsewhere
|
||||
expect(
|
||||
coreTypes.PathSet.from(txn.Paths).toJSON()).toEqual(
|
||||
expectedJSON
|
||||
)
|
||||
})
|
||||
expect(coreTypes.PathSet.from(txn.Paths).toJSON()).toEqual(expectedJSON);
|
||||
});
|
||||
}
|
||||
|
||||
describe('Binary Parser', function() {
|
||||
describe('pathSetBinaryTests', pathSetBinaryTests);
|
||||
describe('nestedObjectTests', nestedObjectTests);
|
||||
describe('fieldParsingTests', fieldParsingTests);
|
||||
describe('amountParsingTests', amountParsingTests);
|
||||
describe('transactionParsingTests', transactionParsingTests);
|
||||
describe('basicApiTests', basicApiTests);
|
||||
describe("Binary Parser", function () {
|
||||
describe("pathSetBinaryTests", () => pathSetBinaryTests());
|
||||
describe("nestedObjectTests", () => nestedObjectTests());
|
||||
describe("fieldParsingTests", () => fieldParsingTests());
|
||||
describe("amountParsingTests", () => amountParsingTests());
|
||||
describe("transactionParsingTests", () => transactionParsingTests());
|
||||
describe("basicApiTests", () => basicApiTests());
|
||||
});
|
||||
@@ -1,194 +1,191 @@
|
||||
/* eslint-disable func-style */
|
||||
|
||||
const { binary } = require('../dist/coretypes')
|
||||
const { encode, decode } = require('../dist')
|
||||
const { makeParser, BytesList, BinarySerializer } = binary
|
||||
const { coreTypes } = require('../dist/types')
|
||||
const { UInt8, UInt16, UInt32, UInt64, STObject } = coreTypes
|
||||
const { binary } = require("../dist/coretypes");
|
||||
const { encode, decode } = require("../dist");
|
||||
const { makeParser, BytesList, BinarySerializer } = binary;
|
||||
const { coreTypes } = require("../dist/types");
|
||||
const { UInt8, UInt16, UInt32, UInt64, STObject } = coreTypes;
|
||||
|
||||
const { loadFixture } = require('./utils')
|
||||
const fixtures = loadFixture('data-driven-tests.json')
|
||||
const deliverMinTx = require('./fixtures/delivermin-tx.json')
|
||||
const deliverMinTxBinary = require('./fixtures/delivermin-tx-binary.json')
|
||||
const { loadFixture } = require("./utils");
|
||||
const fixtures = loadFixture("data-driven-tests.json");
|
||||
const deliverMinTx = require("./fixtures/delivermin-tx.json");
|
||||
const deliverMinTxBinary = require("./fixtures/delivermin-tx-binary.json");
|
||||
const SignerListSet = {
|
||||
tx: require('./fixtures/signerlistset-tx.json'),
|
||||
binary: require('./fixtures/signerlistset-tx-binary.json'),
|
||||
meta: require('./fixtures/signerlistset-tx-meta-binary.json')
|
||||
}
|
||||
tx: require("./fixtures/signerlistset-tx.json"),
|
||||
binary: require("./fixtures/signerlistset-tx-binary.json"),
|
||||
meta: require("./fixtures/signerlistset-tx-meta-binary.json"),
|
||||
};
|
||||
const DepositPreauth = {
|
||||
tx: require('./fixtures/deposit-preauth-tx.json'),
|
||||
binary: require('./fixtures/deposit-preauth-tx-binary.json'),
|
||||
meta: require('./fixtures/deposit-preauth-tx-meta-binary.json')
|
||||
}
|
||||
tx: require("./fixtures/deposit-preauth-tx.json"),
|
||||
binary: require("./fixtures/deposit-preauth-tx-binary.json"),
|
||||
meta: require("./fixtures/deposit-preauth-tx-meta-binary.json"),
|
||||
};
|
||||
const Escrow = {
|
||||
create: {
|
||||
tx: require('./fixtures/escrow-create-tx.json'),
|
||||
binary: require('./fixtures/escrow-create-binary.json')
|
||||
tx: require("./fixtures/escrow-create-tx.json"),
|
||||
binary: require("./fixtures/escrow-create-binary.json"),
|
||||
},
|
||||
finish: {
|
||||
tx: require('./fixtures/escrow-finish-tx.json'),
|
||||
binary: require('./fixtures/escrow-finish-binary.json'),
|
||||
meta: require('./fixtures/escrow-finish-meta-binary.json')
|
||||
tx: require("./fixtures/escrow-finish-tx.json"),
|
||||
binary: require("./fixtures/escrow-finish-binary.json"),
|
||||
meta: require("./fixtures/escrow-finish-meta-binary.json"),
|
||||
},
|
||||
cancel: {
|
||||
tx: require('./fixtures/escrow-cancel-tx.json'),
|
||||
binary: require('./fixtures/escrow-cancel-binary.json')
|
||||
}
|
||||
}
|
||||
tx: require("./fixtures/escrow-cancel-tx.json"),
|
||||
binary: require("./fixtures/escrow-cancel-binary.json"),
|
||||
},
|
||||
};
|
||||
const PaymentChannel = {
|
||||
create: {
|
||||
tx: require('./fixtures/payment-channel-create-tx.json'),
|
||||
binary: require('./fixtures/payment-channel-create-binary.json')
|
||||
tx: require("./fixtures/payment-channel-create-tx.json"),
|
||||
binary: require("./fixtures/payment-channel-create-binary.json"),
|
||||
},
|
||||
fund: {
|
||||
tx: require('./fixtures/payment-channel-fund-tx.json'),
|
||||
binary: require('./fixtures/payment-channel-fund-binary.json')
|
||||
tx: require("./fixtures/payment-channel-fund-tx.json"),
|
||||
binary: require("./fixtures/payment-channel-fund-binary.json"),
|
||||
},
|
||||
claim: {
|
||||
tx: require('./fixtures/payment-channel-claim-tx.json'),
|
||||
binary: require('./fixtures/payment-channel-claim-binary.json')
|
||||
}
|
||||
tx: require("./fixtures/payment-channel-claim-tx.json"),
|
||||
binary: require("./fixtures/payment-channel-claim-binary.json"),
|
||||
},
|
||||
};
|
||||
|
||||
const NegativeUNL = require("./fixtures/negative-unl.json");
|
||||
|
||||
function bytesListTest() {
|
||||
const list = new BytesList()
|
||||
.put(Buffer.from([0]))
|
||||
.put(Buffer.from([2, 3]))
|
||||
.put(Buffer.from([4, 5]));
|
||||
test("is an Array<Buffer>", function () {
|
||||
expect(Array.isArray(list.bytesArray)).toBe(true);
|
||||
expect(list.bytesArray[0] instanceof Buffer).toBe(true);
|
||||
});
|
||||
test("keeps track of the length itself", function () {
|
||||
expect(list.getLength()).toBe(5);
|
||||
});
|
||||
test("can join all arrays into one via toBytes", function () {
|
||||
const joined = list.toBytes();
|
||||
expect(joined).toHaveLength(5);
|
||||
expect(joined).toEqual(Buffer.from([0, 2, 3, 4, 5]));
|
||||
});
|
||||
}
|
||||
|
||||
const NegativeUNL = require('./fixtures/negative-unl.json');
|
||||
|
||||
function bytesListTest () {
|
||||
const list = new BytesList().put(Buffer.from([0])).put(Buffer.from([2, 3])).put(Buffer.from([4, 5]))
|
||||
test('is an Array<Buffer>', function () {
|
||||
expect(Array.isArray(list.bytesArray)).toBe(true)
|
||||
expect(list.bytesArray[0] instanceof Buffer).toBe(true)
|
||||
})
|
||||
test('keeps track of the length itself', function () {
|
||||
expect(list.getLength()).toBe(5)
|
||||
})
|
||||
test('can join all arrays into one via toBytes', function () {
|
||||
const joined = list.toBytes()
|
||||
expect(joined).toHaveLength(5)
|
||||
expect(joined).toEqual(Buffer.from([0, 2, 3, 4, 5]))
|
||||
})
|
||||
function assertRecycles(blob) {
|
||||
const parser = makeParser(blob);
|
||||
const so = parser.readType(STObject);
|
||||
const out = new BytesList();
|
||||
so.toBytesSink(out);
|
||||
const hex = out.toHex();
|
||||
expect(hex).toEqual(blob);
|
||||
expect(hex + ":").not.toEqual(blob);
|
||||
}
|
||||
|
||||
function assertRecycles (blob) {
|
||||
const parser = makeParser(blob)
|
||||
const so = parser.readType(STObject)
|
||||
const out = new BytesList()
|
||||
so.toBytesSink(out)
|
||||
const hex = out.toHex()
|
||||
expect(hex).toEqual(blob)
|
||||
expect(hex + ':').not.toEqual(blob)
|
||||
}
|
||||
|
||||
function nestedObjectTests () {
|
||||
function nestedObjectTests() {
|
||||
fixtures.whole_objects.forEach((f, i) => {
|
||||
test(`whole_objects[${i}]: can parse blob and dump out same blob`,
|
||||
/* */ () => {
|
||||
assertRecycles(f.blob_with_no_signing)
|
||||
})
|
||||
})
|
||||
/*eslint-disable jest/expect-expect*/
|
||||
test(`whole_objects[${i}]: can parse blob and dump out same blob`, () => {
|
||||
assertRecycles(f.blob_with_no_signing);
|
||||
});
|
||||
/*eslint-enable jest/expect-expect*/
|
||||
});
|
||||
}
|
||||
|
||||
function check (type, n, expected) {
|
||||
function check(type, n, expected) {
|
||||
test(`Uint${type.width * 8} serializes ${n} as ${expected}`, function () {
|
||||
const bl = new BytesList()
|
||||
const serializer = new BinarySerializer(bl)
|
||||
if (expected === 'throws') {
|
||||
expect(() => serializer.writeType(type, n)).toThrow()
|
||||
return
|
||||
const bl = new BytesList();
|
||||
const serializer = new BinarySerializer(bl);
|
||||
if (expected === "throws") {
|
||||
expect(() => serializer.writeType(type, n)).toThrow();
|
||||
return;
|
||||
}
|
||||
serializer.writeType(type, n)
|
||||
expect(bl.toBytes()).toEqual(Buffer.from(expected))
|
||||
})
|
||||
serializer.writeType(type, n);
|
||||
expect(bl.toBytes()).toEqual(Buffer.from(expected));
|
||||
});
|
||||
}
|
||||
|
||||
check(UInt8, 5, [5])
|
||||
check(UInt16, 5, [0, 5])
|
||||
check(UInt32, 5, [0, 0, 0, 5])
|
||||
check(UInt32, 0xFFFFFFFF, [255, 255, 255, 255])
|
||||
check(UInt8, 0xFEFFFFFF, 'throws')
|
||||
check(UInt16, 0xFEFFFFFF, 'throws')
|
||||
check(UInt16, 0xFEFFFFFF, 'throws')
|
||||
check(UInt64, 0xFEFFFFFF, [0, 0, 0, 0, 254, 255, 255, 255])
|
||||
check(UInt64, -1, 'throws')
|
||||
check(UInt64, 0, [0, 0, 0, 0, 0, 0, 0, 0])
|
||||
check(UInt64, 1, [0, 0, 0, 0, 0, 0, 0, 1])
|
||||
check(UInt64, BigInt(1), [0, 0, 0, 0, 0, 0, 0, 1])
|
||||
check(UInt8, 5, [5]);
|
||||
check(UInt16, 5, [0, 5]);
|
||||
check(UInt32, 5, [0, 0, 0, 5]);
|
||||
check(UInt32, 0xffffffff, [255, 255, 255, 255]);
|
||||
check(UInt8, 0xfeffffff, "throws");
|
||||
check(UInt16, 0xfeffffff, "throws");
|
||||
check(UInt16, 0xfeffffff, "throws");
|
||||
check(UInt64, 0xfeffffff, [0, 0, 0, 0, 254, 255, 255, 255]);
|
||||
check(UInt64, -1, "throws");
|
||||
check(UInt64, 0, [0, 0, 0, 0, 0, 0, 0, 0]);
|
||||
check(UInt64, 1, [0, 0, 0, 0, 0, 0, 0, 1]);
|
||||
check(UInt64, BigInt(1), [0, 0, 0, 0, 0, 0, 0, 1]);
|
||||
|
||||
// function parseLedger4320278() {
|
||||
// test('can parse object', done => {
|
||||
// const json = loadFixture('as-ledger-4320278.json');
|
||||
// json.forEach(e => {
|
||||
// assertRecycles(e.binary);
|
||||
// });
|
||||
// done();
|
||||
// });
|
||||
// }
|
||||
// parseLedger4320278();
|
||||
|
||||
function deliverMinTest () {
|
||||
test('can serialize DeliverMin', () => {
|
||||
expect(encode(deliverMinTx)).toEqual(deliverMinTxBinary)
|
||||
})
|
||||
function deliverMinTest() {
|
||||
test("can serialize DeliverMin", () => {
|
||||
expect(encode(deliverMinTx)).toEqual(deliverMinTxBinary);
|
||||
});
|
||||
}
|
||||
|
||||
function SignerListSetTest () {
|
||||
test('can serialize SignerListSet', () => {
|
||||
expect(encode(SignerListSet.tx)).toEqual(SignerListSet.binary)
|
||||
})
|
||||
test('can serialize SignerListSet metadata', () => {
|
||||
expect(encode(SignerListSet.tx.meta)).toEqual(SignerListSet.meta)
|
||||
})
|
||||
function SignerListSetTest() {
|
||||
test("can serialize SignerListSet", () => {
|
||||
expect(encode(SignerListSet.tx)).toEqual(SignerListSet.binary);
|
||||
});
|
||||
test("can serialize SignerListSet metadata", () => {
|
||||
expect(encode(SignerListSet.tx.meta)).toEqual(SignerListSet.meta);
|
||||
});
|
||||
}
|
||||
|
||||
function DepositPreauthTest () {
|
||||
test('can serialize DepositPreauth', () => {
|
||||
expect(encode(DepositPreauth.tx)).toEqual(DepositPreauth.binary)
|
||||
})
|
||||
test('can serialize DepositPreauth metadata', () => {
|
||||
expect(encode(DepositPreauth.tx.meta)).toEqual(DepositPreauth.meta)
|
||||
})
|
||||
function DepositPreauthTest() {
|
||||
test("can serialize DepositPreauth", () => {
|
||||
expect(encode(DepositPreauth.tx)).toEqual(DepositPreauth.binary);
|
||||
});
|
||||
test("can serialize DepositPreauth metadata", () => {
|
||||
expect(encode(DepositPreauth.tx.meta)).toEqual(DepositPreauth.meta);
|
||||
});
|
||||
}
|
||||
|
||||
function EscrowTest () {
|
||||
test('can serialize EscrowCreate', () => {
|
||||
expect(encode(Escrow.create.tx)).toEqual(Escrow.create.binary)
|
||||
})
|
||||
test('can serialize EscrowFinish', () => {
|
||||
expect(encode(Escrow.finish.tx)).toEqual(Escrow.finish.binary)
|
||||
expect(encode(Escrow.finish.tx.meta)).toEqual(Escrow.finish.meta)
|
||||
})
|
||||
test('can serialize EscrowCancel', () => {
|
||||
expect(encode(Escrow.cancel.tx)).toEqual(Escrow.cancel.binary)
|
||||
})
|
||||
function EscrowTest() {
|
||||
test("can serialize EscrowCreate", () => {
|
||||
expect(encode(Escrow.create.tx)).toEqual(Escrow.create.binary);
|
||||
});
|
||||
test("can serialize EscrowFinish", () => {
|
||||
expect(encode(Escrow.finish.tx)).toEqual(Escrow.finish.binary);
|
||||
expect(encode(Escrow.finish.tx.meta)).toEqual(Escrow.finish.meta);
|
||||
});
|
||||
test("can serialize EscrowCancel", () => {
|
||||
expect(encode(Escrow.cancel.tx)).toEqual(Escrow.cancel.binary);
|
||||
});
|
||||
}
|
||||
|
||||
function PaymentChannelTest () {
|
||||
test('can serialize PaymentChannelCreate', () => {
|
||||
expect(encode(PaymentChannel.create.tx)).toEqual(PaymentChannel.create.binary)
|
||||
})
|
||||
test('can serialize PaymentChannelFund', () => {
|
||||
expect(encode(PaymentChannel.fund.tx)).toEqual(PaymentChannel.fund.binary)
|
||||
})
|
||||
test('can serialize PaymentChannelClaim', () => {
|
||||
expect(encode(PaymentChannel.claim.tx)).toEqual(PaymentChannel.claim.binary)
|
||||
})
|
||||
function PaymentChannelTest() {
|
||||
test("can serialize PaymentChannelCreate", () => {
|
||||
expect(encode(PaymentChannel.create.tx)).toEqual(
|
||||
PaymentChannel.create.binary
|
||||
);
|
||||
});
|
||||
test("can serialize PaymentChannelFund", () => {
|
||||
expect(encode(PaymentChannel.fund.tx)).toEqual(PaymentChannel.fund.binary);
|
||||
});
|
||||
test("can serialize PaymentChannelClaim", () => {
|
||||
expect(encode(PaymentChannel.claim.tx)).toEqual(
|
||||
PaymentChannel.claim.binary
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function NegativeUNLTest () {
|
||||
test('can serialize NegativeUNL', () => {
|
||||
function NegativeUNLTest() {
|
||||
test("can serialize NegativeUNL", () => {
|
||||
expect(encode(NegativeUNL.tx)).toEqual(NegativeUNL.binary);
|
||||
})
|
||||
test('can deserialize NegativeUNL', () => {
|
||||
});
|
||||
test("can deserialize NegativeUNL", () => {
|
||||
expect(decode(NegativeUNL.binary)).toEqual(NegativeUNL.tx);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
describe('Binary Serialization', function() {
|
||||
describe('nestedObjectTests', nestedObjectTests);
|
||||
describe('BytesList', bytesListTest);
|
||||
describe('DeliverMin', deliverMinTest);
|
||||
describe('DepositPreauth', DepositPreauthTest);
|
||||
describe('SignerListSet', SignerListSetTest);
|
||||
describe('Escrow', EscrowTest);
|
||||
describe('PaymentChannel', PaymentChannelTest);
|
||||
describe('NegativeUNLTest', NegativeUNLTest);
|
||||
})
|
||||
describe("Binary Serialization", function () {
|
||||
describe("nestedObjectTests", () => nestedObjectTests());
|
||||
describe("BytesList", () => bytesListTest());
|
||||
describe("DeliverMin", () => deliverMinTest());
|
||||
describe("DepositPreauth", () => DepositPreauthTest());
|
||||
describe("SignerListSet", () => SignerListSetTest());
|
||||
describe("Escrow", () => EscrowTest());
|
||||
describe("PaymentChannel", () => PaymentChannelTest());
|
||||
describe("NegativeUNLTest", () => NegativeUNLTest());
|
||||
});
|
||||
|
||||
@@ -1,60 +1,68 @@
|
||||
const { coreTypes } = require('../dist/types')
|
||||
const { Hash160, Hash256, AccountID, Currency } = coreTypes
|
||||
const { coreTypes } = require("../dist/types");
|
||||
const { Hash160, Hash256, AccountID, Currency } = coreTypes;
|
||||
|
||||
describe('Hash160', function () {
|
||||
test('has a static width member', function () {
|
||||
expect(Hash160.width).toBe(20)
|
||||
})
|
||||
test('inherited by subclasses', function () {
|
||||
expect(AccountID.width).toBe(20)
|
||||
expect(Currency.width).toBe(20)
|
||||
})
|
||||
test('can be compared against another', function () {
|
||||
const h1 = Hash160.from('1000000000000000000000000000000000000000')
|
||||
const h2 = Hash160.from('2000000000000000000000000000000000000000')
|
||||
const h3 = Hash160.from('0000000000000000000000000000000000000003')
|
||||
expect(h1.lt(h2)).toBe(true)
|
||||
expect(h3.lt(h2)).toBe(true)
|
||||
})
|
||||
test('throws when constructed from invalid hash length', () => {
|
||||
expect(() => Hash160.from('10000000000000000000000000000000000000')).toThrow('Invalid Hash length 19')
|
||||
expect(() => Hash160.from('100000000000000000000000000000000000000000')).toThrow('Invalid Hash length 21')
|
||||
})
|
||||
})
|
||||
describe("Hash160", function () {
|
||||
test("has a static width member", function () {
|
||||
expect(Hash160.width).toBe(20);
|
||||
});
|
||||
test("inherited by subclasses", function () {
|
||||
expect(AccountID.width).toBe(20);
|
||||
expect(Currency.width).toBe(20);
|
||||
});
|
||||
test("can be compared against another", function () {
|
||||
const h1 = Hash160.from("1000000000000000000000000000000000000000");
|
||||
const h2 = Hash160.from("2000000000000000000000000000000000000000");
|
||||
const h3 = Hash160.from("0000000000000000000000000000000000000003");
|
||||
expect(h1.lt(h2)).toBe(true);
|
||||
expect(h3.lt(h2)).toBe(true);
|
||||
});
|
||||
test("throws when constructed from invalid hash length", () => {
|
||||
expect(() =>
|
||||
Hash160.from("10000000000000000000000000000000000000")
|
||||
).toThrow("Invalid Hash length 19");
|
||||
expect(() =>
|
||||
Hash160.from("100000000000000000000000000000000000000000")
|
||||
).toThrow("Invalid Hash length 21");
|
||||
});
|
||||
});
|
||||
|
||||
describe('Hash256', function () {
|
||||
test('has a static width member', function () {
|
||||
expect(Hash256.width).toBe(32)
|
||||
})
|
||||
test('has a ZERO_256 member', function () {
|
||||
expect(Hash256.ZERO_256.toJSON()).toBe('0000000000000000000000000000000000000000000000000000000000000000')
|
||||
})
|
||||
test('supports getting the nibblet values at given positions', function () {
|
||||
describe("Hash256", function () {
|
||||
test("has a static width member", function () {
|
||||
expect(Hash256.width).toBe(32);
|
||||
});
|
||||
test("has a ZERO_256 member", function () {
|
||||
expect(Hash256.ZERO_256.toJSON()).toBe(
|
||||
"0000000000000000000000000000000000000000000000000000000000000000"
|
||||
);
|
||||
});
|
||||
test("supports getting the nibblet values at given positions", function () {
|
||||
const h = Hash256.from(
|
||||
'1359BD0000000000000000000000000000000000000000000000000000000000')
|
||||
expect(h.nibblet(0)).toBe(0x1)
|
||||
expect(h.nibblet(1)).toBe(0x3)
|
||||
expect(h.nibblet(2)).toBe(0x5)
|
||||
expect(h.nibblet(3)).toBe(0x9)
|
||||
expect(h.nibblet(4)).toBe(0x0b)
|
||||
expect(h.nibblet(5)).toBe(0xd)
|
||||
})
|
||||
})
|
||||
"1359BD0000000000000000000000000000000000000000000000000000000000"
|
||||
);
|
||||
expect(h.nibblet(0)).toBe(0x1);
|
||||
expect(h.nibblet(1)).toBe(0x3);
|
||||
expect(h.nibblet(2)).toBe(0x5);
|
||||
expect(h.nibblet(3)).toBe(0x9);
|
||||
expect(h.nibblet(4)).toBe(0x0b);
|
||||
expect(h.nibblet(5)).toBe(0xd);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Currency', function () {
|
||||
test('Will have a null iso() for dodgy XRP ', function () {
|
||||
const bad = Currency.from('0000000000000000000000005852500000000000',)
|
||||
expect(bad.iso()).toBeUndefined()
|
||||
expect(bad.isNative()).toBe(false)
|
||||
})
|
||||
test('can be constructed from a Buffer', function () {
|
||||
const xrp = new Currency(Buffer.alloc(20))
|
||||
expect(xrp.iso()).toBe('XRP')
|
||||
})
|
||||
test('throws on invalid reprs', function () {
|
||||
expect(() => Currency.from(Buffer.alloc(19))).toThrow()
|
||||
expect(() => Currency.from(1)).toThrow()
|
||||
expect(() => Currency.from(
|
||||
'00000000000000000000000000000000000000m')).toThrow()
|
||||
})
|
||||
})
|
||||
describe("Currency", function () {
|
||||
test("Will have a null iso() for dodgy XRP ", function () {
|
||||
const bad = Currency.from("0000000000000000000000005852500000000000");
|
||||
expect(bad.iso()).toBeUndefined();
|
||||
expect(bad.isNative()).toBe(false);
|
||||
});
|
||||
test("can be constructed from a Buffer", function () {
|
||||
const xrp = new Currency(Buffer.alloc(20));
|
||||
expect(xrp.iso()).toBe("XRP");
|
||||
});
|
||||
test("throws on invalid reprs", function () {
|
||||
expect(() => Currency.from(Buffer.alloc(19))).toThrow();
|
||||
expect(() => Currency.from(1)).toThrow();
|
||||
expect(() =>
|
||||
Currency.from("00000000000000000000000000000000000000m")
|
||||
).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,21 +1,29 @@
|
||||
const { loadFixture } = require('./utils')
|
||||
const { transactionTreeHash, ledgerHash, accountStateHash } = require('../dist/ledger-hashes')
|
||||
const { loadFixture } = require("./utils");
|
||||
const {
|
||||
transactionTreeHash,
|
||||
ledgerHash,
|
||||
accountStateHash,
|
||||
} = require("../dist/ledger-hashes");
|
||||
|
||||
describe('Ledger Hashes', function () {
|
||||
function testFactory (ledgerFixture) {
|
||||
describe("Ledger Hashes", function () {
|
||||
function testFactory(ledgerFixture) {
|
||||
describe(`can calculate hashes for ${ledgerFixture}`, function () {
|
||||
const ledger = loadFixture(ledgerFixture)
|
||||
test('computes correct account state hash', function () {
|
||||
expect(accountStateHash(ledger.accountState).toHex()).toBe(ledger.account_hash)
|
||||
})
|
||||
test('computes correct transaction tree hash', function () {
|
||||
expect(transactionTreeHash(ledger.transactions).toHex()).toBe(ledger.transaction_hash)
|
||||
})
|
||||
test('computes correct ledger header hash', function () {
|
||||
expect(ledgerHash(ledger).toHex()).toBe(ledger.hash)
|
||||
})
|
||||
})
|
||||
const ledger = loadFixture(ledgerFixture);
|
||||
test("computes correct account state hash", function () {
|
||||
expect(accountStateHash(ledger.accountState).toHex()).toBe(
|
||||
ledger.account_hash
|
||||
);
|
||||
});
|
||||
test("computes correct transaction tree hash", function () {
|
||||
expect(transactionTreeHash(ledger.transactions).toHex()).toBe(
|
||||
ledger.transaction_hash
|
||||
);
|
||||
});
|
||||
test("computes correct ledger header hash", function () {
|
||||
expect(ledgerHash(ledger).toHex()).toBe(ledger.hash);
|
||||
});
|
||||
});
|
||||
}
|
||||
testFactory('ledger-full-40000.json')
|
||||
testFactory('ledger-full-38129.json')
|
||||
})
|
||||
testFactory("ledger-full-40000.json");
|
||||
testFactory("ledger-full-38129.json");
|
||||
});
|
||||
|
||||
@@ -1,42 +1,45 @@
const { encode, decode } = require('../dist')
const { encode, decode } = require("../dist");

let str = "1100612200000000240000000125000068652D0000000055B6632D6376A2D9319F20A1C6DCCB486432D1E4A79951229D4C3DE2946F51D56662400009184E72A00081140DD319918CD5AE792BF7EC80D63B0F01B4573BBC"
let str =
"1100612200000000240000000125000068652D0000000055B6632D6376A2D9319F20A1C6DCCB486432D1E4A79951229D4C3DE2946F51D56662400009184E72A00081140DD319918CD5AE792BF7EC80D63B0F01B4573BBC";
let lower = str.toLowerCase();

let bin = "1100612200000000240000000125000000082D00000000550735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E56240000002540BE400811479927BAFFD3D04A26096C0C97B1B0D45B01AD3C0"
let bin =
"1100612200000000240000000125000000082D00000000550735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E56240000002540BE400811479927BAFFD3D04A26096C0C97B1B0D45B01AD3C0";
let json = {
"OwnerCount": 0,
"Account": "rUnFEsHjxqTswbivzL2DNHBb34rhAgZZZK",
"PreviousTxnLgrSeq": 8,
"LedgerEntryType": "AccountRoot",
"PreviousTxnID": "0735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E5".toLowerCase(),
"Flags": 0,
"Sequence": 1,
"Balance": "10000000000"
}
OwnerCount: 0,
Account: "rUnFEsHjxqTswbivzL2DNHBb34rhAgZZZK",
PreviousTxnLgrSeq: 8,
LedgerEntryType: "AccountRoot",
PreviousTxnID: "0735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E5".toLowerCase(),
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};

let jsonUpper = {
"OwnerCount": 0,
"Account": "rUnFEsHjxqTswbivzL2DNHBb34rhAgZZZK",
"PreviousTxnLgrSeq": 8,
"LedgerEntryType": "AccountRoot",
"PreviousTxnID": "0735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E5",
"Flags": 0,
"Sequence": 1,
"Balance": "10000000000"
}
OwnerCount: 0,
Account: "rUnFEsHjxqTswbivzL2DNHBb34rhAgZZZK",
PreviousTxnLgrSeq: 8,
LedgerEntryType: "AccountRoot",
PreviousTxnID:
"0735A0B32B2A3F4C938B76D6933003E29447DB8C7CE382BBE089402FF12A03E5",
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};

describe("Lowercase hex test", () => {
test("Correctly decodes", () => {
expect(decode(lower)).toEqual(decode(str))
})
test("Re-encodes to uppercase hex", () => {
expect(encode(decode(lower))).toEqual(str)
})
test("Encode when hex field lowercase", () => {
expect(encode(json)).toBe(bin)
})
test("Re-decodes to uppercase hex", () => {
expect(decode(encode(json))).toEqual(jsonUpper)
})
})

test("Correctly decodes", () => {
expect(decode(lower)).toEqual(decode(str));
});
test("Re-encodes to uppercase hex", () => {
expect(encode(decode(lower))).toEqual(str);
});
test("Encode when hex field lowercase", () => {
expect(encode(json)).toBe(bin);
});
test("Re-decodes to uppercase hex", () => {
expect(decode(encode(json))).toEqual(jsonUpper);
});
});

@@ -1,37 +1,38 @@
const { encode, decode } = require('../dist')
const { encode, decode } = require("../dist");

let json = {
"Account": "rrrrrrrrrrrrrrrrrrrrrhoLvTp",
"Sequence": 0,
"Fee": "0",
"SigningPubKey": "",
"Signature": ""
}
Account: "rrrrrrrrrrrrrrrrrrrrrhoLvTp",
Sequence: 0,
Fee: "0",
SigningPubKey: "",
Signature: "",
};

let json_blank_acct = {
"Account": "",
"Sequence": 0,
"Fee": "0",
"SigningPubKey": "",
"Signature": ""
}
Account: "",
Sequence: 0,
Fee: "0",
SigningPubKey: "",
Signature: "",
};

let binary = "24000000006840000000000000007300760081140000000000000000000000000000000000000000"
let binary =
"24000000006840000000000000007300760081140000000000000000000000000000000000000000";

describe("Can encode Pseudo Transactions", () => {
test("Correctly encodes Pseudo Transaciton", () => {
expect(encode(json)).toEqual(binary);
})
test("Correctly encodes Pseudo Transaciton", () => {
expect(encode(json)).toEqual(binary);
});

test("Can decode account objects", () => {
expect(decode(encode(json))).toEqual(json);
})
test("Can decode account objects", () => {
expect(decode(encode(json))).toEqual(json);
});

test("Blank AccountID is ACCOUNT_ZERO", () => {
expect(encode(json_blank_acct)).toEqual(binary)
})
test("Blank AccountID is ACCOUNT_ZERO", () => {
expect(encode(json_blank_acct)).toEqual(binary);
});

test("Decodes Blank AccountID", () => {
expect(decode(encode(json_blank_acct))).toEqual(json);
})
})
test("Decodes Blank AccountID", () => {
expect(decode(encode(json_blank_acct))).toEqual(json);
});
});
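
An illustrative aside, not part of the diff above: the "Blank AccountID is ACCOUNT_ZERO" expectation can be read directly off the fixture binary, whose trailing field is the Account field code 0x81, a VL length of 0x14 (20 bytes), and then twenty zero bytes. The constant name pseudoTxBinary is only for this sketch; it holds the same value as the `binary` constant in the test.

// Sketch: confirm the pseudo-transaction binary ends in an all-zero AccountID.
const pseudoTxBinary =
  "24000000006840000000000000007300760081140000000000000000000000000000000000000000";
console.log(pseudoTxBinary.endsWith("8114" + "00".repeat(20))); // true: ACCOUNT_ZERO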

@@ -1,15 +1,15 @@
const { quality } = require('../dist/coretypes')
const { quality } = require("../dist/coretypes");

describe('Quality encode/decode', function () {
describe("Quality encode/decode", function () {
const bookDirectory =
'4627DFFCFF8B5A265EDBD8AE8C14A52325DBFEDAF4F5C32E5D06F4C3362FE1D0'
const expectedQuality = '195796912.5171664'
test('can decode', function () {
const decimal = quality.decode(bookDirectory)
expect(decimal.toString()).toBe(expectedQuality)
})
test('can encode', function () {
const bytes = quality.encode(expectedQuality)
expect(bytes.toString('hex').toUpperCase()).toBe(bookDirectory.slice(-16))
})
})
"4627DFFCFF8B5A265EDBD8AE8C14A52325DBFEDAF4F5C32E5D06F4C3362FE1D0";
const expectedQuality = "195796912.5171664";
test("can decode", function () {
const decimal = quality.decode(bookDirectory);
expect(decimal.toString()).toBe(expectedQuality);
});
test("can encode", function () {
const bytes = quality.encode(expectedQuality);
expect(bytes.toString("hex").toUpperCase()).toBe(bookDirectory.slice(-16));
});
});
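
For readers unfamiliar with the book-directory quality encoding these tests rely on, the expected value can be reproduced by hand from the last eight bytes of bookDirectory: the first of those bytes is an exponent offset by 100, and the remaining seven bytes are the mantissa. A rough sketch under those assumptions (this is arithmetic on the fixture above, not a claim about how quality.decode is implemented internally):

// Sketch: decode the trailing 8 bytes of the bookDirectory fixture by hand.
const tail = Buffer.from("5D06F4C3362FE1D0", "hex"); // bookDirectory.slice(-16)
const exponent = tail[0] - 100; // 0x5D = 93 -> -7
const mantissa = Number(tail.readBigUInt64BE(0) & 0x00ffffffffffffffn); // 1957969125171664
console.log(mantissa / 10 ** -exponent); // exponent is negative here -> ~195796912.5171664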

@@ -1,87 +1,88 @@
const { ShaMap } = require('../dist/shamap.js')
const { binary, HashPrefix } = require('../dist/coretypes')
const { coreTypes } = require('../dist/types')
const { loadFixture } = require('./utils')
const { ShaMap } = require("../dist/shamap.js");
const { binary, HashPrefix } = require("../dist/coretypes");
const { coreTypes } = require("../dist/types");
const { loadFixture } = require("./utils");

function now () {
return (Number(Date.now())) / 1000
function now() {
return Number(Date.now()) / 1000;
}

const ZERO =
'0000000000000000000000000000000000000000000000000000000000000000'
const ZERO = "0000000000000000000000000000000000000000000000000000000000000000";

function makeItem (indexArg) {
let str = indexArg
function makeItem(indexArg) {
let str = indexArg;
while (str.length < 64) {
str += '0'
str += "0";
}
const index = coreTypes.Hash256.from(str)
const index = coreTypes.Hash256.from(str);
const item = {
toBytesSink (sink) {
index.toBytesSink(sink)
toBytesSink(sink) {
index.toBytesSink(sink);
},
hashPrefix () {
return Buffer.from([1, 3, 3, 7])
}
}
return [index, item]
hashPrefix() {
return Buffer.from([1, 3, 3, 7]);
},
};
return [index, item];
}

describe('ShaMap', () => {
now()
describe("ShaMap", () => {
now();

test('hashes to zero when empty', () => {
const map = new ShaMap()
expect(map.hash().toHex()).toBe(ZERO)
})
test('creates the same hash no matter which order items are added', () => {
let map = new ShaMap()
test("hashes to zero when empty", () => {
const map = new ShaMap();
expect(map.hash().toHex()).toBe(ZERO);
});
test("creates the same hash no matter which order items are added", () => {
let map = new ShaMap();
const items = [
'0',
'1',
'11',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E20000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E21000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E22000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E23000000000000000',
'12',
'122'
]
items.forEach(i => map.addItem(...makeItem(i)))
const h1 = map.hash()
expect(h1.eq(h1)).toBe(true)
map = new ShaMap()
items.reverse().forEach(i => map.addItem(...makeItem(i)))
expect(map.hash()).toStrictEqual(h1)
})
function factory (fixture) {
"0",
"1",
"11",
"7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E20000000000000000",
"7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E21000000000000000",
"7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E22000000000000000",
"7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E23000000000000000",
"12",
"122",
];
items.forEach((i) => map.addItem(...makeItem(i)));
const h1 = map.hash();
expect(h1.eq(h1)).toBe(true);
map = new ShaMap();
items.reverse().forEach((i) => map.addItem(...makeItem(i)));
expect(map.hash()).toStrictEqual(h1);
});
function factory(fixture) {
test(`recreate account state hash from ${fixture}`, () => {
const map = new ShaMap()
const ledger = loadFixture(fixture)
const map = new ShaMap();
const ledger = loadFixture(fixture);
// const t = now();
const leafNodePrefix = HashPrefix.accountStateEntry
ledger.accountState.map((e, i) => {
if (i > 1000 & (i % 1000) === 0) {
console.log(e.index)
console.log(i)
}
const bytes = binary.serializeObject(e)
return {
index: coreTypes.Hash256.from(e.index),
hashPrefix () {
return leafNodePrefix
},
toBytesSink (sink) {
sink.put(bytes)
const leafNodePrefix = HashPrefix.accountStateEntry;
ledger.accountState
.map((e, i) => {
if ((i > 1000) & (i % 1000 === 0)) {
console.log(e.index);
console.log(i);
}
}
}).forEach(so => map.addItem(so.index, so))
expect(map.hash().toHex()).toBe(ledger.account_hash)
const bytes = binary.serializeObject(e);
return {
index: coreTypes.Hash256.from(e.index),
hashPrefix() {
return leafNodePrefix;
},
toBytesSink(sink) {
sink.put(bytes);
},
};
})
.forEach((so) => map.addItem(so.index, so));
expect(map.hash().toHex()).toBe(ledger.account_hash);
// console.log('took seconds: ', (now() - t));
})
});
}
factory('ledger-full-38129.json')
factory('ledger-full-40000.json')
factory("ledger-full-38129.json");
factory("ledger-full-40000.json");
// factory('ledger-4320277.json');
// factory('14280680.json');
})
});

@@ -1,118 +1,129 @@
const _ = require('lodash')
const { encodeForSigning, encodeForSigningClaim, encodeForMultisigning } = require('../dist')
const {
encodeForSigning,
encodeForSigningClaim,
encodeForMultisigning,
} = require("../dist");

const tx_json = {
Account: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',
Amount: '1000',
Destination: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
Fee: '10',
Account: "r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ",
Amount: "1000",
Destination: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
Fee: "10",
Flags: 2147483648,
Sequence: 1,
TransactionType: 'Payment',
TxnSignature: '30440220718D264EF05CAED7C781FF6DE298DCAC68D002562C9BF3A07C1' +
'E721B420C0DAB02203A5A4779EF4D2CCC7BC3EF886676D803A9981B928D3B8ACA483B80' +
'ECA3CD7B9B',
Signature: '30440220718D264EF05CAED7C781FF6DE298DCAC68D002562C9BF3A07C1E72' +
'1B420C0DAB02203A5A4779EF4D2CCC7BC3EF886676D803A9981B928D3B8ACA483B80ECA' +
'3CD7B9B',
TransactionType: "Payment",
TxnSignature:
"30440220718D264EF05CAED7C781FF6DE298DCAC68D002562C9BF3A07C1" +
"E721B420C0DAB02203A5A4779EF4D2CCC7BC3EF886676D803A9981B928D3B8ACA483B80" +
"ECA3CD7B9B",
Signature:
"30440220718D264EF05CAED7C781FF6DE298DCAC68D002562C9BF3A07C1E72" +
"1B420C0DAB02203A5A4779EF4D2CCC7BC3EF886676D803A9981B928D3B8ACA483B80ECA" +
"3CD7B9B",
SigningPubKey:
'ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A'
}
"ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A",
};

describe('Signing data', function () {
test('can create single signing blobs', function () {
const actual = encodeForSigning(tx_json)
describe("Signing data", function () {
test("can create single signing blobs", function () {
const actual = encodeForSigning(tx_json);
expect(actual).toBe(
['53545800', // signingPrefix
[
"53545800", // signingPrefix
// TransactionType
'12',
'0000',
"12",
"0000",
// Flags
'22',
'80000000',
"22",
"80000000",
// Sequence
'24',
'00000001',
"24",
"00000001",
// Amount
'61',
"61",
// native amount
'40000000000003E8',
"40000000000003E8",
// Fee
'68',
"68",
// native amount
'400000000000000A',
"400000000000000A",
// SigningPubKey
'73',
"73",
// VLLength
'21',
'ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A',
"21",
"ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A",
// Account
'81',
"81",
// VLLength
'14',
'5B812C9D57731E27A2DA8B1830195F88EF32A3B6',
"14",
"5B812C9D57731E27A2DA8B1830195F88EF32A3B6",
// Destination
'83',
"83",
// VLLength
'14',
'B5F762798A53D543A014CAF8B297CFF8F2F937E8'].join('')
)
})
test('can create multi signing blobs', function () {
const signingAccount = 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN'
const signingJson = Object.assign({}, tx_json, { SigningPubKey: '' })
const actual = encodeForMultisigning(signingJson, signingAccount)
"14",
"B5F762798A53D543A014CAF8B297CFF8F2F937E8",
].join("")
);
});
test("can create multi signing blobs", function () {
const signingAccount = "rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN";
const signingJson = Object.assign({}, tx_json, { SigningPubKey: "" });
const actual = encodeForMultisigning(signingJson, signingAccount);
expect(actual).toBe(
['534D5400', // signingPrefix
[
"534D5400", // signingPrefix
// TransactionType
'12',
'0000',
"12",
"0000",
// Flags
'22',
'80000000',
"22",
"80000000",
// Sequence
'24',
'00000001',
"24",
"00000001",
// Amount
'61',
"61",
// native amount
'40000000000003E8',
"40000000000003E8",
// Fee
'68',
"68",
// native amount
'400000000000000A',
"400000000000000A",
// SigningPubKey
'73',
"73",
// VLLength
'00',
"00",
// '',
// Account
'81',
"81",
// VLLength
'14',
'5B812C9D57731E27A2DA8B1830195F88EF32A3B6',
"14",
"5B812C9D57731E27A2DA8B1830195F88EF32A3B6",
// Destination
'83',
"83",
// VLLength
'14',
'B5F762798A53D543A014CAF8B297CFF8F2F937E8',
"14",
"B5F762798A53D543A014CAF8B297CFF8F2F937E8",
// signingAccount suffix
'C0A5ABEF242802EFED4B041E8F2D4A8CC86AE3D1'].join('')
)
})
test('can create claim blob', function () {
"C0A5ABEF242802EFED4B041E8F2D4A8CC86AE3D1",
].join("")
);
});
test("can create claim blob", function () {
const channel =
'43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1'
const amount = '1000'
const json = { channel, amount }
const actual = encodeForSigningClaim(json)
expect(actual).toBe([
// hash prefix
'434C4D00',
// channel ID
'43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1',
// amount as a uint64
'00000000000003E8'
].join(''))
})
})
"43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1";
const amount = "1000";
const json = { channel, amount };
const actual = encodeForSigningClaim(json);
expect(actual).toBe(
[
// hash prefix
"434C4D00",
// channel ID
"43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1",
// amount as a uint64
"00000000000003E8",
].join("")
);
});
});
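
As a small illustration of the claim-blob layout asserted above (an aside, not part of the diff): the blob is simply the hash prefix, the channel ID, and the drop amount serialized as an 8-byte big-endian unsigned integer, which is where the trailing "00000000000003E8" comes from. The names claimPrefix, channelId, and dropsHex are only for this sketch.

// Sketch: assemble the same claim blob string by hand.
const claimPrefix = "434C4D00"; // hash prefix from the test above
const channelId =
  "43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1";
const dropsHex = BigInt("1000").toString(16).toUpperCase().padStart(16, "0"); // "00000000000003E8"
console.log(claimPrefix + channelId + dropsHex); // matches the expected claim blob above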

@@ -1,107 +1,80 @@
const {
encode,
decode
} = require('../dist')
const { encode, decode } = require("../dist");

// Notice: no Amount or Fee
const tx_json = {
Account: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',
Account: "r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ",
// Amount: '1000',
Destination: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
Destination: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
// Fee: '10',

// JavaScript converts operands to 32-bit signed ints after doing bitwise
// operations. We need to convert it back to an unsigned int with >>> 0.
Flags: ((1 << 31) >>> 0), // tfFullyCanonicalSig
Flags: (1 << 31) >>> 0, // tfFullyCanonicalSig

Sequence: 1,
TransactionType: 'Payment'
TransactionType: "Payment",
// TxnSignature,
// Signature,
// SigningPubKey
}
};

const amount_parameters_message = input => {
// disables the ESLint rule on the whole rest of the file
/* eslint-disable max-len */
return `${input} is an illegal amount

Native values must be described in drops, a million of which equal one XRP.
This must be an integer number, with the absolute value not exceeding 100000000000000000

IOU values must have a maximum precision of 16 significant digits. They are serialized as
a canonicalised mantissa and exponent.

The valid range for a mantissa is between 1000000000000000 and 9999999999999999
The exponent must be >= -96 and <= 80

Thus the largest serializable IOU value is:
999999999999999900000000000000000000000000000000000000000000000000000000000000000000000000000000

And the smallest:
0.000000000000000000000000000000000000000000000000000000000000000000000000000000001
`
}

describe('encoding and decoding tx_json', function () {
test('can encode tx_json without Amount or Fee', function () {
const encoded = encode(tx_json)
const decoded = decode(encoded)
expect(tx_json).toEqual(decoded)
})
test('can encode tx_json with Amount and Fee', function () {
describe("encoding and decoding tx_json", function () {
test("can encode tx_json without Amount or Fee", function () {
const encoded = encode(tx_json);
const decoded = decode(encoded);
expect(tx_json).toEqual(decoded);
});
test("can encode tx_json with Amount and Fee", function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000',
Fee: '10'
})
const encoded = encode(my_tx)
const decoded = decode(encoded)
expect(my_tx).toEqual(decoded)
})
test('throws when Amount is invalid', function () {
Amount: "1000",
Fee: "10",
});
const encoded = encode(my_tx);
const decoded = decode(encoded);
expect(my_tx).toEqual(decoded);
});
test("throws when Amount is invalid", function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000.001',
Fee: '10'
})
Amount: "1000.001",
Fee: "10",
});
expect(() => {
encode(my_tx)
}).toThrow()
})
test('throws when Fee is invalid', function () {
encode(my_tx);
}).toThrow();
});
test("throws when Fee is invalid", function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000',
Fee: '10.123'
})
Amount: "1000",
Fee: "10.123",
});
expect(() => {
encode(my_tx)
}).toThrow()
})
test('throws when Amount and Fee are invalid', function () {
encode(my_tx);
}).toThrow();
});
test("throws when Amount and Fee are invalid", function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000.789',
Fee: '10.123'
})
Amount: "1000.789",
Fee: "10.123",
});
expect(() => {
encode(my_tx)
}).toThrow()
})
test('throws when Amount is a number instead of a string-encoded integer',
function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1000.789
})
expect(() => {
encode(my_tx)
}).toThrow()
})
encode(my_tx);
}).toThrow();
});
test("throws when Amount is a number instead of a string-encoded integer", function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1000.789,
});
expect(() => {
encode(my_tx);
}).toThrow();
});

test('throws when Fee is a number instead of a string-encoded integer',
function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1234.56
})
expect(() => {
encode(my_tx)
}).toThrow()
})
})
test("throws when Fee is a number instead of a string-encoded integer", function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1234.56,
});
expect(() => {
encode(my_tx);
}).toThrow();
});
});
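
The Flags comment in this file is worth a standalone illustration (an aside, not part of the diff): JavaScript bitwise operators work on signed 32-bit integers, so the tfFullyCanonicalSig bit comes out negative unless it is reinterpreted with an unsigned right shift, exactly as the code above does.

// Sketch: the signed vs. unsigned reading of bit 31.
console.log(1 << 31);         // -2147483648 (bitwise ops yield signed 32-bit results)
console.log((1 << 31) >>> 0); //  2147483648 (the value used for Flags above)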

@@ -1,37 +1,34 @@
const _ = require('lodash')
const { coreTypes } = require('../dist/types')
const { SerializedType } = require('../dist/types/serialized-type')
const { coreTypes } = require("../dist/types");
const { SerializedType } = require("../dist/types/serialized-type");

describe('SerializedType interfaces', () => {
describe("SerializedType interfaces", () => {
Object.entries(coreTypes).forEach(([name, Value]) => {
test(`${name} has a \`from\` static constructor`, () => {
expect(Value.from && Value.from !== Array.from).toBe(true)
})
expect(Value.from && Value.from !== Array.from).toBe(true);
});
test(`${name} has a default constructor`, () => {
/* eslint-disable no-new */
new Value()
/* eslint-enable no-new */
})
expect(new Value()).not.toBe(undefined);
});
test(`${name}.from will return the same object`, () => {
const instance = new Value()
expect(Value.from(instance) === instance).toBe(true)
})
const instance = new Value();
expect(Value.from(instance) === instance).toBe(true);
});
test(`${name} instances have toBytesSink`, () => {
expect(new Value().toBytesSink).not.toBe(undefined)
})
expect(new Value().toBytesSink).not.toBe(undefined);
});
test(`${name} instances have toJSON`, () => {
expect(new Value().toJSON).not.toBe(undefined)
})
expect(new Value().toJSON).not.toBe(undefined);
});
test(`${name}.from(json).toJSON() == json`, () => {
const newJSON = new Value().toJSON()
expect(Value.from(newJSON).toJSON()).toEqual(newJSON)
})
const newJSON = new Value().toJSON();
expect(Value.from(newJSON).toJSON()).toEqual(newJSON);
});
describe(`${name} supports all methods of the SerializedType mixin`, () => {
Object.keys(SerializedType.prototype).forEach(k => {
Object.keys(SerializedType.prototype).forEach((k) => {
test(`new ${name}.prototype.${k} !== undefined`, () => {
expect(Value.prototype[k]).not.toBe(undefined)
})
})
})
})
})
expect(Value.prototype[k]).not.toBe(undefined);
});
});
});
});
});

@@ -1,40 +1,40 @@
const { coreTypes } = require('../dist/types')
const { UInt8, UInt64 } = coreTypes
const { coreTypes } = require("../dist/types");
const { UInt8, UInt64 } = coreTypes;

test('compareToTests', () => {
expect(UInt8.from(124).compareTo(UInt64.from(124))).toBe(0)
})
test("compareToTests[0]", () => {
expect(UInt8.from(124).compareTo(UInt64.from(124))).toBe(0);
});

test('compareToTest', () => {
expect(UInt64.from(124).compareTo(UInt8.from(124))).toBe(0)
})
test("compareToTest[1]", () => {
expect(UInt64.from(124).compareTo(UInt8.from(124))).toBe(0);
});

test('compareToTest', () => {
expect(UInt64.from(124).compareTo(UInt8.from(123))).toBe(1)
})
test("compareToTest[2]", () => {
expect(UInt64.from(124).compareTo(UInt8.from(123))).toBe(1);
});

test('compareToTest', () => {
expect(UInt8.from(124).compareTo(UInt8.from(13))).toBe(1)
})
test("compareToTest[3]", () => {
expect(UInt8.from(124).compareTo(UInt8.from(13))).toBe(1);
});

test('compareToTest', () => {
expect(UInt8.from(124).compareTo(124)).toBe(0)
})
test("compareToTest[4]", () => {
expect(UInt8.from(124).compareTo(124)).toBe(0);
});

test('compareToTest', () => {
expect(UInt64.from(124).compareTo(124)).toBe(0)
})
test("compareToTest[5]", () => {
expect(UInt64.from(124).compareTo(124)).toBe(0);
});

test('compareToTest', () => {
expect(UInt64.from(124).compareTo(123)).toBe(1)
})
test("compareToTest[6]", () => {
expect(UInt64.from(124).compareTo(123)).toBe(1);
});

test('compareToTest', () => {
expect(UInt8.from(124).compareTo(13)).toBe(1)
})
test("compareToTest[7]", () => {
expect(UInt8.from(124).compareTo(13)).toBe(1);
});

test('valueOfTests', () => {
let val = UInt8.from(1)
val |= 0x2
expect(val).toBe(3)
})
test("valueOfTests", () => {
let val = UInt8.from(1);
val |= 0x2;
expect(val).toBe(3);
});

@@ -1,23 +1,23 @@
const fs = require('fs')
const fs = require("fs");

function hexOnly (hex) {
return hex.replace(/[^a-fA-F0-9]/g, '')
function hexOnly(hex) {
return hex.replace(/[^a-fA-F0-9]/g, "");
}

function unused () {}
function unused() {}

function parseHexOnly (hex) {
return Buffer.from(hexOnly(hex), 'hex');
function parseHexOnly(hex) {
return Buffer.from(hexOnly(hex), "hex");
}

function loadFixture (relativePath) {
const fn = __dirname + '/fixtures/' + relativePath
return require(fn)
function loadFixture(relativePath) {
const fn = __dirname + "/fixtures/" + relativePath;
return require(fn);
}

function loadFixtureText (relativePath) {
const fn = __dirname + '/fixtures/' + relativePath
return fs.readFileSync(fn).toString('utf8')
function loadFixtureText(relativePath) {
const fn = __dirname + "/fixtures/" + relativePath;
return fs.readFileSync(fn).toString("utf8");
}

module.exports = {
@@ -25,5 +25,5 @@ module.exports = {
parseHexOnly,
loadFixture,
loadFixtureText,
unused
}
unused,
};

@@ -1,137 +1,147 @@
const { encode, decode } = require("./../dist/index");
const fixtures = require('./fixtures/x-codec-fixtures.json')
const fixtures = require("./fixtures/x-codec-fixtures.json");

let json_x1 = {
OwnerCount: 0,
Account: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT",
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
PreviousTxnID: "DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
Flags: 0,
Sequence: 1,
Balance: "10000000000"
}
OwnerCount: 0,
Account: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT",
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};

let json_r1 = {
OwnerCount: 0,
Account: 'rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv',
PreviousTxnLgrSeq: 7,
LedgerEntryType: 'AccountRoot',
PreviousTxnID: 'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: '10000000000',
SourceTag: 12345,
}
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
Flags: 0,
Sequence: 1,
Balance: "10000000000",
SourceTag: 12345,
};

let json_null_x = {
"OwnerCount": 0,
"Account": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"Destination": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"Issuer": "XVXdn5wEVm5G4UhEHWDPqjvdeH361P4GETfNyyXGaoqBj71",
"PreviousTxnLgrSeq": 7,
"LedgerEntryType": "AccountRoot",
"PreviousTxnID": "DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
"Flags": 0,
"Sequence": 1,
"Balance": "10000000000"
}
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Destination: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Issuer: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P4GETfNyyXGaoqBj71",
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};

let json_invalid_x = {
"OwnerCount": 0,
"Account": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"Destination": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"Issuer": "XVXdn5wEVm5g4UhEHWDPqjvdeH361P4GETfNyyXGaoqBj71",
"PreviousTxnLgrSeq": 7,
"LedgerEntryType": "AccountRoot",
"PreviousTxnID": "DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
"Flags": 0,
"Sequence": 1,
"Balance": "10000000000"
}
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Destination: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Issuer: "XVXdn5wEVm5g4UhEHWDPqjvdeH361P4GETfNyyXGaoqBj71",
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};

let json_null_r = {
"OwnerCount": 0,
"Account": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"Destination": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"Issuer": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"PreviousTxnLgrSeq": 7,
"LedgerEntryType": "AccountRoot",
"PreviousTxnID": "DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
"Flags": 0,
"Sequence": 1,
"Balance": "10000000000"
}
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Destination: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Issuer: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};

let invalid_json_issuer_tagged = {
"OwnerCount": 0,
"Account": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"Destination": "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
"Issuer": "XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT",
"PreviousTxnLgrSeq": 7,
"LedgerEntryType": "AccountRoot",
"PreviousTxnID": "DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
"Flags": 0,
"Sequence": 1,
"Balance": "10000000000"
}
OwnerCount: 0,
Account: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Destination: "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv",
Issuer: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT",
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
Flags: 0,
Sequence: 1,
Balance: "10000000000",
};

let invalid_json_x_and_tagged = {
OwnerCount: 0,
Account: 'XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT',
PreviousTxnLgrSeq: 7,
LedgerEntryType: 'AccountRoot',
PreviousTxnID: 'DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF',
Flags: 0,
Sequence: 1,
Balance: '10000000000',
SourceTag: 12345,
}
OwnerCount: 0,
Account: "XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT",
PreviousTxnLgrSeq: 7,
LedgerEntryType: "AccountRoot",
PreviousTxnID:
"DF530FB14C5304852F20080B0A8EEF3A6BDD044F41F4EBBD68B8B321145FE4FF",
Flags: 0,
Sequence: 1,
Balance: "10000000000",
SourceTag: 12345,
};

describe("X-Address Account is equivalent to a classic address w/ SourceTag", () => {
let encoded_x = encode(json_x1);
let encoded_r = encode(json_r1);
test("Can encode with x-Address", () => {
expect(encoded_x).toEqual(encoded_r);
})
let encoded_x = encode(json_x1);
let encoded_r = encode(json_r1);
test("Can encode with x-Address", () => {
expect(encoded_x).toEqual(encoded_r);
});

test("decoded X-address is object w/ source and tag", () => {
let decoded_x = decode(encoded_x);
expect(decoded_x).toEqual(json_r1);
})
test("decoded X-address is object w/ source and tag", () => {
let decoded_x = decode(encoded_x);
expect(decoded_x).toEqual(json_r1);
});

test("Encoding issuer X-Address w/ undefined destination tag", () => {
expect(encode(json_null_x)).toEqual(encode(json_null_r));
})
test("Encoding issuer X-Address w/ undefined destination tag", () => {
expect(encode(json_null_x)).toEqual(encode(json_null_r));
});

test("Throws when X-Address is invalid", () => {
expect(() => encode(json_invalid_x)).toThrow("checksum_invalid");
})
})
test("Throws when X-Address is invalid", () => {
expect(() => encode(json_invalid_x)).toThrow("checksum_invalid");
});
});

describe("Invalid X-Address behavior", () => {
test("X-Address with tag throws value for invalid field",() => {
expect(() => encode(invalid_json_issuer_tagged)).toThrow(new Error("Issuer cannot have an associated tag"))
})
test("X-Address with tag throws value for invalid field", () => {
expect(() => encode(invalid_json_issuer_tagged)).toThrow(
new Error("Issuer cannot have an associated tag")
);
});

test("Throws when Account has both X-Addr and Destination Tag", () => {
expect(() => encode(invalid_json_x_and_tagged)).toThrow(new Error("Cannot have Account X-Address and SourceTag"));
test("Throws when Account has both X-Addr and Destination Tag", () => {
expect(() => encode(invalid_json_x_and_tagged)).toThrow(
new Error("Cannot have Account X-Address and SourceTag")
);
});
});

describe("ripple-binary-codec x-address test", function () {
function makeSuite(name, entries) {
describe(name, function () {
entries.forEach((t, testN) => {
test(`${name}[${testN}] encodes X-address json equivalent to classic address json`, () => {
expect(encode(t.rjson)).toEqual(encode(t.xjson));
});
test(`${name}[${testN}] decodes X-address json equivalent to classic address json`, () => {
expect(decode(encode(t.xjson))).toEqual(t.rjson);
});
});
});
})

describe('ripple-binary-codec x-address test', function () {
function makeSuite (name, entries) {
describe(name, function () {
entries.forEach((t, testN) => {
test(`${name}[${testN}] encodes X-address json equivalent to classic address json`,
() => {
expect(encode(t.rjson)).toEqual(encode(t.xjson))
})
test(`${name}[${testN}] decodes X-address json equivalent to classic address json`, () => {
expect(decode(encode(t.xjson))).toEqual(t.rjson);
})
})
})
}
makeSuite('transactions', fixtures.transactions)
})
}
makeSuite("transactions", fixtures.transactions);
});
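
A brief aside on what the json_x1 / json_r1 pair above encodes: an X-address packs the classic address and the tag into a single string, which is why the two objects serialize to identical binary. A rough sketch, assuming ripple-address-codec is installed (it is the usual companion library for X-address handling; this is not part of the diff):

// Sketch: unpack the X-address fixture into its classic-address form.
const { xAddressToClassicAddress } = require("ripple-address-codec");
console.log(
  xAddressToClassicAddress("XVXdn5wEVm5G4UhEHWDPqjvdeH361P7BsapL4m2D2XnPSwT")
);
// expected, per the equivalence asserted by the tests above:
// classicAddress "rLs1MzkFWCxTbuAHgjeTZK4fcCDDnf2KRv" with tag 12345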

@@ -3,5 +3,5 @@
{
// extend your base config so you don't have to redefine your compilerOptions
"extends": "./tsconfig.json",
"include": ["src/**/*.ts"]
"include": ["src/**/*.ts", "test/**/*.js"]
}

@@ -625,6 +625,16 @@
eslint-scope "^5.0.0"
eslint-utils "^2.0.0"

"@typescript-eslint/experimental-utils@^2.5.0":
version "2.34.0"
resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-2.34.0.tgz#d3524b644cdb40eebceca67f8cf3e4cc9c8f980f"
integrity sha512-eS6FTkq+wuMJ+sgtuNTtcqavWXqsflWcfBnlYhg/nS4aZ1leewkXGbvBhaapn1q6qf4M71bsR1tez5JTRMuqwA==
dependencies:
"@types/json-schema" "^7.0.3"
"@typescript-eslint/typescript-estree" "2.34.0"
eslint-scope "^5.0.0"
eslint-utils "^2.0.0"

"@typescript-eslint/parser@^3.2.0":
version "3.7.1"
resolved "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-3.7.1.tgz#5d9ccecb116d12d9c6073e9861c57c9b1aa88128"
@@ -641,6 +651,19 @@
resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-3.7.1.tgz#90375606b2fd73c1224fe9e397ee151e28fa1e0c"
integrity sha512-PZe8twm5Z4b61jt7GAQDor6KiMhgPgf4XmUb9zdrwTbgtC/Sj29gXP1dws9yEn4+aJeyXrjsD9XN7AWFhmnUfg==

"@typescript-eslint/typescript-estree@2.34.0":
version "2.34.0"
resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz#14aeb6353b39ef0732cc7f1b8285294937cf37d5"
integrity sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg==
dependencies:
debug "^4.1.1"
eslint-visitor-keys "^1.1.0"
glob "^7.1.6"
is-glob "^4.0.1"
lodash "^4.17.15"
semver "^7.3.2"
tsutils "^3.17.1"

"@typescript-eslint/typescript-estree@3.7.1":
version "3.7.1"
resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.7.1.tgz#ce1ffbd0fa53f34d4ce851a7a364e392432f6eb3"
@@ -1467,6 +1490,13 @@ eslint-plugin-import@^2.21.1:
resolve "^1.17.0"
tsconfig-paths "^3.9.0"

eslint-plugin-jest@^23.20.0:
version "23.20.0"
resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-23.20.0.tgz#e1d69c75f639e99d836642453c4e75ed22da4099"
integrity sha512-+6BGQt85OREevBDWCvhqj1yYA4+BFK4XnRZSGJionuEYmcglMZYLNNBBemwzbqUAckURaHdJSBcjHPyrtypZOw==
dependencies:
"@typescript-eslint/experimental-utils" "^2.5.0"

eslint-plugin-mocha@^7.0.1:
version "7.0.1"
resolved "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-7.0.1.tgz#b2e9e8ebef7836f999a83f8bab25d0e0c05f0d28"
@@ -1502,10 +1532,10 @@ eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0:
resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e"
integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==

eslint@^7.2.0:
version "7.5.0"
resolved "https://registry.npmjs.org/eslint/-/eslint-7.5.0.tgz#9ecbfad62216d223b82ac9ffea7ef3444671d135"
integrity sha512-vlUP10xse9sWt9SGRtcr1LAC67BENcQMFeV+w5EvLEoFe3xJ8cF1Skd0msziRx/VMC+72B4DxreCE+OR12OA6Q==
eslint@^7.7.0:
version "7.7.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.7.0.tgz#18beba51411927c4b64da0a8ceadefe4030d6073"
integrity sha512-1KUxLzos0ZVsyL81PnRN335nDtQ8/vZUD6uMtWbF+5zDtjKcsklIi78XoE0MVL93QvWTu+E5y44VyyCsOMBrIg==
dependencies:
"@babel/code-frame" "^7.0.0"
ajv "^6.10.0"