Initial change from Babel/JS to TypeScript (#70)

* will compile as typescript

* migrated test suite to use JestJS

* Migrated to Jest testing framework and typescript source files

* updated deps

* updated prepublish

* resolved 1 failing test

* changed decimal .0 on four tests, it appears that these were the only four tests expecting integer values to have '.0'

* added linter

* added package-lock

* removed tslint in favor of eslint

* changed yarn to npm

* updated version 2.6->3.0

* removing package lock

* updated node version in nvmrc and jest version in package

* removed nvmrc

* removed some unused functions

* replaced data driven with file from master

* committing yarn.lock

* removing babel as a dependency in favor of typescript compiling to es5

* removing babel deps

* resolved testing issues by migrating helper function

* added partial linting functionality for test suite

* updated imports for decodeLedgerData

* updated test

* updated yarn.lock

* removed a console.log

* added eslint-jest-plugin to package

* reverting to old linting, will add linting in next PR

* removed comments in shamap

* re-adding .nvmrc

* npm -> yarn

* added . to .eslintrc

* added .eslintrc

* removing linting for this PR

* Changed linting to print a message so that linting doesn't fail in CI

* changing back

* added newline so diff won't show

* removed eslint deps, since linting will be dealt with in a later PR

* changed function calls to describe(...)
This commit is contained in:
Nathan Nichols
2020-06-24 09:00:28 -07:00
parent 16b1b91a76
commit a930b9413c
91 changed files with 5058 additions and 5142 deletions

View File

@@ -1,4 +0,0 @@
{
"presets" : ["es2015"],
"retainLines": true
}

View File

@@ -1,6 +1,6 @@
{
"name": "ripple-binary-codec",
"version": "0.2.6",
"version": "0.3.0",
"description": "XRP Ledger binary codec",
"files": [
"distrib/npm/*",
@@ -12,32 +12,25 @@
"test": "test"
},
"dependencies": {
"babel-runtime": "^6.26.0",
"bn.js": "^5.1.1",
"bn.js": "^5.1.2",
"create-hash": "^1.2.0",
"decimal.js": "^10.2.0",
"inherits": "^2.0.4",
"lodash": "^4.17.15",
"ripple-address-codec": "^4.1.0"
"ripple-address-codec": "^4.1.1"
},
"devDependencies": {
"babel-cli": "^6.8.0",
"babel-core": "^6.8.0",
"babel-eslint": "^10.0.2",
"babel-loader": "^6.2.4",
"babel-preset-es2015": "^6.6.0",
"babel-register": "^6.8.0",
"eslint": "^7.0.0",
"fs-extra": "^9.0.0",
"intercept-stdout": "^0.1.2",
"istanbul": "~0.4.3",
"mocha": "~7.1.2"
"@types/jest": "^26.0.0",
"@types/lodash": "^4.14.155",
"@types/node": "^14.0.10",
"jest": "^26.0.1",
"typescript": "^3.9.5"
},
"scripts": {
"compile": "babel --optional runtime -d distrib/npm/ src/ && cp src/enums/definitions.json distrib/npm/enums",
"prepublish": "npm test && npm run lint && npm run compile",
"test": "istanbul test _mocha",
"lint": "if ! [ -f eslintrc ]; then curl -o eslintrc 'https://raw.githubusercontent.com/ripple/javascript-style-guide/es6/eslintrc'; echo 'parser: babel-eslint' >> eslintrc; fi; eslint -c eslintrc src/**/*.js test/*.js"
"compile": "tsc && cp ./src/enums/definitions.json ./dist/enums",
"prepare": "npm run compile && npm test",
"lint" : "echo 'no linting for this PR is available'",
"test": "jest"
},
"repository": {
"type": "git",

View File

@@ -1,7 +1,7 @@
/* eslint-disable func-style */
const BN = require('bn.js');
const types = require('./types');
import { BN } from 'bn.js';
import { coreTypes } from './types';
const { HashPrefix } = require('./hash-prefixes');
const {BinaryParser} = require('./serdes/binary-parser');
const {BinarySerializer, BytesList} = require('./serdes/binary-serializer');
@@ -10,17 +10,17 @@ const {bytesToHex, slice, parseBytes} = require('./utils/bytes-utils');
const {sha512Half, transactionID} = require('./hashes');
const makeParser = bytes => new BinaryParser(bytes);
const readJSON = parser => parser.readType(types.STObject).toJSON();
const readJSON = parser => parser.readType(coreTypes.STObject).toJSON();
const binaryToJSON = bytes => readJSON(makeParser(bytes));
function serializeObject(object, opts = {}) {
function serializeObject(object, opts = <any>{}) {
const {prefix, suffix, signingFieldsOnly = false} = opts;
const bytesList = new BytesList();
if (prefix) {
bytesList.put(prefix);
}
const filter = signingFieldsOnly ? f => f.isSigningField : undefined;
types.STObject.from(object).toBytesSink(bytesList, filter);
coreTypes.STObject.from(object).toBytesSink(bytesList, filter);
if (suffix) {
bytesList.put(suffix);
}
@@ -33,8 +33,8 @@ function signingData(tx, prefix = HashPrefix.transactionSig) {
function signingClaimData(claim) {
const prefix = HashPrefix.paymentChannelClaim
const channel = types.Hash256.from(claim.channel).toBytes()
const amount = new types.UInt64(new BN(claim.amount)).toBytes();
const channel = coreTypes.Hash256.from(claim.channel).toBytes()
const amount = new coreTypes.UInt64(new BN(claim.amount)).toBytes();
const bytesList = new BytesList();
@@ -46,11 +46,11 @@ function signingClaimData(claim) {
function multiSigningData(tx, signingAccount) {
const prefix = HashPrefix.transactionMultiSig;
const suffix = types.AccountID.from(signingAccount).toBytes();
const suffix = coreTypes.AccountID.from(signingAccount).toBytes();
return serializeObject(tx, {prefix, suffix, signingFieldsOnly: true});
}
module.exports = {
export {
BinaryParser,
BinarySerializer,
BytesList,

View File

@@ -1,6 +1,5 @@
const _ = require('lodash');
const enums = require('./enums');
const {Field} = enums;
import { Enums } from './enums';
const {Field} = Enums.Field;
const types = require('./types');
const binary = require('./binary');
const {ShaMap} = require('./shamap');
@@ -10,14 +9,14 @@ const quality = require('./quality');
const {HashPrefix} = require('./hash-prefixes');
module.exports = _.assign({
hashes: _.assign({}, hashes, ledgerHashes),
export {
hashes,
binary,
enums,
ledgerHashes,
Enums,
quality,
Field,
HashPrefix,
ShaMap
},
ShaMap,
types
);
}

View File

@@ -1,125 +0,0 @@
const assert = require('assert');
const _ = require('lodash');
const {parseBytes, serializeUIntN} = require('./../utils/bytes-utils');
const makeClass = require('./../utils/make-class');
const enums = require('./definitions.json');
function transformWith(func, obj) {
return _.transform(obj, func);
}
function biMap(obj, valueKey) {
return _.transform(obj, (result, value, key) => {
result[key] = value;
result[value[valueKey]] = value;
});
}
const EnumType = makeClass({
EnumType(definition) {
_.assign(this, definition);
// At minimum
assert(this.bytes instanceof Uint8Array);
assert(typeof this.ordinal === 'number');
assert(typeof this.name === 'string');
},
toString() {
return this.name;
},
toJSON() {
return this.name;
},
toBytesSink(sink) {
sink.put(this.bytes);
},
statics: {
ordinalByteWidth: 1,
fromParser(parser) {
return this.from(parser.readUIntN(this.ordinalByteWidth));
},
from(val) {
const ret = val instanceof this ? val : this[val];
if (!ret) {
throw new Error(
`${val} is not a valid name or ordinal for ${this.enumName}`);
}
return ret;
},
valuesByName() {
return _.transform(this.initVals, (result, ordinal, name) => {
const bytes = serializeUIntN(ordinal, this.ordinalByteWidth);
const type = new this({name, ordinal, bytes});
result[name] = type;
});
},
init() {
const mapped = this.valuesByName();
_.assign(this, biMap(mapped, 'ordinal'));
this.values = _.values(mapped);
return this;
}
}
});
function makeEnum(name, definition) {
return makeClass({
inherits: EnumType,
statics: _.assign(definition, {enumName: name})
});
}
function makeEnums(to, definition, name) {
to[name] = makeEnum(name, definition);
}
const Enums = transformWith(makeEnums, {
Type: {
initVals: enums.TYPES
},
LedgerEntryType: {
initVals: enums.LEDGER_ENTRY_TYPES, ordinalByteWidth: 2
},
TransactionType: {
initVals: enums.TRANSACTION_TYPES, ordinalByteWidth: 2
},
TransactionResult: {
initVals: enums.TRANSACTION_RESULTS, ordinalByteWidth: 1
}
});
Enums.Field = makeClass({
inherits: EnumType,
statics: {
enumName: 'Field',
initVals: enums.FIELDS,
valuesByName() {
const fields = _.map(this.initVals, ([name, definition]) => {
const type = Enums.Type[definition.type];
const bytes = this.header(type.ordinal, definition.nth);
const ordinal = type.ordinal << 16 | definition.nth;
const extra = {ordinal, name, type, bytes};
return new this(_.assign(definition, extra));
});
return _.keyBy(fields, 'name');
},
header(type, nth) {
const name = nth;
const header = [];
const push = header.push.bind(header);
if (type < 16) {
if (name < 16) {
push(type << 4 | name);
} else {
push(type << 4, name);
}
} else if (name < 16) {
push(name, type);
} else {
push(0, type, name);
}
return parseBytes(header, Uint8Array);
}
}
});
module.exports = Enums;

View File

@@ -0,0 +1,125 @@
import { makeClass } from './../utils/make-class'
const assert = require('assert')
const _ = require('lodash')
const { parseBytes, serializeUIntN } = require('./../utils/bytes-utils')
const enums = require('./definitions.json')
function transformWith (func, obj) {
return _.transform(obj, func)
}
function biMap (obj, valueKey) {
return _.transform(obj, (result, value, key) => {
result[key] = value
result[value[valueKey]] = value
})
}
const EnumType = makeClass({
EnumType (definition) {
_.assign(this, definition)
// At minimum
assert(this.bytes instanceof Uint8Array)
assert(typeof this.ordinal === 'number')
assert(typeof this.name === 'string')
},
toString () {
return this.name
},
toJSON () {
return this.name
},
toBytesSink (sink) {
sink.put(this.bytes)
},
statics: {
ordinalByteWidth: 1,
fromParser (parser) {
return this.from(parser.readUIntN(this.ordinalByteWidth))
},
from (val) {
const ret = val instanceof this ? val : this[val]
if (!ret) {
throw new Error(
`${val} is not a valid name or ordinal for ${this.enumName}`)
}
return ret
},
valuesByName () {
return _.transform(this.initVals, (result, ordinal, name) => {
const bytes = serializeUIntN(ordinal, this.ordinalByteWidth)
const type = new this({ name, ordinal, bytes })
result[name] = type
})
},
init () {
const mapped = this.valuesByName()
_.assign(this, biMap(mapped, 'ordinal'))
this.values = _.values(mapped)
return this
}
}
}, undefined)
function makeEnum (name, definition) {
return makeClass({
inherits: EnumType,
statics: _.assign(definition, { enumName: name })
}, undefined)
}
function makeEnums (to, definition, name) {
to[name] = makeEnum(name, definition)
}
const Enums = transformWith(makeEnums, {
Type: {
initVals: enums.TYPES
},
LedgerEntryType: {
initVals: enums.LEDGER_ENTRY_TYPES, ordinalByteWidth: 2
},
TransactionType: {
initVals: enums.TRANSACTION_TYPES, ordinalByteWidth: 2
},
TransactionResult: {
initVals: enums.TRANSACTION_RESULTS, ordinalByteWidth: 1
}
})
Enums.Field = makeClass({
inherits: EnumType,
statics: {
enumName: 'Field',
initVals: enums.FIELDS,
valuesByName () {
const fields = _.map(this.initVals, ([name, definition]) => {
const type = Enums.Type[definition.type]
const bytes = this.header(type.ordinal, definition.nth)
const ordinal = type.ordinal << 16 | definition.nth
const extra = { ordinal, name, type, bytes }
return new this(_.assign(definition, extra))
})
return _.keyBy(fields, 'name')
},
header (type, nth) {
const name = nth
const header = <any>[]
const push = header.push.bind(header)
if (type < 16) {
if (name < 16) {
push(type << 4 | name)
} else {
push(type << 4, name)
}
} else if (name < 16) {
push(name, type)
} else {
push(0, type, name)
}
return parseBytes(header, Uint8Array)
}
}
}, undefined)
export { Enums }

View File

@@ -1,134 +0,0 @@
/**
* Quick script to re-number values
*/
const input = {
'temBAD_SEND_XRP_PATHS': -283,
'temBAD_SEQUENCE': -282,
'temBAD_SIGNATURE': -281,
'temBAD_SRC_ACCOUNT': -280,
'temBAD_TRANSFER_RATE': -279,
'temDST_IS_SRC': -278,
'temDST_NEEDED': -277,
'temINVALID': -276,
'temINVALID_FLAG': -275,
'temREDUNDANT': -274,
'temRIPPLE_EMPTY': -273,
'temDISABLED': -272,
'temBAD_SIGNER': -271,
'temBAD_QUORUM': -270,
'temBAD_WEIGHT': -269,
'temBAD_TICK_SIZE': -268,
'temINVALID_ACCOUNT_ID': -267,
'temCANNOT_PREAUTH_SELF': -266,
'temUNCERTAIN': -265,
'temUNKNOWN': -264,
'tefFAILURE': -199,
'tefALREADY': -198,
'tefBAD_ADD_AUTH': -197,
'tefBAD_AUTH': -196,
'tefBAD_LEDGER': -195,
'tefCREATED': -194,
'tefEXCEPTION': -193,
'tefINTERNAL': -192,
'tefNO_AUTH_REQUIRED': -191,
'tefPAST_SEQ': -190,
'tefWRONG_PRIOR': -189,
'tefMASTER_DISABLED': -188,
'tefMAX_LEDGER': -187,
'tefBAD_SIGNATURE': -186,
'tefBAD_QUORUM': -185,
'tefNOT_MULTI_SIGNING': -184,
'tefBAD_AUTH_MASTER': -183,
'tefINVARIANT_FAILED': -182,
'tefTOO_BIG': -181,
'terRETRY': -99,
'terFUNDS_SPENT': -98,
'terINSUF_FEE_B': -97,
'terNO_ACCOUNT': -96,
'terNO_AUTH': -95,
'terNO_LINE': -94,
'terOWNERS': -93,
'terPRE_SEQ': -92,
'terLAST': -91,
'terNO_RIPPLE': -90,
'terQUEUED': -89,
'tesSUCCESS': 0,
'tecCLAIM': 100,
'tecPATH_PARTIAL': 101,
'tecUNFUNDED_ADD': 102,
'tecUNFUNDED_OFFER': 103,
'tecUNFUNDED_PAYMENT': 104,
'tecFAILED_PROCESSING': 105,
'tecDIR_FULL': 121,
'tecINSUF_RESERVE_LINE': 122,
'tecINSUF_RESERVE_OFFER': 123,
'tecNO_DST': 124,
'tecNO_DST_INSUF_XRP': 125,
'tecNO_LINE_INSUF_RESERVE': 126,
'tecNO_LINE_REDUNDANT': 127,
'tecPATH_DRY': 128,
'tecUNFUNDED': 129,
'tecNO_ALTERNATIVE_KEY': 130,
'tecNO_REGULAR_KEY': 131,
'tecOWNERS': 132,
'tecNO_ISSUER': 133,
'tecNO_AUTH': 134,
'tecNO_LINE': 135,
'tecINSUFF_FEE': 136,
'tecFROZEN': 137,
'tecNO_TARGET': 138,
'tecNO_PERMISSION': 139,
'tecNO_ENTRY': 140,
'tecINSUFFICIENT_RESERVE': 141,
'tecNEED_MASTER_KEY': 142,
'tecDST_TAG_NEEDED': 143,
'tecINTERNAL': 144,
'tecOVERSIZE': 145,
'tecCRYPTOCONDITION_ERROR': 146,
'tecINVARIANT_FAILED': 147,
'tecEXPIRED': 148,
'tecDUPLICATE': 149,
'tecKILLED': 150,
'tecHAS_OBLIGATIONS': 151,
'tecTOO_SOON': 152
};
let starting_from_temBAD_SEND_XRP_PATHS = -284;
let starting_from_tefFAILURE = -199;
let starting_from_terRETRY = -99;
const tesSUCCESS = 0;
let starting_from_tecCLAIM = 100;
const starting_from_tecDIR_FULL = 121;
let previousKey = 'tem';
Object.keys(input).forEach(key => {
if (key.substring(0, 3) !== previousKey.substring(0, 3)) {
console.log();
previousKey = key;
}
if (key.substring(0, 3) === 'tem') {
console.log(` "${key}": ${starting_from_temBAD_SEND_XRP_PATHS++},`);
} else if (key.substring(0, 3) === 'tef') {
console.log(` "${key}": ${starting_from_tefFAILURE++},`);
} else if (key.substring(0, 3) === 'ter') {
console.log(` "${key}": ${starting_from_terRETRY++},`);
} else if (key.substring(0, 3) === 'tes') {
console.log(` "${key}": ${tesSUCCESS},`);
} else if (key.substring(0, 3) === 'tec') {
if (key === 'tecDIR_FULL') {
starting_from_tecCLAIM = starting_from_tecDIR_FULL;
}
console.log(` "${key}": ${starting_from_tecCLAIM++},`);
}
});

View File

@@ -0,0 +1,134 @@
/**
* Quick script to re-number values
*/
const input = {
temBAD_SEND_XRP_PATHS: -283,
temBAD_SEQUENCE: -282,
temBAD_SIGNATURE: -281,
temBAD_SRC_ACCOUNT: -280,
temBAD_TRANSFER_RATE: -279,
temDST_IS_SRC: -278,
temDST_NEEDED: -277,
temINVALID: -276,
temINVALID_FLAG: -275,
temREDUNDANT: -274,
temRIPPLE_EMPTY: -273,
temDISABLED: -272,
temBAD_SIGNER: -271,
temBAD_QUORUM: -270,
temBAD_WEIGHT: -269,
temBAD_TICK_SIZE: -268,
temINVALID_ACCOUNT_ID: -267,
temCANNOT_PREAUTH_SELF: -266,
temUNCERTAIN: -265,
temUNKNOWN: -264,
tefFAILURE: -199,
tefALREADY: -198,
tefBAD_ADD_AUTH: -197,
tefBAD_AUTH: -196,
tefBAD_LEDGER: -195,
tefCREATED: -194,
tefEXCEPTION: -193,
tefINTERNAL: -192,
tefNO_AUTH_REQUIRED: -191,
tefPAST_SEQ: -190,
tefWRONG_PRIOR: -189,
tefMASTER_DISABLED: -188,
tefMAX_LEDGER: -187,
tefBAD_SIGNATURE: -186,
tefBAD_QUORUM: -185,
tefNOT_MULTI_SIGNING: -184,
tefBAD_AUTH_MASTER: -183,
tefINVARIANT_FAILED: -182,
tefTOO_BIG: -181,
terRETRY: -99,
terFUNDS_SPENT: -98,
terINSUF_FEE_B: -97,
terNO_ACCOUNT: -96,
terNO_AUTH: -95,
terNO_LINE: -94,
terOWNERS: -93,
terPRE_SEQ: -92,
terLAST: -91,
terNO_RIPPLE: -90,
terQUEUED: -89,
tesSUCCESS: 0,
tecCLAIM: 100,
tecPATH_PARTIAL: 101,
tecUNFUNDED_ADD: 102,
tecUNFUNDED_OFFER: 103,
tecUNFUNDED_PAYMENT: 104,
tecFAILED_PROCESSING: 105,
tecDIR_FULL: 121,
tecINSUF_RESERVE_LINE: 122,
tecINSUF_RESERVE_OFFER: 123,
tecNO_DST: 124,
tecNO_DST_INSUF_XRP: 125,
tecNO_LINE_INSUF_RESERVE: 126,
tecNO_LINE_REDUNDANT: 127,
tecPATH_DRY: 128,
tecUNFUNDED: 129,
tecNO_ALTERNATIVE_KEY: 130,
tecNO_REGULAR_KEY: 131,
tecOWNERS: 132,
tecNO_ISSUER: 133,
tecNO_AUTH: 134,
tecNO_LINE: 135,
tecINSUFF_FEE: 136,
tecFROZEN: 137,
tecNO_TARGET: 138,
tecNO_PERMISSION: 139,
tecNO_ENTRY: 140,
tecINSUFFICIENT_RESERVE: 141,
tecNEED_MASTER_KEY: 142,
tecDST_TAG_NEEDED: 143,
tecINTERNAL: 144,
tecOVERSIZE: 145,
tecCRYPTOCONDITION_ERROR: 146,
tecINVARIANT_FAILED: 147,
tecEXPIRED: 148,
tecDUPLICATE: 149,
tecKILLED: 150,
tecHAS_OBLIGATIONS: 151,
tecTOO_SOON: 152
}
let startingFromTemBADSENDXRPPATHS = -284
let startingFromTefFAILURE = -199
let startingFromTerRETRY = -99
const tesSUCCESS = 0
let startingFromTecCLAIM = 100
const startingFromTecDIRFULL = 121
let previousKey = 'tem'
Object.keys(input).forEach(key => {
if (key.substring(0, 3) !== previousKey.substring(0, 3)) {
console.log()
previousKey = key
}
if (key.substring(0, 3) === 'tem') {
console.log(` "${key}": ${startingFromTemBADSENDXRPPATHS++},`)
} else if (key.substring(0, 3) === 'tef') {
console.log(` "${key}": ${startingFromTefFAILURE++},`)
} else if (key.substring(0, 3) === 'ter') {
console.log(` "${key}": ${startingFromTerRETRY++},`)
} else if (key.substring(0, 3) === 'tes') {
console.log(` "${key}": ${tesSUCCESS},`)
} else if (key.substring(0, 3) === 'tec') {
if (key === 'tecDIR_FULL') {
startingFromTecCLAIM = startingFromTecDIRFULL
}
console.log(` "${key}": ${startingFromTecCLAIM++},`)
}
})

View File

@@ -1,4 +1,4 @@
const {serializeUIntN} = require('./utils/bytes-utils');
import { serializeUIntN } from './utils/bytes-utils';
function bytes(uint32) {
return serializeUIntN(uint32, 4);
@@ -26,6 +26,6 @@ const HashPrefix = {
paymentChannelClaim: bytes(0x434C4D00)
};
module.exports = {
export {
HashPrefix
};

View File

@@ -1,8 +1,8 @@
const makeClass = require('./utils/make-class');
const {HashPrefix} = require('./hash-prefixes');
const {Hash256} = require('./types');
const {parseBytes} = require('./utils/bytes-utils');
const createHash = require('create-hash');
import { makeClass } from './utils/make-class';
import { HashPrefix } from './hash-prefixes';
import { coreTypes } from './types';
import { parseBytes } from './utils/bytes-utils';
import * as createHash from 'create-hash';
const Sha512Half = makeClass({
Sha512Half() {
@@ -22,9 +22,9 @@ const Sha512Half = makeClass({
return bytes.slice(0, 32);
},
finish() {
return new Hash256(this.finish256());
return new coreTypes.Hash256(this.finish256());
}
});
}, undefined);
function sha512Half(...args) {
const hash = new Sha512Half();
@@ -33,10 +33,10 @@ function sha512Half(...args) {
}
function transactionID(serialized) {
return new Hash256(sha512Half(HashPrefix.transactionID, serialized));
return new coreTypes.Hash256(sha512Half(HashPrefix.transactionID, serialized));
}
module.exports = {
export {
Sha512Half,
sha512Half,
transactionID

View File

@@ -1,13 +1,14 @@
const assert = require('assert');
const coreTypes = require('./coretypes');
const {quality,
binary: {bytesToHex,
import {strict as assert} from 'assert';
import { quality, binary } from './coretypes';
import { coreTypes } from './types';
const { bytesToHex,
signingData,
signingClaimData,
multiSigningData,
binaryToJSON,
serializeObject,
BinaryParser}} = coreTypes;
BinaryParser } = binary;
function decodeLedgerData(binary) {
assert(typeof binary === 'string', 'binary must be a hex string');

View File

@@ -1,12 +1,12 @@
const _ = require('lodash');
const BN = require('bn.js');
const assert = require('assert');
const types = require('./types');
const {STObject, Hash256} = types;
const {ShaMap} = require('./shamap');
const {HashPrefix} = require('./hash-prefixes');
const {Sha512Half} = require('./hashes');
const {BinarySerializer, serializeObject} = require('./binary');
import * as _ from 'lodash'
import { BN } from 'bn.js';
import { strict as assert } from 'assert';
import { coreTypes } from './types';
const { STObject, Hash256 } = coreTypes;
import { ShaMap } from './shamap';
import { HashPrefix } from './hash-prefixes';
import { Sha512Half } from './hashes';
import { BinarySerializer, serializeObject } from './binary';
function computeHash(itemizer, itemsJson) {
const map = new ShaMap();
@@ -53,19 +53,19 @@ function ledgerHash(header) {
assert(header.parent_close_time !== undefined);
assert(header.close_flags !== undefined);
types.UInt32.from(header.ledger_index).toBytesSink(hash);
types.UInt64.from(new BN(header.total_coins)).toBytesSink(hash);
types.Hash256.from(header.parent_hash).toBytesSink(hash);
types.Hash256.from(header.transaction_hash).toBytesSink(hash);
types.Hash256.from(header.account_hash).toBytesSink(hash);
types.UInt32.from(header.parent_close_time).toBytesSink(hash);
types.UInt32.from(header.close_time).toBytesSink(hash);
types.UInt8.from(header.close_time_resolution).toBytesSink(hash);
types.UInt8.from(header.close_flags).toBytesSink(hash);
coreTypes.UInt32.from(header.ledger_index).toBytesSink(hash);
coreTypes.UInt64.from(new BN(header.total_coins)).toBytesSink(hash);
coreTypes.Hash256.from(header.parent_hash).toBytesSink(hash);
coreTypes.Hash256.from(header.transaction_hash).toBytesSink(hash);
coreTypes.Hash256.from(header.account_hash).toBytesSink(hash);
coreTypes.UInt32.from(header.parent_close_time).toBytesSink(hash);
coreTypes.UInt32.from(header.close_time).toBytesSink(hash);
coreTypes.UInt8.from(header.close_time_resolution).toBytesSink(hash);
coreTypes.UInt8.from(header.close_flags).toBytesSink(hash);
return hash.finish();
}
module.exports = {
export {
accountStateHash,
transactionTreeHash,
ledgerHash

View File

@@ -1,14 +1,14 @@
const Decimal = require('decimal.js');
const {bytesToHex, slice, parseBytes} = require('./utils/bytes-utils');
const {UInt64} = require('./types');
const BN = require('bn.js');
import { bytesToHex, slice, parseBytes } from './utils/bytes-utils';
import { coreTypes } from './types';
import { BN } from 'bn.js';
module.exports = {
encode(arg) {
const quality = arg instanceof Decimal ? arg : new Decimal(arg);
const exponent = quality.e - 15;
const qualityString = quality.times('1e' + -exponent).abs().toString();
const bytes = new UInt64(new BN(qualityString)).toBytes();
const bytes = new coreTypes.UInt64(new BN(qualityString)).toBytes();
bytes[0] = exponent + 100;
return bytes;
},

View File

@@ -1,99 +0,0 @@
const assert = require('assert');
const makeClass = require('../utils/make-class');
const {Field} = require('../enums');
const {slice, parseBytes} = require('../utils/bytes-utils');
const BinaryParser = makeClass({
BinaryParser(buf) {
this._buf = parseBytes(buf, Uint8Array);
this._length = this._buf.length;
this._cursor = 0;
},
skip(n) {
this._cursor += n;
},
read(n, to = Uint8Array) {
const start = this._cursor;
const end = this._cursor + n;
assert(end <= this._buf.length);
this._cursor = end;
return slice(this._buf, start, end, to);
},
readUIntN(n) {
return this.read(n, Array).reduce((a, b) => a << 8 | b) >>> 0;
},
readUInt8() {
return this._buf[this._cursor++];
},
readUInt16() {
return this.readUIntN(2);
},
readUInt32() {
return this.readUIntN(4);
},
pos() {
return this._cursor;
},
size() {
return this._buf.length;
},
end(customEnd) {
const cursor = this.pos();
return (cursor >= this._length) || (customEnd !== null &&
cursor >= customEnd);
},
readVL() {
return this.read(this.readVLLength());
},
readVLLength() {
const b1 = this.readUInt8();
if (b1 <= 192) {
return b1;
} else if (b1 <= 240) {
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
} else if (b1 <= 254) {
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error('Invalid varint length indicator');
},
readFieldOrdinal() {
const tagByte = this.readUInt8();
const type = (tagByte & 0xF0) >>> 4 || this.readUInt8();
const nth = tagByte & 0x0F || this.readUInt8();
return type << 16 | nth;
},
readField() {
return Field.from(this.readFieldOrdinal());
},
readType(type) {
return type.fromParser(this);
},
typeForField(field) {
return field.associatedType;
},
readFieldValue(field) {
const kls = this.typeForField(field);
if (!kls) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
const sizeHint = field.isVLEncoded ? this.readVLLength() : null;
const value = kls.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `);
}
return value;
},
readFieldAndValue() {
const field = this.readField();
return [field, this.readFieldValue(field)];
}
});
module.exports = {
BinaryParser
};

View File

@@ -0,0 +1,98 @@
import { strict as assert } from 'assert'
import { makeClass } from '../utils/make-class'
import { Enums } from '../enums'
import { slice, parseBytes } from '../utils/bytes-utils'
const BinaryParser = makeClass({
BinaryParser (buf) {
this._buf = parseBytes(buf, Uint8Array)
this._length = this._buf.length
this._cursor = 0
},
skip (n) {
this._cursor += n
},
read (n, to = Uint8Array) {
const start = this._cursor
const end = this._cursor + n
assert(end <= this._buf.length)
this._cursor = end
return slice(this._buf, start, end, to)
},
readUIntN (n) {
return this.read(n, Array).reduce((a, b) => a << 8 | b) >>> 0
},
readUInt8 () {
return this._buf[this._cursor++]
},
readUInt16 () {
return this.readUIntN(2)
},
readUInt32 () {
return this.readUIntN(4)
},
pos () {
return this._cursor
},
size () {
return this._buf.length
},
end (customEnd) {
const cursor = this.pos()
return (cursor >= this._length) || (customEnd !== null &&
cursor >= customEnd)
},
readVL () {
return this.read(this.readVLLength())
},
readVLLength () {
const b1 = this.readUInt8()
if (b1 <= 192) {
return b1
} else if (b1 <= 240) {
const b2 = this.readUInt8()
return 193 + (b1 - 193) * 256 + b2
} else if (b1 <= 254) {
const b2 = this.readUInt8()
const b3 = this.readUInt8()
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3
}
throw new Error('Invalid varint length indicator')
},
readFieldOrdinal () {
const tagByte = this.readUInt8()
const type = (tagByte & 0xF0) >>> 4 || this.readUInt8()
const nth = tagByte & 0x0F || this.readUInt8()
return type << 16 | nth
},
readField () {
return Enums.Field.from(this.readFieldOrdinal())
},
readType (type) {
return type.fromParser(this)
},
typeForField (field) {
return field.associatedType
},
readFieldValue (field) {
const kls = this.typeForField(field)
if (!kls) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`)
}
const sizeHint = field.isVLEncoded ? this.readVLLength() : null
const value = kls.fromParser(this, sizeHint)
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `)
}
return value
},
readFieldAndValue () {
const field = this.readField()
return [field, this.readFieldValue(field)]
}
}, undefined)
export {
BinaryParser
}

View File

@@ -1,107 +0,0 @@
const assert = require('assert');
const {parseBytes, bytesToHex} = require('../utils/bytes-utils');
const makeClass = require('../utils/make-class');
const {Type, Field} = require('../enums');
const BytesSink = {
put(/* bytesSequence */) {
// any hex string or any object with a `length` and where 0 <= [ix] <= 255
}
};
const BytesList = makeClass({
implementing: BytesSink,
BytesList() {
this.arrays = [];
this.length = 0;
},
put(bytesArg) {
const bytes = parseBytes(bytesArg, Uint8Array);
this.length += bytes.length;
this.arrays.push(bytes);
return this;
},
toBytesSink(sink) {
this.arrays.forEach(arr => {
sink.put(arr);
});
},
toBytes() {
const concatenated = new Uint8Array(this.length);
let pointer = 0;
this.arrays.forEach(arr => {
concatenated.set(arr, pointer);
pointer += arr.length;
});
return concatenated;
},
toHex() {
return bytesToHex(this.toBytes());
}
});
const BinarySerializer = makeClass({
BinarySerializer(sink) {
this.sink = sink;
},
write(value) {
value.toBytesSink(this.sink);
},
put(bytes) {
this.sink.put(bytes);
},
writeType(type, value) {
this.write(type.from(value));
},
writeBytesList(bl) {
bl.toBytesSink(this.sink);
},
encodeVL(len) {
let length = len;
const lenBytes = new Uint8Array(4);
if (length <= 192) {
lenBytes[0] = length;
return lenBytes.subarray(0, 1);
} else if (length <= 12480) {
length -= 193;
lenBytes[0] = 193 + (length >>> 8);
lenBytes[1] = length & 0xff;
return lenBytes.subarray(0, 2);
} else if (length <= 918744) {
length -= 12481;
lenBytes[0] = 241 + (length >>> 16);
lenBytes[1] = (length >> 8) & 0xff;
lenBytes[2] = length & 0xff;
return lenBytes.subarray(0, 3);
}
throw new Error('Overflow error');
},
writeFieldAndValue(field, _value) {
const sink = this.sink;
const value = field.associatedType.from(_value);
assert(value.toBytesSink, field);
sink.put(field.bytes);
if (field.isVLEncoded) {
this.writeLengthEncoded(value);
} else {
value.toBytesSink(sink);
if (field.type === Type.STObject) {
sink.put(Field.ObjectEndMarker.bytes);
} else if (field.type === Type.STArray) {
sink.put(Field.ArrayEndMarker.bytes);
}
}
},
writeLengthEncoded(value) {
const bytes = new BytesList();
value.toBytesSink(bytes);
this.put(this.encodeVL(bytes.length));
this.writeBytesList(bytes);
}
});
module.exports = {
BytesList,
BinarySerializer
};

View File

@@ -0,0 +1,107 @@
import { strict as assert } from 'assert'
import { parseBytes, bytesToHex } from '../utils/bytes-utils'
import { makeClass } from '../utils/make-class'
import { Enums } from '../enums'
const BytesSink = {
put (/* bytesSequence */) {
// any hex string or any object with a `length` and where 0 <= [ix] <= 255
}
}
const BytesList = makeClass({
implementing: BytesSink,
BytesList () {
this.arrays = []
this.length = 0
},
put (bytesArg) {
const bytes = parseBytes(bytesArg, Uint8Array)
this.length += bytes.length
this.arrays.push(bytes)
return this
},
toBytesSink (sink) {
this.arrays.forEach(arr => {
sink.put(arr)
})
},
toBytes () {
const concatenated = new Uint8Array(this.length)
let pointer = 0
this.arrays.forEach(arr => {
concatenated.set(arr, pointer)
pointer += arr.length
})
return concatenated
},
toHex () {
return bytesToHex(this.toBytes())
}
}, undefined)
const BinarySerializer = makeClass({
BinarySerializer (sink) {
this.sink = sink
},
write (value) {
value.toBytesSink(this.sink)
},
put (bytes) {
this.sink.put(bytes)
},
writeType (type, value) {
this.write(type.from(value))
},
writeBytesList (bl) {
bl.toBytesSink(this.sink)
},
encodeVL (len) {
let length = len
const lenBytes = new Uint8Array(4)
if (length <= 192) {
lenBytes[0] = length
return lenBytes.subarray(0, 1)
} else if (length <= 12480) {
length -= 193
lenBytes[0] = 193 + (length >>> 8)
lenBytes[1] = length & 0xff
return lenBytes.subarray(0, 2)
} else if (length <= 918744) {
length -= 12481
lenBytes[0] = 241 + (length >>> 16)
lenBytes[1] = (length >> 8) & 0xff
lenBytes[2] = length & 0xff
return lenBytes.subarray(0, 3)
}
throw new Error('Overflow error')
},
writeFieldAndValue (field, _value) {
const sink = this.sink
const value = field.associatedType.from(_value)
assert(value.toBytesSink, field)
sink.put(field.bytes)
if (field.isVLEncoded) {
this.writeLengthEncoded(value)
} else {
value.toBytesSink(sink)
if (field.type === Enums.Type.STObject) {
sink.put(Enums.Field.ObjectEndMarker.bytes)
} else if (field.type === Enums.Type.STArray) {
sink.put(Enums.Field.ArrayEndMarker.bytes)
}
}
},
writeLengthEncoded (value) {
const bytes = new BytesList()
value.toBytesSink(bytes)
this.put(this.encodeVL(bytes.length))
this.writeBytesList(bytes)
}
}, undefined)
export {
BytesList,
BinarySerializer
}

View File

@@ -1,8 +1,8 @@
const assert = require('assert');
const makeClass = require('./utils/make-class');
const {Hash256} = require('./types');
const {HashPrefix} = require('./hash-prefixes');
const {Sha512Half: Hasher} = require('./hashes');
import { strict as assert } from 'assert'
import { makeClass } from './utils/make-class';
import { coreTypes } from './types';
import { HashPrefix } from './hash-prefixes';
import { Sha512Half } from './hashes';
const ShaMapNode = makeClass({
virtuals: {
@@ -12,12 +12,12 @@ const ShaMapNode = makeClass({
},
cached: {
hash() {
const hasher = Hasher.put(this.hashPrefix());
const hasher = Sha512Half.put(this.hashPrefix());
this.toBytesSink(hasher);
return hasher.finish();
}
}
});
}, undefined);
const ShaMapLeaf = makeClass({
inherits: ShaMapNode,
@@ -39,7 +39,7 @@ const ShaMapLeaf = makeClass({
this.item.toBytesSink(sink);
this.index.toBytesSink(sink);
}
});
}, undefined);
const $uper = ShaMapNode.prototype;
@@ -69,19 +69,19 @@ const ShaMapInner = makeClass({
},
hash() {
if (this.empty()) {
return Hash256.ZERO_256;
return coreTypes.Hash256.ZERO_256;
}
return $uper.hash.call(this);
},
toBytesSink(sink) {
for (let i = 0; i < this.branches.length; i++) {
const branch = this.branches[i];
const hash = branch ? branch.hash() : Hash256.ZERO_256;
const hash = branch ? branch.hash() : coreTypes.Hash256.ZERO_256;
hash.toBytesSink(sink);
}
},
addItem(index, item, leaf) {
assert(index instanceof Hash256);
assert(index instanceof coreTypes.Hash256);
const nibble = index.nibblet(this.depth);
const existing = this.branches[nibble];
if (!existing) {
@@ -97,12 +97,12 @@ const ShaMapInner = makeClass({
assert(false);
}
}
});
}, undefined);
const ShaMap = makeClass({
inherits: ShaMapInner
});
}, undefined);
module.exports = {
export {
ShaMap
};

View File

@@ -1,42 +0,0 @@
const makeClass = require('../utils/make-class');
const {decodeAccountID, encodeAccountID} = require('ripple-address-codec');
const {Hash160} = require('./hash-160');
const AccountID = makeClass({
AccountID(bytes) {
Hash160.call(this, bytes);
},
inherits: Hash160,
statics: {
from(value) {
return value instanceof this ? value :
/^r/.test(value) ? this.fromBase58(value) :
new this(value);
},
cache: {},
fromCache(base58) {
let cached = this.cache[base58];
if (!cached) {
cached = this.cache[base58] = this.fromBase58(base58);
}
return cached;
},
fromBase58(value) {
const acc = new this(decodeAccountID(value));
acc._toBase58 = value;
return acc;
}
},
toJSON() {
return this.toBase58();
},
cached: {
toBase58() {
return encodeAccountID(this._bytes);
}
}
});
module.exports = {
AccountID
};

View File

@@ -0,0 +1,42 @@
import { makeClass } from '../utils/make-class'
const { decodeAccountID, encodeAccountID } = require('ripple-address-codec')
const { Hash160 } = require('./hash-160')
// 160-bit account identifier; extends Hash160 with base58 encode/decode
// and a cache of previously decoded base58 addresses.
const AccountID = makeClass({
  AccountID (bytes) {
    Hash160.call(this, bytes)
  },
  inherits: Hash160,
  statics: {
    // Accept an existing AccountID, a base58 address string (classic
    // addresses start with 'r'), or raw bytes.
    from (value) {
      return value instanceof this ? value
        : /^r/.test(value) ? this.fromBase58(value)
          : new this(value)
    },
    cache: {},
    // Memoized base58 decoding; repeated lookups return the same instance.
    fromCache (base58) {
      let cached = this.cache[base58]
      if (!cached) {
        cached = this.cache[base58] = this.fromBase58(base58)
      }
      return cached
    },
    fromBase58 (value) {
      const acc = new this(decodeAccountID(value))
      // remember the original string so toBase58() is free for this instance
      acc._toBase58 = value
      return acc
    }
  },
  // JSON form is the base58 address, not hex.
  toJSON () {
    return this.toBase58()
  },
  cached: {
    toBase58 () {
      return encodeAccountID(this._bytes)
    }
  }
}, undefined)
export {
  AccountID
}

View File

@@ -1,216 +0,0 @@
const _ = require('lodash');
const assert = require('assert');
const BN = require('bn.js');
const Decimal = require('decimal.js');
const makeClass = require('../utils/make-class');
const {SerializedType} = require('./serialized-type');
const {bytesToHex} = require('../utils/bytes-utils');
const {Currency} = require('./currency');
const {AccountID} = require('./account-id');
const {UInt64} = require('./uint-64');
const MIN_IOU_EXPONENT = -96;
const MAX_IOU_EXPONENT = 80;
const MAX_IOU_PRECISION = 16;
const MIN_IOU_MANTISSA = '1000' + '0000' + '0000' + '0000'; // 16 digits
const MAX_IOU_MANTISSA = '9999' + '9999' + '9999' + '9999'; // ..
const MAX_IOU = new Decimal(`${MAX_IOU_MANTISSA}e${MAX_IOU_EXPONENT}`);
const MIN_IOU = new Decimal(`${MIN_IOU_MANTISSA}e${MIN_IOU_EXPONENT}`);
const DROPS_PER_XRP = new Decimal('1e6');
const MAX_NETWORK_DROPS = new Decimal('1e17');
const MIN_XRP = new Decimal('1e-6')
const MAX_XRP = MAX_NETWORK_DROPS.dividedBy(DROPS_PER_XRP);
// Never use exponential form
Decimal.config({
toExpPos: MAX_IOU_EXPONENT + MAX_IOU_PRECISION,
toExpNeg: MIN_IOU_EXPONENT - MAX_IOU_PRECISION
});
const AMOUNT_PARAMETERS_DESCRIPTION = `
Native values must be described in drops, a million of which equal one XRP.
This must be an integer number, with the absolute value not exceeding \
${MAX_NETWORK_DROPS}
IOU values must have a maximum precision of ${MAX_IOU_PRECISION} significant \
digits. They are serialized as\na canonicalised mantissa and exponent.
The valid range for a mantissa is between ${MIN_IOU_MANTISSA} and \
${MAX_IOU_MANTISSA}
The exponent must be >= ${MIN_IOU_EXPONENT} and <= ${MAX_IOU_EXPONENT}
Thus the largest serializable IOU value is:
${MAX_IOU.toString()}
And the smallest:
${MIN_IOU.toString()}
`
function isDefined(val) {
return !_.isUndefined(val);
}
function raiseIllegalAmountError(value) {
throw new Error(`${value.toString()} is an illegal amount\n` +
AMOUNT_PARAMETERS_DESCRIPTION);
}
const parsers = {
string(str) {
// Using /^\d+$/ here fixes #31
if (!str.match(/^\d+$/)) {
raiseIllegalAmountError(str);
}
return [new Decimal(str).dividedBy(DROPS_PER_XRP), Currency.XRP];
},
object(object) {
assert(isDefined(object.currency), 'currency must be defined');
assert(isDefined(object.issuer), 'issuer must be defined');
return [new Decimal(object.value),
Currency.from(object.currency),
AccountID.from(object.issuer)];
}
};
const Amount = makeClass({
Amount(value, currency, issuer, validate = true) {
this.value = value || new Decimal('0');
this.currency = currency || Currency.XRP;
this.issuer = issuer || null;
if (validate) {
this.assertValueIsValid();
}
},
mixins: SerializedType,
statics: {
from(value) {
if (value instanceof this) {
return value;
}
const parser = parsers[typeof value];
if (parser) {
return new this(...parser(value));
}
throw new Error(`unsupported value: ${value}`);
},
fromParser(parser) {
const mantissa = parser.read(8);
const b1 = mantissa[0];
const b2 = mantissa[1];
const isIOU = b1 & 0x80;
const isPositive = b1 & 0x40;
const sign = isPositive ? '' : '-';
if (isIOU) {
mantissa[0] = 0;
const currency = parser.readType(Currency);
const issuer = parser.readType(AccountID);
const exponent = ((b1 & 0x3F) << 2) + ((b2 & 0xff) >> 6) - 97;
mantissa[1] &= 0x3F;
// decimal.js won't accept e notation with hex
const value = new Decimal(`${sign}0x${bytesToHex(mantissa)}`)
.times('1e' + exponent);
return new this(value, currency, issuer, false);
}
mantissa[0] &= 0x3F;
const drops = new Decimal(`${sign}0x${bytesToHex(mantissa)}`);
const xrpValue = drops.dividedBy(DROPS_PER_XRP);
return new this(xrpValue, Currency.XRP, null, false);
}
},
assertValueIsValid() {
// zero is always a valid amount value
if (!this.isZero()) {
if (this.isNative()) {
const abs = this.value.abs();
if (abs.lt(MIN_XRP) || abs.gt(MAX_XRP)) {
// value is in XRP scale, but show the value in canonical json form
raiseIllegalAmountError(this.value.times(DROPS_PER_XRP))
}
this.verifyNoDecimal(this.value); // This is a secondary fix for #31
} else {
const p = this.value.precision();
const e = this.exponent();
if (p > MAX_IOU_PRECISION ||
e > MAX_IOU_EXPONENT ||
e < MIN_IOU_EXPONENT) {
raiseIllegalAmountError(this.value)
}
}
}
},
isNative() {
return this.currency.isNative();
},
mantissa() {
// This is a tertiary fix for #31
const integerNumberString = this.verifyNoDecimal();
return new UInt64(
new BN(integerNumberString));
},
verifyNoDecimal() {
const integerNumberString = this.value
.times('1e' + -this.exponent()).abs().toString();
// Ensure that the value (after being multiplied by the exponent)
// does not contain a decimal. From the bn.js README:
// "decimals are not supported in this library."
// eslint-disable-next-line max-len
// https://github.com/indutny/bn.js/blob/9cb459f044853b46615464eea1a3ddfc7006463b/README.md
if (integerNumberString.indexOf('.') !== -1) {
raiseIllegalAmountError(integerNumberString);
}
return integerNumberString;
},
isZero() {
return this.value.isZero();
},
exponent() {
return this.isNative() ? -6 : this.value.e - 15;
},
valueString() {
return (this.isNative() ? this.value.times(DROPS_PER_XRP) : this.value)
.toString();
},
toBytesSink(sink) {
const isNative = this.isNative();
const notNegative = !this.value.isNegative();
const mantissa = this.mantissa().toBytes();
if (isNative) {
mantissa[0] |= notNegative ? 0x40 : 0;
sink.put(mantissa);
} else {
mantissa[0] |= 0x80;
if (!this.isZero()) {
if (notNegative) {
mantissa[0] |= 0x40;
}
const exponent = this.value.e - 15;
const exponentByte = 97 + exponent;
mantissa[0] |= (exponentByte >>> 2);
mantissa[1] |= (exponentByte & 0x03) << 6;
}
sink.put(mantissa);
this.currency.toBytesSink(sink);
this.issuer.toBytesSink(sink);
}
},
toJSON() {
const valueString = this.valueString();
if (this.isNative()) {
return valueString;
}
return {
value: valueString,
currency: this.currency.toJSON(),
issuer: this.issuer.toJSON()
};
}
});
module.exports = {
Amount
};

View File

@@ -0,0 +1,216 @@
import { makeClass } from '../utils/make-class'
const _ = require('lodash')
const assert = require('assert')
const BN = require('bn.js')
const Decimal = require('decimal.js')
const { SerializedType } = require('./serialized-type')
const { bytesToHex } = require('../utils/bytes-utils')
const { Currency } = require('./currency')
const { AccountID } = require('./account-id')
const { UInt64 } = require('./uint-64')
const MIN_IOU_EXPONENT = -96
const MAX_IOU_EXPONENT = 80
const MAX_IOU_PRECISION = 16
const MIN_IOU_MANTISSA = '1000' + '0000' + '0000' + '0000' // 16 digits
const MAX_IOU_MANTISSA = '9999' + '9999' + '9999' + '9999' // ..
const MAX_IOU = new Decimal(`${MAX_IOU_MANTISSA}e${MAX_IOU_EXPONENT}`)
const MIN_IOU = new Decimal(`${MIN_IOU_MANTISSA}e${MIN_IOU_EXPONENT}`)
const DROPS_PER_XRP = new Decimal('1e6')
const MAX_NETWORK_DROPS = new Decimal('1e17')
const MIN_XRP = new Decimal('1e-6')
const MAX_XRP = MAX_NETWORK_DROPS.dividedBy(DROPS_PER_XRP)
// Never use exponential form
Decimal.config({
toExpPos: MAX_IOU_EXPONENT + MAX_IOU_PRECISION,
toExpNeg: MIN_IOU_EXPONENT - MAX_IOU_PRECISION
})
const AMOUNT_PARAMETERS_DESCRIPTION = `
Native values must be described in drops, a million of which equal one XRP.
This must be an integer number, with the absolute value not exceeding \
${MAX_NETWORK_DROPS}
IOU values must have a maximum precision of ${MAX_IOU_PRECISION} significant \
digits. They are serialized as\na canonicalised mantissa and exponent.
The valid range for a mantissa is between ${MIN_IOU_MANTISSA} and \
${MAX_IOU_MANTISSA}
The exponent must be >= ${MIN_IOU_EXPONENT} and <= ${MAX_IOU_EXPONENT}
Thus the largest serializable IOU value is:
${MAX_IOU.toString()}
And the smallest:
${MIN_IOU.toString()}
`
/**
 * Return true unless `val` is `undefined`; `null` and other falsy values
 * count as defined. A direct comparison replaces the needless lodash
 * `_.isUndefined` indirection (behavior is identical).
 * @param val - value to test
 * @returns true when val is not undefined
 */
function isDefined (val) {
  return val !== undefined
}
// Throw a descriptive error for out-of-range or malformed amounts.
function raiseIllegalAmountError (value) {
  throw new Error(`${value.toString()} is an illegal amount\n` +
    AMOUNT_PARAMETERS_DESCRIPTION)
}
// Dispatch table keyed by `typeof value`, used by Amount.from():
// strings are native XRP drop counts; objects are IOUs of the form
// { value, currency, issuer }.
const parsers = {
  string (str) {
    // Only non-negative integer drop counts are legal.
    // Using /^\d+$/ here fixes #31
    if (!str.match(/^\d+$/)) {
      raiseIllegalAmountError(str)
    }
    // store internally in XRP scale (drops / 1e6)
    return [new Decimal(str).dividedBy(DROPS_PER_XRP), Currency.XRP]
  },
  object (object) {
    assert(isDefined(object.currency), 'currency must be defined')
    assert(isDefined(object.issuer), 'issuer must be defined')
    return [new Decimal(object.value),
      Currency.from(object.currency),
      AccountID.from(object.issuer)]
  }
}
// Amount is either native XRP (value held internally in XRP scale, i.e.
// drops / 1e6, currency = Currency.XRP, issuer = null) or an IOU with a
// currency and issuer.
const Amount = makeClass({
  Amount (value, currency, issuer, validate = true) {
    this.value = value || new Decimal('0')
    this.currency = currency || Currency.XRP
    this.issuer = issuer || null
    // validate=false is used by fromParser, which trusts wire data
    if (validate) {
      this.assertValueIsValid()
    }
  },
  mixins: SerializedType,
  statics: {
    from (value) {
      if (value instanceof this) {
        return value
      }
      // 'string' -> XRP drops, 'object' -> IOU (see `parsers` above)
      const parser = parsers[typeof value]
      if (parser) {
        return new this(...parser(value))
      }
      throw new Error(`unsupported value: ${value}`)
    },
    // Decode an 8-byte amount. Top bit of the first byte selects IOU vs
    // native; the next bit (0x40) is the sign (set = positive).
    fromParser (parser) {
      const mantissa = parser.read(8)
      const b1 = mantissa[0]
      const b2 = mantissa[1]
      const isIOU = b1 & 0x80
      const isPositive = b1 & 0x40
      const sign = isPositive ? '' : '-'
      if (isIOU) {
        mantissa[0] = 0
        const currency = parser.readType(Currency)
        const issuer = parser.readType(AccountID)
        // 8-bit exponent straddles the first two bytes, biased by 97
        const exponent = ((b1 & 0x3F) << 2) + ((b2 & 0xff) >> 6) - 97
        mantissa[1] &= 0x3F
        // decimal.js won't accept e notation with hex
        const value = new Decimal(`${sign}0x${bytesToHex(mantissa)}`)
          .times('1e' + exponent)
        return new this(value, currency, issuer, false)
      }
      mantissa[0] &= 0x3F
      const drops = new Decimal(`${sign}0x${bytesToHex(mantissa)}`)
      const xrpValue = drops.dividedBy(DROPS_PER_XRP)
      return new this(xrpValue, Currency.XRP, null, false)
    }
  },
  // Enforce the range/precision rules in AMOUNT_PARAMETERS_DESCRIPTION;
  // raises via raiseIllegalAmountError on violation.
  assertValueIsValid () {
    // zero is always a valid amount value
    if (!this.isZero()) {
      if (this.isNative()) {
        const abs = this.value.abs()
        if (abs.lt(MIN_XRP) || abs.gt(MAX_XRP)) {
          // value is in XRP scale, but show the value in canonical json form
          raiseIllegalAmountError(this.value.times(DROPS_PER_XRP))
        }
        this.verifyNoDecimal(this.value) // This is a secondary fix for #31
      } else {
        const p = this.value.precision()
        const e = this.exponent()
        if (p > MAX_IOU_PRECISION ||
          e > MAX_IOU_EXPONENT ||
          e < MIN_IOU_EXPONENT) {
          raiseIllegalAmountError(this.value)
        }
      }
    }
  },
  isNative () {
    return this.currency.isNative()
  },
  // Unsigned 64-bit mantissa used for serialization.
  mantissa () {
    // This is a tertiary fix for #31
    const integerNumberString = this.verifyNoDecimal()
    return new UInt64(
      new BN(integerNumberString))
  },
  // Scale the value to an integer string; raises if a fractional part
  // remains, since bn.js cannot represent decimals.
  verifyNoDecimal () {
    const integerNumberString = this.value
      .times('1e' + -this.exponent()).abs().toString()
    // Ensure that the value (after being multiplied by the exponent)
    // does not contain a decimal. From the bn.js README:
    // "decimals are not supported in this library."
    // eslint-disable-next-line max-len
    // https://github.com/indutny/bn.js/blob/9cb459f044853b46615464eea1a3ddfc7006463b/README.md
    if (integerNumberString.indexOf('.') !== -1) {
      raiseIllegalAmountError(integerNumberString)
    }
    return integerNumberString
  },
  isZero () {
    return this.value.isZero()
  },
  // Native amounts are fixed-point drops (1e-6); IOUs normalize to a
  // 16-digit mantissa, hence value.e - 15.
  exponent () {
    return this.isNative() ? -6 : this.value.e - 15
  },
  // Canonical JSON value: drop count for XRP, decimal string for IOUs.
  valueString () {
    return (this.isNative() ? this.value.times(DROPS_PER_XRP) : this.value)
      .toString()
  },
  toBytesSink (sink) {
    const isNative = this.isNative()
    const notNegative = !this.value.isNegative()
    const mantissa = this.mantissa().toBytes()
    if (isNative) {
      // native: 0x80 clear; 0x40 marks a non-negative amount
      mantissa[0] |= notNegative ? 0x40 : 0
      sink.put(mantissa)
    } else {
      mantissa[0] |= 0x80 // IOU marker bit
      if (!this.isZero()) {
        if (notNegative) {
          mantissa[0] |= 0x40
        }
        // pack the biased (+97) exponent across the top bits of bytes 0-1
        const exponent = this.value.e - 15
        const exponentByte = 97 + exponent
        mantissa[0] |= (exponentByte >>> 2)
        mantissa[1] |= (exponentByte & 0x03) << 6
      }
      sink.put(mantissa)
      this.currency.toBytesSink(sink)
      this.issuer.toBytesSink(sink)
    }
  },
  toJSON () {
    const valueString = this.valueString()
    if (this.isNative()) {
      return valueString
    }
    return {
      value: valueString,
      currency: this.currency.toJSON(),
      issuer: this.issuer.toJSON()
    }
  }
}, undefined)
export {
  Amount
}

View File

@@ -1,29 +0,0 @@
const makeClass = require('../utils/make-class');
const {parseBytes} = require('../utils/bytes-utils');
const {SerializedType} = require('./serialized-type');
const Blob = makeClass({
mixins: SerializedType,
Blob(bytes) {
if (bytes) {
this._bytes = parseBytes(bytes, Uint8Array);
} else {
this._bytes = new Uint8Array(0);
}
},
statics: {
fromParser(parser, hint) {
return new this(parser.read(hint));
},
from(value) {
if (value instanceof this) {
return value;
}
return new this(value);
}
}
});
module.exports = {
Blob
};

View File

@@ -0,0 +1,29 @@
import { makeClass } from '../utils/make-class'
import { parseBytes } from '../utils/bytes-utils'
import { SerializedType } from './serialized-type'
// Opaque variable-length byte blob (signatures, keys, memo data, ...).
const Blob = makeClass({
  mixins: SerializedType,
  Blob (bytes) {
    if (bytes) {
      this._bytes = parseBytes(bytes, Uint8Array)
    } else {
      // default to an empty buffer rather than leaving _bytes undefined
      this._bytes = new Uint8Array(0)
    }
  },
  statics: {
    // `hint` is the number of bytes to consume from the parser.
    fromParser (parser, hint) {
      return new this(parser.read(hint))
    },
    from (value) {
      if (value instanceof this) {
        return value
      }
      return new this(value)
    }
  }
}, undefined)
export {
  Blob
}

View File

@@ -1,92 +0,0 @@
const _ = require('lodash');
const makeClass = require('../utils/make-class');
const {slice} = require('../utils/bytes-utils');
const {Hash160} = require('./hash-160');
const ISO_REGEX = /^[A-Z0-9]{3}$/;
const HEX_REGEX = /^[A-F0-9]{40}$/;
function isoToBytes(iso) {
const bytes = new Uint8Array(20);
if (iso !== 'XRP') {
const isoBytes = iso.split('').map(c => c.charCodeAt(0));
bytes.set(isoBytes, 12);
}
return bytes;
}
function isISOCode(val) {
return val.length === 3; // ISO_REGEX.test(val);
}
function isHex(val) {
return HEX_REGEX.test(val);
}
function isStringRepr(val) {
return _.isString(val) && (isISOCode(val) || isHex(val));
}
function isBytesArray(val) {
return val.length === 20;
}
function isValidRepr(val) {
return isStringRepr(val) || isBytesArray(val);
}
function bytesFromRepr(val) {
if (isValidRepr(val)) {
// We assume at this point that we have an object with a length, either 3,
// 20 or 40.
return val.length === 3 ? isoToBytes(val) : val;
}
throw new Error(`Unsupported Currency repr: ${val}`);
}
const $uper = Hash160.prototype;
const Currency = makeClass({
inherits: Hash160,
getters: ['isNative', 'iso'],
statics: {
init() {
this.XRP = new this(new Uint8Array(20));
},
from(val) {
return val instanceof this ? val : new this(bytesFromRepr(val));
}
},
Currency(bytes) {
Hash160.call(this, bytes);
this.classify();
},
classify() {
// We only have a non null iso() property available if the currency can be
// losslessly represented by the 3 letter iso code. If none is available a
// hex encoding of the full 20 bytes is the canonical representation.
let onlyISO = true;
const bytes = this._bytes;
const code = slice(this._bytes, 12, 15, Array);
const iso = code.map(c => String.fromCharCode(c)).join('');
for (let i = bytes.length - 1; i >= 0; i--) {
if (bytes[i] !== 0 && !(i === 12 || i === 13 || i === 14)) {
onlyISO = false;
break;
}
}
const lossLessISO = onlyISO && iso !== 'XRP' && ISO_REGEX.test(iso);
this._isNative = onlyISO && _.isEqual(code, [0, 0, 0]);
this._iso = this._isNative ? 'XRP' : lossLessISO ? iso : null;
},
toJSON() {
if (this.iso()) {
return this.iso();
}
return $uper.toJSON.call(this);
}
});
module.exports = {
Currency
};

View File

@@ -0,0 +1,92 @@
import { makeClass } from '../utils/make-class'
const _ = require('lodash')
const { slice } = require('../utils/bytes-utils')
const { Hash160 } = require('./hash-160')
const ISO_REGEX = /^[A-Z0-9]{3}$/
const HEX_REGEX = /^[A-F0-9]{40}$/
/**
 * Lay a 3-letter currency code into a fresh 20-byte Currency buffer.
 * The code's char codes occupy bytes 12-14; 'XRP' maps to the all-zero
 * buffer, which is the native currency's canonical form.
 */
function isoToBytes (iso) {
  const buf = new Uint8Array(20)
  if (iso === 'XRP') {
    return buf
  }
  const charCodes = []
  for (let i = 0; i < iso.length; i++) {
    charCodes.push(iso.charCodeAt(i))
  }
  buf.set(charCodes, 12)
  return buf
}
// NOTE: the strict ISO_REGEX check is intentionally disabled here; any
// 3-character value is treated as an ISO code.
function isISOCode (val) {
  return val.length === 3 // ISO_REGEX.test(val);
}
function isHex (val) {
  return HEX_REGEX.test(val)
}
// A string repr is either a 3-char ISO code or 40 uppercase hex chars.
function isStringRepr (val) {
  return _.isString(val) && (isISOCode(val) || isHex(val))
}
function isBytesArray (val) {
  return val.length === 20
}
function isValidRepr (val) {
  return isStringRepr(val) || isBytesArray(val)
}
// Convert an accepted representation to bytes. Hex strings and 20-byte
// arrays are passed through for the Hash160 constructor to parse.
function bytesFromRepr (val) {
  if (isValidRepr(val)) {
    // We assume at this point that we have an object with a length, either 3,
    // 20 or 40.
    return val.length === 3 ? isoToBytes(val) : val
  }
  throw new Error(`Unsupported Currency repr: ${val}`)
}
// Keep Hash160's prototype around for the super toJSON call below.
const $uper = Hash160.prototype
// 160-bit currency code; the all-zero value is native XRP.
const Currency = makeClass({
  inherits: Hash160,
  getters: ['isNative', 'iso'],
  statics: {
    init () {
      // shared constant for the native currency
      this.XRP = new this(new Uint8Array(20))
    },
    from (val) {
      return val instanceof this ? val : new this(bytesFromRepr(val))
    }
  },
  Currency (bytes) {
    Hash160.call(this, bytes)
    this.classify()
  },
  classify () {
    // We only have a non null iso() property available if the currency can be
    // losslessly represented by the 3 letter iso code. If none is available a
    // hex encoding of the full 20 bytes is the canonical representation.
    let onlyISO = true
    const bytes = this._bytes
    // bytes 12-14 hold the candidate 3-letter code
    const code = slice(this._bytes, 12, 15, Array)
    const iso = code.map(c => String.fromCharCode(c)).join('')
    for (let i = bytes.length - 1; i >= 0; i--) {
      // any non-zero byte outside positions 12-14 disqualifies the ISO form
      if (bytes[i] !== 0 && !(i === 12 || i === 13 || i === 14)) {
        onlyISO = false
        break
      }
    }
    const lossLessISO = onlyISO && iso !== 'XRP' && ISO_REGEX.test(iso)
    // native XRP is the all-zero buffer (code bytes all zero as well)
    this._isNative = onlyISO && _.isEqual(code, [0, 0, 0])
    this._iso = this._isNative ? 'XRP' : lossLessISO ? iso : null
  },
  toJSON () {
    if (this.iso()) {
      return this.iso()
    }
    // fall back to Hash160's hex representation
    return $uper.toJSON.call(this)
  }
}, undefined)
export {
  Currency
}

View File

@@ -1,11 +0,0 @@
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
const Hash128 = makeClass({
inherits: Hash,
statics: {width: 16}
});
module.exports = {
Hash128
};

View File

@@ -0,0 +1,11 @@
import { makeClass } from '../utils/make-class'
import { Hash } from './hash'
// 128-bit (16-byte) hash type.
const Hash128 = makeClass({
  inherits: Hash,
  statics: { width: 16 }
}, undefined)
export {
  Hash128
}

View File

@@ -1,11 +0,0 @@
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
const Hash160 = makeClass({
inherits: Hash,
statics: {width: 20}
});
module.exports = {
Hash160
};

View File

@@ -0,0 +1,11 @@
import { makeClass } from '../utils/make-class'
const { Hash } = require('./hash')
const Hash160 = makeClass({
inherits: Hash,
statics: { width: 20 }
}, undefined)
export {
Hash160
}

View File

@@ -1,16 +0,0 @@
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
const Hash256 = makeClass({
inherits: Hash,
statics: {
width: 32,
init() {
this.ZERO_256 = new this(new Uint8Array(this.width));
}
}
});
module.exports = {
Hash256
};

View File

@@ -0,0 +1,16 @@
import { makeClass } from '../utils/make-class'
import { Hash } from './hash'
// 256-bit (32-byte) hash type.
const Hash256 = makeClass({
  inherits: Hash,
  statics: {
    width: 32,
    init () {
      // shared all-zero constant (e.g. used for empty SHAMap branches)
      this.ZERO_256 = new this(new Uint8Array(this.width))
    }
  }
}, undefined)
export {
  Hash256
}

View File

@@ -1,46 +0,0 @@
const assert = require('assert');
const makeClass = require('../utils/make-class');
const {Comparable, SerializedType} = require('./serialized-type');
const {compareBytes, parseBytes} = require('../utils/bytes-utils');
const Hash = makeClass({
Hash(bytes) {
const width = this.constructor.width;
this._bytes = bytes ? parseBytes(bytes, Uint8Array) :
new Uint8Array(width);
assert.equal(this._bytes.length, width);
},
mixins: [Comparable, SerializedType],
statics: {
width: NaN,
from(value) {
if (value instanceof this) {
return value;
}
return new this(parseBytes(value));
},
fromParser(parser, hint) {
return new this(parser.read(hint || this.width));
}
},
compareTo(other) {
return compareBytes(this._bytes, this.constructor.from(other)._bytes);
},
toString() {
return this.toHex();
},
nibblet(depth) {
const byte_ix = depth > 0 ? (depth / 2) | 0 : 0;
let b = this._bytes[byte_ix];
if (depth % 2 === 0) {
b = (b & 0xF0) >>> 4;
} else {
b = b & 0x0F;
}
return b;
}
});
module.exports = {
Hash
};

View File

@@ -0,0 +1,46 @@
import * as assert from 'assert'
import { makeClass } from '../utils/make-class'
import { Comparable, SerializedType } from './serialized-type'
import { compareBytes, parseBytes } from '../utils/bytes-utils'
// Abstract fixed-width hash. Concrete subclasses override statics.width
// (the byte length).
const Hash = makeClass({
  Hash (bytes) {
    const width = this.constructor.width
    this._bytes = bytes ? parseBytes(bytes, Uint8Array)
      : new Uint8Array(width)
    // NOTE(review): assert.equal is a loose (==) comparison; both operands
    // are numbers here so it behaves like strictEqual.
    assert.equal(this._bytes.length, width)
  },
  mixins: [Comparable, SerializedType],
  statics: {
    width: NaN, // sentinel; must be overridden by subclasses
    from (value) {
      if (value instanceof this) {
        return value
      }
      return new this(parseBytes(value))
    },
    fromParser (parser, hint) {
      return new this(parser.read(hint || this.width))
    }
  },
  // Byte-wise ordering; powers the Comparable mixin (lt/eq/gt/...).
  compareTo (other) {
    return compareBytes(this._bytes, this.constructor.from(other)._bytes)
  },
  toString () {
    return this.toHex()
  },
  // Return the 4-bit nibble at `depth` (two nibbles per byte, high nibble
  // first). Used by the SHAMap to select a branch at each tree level.
  nibblet (depth) {
    const byteIx = depth > 0 ? (depth / 2) | 0 : 0
    let b = this._bytes[byteIx]
    if (depth % 2 === 0) {
      b = (b & 0xF0) >>> 4
    } else {
      b = b & 0x0F
    }
    return b
  }
}, undefined)
export {
  Hash
}

View File

@@ -1,45 +0,0 @@
const enums = require('../enums');
const {Field} = enums;
const {AccountID} = require('./account-id');
const {Amount} = require('./amount');
const {Blob} = require('./blob');
const {Currency} = require('./currency');
const {Hash128} = require('./hash-128');
const {Hash160} = require('./hash-160');
const {Hash256} = require('./hash-256');
const {PathSet} = require('./path-set');
const {STArray} = require('./st-array');
const {STObject} = require('./st-object');
const {UInt16} = require('./uint-16');
const {UInt32} = require('./uint-32');
const {UInt64} = require('./uint-64');
const {UInt8} = require('./uint-8');
const {Vector256} = require('./vector-256');
const coreTypes = {
AccountID,
Amount,
Blob,
Currency,
Hash128,
Hash160,
Hash256,
PathSet,
STArray,
STObject,
UInt8,
UInt16,
UInt32,
UInt64,
Vector256
};
Field.values.forEach(field => {
field.associatedType = coreTypes[field.type];
});
Field.TransactionType.associatedType = enums.TransactionType;
Field.TransactionResult.associatedType = enums.TransactionResult;
Field.LedgerEntryType.associatedType = enums.LedgerEntryType;
module.exports = coreTypes;

View File

@@ -0,0 +1,45 @@
import { Enums } from '../enums'
import { AccountID } from './account-id'
import { Amount } from './amount'
import { Blob } from './blob'
const Field = Enums.Field
const { Currency } = require('./currency')
const { Hash128 } = require('./hash-128')
const { Hash160 } = require('./hash-160')
const { Hash256 } = require('./hash-256')
const { PathSet } = require('./path-set')
const { STArray } = require('./st-array')
const { STObject } = require('./st-object')
const { UInt16 } = require('./uint-16')
const { UInt32 } = require('./uint-32')
const { UInt64 } = require('./uint-64')
const { UInt8 } = require('./uint-8')
const { Vector256 } = require('./vector-256')
const coreTypes = {
AccountID,
Amount,
Blob,
Currency,
Hash128,
Hash160,
Hash256,
PathSet,
STArray,
STObject,
UInt8,
UInt16,
UInt32,
UInt64,
Vector256
}
Field.values.forEach(field => {
field.associatedType = coreTypes[field.type]
})
Field.TransactionType.associatedType = Enums.TransactionType
Field.TransactionResult.associatedType = Enums.TransactionResult
Field.LedgerEntryType.associatedType = Enums.LedgerEntryType
export { coreTypes }

View File

@@ -1,113 +0,0 @@
/* eslint-disable no-unused-expressions */
const makeClass = require('../utils/make-class');
const {SerializedType, ensureArrayLikeIs} = require('./serialized-type');
const {Currency} = require('./currency');
const {AccountID} = require('./account-id');
const PATHSET_END_BYTE = 0x00;
const PATH_SEPARATOR_BYTE = 0xFF;
const TYPE_ACCOUNT = 0x01;
const TYPE_CURRENCY = 0x10;
const TYPE_ISSUER = 0x20;
const Hop = makeClass({
statics: {
from(value) {
if (value instanceof this) {
return value;
}
const hop = new Hop();
value.issuer && (hop.issuer = AccountID.from(value.issuer));
value.account && (hop.account = AccountID.from(value.account));
value.currency && (hop.currency = Currency.from(value.currency));
return hop;
},
parse(parser, type) {
const hop = new Hop();
(type & TYPE_ACCOUNT) && (hop.account = AccountID.fromParser(parser));
(type & TYPE_CURRENCY) && (hop.currency = Currency.fromParser(parser));
(type & TYPE_ISSUER) && (hop.issuer = AccountID.fromParser(parser));
return hop;
}
},
toJSON() {
const type = this.type();
const ret = {};
(type & TYPE_ACCOUNT) && (ret.account = this.account.toJSON());
(type & TYPE_ISSUER) && (ret.issuer = this.issuer.toJSON());
(type & TYPE_CURRENCY) && (ret.currency = this.currency.toJSON());
return ret;
},
type() {
let type = 0;
this.issuer && (type += TYPE_ISSUER);
this.account && (type += TYPE_ACCOUNT);
this.currency && (type += TYPE_CURRENCY);
return type;
}
});
const Path = makeClass({
inherits: Array,
statics: {
from(value) {
return ensureArrayLikeIs(Path, value).withChildren(Hop);
}
},
toJSON() {
return this.map(k => k.toJSON());
}
});
const PathSet = makeClass({
mixins: SerializedType,
inherits: Array,
statics: {
from(value) {
return ensureArrayLikeIs(PathSet, value).withChildren(Path);
},
fromParser(parser) {
const pathSet = new this();
let path;
while (!parser.end()) {
const type = parser.readUInt8();
if (type === PATHSET_END_BYTE) {
break;
}
if (type === PATH_SEPARATOR_BYTE) {
path = null;
continue;
}
if (!path) {
path = new Path();
pathSet.push(path);
}
path.push(Hop.parse(parser, type));
}
return pathSet;
}
},
toJSON() {
return this.map(k => k.toJSON());
},
toBytesSink(sink) {
let n = 0;
this.forEach(path => {
if (n++ !== 0) {
sink.put([PATH_SEPARATOR_BYTE]);
}
path.forEach(hop => {
sink.put([hop.type()]);
hop.account && (hop.account.toBytesSink(sink));
hop.currency && (hop.currency.toBytesSink(sink));
hop.issuer && (hop.issuer.toBytesSink(sink));
});
});
sink.put([PATHSET_END_BYTE]);
}
});
module.exports = {
PathSet
};

View File

@@ -0,0 +1,113 @@
/* eslint-disable no-unused-expressions */
import { makeClass } from '../utils/make-class'
const { SerializedType, ensureArrayLikeIs } = require('./serialized-type')
const { Currency } = require('./currency')
const { AccountID } = require('./account-id')
const PATHSET_END_BYTE = 0x00
const PATH_SEPARATOR_BYTE = 0xFF
const TYPE_ACCOUNT = 0x01
const TYPE_CURRENCY = 0x10
const TYPE_ISSUER = 0x20
// One hop of a payment path: any subset of account / currency / issuer.
const Hop = makeClass({
  statics: {
    // Build a Hop from a plain object; absent components stay unset.
    from (value) {
      if (value instanceof this) {
        return value
      }
      const hop = new Hop()
      value.issuer && (hop.issuer = AccountID.from(value.issuer))
      value.account && (hop.account = AccountID.from(value.account))
      value.currency && (hop.currency = Currency.from(value.currency))
      return hop
    },
    // Read the components indicated by the type byte, in wire order:
    // account, currency, issuer.
    // NOTE: the trailing semicolons are significant — without them ASI
    // would merge these parenthesized expression statements.
    parse (parser, type) {
      const hop = new Hop();
      (type & TYPE_ACCOUNT) && (hop.account = AccountID.fromParser(parser));
      (type & TYPE_CURRENCY) && (hop.currency = Currency.fromParser(parser));
      (type & TYPE_ISSUER) && (hop.issuer = AccountID.fromParser(parser))
      return hop
    }
  },
  // Plain-object form containing only the components this hop carries.
  toJSON () {
    const type = this.type()
    const ret = <any>{};
    (type & TYPE_ACCOUNT) && (ret.account = this.account.toJSON());
    (type & TYPE_ISSUER) && (ret.issuer = this.issuer.toJSON());
    (type & TYPE_CURRENCY) && (ret.currency = this.currency.toJSON())
    return ret
  },
  // Bitmask describing which components are present.
  type () {
    let type = 0
    this.issuer && (type += TYPE_ISSUER)
    this.account && (type += TYPE_ACCOUNT)
    this.currency && (type += TYPE_CURRENCY)
    return type
  }
}, undefined)
// A Path is simply an array of Hops.
const Path = makeClass({
  inherits: Array,
  statics: {
    from (value) {
      return ensureArrayLikeIs(Path, value).withChildren(Hop)
    }
  },
  toJSON () {
    return this.map(k => k.toJSON())
  }
}, undefined)
// A PathSet is an array of Paths; on the wire, paths are separated by
// 0xFF and the set is terminated by 0x00.
const PathSet = makeClass({
  mixins: SerializedType,
  inherits: Array,
  statics: {
    from (value) {
      return ensureArrayLikeIs(PathSet, value).withChildren(Path)
    },
    fromParser (parser) {
      const pathSet = new this()
      let path
      while (!parser.end()) {
        const type = parser.readUInt8()
        if (type === PATHSET_END_BYTE) {
          break
        }
        if (type === PATH_SEPARATOR_BYTE) {
          // start a new path at the next hop byte
          path = null
          continue
        }
        if (!path) {
          path = new Path()
          pathSet.push(path)
        }
        path.push(Hop.parse(parser, type))
      }
      return pathSet
    }
  },
  toJSON () {
    return this.map(k => k.toJSON())
  },
  toBytesSink (sink) {
    let n = 0
    this.forEach(path => {
      // separator goes between paths, not before the first one
      if (n++ !== 0) {
        sink.put([PATH_SEPARATOR_BYTE])
      }
      path.forEach(hop => {
        sink.put([hop.type()])
        hop.account && (hop.account.toBytesSink(sink))
        hop.currency && (hop.currency.toBytesSink(sink))
        hop.issuer && (hop.issuer.toBytesSink(sink))
      })
    })
    sink.put([PATHSET_END_BYTE])
  }
}, undefined)
export {
  PathSet
}

View File

@@ -1,64 +0,0 @@
const {bytesToHex, slice} = require('../utils/bytes-utils');
const {BytesList} = require('../serdes/binary-serializer');
const Comparable = {
lt(other) {
return this.compareTo(other) < 0;
},
eq(other) {
return this.compareTo(other) === 0;
},
gt(other) {
return this.compareTo(other) > 0;
},
gte(other) {
return this.compareTo(other) > -1;
},
lte(other) {
return this.compareTo(other) < 1;
}
};
const SerializedType = {
toBytesSink(sink) {
sink.put(this._bytes);
},
toHex() {
return bytesToHex(this.toBytes());
},
toBytes() {
if (this._bytes) {
return slice(this._bytes);
}
const bl = new BytesList();
this.toBytesSink(bl);
return bl.toBytes();
},
toJSON() {
return this.toHex();
},
toString() {
return this.toHex();
}
};
function ensureArrayLikeIs(Type, arrayLike) {
return {
withChildren(Child) {
if (arrayLike instanceof Type) {
return arrayLike;
}
const obj = new Type();
for (let i = 0; i < arrayLike.length; i++) {
obj.push(Child.from(arrayLike[i]));
}
return obj;
}
};
}
module.exports = {
ensureArrayLikeIs,
SerializedType,
Comparable
};

View File

@@ -0,0 +1,64 @@
import { BytesList } from '../serdes/binary-serializer'
const { bytesToHex, slice } = require('../utils/bytes-utils')
/**
 * Mixin giving rich comparison operators to any type that implements
 * `compareTo(rhs)` returning a negative, zero, or positive number.
 */
const Comparable = {
  lt (rhs) { return this.compareTo(rhs) < 0 },
  eq (rhs) { return this.compareTo(rhs) === 0 },
  gt (rhs) { return this.compareTo(rhs) > 0 },
  gte (rhs) { return this.compareTo(rhs) > -1 },
  lte (rhs) { return this.compareTo(rhs) < 1 }
}
const SerializedType = {
toBytesSink (sink) {
sink.put(this._bytes)
},
toHex () {
return bytesToHex(this.toBytes())
},
toBytes () {
if (this._bytes) {
return slice(this._bytes)
}
const bl = new BytesList()
this.toBytesSink(bl)
return bl.toBytes()
},
toJSON () {
return this.toHex()
},
toString () {
return this.toHex()
}
}
/**
 * Fluent coercion helper: `ensureArrayLikeIs(T, v).withChildren(C)` returns
 * `v` unchanged when it is already a T, otherwise builds a new T whose
 * elements are `C.from(element)` for each element of the array-like `v`.
 */
function ensureArrayLikeIs (Type, arrayLike) {
  const withChildren = (Child) => {
    if (arrayLike instanceof Type) {
      return arrayLike
    }
    const coerced = new Type()
    for (let index = 0; index < arrayLike.length; index++) {
      coerced.push(Child.from(arrayLike[index]))
    }
    return coerced
  }
  return { withChildren }
}
export {
ensureArrayLikeIs,
SerializedType,
Comparable
}

View File

@@ -1,38 +0,0 @@
const makeClass = require('../utils/make-class');
const {ensureArrayLikeIs, SerializedType} = require('./serialized-type');
const {Field} = require('../enums');
const {STObject} = require('./st-object');
const {ArrayEndMarker} = Field;
const STArray = makeClass({
mixins: SerializedType,
inherits: Array,
statics: {
fromParser(parser) {
const array = new STArray();
while (!parser.end()) {
const field = parser.readField();
if (field === ArrayEndMarker) {
break;
}
const outer = new STObject();
outer[field] = parser.readFieldValue(field);
array.push(outer);
}
return array;
},
from(value) {
return ensureArrayLikeIs(STArray, value).withChildren(STObject);
}
},
toJSON() {
return this.map(v => v.toJSON());
},
toBytesSink(sink) {
this.forEach(so => so.toBytesSink(sink));
}
});
module.exports = {
STArray
};

View File

@@ -0,0 +1,38 @@
import { makeClass } from '../utils/make-class'
import { ensureArrayLikeIs, SerializedType } from './serialized-type'
import { Enums } from '../enums'
import { STObject } from './st-object'
const { ArrayEndMarker } = Enums.Field
// STArray: an array of single-field STObjects, terminated on the wire by
// the ArrayEndMarker field.
const STArray = makeClass({
  mixins: SerializedType,
  inherits: Array,
  statics: {
    fromParser (parser) {
      const array = new STArray()
      while (!parser.end()) {
        const field = parser.readField()
        if (field === ArrayEndMarker) {
          break
        }
        // Each array entry is wrapped in an STObject keyed by its field.
        const outer = new STObject()
        outer[field] = parser.readFieldValue(field)
        array.push(outer)
      }
      return array
    },
    from (value) {
      return ensureArrayLikeIs(STArray, value).withChildren(STObject)
    }
  },
  toJSON () {
    return this.map(v => v.toJSON())
  },
  // Entries serialize back-to-back; the end marker is written by the
  // enclosing serializer, not here.
  toBytesSink (sink) {
    this.forEach(so => so.toBytesSink(sink))
  }
}, undefined)
export {
STArray
}

View File

@@ -1,66 +0,0 @@
const _ = require('lodash');
const makeClass = require('../utils/make-class');
const {Field} = require('../enums');
const {BinarySerializer} = require('../serdes/binary-serializer');
const {ObjectEndMarker} = Field;
const {SerializedType} = require('./serialized-type');
const STObject = makeClass({
mixins: SerializedType,
statics: {
fromParser(parser, hint) {
const end = typeof hint === 'number' ? parser.pos() + hint : null;
const so = new this();
while (!parser.end(end)) {
const field = parser.readField();
if (field === ObjectEndMarker) {
break;
}
so[field] = parser.readFieldValue(field);
}
return so;
},
from(value) {
if (value instanceof this) {
return value;
}
if (typeof value === 'object') {
return _.transform(value, (so, val, key) => {
const field = Field[key];
if (field) {
so[field] = field.associatedType.from(val);
} else {
so[key] = val;
}
}, new this());
}
throw new Error(`${value} is unsupported`);
}
},
fieldKeys() {
return Object.keys(this).map(k => Field[k]).filter(Boolean);
},
toJSON() {
// Otherwise seemingly result will have same prototype as `this`
const accumulator = {}; // of only `own` properties
return _.transform(this, (result, value, key) => {
result[key] = value && value.toJSON ? value.toJSON() : value;
}, accumulator);
},
toBytesSink(sink, filter = () => true) {
const serializer = new BinarySerializer(sink);
const fields = this.fieldKeys();
const sorted = _.sortBy(fields, 'ordinal');
sorted.filter(filter).forEach(field => {
const value = this[field];
if (!field.isSerialized) {
return;
}
serializer.writeFieldAndValue(field, value);
});
}
});
module.exports = {
STObject
};

View File

@@ -0,0 +1,66 @@
import { makeClass } from '../utils/make-class'
import { Enums } from '../enums'
const _ = require('lodash')
const { BinarySerializer } = require('../serdes/binary-serializer')
const { ObjectEndMarker } = Enums.Field
const { SerializedType } = require('./serialized-type')
// STObject: an ordered map of Field -> serialized value; the workhorse
// type for transactions and ledger entries.
const STObject = makeClass({
  mixins: SerializedType,
  statics: {
    // Parse until ObjectEndMarker, or until `hint` bytes have been
    // consumed when a numeric length hint is supplied.
    fromParser (parser, hint) {
      const end = typeof hint === 'number' ? parser.pos() + hint : null
      const so = new this()
      while (!parser.end(end)) {
        const field = parser.readField()
        if (field === ObjectEndMarker) {
          break
        }
        so[field] = parser.readFieldValue(field)
      }
      return so
    },
    // Build from a plain object: known field names are converted through
    // their associated type; unknown keys are copied through untouched.
    from (value) {
      if (value instanceof this) {
        return value
      }
      if (typeof value === 'object') {
        return _.transform(value, (so, val, key) => {
          const field = Enums.Field[key]
          if (field) {
            so[field] = field.associatedType.from(val)
          } else {
            so[key] = val
          }
        }, new this())
      }
      throw new Error(`${value} is unsupported`)
    }
  },
  // Own keys of this object that correspond to known Fields.
  fieldKeys () {
    return Object.keys(this).map(k => Enums.Field[k]).filter(Boolean)
  },
  toJSON () {
    // Otherwise seemingly result will have same prototype as `this`
    const accumulator = {} // of only `own` properties
    return _.transform(this, (result, value, key) => {
      result[key] = value && value.toJSON ? value.toJSON() : value
    }, accumulator)
  },
  // Serialize known fields in canonical (ordinal) order, skipping fields
  // flagged as not serialized; `filter` lets callers omit e.g. signing
  // fields.
  toBytesSink (sink, filter = () => true) {
    const serializer = new BinarySerializer(sink)
    const fields = this.fieldKeys()
    const sorted = _.sortBy(fields, 'ordinal')
    sorted.filter(filter).forEach(field => {
      const value = this[field]
      if (!field.isSerialized) {
        return
      }
      serializer.writeFieldAndValue(field, value)
    })
  }
}, undefined)
export {
STObject
}

View File

@@ -1,11 +0,0 @@
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
const UInt16 = makeClass({
inherits: UInt,
statics: {width: 2}
});
module.exports = {
UInt16
};

View File

@@ -0,0 +1,11 @@
import { makeClass } from '../utils/make-class'
import { UInt } from './uint'
// 16-bit unsigned integer field type (2 bytes on the wire).
const UInt16 = makeClass({
  inherits: UInt,
  statics: { width: 2 }
}, undefined)
export {
UInt16
}

View File

@@ -1,11 +0,0 @@
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
const UInt32 = makeClass({
inherits: UInt,
statics: {width: 4}
});
module.exports = {
UInt32
};

View File

@@ -0,0 +1,11 @@
import { makeClass } from '../utils/make-class'
import { UInt } from './uint'
// 32-bit unsigned integer field type (4 bytes on the wire).
const UInt32 = makeClass({
  inherits: UInt,
  statics: { width: 4 }
}, undefined)
export {
UInt32
}

View File

@@ -1,50 +0,0 @@
const assert = require('assert');
const BN = require('bn.js');
const makeClass = require('../utils/make-class');
const {bytesToHex, parseBytes, serializeUIntN}
= require('../utils/bytes-utils');
const {UInt} = require('./uint');
const HEX_REGEX = /^[A-F0-9]{16}$/;
const UInt64 = makeClass({
inherits: UInt,
statics: {width: 8},
UInt64(arg = 0) {
const argType = typeof arg;
if (argType === 'number') {
assert(arg >= 0);
this._bytes = new Uint8Array(8);
this._bytes.set(serializeUIntN(arg, 4), 4);
} else if (arg instanceof BN) {
this._bytes = parseBytes(arg.toArray('be', 8), Uint8Array);
this._toBN = arg;
} else {
if (argType === 'string') {
if (!HEX_REGEX.test(arg)) {
throw new Error(`${arg} is not a valid UInt64 hex string`);
}
}
this._bytes = parseBytes(arg, Uint8Array);
}
assert(this._bytes.length === 8);
},
toJSON() {
return bytesToHex(this._bytes);
},
valueOf() {
return this.toBN();
},
cached: {
toBN() {
return new BN(this._bytes);
}
},
toBytes() {
return this._bytes;
}
});
module.exports = {
UInt64
};

View File

@@ -0,0 +1,49 @@
import { strict as assert } from 'assert'
import { BN } from 'bn.js'
import { makeClass } from '../utils/make-class'
import { bytesToHex, parseBytes, serializeUIntN } from '../utils/bytes-utils'
import { UInt } from './uint'
const HEX_REGEX = /^[A-F0-9]{16}$/
// 64-bit unsigned integer. Too wide for a safe JS number, so the value is
// kept as 8 raw bytes and exposed as a BN or uppercase hex string.
const UInt64 = makeClass({
  inherits: UInt,
  statics: { width: 8 },
  // Constructor: accepts a non-negative number (stored in the low 4
  // bytes only), a BN, a 16-char uppercase hex string, or any 8-byte
  // sequence accepted by parseBytes.
  UInt64 (arg : any = 0) {
    const argType = typeof arg
    if (argType === 'number') {
      assert(arg >= 0)
      this._bytes = new Uint8Array(8)
      // Low 32 bits only; the high 4 bytes stay zero.
      this._bytes.set(serializeUIntN(arg, 4), 4)
    } else if (arg instanceof BN) {
      this._bytes = parseBytes(arg.toArray('be', 8), Uint8Array)
      this._toBN = arg // pre-populate the `cached` toBN slot
    } else {
      if (argType === 'string') {
        if (!HEX_REGEX.test(arg)) {
          throw new Error(`${arg} is not a valid UInt64 hex string`)
        }
      }
      this._bytes = parseBytes(arg, Uint8Array)
    }
    assert(this._bytes.length === 8)
  },
  // JSON form is the 16-char uppercase hex string.
  toJSON () {
    return bytesToHex(this._bytes)
  },
  valueOf () {
    return this.toBN()
  },
  cached: {
    toBN () {
      return new BN(this._bytes)
    }
  },
  // NOTE(review): returns the internal buffer itself, not a copy —
  // callers must not mutate the result.
  toBytes () {
    return this._bytes
  }
}, undefined)
export {
UInt64
}

View File

@@ -1,11 +0,0 @@
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
const UInt8 = makeClass({
inherits: UInt,
statics: {width: 1}
});
module.exports = {
UInt8
};

View File

@@ -0,0 +1,11 @@
import { makeClass } from '../utils/make-class'
import { UInt } from './uint'
// 8-bit unsigned integer field type (1 byte on the wire).
const UInt8 = makeClass({
  inherits: UInt,
  statics: { width: 1 }
}, undefined)
export {
UInt8
}

View File

@@ -1,61 +0,0 @@
const assert = require('assert');
const BN = require('bn.js');
const makeClass = require('../utils/make-class');
const {Comparable, SerializedType} = require('./serialized-type');
const {serializeUIntN} = require('../utils/bytes-utils');
const MAX_VALUES = [0, 255, 65535, 16777215, 4294967295];
function signum(a, b) {
return a < b ? -1 : a === b ? 0 : 1;
}
const UInt = makeClass({
mixins: [Comparable, SerializedType],
UInt(val = 0) {
const max = MAX_VALUES[this.constructor.width];
if (val < 0 || !(val <= max)) {
throw new Error(`${val} not in range 0 <= $val <= ${max}`);
}
this.val = val;
},
statics: {
width: 0,
fromParser(parser) {
const val = this.width > 4 ? parser.read(this.width) :
parser.readUIntN(this.width);
return new this(val);
},
from(val) {
return val instanceof this ? val : new this(val);
}
},
toJSON() {
return this.val;
},
valueOf() {
return this.val;
},
compareTo(other) {
const thisValue = this.valueOf();
const otherValue = other.valueOf();
if (thisValue instanceof BN) {
return otherValue instanceof BN ?
thisValue.cmp(otherValue) :
thisValue.cmpn(otherValue);
} else if (otherValue instanceof BN) {
return -other.compareTo(this);
}
assert(typeof otherValue === 'number');
return signum(thisValue, otherValue);
},
toBytesSink(sink) {
sink.put(this.toBytes());
},
toBytes() {
return serializeUIntN(this.val, this.constructor.width);
}
});
module.exports = {
UInt
};

View File

@@ -0,0 +1,61 @@
import { strict as assert } from 'assert'
import { BN } from 'bn.js'
import { makeClass } from '../utils/make-class'
const { Comparable, SerializedType } = require('./serialized-type')
const { serializeUIntN } = require('../utils/bytes-utils')
const MAX_VALUES = [0, 255, 65535, 16777215, 4294967295]
// Three-way comparison: -1 if a < b, 0 if equal, 1 otherwise.
function signum (a, b) {
  if (a === b) {
    return 0
  }
  return a < b ? -1 : 1
}
// Base class for the fixed-width unsigned integer types (UInt8/16/32/64).
const UInt = makeClass({
  mixins: [Comparable, SerializedType],
  // Constructor: validate `val` against the subclass's width.
  UInt (val = 0) {
    const max = MAX_VALUES[this.constructor.width]
    if (val < 0 || !(val <= max)) {
      // Fixed: the middle placeholder was the literal text `$val`
      // (missing braces), so the range never rendered correctly.
      throw new Error(`${val} not in range 0 <= ${val} <= ${max}`)
    }
    this.val = val
  },
  statics: {
    width: 0, // subclasses override with their byte width
    fromParser (parser) {
      // Widths > 4 (UInt64) don't fit a JS number, so they are read as
      // raw bytes rather than through readUIntN.
      const val = this.width > 4 ? parser.read(this.width)
        : parser.readUIntN(this.width)
      return new this(val)
    },
    from (val) {
      return val instanceof this ? val : new this(val)
    }
  },
  toJSON () {
    return this.val
  },
  valueOf () {
    return this.val
  },
  // Three-way comparison supporting both number- and BN-backed values
  // (UInt64 overrides valueOf to return a BN).
  compareTo (other) {
    const thisValue = this.valueOf()
    const otherValue = other.valueOf()
    if (thisValue instanceof BN) {
      return otherValue instanceof BN
        ? thisValue.cmp(otherValue)
        : thisValue.cmpn(otherValue)
    } else if (otherValue instanceof BN) {
      return -other.compareTo(this)
    }
    assert(typeof otherValue === 'number')
    return signum(thisValue, otherValue)
  },
  toBytesSink (sink) {
    sink.put(this.toBytes())
  },
  toBytes () {
    return serializeUIntN(this.val, this.constructor.width)
  }
}, undefined)
export {
UInt
}

View File

@@ -1,32 +0,0 @@
const makeClass = require('../utils/make-class');
const {Hash256} = require('./hash-256');
const {ensureArrayLikeIs, SerializedType} = require('./serialized-type');
const Vector256 = makeClass({
mixins: SerializedType,
inherits: Array,
statics: {
fromParser(parser, hint) {
const vector256 = new this();
const bytes = hint !== null ? hint : parser.size() - parser.pos();
const hashes = bytes / 32;
for (let i = 0; i < hashes; i++) {
vector256.push(Hash256.fromParser(parser));
}
return vector256;
},
from(value) {
return ensureArrayLikeIs(Vector256, value).withChildren(Hash256);
}
},
toBytesSink(sink) {
this.forEach(h => h.toBytesSink(sink));
},
toJSON() {
return this.map(hash => hash.toJSON());
}
});
module.exports = {
Vector256
};

View File

@@ -0,0 +1,32 @@
import { makeClass } from '../utils/make-class'
const { Hash256 } = require('./hash-256')
const { ensureArrayLikeIs, SerializedType } = require('./serialized-type')
// Vector256: a list of 256-bit (32-byte) hashes.
const Vector256 = makeClass({
  mixins: SerializedType,
  inherits: Array,
  statics: {
    // Parse a Vector256. `hint` is the byte length to consume; when it
    // is null/undefined, the remainder of the parser's buffer is read.
    fromParser (parser, hint) {
      const vector256 = new this()
      // `!= null` (not `!== null`): an omitted/undefined hint must also
      // fall back to "read to the end" — with the strict check, an
      // undefined hint made `bytes` undefined and the NaN loop bound
      // silently produced an empty vector.
      const bytes = hint != null ? hint : parser.size() - parser.pos()
      const hashes = bytes / 32
      for (let i = 0; i < hashes; i++) {
        vector256.push(Hash256.fromParser(parser))
      }
      return vector256
    },
    from (value) {
      return ensureArrayLikeIs(Vector256, value).withChildren(Hash256)
    }
  },
  toBytesSink (sink) {
    this.forEach(h => h.toBytesSink(sink))
  },
  toJSON () {
    return this.map(hash => hash.toJSON())
  }
}, undefined)
export {
Vector256
}

View File

@@ -1,113 +0,0 @@
const assert = require('assert');
function signum(a, b) {
return a < b ? -1 : a === b ? 0 : 1;
}
const hexLookup = (function() {
const res = {};
const reverse = res.reverse = new Array(256);
for (let i = 0; i < 16; i++) {
const char = i.toString(16).toUpperCase();
res[char] = i;
for (let j = 0; j < 16; j++) {
const char2 = j.toString(16).toUpperCase();
const byte = (i << 4) + j;
const byteHex = char + char2;
res[byteHex] = byte;
reverse[byte] = byteHex;
}
}
return res;
}());
const reverseHexLookup = hexLookup.reverse;
function bytesToHex(sequence) {
const buf = Array(sequence.length);
for (let i = sequence.length - 1; i >= 0; i--) {
buf[i] = reverseHexLookup[sequence[i]];
}
return buf.join('');
}
function byteForHex(hex) {
const byte = hexLookup[hex];
if (byte === undefined) {
throw new Error(`\`${hex}\` is not a valid hex representation of a byte`);
}
return byte;
}
function parseBytes(val, Output = Array) {
if (!val || val.length === undefined) {
throw new Error(`${val} is not a sequence`);
}
if (typeof val === 'string') {
const start = val.length % 2;
const res = new Output((val.length + start) / 2);
for (let i = val.length, to = res.length - 1; to >= start; i -= 2, to--) {
res[to] = byteForHex(val.slice(i - 2, i));
}
if (start === 1) {
res[0] = byteForHex(val[0]);
}
return res;
} else if (val instanceof Output) {
return val;
} else if (Output === Uint8Array) {
return new Output(val);
}
const res = new Output(val.length);
for (let i = val.length - 1; i >= 0; i--) {
res[i] = val[i];
}
return res;
}
function serializeUIntN(val, width) {
const newBytes = new Uint8Array(width);
const lastIx = width - 1;
for (let i = 0; i < width; i++) {
newBytes[lastIx - i] = (val >>> (i * 8) & 0xff);
}
return newBytes;
}
function compareBytes(a, b) {
assert(a.length === b.length);
for (let i = 0; i < a.length; i++) {
const cmp = signum(a[i], b[i]);
if (cmp !== 0) {
return cmp;
}
}
return 0;
}
function slice(val, startIx = 0, endIx = val.length, Output = val.constructor) {
/* eslint-disable no-param-reassign*/
if (startIx < 0) {
startIx += val.length;
}
if (endIx < 0) {
endIx += val.length;
}
/* eslint-enable no-param-reassign*/
const len = endIx - startIx;
const res = new Output(len);
for (let i = endIx - 1; i >= startIx; i--) {
res[i - startIx] = val[i];
}
return res;
}
module.exports = {
parseBytes,
bytesToHex,
slice,
compareBytes,
serializeUIntN
};

View File

@@ -0,0 +1,113 @@
import { strict as assert } from 'assert'
// Three-way comparison: -1, 0, or 1 as `a` is below, equal to, or above `b`.
function signum (a, b) {
  if (a < b) {
    return -1
  }
  return a === b ? 0 : 1
}
// Precomputed hex<->byte tables: maps '0'..'F' and '00'..'FF' to their
// numeric values, plus a `reverse` array mapping 0..255 back to two-char
// uppercase hex.
const hexLookup = (function () {
  const res = <any>{}
  const reverse = res.reverse = new Array(256)
  for (let i = 0; i < 16; i++) {
    const char = i.toString(16).toUpperCase()
    res[char] = i // single-nibble entry
    for (let j = 0; j < 16; j++) {
      const char2 = j.toString(16).toUpperCase()
      const byte = (i << 4) + j
      const byteHex = char + char2
      res[byteHex] = byte
      reverse[byte] = byteHex
    }
  }
  return res
}())
const reverseHexLookup = hexLookup.reverse
// Convert a byte sequence to its uppercase hex string representation.
function bytesToHex (sequence) {
  const parts = []
  for (let ix = 0; ix < sequence.length; ix++) {
    parts.push(reverseHexLookup[sequence[ix]])
  }
  return parts.join('')
}
// Look up the numeric value of a one- or two-char uppercase hex string,
// throwing for anything not present in the precomputed table.
function byteForHex (hex) {
  const value = hexLookup[hex]
  if (value !== undefined) {
    return value
  }
  throw new Error(`\`${hex}\` is not a valid hex representation of a byte`)
}
// Coerce `val` (uppercase hex string, array, or typed array) into an
// `Output` sequence of byte values. Throws if `val` is not array-like.
function parseBytes (val, Output = <any>Array) {
  if (!val || val.length === undefined) {
    throw new Error(`${val} is not a sequence`)
  }
  if (typeof val === 'string') {
    // Hex string: parse two chars per byte, right-aligned, so an
    // odd-length string contributes an implicit leading nibble.
    const start = val.length % 2
    const res = new Output((val.length + start) / 2)
    for (let i = val.length, to = res.length - 1; to >= start; i -= 2, to--) {
      res[to] = byteForHex(val.slice(i - 2, i))
    }
    if (start === 1) {
      res[0] = byteForHex(val[0])
    }
    return res
  } else if (val instanceof Output) {
    return val // already the right container; returned without copying
  } else if (Output === Uint8Array) {
    return new Output(val) // typed-array constructor copies for us
  }
  // Generic fallback: element-by-element copy into a new Output.
  const res = new Output(val.length)
  for (let i = val.length - 1; i >= 0; i--) {
    res[i] = val[i]
  }
  return res
}
// Big-endian encode `val` into a fresh Uint8Array of `width` bytes.
function serializeUIntN (val, width) {
  const out = new Uint8Array(width)
  for (let shift = 0; shift < width; shift++) {
    out[width - 1 - shift] = (val >>> (shift * 8)) & 0xff
  }
  return out
}
// Lexicographic three-way comparison of two equal-length byte sequences.
function compareBytes (a, b) {
  assert(a.length === b.length)
  let ix = 0
  while (ix < a.length) {
    const order = signum(a[ix], b[ix])
    if (order !== 0) {
      return order
    }
    ix++
  }
  return 0
}
// Copy val[startIx:endIx] into a new `Output` container. Negative indices
// count from the end, as with Array.prototype.slice.
function slice (val, startIx = 0, endIx = val.length, Output = val.constructor) {
  /* eslint-disable no-param-reassign */
  if (startIx < 0) {
    startIx += val.length
  }
  if (endIx < 0) {
    endIx += val.length
  }
  /* eslint-enable no-param-reassign */
  const res = new Output(endIx - startIx)
  for (let from = startIx, to = 0; from < endIx; from++, to++) {
    res[to] = val[from]
  }
  return res
}
export {
parseBytes,
bytesToHex,
slice,
compareBytes,
serializeUIntN
}

View File

@@ -1,83 +0,0 @@
const _ = require('lodash');
const inherits = require('inherits');
function forEach(obj, func) {
Object.keys(obj || {}).forEach(k => {
func(obj[k], k);
});
}
function ensureArray(val) {
return Array.isArray(val) ? val : [val];
}
module.exports = function makeClass(klass_, definition_) {
const definition = definition_ || klass_;
let klass = typeof klass_ === 'function' ? klass_ : null;
if (klass === null) {
for (const k in definition) {
if (k[0].match(/[A-Z]/)) {
klass = definition[k];
break;
}
}
}
const parent = definition.inherits;
if (parent) {
if (klass === null) {
klass = function() {
parent.apply(this, arguments);
};
}
inherits(klass, parent);
_.defaults(klass, parent);
}
if (klass === null) {
klass = function() {};
}
const proto = klass.prototype;
function addFunc(original, name, wrapper) {
proto[name] = wrapper || original;
}
(definition.getters || []).forEach(k => {
const key = '_' + k;
proto[k] = function() {
return this[key];
};
});
forEach(definition.virtuals, (f, n) => {
addFunc(f, n, function() {
throw new Error('unimplemented');
});
});
forEach(definition.methods, addFunc);
forEach(definition, (f, n) => {
if (_.isFunction(f) && f !== klass) {
addFunc(f, n);
}
});
_.assign(klass, definition.statics);
if (typeof klass.init === 'function') {
klass.init();
}
forEach(definition.cached, (f, n) => {
const key = '_' + n;
addFunc(f, n, function() {
let value = this[key];
if (value === undefined) {
value = this[key] = f.call(this);
}
return value;
});
});
if (definition.mixins) {
const mixins = {};
// Right-most in the list win
ensureArray(definition.mixins).reverse().forEach(o => {
_.defaults(mixins, o);
});
_.defaults(proto, mixins);
}
return klass;
};

View File

@@ -0,0 +1,83 @@
import _ = require('lodash');
const inherits = require('inherits')
// Invoke `func(value, key)` for each own enumerable key of `obj`;
// tolerates a null/undefined obj (no iterations).
function forEach (obj, func) {
  const target = obj || {}
  Object.keys(target).forEach(key => {
    func(target[key], key)
  })
}
// Wrap a scalar in a one-element array; arrays pass through unchanged.
function ensureArray (val) {
  if (Array.isArray(val)) {
    return val
  }
  return [val]
}
// Build an ES5-style class from a declarative definition object.
// Recognized definition keys:
//   inherits – parent constructor (prototype chain + defaulted statics)
//   statics  – properties assigned onto the constructor itself
//   mixins   – object or array of objects defaulted onto the prototype
//   getters  – names exposed as `obj.k()` returning `obj._k`
//   virtuals – names that throw 'unimplemented' until overridden
//   methods / cached / other function-valued keys – instance methods
//     (cached ones memoize their first result on `this._name`)
// A capitalized key (e.g. `UInt64`) names the constructor function.
export function makeClass (klass_, definition_) {
  // Called with one argument, that argument is the definition.
  const definition = definition_ || klass_
  let klass = typeof klass_ === 'function' ? klass_ : null
  if (klass === null) {
    // The first capitalized key in the definition is the constructor.
    for (const k in definition) {
      if (k[0].match(/[A-Z]/)) {
        klass = definition[k]
        break
      }
    }
  }
  const parent = definition.inherits
  if (parent) {
    if (klass === null) {
      // No explicit constructor: default to forwarding to the parent.
      klass = function () {
        parent.apply(this, arguments)
      }
    }
    inherits(klass, parent)
    _.defaults(klass, parent) // inherit statics too
  }
  if (klass === null) {
    klass = function () {}
  }
  const proto = klass.prototype
  function addFunc (original, name, wrapper) {
    proto[name] = wrapper || original
  }
  (definition.getters || []).forEach(k => {
    const key = '_' + k
    proto[k] = function () {
      return this[key]
    }
  })
  forEach(definition.virtuals, (f, n) => {
    addFunc(f, n, function () {
      throw new Error('unimplemented')
    })
  })
  forEach(definition.methods, addFunc)
  // Any other function-valued key (besides the constructor itself) also
  // becomes an instance method.
  forEach(definition, (f, n) => {
    if (_.isFunction(f) && f !== klass) {
      addFunc(f, n, undefined)
    }
  })
  _.assign(klass, definition.statics)
  if (typeof klass.init === 'function') {
    klass.init()
  }
  // Cached methods compute once, then store the result on `this._name`.
  forEach(definition.cached, (f, n) => {
    const key = '_' + n
    addFunc(f, n, function () {
      let value = this[key]
      if (value === undefined) {
        value = this[key] = f.call(this)
      }
      return value
    })
  })
  if (definition.mixins) {
    const mixins = {}
    // Right-most in the list win
    ensureArray(definition.mixins).reverse().forEach(o => {
      _.defaults(mixins, o)
    })
    _.defaults(proto, mixins)
  }
  return klass
};

View File

@@ -1,43 +0,0 @@
const _ = require('lodash');
const assert = require('assert');
const utils = require('./utils');
const {Amount} = require('../src/coretypes');
const {loadFixture} = utils;
const fixtures = loadFixture('data-driven-tests.json');
function amountErrorTests() {
_.filter(fixtures.values_tests, {type: 'Amount'}).forEach(f => {
// We only want these with errors
if (!f.error) {
return
}
const testName = `${JSON.stringify(f.test_json)}\n\tis invalid ` +
`because: ${f.error}`
it(testName, () => {
assert.throws(() => {
Amount.from(f.test_json);
}, JSON.stringify(f.test_json));
});
});
}
describe('Amount', function() {
it('can be parsed from', function() {
assert(Amount.from('1000000') instanceof Amount);
assert.equal(Amount.from('1000000').valueString(), '1000000');
const fixture = {
'value': '1',
'issuer': '0000000000000000000000000000000000000000',
'currency': 'USD'
};
const amt = Amount.from(fixture);
const rewritten = {
'value': '1',
'issuer': 'rrrrrrrrrrrrrrrrrrrrrhoLvTp',
'currency': 'USD'
};
assert.deepEqual(amt.toJSON(), rewritten);
});
amountErrorTests()
});

View File

@@ -0,0 +1,42 @@
const _ = require('lodash')
const { loadFixture } = require('./utils')
const { coreTypes } = require('../dist/types')
const { Amount } = coreTypes
const fixtures = loadFixture('data-driven-tests.json')
// For every Amount fixture that declares an error, assert that
// Amount.from rejects it.
function amountErrorTests () {
  _.filter(fixtures.values_tests, { type: 'Amount' }).forEach(f => {
    // We only want these with errors
    if (!f.error) {
      return
    }
    const testName = `${JSON.stringify(f.test_json)}\n\tis invalid ` +
      `because: ${f.error}`
    it(testName, () => {
      // Only Amount.from belongs inside the throwing callback; the stray
      // JSON.stringify carried over from the old mocha assert message
      // added nothing and obscured which call was expected to throw.
      expect(() => {
        Amount.from(f.test_json)
      }).toThrow()
    })
  })
}
describe('Amount', function () {
  it('can be parsed from', function () {
    // Native XRP amounts are plain drop-count strings.
    expect(Amount.from('1000000') instanceof Amount).toBe(true)
    expect(Amount.from('1000000').valueString()).toEqual('1000000')
    // Issued-currency amount: the all-zero hex issuer round-trips to its
    // base58 address form in toJSON.
    const fixture = {
      value: '1',
      issuer: '0000000000000000000000000000000000000000',
      currency: 'USD'
    }
    const amt = Amount.from(fixture)
    const rewritten = {
      value: '1',
      issuer: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp',
      currency: 'USD'
    }
    expect(amt.toJSON()).toEqual(rewritten)
  })
  amountErrorTests()
})

View File

@@ -1,43 +0,0 @@
const assert = require('assert');
const fixtures = require('./fixtures/codec-fixtures.json');
const {decode, encode, decodeLedgerData} = require('../src');
function json(object) {
return JSON.stringify(object);
}
function truncateForDisplay(longStr) {
return longStr.slice(0, 10) + '...' + longStr.slice(-10);
}
describe('ripple-binary-codec', function() {
function makeSuite(name, entries) {
describe(name, function() {
entries.forEach((t, test_n) => {
// eslint-disable-next-line max-len
it(`${name}[${test_n}] can encode ${truncateForDisplay(json(t.json))} to ${truncateForDisplay(t.binary)}`,
() => {
assert.equal(t.binary, encode(t.json));
});
// eslint-disable-next-line max-len
it(`${name}[${test_n}] can decode ${truncateForDisplay(t.binary)} to ${truncateForDisplay(json(t.json))}`,
() => {
const decoded = decode(t.binary);
assert.deepEqual(t.json, decoded);
});
});
});
}
makeSuite('transactions', fixtures.transactions);
makeSuite('accountState', fixtures.accountState);
describe('ledgerData', function() {
fixtures.ledgerData.forEach((t, test_n) => {
it(`ledgerData[${test_n}] can decode ${t.binary} to ${json(t.json)}`,
() => {
const decoded = decodeLedgerData(t.binary);
assert.deepEqual(t.json, decoded);
});
});
})
});

View File

@@ -0,0 +1,44 @@
const fixtures = require('./fixtures/codec-fixtures.json')
const { decode, encode, decodeLedgerData } = require('../dist')
// Shorthand for JSON-encoding fixture values when building test names.
function json (value) {
  return JSON.stringify(value)
}
// Abbreviate a long string to its first and last ten characters,
// joined by an ellipsis, for readable test names.
function truncateForDisplay (longStr) {
  const head = longStr.slice(0, 10)
  const tail = longStr.slice(-10)
  return `${head}...${tail}`
}
describe('ripple-binary-codec', function () {
  // Round-trip every fixture entry through encode and decode.
  function makeSuite (name, entries) {
    describe(name, function () {
      entries.forEach((t, testN) => {
        // eslint-disable-next-line max-len
        test(`${name}[${testN}] can encode ${truncateForDisplay(json(t.json))} to ${truncateForDisplay(t.binary)}`,
          () => {
            expect(t.binary).toEqual(encode(t.json))
          })
        // eslint-disable-next-line max-len
        test(`${name}[${testN}] can decode ${truncateForDisplay(t.binary)} to ${truncateForDisplay(json(t.json))}`,
          () => {
            const decoded = decode(t.binary)
            expect(t.json).toEqual(decoded)
          })
      })
    })
  }
  makeSuite('transactions', fixtures.transactions)
  makeSuite('accountState', fixtures.accountState)
  describe('ledgerData', function () {
    // Guarded: fixture files without a ledgerData section are tolerated.
    if (fixtures.ledgerData) {
      fixtures.ledgerData.forEach((t, testN) => {
        test(`ledgerData[${testN}] can decode ${t.binary} to ${json(t.json)}`,
          () => {
            const decoded = decodeLedgerData(t.binary)
            expect(t.json).toEqual(decoded)
          })
      })
    }
  })
})

View File

@@ -1,341 +0,0 @@
/* eslint-disable func-style */
const coreTypes = require('../src/coretypes');
const _ = require('lodash');
const assert = require('assert');
const {encodeAccountID} = require('ripple-address-codec');
const {binary: {makeParser, readJSON}, Field, Amount, Hash160} = coreTypes;
const {enums: {TransactionType}} = coreTypes;
const utils = require('./utils');
const {parseHexOnly, assertEqualAmountJSON, hexOnly, loadFixture} = utils;
const {bytesToHex} = require('../src/utils/bytes-utils');
const fixtures = loadFixture('data-driven-tests.json');
const {BytesList} = require('../src/serdes/binary-serializer');
const __ = hexOnly;
function unused() {}
function toJSON(v) {
return v.toJSON ? v.toJSON() : v;
}
function basicApiTests() {
const bytes = parseHexOnly('00,01020304,0506', Uint8Array);
it('can read slices of bytes', () => {
const parser = makeParser(bytes);
assert.deepEqual(parser.pos(), 0);
assert(parser._buf instanceof Uint8Array);
const read1 = parser.read(1);
assert(read1 instanceof Uint8Array);
assert.deepEqual(read1, Uint8Array.from([0]));
assert.deepEqual(parser.read(4), Uint8Array.from([1, 2, 3, 4]));
assert.deepEqual(parser.read(2), Uint8Array.from([5, 6]));
assert.throws(() => parser.read(1));
});
it('can read a Uint32 at full', () => {
const parser = makeParser('FFFFFFFF');
assert.equal(parser.readUInt32(), 0xFFFFFFFF);
});
}
function transactionParsingTests() {
const transaction = {
json: {
'Account': 'raD5qJMAShLeHZXf9wjUmo6vRK4arj9cF3',
'Fee': '10',
'Flags': 0,
'Sequence': 103929,
'SigningPubKey':
'028472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F418D6A7166',
'TakerGets': {'currency': 'ILS',
'issuer': 'rNPRNzBB92BVpAhhZr4iXDTveCgV5Pofm9',
'value': '1694.768'},
'TakerPays': '98957503520',
'TransactionType': 'OfferCreate',
'TxnSignature': __(`
304502202ABE08D5E78D1E74A4C18F2714F64E87B8BD57444AF
A5733109EB3C077077520022100DB335EE97386E4C0591CAC02
4D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C`)
},
binary: __(`
120007220000000024000195F964400000170A53AC2065D5460561E
C9DE000000000000000000000000000494C53000000000092D70596
8936C419CE614BF264B5EEB1CEA47FF468400000000000000A73210
28472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F
418D6A71667447304502202ABE08D5E78D1E74A4C18F2714F64E87B
8BD57444AFA5733109EB3C077077520022100DB335EE97386E4C059
1CAC024D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C811439408
A69F0895E62149CFCC006FB89FA7D1E6E5D`)
};
const tx_json = transaction.json;
// These tests are basically development logs
it('can be done with low level apis', () => {
const parser = makeParser(transaction.binary);
assert.equal(parser.readField(), Field.TransactionType);
assert.equal(parser.readUInt16(), 7);
assert.equal(parser.readField(), Field.Flags);
assert.equal(parser.readUInt32(), 0);
assert.equal(parser.readField(), Field.Sequence);
assert.equal(parser.readUInt32(), 103929);
assert.equal(parser.readField(), Field.TakerPays);
parser.read(8);
assert.equal(parser.readField(), Field.TakerGets);
// amount value
assert(parser.read(8));
// amount currency
assert(Hash160.fromParser(parser));
assert.equal(encodeAccountID(parser.read(20)),
tx_json.TakerGets.issuer);
assert.equal(parser.readField(), Field.Fee);
assert(parser.read(8));
assert.equal(parser.readField(), Field.SigningPubKey);
assert.equal(parser.readVLLength(), 33);
assert.equal(bytesToHex(parser.read(33)), tx_json.SigningPubKey);
assert.equal(parser.readField(), Field.TxnSignature);
assert.equal(bytesToHex(parser.readVL()), tx_json.TxnSignature);
assert.equal(parser.readField(), Field.Account);
assert.equal(encodeAccountID(parser.readVL()), tx_json.Account);
assert(parser.end());
});
it('can be done with high level apis', () => {
const parser = makeParser(transaction.binary);
function readField() {
return parser.readFieldAndValue();
}
{
const [field, value] = readField();
assert.equal(field, Field.TransactionType);
assert.equal(value, TransactionType.OfferCreate);
}
{
const [field, value] = readField();
assert.equal(field, Field.Flags);
assert.equal(value, 0);
}
{
const [field, value] = readField();
assert.equal(field, Field.Sequence);
assert.equal(value, 103929);
}
{
const [field, value] = readField();
assert.equal(field, Field.TakerPays);
assert.equal(value.currency.isNative(), true);
assert.equal(value.currency.toJSON(), 'XRP');
}
{
const [field, value] = readField();
assert.equal(field, Field.TakerGets);
assert.equal(value.currency.isNative(), false);
assert.equal(value.issuer.toJSON(), tx_json.TakerGets.issuer);
}
{
const [field, value] = readField();
assert.equal(field, Field.Fee);
assert.equal(value.currency.isNative(), true);
}
{
const [field, value] = readField();
assert.equal(field, Field.SigningPubKey);
assert.equal(value.toJSON(), tx_json.SigningPubKey);
}
{
const [field, value] = readField();
assert.equal(field, Field.TxnSignature);
assert.equal(value.toJSON(), tx_json.TxnSignature);
}
{
const [field, value] = readField();
assert.equal(field, Field.Account);
assert.equal(value.toJSON(), tx_json.Account);
}
assert(parser.end());
});
it('can be done with higher level apis', () => {
const parser = makeParser(transaction.binary);
const jsonFromBinary = readJSON(parser);
assert.deepEqual(jsonFromBinary, tx_json);
});
it('readJSON (binary.decode) does not return STObject ', () => {
const parser = makeParser(transaction.binary);
const jsonFromBinary = readJSON(parser);
assert((jsonFromBinary instanceof coreTypes.STObject) === false);
assert(_.isPlainObject(jsonFromBinary));
});
}
// Registers one test per Amount fixture: each parses the fixture's hex and
// compares the decoded JSON (and optionally the exponent) to the fixture.
function amountParsingTests() {
  const amountFixtures = _.filter(fixtures.values_tests, {type: 'Amount'});
  amountFixtures.forEach((fixture, index) => {
    // Error fixtures exercise json -> binary failures; nothing to parse here.
    if (fixture.error) {
      return;
    }
    const parser = makeParser(fixture.expected_hex);
    const testName =
      `values_tests[${index}] parses ${fixture.expected_hex.slice(0, 16)}...
          as ${JSON.stringify(fixture.test_json)}`;
    it(testName, () => {
      const value = parser.readType(Amount);
      // May not actually be in canonical form. The fixtures are to be used
      // also for json -> binary;
      assertEqualAmountJSON(toJSON(value), fixture.test_json);
      if (fixture.exponent) {
        assert.equal(value.exponent(), fixture.exponent);
      }
    });
  });
}
// Registers one test per field fixture: a lone field header must decode to
// the expected field name and type name.
function fieldParsingTests() {
  fixtures.fields_tests.forEach((fixture, index) => {
    const parser = makeParser(fixture.expected_hex);
    it(`fields[${index}]: parses ${fixture.expected_hex} as ${fixture.name}`, () => {
      const decoded = parser.readField();
      assert.equal(decoded.name, fixture.name);
      assert.equal(decoded.type.name, fixture.type_name);
    });
  });
}
// Round-trips `json` through the field's associated type twice: JSON -> type
// -> JSON, then JSON -> bytes -> parser -> JSON, asserting it survives both.
function assertRecyclable(json, forField) {
  const Type = forField.associatedType;
  const fromJSON = Type.from(json).toJSON();
  assert.deepEqual(fromJSON, json);
  const sink = new BytesList();
  Type.from(fromJSON).toBytesSink(sink);
  const fromBinary = makeParser(sink.toHex()).readType(Type).toJSON();
  assert.deepEqual(fromBinary, json);
}
// For each whole-object fixture, walks the unsigned blob field-by-field and
// checks each decoded value (and the field order) against the fixture's
// expected list, then round-trips every value via assertRecyclable.
function nestedObjectTests() {
  // Hook for skipping fixtures by index; currently always false (no-op).
  function disabled(i) {
    unused(i);
    return false; // !_.includes([2], i);
  }
  fixtures.whole_objects.forEach((f, i) => {
    if (disabled(i)) {
      return;
    }
    it(`whole_objects[${i}]: can parse blob into
        ${JSON.stringify(f.tx_json)}`,
    /*                             */ () => {
      const parser = makeParser(f.blob_with_no_signing);
      let ix = 0;
      while (!parser.end()) {
        const [field, value] = parser.readFieldAndValue();
        // Each fixture entry is a [fieldName, {json: ...}] pair.
        const expected = f.fields[ix];
        const expectedJSON = expected[1].json;
        const expectedField = expected[0];
        const actual = toJSON(value);
        try {
          assert.deepEqual(actual, expectedJSON);
        } catch (e) {
          // Re-throw with the field name so the failing field is identifiable.
          throw new Error(`${e} ${field} a: ${actual} e: ${expectedJSON}`);
        }
        assert.equal(field.name, expectedField);
        assertRecyclable(actual, field);
        ix++;
      }
    });
  });
}
// Decodes a serialized transaction whose PathSet contains three long paths,
// checks the decoded `Paths` JSON against `expectedJSON`, and round-trips
// that JSON through the PathSet type.
function pathSetBinaryTests() {
  // Full serialized transaction (hex, whitespace stripped by hexOnly).
  const bytes = __(
    `1200002200000000240000002E2E00004BF161D4C71AFD498D00000000000000
    0000000000000055534400000000000A20B3C85F482532A9578DBB3950B85CA0
    6594D168400000000000000A69D446F8038585E9400000000000000000000000
    00425443000000000078CA21A6014541AB7B26C3929B9E0CD8C284D61C732103
    A4665B1F0B7AE2BCA12E2DB80A192125BBEA660F80E9CEE137BA444C1B0769EC
    7447304502205A964536805E35785C659D1F9670D057749AE39668175D6AA75D
    25B218FE682E0221009252C0E5DDD5F2712A48F211669DE17B54113918E0D2C2
    66F818095E9339D7D3811478CA21A6014541AB7B26C3929B9E0CD8C284D61C83
    140A20B3C85F482532A9578DBB3950B85CA06594D1011231585E1F3BD02A15D6
    185F8BB9B57CC60DEDDB37C10000000000000000000000004254430000000000
    585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C131E4FE687C90257D3D2D694C
    8531CDEECBE84F33670000000000000000000000004254430000000000E4FE68
    7C90257D3D2D694C8531CDEECBE84F3367310A20B3C85F482532A9578DBB3950
    B85CA06594D100000000000000000000000042544300000000000A20B3C85F48
    2532A9578DBB3950B85CA06594D1300000000000000000000000005553440000
    0000000A20B3C85F482532A9578DBB3950B85CA06594D1FF31585E1F3BD02A15
    D6185F8BB9B57CC60DEDDB37C100000000000000000000000042544300000000
    00585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C131E4FE687C90257D3D2D69
    4C8531CDEECBE84F33670000000000000000000000004254430000000000E4FE
    687C90257D3D2D694C8531CDEECBE84F33673115036E2D3F5437A83E5AC3CAEE
    34FF2C21DEB618000000000000000000000000425443000000000015036E2D3F
    5437A83E5AC3CAEE34FF2C21DEB6183000000000000000000000000055534400
    000000000A20B3C85F482532A9578DBB3950B85CA06594D1FF31585E1F3BD02A
    15D6185F8BB9B57CC60DEDDB37C1000000000000000000000000425443000000
    0000585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C13157180C769B66D942EE
    69E6DCC940CA48D82337AD000000000000000000000000425443000000000057
    180C769B66D942EE69E6DCC940CA48D82337AD10000000000000000000000000
    58525000000000003000000000000000000000000055534400000000000A20B3
    C85F482532A9578DBB3950B85CA06594D100`);
  // Expected decoded Paths: an array of three paths, each an array of hops
  // (account / currency / issuer combinations).
  const expectedJSON =
    [[{account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'},
    {account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
      currency: 'BTC',
      issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo'},
    {account: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
      currency: 'BTC',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'},
    {currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'}],
    [{account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'},
    {account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
      currency: 'BTC',
      issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo'},
    {account: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi',
      currency: 'BTC',
      issuer: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi'},
    {currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'}],
    [{account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'},
    {account: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn',
      currency: 'BTC',
      issuer: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn'},
    // A currency hop that decodes to a raw 160-bit hex value ('XRP' padded).
    {currency: '0000000000000000000000005852500000000000'},
    {currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'}]];
  it('works with long paths', () => {
    const parser = makeParser(bytes);
    const txn = readJSON(parser);
    assert.deepEqual(txn.Paths, expectedJSON);
    // TODO: this should go elsewhere
    assert.deepEqual(
      coreTypes.PathSet.from(txn.Paths).toJSON(),
      expectedJSON
    );
  });
}
// Suite wiring: groups the test-builder functions above into describe
// blocks; mocha registers them in this exact order.
describe('BinaryParser', function() {
  // Fixture-driven suites grouped under one parent describe.
  function dataDrivenTests() {
    describe('Amount parsing tests', amountParsingTests);
    describe('Field Tests', fieldParsingTests);
    describe('Parsing nested objects', nestedObjectTests);
  }
  describe('pathSetBinaryTests', pathSetBinaryTests);
  describe('Basic API', basicApiTests);
  describe('Parsing a transaction', transactionParsingTests);
  describe('Data Driven Tests', dataDrivenTests);
});

View File

@@ -0,0 +1,377 @@
/* eslint-disable func-style */
const { coreTypes } = require('../dist/types')
const Decimal = require('decimal.js')
const _ = require('lodash')
const { encodeAccountID } = require('ripple-address-codec')
const { binary } = require('../dist/coretypes')
const { Amount, Hash160 } = coreTypes
const { makeParser, readJSON } = binary
const { Enums } = require('./../dist/enums')
const Field = Enums.Field
const { TransactionType } = Enums
const { parseHexOnly, hexOnly, loadFixture } = require('./utils')
const { bytesToHex } = require('../dist/utils/bytes-utils')
const fixtures = loadFixture('data-driven-tests.json')
const { BytesList } = require('../dist/serdes/binary-serializer')
const __ = hexOnly
function unused () {}
// Returns v.toJSON() when the value exposes one, otherwise v unchanged.
function toJSON (v) {
  if (v.toJSON) {
    return v.toJSON()
  }
  return v
}
// Compares decoded Amount JSON to a fixture: string amounts must match
// exactly; object amounts compare currency, issuer, and numeric value
// (via Decimal, so differently formatted but equal numbers still match).
function assertEqualAmountJSON (actual, expected) {
  expect(typeof actual).toBe(typeof expected)
  if (typeof actual === 'string') {
    expect(actual).toEqual(expected)
    return
  }
  expect(actual.currency).toEqual(expected.currency)
  expect(actual.issuer).toEqual(expected.issuer)
  const sameValue = actual.value === expected.value ||
    new Decimal(actual.value).equals(new Decimal(expected.value))
  expect(sameValue).toBe(true)
}
// Low-level BinaryParser API: position tracking, slice reads, bounds
// checking, and fixed-width integer reads.
function basicApiTests () {
  const bytes = parseHexOnly('00,01020304,0506', Uint8Array)
  test('can read slices of bytes', () => {
    const parser = makeParser(bytes)
    expect(parser.pos()).toBe(0)
    // NOTE(review): reaches into the parser's private buffer — confirm _buf
    // is still the backing store if the parser implementation changes.
    expect(parser._buf instanceof Uint8Array).toBe(true)
    const firstSlice = parser.read(1)
    expect(firstSlice instanceof Uint8Array).toBe(true)
    expect(firstSlice).toEqual(Uint8Array.from([0]))
    expect(parser.read(4)).toEqual(Uint8Array.from([1, 2, 3, 4]))
    expect(parser.read(2)).toEqual(Uint8Array.from([5, 6]))
    // Reading past the end of the buffer must throw.
    expect(() => parser.read(1)).toThrow()
  })
  test('can read a Uint32 at full', () => {
    expect(makeParser('FFFFFFFF').readUInt32()).toEqual(0xFFFFFFFF)
  })
}
basicApiTests()
// Parses a known OfferCreate transaction at three levels of abstraction
// (raw reads, readFieldAndValue, readJSON) and compares against its JSON.
function transactionParsingTests () {
  const transaction = {
    json: {
      Account: 'raD5qJMAShLeHZXf9wjUmo6vRK4arj9cF3',
      Fee: '10',
      Flags: 0,
      Sequence: 103929,
      SigningPubKey:
        '028472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F418D6A7166',
      TakerGets: {
        currency: 'ILS',
        issuer: 'rNPRNzBB92BVpAhhZr4iXDTveCgV5Pofm9',
        value: '1694.768'
      },
      TakerPays: '98957503520',
      TransactionType: 'OfferCreate',
      TxnSignature: __(`
        304502202ABE08D5E78D1E74A4C18F2714F64E87B8BD57444AF
        A5733109EB3C077077520022100DB335EE97386E4C0591CAC02
        4D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C`)
    },
    binary: __(`
      120007220000000024000195F964400000170A53AC2065D5460561E
      C9DE000000000000000000000000000494C53000000000092D70596
      8936C419CE614BF264B5EEB1CEA47FF468400000000000000A73210
      28472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F
      418D6A71667447304502202ABE08D5E78D1E74A4C18F2714F64E87B
      8BD57444AFA5733109EB3C077077520022100DB335EE97386E4C059
      1CAC024D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C811439408
      A69F0895E62149CFCC006FB89FA7D1E6E5D`)
  }
  const tx_json = transaction.json
  // These tests are basically development logs
  test('can be done with low level apis', () => {
    const parser = makeParser(transaction.binary)
    expect(parser.readField()).toEqual(Field.TransactionType)
    expect(parser.readUInt16()).toEqual(7)
    expect(parser.readField()).toEqual(Field.Flags)
    expect(parser.readUInt32()).toEqual(0)
    expect(parser.readField()).toEqual(Field.Sequence)
    expect(parser.readUInt32()).toEqual(103929)
    expect(parser.readField()).toEqual(Field.TakerPays)
    parser.read(8)
    expect(parser.readField()).toEqual(Field.TakerGets)
    // amount value
    // BUG FIX: `expect(...).not.toBe([])` could never fail — toBe is
    // reference identity and a fresh [] literal is never the same object.
    // Assert the read actually produced the expected number of bytes.
    expect(parser.read(8)).toHaveLength(8)
    // amount currency — same vacuous-assertion fix: check a value came back.
    expect(Hash160.fromParser(parser)).toBeDefined()
    expect(encodeAccountID(parser.read(20))).toEqual(tx_json.TakerGets.issuer)
    expect(parser.readField()).toEqual(Field.Fee)
    expect(parser.read(8)).toHaveLength(8)
    expect(parser.readField()).toEqual(Field.SigningPubKey)
    expect(parser.readVLLength()).toBe(33)
    expect(bytesToHex(parser.read(33))).toEqual(tx_json.SigningPubKey)
    expect(parser.readField()).toEqual(Field.TxnSignature)
    expect(bytesToHex(parser.readVL())).toEqual(tx_json.TxnSignature)
    expect(parser.readField()).toEqual(Field.Account)
    expect(encodeAccountID(parser.readVL())).toEqual(tx_json.Account)
    expect(parser.end()).toBe(true)
  })
  test('can be done with high level apis', () => {
    const parser = makeParser(transaction.binary)
    function readField () {
      return parser.readFieldAndValue()
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.TransactionType)
      expect(value).toEqual(TransactionType.OfferCreate)
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.Flags)
      expect(value.val).toEqual(0)
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.Sequence)
      expect(value.val).toEqual(103929)
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.TakerPays)
      expect(value.currency.isNative()).toEqual(true)
      expect(value.currency.toJSON()).toEqual('XRP')
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.TakerGets)
      expect(value.currency.isNative()).toEqual(false)
      expect(value.issuer.toJSON()).toEqual(tx_json.TakerGets.issuer)
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.Fee)
      expect(value.currency.isNative()).toEqual(true)
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.SigningPubKey)
      expect(value.toJSON()).toEqual(tx_json.SigningPubKey)
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.TxnSignature)
      expect(value.toJSON()).toEqual(tx_json.TxnSignature)
    }
    {
      const [field, value] = readField()
      expect(field).toEqual(Field.Account)
      expect(value.toJSON()).toEqual(tx_json.Account)
    }
    expect(parser.end()).toBe(true)
  })
  test('can be done with higher level apis', () => {
    const parser = makeParser(transaction.binary)
    const jsonFromBinary = readJSON(parser)
    expect(jsonFromBinary).toEqual(tx_json)
  })
  test('readJSON (binary.decode) does not return STObject ', () => {
    const parser = makeParser(transaction.binary)
    const jsonFromBinary = readJSON(parser)
    expect(jsonFromBinary instanceof coreTypes.STObject).toBe(false)
    expect(_.isPlainObject(jsonFromBinary)).toBe(true)
  })
}
transactionParsingTests()
// Registers one test per Amount fixture: parse the hex and compare the
// decoded JSON (and optionally the exponent) against the fixture.
function amountParsingTests () {
  const amountFixtures = _.filter(fixtures.values_tests, { type: 'Amount' })
  amountFixtures.forEach((fixture, index) => {
    // Error fixtures exercise json -> binary failures; nothing to parse here.
    if (fixture.error) {
      return
    }
    const parser = makeParser(fixture.expected_hex)
    const testName =
      `values_tests[${index}] parses ${fixture.expected_hex.slice(0, 16)}...
          as ${JSON.stringify(fixture.test_json)}`
    test(testName, () => {
      const value = parser.readType(Amount)
      // May not actually be in canonical form. The fixtures are to be used
      // also for json -> binary;
      assertEqualAmountJSON(toJSON(value), fixture.test_json)
      if (fixture.exponent) {
        expect(value.exponent()).toEqual(fixture.exponent)
      }
    })
  })
}
amountParsingTests()
// Registers one test per field fixture: a lone field header must decode to
// the expected field name and type name.
function fieldParsingTests () {
  fixtures.fields_tests.forEach((fixture, index) => {
    const parser = makeParser(fixture.expected_hex)
    test(`fields[${index}]: parses ${fixture.expected_hex} as ${fixture.name}`, () => {
      const decoded = parser.readField()
      expect(decoded.name).toEqual(fixture.name)
      expect(decoded.type.name).toEqual(fixture.type_name)
    })
  })
}
fieldParsingTests()
// Round-trips `json` through the field's associated type twice: JSON -> type
// -> JSON, then JSON -> bytes -> parser -> JSON, asserting it survives both.
function assertRecyclable (json, forField) {
  const Type = forField.associatedType
  const fromJSON = Type.from(json).toJSON()
  expect(fromJSON).toEqual(json)
  const sink = new BytesList()
  Type.from(fromJSON).toBytesSink(sink)
  const fromBinary = makeParser(sink.toHex()).readType(Type).toJSON()
  expect(fromBinary).toEqual(json)
}
// For each whole-object fixture, walks the unsigned blob field-by-field and
// checks each decoded value (and the field order) against the fixture's
// expected list, then round-trips every value via assertRecyclable.
function nestedObjectTests () {
  // Hook for skipping fixtures by index; currently always false (no-op).
  function disabled (i) {
    unused(i)
    return false // !_.includes([2], i);
  }
  fixtures.whole_objects.forEach((f, i) => {
    if (disabled(i)) {
      return
    }
    test(`whole_objects[${i}]: can parse blob into
          ${JSON.stringify(f.tx_json)}`,
    /*                               */ () => {
      const parser = makeParser(f.blob_with_no_signing)
      let ix = 0
      while (!parser.end()) {
        const [field, value] = parser.readFieldAndValue()
        // Each fixture entry is a [fieldName, {json: ...}] pair.
        const expected = f.fields[ix]
        const expectedJSON = expected[1].json
        const expectedField = expected[0]
        const actual = toJSON(value)
        try {
          expect(actual).toEqual(expectedJSON)
        } catch (e) {
          // Re-throw with the field name so the failing field is identifiable.
          throw new Error(`${e} ${field} a: ${actual} e: ${expectedJSON}`)
        }
        expect(field.name).toEqual(expectedField)
        assertRecyclable(actual, field)
        ix++
      }
    })
  })
}
// Decodes a serialized transaction whose PathSet contains three long paths,
// checks the decoded `Paths` JSON against `expectedJSON`, and round-trips
// that JSON through the PathSet type.
function pathSetBinaryTests () {
  // Full serialized transaction (hex, whitespace stripped by hexOnly).
  const bytes = __(
    `1200002200000000240000002E2E00004BF161D4C71AFD498D00000000000000
    0000000000000055534400000000000A20B3C85F482532A9578DBB3950B85CA0
    6594D168400000000000000A69D446F8038585E9400000000000000000000000
    00425443000000000078CA21A6014541AB7B26C3929B9E0CD8C284D61C732103
    A4665B1F0B7AE2BCA12E2DB80A192125BBEA660F80E9CEE137BA444C1B0769EC
    7447304502205A964536805E35785C659D1F9670D057749AE39668175D6AA75D
    25B218FE682E0221009252C0E5DDD5F2712A48F211669DE17B54113918E0D2C2
    66F818095E9339D7D3811478CA21A6014541AB7B26C3929B9E0CD8C284D61C83
    140A20B3C85F482532A9578DBB3950B85CA06594D1011231585E1F3BD02A15D6
    185F8BB9B57CC60DEDDB37C10000000000000000000000004254430000000000
    585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C131E4FE687C90257D3D2D694C
    8531CDEECBE84F33670000000000000000000000004254430000000000E4FE68
    7C90257D3D2D694C8531CDEECBE84F3367310A20B3C85F482532A9578DBB3950
    B85CA06594D100000000000000000000000042544300000000000A20B3C85F48
    2532A9578DBB3950B85CA06594D1300000000000000000000000005553440000
    0000000A20B3C85F482532A9578DBB3950B85CA06594D1FF31585E1F3BD02A15
    D6185F8BB9B57CC60DEDDB37C100000000000000000000000042544300000000
    00585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C131E4FE687C90257D3D2D69
    4C8531CDEECBE84F33670000000000000000000000004254430000000000E4FE
    687C90257D3D2D694C8531CDEECBE84F33673115036E2D3F5437A83E5AC3CAEE
    34FF2C21DEB618000000000000000000000000425443000000000015036E2D3F
    5437A83E5AC3CAEE34FF2C21DEB6183000000000000000000000000055534400
    000000000A20B3C85F482532A9578DBB3950B85CA06594D1FF31585E1F3BD02A
    15D6185F8BB9B57CC60DEDDB37C1000000000000000000000000425443000000
    0000585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C13157180C769B66D942EE
    69E6DCC940CA48D82337AD000000000000000000000000425443000000000057
    180C769B66D942EE69E6DCC940CA48D82337AD10000000000000000000000000
    58525000000000003000000000000000000000000055534400000000000A20B3
    C85F482532A9578DBB3950B85CA06594D100`)
  // Expected decoded Paths: an array of three paths, each an array of hops
  // (account / currency / issuer combinations).
  const expectedJSON =
    [[{
      account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'
    },
    {
      account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
      currency: 'BTC',
      issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo'
    },
    {
      account: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
      currency: 'BTC',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'
    },
    {
      currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'
    }],
    [{
      account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'
    },
    {
      account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
      currency: 'BTC',
      issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo'
    },
    {
      account: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi',
      currency: 'BTC',
      issuer: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi'
    },
    {
      currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'
    }],
    [{
      account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K'
    },
    {
      account: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn',
      currency: 'BTC',
      issuer: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn'
    },
    // A currency hop that decodes to a raw 160-bit hex value ('XRP' padded).
    { currency: '0000000000000000000000005852500000000000' },
    {
      currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B'
    }]]
  test('works with long paths', () => {
    const parser = makeParser(bytes)
    const txn = readJSON(parser)
    expect(txn.Paths).toEqual(expectedJSON)
    // TODO: this should go elsewhere
    expect(
      coreTypes.PathSet.from(txn.Paths).toJSON()).toEqual(
      expectedJSON
    )
  })
}
pathSetBinaryTests()
// BUG FIX: pathSetBinaryTests() is already invoked at module scope above, so
// registering it again inside this describe ran every path-set test twice.
// Only nestedObjectTests (not otherwise invoked) is registered here.
describe('Binary Parser', function () {
  describe('nestedObjectTests', nestedObjectTests)
})

View File

@@ -1,194 +0,0 @@
/* eslint-disable func-style */
const BN = require('bn.js');
const assert = require('assert');
const lib = require('../src/coretypes');
const encode = require('../src').encode;
const {binary: {makeParser, BytesList, BinarySerializer}} = lib;
const {UInt8, UInt16, UInt32, UInt64, STObject} = lib;
const {loadFixture} = require('./utils');
const fixtures = loadFixture('data-driven-tests.json');
const deliverMinTx = require('./fixtures/delivermin-tx.json');
const deliverMinTxBinary = require('./fixtures/delivermin-tx-binary.json');
const SignerListSet = {
tx: require('./fixtures/signerlistset-tx.json'),
binary: require('./fixtures/signerlistset-tx-binary.json'),
meta: require('./fixtures/signerlistset-tx-meta-binary.json')
};
const DepositPreauth = {
tx: require('./fixtures/deposit-preauth-tx.json'),
binary: require('./fixtures/deposit-preauth-tx-binary.json'),
meta: require('./fixtures/deposit-preauth-tx-meta-binary.json')
};
const Escrow = {
create: {
tx: require('./fixtures/escrow-create-tx.json'),
binary: require('./fixtures/escrow-create-binary.json')
},
finish: {
tx: require('./fixtures/escrow-finish-tx.json'),
binary: require('./fixtures/escrow-finish-binary.json'),
meta: require('./fixtures/escrow-finish-meta-binary.json')
},
cancel: {
tx: require('./fixtures/escrow-cancel-tx.json'),
binary: require('./fixtures/escrow-cancel-binary.json')
}
}
const PaymentChannel = {
create: {
tx: require('./fixtures/payment-channel-create-tx.json'),
binary: require('./fixtures/payment-channel-create-binary.json')
},
fund: {
tx: require('./fixtures/payment-channel-fund-tx.json'),
binary: require('./fixtures/payment-channel-fund-binary.json')
},
claim: {
tx: require('./fixtures/payment-channel-claim-tx.json'),
binary: require('./fixtures/payment-channel-claim-binary.json')
}
}
// BytesList accumulates byte chunks; verify storage shape, length tracking,
// and concatenation via toBytes.
function bytesListTest() {
  const list = new BytesList().put([0]).put([2, 3]).put([4, 5]);
  it('is an Array<Uint8Array>', function() {
    assert(Array.isArray(list.arrays));
    assert(list.arrays[0] instanceof Uint8Array);
  });
  it('keeps track of the length itself', function() {
    assert.equal(list.length, 5);
  });
  it('can join all arrays into one via toBytes', function() {
    const joined = list.toBytes();
    // BUG FIX: `assert(joined.length, 5)` only checked truthiness of
    // joined.length (the 5 was treated as the assertion message), so any
    // non-empty result passed. Compare the length explicitly.
    assert.equal(joined.length, 5);
    assert.deepEqual(joined, Uint8Array.from([0, 2, 3, 4, 5]));
  });
}
// Parses `blob` as an STObject, re-serializes it, and asserts the hex output
// is byte-for-byte identical to the input (plus a sanity check that the
// comparison itself is able to fail).
function assertRecycles(blob) {
  const so = makeParser(blob).readType(STObject);
  const sink = new BytesList();
  so.toBytesSink(sink);
  const hex = sink.toHex();
  assert.equal(hex, blob);
  assert.notEqual(hex + ':', blob);
}
// Every whole-object fixture must survive a parse/serialize round trip.
function nestedObjectTests() {
  fixtures.whole_objects.forEach((fixture, index) => {
    it(`whole_objects[${index}]: can parse blob and dump out same blob`,
      () => {
        assertRecycles(fixture.blob_with_no_signing);
      });
  });
}
// Exercises fixed-width unsigned integer serialization: exact byte output
// for in-range values, and a throw for out-of-range ones.
function UIntTest() {
  // Serializes `n` as `type` and checks the exact bytes produced, or that
  // serialization throws when `expected` is the sentinel string 'throws'.
  function check(type, n, expected) {
    it(`Uint${type.width * 8} serializes ${n} as ${expected}`, function() {
      const bl = new BytesList();
      const serializer = new BinarySerializer(bl);
      if (expected === 'throws') {
        assert.throws(() => serializer.writeType(type, n));
        return;
      }
      serializer.writeType(type, n);
      assert.deepEqual(bl.toBytes(), Uint8Array.from(expected));
    });
  }
  check(UInt8, 5, [5]);
  check(UInt16, 5, [0, 5]);
  check(UInt32, 5, [0, 0, 0, 5]);
  check(UInt32, 0xFFFFFFFF, [255, 255, 255, 255]);
  check(UInt8, 0xFEFFFFFF, 'throws');
  // A second, identical UInt16 overflow check was removed as a duplicate.
  check(UInt16, 0xFEFFFFFF, 'throws');
  check(UInt64, 0xFEFFFFFF, [0, 0, 0, 0, 254, 255, 255, 255]);
  check(UInt64, -1, 'throws');
  check(UInt64, 0, [0, 0, 0, 0, 0, 0, 0, 0]);
  check(UInt64, 1, [0, 0, 0, 0, 0, 0, 0, 1]);
  check(UInt64, new BN(1), [0, 0, 0, 0, 0, 0, 0, 1]);
}
// Round-trips every entry of a large real-ledger fixture (slow; the suite is
// registered with describe.skip below).
function parseLedger4320278() {
  // BUG FIX: the callback was an arrow function, so `this` was captured
  // lexically instead of being the mocha test context, and
  // `this.timeout(30e3)` did not reliably apply to this test. Mocha requires
  // a regular function for per-test timeouts. The body is synchronous, so
  // the `done` callback was unnecessary as well.
  it('can parse object', function() {
    this.timeout(30e3);
    const json = loadFixture('as-ledger-4320278.json');
    json.forEach(e => {
      assertRecycles(e.binary);
    });
  });
}
// A DeliverMin transaction fixture must encode to its known binary form.
function deliverMinTest() {
  it('can serialize DeliverMin', () =>
    assert.strictEqual(encode(deliverMinTx), deliverMinTxBinary));
}
// SignerListSet transaction and its metadata must encode to known binaries.
function SignerListSetTest() {
  it('can serialize SignerListSet', () =>
    assert.strictEqual(encode(SignerListSet.tx), SignerListSet.binary));
  it('can serialize SignerListSet metadata', () =>
    assert.strictEqual(encode(SignerListSet.tx.meta), SignerListSet.meta));
}
// DepositPreauth transaction and its metadata must encode to known binaries.
function DepositPreauthTest() {
  it('can serialize DepositPreauth', () =>
    assert.strictEqual(encode(DepositPreauth.tx), DepositPreauth.binary));
  it('can serialize DepositPreauth metadata', () =>
    assert.strictEqual(encode(DepositPreauth.tx.meta), DepositPreauth.meta));
}
// Escrow create/finish/cancel fixtures must encode to their known binaries
// (finish also checks its metadata).
function EscrowTest() {
  it('can serialize EscrowCreate', () =>
    assert.strictEqual(encode(Escrow.create.tx), Escrow.create.binary));
  it('can serialize EscrowFinish', () => {
    assert.strictEqual(encode(Escrow.finish.tx), Escrow.finish.binary);
    assert.strictEqual(encode(Escrow.finish.tx.meta), Escrow.finish.meta);
  });
  it('can serialize EscrowCancel', () =>
    assert.strictEqual(encode(Escrow.cancel.tx), Escrow.cancel.binary));
}
// PaymentChannel create/fund/claim fixtures must encode to known binaries.
function PaymentChannelTest() {
  it('can serialize PaymentChannelCreate', () =>
    assert.strictEqual(encode(PaymentChannel.create.tx),
      PaymentChannel.create.binary));
  it('can serialize PaymentChannelFund', () =>
    assert.strictEqual(encode(PaymentChannel.fund.tx),
      PaymentChannel.fund.binary));
  it('can serialize PaymentChannelClaim', () =>
    assert.strictEqual(encode(PaymentChannel.claim.tx),
      PaymentChannel.claim.binary));
}
// Suite wiring; parseLedger4320278 is registered with skip because it is
// slow (see its 30s timeout).
describe('Binary Serialization', function() {
  describe.skip('parseLedger4320278', parseLedger4320278);
  describe('nestedObjectTests', nestedObjectTests);
  describe('UIntTest', UIntTest);
  describe('BytesList', bytesListTest);
  describe('DeliverMin', deliverMinTest);
  describe('DepositPreauth', DepositPreauthTest);
  describe('SignerListSet', SignerListSetTest);
  describe('Escrow', EscrowTest);
  describe('PaymentChannel', PaymentChannelTest);
});

View File

@@ -0,0 +1,183 @@
/* eslint-disable func-style */
const { BN } = require('bn.js')
const { binary } = require('../dist/coretypes')
const { encode } = require('../dist')
const { makeParser, BytesList, BinarySerializer } = binary
const { coreTypes } = require('../dist/types')
const { UInt8, UInt16, UInt32, UInt64, STObject } = coreTypes
const { loadFixture } = require('./utils')
const fixtures = loadFixture('data-driven-tests.json')
const deliverMinTx = require('./fixtures/delivermin-tx.json')
const deliverMinTxBinary = require('./fixtures/delivermin-tx-binary.json')
const SignerListSet = {
tx: require('./fixtures/signerlistset-tx.json'),
binary: require('./fixtures/signerlistset-tx-binary.json'),
meta: require('./fixtures/signerlistset-tx-meta-binary.json')
}
const DepositPreauth = {
tx: require('./fixtures/deposit-preauth-tx.json'),
binary: require('./fixtures/deposit-preauth-tx-binary.json'),
meta: require('./fixtures/deposit-preauth-tx-meta-binary.json')
}
const Escrow = {
create: {
tx: require('./fixtures/escrow-create-tx.json'),
binary: require('./fixtures/escrow-create-binary.json')
},
finish: {
tx: require('./fixtures/escrow-finish-tx.json'),
binary: require('./fixtures/escrow-finish-binary.json'),
meta: require('./fixtures/escrow-finish-meta-binary.json')
},
cancel: {
tx: require('./fixtures/escrow-cancel-tx.json'),
binary: require('./fixtures/escrow-cancel-binary.json')
}
}
const PaymentChannel = {
create: {
tx: require('./fixtures/payment-channel-create-tx.json'),
binary: require('./fixtures/payment-channel-create-binary.json')
},
fund: {
tx: require('./fixtures/payment-channel-fund-tx.json'),
binary: require('./fixtures/payment-channel-fund-binary.json')
},
claim: {
tx: require('./fixtures/payment-channel-claim-tx.json'),
binary: require('./fixtures/payment-channel-claim-binary.json')
}
}
// BytesList accumulates byte chunks; verify storage shape, length tracking,
// and concatenation via toBytes.
function bytesListTest () {
  const list = new BytesList().put([0]).put([2, 3]).put([4, 5])
  test('is an Array<Uint8Array>', () => {
    expect(Array.isArray(list.arrays)).toBe(true)
    expect(list.arrays[0] instanceof Uint8Array).toBe(true)
  })
  test('keeps track of the length itself', () => {
    expect(list.length).toBe(5)
  })
  test('can join all arrays into one via toBytes', () => {
    const joined = list.toBytes()
    expect(joined.length).toBe(5)
    expect(joined).toEqual(Uint8Array.from([0, 2, 3, 4, 5]))
  })
}
// Parses `blob` as an STObject, re-serializes it, and asserts the hex output
// is byte-for-byte identical to the input (plus a sanity check that the
// comparison itself is able to fail).
function assertRecycles (blob) {
  const so = makeParser(blob).readType(STObject)
  const sink = new BytesList()
  so.toBytesSink(sink)
  const hex = sink.toHex()
  expect(hex).toEqual(blob)
  expect(hex + ':').not.toEqual(blob)
}
// Every whole-object fixture must survive a parse/serialize round trip.
function nestedObjectTests () {
  fixtures.whole_objects.forEach((fixture, index) => {
    test(`whole_objects[${index}]: can parse blob and dump out same blob`,
      () => {
        assertRecycles(fixture.blob_with_no_signing)
      })
  })
}
// Serializes `n` as `type` and asserts the exact byte output, or asserts a
// throw when `expected` is the sentinel string 'throws'.
function check (type, n, expected) {
  test(`Uint${type.width * 8} serializes ${n} as ${expected}`, () => {
    const sink = new BytesList()
    const serializer = new BinarySerializer(sink)
    if (expected === 'throws') {
      expect(() => serializer.writeType(type, n)).toThrow()
      return
    }
    serializer.writeType(type, n)
    expect(sink.toBytes()).toEqual(Uint8Array.from(expected))
  })
}
check(UInt8, 5, [5])
check(UInt16, 5, [0, 5])
check(UInt32, 5, [0, 0, 0, 5])
check(UInt32, 0xFFFFFFFF, [255, 255, 255, 255])
check(UInt8, 0xFEFFFFFF, 'throws')
// A second, identical UInt16 overflow check was removed as a duplicate.
check(UInt16, 0xFEFFFFFF, 'throws')
check(UInt64, 0xFEFFFFFF, [0, 0, 0, 0, 254, 255, 255, 255])
check(UInt64, -1, 'throws')
check(UInt64, 0, [0, 0, 0, 0, 0, 0, 0, 0])
check(UInt64, 1, [0, 0, 0, 0, 0, 0, 0, 1])
check(UInt64, new BN(1), [0, 0, 0, 0, 0, 0, 0, 1])
// parseLedger4320278 is kept disabled: it round-trips every entry of the
// large as-ledger-4320278.json fixture and is too slow for the regular run.
// function parseLedger4320278() {
//   test('can parse object', done => {
//     const json = loadFixture('as-ledger-4320278.json');
//     json.forEach(e => {
//       assertRecycles(e.binary);
//     });
//     done();
//   });
// }
// parseLedger4320278();
// A DeliverMin transaction fixture must encode to its known binary form.
function deliverMinTest () {
  test('can serialize DeliverMin', () =>
    expect(encode(deliverMinTx)).toEqual(deliverMinTxBinary))
}
// SignerListSet transaction and its metadata must encode to known binaries.
function SignerListSetTest () {
  test('can serialize SignerListSet', () =>
    expect(encode(SignerListSet.tx)).toEqual(SignerListSet.binary))
  test('can serialize SignerListSet metadata', () =>
    expect(encode(SignerListSet.tx.meta)).toEqual(SignerListSet.meta))
}
// DepositPreauth transaction and its metadata must encode to known binaries.
function DepositPreauthTest () {
  test('can serialize DepositPreauth', () =>
    expect(encode(DepositPreauth.tx)).toEqual(DepositPreauth.binary))
  test('can serialize DepositPreauth metadata', () =>
    expect(encode(DepositPreauth.tx.meta)).toEqual(DepositPreauth.meta))
}
// Escrow create/finish/cancel fixtures must encode to their known binaries
// (finish also checks its metadata).
function EscrowTest () {
  test('can serialize EscrowCreate', () =>
    expect(encode(Escrow.create.tx)).toEqual(Escrow.create.binary))
  test('can serialize EscrowFinish', () => {
    expect(encode(Escrow.finish.tx)).toEqual(Escrow.finish.binary)
    expect(encode(Escrow.finish.tx.meta)).toEqual(Escrow.finish.meta)
  })
  test('can serialize EscrowCancel', () =>
    expect(encode(Escrow.cancel.tx)).toEqual(Escrow.cancel.binary))
}
// PaymentChannel create/fund/claim fixtures must encode to known binaries.
function PaymentChannelTest () {
  test('can serialize PaymentChannelCreate', () =>
    expect(encode(PaymentChannel.create.tx))
      .toEqual(PaymentChannel.create.binary))
  test('can serialize PaymentChannelFund', () =>
    expect(encode(PaymentChannel.fund.tx))
      .toEqual(PaymentChannel.fund.binary))
  test('can serialize PaymentChannelClaim', () =>
    expect(encode(PaymentChannel.claim.tx))
      .toEqual(PaymentChannel.claim.binary))
}
// Suite wiring for the serializer tests. Reformatted to the semicolon-free
// style used throughout the rest of this file (the original mixed styles).
describe('Binary Serialization', function () {
  describe('nestedObjectTests', nestedObjectTests)
  describe('BytesList', bytesListTest)
  describe('DeliverMin', deliverMinTest)
  describe('DepositPreauth', DepositPreauthTest)
  describe('SignerListSet', SignerListSetTest)
  describe('Escrow', EscrowTest)
  describe('PaymentChannel', PaymentChannelTest)
})

View File

@@ -1,72 +0,0 @@
const assert = require('assert');
const bytesUtils = require('../src/utils/bytes-utils');
const {slice, compareBytes, parseBytes, bytesToHex} = bytesUtils;
// Unit tests for the bytes-utils helpers: parseBytes, bytesToHex,
// compareBytes, and slice.
describe('bytes-utils', () => {
  describe('parseBytes', () => {
    it('can decode hex', () => {
      assert.deepEqual(parseBytes('0012'), [0x00, 0x12]);
      assert.deepEqual(parseBytes('0012'), [0x00, 0x12]);
      assert.deepEqual(parseBytes('00AA'), [0x00, 0xaa]);
    });
    it('can decode hex to a Uint8Array', () => {
      const decoded = parseBytes('0012', Uint8Array);
      assert(decoded instanceof Uint8Array);
      assert.deepEqual(decoded, Uint8Array.from([0x00, 0x12]));
    });
    it('can convert a list to a Uint8Array', () => {
      const converted = parseBytes([0x00, 0x12], Uint8Array);
      assert(converted instanceof Uint8Array);
      assert.deepEqual(converted, Uint8Array.from([0x00, 0x12]));
    });
    it('can decode hex to a Buffer', () => {
      const decoded = parseBytes('0012', Buffer);
      assert(decoded instanceof Buffer);
      assert.deepEqual(decoded.toJSON().data, [0x00, 0x12]);
    });
  });
  describe('bytesToHex', () => {
    it('can encode an array as hex', () => {
      assert.deepEqual(bytesToHex([0x00, 0xaa]), '00AA');
      assert.deepEqual(bytesToHex([0xaa]), 'AA');
    });
    it('can encode Uint8Array as hex', () => {
      assert.deepEqual(bytesToHex(new Uint8Array([0x00, 0xaa])), '00AA');
      assert.deepEqual(bytesToHex(new Uint8Array([0xaa])), 'AA');
    });
  });
  describe('compareBytes', () => {
    it('compares the bytes sequence as big endian number', () => {
      assert.equal(compareBytes([0, 1, 2], [1, 2, 3]), -1);
    });
    it('throws when the bytes sequences are of unlike length', () => {
      assert.throws(() => compareBytes([0, 1], [1]));
    });
  });
  describe('slice', () => {
    const val = [1, 2, 3, 4, 5];
    it('creates a slice of the same type as first arg', () => {
      assert(Array.isArray(slice(val)));
    });
    it('the 2nd arg is the start position [2:]', () => {
      assert.deepEqual(val.slice(2), [3, 4, 5]);
      assert.deepEqual(slice(val, 2), [3, 4, 5]);
    });
    it('the 3rd arg is the end position [2:4]', () => {
      assert.deepEqual(slice(val, 2, 4), [3, 4]);
    });
    it('can slice using negative numbers [-3:]', () => {
      assert.deepEqual(slice(val, -3), [3, 4, 5]);
    });
    it('can slice using negative numbers [-3:-1]', () => {
      assert.deepEqual(slice(val, -3, -1), [3, 4]);
    });
    it('the 4th arg is the output class type', () => {
      assert.deepEqual(slice(val, 2, 4, Buffer).toJSON().data, [3, 4]);
      assert.deepEqual(slice(val, 2, 4, Uint8Array), Uint8Array.from([3, 4]));
    });
  });
});

View File

@@ -0,0 +1,70 @@
// Jest unit tests for dist/utils/bytes-utils: parseBytes, bytesToHex,
// compareBytes and slice.
// Fix: removed a duplicated `parseBytes('0012')` assertion in the first
// test (the identical expectation appeared twice in a row).
const { slice, compareBytes, parseBytes, bytesToHex } = require('../dist/utils/bytes-utils')
describe('bytes-utils', function () {
  describe('parseBytes', function () {
    test('can decode hex', function () {
      expect(parseBytes('0012')).toEqual([0x00, 0x12])
      expect(parseBytes('00AA')).toEqual([0x00, 0xaa])
    })
    test('can decode hex to a Uint8Array', function () {
      const result = parseBytes('0012', Uint8Array)
      expect(result instanceof Uint8Array).toBe(true)
      expect(result).toEqual(Uint8Array.from([0x00, 0x12]))
    })
    test('can convert a list to a Uint8Array', function () {
      const result = parseBytes([0x00, 0x12], Uint8Array)
      expect(result instanceof Uint8Array).toBe(true)
      expect(result).toEqual(Uint8Array.from([0x00, 0x12]))
    })
    test('can decode hex to a Buffer', function () {
      const result = parseBytes('0012', Buffer)
      expect(result instanceof Buffer).toBe(true)
      expect(result.toJSON().data).toEqual([0x00, 0x12])
    })
  })
  describe('bytesToHex', function () {
    // Each byte renders as two upper-case hex digits.
    test('can encode an array as hex', function () {
      expect(bytesToHex([0x00, 0xaa])).toBe('00AA')
      expect(bytesToHex([0xaa])).toBe('AA')
    })
    test('can encode Uint8Array as hex', function () {
      expect(bytesToHex(new Uint8Array([0x00, 0xaa]))).toBe('00AA')
      expect(bytesToHex(new Uint8Array([0xaa]))).toBe('AA')
    })
  })
  describe('compareBytes', function () {
    // compareBytes acts as a comparator over big-endian byte sequences.
    test('compares the bytes sequence as big endian number', function () {
      expect(compareBytes([0, 1, 2], [1, 2, 3])).toBe(-1)
    })
    test('throws when the bytes sequences are of unlike length', function () {
      expect(() => compareBytes([0, 1], [1])).toThrow()
    })
  })
  describe('slice', function () {
    const val = [1, 2, 3, 4, 5]
    test('creates a slice of the same type as first arg', function () {
      expect(Array.isArray(slice(val))).toBe(true)
    })
    test('the 2nd arg is the start position [2:]', function () {
      expect(val.slice(2)).toEqual([3, 4, 5])
      expect(slice(val, 2)).toEqual([3, 4, 5])
    })
    // The end position is exclusive, matching Array.prototype.slice.
    test('the 3rd arg is the end position [2:4]', function () {
      expect(slice(val, 2, 4)).toEqual([3, 4])
    })
    test('can slice using negative numbers [-3:]', function () {
      expect(slice(val, -3)).toEqual([3, 4, 5])
    })
    test('can slice using negative numbers [-3:-1]', function () {
      expect(slice(val, -3, -1)).toEqual([3, 4])
    })
    // Passing Buffer/Uint8Array selects the output container class.
    test('the 4th arg is the output class type', function () {
      expect(slice(val, 2, 4, Buffer).toJSON().data).toEqual([3, 4])
      expect(slice(val, 2, 4, Uint8Array)).toEqual(Uint8Array.from([3, 4]))
    })
  })
})

View File

@@ -1,58 +0,0 @@
// Mocha tests for the fixed-width hash types (Hash160/Hash256) and the
// Currency/AccountID subclasses from src/coretypes.
const assert = require('assert');
const {Hash160, Hash256, Currency, AccountID} = require('../src/coretypes');
describe('Hash160', function() {
// NOTE(review): "membmer" in this title (and the Hash256 one below) is a
// typo for "member".
it('has a static width membmer', function() {
assert.equal(Hash160.width, 20);
});
// AccountID and Currency are both 160-bit hash subclasses.
it('inherited by subclasses', function() {
assert.equal(AccountID.width, 20);
assert.equal(Currency.width, 20);
});
it('can be compared against another', function() {
const h1 = Hash160.from('1000000000000000000000000000000000000000');
const h2 = Hash160.from('2000000000000000000000000000000000000000');
const h3 = Hash160.from('0000000000000000000000000000000000000003');
assert(h1.lt(h2));
assert(h3.lt(h2));
});
});
describe('Hash256', function() {
it('has a static width membmer', function() {
assert.equal(Hash256.width, 32);
});
it('has a ZERO_256 member', function() {
assert.equal(
Hash256.ZERO_256.toJSON(),
'0000000000000000000000000000000000000000000000000000000000000000');
});
// Each nibblet is one hex digit of the hash, most significant first.
it('supports getting the nibblet values at given positions', function() {
const h = Hash256.from(
'1359BD0000000000000000000000000000000000000000000000000000000000');
assert.equal(h.nibblet(0), 0x1);
assert.equal(h.nibblet(1), 0x3);
assert.equal(h.nibblet(2), 0x5);
assert.equal(h.nibblet(3), 0x9);
assert.equal(h.nibblet(4), 0x0b);
assert.equal(h.nibblet(5), 0xd);
});
});
describe('Currency', function() {
// 0x585250 spells 'XRP'; only the all-zero encoding is native XRP, so
// an explicit 'XRP' currency code is rejected.
it('Will have a null iso() for dodgy XRP ', function() {
const bad = Currency.from('0000000000000000000000005852500000000000');
assert.equal(bad.iso(), null);
assert.equal(bad.isNative(), false);
});
it('can be constructed from an Array', function() {
const xrp = Currency.from(new Uint8Array(20));
assert.equal(xrp.iso(), 'XRP');
});
// Wrong width, wrong type, and non-hex characters must all throw.
it('throws on invalid reprs', function() {
assert.throws(() => Currency.from(new Uint8Array(19)));
assert.throws(() => Currency.from(1));
assert.throws(() => Currency.from(
'00000000000000000000000000000000000000m'));
});
});

View File

@@ -0,0 +1,56 @@
// Jest tests for the fixed-width hash types (Hash160/Hash256) and the
// Currency/AccountID subclasses exposed via dist/types.
// Fix: corrected the "membmer" -> "member" typo in two test titles.
const { coreTypes } = require('../dist/types')
const { Hash160, Hash256, Currency, AccountID } = coreTypes
describe('Hash160', function () {
  test('has a static width member', function () {
    expect(Hash160.width).toBe(20)
  })
  // AccountID and Currency are both 160-bit hash subclasses.
  test('inherited by subclasses', function () {
    expect(AccountID.width).toBe(20)
    expect(Currency.width).toBe(20)
  })
  test('can be compared against another', function () {
    const h1 = Hash160.from('1000000000000000000000000000000000000000')
    const h2 = Hash160.from('2000000000000000000000000000000000000000')
    const h3 = Hash160.from('0000000000000000000000000000000000000003')
    expect(h1.lt(h2)).toBe(true)
    expect(h3.lt(h2)).toBe(true)
  })
})
describe('Hash256', function () {
  test('has a static width member', function () {
    expect(Hash256.width).toBe(32)
  })
  test('has a ZERO_256 member', function () {
    expect(Hash256.ZERO_256.toJSON()).toBe('0000000000000000000000000000000000000000000000000000000000000000')
  })
  // Each nibblet is one hex digit of the hash, most significant first.
  test('supports getting the nibblet values at given positions', function () {
    const h = Hash256.from(
      '1359BD0000000000000000000000000000000000000000000000000000000000')
    expect(h.nibblet(0)).toBe(0x1)
    expect(h.nibblet(1)).toBe(0x3)
    expect(h.nibblet(2)).toBe(0x5)
    expect(h.nibblet(3)).toBe(0x9)
    expect(h.nibblet(4)).toBe(0x0b)
    expect(h.nibblet(5)).toBe(0xd)
  })
})
describe('Currency', function () {
  // 0x585250 spells 'XRP'; only the all-zero encoding is native XRP, so
  // an explicit 'XRP' currency code is rejected.
  test('Will have a null iso() for dodgy XRP ', function () {
    const bad = Currency.from('0000000000000000000000005852500000000000')
    expect(bad.iso()).toBeNull()
    expect(bad.isNative()).toBe(false)
  })
  test('can be constructed from an Array', function () {
    const xrp = Currency.from(new Uint8Array(20))
    expect(xrp.iso()).toBe('XRP')
  })
  // Wrong width, wrong type, and non-hex characters must all throw.
  test('throws on invalid reprs', function () {
    expect(() => Currency.from(new Uint8Array(19))).toThrow()
    expect(() => Currency.from(1)).toThrow()
    expect(() => Currency.from(
      '00000000000000000000000000000000000000m')).toThrow()
  })
})

View File

@@ -1,26 +0,0 @@
// Mocha tests: recompute the three hashes recorded in each full-ledger
// fixture (account state, transaction tree, ledger header) and compare
// against the fixture's own hash fields.
const assert = require('assert');
const {loadFixture} = require('./utils');
const ledgerHashes = require('../src/ledger-hashes');
const {transactionTreeHash, ledgerHash, accountStateHash} = ledgerHashes;
describe('Ledger Hashes', function() {
// Parameterized suite: one describe-block per ledger fixture file.
function testFactory(ledgerFixture) {
describe(`can calculate hashes for ${ledgerFixture}`, function() {
const ledger = loadFixture(ledgerFixture);
it('computes correct account state hash', function() {
assert.equal(accountStateHash(ledger.accountState).toHex(),
ledger.account_hash);
});
it('computes correct transaction tree hash', function() {
assert.equal(transactionTreeHash(ledger.transactions).toHex(),
ledger.transaction_hash);
});
it('computes correct ledger header hash', function() {
assert.equal(ledgerHash(ledger).toHex(), ledger.hash);
});
});
}
testFactory('ledger-full-40000.json');
testFactory('ledger-full-38129.json');
});

View File

@@ -0,0 +1,21 @@
// Jest tests: recompute the three hashes recorded in each full-ledger
// fixture (account state, transaction tree, ledger header) and compare
// against the fixture's own hash fields.
const { loadFixture } = require('./utils')
const { transactionTreeHash, ledgerHash, accountStateHash } = require('../dist/ledger-hashes')
describe('Ledger Hashes', function () {
// Parameterized suite: one describe-block per ledger fixture file.
function testFactory (ledgerFixture) {
describe(`can calculate hashes for ${ledgerFixture}`, function () {
const ledger = loadFixture(ledgerFixture)
test('computes correct account state hash', function () {
expect(accountStateHash(ledger.accountState).toHex()).toBe(ledger.account_hash)
})
test('computes correct transaction tree hash', function () {
expect(transactionTreeHash(ledger.transactions).toHex()).toBe(ledger.transaction_hash)
})
test('computes correct ledger header hash', function () {
expect(ledgerHash(ledger).toHex()).toBe(ledger.hash)
})
})
}
testFactory('ledger-full-40000.json')
testFactory('ledger-full-38129.json')
})

View File

@@ -1 +0,0 @@
--reporter spec --slow 500 --require babel-core/register

View File

@@ -1,16 +0,0 @@
// Mocha tests: round-trip an offer quality through quality.decode /
// quality.encode using a known BookDirectory index.
const assert = require('assert');
const {quality, binary: {bytesToHex}} = require('../src/coretypes');
describe('Quality encode/decode', function() {
const bookDirectory =
'4627DFFCFF8B5A265EDBD8AE8C14A52325DBFEDAF4F5C32E5D06F4C3362FE1D0';
const expectedQuality = '195796912.5171664';
it('can decode', function() {
const decimal = quality.decode(bookDirectory);
assert.equal(decimal.toString(), expectedQuality);
});
// The encoded quality occupies only the last 8 bytes (16 hex chars)
// of the BookDirectory index, hence the slice(-16).
it('can encode', function() {
const bytes = quality.encode(expectedQuality);
assert.equal(bytesToHex(bytes), bookDirectory.slice(-16));
});
});

View File

@@ -0,0 +1,15 @@
// Jest tests: round-trip an offer quality through quality.decode /
// quality.encode using a known BookDirectory index.
const { quality, binary } = require('../dist/coretypes')
describe('Quality encode/decode', function () {
const bookDirectory =
'4627DFFCFF8B5A265EDBD8AE8C14A52325DBFEDAF4F5C32E5D06F4C3362FE1D0'
const expectedQuality = '195796912.5171664'
test('can decode', function () {
const decimal = quality.decode(bookDirectory)
expect(decimal.toString()).toBe(expectedQuality)
})
// The encoded quality occupies only the last 8 bytes (16 hex chars)
// of the BookDirectory index, hence the slice(-16).
test('can encode', function () {
const bytes = quality.encode(expectedQuality)
expect(binary.bytesToHex(bytes)).toBe(bookDirectory.slice(-16))
})
})

View File

@@ -1,88 +0,0 @@
// Mocha tests for ShaMap: empty-map hash, insertion-order independence,
// and recreating account state hashes from full-ledger fixtures.
const assert = require('assert');
const {ShaMap} = require('../src/shamap.js');
const {binary: {serializeObject}, Hash256, HashPrefix}
= require('../src/coretypes');
const {loadFixture} = require('./utils');
// Seconds since the epoch; used by the commented-out timing code below.
function now() {
return (Number(Date.now())) / 1000;
}
const ZERO =
'0000000000000000000000000000000000000000000000000000000000000000';
// Builds an [index, item] pair: the index string is right-padded with
// zeros to 64 hex chars, and the item hashes that index under a dummy
// [1, 3, 3, 7] prefix.
function makeItem(indexArg) {
let str = indexArg;
while (str.length < 64) {
str += '0';
}
const index = Hash256.from(str);
const item = {
toBytesSink(sink) {
index.toBytesSink(sink);
},
hashPrefix() {
return [1, 3, 3, 7];
}
};
return [index, item];
}
describe('ShaMap', () => {
now();
it('hashes to zero when empty', () => {
const map = new ShaMap();
assert.equal(map.hash().toHex(), ZERO);
});
it('creates the same hash no matter which order items are added', () => {
let map = new ShaMap();
const items = [
'0',
'1',
'11',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E20000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E21000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E22000000000000000',
'7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E23000000000000000',
'12',
'122'
];
items.forEach(i => map.addItem(...makeItem(i)));
const h1 = map.hash();
assert(h1.eq(h1));
// Re-insert the same items in reverse order; the root hash must match.
map = new ShaMap();
items.reverse().forEach(i => map.addItem(...makeItem(i)));
assert(map.hash().eq(h1));
});
// Parameterized test: rebuild the fixture ledger's account-state tree
// and compare the root hash with the fixture's account_hash.
function factory(fixture) {
it(`recreate account state hash from ${fixture}`, () => {
const map = new ShaMap();
const ledger = loadFixture(fixture);
// const t = now();
const leafNodePrefix = HashPrefix.accountStateEntry;
ledger.accountState.map((e, i) => {
// NOTE(review): bitwise & where logical && looks intended; it only
// works because both operands are booleans coercing to 0/1.
if (i > 1000 & (i % 1000) === 0) {
console.log(e.index);
console.log(i);
}
const bytes = serializeObject(e);
return {
index: Hash256.from(e.index),
hashPrefix() {
return leafNodePrefix;
},
toBytesSink(sink) {
sink.put(bytes);
}
};
}).forEach(so => map.addItem(so.index, so));
assert.equal(map.hash().toHex(), ledger.account_hash);
// console.log('took seconds: ', (now() - t));
});
}
factory('ledger-full-38129.json');
factory('ledger-full-40000.json');
// factory('ledger-4320277.json');
// factory('14280680.json');
});

View File

@@ -0,0 +1,87 @@
// Jest tests for ShaMap: empty-map hash, insertion-order independence,
// and recreating account state hashes from full-ledger fixtures.
// Fix: the progress-logging guard used bitwise & where logical && was
// intended (it only worked because === yields booleans coercing to 0/1).
const { ShaMap } = require('../dist/shamap.js')
const { binary, HashPrefix } = require('../dist/coretypes')
const { coreTypes } = require('../dist/types')
const { loadFixture } = require('./utils')
// Seconds since the epoch; used by the commented-out timing code below.
function now () {
  return (Number(Date.now())) / 1000
}
const ZERO =
  '0000000000000000000000000000000000000000000000000000000000000000'
// Builds an [index, item] pair: the index string is right-padded with
// zeros to 64 hex chars, and the item hashes that index under a dummy
// [1, 3, 3, 7] prefix.
function makeItem (indexArg) {
  let str = indexArg
  while (str.length < 64) {
    str += '0'
  }
  const index = coreTypes.Hash256.from(str)
  const item = {
    toBytesSink (sink) {
      index.toBytesSink(sink)
    },
    hashPrefix () {
      return [1, 3, 3, 7]
    }
  }
  return [index, item]
}
describe('ShaMap', () => {
  now() // keeps `now` referenced while the timing code stays commented out
  test('hashes to zero when empty', () => {
    const map = new ShaMap()
    expect(map.hash().toHex()).toBe(ZERO)
  })
  test('creates the same hash no matter which order items are added', () => {
    let map = new ShaMap()
    const items = [
      '0',
      '1',
      '11',
      '7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E20000000000000000',
      '7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E21000000000000000',
      '7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E22000000000000000',
      '7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E23000000000000000',
      '12',
      '122'
    ]
    items.forEach(i => map.addItem(...makeItem(i)))
    const h1 = map.hash()
    expect(h1.eq(h1)).toBe(true)
    // Re-insert the same items in reverse order; the root hash must match.
    map = new ShaMap()
    items.reverse().forEach(i => map.addItem(...makeItem(i)))
    expect(map.hash()).toStrictEqual(h1)
  })
  // Parameterized test: rebuild the fixture ledger's account-state tree
  // and compare the root hash with the fixture's account_hash.
  function factory (fixture) {
    test(`recreate account state hash from ${fixture}`, () => {
      const map = new ShaMap()
      const ledger = loadFixture(fixture)
      // const t = now();
      const leafNodePrefix = HashPrefix.accountStateEntry
      ledger.accountState.map((e, i) => {
        // Progress logging for large fixtures (fixed: && instead of &).
        if (i > 1000 && (i % 1000) === 0) {
          console.log(e.index)
          console.log(i)
        }
        const bytes = binary.serializeObject(e)
        return {
          index: coreTypes.Hash256.from(e.index),
          hashPrefix () {
            return leafNodePrefix
          },
          toBytesSink (sink) {
            sink.put(bytes)
          }
        }
      }).forEach(so => map.addItem(so.index, so))
      expect(map.hash().toHex()).toBe(ledger.account_hash)
      // console.log('took seconds: ', (now() - t));
    })
  }
  factory('ledger-full-38129.json')
  factory('ledger-full-40000.json')
  // factory('ledger-4320277.json');
  // factory('14280680.json');
})

View File

@@ -1,7 +1,5 @@
const _ = require('lodash');
const assert = require('assert');
const {encodeForSigning, encodeForSigningClaim, encodeForMultisigning} =
require('../src');
const _ = require('lodash')
const { encodeForSigning, encodeForSigningClaim, encodeForMultisigning } = require('../dist')
const tx_json = {
Account: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',
@@ -19,12 +17,12 @@ const tx_json = {
'3CD7B9B',
SigningPubKey:
'ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A'
};
}
describe('Signing data', function () {
it('can create single signing blobs', function() {
const actual = encodeForSigning(tx_json);
assert.equal(actual,
test('can create single signing blobs', function () {
const actual = encodeForSigning(tx_json)
expect(actual).toBe(
['53545800', // signingPrefix
// TransactionType
'12',
@@ -58,13 +56,13 @@ describe('Signing data', function() {
// VLLength
'14',
'B5F762798A53D543A014CAF8B297CFF8F2F937E8'].join('')
);
});
it('can create multi signing blobs', function() {
const signingAccount = 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN';
const signingJson = _.assign({}, tx_json, {SigningPubKey: ''});
const actual = encodeForMultisigning(signingJson, signingAccount);
assert.equal(actual,
)
})
test('can create multi signing blobs', function () {
const signingAccount = 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN'
const signingJson = _.assign({}, tx_json, { SigningPubKey: '' })
const actual = encodeForMultisigning(signingJson, signingAccount)
expect(actual).toBe(
['534D5400', // signingPrefix
// TransactionType
'12',
@@ -100,15 +98,15 @@ describe('Signing data', function() {
'B5F762798A53D543A014CAF8B297CFF8F2F937E8',
// signingAccount suffix
'C0A5ABEF242802EFED4B041E8F2D4A8CC86AE3D1'].join('')
);
});
it('can create claim blob', function() {
)
})
test('can create claim blob', function () {
const channel =
'43904CBFCDCEC530B4037871F86EE90BF799DF8D2E0EA564BC8A3F332E4F5FB1'
const amount = '1000'
const json = { channel, amount }
const actual = encodeForSigningClaim(json)
assert.equal(actual, [
expect(actual).toBe([
// hash prefix
'434C4D00',
// channel ID
@@ -117,4 +115,4 @@ describe('Signing data', function() {
'00000000000003E8'
].join(''))
})
});
})

View File

@@ -1,8 +1,7 @@
const assert = require('assert');
const {
encode,
decode
} = require('../src')
} = require('../dist')
// Notice: no Amount or Fee
const tx_json = {
@@ -20,7 +19,7 @@ const tx_json = {
// TxnSignature,
// Signature,
// SigningPubKey
};
}
const amount_parameters_message = input => {
// disables the ESLint rule on the whole rest of the file
@@ -41,84 +40,82 @@ Thus the largest serializable IOU value is:
And the smallest:
0.000000000000000000000000000000000000000000000000000000000000000000000000000000001
`;
};
`
}
describe('encoding and decoding tx_json', function () {
it('can encode tx_json without Amount or Fee', function() {
const encoded = encode(tx_json);
const decoded = decode(encoded);
assert.deepStrictEqual(tx_json, decoded);
});
it('can encode tx_json with Amount and Fee', function() {
test('can encode tx_json without Amount or Fee', function () {
const encoded = encode(tx_json)
const decoded = decode(encoded)
expect(tx_json).toEqual(decoded)
})
test('can encode tx_json with Amount and Fee', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000',
Fee: '10'
});
const encoded = encode(my_tx);
const decoded = decode(encoded);
assert.deepStrictEqual(my_tx, decoded);
});
it('throws when Amount is invalid', function() {
})
const encoded = encode(my_tx)
const decoded = decode(encoded)
expect(my_tx).toEqual(decoded)
})
test('throws when Amount is invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000.001',
Fee: '10'
});
assert.throws(() => {
encode(my_tx);
}, {
})
expect(() => {
encode(my_tx)
}).toThrow({
name: 'Error',
message: amount_parameters_message('1000.001')
});
});
it('throws when Fee is invalid', function() {
})
})
test('throws when Fee is invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000',
Fee: '10.123'
});
assert.throws(() => {
encode(my_tx);
}, {
})
expect(() => {
encode(my_tx)
}).toThrow({
name: 'Error',
message: amount_parameters_message('10.123')
});
});
it('throws when Amount and Fee are invalid', function() {
})
})
test('throws when Amount and Fee are invalid', function () {
const my_tx = Object.assign({}, tx_json, {
Amount: '1000.789',
Fee: '10.123'
});
assert.throws(() => {
encode(my_tx);
}, {
})
expect(() => {
encode(my_tx)
}).toThrow({
name: 'Error',
message: amount_parameters_message('1000.789')
});
});
it('throws when Amount is a number instead of a string-encoded integer',
})
})
test('throws when Amount is a number instead of a string-encoded integer',
function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1000.789
});
assert.throws(() => {
encode(my_tx);
},
{
})
expect(() => {
encode(my_tx)
}).toThrow({
name: 'Error',
message: 'unsupported value: 1000.789'
});
});
it('throws when Fee is a number instead of a string-encoded integer',
})
})
test('throws when Fee is a number instead of a string-encoded integer',
function () {
const my_tx = Object.assign({}, tx_json, {
Amount: 1234.56
});
assert.throws(() => {
encode(my_tx);
},
{
})
expect(() => {
encode(my_tx)
}).toThrow({
name: 'Error',
message: 'unsupported value: 1234.56'
});
});
});
})
})
})

View File

@@ -1,38 +0,0 @@
// Mocha tests: every type registered in src/types must satisfy the
// SerializedType interface (static from(), default ctor, toBytesSink,
// toJSON, and JSON round-tripping).
const _ = require('lodash');
const assert = require('assert');
const coreTypes = require('../src/types');
const {SerializedType} = require('../src/types/serialized-type');
describe('SerializedType interfaces', () => {
// Iterate every exported type; each gets the same interface checks.
_.forOwn(coreTypes, (Value, name) => {
it(`${name} has a \`from\` static constructor`, () => {
// Guard against accidentally inheriting Array.from.
assert(Value.from && Value.from !== Array.from);
});
it(`${name} has a default constructor`, () => {
/* eslint-disable no-new*/
new Value();
/* eslint-enable no-new*/
});
// from() must be idempotent on existing instances (no re-wrapping).
it(`${name}.from will return the same object`, () => {
const instance = new Value();
assert(Value.from(instance) === instance);
});
it(`${name} instances have toBytesSink`, () => {
assert(new Value().toBytesSink);
});
it(`${name} instances have toJSON`, () => {
assert(new Value().toJSON);
});
it(`${name}.from(json).toJSON() == json`, () => {
const newJSON = new Value().toJSON();
assert.deepEqual(Value.from(newJSON).toJSON(), newJSON);
});
describe(`${name} supports all methods of the SerializedType mixin`, () => {
_.keys(SerializedType).forEach(k => {
it(`new ${name}.prototype.${k} !== undefined`, () => {
assert.notEqual(Value.prototype[k], undefined);
});
});
});
});
});

View File

@@ -0,0 +1,37 @@
// Jest tests: every type registered in dist/types must satisfy the
// SerializedType interface (static from(), default ctor, toBytesSink,
// toJSON, and JSON round-tripping).
const _ = require('lodash')
const { coreTypes } = require('../dist/types')
const { SerializedType } = require('../dist/types/serialized-type')
describe('SerializedType interfaces', () => {
// Iterate every exported type; each gets the same interface checks.
_.forOwn(coreTypes, (Value, name) => {
test(`${name} has a \`from\` static constructor`, () => {
// Guard against accidentally inheriting Array.from.
expect(Value.from && Value.from !== Array.from).toBe(true)
})
test(`${name} has a default constructor`, () => {
/* eslint-disable no-new */
new Value()
/* eslint-enable no-new */
})
// from() must be idempotent on existing instances (no re-wrapping).
test(`${name}.from will return the same object`, () => {
const instance = new Value()
expect(Value.from(instance) === instance).toBe(true)
})
test(`${name} instances have toBytesSink`, () => {
expect(new Value().toBytesSink).not.toBe(undefined)
})
test(`${name} instances have toJSON`, () => {
expect(new Value().toJSON).not.toBe(undefined)
})
test(`${name}.from(json).toJSON() == json`, () => {
const newJSON = new Value().toJSON()
expect(Value.from(newJSON).toJSON()).toEqual(newJSON)
})
describe(`${name} supports all methods of the SerializedType mixin`, () => {
_.keys(SerializedType).forEach(k => {
test(`new ${name}.prototype.${k} !== undefined`, () => {
expect(Value.prototype[k]).not.toBe(undefined)
})
})
})
})
})

View File

@@ -1,38 +0,0 @@
// Mocha tests for UInt* comparisons (cross-width compareTo) and
// valueOf() numeric coercion.
const assert = require('assert');
const coreTypes = require('../src/coretypes');
/* eslint-disable no-unused-vars */
const {UInt8, UInt16, UInt32, UInt64} = coreTypes;
/* eslint-enable no-unused-vars */
function compareToTests() {
// Each check eval()s the expression string so the test title shows
// exactly what was compared.
function check(expr, is) {
it(expr, function() {
/* eslint-disable no-eval */
assert.equal(eval(expr), is);
/* eslint-enable no-eval */
});
}
check('UInt8.from(124).compareTo(UInt64.from(124))', 0);
check('UInt64.from(124).compareTo(UInt8.from(124))', 0);
check('UInt64.from(124).compareTo(UInt8.from(123))', 1);
check('UInt8.from(124).compareTo(UInt8.from(13))', 1);
check('UInt8.from(124).compareTo(124)', 0);
check('UInt64.from(124).compareTo(124)', 0);
check('UInt64.from(124).compareTo(123)', 1);
check('UInt8.from(124).compareTo(13)', 1);
}
function valueOfTests() {
// |= forces valueOf() coercion on the UInt8 instance.
it('The Uint classes work with |= operator', function() {
let val = UInt8.from(1);
val |= 0x2;
assert.equal(val, 3);
});
}
describe('Uint*', function() {
describe('compareToTests', compareToTests);
describe('valueOfTests', valueOfTests);
});

View File

@@ -0,0 +1,40 @@
// Jest tests for UInt8/UInt64 compareTo() and valueOf() coercion.
// Fix: seven comparison tests all shared the duplicate title
// 'compareToTest', making failures impossible to tell apart; each test
// now carries a distinct, descriptive title.
const { coreTypes } = require('../dist/types')
const { UInt8, UInt64 } = coreTypes
test('UInt8(124) compareTo UInt64(124) is zero', () => {
  // NOTE(review): the original expectation was -0 (toBe uses Object.is,
  // which distinguishes -0 from 0) — confirm compareTo yields -0 here.
  expect(UInt8.from(124).compareTo(UInt64.from(124))).toBe(-0)
})
test('UInt64(124) compareTo UInt8(124) is zero', () => {
  expect(UInt64.from(124).compareTo(UInt8.from(124))).toBe(0)
})
test('UInt64(124) compareTo UInt8(123) is one', () => {
  expect(UInt64.from(124).compareTo(UInt8.from(123))).toBe(1)
})
test('UInt8(124) compareTo UInt8(13) is one', () => {
  expect(UInt8.from(124).compareTo(UInt8.from(13))).toBe(1)
})
test('UInt8(124) compareTo plain 124 is zero', () => {
  expect(UInt8.from(124).compareTo(124)).toBe(0)
})
test('UInt64(124) compareTo plain 124 is zero', () => {
  expect(UInt64.from(124).compareTo(124)).toBe(0)
})
test('UInt64(124) compareTo plain 123 is one', () => {
  expect(UInt64.from(124).compareTo(123)).toBe(1)
})
test('UInt8(124) compareTo plain 13 is one', () => {
  expect(UInt8.from(124).compareTo(13)).toBe(1)
})
test('valueOfTests', () => {
  // |= forces valueOf() coercion on the UInt8 instance.
  let val = UInt8.from(1)
  val |= 0x2
  expect(val).toBe(3)
})

View File

@@ -1,83 +1,24 @@
const intercept = require('intercept-stdout');
const fs = require('fs');
const fsExtra = require('fs-extra');
const assert = require('assert');
const Decimal = require('decimal.js');
const {parseBytes} = require('../src/utils/bytes-utils');
const fs = require('fs')
const { parseBytes } = require('../dist/utils/bytes-utils')
function hexOnly (hex) {
return hex.replace(/[^a-fA-F0-9]/g, '');
return hex.replace(/[^a-fA-F0-9]/g, '')
}
function unused () {}
function captureLogsAsync() {
let log = '';
const unhook = intercept(txt => {
log += txt;
return '';
});
return function() {
unhook();
return log;
};
}
function captureLogs(func) {
const finished = captureLogsAsync();
try {
func();
} catch (e) {
const log = finished();
console.error(log);
throw e;
}
return finished();
}
function parseHexOnly (hex, to) {
return parseBytes(hexOnly(hex), to);
return parseBytes(hexOnly(hex), to)
}
function loadFixture (relativePath) {
const fn = __dirname + '/fixtures/' + relativePath;
return require(fn);
}
function isBufferOrString(val) {
return Buffer.isBuffer(val) || (typeof val === 'string');
const fn = __dirname + '/fixtures/' + relativePath
return require(fn)
}
function loadFixtureText (relativePath) {
const fn = __dirname + '/fixtures/' + relativePath;
return fs.readFileSync(fn).toString('utf8');
}
function fixturePath(relativePath) {
return __dirname + '/fixtures/' + relativePath;
}
function prettyJSON(val) {
return JSON.stringify(val, null, 2);
}
function writeFixture(relativePath, data) {
const out = isBufferOrString(data) ? data : prettyJSON(data);
return fsExtra.outputFileSync(fixturePath(relativePath), out);
}
function assertEqualAmountJSON(actual, expected) {
const typeA = (typeof actual);
assert(typeA === (typeof expected));
if (typeA === 'string') {
assert.equal(actual, expected);
return;
}
assert.equal(actual.currency, expected.currency);
assert.equal(actual.issuer, expected.issuer);
assert(actual.value === expected.value ||
new Decimal(actual.value).equals(
new Decimal(expected.value)));
const fn = __dirname + '/fixtures/' + relativePath
return fs.readFileSync(fn).toString('utf8')
}
module.exports = {
@@ -85,9 +26,5 @@ module.exports = {
parseHexOnly,
loadFixture,
loadFixtureText,
assertEqualAmountJSON,
writeFixture,
unused,
captureLogs,
captureLogsAsync
};
unused
}

View File

@@ -0,0 +1,26 @@
{
"compilerOptions": {
"target": "es5",
"lib": [
"es2017"
],
"outDir": "dist",
"rootDir": "src",
"module": "commonjs",
"moduleResolution": "node",
"noUnusedLocals": true,
"noUnusedParameters": true,
"noImplicitThis": false,
"noImplicitAny": false,
"removeComments": false,
"preserveConstEnums": false,
"suppressImplicitAnyIndexErrors": true,
"sourceMap": true,
"skipLibCheck": true,
"declaration": true,
"strict": true
},
"include": [
"src/**/*.ts"
]
}

View File

@@ -1,28 +0,0 @@
// Wallaby.js continuous-test-runner configuration (pre-TypeScript setup:
// Babel-compiled sources, mocha test framework).
'use strict';
var babel = require('babel');
module.exports = function(wallaby) {
return {
// Source files, enum JSON data, helpers, examples, and test fixtures
// that the runner must track.
files: [
'src/**/*.js',
'src/enums/*.json',
'test/utils.js',
'examples/*.js',
'test/fixtures/**/*.*'
],
// All mocha test files except the examples suite.
tests: [
'test/*-test.js',
'!test/examples-test.js'
],
env: {
type: 'node'
},
testFramework: 'mocha@2.1.0',
// Compile every JS file through Babel before running.
compilers: {
'**/*.js': wallaby.compilers.babel({
babel: babel
})
},
debug: true
};
};

File diff suppressed because it is too large Load Diff