Merge coretypes repo

This commit is contained in:
Chris Clark
2015-10-07 10:57:21 -07:00
parent ef0d1f5679
commit 2877d1a280
55 changed files with 8142 additions and 25 deletions

View File

@@ -0,0 +1,13 @@
Copyright (c) 2015 Ripple Labs Inc.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@@ -0,0 +1,3 @@
# ripple-binary-codec
Encode/decode to/from the Ripple binary serialization format.

View File

@@ -0,0 +1,3 @@
machine:
node:
version: 0.12.0

View File

@@ -1,20 +1,26 @@
{
"name": "ripple-binary-codec",
"version": "0.0.3",
"version": "0.0.4",
"description": "ripple binary codec",
"files": [
"distrib/npm/*",
"bin/*",
"build/*",
"test/*",
"Gulpfile.js"
"test/*"
],
"main": "distrib/npm/",
"directories": {
"test": "test"
},
"dependencies": {
"@niq/ripple-core": "0.0.6"
"babel-runtime": "^5.8.25",
"bn.js": "^3.2.0",
"codecov.io": "^0.1.6",
"create-hash": "^1.1.2",
"decimal.js": "^4.0.2",
"hash.js": "^1.0.3",
"inherits": "^2.0.1",
"lodash": "^3.10.1",
"ripple-address-codec": "^2.0.1"
},
"devDependencies": {
"assert-diff": "^1.0.1",
@@ -22,26 +28,19 @@
"babel-core": "^5.8.20",
"babel-eslint": "^4.0.5",
"babel-loader": "^5.3.2",
"coveralls": "~2.10.0",
"eslint": "^1.2.1",
"eventemitter2": "^0.4.14",
"fs-extra": "^0.24.0",
"intercept-stdout": "^0.1.2",
"istanbul": "~0.3.5",
"lodash": "^3.10.0",
"map-stream": "~0.1.0",
"mocha": "~2.1.0",
"nock": "^0.34.1",
"ripple-lib": "^0.12.4",
"webpack": "~1.5.3",
"yargs": "~1.3.1"
"ripple-lib": "^0.12.9"
},
"scripts": {
"build": "gulp",
"compile": "babel --optional runtime -d distrib/npm/ src/",
"compile-with-source-maps": "babel --optional runtime -s -t -d distrib/npm/ src/",
"compile": "babel --optional runtime -d distrib/npm/ src/ && cp src/enums/definitions.json distrib/npm/enums",
"prepublish": "npm test && npm run lint && npm run compile",
"test": "istanbul test _mocha",
"coveralls": "cat ./coverage/lcov.info | coveralls",
"lint": "if ! [ -f eslintrc ]; then curl -o eslintrc 'https://raw.githubusercontent.com/ripple/javascript-style-guide/es6/eslintrc'; echo 'parser: babel-eslint' >> eslintrc; fi; eslint -c eslintrc src/*.js test/*.js"
"codecov": "cat ./coverage/coverage.json | ./node_modules/codecov.io/bin/codecov.io.js",
"lint": "if ! [ -f eslintrc ]; then curl -o eslintrc 'https://raw.githubusercontent.com/ripple/javascript-style-guide/es6/eslintrc'; echo 'parser: babel-eslint' >> eslintrc; fi; eslint -c eslintrc src/*.js test/*.js examples/*.js"
},
"repository": {
"type": "git",
@@ -54,6 +53,7 @@
"url": "https://github.com/ripple/ripple-binary-codec/issues"
},
"homepage": "https://github.com/ripple/ripple-binary-codec#readme",
"author": "ndudfield@gmail.com",
"license": "ISC",
"readmeFilename": "README.md"
}

View File

@@ -0,0 +1,56 @@
/* eslint-disable func-style */
'use strict';
// High-level encode/decode entry points that tie together the parser,
// serializer and hashing helpers.
const types = require('./types');
const {HashPrefix} = require('./hash-prefixes');
const {BinaryParser} = require('./serdes/binary-parser');
const {BinarySerializer, BytesList} = require('./serdes/binary-serializer');
const {bytesToHex, slice, parseBytes} = require('./utils/bytes-utils');
const {sha512Half, transactionID} = require('./hashes');

// Build a BinaryParser over the given byte-like input.
const makeParser = bytes => new BinaryParser(bytes);
// Read one STObject from the parser and return its JSON form.
const readJSON = parser => parser.readType(types.STObject).toJSON();
// Decode serialized bytes straight to JSON.
const binaryToJSON = (bytes) => readJSON(makeParser(bytes));

// Serialize `object` (coerced via STObject.from) to a Uint8Array.
// opts.prefix / opts.suffix - extra bytes written before/after the object
// opts.signingFieldsOnly    - when true, serialize only fields whose
//                             definition is flagged isSigningField
function serializeObject(object, opts = {}) {
  const {prefix, suffix, signingFieldsOnly = false} = opts;
  const bytesList = new BytesList();
  if (prefix) {
    bytesList.put(prefix);
  }
  const filter = signingFieldsOnly ? f => f.isSigningField : undefined;
  types.STObject.from(object).toBytesSink(bytesList, filter);
  if (suffix) {
    bytesList.put(suffix);
  }
  return bytesList.toBytes();
}

// Bytes a single signer signs: the single-signature hash prefix followed
// by the transaction's signing fields.
function signingData(tx, prefix = HashPrefix.transactionSig) {
  return serializeObject(tx, {prefix, signingFieldsOnly: true});
}

// Bytes a multi-signer signs: the multi-signing prefix, the signing
// fields, then the signing account's ID as a suffix.
function multiSigningData(tx, signingAccount) {
  const prefix = HashPrefix.transactionMultiSig;
  const suffix = types.AccountID.from(signingAccount).toBytes();
  return serializeObject(tx, {prefix, suffix, signingFieldsOnly: true});
}

module.exports = {
  BinaryParser,
  BinarySerializer,
  BytesList,
  makeParser,
  serializeObject,
  readJSON,
  bytesToHex,
  parseBytes,
  multiSigningData,
  signingData,
  binaryToJSON,
  sha512Half,
  transactionID,
  slice
};

View File

@@ -0,0 +1,27 @@
'use strict';
// Aggregates the codec's public surface: hashing, binary encode/decode,
// enums, signing, quality helpers and every serialized type.
const _ = require('lodash');
const enums = require('./enums');
const {Field} = enums;
const types = require('./types');
const binary = require('./binary');
const {ShaMap} = require('./shamap');
const ledgerHashes = require('./ledger-hashes');
const hashes = require('./hashes');
const quality = require('./quality');
const signing = require('./signing');
const {HashPrefix} = require('./hash-prefixes');

module.exports = _.assign({
  // ledger-level hashing helpers merged alongside the basic hashers
  hashes: _.assign({}, hashes, ledgerHashes),
  binary,
  enums,
  signing,
  quality,
  Field,
  HashPrefix,
  ShaMap
},
  // every type (AccountID, Amount, ...) is also exported at top level
  types
);

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,127 @@
'use strict';
const assert = require('assert');
const _ = require('lodash');
const {parseBytes, serializeUIntN} = require('./../utils/bytes-utils');
const makeClass = require('./../utils/make-class');
// Static tables (TYPES, FIELDS, LEDGER_ENTRY_TYPES, ...) shipped as JSON.
const enums = require('./definitions.json');

// _.transform with the arguments flipped so the iteratee can be named
// before the object it runs over.
function transformWith(func, obj) {
  return _.transform(obj, func);
}

// Index values under both their original key and value[valueKey], so one
// object resolves members by name and by ordinal alike.
function biMap(obj, valueKey) {
  return _.transform(obj, (result, value, key) => {
    result[key] = value;
    result[value[valueKey]] = value;
  });
}

// Base class for enum members: a named value carrying a numeric ordinal
// and a fixed wire encoding.
const EnumType = makeClass({
  EnumType(definition) {
    _.assign(this, definition);
    // At minimum
    assert(this.bytes instanceof Uint8Array);
    assert(typeof this.ordinal === 'number');
    assert(typeof this.name === 'string');
  },
  toString() {
    return this.name;
  },
  toJSON() {
    return this.name;
  },
  // Write this member's encoded bytes to a BytesSink.
  toBytesSink(sink) {
    sink.put(this.bytes);
  },
  statics: {
    // how many bytes the ordinal occupies on the wire
    ordinalByteWidth: 1,
    // Decode a member from its wire ordinal.
    fromParser(parser) {
      return this.from(parser.readUIntN(this.ordinalByteWidth));
    },
    // Coerce a name, an ordinal, or an existing instance to the
    // canonical member; throws on unknown values.
    from(val) {
      const ret = val instanceof this ? val : this[val];
      if (!ret) {
        throw new Error(
          `${val} is not a valid name or ordinal for ${this.enumName}`);
      }
      return ret;
    },
    // Build one member per initVals entry (name -> ordinal).
    valuesByName() {
      return _.transform(this.initVals, (result, ordinal, name) => {
        const bytes = serializeUIntN(ordinal, this.ordinalByteWidth);
        const type = new this({name, ordinal, bytes});
        result[name] = type;
      });
    },
    // Install per-name and per-ordinal lookups plus the `values` list on
    // the subclass itself.
    init() {
      const mapped = this.valuesByName();
      _.assign(this, biMap(mapped, 'ordinal'));
      this.values = _.values(mapped);
      return this;
    }
  }
});

// Create an EnumType subclass named `name` from a definition object.
function makeEnum(name, definition) {
  return makeClass({
    inherits: EnumType,
    statics: _.assign(definition, {enumName: name})
  });
}

function makeEnums(to, definition, name) {
  to[name] = makeEnum(name, definition);
}

const Enums = transformWith(makeEnums, {
  Type: {
    initVals: enums.TYPES
  },
  LedgerEntryType: {
    initVals: enums.LEDGER_ENTRY_TYPES, ordinalByteWidth: 2
  },
  TransactionType: {
    initVals: enums.TRANSACTION_TYPES, ordinalByteWidth: 2
  },
  TransactionResult: {
    initVals: enums.TRANSACTION_RESULTS, ordinalByteWidth: 1
  }
});

// Field is a richer enum: each member also carries an associated Type, an
// "nth" code, and the encoded field header used on the wire.
Enums.Field = makeClass({
  inherits: EnumType,
  statics: {
    enumName: 'Field',
    initVals: enums.FIELDS,
    valuesByName() {
      const fields = _.map(this.initVals, ([name, definition]) => {
        const type = Enums.Type[definition.type];
        const bytes = this.header(type.ordinal, definition.nth);
        // sort/lookup key: type code in the high 16 bits, nth below
        const ordinal = type.ordinal << 16 | definition.nth;
        const extra = {ordinal, name, type, bytes};
        return new this(_.assign(definition, extra));
      });
      return _.indexBy(fields, 'name');
    },
    // Encode a field header: when both codes fit in a nibble they share a
    // single byte; a code >= 16 is pushed out into a trailing byte.
    header(type, nth) {
      const name = nth;
      const header = [];
      const push = header.push.bind(header);
      if (type < 16) {
        if (name < 16) {
          push(type << 4 | name);
        } else {
          push(type << 4, name);
        }
      } else if (name < 16) {
        push(name, type);
      } else {
        push(0, type, name);
      }
      return parseBytes(header, Uint8Array);
    }
  }
});

module.exports = Enums;

View File

@@ -0,0 +1,31 @@
'use strict';
const {serializeUIntN} = require('./utils/bytes-utils');

// Serialize a 32-bit prefix to its 4 big-endian bytes.
function bytes(uint32) {
  return serializeUIntN(uint32, 4);
}

// 4-byte namespace prefixes mixed into hashes so hashes of different
// object kinds can never collide. Each value is a 3-letter ASCII
// mnemonic followed by a NUL byte.
const HashPrefix = {
  // transaction ID ('TXN\0')
  transactionID: bytes(0x54584E00),
  // transaction plus metadata ('SND\0')
  transaction: bytes(0x534E4400),
  // account state ('MLN\0')
  accountStateEntry: bytes(0x4D4C4E00),
  // inner node in tree ('MIN\0')
  innerNode: bytes(0x4D494E00),
  // ledger master data for signing ('LWR\0')
  ledgerHeader: bytes(0x4C575200),
  // inner transaction to sign ('STX\0')
  transactionSig: bytes(0x53545800),
  // inner transaction to multi-sign ('SMT\0')
  transactionMultiSig: bytes(0x534D5400),
  // validation for signing ('VAL\0')
  validation: bytes(0x56414C00),
  // proposal for signing ('PRP\0')
  proposal: bytes(0x50525000)
};

module.exports = {
  HashPrefix
};

View File

@@ -0,0 +1,45 @@
'use strict';
const makeClass = require('./utils/make-class');
const {HashPrefix} = require('./hash-prefixes');
const {Hash256} = require('./types');
const {parseBytes} = require('./utils/bytes-utils');
const createHash = require('create-hash');

// Incremental SHA-512 hasher whose digest is truncated to the first
// 256 bits ("SHA-512Half").
const Sha512Half = makeClass({
  Sha512Half() {
    this.hash = createHash('sha512');
  },
  statics: {
    // Convenience: a new hasher with `bytes` already fed in.
    put(bytes) {
      return new this().put(bytes);
    }
  },
  // Feed more bytes; chainable.
  put(bytes) {
    this.hash.update(parseBytes(bytes, Buffer));
    return this;
  },
  // Finish and return the first 32 bytes of the SHA-512 digest.
  finish256() {
    const bytes = this.hash.digest();
    return bytes.slice(0, 32);
  },
  // Finish and wrap the truncated digest in a Hash256.
  finish() {
    return new Hash256(this.finish256());
  }
});

// One-shot SHA-512Half over any number of byte-like arguments.
function sha512Half(...args) {
  const hash = new Sha512Half();
  args.forEach(a => hash.put(a));
  return parseBytes(hash.finish256(), Uint8Array);
}

// Transaction ID: SHA-512Half of the TXN prefix plus the serialized tx.
function transactionID(serialized) {
  return new Hash256(sha512Half(HashPrefix.transactionID, serialized));
}

module.exports = {
  Sha512Half,
  sha512Half,
  transactionID
};

View File

@@ -1,7 +1,7 @@
'use strict';
const assert = require('assert');
const coreTypes = require('@niq/ripple-core');
const coreTypes = require('./coretypes');
const {quality,
binary: {bytesToHex,
signingData,

View File

@@ -0,0 +1,74 @@
'use strict';
const _ = require('lodash');
const BN = require('bn.js');
const assert = require('assert');
const types = require('./types');
const {STObject, Hash256} = types;
const {ShaMap} = require('./shamap');
const {HashPrefix} = require('./hash-prefixes');
const {Sha512Half} = require('./hashes');
const {BinarySerializer, serializeObject} = require('./binary');

// Build a ShaMap from itemized JSON entries and return its root hash.
// `itemizer` maps one JSON entry to the [index, item] pair expected by
// ShaMap.addItem.
function computeHash(itemizer, itemsJson) {
  const map = new ShaMap();
  itemsJson.forEach(item => map.addItem(...itemizer(item)));
  return map.hash();
}

// Itemize a transaction-with-metadata for the transaction tree: indexed
// by the tx hash and serialized as two length-prefixed objects (the
// transaction, then its metaData).
function transactionItem(json) {
  assert(json.hash);
  const index = Hash256.from(json.hash);
  const item = {
    hashPrefix() {
      return HashPrefix.transaction;
    },
    toBytesSink(sink) {
      const serializer = new BinarySerializer(sink);
      serializer.writeLengthEncoded(STObject.from(json));
      serializer.writeLengthEncoded(STObject.from(json.metaData));
    }
  };
  return [index, item];
}

// Itemize a ledger state entry: indexed by its `index` field and
// serialized whole.
function entryItem(json) {
  const index = Hash256.from(json.index);
  const bytes = serializeObject(json);
  const item = {
    hashPrefix() {
      return HashPrefix.accountStateEntry;
    },
    toBytesSink(sink) {
      sink.put(bytes);
    }
  };
  return [index, item];
}

const transactionTreeHash = _.partial(computeHash, transactionItem);
const accountStateHash = _.partial(computeHash, entryItem);

// Hash a ledger header: the ledgerHeader prefix followed by the header
// fields serialized in this fixed order.
function ledgerHash(header) {
  const hash = new Sha512Half();
  hash.put(HashPrefix.ledgerHeader);
  // these two are easy to omit from RPC-sourced headers; fail loudly
  assert(header.parent_close_time !== undefined);
  assert(header.close_flags !== undefined);
  types.UInt32.from(header.ledger_index).toBytesSink(hash);
  types.UInt64.from(new BN(header.total_coins)).toBytesSink(hash);
  types.Hash256.from(header.parent_hash).toBytesSink(hash);
  types.Hash256.from(header.transaction_hash).toBytesSink(hash);
  types.Hash256.from(header.account_hash).toBytesSink(hash);
  types.UInt32.from(header.parent_close_time).toBytesSink(hash);
  types.UInt32.from(header.close_time).toBytesSink(hash);
  types.UInt8.from(header.close_time_resolution).toBytesSink(hash);
  types.UInt8.from(header.close_flags).toBytesSink(hash);
  return hash.finish();
}

module.exports = {
  accountStateHash,
  transactionTreeHash,
  ledgerHash
};

View File

@@ -0,0 +1,22 @@
'use strict';
const Decimal = require('decimal.js');
const {bytesToHex, slice, parseBytes} = require('./utils/bytes-utils');
const {UInt64} = require('./types');
const BN = require('bn.js');

// Encode/decode an offer "quality" (exchange rate) to/from its 8-byte
// representation: byte 0 holds the exponent biased by +100, the
// remaining bytes hold the mantissa.
module.exports = {
  // arg: a Decimal, or anything the Decimal constructor accepts.
  encode(arg) {
    const quality = arg instanceof Decimal ? arg : new Decimal(arg);
    // scale so the mantissa becomes a 16-digit integer
    const exponent = quality.e - 15;
    const qualityString = quality.times('1e' + -exponent).abs().toString();
    const bytes = new UInt64(new BN(qualityString)).toBytes();
    // the top mantissa byte is replaced by the biased exponent
    bytes[0] = exponent + 100;
    return bytes;
  },
  // arg: byte-like value; only the trailing 8 bytes are significant.
  decode(arg) {
    const bytes = slice(parseBytes(arg), -8);
    const exponent = bytes[0] - 100;
    // mantissa parsed from hex (Decimal won't take e-notation with hex)
    const mantissa = new Decimal(bytesToHex(slice(bytes, 1)), 16);
    return mantissa.times('1e' + exponent);
  }
};

View File

@@ -0,0 +1,101 @@
'use strict';
const assert = require('assert');
const makeClass = require('../utils/make-class');
const {Field} = require('../enums');
const {slice, parseBytes} = require('../utils/bytes-utils');
const BinaryParser = makeClass({
BinaryParser(buf) {
this._buf = parseBytes(buf, Uint8Array);
this._length = this._buf.length;
this._cursor = 0;
},
skip(n) {
this._cursor += n;
},
read(n, to = Uint8Array) {
const start = this._cursor;
const end = this._cursor + n;
assert(end <= this._buf.length);
this._cursor = end;
return slice(this._buf, start, end, to);
},
readUIntN(n) {
return this.read(n, Array).reduce((a, b) => a << 8 | b) >>> 0;
},
readUInt8() {
return this._buf[this._cursor++];
},
readUInt16() {
return this.readUIntN(2);
},
readUInt32() {
return this.readUIntN(4);
},
pos() {
return this._cursor;
},
size() {
return this._buf.length;
},
end(customEnd) {
const cursor = this.pos();
return (cursor >= this._length) || (customEnd !== null &&
cursor >= customEnd);
},
readVL() {
return this.read(this.readVLLength());
},
readVLLength() {
const b1 = this.readUInt8();
if (b1 <= 192) {
return b1;
} else if (b1 <= 240) {
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
} else if (b1 <= 254) {
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error('Invalid varint length indicator');
},
readFieldOrdinal() {
const tagByte = this.readUInt8();
const type = (tagByte & 0xF0) >>> 4 || this.readUInt8();
const nth = tagByte & 0x0F || this.readUInt8();
return type << 16 | nth;
},
readField() {
return Field.from(this.readFieldOrdinal());
},
readType(type) {
return type.fromParser(this);
},
typeForField(field) {
return field.associatedType;
},
readFieldValue(field) {
const kls = this.typeForField(field);
if (!kls) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
const sizeHint = field.isVLEncoded ? this.readVLLength() : null;
const value = kls.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `);
}
return value;
},
readFieldAndValue() {
const field = this.readField();
return [field, this.readFieldValue(field)];
}
});
module.exports = {
BinaryParser
};

View File

@@ -0,0 +1,109 @@
'use strict';
const assert = require('assert');
const {parseBytes, bytesToHex} = require('../utils/bytes-utils');
const makeClass = require('../utils/make-class');
const {Type, Field} = require('../enums');

// Duck-typed sink interface: anything with put(bytesSequence).
const BytesSink = {
  put(/* bytesSequence */) {
    // any hex string or any object with a `length` and where 0 <= [ix] <= 255
  }
};

// A BytesSink that accumulates chunks and concatenates them on demand.
const BytesList = makeClass({
  implementing: BytesSink,
  BytesList() {
    this.arrays = [];
    // running total of bytes stored across all chunks
    this.length = 0;
  },
  // Append a byte-like chunk; chainable.
  put(bytesArg) {
    const bytes = parseBytes(bytesArg, Uint8Array);
    this.length += bytes.length;
    this.arrays.push(bytes);
    return this;
  },
  // Replay every stored chunk into another sink.
  toBytesSink(sink) {
    this.arrays.forEach(arr => {
      sink.put(arr);
    });
  },
  // Concatenate all chunks into a single Uint8Array.
  toBytes() {
    const concatenated = new Uint8Array(this.length);
    let pointer = 0;
    this.arrays.forEach(arr => {
      concatenated.set(arr, pointer);
      pointer += arr.length;
    });
    return concatenated;
  },
  toHex() {
    return bytesToHex(this.toBytes());
  }
});

// Writes typed values, field headers and length prefixes to a sink.
const BinarySerializer = makeClass({
  BinarySerializer(sink) {
    this.sink = sink;
  },
  // Write any value implementing toBytesSink.
  write(value) {
    value.toBytesSink(this.sink);
  },
  // Write raw bytes.
  put(bytes) {
    this.sink.put(bytes);
  },
  // Coerce `value` through type.from, then write it.
  writeType(type, value) {
    this.write(type.from(value));
  },
  writeBytesList(bl) {
    bl.toBytesSink(this.sink);
  },
  // Encode a length as the 1-3 byte variable-length prefix:
  // <= 192 -> one byte, <= 12480 -> two bytes, <= 918744 -> three bytes.
  encodeVL(len) {
    let length = len;
    const lenBytes = new Uint8Array(4);
    if (length <= 192) {
      lenBytes[0] = length;
      return lenBytes.subarray(0, 1);
    } else if (length <= 12480) {
      length -= 193;
      lenBytes[0] = 193 + (length >>> 8);
      lenBytes[1] = length & 0xff;
      return lenBytes.subarray(0, 2);
    } else if (length <= 918744) {
      length -= 12481;
      lenBytes[0] = 241 + (length >>> 16);
      lenBytes[1] = (length >> 8) & 0xff;
      lenBytes[2] = length & 0xff;
      return lenBytes.subarray(0, 3);
    }
    throw new Error('Overflow error');
  },
  // Write a field header followed by its value. VL-encoded fields get a
  // length prefix; STObject/STArray values are closed with end markers.
  writeFieldAndValue(field, _value) {
    const sink = this.sink;
    const value = field.associatedType.from(_value);
    assert(value.toBytesSink, field);
    sink.put(field.bytes);
    if (field.isVLEncoded) {
      this.writeLengthEncoded(value);
    } else {
      value.toBytesSink(sink);
      if (field.type === Type.STObject) {
        sink.put(Field.ObjectEndMarker.bytes);
      } else if (field.type === Type.STArray) {
        sink.put(Field.ArrayEndMarker.bytes);
      }
    }
  },
  // Serialize `value` into a scratch list first to learn its length,
  // then write the VL prefix followed by the bytes.
  writeLengthEncoded(value) {
    const bytes = new BytesList();
    value.toBytesSink(bytes);
    this.put(this.encodeVL(bytes.length));
    this.writeBytesList(bytes);
  }
});

module.exports = {
  BytesList,
  BinarySerializer
};

View File

@@ -0,0 +1,110 @@
'use strict';
const assert = require('assert');
const makeClass = require('./utils/make-class');
const {Hash256} = require('./types');
const {HashPrefix} = require('./hash-prefixes');
const {Sha512Half: Hasher} = require('./hashes');

// Abstract node of a SHAMap: a 16-ary merkle radix tree keyed by
// 256-bit indexes.
const ShaMapNode = makeClass({
  virtuals: {
    hashPrefix() {},
    isLeaf() {},
    isInner() {}
  },
  cached: {
    // node hash = SHA-512Half(prefix || serialized node); memoized.
    hash() {
      const hasher = Hasher.put(this.hashPrefix());
      this.toBytesSink(hasher);
      return hasher.finish();
    }
  }
});

// Leaf node: a single item stored under its 256-bit index.
const ShaMapLeaf = makeClass({
  inherits: ShaMapNode,
  ShaMapLeaf(index, item) {
    ShaMapNode.call(this);
    this.index = index;
    this.item = item;
  },
  isLeaf() {
    return true;
  },
  isInner() {
    return false;
  },
  // the stored item decides which namespace prefix its hash uses
  hashPrefix() {
    return this.item.hashPrefix();
  },
  // serialized as the item's bytes followed by the index
  toBytesSink(sink) {
    this.item.toBytesSink(sink);
    this.index.toBytesSink(sink);
  }
});

const $uper = ShaMapNode.prototype;

// Inner node: up to 16 children, selected by the index nibble at this
// node's depth.
const ShaMapInner = makeClass({
  inherits: ShaMapNode,
  ShaMapInner(depth = 0) {
    ShaMapNode.call(this);
    this.depth = depth;
    // bitmask of occupied branch slots
    this.slotBits = 0;
    this.branches = Array(16);
  },
  isInner() {
    return true;
  },
  isLeaf() {
    return false;
  },
  hashPrefix() {
    return HashPrefix.innerNode;
  },
  setBranch(slot, branch) {
    this.slotBits = this.slotBits | (1 << slot);
    this.branches[slot] = branch;
  },
  empty() {
    return this.slotBits === 0;
  },
  // An empty inner node hashes to zero rather than hashing 16 zeroes.
  hash() {
    if (this.empty()) {
      return Hash256.ZERO_256;
    }
    return $uper.hash.call(this);
  },
  // serialized as all 16 branch hashes, zero hash for empty slots
  toBytesSink(sink) {
    for (let i = 0; i < this.branches.length; i++) {
      const branch = this.branches[i];
      const hash = branch ? branch.hash() : Hash256.ZERO_256;
      hash.toBytesSink(sink);
    }
  },
  // Insert an item (or an already-built leaf). On collision the existing
  // leaf is pushed one level deeper until the index nibbles diverge.
  addItem(index, item, leaf) {
    assert(index instanceof Hash256);
    const nibble = index.nibblet(this.depth);
    const existing = this.branches[nibble];
    if (!existing) {
      this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
    } else if (existing.isLeaf()) {
      const newInner = new ShaMapInner(this.depth + 1);
      newInner.addItem(existing.index, null, existing);
      newInner.addItem(index, item, leaf);
      this.setBranch(nibble, newInner);
    } else if (existing.isInner()) {
      existing.addItem(index, item, leaf);
    } else {
      assert(false);
    }
  }
});

// The map itself is just the root inner node at depth 0.
const ShaMap = makeClass({
  inherits: ShaMapInner
});

module.exports = {
  ShaMap
};

View File

@@ -0,0 +1,67 @@
'use strict';
/* eslint-disable func-style */
const _ = require('lodash');
const {AccountID} = require('./types');
const binary = require('./binary');
const {
serializeObject,
bytesToHex,
multiSigningData,
transactionID,
signingData
} = binary;
const FULL_CANONICAL_SIGNATURE = 0x80000000;
const toHex = v => bytesToHex(v);
const getSigner = (o) => AccountID.from(o.Signer.Account);
const signerComparator = (a, b) => getSigner(a).compareTo(getSigner(b));
function setCanonicalSignatureFlag(tx_json) {
tx_json.Flags |= FULL_CANONICAL_SIGNATURE;
tx_json.Flags >>>= 0;
}
function serializedBundle(tx_json) {
const serialized = serializeObject(tx_json);
const hash = transactionID(serialized).toHex();
const tx_blob = toHex(serialized);
return {tx_json, tx_blob, hash};
}
function signFor(tx_json_, keyPair, signingAccount = null) {
const tx_json = _.clone(tx_json_);
tx_json.SigningPubKey = '';
setCanonicalSignatureFlag(tx_json);
const signerID = signingAccount || keyPair.id();
const signature = keyPair.sign(multiSigningData(tx_json, signerID));
const signer = {
Signer: {
SigningPubKey: toHex(keyPair.publicBytes()),
TxnSignature: toHex(signature),
Account: signerID
}
};
const signers = tx_json.Signers = tx_json.Signers || [];
signers.push(signer);
signers.sort(signerComparator);
return serializedBundle(tx_json);
}
function sign(tx_json_, keyPair) {
const tx_json = _.clone(tx_json_);
setCanonicalSignatureFlag(tx_json);
tx_json.SigningPubKey = toHex(keyPair.publicBytes());
tx_json.TxnSignature = toHex(keyPair.sign(signingData(tx_json)));
return serializedBundle(tx_json);
}
module.exports = {
signFor,
sign
};

View File

@@ -0,0 +1,44 @@
'use strict';
const makeClass = require('../utils/make-class');
const {decodeAccountID, encodeAccountID} = require('ripple-address-codec');
const {Hash160} = require('./hash-160');
const AccountID = makeClass({
AccountID(bytes) {
Hash160.call(this, bytes);
},
inherits: Hash160,
statics: {
from(value) {
return value instanceof this ? value :
/^r/.test(value) ? this.fromBase58(value) :
new this(value);
},
cache: {},
fromCache(base58) {
let cached = this.cache[base58];
if (!cached) {
cached = this.cache[base58] = this.fromBase58(base58);
}
return cached;
},
fromBase58(value) {
const acc = new this(decodeAccountID(value));
acc._toBase58 = value;
return acc;
}
},
toJSON() {
return this.toBase58();
},
cached: {
toBase58() {
return encodeAccountID(this._bytes);
}
}
});
module.exports = {
AccountID
};

View File

@@ -0,0 +1,137 @@
'use strict';
const _ = require('lodash');
const assert = require('assert');
const BN = require('bn.js');
const Decimal = require('decimal.js');
const makeClass = require('../utils/make-class');
const {SerializedType} = require('./serialized-type');
const {bytesToHex} = require('../utils/bytes-utils');
const {Currency} = require('./currency');
const {AccountID} = require('./account-id');
const {UInt64} = require('./uint-64');
Decimal.config({
toExpPos: 32,
toExpNeg: -32
});
function isDefined(val) {
return !_.isUndefined(val);
}
const parsers = {
string(str) {
return [new Decimal(str).dividedBy('1e6'), Currency.XRP];
},
object(object) {
assert(isDefined(object.currency), 'currency must be defined');
assert(isDefined(object.issuer), 'issuer must be defined');
return [new Decimal(object.value),
Currency.from(object.currency),
AccountID.from(object.issuer)];
}
};
const Amount = makeClass({
Amount(value, currency, issuer) {
this.value = value || new Decimal('0');
this.currency = currency || Currency.XRP;
this.issuer = issuer || null;
},
mixins: SerializedType,
statics: {
from(value) {
if (value instanceof this) {
return value;
}
const parser = parsers[typeof value];
if (parser) {
return new this(...parser(value));
}
throw new Error(`unsupported value: ${value}`);
},
fromParser(parser) {
const mantissa = parser.read(8);
const b1 = mantissa[0];
const b2 = mantissa[1];
const isIOU = b1 & 0x80;
const isPositive = b1 & 0x40;
const sign = isPositive ? '+' : '-';
if (isIOU) {
mantissa[0] = 0;
const currency = parser.readType(Currency);
const issuer = parser.readType(AccountID);
const exponent = ((b1 & 0x3F) << 2) + ((b2 & 0xff) >> 6) - 97;
mantissa[1] &= 0x3F;
// decimal.js won't accept e notation with hex
const value = new Decimal(sign + bytesToHex(mantissa), 16)
.times('1e' + exponent);
return new this(value, currency, issuer);
}
mantissa[0] &= 0x3F;
const drops = new Decimal(sign + bytesToHex(mantissa), 16);
const xrpValue = drops.dividedBy('1e6');
return new this(xrpValue, Currency.XRP);
}
},
isNative() {
return this.currency.isNative();
},
mantissa() {
return new UInt64(
new BN(this.value.times('1e' + -this.exponent()).abs().toString()));
},
isZero() {
return this.value.isZero();
},
exponent() {
return this.isNative() ? -6 : this.value.e - 15;
},
valueString() {
return (this.isNative() ? this.value.times('1e6') : this.value)
.toString();
},
toBytesSink(sink) {
const isNative = this.isNative();
const notNegative = !this.value.isNegative();
const mantissa = this.mantissa().toBytes();
if (isNative) {
mantissa[0] |= notNegative ? 0x40 : 0;
sink.put(mantissa);
} else {
mantissa[0] |= 0x80;
if (!this.isZero()) {
if (notNegative) {
mantissa[0] |= 0x40;
}
const exponent = this.value.e - 15;
const exponentByte = 97 + exponent;
mantissa[0] |= (exponentByte >>> 2);
mantissa[1] |= (exponentByte & 0x03) << 6;
}
sink.put(mantissa);
this.currency.toBytesSink(sink);
this.issuer.toBytesSink(sink);
}
},
toJSON() {
const valueString = this.valueString();
if (this.isNative()) {
return valueString;
}
return {
value: valueString,
currency: this.currency.toJSON(),
issuer: this.issuer.toJSON()
};
}
});
module.exports = {
Amount
};

View File

@@ -0,0 +1,31 @@
'use strict';
const makeClass = require('../utils/make-class');
const {parseBytes} = require('../utils/bytes-utils');
const {SerializedType} = require('./serialized-type');
const Blob = makeClass({
mixins: SerializedType,
Blob(bytes) {
if (bytes) {
this._bytes = parseBytes(bytes, Uint8Array);
} else {
this._bytes = new Uint8Array(0);
}
},
statics: {
fromParser(parser, hint) {
return new this(parser.read(hint));
},
from(value) {
if (value instanceof this) {
return value;
}
return new this(value);
}
}
});
module.exports = {
Blob
};

View File

@@ -0,0 +1,94 @@
'use strict';
const _ = require('lodash');
const makeClass = require('../utils/make-class');
const {slice} = require('../utils/bytes-utils');
const {Hash160} = require('./hash-160');
const ISO_REGEX = /^[A-Z0-9]{3}$/;
const HEX_REGEX = /^[A-F0-9]{40}$/;

// Expand a 3-char code into the 20-byte layout (code at bytes 12-14).
// 'XRP' is special-cased to all zeroes.
function isoToBytes(iso) {
  const bytes = new Uint8Array(20);
  if (iso !== 'XRP') {
    const isoBytes = iso.split('').map(c => c.charCodeAt(0));
    bytes.set(isoBytes, 12);
  }
  return bytes;
}
// NOTE(review): deliberately looser than ISO_REGEX — any 3-char string
// is accepted here; classify() later decides whether the code can be
// rendered losslessly as ISO.
function isISOCode(val) {
  return val.length === 3; // ISO_REGEX.test(val);
}
function isHex(val) {
  return HEX_REGEX.test(val);
}
function isStringRepr(val) {
  return _.isString(val) && (isISOCode(val) || isHex(val));
}
function isBytesArray(val) {
  return val.length === 20;
}
function isValidRepr(val) {
  return isStringRepr(val) || isBytesArray(val);
}
function bytesFromRepr(val) {
  if (isValidRepr(val)) {
    // We assume at this point that we have an object with a length, either 3,
    // 20 or 40.
    return val.length === 3 ? isoToBytes(val) : val;
  }
  throw new Error(`Unsupported Currency repr: ${val}`);
}

const $uper = Hash160.prototype;
// A 160-bit currency code. Renders as a 3-letter ISO code when that is
// lossless, otherwise as the full 40-char hex form.
const Currency = makeClass({
  inherits: Hash160,
  getters: ['isNative', 'iso'],
  statics: {
    init() {
      // all-zero code is native XRP
      this.XRP = new this(new Uint8Array(20));
    },
    from(val) {
      return val instanceof this ? val : new this(bytesFromRepr(val));
    }
  },
  Currency(bytes) {
    Hash160.call(this, bytes);
    this.classify();
  },
  classify() {
    // We only have a non null iso() property available if the currency can be
    // losslessly represented by the 3 letter iso code. If none is available a
    // hex encoding of the full 20 bytes is the canonical representation.
    let onlyISO = true;
    const bytes = this._bytes;
    const code = slice(this._bytes, 12, 15, Array);
    const iso = code.map(c => String.fromCharCode(c)).join('');
    // any non-zero byte outside the 12-14 code window disqualifies ISO
    for (let i = bytes.length - 1; i >= 0; i--) {
      if (bytes[i] !== 0 && !(i === 12 || i === 13 || i === 14)) {
        onlyISO = false;
        break;
      }
    }
    const lossLessISO = onlyISO && iso !== 'XRP' && ISO_REGEX.test(iso);
    this._isNative = onlyISO && _.isEqual(code, [0, 0, 0]);
    this._iso = this._isNative ? 'XRP' : lossLessISO ? iso : null;
  },
  toJSON() {
    if (this.iso()) {
      return this.iso();
    }
    return $uper.toJSON.call(this);
  }
});

module.exports = {
  Currency
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
const Hash128 = makeClass({
inherits: Hash,
statics: {width: 16}
});
module.exports = {
Hash128
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
const Hash160 = makeClass({
inherits: Hash,
statics: {width: 20}
});
module.exports = {
Hash160
};

View File

@@ -0,0 +1,18 @@
'use strict';
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
const Hash256 = makeClass({
inherits: Hash,
statics: {
width: 32,
init() {
this.ZERO_256 = new this(new Uint8Array(this.width));
}
}
});
module.exports = {
Hash256
};

View File

@@ -0,0 +1,48 @@
'use strict';
const assert = require('assert');
const makeClass = require('../utils/make-class');
const {Comparable, SerializedType} = require('./serialized-type');
const {compareBytes, parseBytes} = require('../utils/bytes-utils');
const Hash = makeClass({
Hash(bytes) {
const width = this.constructor.width;
this._bytes = bytes ? parseBytes(bytes, Uint8Array) :
new Uint8Array(width);
assert.equal(this._bytes.length, width);
},
mixins: [Comparable, SerializedType],
statics: {
width: NaN,
from(value) {
if (value instanceof this) {
return value;
}
return new this(parseBytes(value));
},
fromParser(parser, hint) {
return new this(parser.read(hint || this.width));
}
},
compareTo(other) {
return compareBytes(this._bytes, this.constructor.from(other)._bytes);
},
toString() {
return this.toHex();
},
nibblet(depth) {
const byte_ix = depth > 0 ? (depth / 2) | 0 : 0;
let b = this._bytes[byte_ix];
if (depth % 2 === 0) {
b = (b & 0xF0) >>> 4;
} else {
b = b & 0x0F;
}
return b;
}
});
module.exports = {
Hash
};

View File

@@ -0,0 +1,47 @@
'use strict';
const enums = require('../enums');
const {Field} = enums;
const {AccountID} = require('./account-id');
const {Amount} = require('./amount');
const {Blob} = require('./blob');
const {Currency} = require('./currency');
const {Hash128} = require('./hash-128');
const {Hash160} = require('./hash-160');
const {Hash256} = require('./hash-256');
const {PathSet} = require('./path-set');
const {STArray} = require('./st-array');
const {STObject} = require('./st-object');
const {UInt16} = require('./uint-16');
const {UInt32} = require('./uint-32');
const {UInt64} = require('./uint-64');
const {UInt8} = require('./uint-8');
const {Vector256} = require('./vector-256');
const coreTypes = {
AccountID,
Amount,
Blob,
Currency,
Hash128,
Hash160,
Hash256,
PathSet,
STArray,
STObject,
UInt8,
UInt16,
UInt32,
UInt64,
Vector256
};
Field.values.forEach(field => {
field.associatedType = coreTypes[field.type];
});
Field.TransactionType.associatedType = enums.TransactionType;
Field.TransactionResult.associatedType = enums.TransactionResult;
Field.LedgerEntryType.associatedType = enums.LedgerEntryType;
module.exports = coreTypes;

View File

@@ -0,0 +1,114 @@
'use strict';
/* eslint-disable no-unused-expressions */
const makeClass = require('../utils/make-class');
const {SerializedType, ensureArrayLikeIs} = require('./serialized-type');
const {Currency} = require('./currency');
const {AccountID} = require('./account-id');
const PATHSET_END_BYTE = 0x00;
const PATH_SEPARATOR_BYTE = 0xFF;
const TYPE_ACCOUNT = 0x01;
const TYPE_CURRENCY = 0x10;
const TYPE_ISSUER = 0x20;
// One step in a payment path. Which of the three optional parts (account,
// currency, issuer) are present is encoded in a type byte of bit flags.
const Hop = makeClass({
  statics: {
    // Build from a plain object (or pass an existing Hop through).
    from(value) {
      if (value instanceof this) {
        return value;
      }
      const hop = new Hop();
      value.issuer && (hop.issuer = AccountID.from(value.issuer));
      value.account && (hop.account = AccountID.from(value.account));
      value.currency && (hop.currency = Currency.from(value.currency));
      return hop;
    },
    // Read the parts indicated by `type`, in wire order:
    // account, then currency, then issuer.
    parse(parser, type) {
      const hop = new Hop();
      (type & TYPE_ACCOUNT) && (hop.account = AccountID.fromParser(parser));
      (type & TYPE_CURRENCY) && (hop.currency = Currency.fromParser(parser));
      (type & TYPE_ISSUER) && (hop.issuer = AccountID.fromParser(parser));
      return hop;
    }
  },
  // JSON form includes the (recomputed) type byte plus any present parts.
  toJSON() {
    const type = this.type();
    const ret = {type};
    (type & TYPE_ACCOUNT) && (ret.account = this.account.toJSON());
    (type & TYPE_ISSUER) && (ret.issuer = this.issuer.toJSON());
    (type & TYPE_CURRENCY) && (ret.currency = this.currency.toJSON());
    return ret;
  },
  // Recompute the type byte from whichever parts are set.
  type() {
    let type = 0;
    this.issuer && (type += TYPE_ISSUER);
    this.account && (type += TYPE_ACCOUNT);
    this.currency && (type += TYPE_CURRENCY);
    return type;
  }
});
// A single payment path: an Array subclass whose elements are Hops.
const Path = makeClass({
  inherits: Array,
  statics: {
    // Coerce an array-like of hop-shaped objects into a Path of Hops.
    from(value) {
      return ensureArrayLikeIs(Path, value).withChildren(Hop);
    }
  },
  toJSON() {
    return this.map(k => k.toJSON());
  }
});
// A list of Paths. Wire format: hops back-to-back, paths separated by
// PATH_SEPARATOR_BYTE (0xFF), the whole set terminated by 0x00.
const PathSet = makeClass({
  mixins: SerializedType,
  inherits: Array,
  statics: {
    from(value) {
      return ensureArrayLikeIs(PathSet, value).withChildren(Path);
    },
    fromParser(parser) {
      const pathSet = new this();
      let path;
      while (!parser.end()) {
        const type = parser.readUInt8();
        if (type === PATHSET_END_BYTE) {
          break;
        }
        if (type === PATH_SEPARATOR_BYTE) {
          // Close the current path; the next hop starts a new one.
          path = null;
          continue;
        }
        if (!path) {
          path = new Path();
          pathSet.push(path);
        }
        path.push(Hop.parse(parser, type));
      }
      return pathSet;
    }
  },
  toJSON() {
    return this.map(k => k.toJSON());
  },
  toBytesSink(sink) {
    let n = 0;
    this.forEach((path) => {
      // The separator goes between paths, not before the first one.
      if (n++ !== 0) {
        sink.put([PATH_SEPARATOR_BYTE]);
      }
      path.forEach((hop) => {
        // Type byte first, then the present parts in wire order.
        sink.put([hop.type()]);
        hop.account && (hop.account.toBytesSink(sink));
        hop.currency && (hop.currency.toBytesSink(sink));
        hop.issuer && (hop.issuer.toBytesSink(sink));
      });
    });
    sink.put([PATHSET_END_BYTE]);
  }
});
module.exports = {
  PathSet
};

View File

@@ -0,0 +1,66 @@
'use strict';
const {bytesToHex, slice} = require('../utils/bytes-utils');
const {BytesList} = require('../serdes/binary-serializer');
// Mixin granting rich comparison helpers to any object implementing
// compareTo(other), which must return a negative, zero, or positive number
// (java.lang.Comparable style).
const Comparable = {
  lt(other) {
    const order = this.compareTo(other);
    return order < 0;
  },
  eq(other) {
    const order = this.compareTo(other);
    return order === 0;
  },
  gt(other) {
    const order = this.compareTo(other);
    return order > 0;
  },
  gte(other) {
    // Anything but a negative comparison result.
    const order = this.compareTo(other);
    return order > -1;
  },
  lte(other) {
    // Anything but a positive comparison result.
    const order = this.compareTo(other);
    return order < 1;
  }
};
// Base mixin shared by all serializable types: hex/JSON/string conversions
// built on top of a type's toBytesSink (or cached `_bytes`).
const SerializedType = {
  // Default: dump the cached byte representation into the sink.
  toBytesSink(sink) {
    sink.put(this._bytes);
  },
  toHex() {
    return bytesToHex(this.toBytes());
  },
  // Return a copy of the cached bytes when present; otherwise serialize
  // through toBytesSink into a fresh BytesList.
  toBytes() {
    if (this._bytes) {
      return slice(this._bytes);
    }
    const bl = new BytesList();
    this.toBytesSink(bl);
    return bl.toBytes();
  },
  // Default JSON/string form is the hex encoding; value types override.
  toJSON() {
    return this.toHex();
  },
  toString() {
    return this.toHex();
  }
};
/**
 * Coerce an array-like value into an instance of `Type` (an Array subclass).
 * Usage: ensureArrayLikeIs(Type, value).withChildren(Child). If `value` is
 * already a `Type` it is returned as-is; otherwise each element is coerced
 * via `Child.from` and pushed into a fresh `Type`.
 */
function ensureArrayLikeIs(Type, arrayLike) {
  return {
    withChildren(Child) {
      if (arrayLike instanceof Type) {
        return arrayLike;
      }
      const result = new Type();
      for (const item of Array.prototype.slice.call(arrayLike)) {
        result.push(Child.from(item));
      }
      return result;
    }
  };
}
// Shared building blocks for the concrete types in this directory.
module.exports = {
  ensureArrayLikeIs,
  SerializedType,
  Comparable
};

View File

@@ -0,0 +1,40 @@
'use strict';
const makeClass = require('../utils/make-class');
const {ensureArrayLikeIs, SerializedType} = require('./serialized-type');
const {Field} = require('../enums');
const {STObject} = require('./st-object');
const {ArrayEndMarker} = Field;
// Serialized array of single-field objects. On the wire each element is a
// field header + value; the array is terminated by ArrayEndMarker.
const STArray = makeClass({
  mixins: SerializedType,
  inherits: Array,
  statics: {
    // Read elements until the end-of-array marker (or parser exhaustion).
    // Each element becomes an STObject holding exactly one field.
    fromParser(parser) {
      const array = new STArray();
      while (!parser.end()) {
        const field = parser.readField();
        if (field === ArrayEndMarker) {
          break;
        }
        const outer = new STObject();
        outer[field] = parser.readFieldValue(field);
        array.push(outer);
      }
      return array;
    },
    // Coerce an array-like of plain objects into an STArray of STObjects.
    from(value) {
      return ensureArrayLikeIs(STArray, value).withChildren(STObject);
    }
  },
  toJSON() {
    return this.map((v) => v.toJSON());
  },
  // NOTE(review): no end marker is emitted here -- presumably the enclosing
  // serializer writes it; confirm against the field serializer.
  toBytesSink(sink) {
    this.forEach(so => so.toBytesSink(sink));
  }
});
module.exports = {
  STArray
};

View File

@@ -0,0 +1,67 @@
'use strict';
// const assert = require('assert');
const _ = require('lodash');
const makeClass = require('../utils/make-class');
const {Field} = require('../enums');
const {BinarySerializer} = require('../serdes/binary-serializer');
const {ObjectEndMarker} = Field;
const {SerializedType} = require('./serialized-type');
// A mapping of Field -> serialized value; the container type used for
// transactions and ledger entries.
const STObject = makeClass({
  mixins: SerializedType,
  statics: {
    // `hint`, when numeric, is the byte length of the object; otherwise
    // parsing stops at an ObjectEndMarker field (or parser exhaustion).
    fromParser(parser, hint) {
      const end = typeof hint === 'number' ? parser.pos() + hint : null;
      const so = new this();
      while (!parser.end(end)) {
        const field = parser.readField();
        if (field === ObjectEndMarker) {
          break;
        }
        so[field] = parser.readFieldValue(field);
      }
      return so;
    },
    // Build from a plain object: keys naming known Fields are coerced
    // through that field's associatedType; unknown keys are copied as-is.
    from(value) {
      if (value instanceof this) {
        return value;
      }
      if (typeof value === 'object') {
        return _.transform(value, (so, val, key) => {
          const field = Field[key];
          if (field) {
            so[field] = field.associatedType.from(val);
          } else {
            so[key] = val;
          }
        }, new this());
      }
      throw new Error(`${value} is unsupported`);
    }
  },
  // Own keys that name known Fields, as Field objects.
  fieldKeys() {
    return Object.keys(this).map((k) => Field[k]).filter(Boolean);
  },
  toJSON() {
    // No accumulator passed: lodash supplies one, and each value is
    // JSON-ified when it knows how.
    return _.transform(this, (result, value, key) => {
      result[key] = value && value.toJSON ? value.toJSON() : value;
    });
  },
  // Serialize fields in canonical (ordinal) order. `filter` lets callers
  // omit fields (e.g. signing fields); non-serialized fields are skipped.
  toBytesSink(sink, filter = () => true) {
    const serializer = new BinarySerializer(sink);
    const fields = this.fieldKeys();
    const sorted = _.sortBy(fields, 'ordinal');
    sorted.filter(filter).forEach((field) => {
      const value = this[field];
      if (!field.isSerialized) {
        return;
      }
      serializer.writeFieldAndValue(field, value);
    });
  }
});
module.exports = {
  STObject
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
// 2-byte unsigned integer (shared behaviour lives in ./uint).
const UInt16 = makeClass({
  inherits: UInt,
  statics: {width: 2}
});
module.exports = {
  UInt16
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
// 4-byte unsigned integer (shared behaviour lives in ./uint).
const UInt32 = makeClass({
  inherits: UInt,
  statics: {width: 4}
});
module.exports = {
  UInt32
};

View File

@@ -0,0 +1,52 @@
'use strict';
const assert = require('assert');
const BN = require('bn.js');
const makeClass = require('../utils/make-class');
const {bytesToHex, parseBytes, serializeUIntN}
= require('../utils/bytes-utils');
const {UInt} = require('./uint');
// Exactly 16 upper-case hex characters (8 bytes).
const HEX_REGEX = /^[A-F0-9]{16}$/;
// 8-byte unsigned integer. Too wide for a JS number, so the value is kept
// as raw bytes and exposed as a BN / hex string.
const UInt64 = makeClass({
  inherits: UInt,
  statics: {width: 8},
  // Accepts: a non-negative number (only the low 32 bits are serialized,
  // written into the last 4 bytes), a BN, a 16-char upper-case hex string,
  // or any byte sequence of length 8.
  UInt64(arg = 0) {
    const argType = typeof arg;
    if (argType === 'number') {
      assert(arg >= 0);
      this._bytes = new Uint8Array(8);
      this._bytes.set(serializeUIntN(arg, 4), 4);
    } else if (arg instanceof BN) {
      this._bytes = parseBytes(arg.toArray('be', 8), Uint8Array);
      // Pre-warm the memoised toBN() (see `cached` below).
      this._toBN = arg;
    } else {
      if (argType === 'string') {
        if (!HEX_REGEX.test(arg)) {
          throw new Error(`${arg} is not a valid UInt64 hex string`);
        }
      }
      this._bytes = parseBytes(arg, Uint8Array);
    }
    assert(this._bytes.length === 8);
  },
  // JSON form is the 16-char upper-case hex string.
  toJSON() {
    return bytesToHex(this._bytes);
  },
  valueOf() {
    return this.toBN();
  },
  cached: {
    toBN() {
      return new BN(this._bytes);
    }
  },
  // NOTE(review): returns the internal buffer (not a copy, unlike the
  // SerializedType default) -- callers must not mutate the result.
  toBytes() {
    return this._bytes;
  }
});
module.exports = {
  UInt64
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
// 1-byte unsigned integer (shared behaviour lives in ./uint).
const UInt8 = makeClass({
  inherits: UInt,
  statics: {width: 1}
});
module.exports = {
  UInt8
};

View File

@@ -0,0 +1,63 @@
'use strict';
const assert = require('assert');
const BN = require('bn.js');
const makeClass = require('../utils/make-class');
const {Comparable, SerializedType} = require('./serialized-type');
const {serializeUIntN} = require('../utils/bytes-utils');
const MAX_VALUES = [0, 255, 65535, 16777215, 4294967295];
function signum(a, b) {
return a < b ? -1 : a === b ? 0 : 1;
}
const UInt = makeClass({
mixins: [Comparable, SerializedType],
UInt(val = 0) {
const max = MAX_VALUES[this.constructor.width];
if (val < 0 || !(val <= max)) {
throw new Error(`${val} not in range 0 <= $val <= ${max}`);
}
this.val = val;
},
statics: {
width: 0,
fromParser(parser) {
const val = this.width > 4 ? parser.read(this.width) :
parser.readUIntN(this.width);
return new this(val);
},
from(val) {
return val instanceof this ? val : new this(val);
}
},
toJSON() {
return this.val;
},
valueOf() {
return this.val;
},
compareTo(other) {
const thisValue = this.valueOf();
const otherValue = other.valueOf();
if (thisValue instanceof BN) {
return otherValue instanceof BN ?
thisValue.cmp(otherValue) :
thisValue.cmpn(otherValue);
} else if (otherValue instanceof BN) {
return -other.compareTo(this);
}
assert(typeof otherValue === 'number');
return signum(thisValue, otherValue);
},
toBytesSink(sink) {
sink.put(this.toBytes());
},
toBytes() {
return serializeUIntN(this.val, this.constructor.width);
}
});
module.exports = {
UInt
};

View File

@@ -0,0 +1,34 @@
'use strict';
const makeClass = require('../utils/make-class');
const {Hash256} = require('./hash-256');
const {ensureArrayLikeIs, SerializedType} = require('./serialized-type');
const Vector256 = makeClass({
mixins: SerializedType,
inherits: Array,
statics: {
fromParser(parser, hint) {
const vector256 = new this();
const bytes = hint !== null ? hint : parser.size() - parser.pos();
const hashes = bytes / 32;
for (let i = 0; i < hashes; i++) {
vector256.push(Hash256.fromParser(parser));
}
return vector256;
},
from(value) {
return ensureArrayLikeIs(Vector256, value).withChildren(Hash256);
}
},
toBytesSink(sink) {
this.forEach(h => h.toBytesSink(sink));
},
toJSON() {
return this.map((hash) => hash.toJSON());
}
});
module.exports = {
Vector256
};

View File

@@ -0,0 +1,115 @@
'use strict';
const assert = require('assert');
// Three-way compare: -1 if a < b, 0 if a === b, else 1.
function signum(a, b) {
  if (a < b) {
    return -1;
  }
  if (a === b) {
    return 0;
  }
  return 1;
}
// Lookup tables for fast hex <-> byte conversion (UPPER-CASE hex only).
// hexLookup maps '0'..'F' -> nibble value and '00'..'FF' -> byte value;
// hexLookup.reverse maps byte value -> two-char upper-case hex string.
const hexLookup = (function() {
  const table = {};
  const reverse = table.reverse = new Array(256);
  const nibbles = '0123456789ABCDEF';
  for (let hi = 0; hi < 16; hi++) {
    table[nibbles[hi]] = hi;
    for (let lo = 0; lo < 16; lo++) {
      const pair = nibbles[hi] + nibbles[lo];
      const value = hi * 16 + lo;
      table[pair] = value;
      reverse[value] = pair;
    }
  }
  return table;
}());
const reverseHexLookup = hexLookup.reverse;
// Encode a byte sequence (Array/Uint8Array/Buffer) as upper-case hex.
function bytesToHex(sequence) {
  const chunks = [];
  for (let ix = 0; ix < sequence.length; ix++) {
    chunks.push(reverseHexLookup[sequence[ix]]);
  }
  return chunks.join('');
}
// Decode one or two upper-case hex characters into a byte value; throws on
// anything outside the lookup table (including lower-case hex).
function byteForHex(hex) {
  const byte = hexLookup[hex];
  if (byte === undefined) {
    throw new Error(`\`${hex}\` is not a valid hex representation of a byte`);
  }
  return byte;
}
/**
 * Normalise `val` into a byte sequence of the requested `Output` class.
 * Accepts an upper-case hex string (odd lengths get an implicit leading
 * nibble) or any array-like of bytes. An input already of class `Output`
 * is returned unchanged (not copied). Throws on non-sequences.
 */
function parseBytes(val, Output = Array) {
  if (!val || val.length === undefined) {
    throw new Error(`${val} is not a sequence`);
  }
  if (typeof val === 'string') {
    const offset = val.length % 2;
    const decoded = new Output((val.length + offset) / 2);
    if (offset === 1) {
      // Lone leading nibble of an odd-length string.
      decoded[0] = byteForHex(val[0]);
    }
    for (let to = offset; to < decoded.length; to++) {
      const start = to * 2 - offset;
      decoded[to] = byteForHex(val.slice(start, start + 2));
    }
    return decoded;
  }
  if (val instanceof Output) {
    return val;
  }
  if (Output === Uint8Array) {
    // The typed-array constructor copies array-likes directly.
    return new Output(val);
  }
  const copy = new Output(val.length);
  for (let ix = 0; ix < val.length; ix++) {
    copy[ix] = val[ix];
  }
  return copy;
}
// Serialize an unsigned integer as `width` big-endian bytes (Uint8Array).
function serializeUIntN(val, width) {
  const out = new Uint8Array(width);
  for (let ix = width - 1, shift = 0; ix >= 0; ix--, shift += 8) {
    out[ix] = (val >>> shift) & 0xff;
  }
  return out;
}
// Lexicographically compare two equal-length byte sequences, returning
// -1/0/1; throws (via assert) when the lengths differ.
function compareBytes(a, b) {
  assert(a.length === b.length);
  let ix = 0;
  while (ix < a.length) {
    const order = signum(a[ix], b[ix]);
    if (order !== 0) {
      return order;
    }
    ix += 1;
  }
  return 0;
}
/**
 * Copy the sub-range [startIx, endIx) of a byte sequence into a new
 * sequence. Negative indices count from the end; `Output` defaults to the
 * input's own class.
 */
function slice(val, startIx = 0, endIx = val.length, Output = val.constructor) {
  let from = startIx < 0 ? startIx + val.length : startIx;
  const to = endIx < 0 ? endIx + val.length : endIx;
  const out = new Output(to - from);
  for (let ix = 0; from < to; from++, ix++) {
    out[ix] = val[from];
  }
  return out;
}
// Byte-sequence helpers shared across the codec.
module.exports = {
  parseBytes,
  bytesToHex,
  slice,
  compareBytes,
  serializeUIntN
};

View File

@@ -0,0 +1,85 @@
'use strict';
const _ = require('lodash');
const inherits = require('inherits');
// Iterate the own enumerable keys of obj (tolerating null/undefined),
// invoking func(value, key) for each.
function forEach(obj, func) {
  const keys = Object.keys(obj || {});
  for (const key of keys) {
    func(obj[key], key);
  }
}
// Wrap a non-array value in a single-element array; arrays pass through.
function ensureArray(val) {
  if (Array.isArray(val)) {
    return val;
  }
  return [val];
}
// Minimal class-building helper used throughout this package.
//
// `definition` is a bag of:
//   - an optional capitalised key naming the constructor function,
//   - inherits / mixins / statics / getters / virtuals / methods / cached,
//   - any other function-valued top-level key, installed on the prototype
//     under its function `.name` (so these must be named, non-arrow
//     functions).
module.exports = function makeClass(klass_, definition_) {
  const definition = definition_ || klass_;
  let klass = typeof klass_ === 'function' ? klass_ : null;
  if (klass === null) {
    // Locate the constructor: the first key starting with a capital letter.
    for (const k in definition) {
      if (k[0].match(/[A-Z]/)) {
        klass = definition[k];
        break;
      }
    }
  }
  const parent = definition.inherits;
  if (parent) {
    if (klass === null) {
      // Default constructor simply delegates to the parent.
      klass = function() {
        parent.apply(this, arguments);
      };
    }
    inherits(klass, parent);
    // inherits() only wires up prototypes; copy statics down as well.
    _.defaults(klass, parent);
  }
  if (klass === null) {
    klass = function() {};
  }
  const proto = klass.prototype;
  // Install `wrapper` (or `original` itself) under original's `.name`.
  function addFunc(original, wrapper) {
    proto[original.name] = wrapper || original;
  }
  // getters: ['x'] creates a method x() returning this._x.
  (definition.getters || []).forEach(k => {
    const key = '_' + k;
    proto[k] = function() {
      return this[key];
    };
  });
  // virtuals: abstract methods that throw until a subclass overrides them.
  forEach(definition.virtuals, f => {
    addFunc(f, function() {
      throw new Error('unimplemented');
    });
  });
  forEach(definition.methods, addFunc);
  // Any other function-valued top-level key becomes a prototype method.
  forEach(definition, f => {
    if (_.isFunction(f) && f !== klass) {
      addFunc(f);
    }
  });
  _.assign(klass, definition.statics);
  // One-time class-level initialisation hook.
  if (typeof klass.init === 'function') {
    klass.init();
  }
  // cached: memoised zero-arg methods; results are stored on `_<name>`.
  // NOTE(review): a legitimately `undefined` result is recomputed per call.
  forEach(definition.cached, f => {
    const key = '_' + f.name;
    addFunc(f, function() {
      let value = this[key];
      if (value === undefined) {
        value = this[key] = f.call(this);
      }
      return value;
    });
  });
  if (definition.mixins) {
    const mixins = {};
    // Right-most in the list win
    ensureArray(definition.mixins).reverse().forEach(o => {
      _.defaults(mixins, o);
    });
    // Mixin methods never override ones already defined above.
    _.defaults(proto, mixins);
  }
  return klass;
};

View File

@@ -0,0 +1,24 @@
'use strict';
const assert = require('assert-diff');
const {Amount} = require('../src/coretypes');
// Smoke tests for Amount construction from drops strings and IOU objects.
describe('Amount', function() {
  it('can be parsed from', function() {
    assert(Amount.from('1000000') instanceof Amount);
    assert.equal(Amount.from('1000000').valueString(), '1000000');
    const fixture = {
      'value': '1',
      'issuer': '0000000000000000000000000000000000000000',
      'currency': 'USD'
    };
    const amt = Amount.from(fixture);
    // A hex issuer is re-encoded to its base58 account form on output.
    const rewritten = {
      'value': '1',
      'issuer': 'rrrrrrrrrrrrrrrrrrrrrhoLvTp',
      'currency': 'USD'
    };
    assert.deepEqual(amt.toJSON(), rewritten);
  });
});

View File

@@ -0,0 +1,373 @@
/* eslint-disable func-style */
'use strict';
const coreTypes = require('../src/coretypes');
const _ = require('lodash');
const assert = require('assert-diff');
const {encodeAccountID} = require('ripple-address-codec');
const {binary: {makeParser, readJSON}, Field, Amount, Hash160} = coreTypes;
const {enums: {TransactionType}} = coreTypes;
const utils = require('./utils');
const {parseHexOnly, assertEqualAmountJSON, hexOnly, loadFixture} = utils;
const {bytesToHex} = require('../src/utils/bytes-utils');
const fixtures = loadFixture('data-driven-tests.json');
const {BytesList} = require('../src/serdes/binary-serializer');
const __ = hexOnly;
// No-op used to mark intentionally unused values (keeps eslint quiet).
function unused() {}
// Call toJSON() when the value provides one; otherwise pass it through.
function toJSON(v) {
  if (v.toJSON) {
    return v.toJSON();
  }
  return v;
}
// Exercises the low-level read API of the binary parser.
function basicApiTests() {
  const bytes = parseHexOnly('00,01020304,0506', Uint8Array);
  it('can read slices of bytes', () => {
    const parser = makeParser(bytes);
    assert.deepEqual(parser.pos(), 0);
    assert(parser._buf instanceof Uint8Array);
    const read1 = parser.read(1);
    assert(read1 instanceof Uint8Array);
    assert.deepEqual(read1, [0]);
    assert.deepEqual(parser.read(4), [1, 2, 3, 4]);
    assert.deepEqual(parser.read(2), [5, 6]);
    // Reading past the end must throw.
    assert.throws(() => parser.read(1));
  });
  it('can read a Uint32 at full', () => {
    const parser = makeParser('FFFFFFFF');
    assert.equal(parser.readUInt32(), 0xFFFFFFFF);
  });
}
// End-to-end parsing of a real OfferCreate transaction at three levels of
// abstraction: raw reads, readFieldAndValue, and readJSON.
function transactionParsingTests() {
  const transaction = {
    json: {
      'Account': 'raD5qJMAShLeHZXf9wjUmo6vRK4arj9cF3',
      'Fee': '10',
      'Flags': 0,
      'Sequence': 103929,
      'SigningPubKey':
        '028472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F418D6A7166',
      'TakerGets': {'currency': 'ILS',
        'issuer': 'rNPRNzBB92BVpAhhZr4iXDTveCgV5Pofm9',
        'value': '1694.768'},
      'TakerPays': '98957503520',
      'TransactionType': 'OfferCreate',
      'TxnSignature': __(`
        304502202ABE08D5E78D1E74A4C18F2714F64E87B8BD57444AF
        A5733109EB3C077077520022100DB335EE97386E4C0591CAC02
        4D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C`)
    },
    binary: __(`
      120007220000000024000195F964400000170A53AC2065D5460561E
      C9DE000000000000000000000000000494C53000000000092D70596
      8936C419CE614BF264B5EEB1CEA47FF468400000000000000A73210
      28472865AF4CB32AA285834B57576B7290AA8C31B459047DB27E16F
      418D6A71667447304502202ABE08D5E78D1E74A4C18F2714F64E87B
      8BD57444AFA5733109EB3C077077520022100DB335EE97386E4C059
      1CAC024D50E9230D8F171EEB901B5E5E4BD6D1E0AEF98C811439408
      A69F0895E62149CFCC006FB89FA7D1E6E5D`)
  };
  const tx_json = transaction.json;
  // These tests are basically development logs
  it('can be done with low level apis', () => {
    const parser = makeParser(transaction.binary);
    assert.equal(parser.readField(), Field.TransactionType);
    assert.equal(parser.readUInt16(), 7);
    assert.equal(parser.readField(), Field.Flags);
    assert.equal(parser.readUInt32(), 0);
    assert.equal(parser.readField(), Field.Sequence);
    assert.equal(parser.readUInt32(), 103929);
    assert.equal(parser.readField(), Field.TakerPays);
    parser.read(8);
    assert.equal(parser.readField(), Field.TakerGets);
    // amount value
    assert(parser.read(8));
    // amount currency
    assert(Hash160.fromParser(parser));
    assert.equal(encodeAccountID(parser.read(20)),
      tx_json.TakerGets.issuer);
    assert.equal(parser.readField(), Field.Fee);
    assert(parser.read(8));
    assert.equal(parser.readField(), Field.SigningPubKey);
    assert.equal(parser.readVLLength(), 33);
    assert.equal(bytesToHex(parser.read(33)), tx_json.SigningPubKey);
    assert.equal(parser.readField(), Field.TxnSignature);
    assert.equal(bytesToHex(parser.readVL()), tx_json.TxnSignature);
    assert.equal(parser.readField(), Field.Account);
    assert.equal(encodeAccountID(parser.readVL()), tx_json.Account);
    assert(parser.end());
  });
  it('can be done with high level apis', () => {
    const parser = makeParser(transaction.binary);
    function readField() {
      return parser.readFieldAndValue();
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.TransactionType);
      assert.equal(value, TransactionType.OfferCreate);
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.Flags);
      assert.equal(value, 0);
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.Sequence);
      assert.equal(value, 103929);
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.TakerPays);
      assert.equal(value.currency.isNative(), true);
      assert.equal(value.currency.toJSON(), 'XRP');
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.TakerGets);
      assert.equal(value.currency.isNative(), false);
      assert.equal(value.issuer.toJSON(), tx_json.TakerGets.issuer);
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.Fee);
      assert.equal(value.currency.isNative(), true);
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.SigningPubKey);
      assert.equal(value.toJSON(), tx_json.SigningPubKey);
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.TxnSignature);
      assert.equal(value.toJSON(), tx_json.TxnSignature);
    }
    {
      const [field, value] = readField();
      assert.equal(field, Field.Account);
      assert.equal(value.toJSON(), tx_json.Account);
    }
    assert(parser.end());
  });
  it('can be done with higher level apis', () => {
    const parser = makeParser(transaction.binary);
    const jsonFromBinary = readJSON(parser);
    assert.deepEqual(jsonFromBinary, tx_json);
  });
}
// Data-driven Amount decoding tests (error fixtures are skipped here).
function amountParsingTests() {
  _.filter(fixtures.values_tests, {type: 'Amount'}).forEach((f, i) => {
    if (f.error) {
      return;
    }
    const parser = makeParser(f.expected_hex);
    const testName =
      `values_tests[${i}] parses ${f.expected_hex.slice(0, 16)}...
      as ${JSON.stringify(f.test_json)}`;
    it(testName, () => {
      const value = parser.readType(Amount);
      // May not actually be in canonical form. The fixtures are to be used
      // also for json -> binary;
      assertEqualAmountJSON(toJSON(value), f.test_json);
      if (f.exponent) {
        assert.equal(value.exponent(), f.exponent);
      }
    });
  });
}
// Data-driven checks that field headers decode to the right name and type.
function fieldParsingTests() {
  fixtures.fields_tests.forEach((f, i) => {
    const parser = makeParser(f.expected_hex);
    it(`fields[${i}]: parses ${f.expected_hex} as ${f.name}`, () => {
      const field = parser.readField();
      assert.equal(field.name, f.name);
      assert.equal(field.type.name, f.type_name);
    });
  });
}
// Assert that `json` survives two round trips through forField's type:
// JSON -> Type -> JSON, then JSON -> Type -> bytes -> Type -> JSON.
function assertRecyclable(json, forField) {
  const Type = forField.associatedType;
  const recycled = Type.from(json).toJSON();
  assert.deepEqual(recycled, json);
  const sink = new BytesList();
  Type.from(recycled).toBytesSink(sink);
  const recycledAgain = makeParser(sink.toHex())
    .readType(Type)
    .toJSON();
  assert.deepEqual(recycledAgain, json);
}
// Field-by-field comparison of parsed whole objects against fixture
// expectations, with a recyclability check for every value.
function nestedObjectTests() {
  // Hook for temporarily disabling individual fixtures during development.
  function disabled(i) {
    unused(i);
    return false; // !_.includes([2], i);
  }
  fixtures.whole_objects.forEach((f, i) => {
    if (disabled(i)) {
      return;
    }
    it(`whole_objects[${i}]: can parse blob into
      ${JSON.stringify(f.tx_json)}`,
    /*                                              */ () => {
      const parser = makeParser(f.blob_with_no_signing);
      let ix = 0;
      while (!parser.end()) {
        const [field, value] = parser.readFieldAndValue();
        const expected = f.fields[ix];
        const expectedJSON = expected[1].json;
        const expectedField = expected[0];
        const actual = toJSON(value);
        try {
          assert.deepEqual(actual, expectedJSON);
        } catch (e) {
          // Re-throw with the offending field for easier diagnosis.
          throw new Error(`${e} ${field} a: ${actual} e: ${expectedJSON}`);
        }
        assert.equal(field.name, expectedField);
        assertRecyclable(actual, field);
        ix++;
      }
    });
  });
}
// Decodes a full transaction containing a three-path PathSet and checks
// the Paths JSON, plus PathSet.from round-tripping of that JSON.
function pathSetBinaryTests() {
  const bytes = __(
    `1200002200000000240000002E2E00004BF161D4C71AFD498D00000000000000
    0000000000000055534400000000000A20B3C85F482532A9578DBB3950B85CA0
    6594D168400000000000000A69D446F8038585E9400000000000000000000000
    00425443000000000078CA21A6014541AB7B26C3929B9E0CD8C284D61C732103
    A4665B1F0B7AE2BCA12E2DB80A192125BBEA660F80E9CEE137BA444C1B0769EC
    7447304502205A964536805E35785C659D1F9670D057749AE39668175D6AA75D
    25B218FE682E0221009252C0E5DDD5F2712A48F211669DE17B54113918E0D2C2
    66F818095E9339D7D3811478CA21A6014541AB7B26C3929B9E0CD8C284D61C83
    140A20B3C85F482532A9578DBB3950B85CA06594D1011231585E1F3BD02A15D6
    185F8BB9B57CC60DEDDB37C10000000000000000000000004254430000000000
    585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C131E4FE687C90257D3D2D694C
    8531CDEECBE84F33670000000000000000000000004254430000000000E4FE68
    7C90257D3D2D694C8531CDEECBE84F3367310A20B3C85F482532A9578DBB3950
    B85CA06594D100000000000000000000000042544300000000000A20B3C85F48
    2532A9578DBB3950B85CA06594D1300000000000000000000000005553440000
    0000000A20B3C85F482532A9578DBB3950B85CA06594D1FF31585E1F3BD02A15
    D6185F8BB9B57CC60DEDDB37C100000000000000000000000042544300000000
    00585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C131E4FE687C90257D3D2D69
    4C8531CDEECBE84F33670000000000000000000000004254430000000000E4FE
    687C90257D3D2D694C8531CDEECBE84F33673115036E2D3F5437A83E5AC3CAEE
    34FF2C21DEB618000000000000000000000000425443000000000015036E2D3F
    5437A83E5AC3CAEE34FF2C21DEB6183000000000000000000000000055534400
    000000000A20B3C85F482532A9578DBB3950B85CA06594D1FF31585E1F3BD02A
    15D6185F8BB9B57CC60DEDDB37C1000000000000000000000000425443000000
    0000585E1F3BD02A15D6185F8BB9B57CC60DEDDB37C13157180C769B66D942EE
    69E6DCC940CA48D82337AD000000000000000000000000425443000000000057
    180C769B66D942EE69E6DCC940CA48D82337AD10000000000000000000000000
    58525000000000003000000000000000000000000055534400000000000A20B3
    C85F482532A9578DBB3950B85CA06594D100`);
  // Expected Paths JSON: type 49 = account|currency|issuer, 48 =
  // currency|issuer, 16 = currency only (raw hex when iso() is invalid).
  const expectedJSON =
    [[{account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      type: 49},
    {account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
      currency: 'BTC',
      issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
      type: 49},
    {account: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
      currency: 'BTC',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
      type: 49},
    {currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
      type: 48}],
    [{account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      type: 49},
    {account: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
      currency: 'BTC',
      issuer: 'rM1oqKtfh1zgjdAgbFmaRm3btfGBX25xVo',
      type: 49},
    {account: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi',
      currency: 'BTC',
      issuer: 'rpvfJ4mR6QQAeogpXEKnuyGBx8mYCSnYZi',
      type: 49},
    {currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
      type: 48}],
    [{account: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      currency: 'BTC',
      issuer: 'r9hEDb4xBGRfBCcX3E4FirDWQBAYtpxC8K',
      type: 49},
    {account: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn',
      currency: 'BTC',
      issuer: 'r3AWbdp2jQLXLywJypdoNwVSvr81xs3uhn',
      type: 49},
    {currency: '0000000000000000000000005852500000000000', type: 16},
    {currency: 'USD',
      issuer: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B',
      type: 48}]];
  it('works with long paths', () => {
    const parser = makeParser(bytes);
    const txn = readJSON(parser);
    assert.deepEqual(txn.Paths, expectedJSON);
    // TODO: this should go elsewhere
    assert.deepEqual(
      coreTypes.PathSet.from(txn.Paths).toJSON(),
      expectedJSON
    );
  });
}
// Disabled-by-default dev harness comparing our parser's output against a
// full-ledger fixture (historically also against ripple-lib, see the
// commented line below).
function parseLedger4320278() {
  let ripple = require('ripple-lib');
  if (ripple._DEPRECATED) {
    ripple = ripple._DEPRECATED;
  }
  ripple.Amount.strict_mode = false;
  // Deliberately falsified so the catch below logs mismatches.
  ripple = false;
  it(`can parse object`, () => {
    // NOTE(review): arrow function -- `this` here is the enclosing suite
    // callback's `this`, not the mocha test context; confirm the timeout
    // actually takes effect.
    this.timeout(30e3);
    const json = loadFixture('as-ledger-4320278.json');
    json.forEach((e, i) => {
      const expected = e.json;
      const actual = readJSON(makeParser(e.binary));
      actual.index = expected.index;
      // const actual = new ripple.SerializedObject(e.binary).to_json();
      try {
        assert.deepEqual(actual, expected);
      } catch (error) {
        console.log('error', i, !ripple && error);
      }
    });
  });
}
// Top-level suite wiring for the parser tests defined above.
describe('BinaryParser', function() {
  function dataDrivenTests() {
    describe.skip('as-ledger-4320278.json', parseLedger4320278);
    describe('Amount parsing tests', amountParsingTests);
    describe('Field Tests', fieldParsingTests);
    describe('Parsing nested objects', nestedObjectTests);
  }
  describe('pathSetBinaryTests', pathSetBinaryTests);
  describe('Basic API', basicApiTests);
  describe('Parsing a transaction', transactionParsingTests);
  describe('Data Driven Tests', dataDrivenTests);
});

View File

@@ -0,0 +1,93 @@
/* eslint-disable func-style */
'use strict';
const BN = require('bn.js');
const assert = require('assert-diff');
const lib = require('../src/coretypes');
const {binary: {makeParser, BytesList, BinarySerializer}} = lib;
const {UInt8, UInt16, UInt32, UInt64, STObject} = lib;
const {loadFixture} = require('./utils');
const fixtures = loadFixture('data-driven-tests.json');
// Unit tests for the BytesList accumulator used by the serializer.
function bytesListTest() {
  const list = new BytesList().put([0]).put([2, 3]).put([4, 5]);
  it('is an Array<Uint8Array>', function() {
    assert(Array.isArray(list.arrays));
    assert(list.arrays[0] instanceof Uint8Array);
  });
  it('keeps track of the length itself', function() {
    assert.equal(list.length, 5);
  });
  it('can join all arrays into one via toBytes', function() {
    const joined = list.toBytes();
    // BUG FIX: was `assert(joined.length, 5)`, which only checks that the
    // first argument is truthy and would pass for any non-zero length.
    assert.equal(joined.length, 5);
    assert.deepEqual(joined, [0, 2, 3, 4, 5]);
  });
}
// Parse `blob` (hex) into an STObject, re-serialize it, and require the
// output hex to match the input exactly.
function assertRecycles(blob) {
  const parser = makeParser(blob);
  const so = parser.readType(STObject);
  const out = new BytesList();
  so.toBytesSink(out);
  const hex = out.toHex();
  assert.equal(hex, blob);
  // Sanity check that the equality above is not vacuously true.
  assert.notEqual(hex + ':', blob);
}
// Round-trips every whole-object fixture blob through parse + serialize.
function nestedObjectTests() {
  fixtures.whole_objects.forEach((f, i) => {
    it(`whole_objects[${i}]: can parse blob and dump out same blob`,
    /*                                                            */ () => {
      assertRecycles(f.blob_with_no_signing);
    });
  });
}
// Serialization checks for each UInt width, including out-of-range values
// (expected === 'throws' marks those).
function UIntTest() {
  function check(type, n, expected) {
    it(`Uint${type.width * 8} serializes ${n} as ${expected}`, function() {
      const bl = new BytesList();
      const serializer = new BinarySerializer(bl);
      if (expected === 'throws') {
        assert.throws(() => serializer.writeType(type, n));
        return;
      }
      serializer.writeType(type, n);
      assert.deepEqual(bl.toBytes(), expected);
    });
  }
  check(UInt8, 5, [5]);
  check(UInt16, 5, [0, 5]);
  check(UInt32, 5, [0, 0, 0, 5]);
  check(UInt32, 0xFFFFFFFF, [255, 255, 255, 255]);
  check(UInt8, 0xFEFFFFFF, 'throws');
  check(UInt16, 0xFEFFFFFF, 'throws');
  // NOTE(review): duplicate of the line above -- possibly meant to cover a
  // different width; confirm intent and dedupe.
  check(UInt16, 0xFEFFFFFF, 'throws');
  check(UInt64, 0xFEFFFFFF, [0, 0, 0, 0, 254, 255, 255, 255]);
  check(UInt64, -1, 'throws');
  check(UInt64, 0, [0, 0, 0, 0, 0, 0, 0, 0]);
  check(UInt64, 1, [0, 0, 0, 0, 0, 0, 0, 1]);
  check(UInt64, new BN(1), [0, 0, 0, 0, 0, 0, 0, 1]);
}
// Round-trips every entry of ledger 4320278 (skipped by default; slow).
function parseLedger4320278() {
  it(`can parse object`, (done) => {
    // NOTE(review): arrow function -- `this` here is the enclosing suite
    // callback's `this`, not the mocha test context; confirm the timeout
    // takes effect. `done` is also unnecessary for this synchronous body.
    this.timeout(30e3);
    const json = loadFixture('as-ledger-4320278.json');
    json.forEach((e) => {
      assertRecycles(e.binary);
    });
    done();
  });
}
// Top-level suite wiring for the serializer tests defined above.
describe('Binary Serialization', function() {
  describe.skip('parseLedger4320278', parseLedger4320278);
  describe('nestedObjectTests', nestedObjectTests);
  describe('UIntTest', UIntTest);
  describe('BytesList', bytesListTest);
});

View File

@@ -0,0 +1,74 @@
'use strict';
const assert = require('assert');
const bytesUtils = require('../src/utils/bytes-utils');
const {slice, compareBytes, parseBytes, bytesToHex} = bytesUtils;
// Unit tests for the hex/byte-sequence helpers in src/utils/bytes-utils.
describe('bytes-utils', function() {
  describe('parseBytes', function() {
    it('can decode hex', function() {
      assert.deepEqual(parseBytes('0012'), [0x00, 0x12]);
      // NOTE(review): duplicate of the line above -- possibly meant to
      // exercise a different input; confirm intent.
      assert.deepEqual(parseBytes('0012'), [0x00, 0x12]);
      assert.deepEqual(parseBytes('00AA'), [0x00, 0xaa]);
    });
    it('can decode hex to a Uint8Array', function() {
      const result = parseBytes('0012', Uint8Array);
      assert(result instanceof Uint8Array);
      assert.deepEqual(result, [0x00, 0x12]);
    });
    it('can convert a list to a Uint8Array', function() {
      const result = parseBytes([0x00, 0x12], Uint8Array);
      assert(result instanceof Uint8Array);
      assert.deepEqual(result, [0x00, 0x12]);
    });
    it('can decode hex to a Buffer', function() {
      const result = parseBytes('0012', Buffer);
      assert(result instanceof Buffer);
      assert.deepEqual(result.toJSON().data, [0x00, 0x12]);
    });
  });
  describe('bytesToHex', function() {
    it('can encode an array as hex', function() {
      assert.deepEqual(bytesToHex([0x00, 0xaa]), '00AA');
      assert.deepEqual(bytesToHex([0xaa]), 'AA');
    });
    it('can encode Uint8Array as hex', function() {
      assert.deepEqual(bytesToHex(new Uint8Array([0x00, 0xaa])), '00AA');
      assert.deepEqual(bytesToHex(new Uint8Array([0xaa])), 'AA');
    });
  });
  describe('compareBytes', function() {
    it('compares the bytes sequence as big endian number', function() {
      assert.equal(compareBytes([0, 1, 2], [1, 2, 3]), -1);
    });
    it('throws when the bytes sequences are of unlike length', function() {
      assert.throws(() => compareBytes([0, 1], [1]));
    });
  });
  describe('slice', function() {
    const val = [1, 2, 3, 4, 5];
    it('creates a slice of the same type as first arg', function() {
      assert(Array.isArray(slice(val)));
    });
    it('the 2nd arg is the start position [2:]', function() {
      assert.deepEqual(val.slice(2), [3, 4, 5]);
      assert.deepEqual(slice(val, 2), [3, 4, 5]);
    });
    it('the 3rd arg is the end position [2:4]', function() {
      assert.deepEqual(slice(val, 2, 4), [3, 4]);
    });
    it('can slice using negative numbers [-3:]', function() {
      assert.deepEqual(slice(val, -3), [3, 4, 5]);
    });
    it('can slice using negative numbers [-3:-1]', function() {
      assert.deepEqual(slice(val, -3, -1), [3, 4]);
    });
    it('the 4th arg is the output class type', function() {
      assert.deepEqual(slice(val, 2, 4, Buffer).toJSON().data, [3, 4]);
      assert.deepEqual(slice(val, 2, 4, Uint8Array), [3, 4]);
    });
  });
});

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,60 @@
'use strict';
const assert = require('assert-diff');
const {Hash160, Hash256, Currency, AccountID} = require('../src/coretypes');
// Basic width and comparison tests for the 20-byte hash type.
describe('Hash160', function() {
  // FIX: corrected 'membmer' typo in the test description.
  it('has a static width member', function() {
    assert.equal(Hash160.width, 20);
  });
  it('inherited by subclasses', function() {
    assert.equal(AccountID.width, 20);
    assert.equal(Currency.width, 20);
  });
  it('can be compared against another', function() {
    const h1 = Hash160.from('1000000000000000000000000000000000000000');
    const h2 = Hash160.from('2000000000000000000000000000000000000000');
    const h3 = Hash160.from('0000000000000000000000000000000000000003');
    assert(h1.lt(h2));
    assert(h3.lt(h2));
  });
});
// Width, zero constant, and nibble-access tests for the 32-byte hash type.
describe('Hash256', function() {
  // FIX: corrected 'membmer' typo in the test description.
  it('has a static width member', function() {
    assert.equal(Hash256.width, 32);
  });
  it('has a ZERO_256 member', function() {
    assert.equal(
      Hash256.ZERO_256.toJSON(),
      '0000000000000000000000000000000000000000000000000000000000000000');
  });
  it('supports getting the nibblet values at given positions', function() {
    const h = Hash256.from(
      '1359BD0000000000000000000000000000000000000000000000000000000000');
    assert.equal(h.nibblet(0), 0x1);
    assert.equal(h.nibblet(1), 0x3);
    assert.equal(h.nibblet(2), 0x5);
    assert.equal(h.nibblet(3), 0x9);
    assert.equal(h.nibblet(4), 0x0b);
    assert.equal(h.nibblet(5), 0xd);
  });
});
describe('Currency', function() {
  it('Will have a null iso() for dodgy XRP ', function() {
    // 'XRP' spelled out in the 20-byte field is not the native currency
    // encoding (all zeroes), so iso() refuses to decode it.
    const dodgy = Currency.from('0000000000000000000000005852500000000000');
    assert.equal(dodgy.iso(), null);
    assert.equal(dodgy.isNative(), false);
  });
  it('can be constructed from an Array', function() {
    // Twenty zero bytes is the canonical native-currency representation.
    const native = Currency.from(new Uint8Array(20));
    assert.equal(native.iso(), 'XRP');
  });
  it('throws on invalid reprs', function() {
    // Wrong byte length, wrong type, and a non-hex character respectively.
    const badReprs = [
      new Uint8Array(19),
      1,
      '00000000000000000000000000000000000000m'
    ];
    badReprs.forEach(repr => assert.throws(() => Currency.from(repr)));
  });
});

View File

@@ -0,0 +1,28 @@
'use strict';
const assert = require('assert');
const {loadFixture} = require('./utils');
const ledgerHashes = require('../src/ledger-hashes');
const {transactionTreeHash, ledgerHash, accountStateHash} = ledgerHashes;
describe('Ledger Hashes', function() {
  // Builds one describe() suite checking all three hashes for a fixture.
  function testFactory(fixtureName) {
    describe(`can calculate hashes for ${fixtureName}`, function() {
      const ledger = loadFixture(fixtureName);
      it('computes correct account state hash', function() {
        const computed = accountStateHash(ledger.accountState).toHex();
        assert.equal(computed, ledger.account_hash);
      });
      it('computes correct transaction tree hash', function() {
        const computed = transactionTreeHash(ledger.transactions).toHex();
        assert.equal(computed, ledger.transaction_hash);
      });
      it('computes correct ledger header hash', function() {
        assert.equal(ledgerHash(ledger).toHex(), ledger.hash);
      });
    });
  }
  testFactory('ledger-full-40000.json');
  testFactory('ledger-full-38129.json');
});

View File

@@ -1 +1 @@
--reporter spec --timeout 5000 --slow 500 --compilers js:babel/register
--reporter spec --slow 500 --compilers js:babel/register

View File

@@ -1,18 +1,18 @@
'use strict';
const assert = require('assert-diff');
const {encodeQuality, decodeQuality} = require('../src');
const {quality, binary: {bytesToHex}} = require('../src/coretypes');
describe('Quality encode/decode', function() {
const bookDirectory =
'4627DFFCFF8B5A265EDBD8AE8C14A52325DBFEDAF4F5C32E5D06F4C3362FE1D0';
const expectedQuality = '195796912.5171664';
it('can decode', function() {
const decimal = decodeQuality(bookDirectory);
assert.equal(decimal, expectedQuality);
const decimal = quality.decode(bookDirectory);
assert.equal(decimal.toString(), expectedQuality);
});
it('can encode', function() {
const hex = encodeQuality(expectedQuality);
assert.equal(hex, bookDirectory.slice(-16));
const bytes = quality.encode(expectedQuality);
assert.equal(bytesToHex(bytes), bookDirectory.slice(-16));
});
});

View File

@@ -0,0 +1,90 @@
'use strict';
const assert = require('assert-diff');
const {ShaMap} = require('../src/shamap.js');
const {binary: {serializeObject}, Hash256, HashPrefix}
= require('../src/coretypes');
const {loadFixture} = require('./utils');
// Seconds since the Unix epoch, with millisecond precision.
function now() {
  return Date.now() / 1000;
}
// Hex form of an all-zero 256-bit hash -- expected hash of an empty ShaMap.
const ZERO =
  '0000000000000000000000000000000000000000000000000000000000000000';
// Builds a [Hash256 index, item] pair for ShaMap.addItem from a hex prefix.
// The item hashes as the (zero-padded) index itself, with a dummy prefix.
function makeItem(indexArg) {
  // Right-pad the hex prefix with zeroes up to a full 64-char (256-bit) index.
  const padded = indexArg + '0'.repeat(Math.max(0, 64 - indexArg.length));
  const index = Hash256.from(padded);
  const item = {
    toBytesSink(sink) {
      index.toBytesSink(sink);
    },
    hashPrefix() {
      return [1, 3, 3, 7];
    }
  };
  return [index, item];
}
describe('ShaMap', () => {
  now(); // keeps the `now` helper referenced while its uses are commented out
  it('hashes to zero when empty', () => {
    const map = new ShaMap();
    assert.equal(map.hash().toHex(), ZERO);
  });
  it('creates the same hash no matter which order items are added', () => {
    let map = new ShaMap();
    const items = [
      '0',
      '1',
      '11',
      '7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E20000000000000000',
      '7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E21000000000000000',
      '7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E22000000000000000',
      '7000DE445E22CB9BB7E1717589FA858736BAA5FD192310E23000000000000000',
      '12',
      '122'
    ];
    items.forEach(i => map.addItem(...makeItem(i)));
    const h1 = map.hash();
    assert(h1.eq(h1)); // sanity check: eq() is reflexive
    // Rebuild in reverse insertion order; the hash must not change.
    map = new ShaMap();
    items.reverse().forEach(i => map.addItem(...makeItem(i)));
    assert(map.hash().eq(h1));
  });
  // Rebuilds the account-state ShaMap from a full ledger fixture and checks
  // its root hash against the ledger header's account_hash.
  function factory(fixture) {
    it(`recreate account state hash from ${fixture}`, () => {
      const map = new ShaMap();
      const ledger = loadFixture(fixture);
      // const t = now();
      const leafNodePrefix = HashPrefix.accountStateEntry;
      ledger.accountState.map((e, i) => {
        // Progress logging for large ledgers. Was `&` (bitwise); logical
        // && is what is meant (same result here, but correct idiom).
        if (i > 1000 && (i % 1000) === 0) {
          console.log(e.index);
          console.log(i);
        }
        const bytes = serializeObject(e);
        return {
          index: Hash256.from(e.index),
          hashPrefix() {
            return leafNodePrefix;
          },
          toBytesSink(sink) {
            sink.put(bytes);
          }
        };
      }).forEach(so => map.addItem(so.index, so));
      assert.equal(map.hash().toHex(), ledger.account_hash);
      // console.log('took seconds: ', (now() - t));
    });
  }
  factory('ledger-full-38129.json');
  factory('ledger-full-40000.json');
  // factory('ledger-4320277.json');
  // factory('14280680.json');
});

View File

@@ -0,0 +1,40 @@
'use strict';
const _ = require('lodash');
const assert = require('assert');
const coreTypes = require('../src/types');
const {SerializedType} = require('../src/types/serialized-type');
describe('SerializedType interfaces', () => {
  // Every exported core type must satisfy the common SerializedType contract.
  Object.keys(coreTypes).forEach(name => {
    const Value = coreTypes[name];
    it(`${name} has a \`from\` static constructor`, () => {
      assert(Value.from && Value.from !== Array.from);
    });
    it(`${name} has a default constructor`, () => {
      /* eslint-disable no-new*/
      new Value();
      /* eslint-enable no-new*/
    });
    it(`${name}.from will return the same object`, () => {
      const instance = new Value();
      assert(Value.from(instance) === instance);
    });
    it(`${name} instances have toBytesSink`, () => {
      assert(new Value().toBytesSink);
    });
    it(`${name} instances have toJSON`, () => {
      assert(new Value().toJSON);
    });
    it(`${name}.from(json).toJSON() == json`, () => {
      const newJSON = new Value().toJSON();
      assert.deepEqual(Value.from(newJSON).toJSON(), newJSON);
    });
    describe(`${name} supports all methods of the SerializedType mixin`, () => {
      Object.keys(SerializedType).forEach(k => {
        it(`new ${name}.prototype.${k} !== undefined`, () => {
          assert.notEqual(Value.prototype[k], undefined);
        });
      });
    });
  });
});

View File

@@ -0,0 +1,40 @@
'use strict';
const assert = require('assert');
const coreTypes = require('../src/coretypes');
/* eslint-disable no-unused-vars */
const {UInt8, UInt16, UInt32, UInt64} = coreTypes;
/* eslint-enable no-unused-vars */
// Checks compareTo() across mixed-width UInt instances and raw numbers.
function compareToTests() {
  // Evaluates `expr` and asserts it yields `expected`; the expression text
  // doubles as the test title.
  function check(expr, expected) {
    it(expr, function() {
      /* eslint-disable no-eval */
      const actual = eval(expr);
      /* eslint-enable no-eval */
      assert.equal(actual, expected);
    });
  }
  check('UInt8.from(124).compareTo(UInt64.from(124))', 0);
  check('UInt64.from(124).compareTo(UInt8.from(124))', 0);
  check('UInt64.from(124).compareTo(UInt8.from(123))', 1);
  check('UInt8.from(124).compareTo(UInt8.from(13))', 1);
  check('UInt8.from(124).compareTo(124)', 0);
  check('UInt64.from(124).compareTo(124)', 0);
  check('UInt64.from(124).compareTo(123)', 1);
  check('UInt8.from(124).compareTo(13)', 1);
}
// Verifies UInt instances coerce to numbers via valueOf in bitwise ops.
function valueOfTests() {
  it('The Uint classes work with |= operator', function() {
    let value = UInt8.from(1);
    value |= 0x2; // coerces to number: 1 | 2 === 3
    assert.equal(value, 3);
  });
}
// Register both suites under a single `Uint*` heading.
describe('Uint*', function() {
  describe('compareToTests', compareToTests);
  describe('valueOfTests', valueOfTests);
});

View File

@@ -0,0 +1,95 @@
'use strict';
const intercept = require('intercept-stdout');
const fs = require('fs');
const fsExtra = require('fs-extra');
const assert = require('assert');
const Decimal = require('decimal.js');
const {parseBytes} = require('../src/utils/bytes-utils');
// Strips every character that is not a hexadecimal digit.
function hexOnly(hex) {
  const nonHex = /[^0-9a-fA-F]/g;
  return hex.replace(nonHex, '');
}
// No-op helper; presumably used by callers to mark values as intentionally
// unused (it is exported below) -- TODO confirm against call sites.
function unused() {}
// Begins capturing stdout via intercept-stdout; the returned function stops
// the capture and yields everything written in the interim.
function captureLogsAsync() {
  let captured = '';
  const unhook = intercept(txt => {
    captured += txt;
    return ''; // suppress the original output while capturing
  });
  return function() {
    unhook();
    return captured;
  };
}
// Runs `func` with stdout captured and returns the captured text; on failure
// the captured log is dumped to stderr before rethrowing so it isn't lost.
function captureLogs(func) {
  const stopCapture = captureLogsAsync();
  try {
    func();
  } catch (e) {
    console.error(stopCapture());
    throw e;
  }
  return stopCapture();
}
// Strips non-hex characters from `hex`, then parses the remainder into `to`
// (a bytes container understood by parseBytes -- e.g. Uint8Array).
function parseHexOnly(hex, to) {
  return parseBytes(hexOnly(hex), to);
}
/**
 * Loads (and caches, via require) a JSON fixture from test/fixtures.
 * Reuses fixturePath() instead of duplicating the path-building logic.
 * @param {String} relativePath - path relative to the fixtures directory
 * @returns {Object} the parsed fixture
 */
function loadFixture(relativePath) {
  return require(fixturePath(relativePath));
}
// True for the two representations that can be written to disk verbatim
// (see writeFixture): a Buffer or a string.
function isBufferOrString(val) {
  return (typeof val === 'string') || Buffer.isBuffer(val);
}
/**
 * Reads a fixture file from test/fixtures as a UTF-8 string.
 * Reuses fixturePath() instead of duplicating the path-building logic, and
 * passes the encoding to readFileSync instead of decoding a Buffer after.
 * @param {String} relativePath - path relative to the fixtures directory
 * @returns {String} file contents
 */
function loadFixtureText(relativePath) {
  return fs.readFileSync(fixturePath(relativePath), 'utf8');
}
// Resolves a path inside the test fixtures directory.
function fixturePath(relativePath) {
  return [__dirname, 'fixtures', relativePath].join('/');
}
// Serializes `val` as JSON with two-space indentation.
function prettyJSON(val) {
  const INDENT = 2;
  return JSON.stringify(val, null, INDENT);
}
// Writes `data` into test/fixtures (creating directories as needed).
// Strings and Buffers are written verbatim; anything else is pretty-printed.
function writeFixture(relativePath, data) {
  const contents = isBufferOrString(data) ? data : prettyJSON(data);
  return fsExtra.outputFileSync(fixturePath(relativePath), contents);
}
// Asserts two amount JSON values are equal. Amounts are either strings or
// {currency, issuer, value} objects -- presumably the two ripple amount
// forms; TODO confirm. Object values compare numerically via Decimal when
// the text differs (e.g. trailing zeroes).
function assertEqualAmountJSON(actual, expected) {
  assert(typeof actual === typeof expected);
  if (typeof actual === 'string') {
    assert.equal(actual, expected);
    return;
  }
  assert.equal(actual.currency, expected.currency);
  assert.equal(actual.issuer, expected.issuer);
  const sameText = actual.value === expected.value;
  assert(sameText ||
         new Decimal(actual.value).equals(new Decimal(expected.value)));
}
module.exports = {
hexOnly,
parseHexOnly,
loadFixture,
loadFixtureText,
assertEqualAmountJSON,
writeFixture,
unused,
captureLogs,
captureLogsAsync
};

View File

@@ -0,0 +1,28 @@
'use strict';
var babel = require('babel');
// Wallaby.js continuous test runner configuration.
module.exports = function(wallaby) {
  return {
    // Source files and fixtures made available to the test environment.
    files: [
      'src/**/*.js',
      'src/enums/*.json',
      'test/utils.js',
      'examples/*.js',
      'test/fixtures/**/*.*'
    ],
    // Test entry points; examples-test is excluded -- presumably too slow or
    // environment-dependent for the continuous runner (TODO confirm).
    tests: [
      'test/*-test.js',
      '!test/examples-test.js'
    ],
    env: {
      type: 'node'
    },
    // Pin the mocha version wallaby uses (matches the ~2.1.0 devDependency).
    testFramework: 'mocha@2.1.0',
    // Transpile ES6+ sources through the project's babel instance.
    compilers: {
      '**/*.js': wallaby.compilers.babel({
        babel: babel
      })
    },
    debug: true
  };
};