Merge coretypes repo

This commit is contained in:
Chris Clark
2015-10-07 10:57:21 -07:00
parent ef0d1f5679
commit 2877d1a280
55 changed files with 8142 additions and 25 deletions

View File

@@ -0,0 +1,56 @@
/* eslint-disable func-style */
'use strict';
const types = require('./types');
const {HashPrefix} = require('./hash-prefixes');
const {BinaryParser} = require('./serdes/binary-parser');
const {BinarySerializer, BytesList} = require('./serdes/binary-serializer');
const {bytesToHex, slice, parseBytes} = require('./utils/bytes-utils');
const {sha512Half, transactionID} = require('./hashes');
// Construct a BinaryParser over the given bytes.
const makeParser = (bytes) => new BinaryParser(bytes);
// Read a complete STObject from the parser and return it as plain JSON.
const readJSON = (parser) => {
  return parser.readType(types.STObject).toJSON();
};
// Decode serialized binary straight to JSON.
const binaryToJSON = (bytes) => readJSON(makeParser(bytes));
/**
 * Serialize a JSON object to the XRP Ledger binary format.
 *
 * opts.prefix            - bytes emitted before the object (e.g. a hash prefix)
 * opts.suffix            - bytes emitted after the object
 * opts.signingFieldsOnly - when true, only fields flagged isSigningField
 *                          are serialized
 */
function serializeObject(object, opts = {}) {
  const {prefix, suffix, signingFieldsOnly = false} = opts;
  const sink = new BytesList();
  if (prefix) {
    sink.put(prefix);
  }
  const filter = signingFieldsOnly ? (f) => f.isSigningField : undefined;
  types.STObject.from(object).toBytesSink(sink, filter);
  if (suffix) {
    sink.put(suffix);
  }
  return sink.toBytes();
}
/**
 * Serialize the signing-relevant fields of `tx`, prefixed for
 * single-signing (HashPrefix.transactionSig by default).
 */
function signingData(tx, prefix = HashPrefix.transactionSig) {
  const opts = {prefix, signingFieldsOnly: true};
  return serializeObject(tx, opts);
}
/**
 * Serialize signing fields for multi-signing: the multi-sign hash prefix,
 * then the signing fields, then the signing account's ID as a suffix.
 */
function multiSigningData(tx, signingAccount) {
  const suffix = types.AccountID.from(signingAccount).toBytes();
  return serializeObject(tx, {
    prefix: HashPrefix.transactionMultiSig,
    suffix,
    signingFieldsOnly: true
  });
}
// Public interface of the binary codec: parser/serializer classes plus
// the convenience helpers defined above and re-exported byte utilities.
module.exports = {
  BinaryParser,
  BinarySerializer,
  BytesList,
  makeParser,
  serializeObject,
  readJSON,
  bytesToHex,
  parseBytes,
  multiSigningData,
  signingData,
  binaryToJSON,
  sha512Half,
  transactionID,
  slice
};

View File

@@ -0,0 +1,27 @@
'use strict';
const _ = require('lodash');
const enums = require('./enums');
const {Field} = enums;
const types = require('./types');
const binary = require('./binary');
const {ShaMap} = require('./shamap');
const ledgerHashes = require('./ledger-hashes');
const hashes = require('./hashes');
const quality = require('./quality');
const signing = require('./signing');
const {HashPrefix} = require('./hash-prefixes');
// Aggregate public API: hashing helpers (generic plus ledger-specific
// overlaid on top), the binary codec, enums, signing and quality
// utilities, with every serialized type spread at the top level.
module.exports = _.assign({
  hashes: _.assign({}, hashes, ledgerHashes),
  binary,
  enums,
  signing,
  quality,
  Field,
  HashPrefix,
  ShaMap
},
  types
);

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,127 @@
'use strict';
const assert = require('assert');
const _ = require('lodash');
const {parseBytes, serializeUIntN} = require('./../utils/bytes-utils');
const makeClass = require('./../utils/make-class');
const enums = require('./definitions.json');
// Run lodash's transform with the iteratee first, for readability at
// call sites that pass a named builder function.
function transformWith(func, obj) {
  const transformed = _.transform(obj, func);
  return transformed;
}
// Index each value under both its original key and value[valueKey], so
// lookups work by name and by ordinal alike.
function biMap(source, valueKey) {
  return _.transform(source, (acc, entry, name) => {
    acc[name] = entry;
    acc[entry[valueKey]] = entry;
  });
}
// Base class for enum-like types (Type, LedgerEntryType, TransactionType,
// TransactionResult, Field). Each member knows its name, its numeric
// ordinal and its serialized byte form.
const EnumType = makeClass({
  EnumType(definition) {
    _.assign(this, definition);
    // At minimum every member must carry these three properties.
    assert(this.bytes instanceof Uint8Array);
    assert(typeof this.ordinal === 'number');
    assert(typeof this.name === 'string');
  },
  toString() {
    return this.name;
  },
  toJSON() {
    return this.name;
  },
  // Serialize by emitting the member's precomputed bytes.
  toBytesSink(sink) {
    sink.put(this.bytes);
  },
  statics: {
    // How many bytes the ordinal occupies on the wire (overridden per enum).
    ordinalByteWidth: 1,
    fromParser(parser) {
      return this.from(parser.readUIntN(this.ordinalByteWidth));
    },
    // Accept an existing member, a name, or an ordinal (both are indexed
    // on the class by init() below).
    from(val) {
      const ret = val instanceof this ? val : this[val];
      if (!ret) {
        throw new Error(
          `${val} is not a valid name or ordinal for ${this.enumName}`);
      }
      return ret;
    },
    // Build one member per entry of `initVals` ({name: ordinal}).
    valuesByName() {
      return _.transform(this.initVals, (result, ordinal, name) => {
        const bytes = serializeUIntN(ordinal, this.ordinalByteWidth);
        const type = new this({name, ordinal, bytes});
        result[name] = type;
      });
    },
    // Called at class-definition time: index members by both name and
    // ordinal, and expose the full list as `values`.
    init() {
      const mapped = this.valuesByName();
      _.assign(this, biMap(mapped, 'ordinal'));
      this.values = _.values(mapped);
      return this;
    }
  }
});
// Build one concrete enum class with the given statics merged in.
// Note: _.assign mutates `definition`, matching the original contract.
function makeEnum(name, definition) {
  const statics = _.assign(definition, {enumName: name});
  return makeClass({inherits: EnumType, statics});
}
// Iteratee for _.transform: (accumulator, value, key).
function makeEnums(target, definition, name) {
  target[name] = makeEnum(name, definition);
}
// The concrete enums, built from definitions.json. Ordinal byte widths
// match the on-wire sizes (2 bytes for ledger-entry and transaction
// types, 1 byte otherwise).
const Enums = transformWith(makeEnums, {
  Type: {
    initVals: enums.TYPES
  },
  LedgerEntryType: {
    initVals: enums.LEDGER_ENTRY_TYPES, ordinalByteWidth: 2
  },
  TransactionType: {
    initVals: enums.TRANSACTION_TYPES, ordinalByteWidth: 2
  },
  TransactionResult: {
    initVals: enums.TRANSACTION_RESULTS, ordinalByteWidth: 1
  }
});
// Field is a special enum: members come from enums.FIELDS ([name,
// definition] pairs) and each carries its associated Type plus the
// precomputed field-header bytes.
Enums.Field = makeClass({
  inherits: EnumType,
  statics: {
    enumName: 'Field',
    initVals: enums.FIELDS,
    valuesByName() {
      const fields = _.map(this.initVals, ([name, definition]) => {
        const type = Enums.Type[definition.type];
        const bytes = this.header(type.ordinal, definition.nth);
        // Pack the type code and the field's `nth` into one ordinal,
        // mirroring BinaryParser.readFieldOrdinal.
        const ordinal = type.ordinal << 16 | definition.nth;
        const extra = {ordinal, name, type, bytes};
        return new this(_.assign(definition, extra));
      });
      return _.indexBy(fields, 'name');
    },
    // Compute the 1-3 byte field header. Codes below 16 share a single
    // byte (type in the high nibble, nth in the low one); larger codes
    // spill into their own trailing byte(s).
    header(type, nth) {
      const name = nth;
      const header = [];
      const push = header.push.bind(header);
      if (type < 16) {
        if (name < 16) {
          push(type << 4 | name);
        } else {
          push(type << 4, name);
        }
      } else if (name < 16) {
        push(name, type);
      } else {
        push(0, type, name);
      }
      return parseBytes(header, Uint8Array);
    }
  }
});
module.exports = Enums;

View File

@@ -0,0 +1,31 @@
'use strict';
const {serializeUIntN} = require('./utils/bytes-utils');
// Hash prefixes are serialized as 4-byte big-endian unsigned integers.
function bytes(value) {
  return serializeUIntN(value, 4);
}
// 4-byte prefixes mixed into SHA-512Half hashes so hashes of different
// object kinds can never collide. Each constant is the big-endian
// encoding of a 3-letter ASCII mnemonic followed by a zero byte.
const HashPrefix = {
  // transaction ID ('TXN\0')
  transactionID: bytes(0x54584E00),
  // transaction plus metadata ('SND\0')
  transaction: bytes(0x534E4400),
  // account state entry ('MLN\0')
  accountStateEntry: bytes(0x4D4C4E00),
  // inner node in a SHAMap tree ('MIN\0')
  innerNode: bytes(0x4D494E00),
  // ledger header, for signing/hashing ('LWR\0')
  ledgerHeader: bytes(0x4C575200),
  // inner transaction, single-signing ('STX\0')
  transactionSig: bytes(0x53545800),
  // inner transaction, multi-signing ('SMT\0')
  transactionMultiSig: bytes(0x534D5400),
  // validation for signing ('VAL\0')
  validation: bytes(0x56414C00),
  // proposal for signing ('PRP\0')
  proposal: bytes(0x50525000)
};
module.exports = {
  HashPrefix
};

View File

@@ -0,0 +1,45 @@
'use strict';
const makeClass = require('./utils/make-class');
const {HashPrefix} = require('./hash-prefixes');
const {Hash256} = require('./types');
const {parseBytes} = require('./utils/bytes-utils');
const createHash = require('create-hash');
// Incremental SHA-512 hasher whose digest is truncated to the first
// 256 bits ("SHA-512Half").
const Sha512Half = makeClass({
  Sha512Half() {
    this.hash = createHash('sha512');
  },
  statics: {
    // Convenience: a new hasher pre-fed with `bytes`.
    put(bytes) {
      return new this().put(bytes);
    }
  },
  // Feed more bytes; chainable.
  put(bytes) {
    this.hash.update(parseBytes(bytes, Buffer));
    return this;
  },
  // Raw digest truncated to 32 bytes.
  finish256() {
    const bytes = this.hash.digest();
    return bytes.slice(0, 32);
  },
  finish() {
    return new Hash256(this.finish256());
  }
});
// One-shot SHA-512Half over the concatenation of all arguments.
function sha512Half(...args) {
  const hasher = new Sha512Half();
  for (const arg of args) {
    hasher.put(arg);
  }
  return parseBytes(hasher.finish256(), Uint8Array);
}
function transactionID(serialized) {
return new Hash256(sha512Half(HashPrefix.transactionID, serialized));
}
module.exports = {
Sha512Half,
sha512Half,
transactionID
};

View File

@@ -1,7 +1,7 @@
'use strict';
const assert = require('assert');
const coreTypes = require('@niq/ripple-core');
const coreTypes = require('./coretypes');
const {quality,
binary: {bytesToHex,
signingData,

View File

@@ -0,0 +1,74 @@
'use strict';
const _ = require('lodash');
const BN = require('bn.js');
const assert = require('assert');
const types = require('./types');
const {STObject, Hash256} = types;
const {ShaMap} = require('./shamap');
const {HashPrefix} = require('./hash-prefixes');
const {Sha512Half} = require('./hashes');
const {BinarySerializer, serializeObject} = require('./binary');
// Build a SHAMap from itemsJson (each converted to an [index, item]
// pair by `itemizer`) and return its root hash.
function computeHash(itemizer, itemsJson) {
  const map = new ShaMap();
  for (const json of itemsJson) {
    map.addItem(...itemizer(json));
  }
  return map.hash();
}
// [index, item] pair for a transaction-with-metadata SHAMap leaf. Both
// the transaction and its metadata are written length-prefixed.
function transactionItem(json) {
  assert(json.hash);
  return [
    Hash256.from(json.hash),
    {
      hashPrefix() {
        return HashPrefix.transaction;
      },
      toBytesSink(sink) {
        const serializer = new BinarySerializer(sink);
        serializer.writeLengthEncoded(STObject.from(json));
        serializer.writeLengthEncoded(STObject.from(json.metaData));
      }
    }
  ];
}
// [index, item] pair for an account-state SHAMap leaf; the entry is
// serialized eagerly and replayed into the sink on demand.
function entryItem(json) {
  const index = Hash256.from(json.index);
  const serialized = serializeObject(json);
  return [
    index,
    {
      hashPrefix() {
        return HashPrefix.accountStateEntry;
      },
      toBytesSink(sink) {
        sink.put(serialized);
      }
    }
  ];
}
// SHAMap root hashes over a ledger's transactions / account state.
const transactionTreeHash = _.partial(computeHash, transactionItem);
const accountStateHash = _.partial(computeHash, entryItem);
// Hash a ledger header. The field order below is part of the hashing
// format and must not change.
function ledgerHash(header) {
  const hash = new Sha512Half();
  hash.put(HashPrefix.ledgerHeader);
  assert(header.parent_close_time !== undefined);
  assert(header.close_flags !== undefined);
  types.UInt32.from(header.ledger_index).toBytesSink(hash);
  types.UInt64.from(new BN(header.total_coins)).toBytesSink(hash);
  types.Hash256.from(header.parent_hash).toBytesSink(hash);
  types.Hash256.from(header.transaction_hash).toBytesSink(hash);
  types.Hash256.from(header.account_hash).toBytesSink(hash);
  types.UInt32.from(header.parent_close_time).toBytesSink(hash);
  types.UInt32.from(header.close_time).toBytesSink(hash);
  types.UInt8.from(header.close_time_resolution).toBytesSink(hash);
  types.UInt8.from(header.close_flags).toBytesSink(hash);
  return hash.finish();
}
module.exports = {
  accountStateHash,
  transactionTreeHash,
  ledgerHash
};

View File

@@ -0,0 +1,22 @@
'use strict';
const Decimal = require('decimal.js');
const {bytesToHex, slice, parseBytes} = require('./utils/bytes-utils');
const {UInt64} = require('./types');
const BN = require('bn.js');
// Encode/decode order-book "quality" values. Wire format is 8 bytes:
// byte 0 holds the decimal exponent biased by +100; the remaining bytes
// are the big-endian mantissa scaled to a 16-significant-digit integer.
module.exports = {
  encode(arg) {
    const quality = arg instanceof Decimal ? arg : new Decimal(arg);
    const exponent = quality.e - 15;
    const qualityString = quality.times('1e' + -exponent).abs().toString();
    const bytes = new UInt64(new BN(qualityString)).toBytes();
    // Overwrite the top mantissa byte with the biased exponent.
    bytes[0] = exponent + 100;
    return bytes;
  },
  decode(arg) {
    // Only the trailing 8 bytes carry the quality.
    const bytes = slice(parseBytes(arg), -8);
    const exponent = bytes[0] - 100;
    const mantissa = new Decimal(bytesToHex(slice(bytes, 1)), 16);
    return mantissa.times('1e' + exponent);
  }
};

View File

@@ -0,0 +1,101 @@
'use strict';
const assert = require('assert');
const makeClass = require('../utils/make-class');
const {Field} = require('../enums');
const {slice, parseBytes} = require('../utils/bytes-utils');
// Cursor-based reader over an immutable byte buffer. Understands the
// primitive integer reads, variable-length (VL) prefixes and field
// headers used by the XRP Ledger binary format.
const BinaryParser = makeClass({
  BinaryParser(buf) {
    this._buf = parseBytes(buf, Uint8Array);
    this._length = this._buf.length;
    this._cursor = 0;
  },
  // Advance the cursor without reading.
  skip(n) {
    this._cursor += n;
  },
  // Read `n` bytes into a new container of type `to`.
  read(n, to = Uint8Array) {
    const start = this._cursor;
    const end = this._cursor + n;
    assert(end <= this._buf.length);
    this._cursor = end;
    return slice(this._buf, start, end, to);
  },
  // Big-endian unsigned integer of `n` bytes (n <= 4; >>> 0 keeps the
  // 4-byte case unsigned).
  readUIntN(n) {
    return this.read(n, Array).reduce((a, b) => a << 8 | b) >>> 0;
  },
  readUInt8() {
    return this._buf[this._cursor++];
  },
  readUInt16() {
    return this.readUIntN(2);
  },
  readUInt32() {
    return this.readUIntN(4);
  },
  pos() {
    return this._cursor;
  },
  size() {
    return this._buf.length;
  },
  // True when the cursor reached the end of the buffer, or `customEnd`
  // when one is given. (Called with no argument, the second clause
  // compares against undefined and is always false.)
  end(customEnd) {
    const cursor = this.pos();
    return (cursor >= this._length) || (customEnd !== null &&
      cursor >= customEnd);
  },
  // Read a VL-prefixed blob.
  readVL() {
    return this.read(this.readVLLength());
  },
  // Decode the 1-3 byte variable-length prefix: lengths <= 192 use one
  // byte, <= 12480 two bytes, larger lengths three bytes.
  readVLLength() {
    const b1 = this.readUInt8();
    if (b1 <= 192) {
      return b1;
    } else if (b1 <= 240) {
      const b2 = this.readUInt8();
      return 193 + (b1 - 193) * 256 + b2;
    } else if (b1 <= 254) {
      const b2 = this.readUInt8();
      const b3 = this.readUInt8();
      return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
    }
    throw new Error('Invalid varint length indicator');
  },
  // Decode a field header: a zero nibble means the code is carried in a
  // following byte. The result packs type << 16 | nth, matching
  // Field.ordinal.
  readFieldOrdinal() {
    const tagByte = this.readUInt8();
    const type = (tagByte & 0xF0) >>> 4 || this.readUInt8();
    const nth = tagByte & 0x0F || this.readUInt8();
    return type << 16 | nth;
  },
  readField() {
    return Field.from(this.readFieldOrdinal());
  },
  readType(type) {
    return type.fromParser(this);
  },
  typeForField(field) {
    return field.associatedType;
  },
  // Read one field's value; VL-encoded fields pass their decoded byte
  // length to the type's fromParser as a size hint.
  readFieldValue(field) {
    const kls = this.typeForField(field);
    if (!kls) {
      throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
    }
    const sizeHint = field.isVLEncoded ? this.readVLLength() : null;
    const value = kls.fromParser(this, sizeHint);
    if (value === undefined) {
      throw new Error(
        `fromParser for (${field.name}, ${field.type.name}) -> undefined `);
    }
    return value;
  },
  readFieldAndValue() {
    const field = this.readField();
    return [field, this.readFieldValue(field)];
  }
});
module.exports = {
  BinaryParser
};

View File

@@ -0,0 +1,109 @@
'use strict';
const assert = require('assert');
const {parseBytes, bytesToHex} = require('../utils/bytes-utils');
const makeClass = require('../utils/make-class');
const {Type, Field} = require('../enums');
// Duck-typed sink interface: anything with put(bytes) works.
const BytesSink = {
  put(/* bytesSequence */) {
    // any hex string or any object with a `length` and where 0 <= [ix] <= 255
  }
};
// Growable list of byte chunks, concatenated on demand. Doubles as a
// BytesSink so serializers can write straight into it.
const BytesList = makeClass({
  implementing: BytesSink,
  BytesList() {
    this.arrays = [];
    this.length = 0;
  },
  // Append a chunk (any parseBytes-able value); chainable.
  put(bytesArg) {
    const bytes = parseBytes(bytesArg, Uint8Array);
    this.length += bytes.length;
    this.arrays.push(bytes);
    return this;
  },
  // Replay all chunks into another sink.
  toBytesSink(sink) {
    this.arrays.forEach(arr => {
      sink.put(arr);
    });
  },
  // Concatenate all chunks into a single Uint8Array.
  toBytes() {
    const concatenated = new Uint8Array(this.length);
    let pointer = 0;
    this.arrays.forEach(arr => {
      concatenated.set(arr, pointer);
      pointer += arr.length;
    });
    return concatenated;
  },
  toHex() {
    return bytesToHex(this.toBytes());
  }
});
// Writes typed values and field/value pairs into a BytesSink using the
// XRP Ledger binary format.
const BinarySerializer = makeClass({
  BinarySerializer(sink) {
    this.sink = sink;
  },
  write(value) {
    value.toBytesSink(this.sink);
  },
  put(bytes) {
    this.sink.put(bytes);
  },
  writeType(type, value) {
    this.write(type.from(value));
  },
  writeBytesList(bl) {
    bl.toBytesSink(this.sink);
  },
  // Encode a variable-length prefix: 1 byte for lengths <= 192, 2 bytes
  // up to 12480, 3 bytes up to 918744; anything larger throws.
  encodeVL(len) {
    let length = len;
    const lenBytes = new Uint8Array(4);
    if (length <= 192) {
      lenBytes[0] = length;
      return lenBytes.subarray(0, 1);
    } else if (length <= 12480) {
      length -= 193;
      lenBytes[0] = 193 + (length >>> 8);
      lenBytes[1] = length & 0xff;
      return lenBytes.subarray(0, 2);
    } else if (length <= 918744) {
      length -= 12481;
      lenBytes[0] = 241 + (length >>> 16);
      lenBytes[1] = (length >> 8) & 0xff;
      lenBytes[2] = length & 0xff;
      return lenBytes.subarray(0, 3);
    }
    throw new Error('Overflow error');
  },
  // Emit a field header followed by its serialized value. VL-encoded
  // fields get a length prefix; ST containers get a trailing end marker.
  writeFieldAndValue(field, _value) {
    const sink = this.sink;
    const value = field.associatedType.from(_value);
    assert(value.toBytesSink, field);
    sink.put(field.bytes);
    if (field.isVLEncoded) {
      this.writeLengthEncoded(value);
    } else {
      value.toBytesSink(sink);
      if (field.type === Type.STObject) {
        sink.put(Field.ObjectEndMarker.bytes);
      } else if (field.type === Type.STArray) {
        sink.put(Field.ArrayEndMarker.bytes);
      }
    }
  },
  // Serialize `value` into a temporary list first so its byte length is
  // known, then emit prefix + bytes.
  writeLengthEncoded(value) {
    const bytes = new BytesList();
    value.toBytesSink(bytes);
    this.put(this.encodeVL(bytes.length));
    this.writeBytesList(bytes);
  }
});
module.exports = {
  BytesList,
  BinarySerializer
};

View File

@@ -0,0 +1,110 @@
'use strict';
const assert = require('assert');
const makeClass = require('./utils/make-class');
const {Hash256} = require('./types');
const {HashPrefix} = require('./hash-prefixes');
const {Sha512Half: Hasher} = require('./hashes');
// Abstract SHAMap node. Subclasses say whether they are a leaf or an
// inner node and which hash prefix guards their hash.
const ShaMapNode = makeClass({
  virtuals: {
    hashPrefix() {},
    isLeaf() {},
    isInner() {}
  },
  cached: {
    // node hash = SHA-512Half(prefix + serialized node), memoized.
    hash() {
      const hasher = Hasher.put(this.hashPrefix());
      this.toBytesSink(hasher);
      return hasher.finish();
    }
  }
});
// Leaf node: an (index, item) pair. The item supplies the hash prefix
// and its own serialization; the index is appended after the item.
const ShaMapLeaf = makeClass({
  inherits: ShaMapNode,
  ShaMapLeaf(index, item) {
    ShaMapNode.call(this);
    this.index = index;
    this.item = item;
  },
  isLeaf() {
    return true;
  },
  isInner() {
    return false;
  },
  hashPrefix() {
    return this.item.hashPrefix();
  },
  toBytesSink(sink) {
    this.item.toBytesSink(sink);
    this.index.toBytesSink(sink);
  }
});
// Saved superclass prototype so ShaMapInner.hash can defer to the
// cached ShaMapNode implementation for non-empty nodes.
const $uper = ShaMapNode.prototype;
// Inner node: 16 branches addressed by one nibble of the index at this
// node's depth.
const ShaMapInner = makeClass({
  inherits: ShaMapNode,
  ShaMapInner(depth = 0) {
    ShaMapNode.call(this);
    this.depth = depth;
    this.slotBits = 0;
    this.branches = Array(16);
  },
  isInner() {
    return true;
  },
  isLeaf() {
    return false;
  },
  hashPrefix() {
    return HashPrefix.innerNode;
  },
  setBranch(slot, branch) {
    // Track occupied slots in a 16-bit mask.
    this.slotBits = this.slotBits | (1 << slot);
    this.branches[slot] = branch;
  },
  empty() {
    return this.slotBits === 0;
  },
  // An empty inner node hashes to all zeroes instead of prefix+branches.
  hash() {
    if (this.empty()) {
      return Hash256.ZERO_256;
    }
    return $uper.hash.call(this);
  },
  // Serialization is the 16 branch hashes (zero hash for empty slots).
  toBytesSink(sink) {
    for (let i = 0; i < this.branches.length; i++) {
      const branch = this.branches[i];
      const hash = branch ? branch.hash() : Hash256.ZERO_256;
      hash.toBytesSink(sink);
    }
  },
  // Insert an item (or a prebuilt leaf). On collision with an existing
  // leaf, grow a deeper inner node holding both entries.
  addItem(index, item, leaf) {
    assert(index instanceof Hash256);
    const nibble = index.nibblet(this.depth);
    const existing = this.branches[nibble];
    if (!existing) {
      this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
    } else if (existing.isLeaf()) {
      const newInner = new ShaMapInner(this.depth + 1);
      newInner.addItem(existing.index, null, existing);
      newInner.addItem(index, item, leaf);
      this.setBranch(nibble, newInner);
    } else if (existing.isInner()) {
      existing.addItem(index, item, leaf);
    } else {
      assert(false);
    }
  }
});
// The map itself is just a depth-0 inner node.
const ShaMap = makeClass({
  inherits: ShaMapInner
});
module.exports = {
  ShaMap
};

View File

@@ -0,0 +1,67 @@
'use strict';
/* eslint-disable func-style */
const _ = require('lodash');
const {AccountID} = require('./types');
const binary = require('./binary');
const {
serializeObject,
bytesToHex,
multiSigningData,
transactionID,
signingData
} = binary;
// 0x80000000 — presumably the tfFullyCanonicalSig flag (confirm against
// rippled); it is OR-ed into every signed transaction's Flags below.
const FULL_CANONICAL_SIGNATURE = 0x80000000;
const toHex = v => bytesToHex(v);
// Extract a Signer entry's account for comparison.
const getSigner = (o) => AccountID.from(o.Signer.Account);
// Multi-sign entries must be sorted by signer account ID.
const signerComparator = (a, b) => getSigner(a).compareTo(getSigner(b));
/**
 * OR the full-canonical-signature flag into tx_json.Flags, normalizing
 * the result to an unsigned 32-bit integer.
 */
function setCanonicalSignatureFlag(tx_json) {
  const flags = (tx_json.Flags | FULL_CANONICAL_SIGNATURE) >>> 0;
  tx_json.Flags = flags;
}
// Package a signed transaction as {tx_json, tx_blob, hash}.
function serializedBundle(tx_json) {
  const serialized = serializeObject(tx_json);
  const hash = transactionID(serialized).toHex();
  return {tx_json, tx_blob: toHex(serialized), hash};
}
/**
 * Add one multi-signature to a transaction.
 *
 * tx_json_       - transaction JSON (not mutated)
 * keyPair        - signing key pair ({id, sign, publicBytes})
 * signingAccount - optional account to sign as; defaults to keyPair.id()
 *
 * Returns {tx_json, tx_blob, hash} with the new Signer entry appended
 * and all Signers sorted by account ID.
 */
function signFor(tx_json_, keyPair, signingAccount = null) {
  const tx_json = _.clone(tx_json_);
  tx_json.SigningPubKey = '';
  setCanonicalSignatureFlag(tx_json);
  const signerID = signingAccount || keyPair.id();
  const signature = keyPair.sign(multiSigningData(tx_json, signerID));
  const signer = {
    Signer: {
      SigningPubKey: toHex(keyPair.publicBytes()),
      TxnSignature: toHex(signature),
      Account: signerID
    }
  };
  // _.clone is shallow, so copy the Signers array before mutating it;
  // otherwise the push/sort below would mutate the caller's tx_json_
  // (visible when signFor is called repeatedly on accumulated results).
  const signers = tx_json.Signers = (tx_json.Signers || []).slice();
  signers.push(signer);
  signers.sort(signerComparator);
  return serializedBundle(tx_json);
}
function sign(tx_json_, keyPair) {
const tx_json = _.clone(tx_json_);
setCanonicalSignatureFlag(tx_json);
tx_json.SigningPubKey = toHex(keyPair.publicBytes());
tx_json.TxnSignature = toHex(keyPair.sign(signingData(tx_json)));
return serializedBundle(tx_json);
}
module.exports = {
signFor,
sign
};

View File

@@ -0,0 +1,44 @@
'use strict';
const makeClass = require('../utils/make-class');
const {decodeAccountID, encodeAccountID} = require('ripple-address-codec');
const {Hash160} = require('./hash-160');
// 160-bit account identifier. Accepts raw bytes or a base58 r-address.
const AccountID = makeClass({
  AccountID(bytes) {
    Hash160.call(this, bytes);
  },
  inherits: Hash160,
  statics: {
    // Value may be an AccountID, an r-prefixed base58 string, or bytes.
    from(value) {
      return value instanceof this ? value :
        /^r/.test(value) ? this.fromBase58(value) :
        new this(value);
    },
    // Class-level memo of base58 -> AccountID decodes.
    cache: {},
    fromCache(base58) {
      let cached = this.cache[base58];
      if (!cached) {
        cached = this.cache[base58] = this.fromBase58(base58);
      }
      return cached;
    },
    fromBase58(value) {
      const acc = new this(decodeAccountID(value));
      // Stash the original encoding (presumably consumed by the
      // `cached` toBase58 machinery, avoiding a re-encode).
      acc._toBase58 = value;
      return acc;
    }
  },
  toJSON() {
    return this.toBase58();
  },
  cached: {
    toBase58() {
      return encodeAccountID(this._bytes);
    }
  }
});
module.exports = {
  AccountID
};

View File

@@ -0,0 +1,137 @@
'use strict';
const _ = require('lodash');
const assert = require('assert');
const BN = require('bn.js');
const Decimal = require('decimal.js');
const makeClass = require('../utils/make-class');
const {SerializedType} = require('./serialized-type');
const {bytesToHex} = require('../utils/bytes-utils');
const {Currency} = require('./currency');
const {AccountID} = require('./account-id');
const {UInt64} = require('./uint-64');
// Widen decimal.js' exponential-notation thresholds so amount values
// round-trip through toString without e-notation.
Decimal.config({
  toExpPos: 32,
  toExpNeg: -32
});
function isDefined(val) {
  return !_.isUndefined(val);
}
// Dispatch table keyed by `typeof value`: strings are XRP drop counts,
// objects are IOU {value, currency, issuer} triples.
const parsers = {
  string(str) {
    // XRP is given in drops; held internally as whole XRP.
    return [new Decimal(str).dividedBy('1e6'), Currency.XRP];
  },
  object(object) {
    assert(isDefined(object.currency), 'currency must be defined');
    assert(isDefined(object.issuer), 'issuer must be defined');
    return [new Decimal(object.value),
      Currency.from(object.currency),
      AccountID.from(object.issuer)];
  }
};
// An Amount: native XRP or an IOU with currency + issuer. Wire format
// is 8 mantissa bytes (top bits of byte 0: 0x80 = IOU, 0x40 = positive),
// followed for IOUs by 20 currency bytes and 20 issuer bytes.
const Amount = makeClass({
  Amount(value, currency, issuer) {
    this.value = value || new Decimal('0');
    this.currency = currency || Currency.XRP;
    this.issuer = issuer || null;
  },
  mixins: SerializedType,
  statics: {
    from(value) {
      if (value instanceof this) {
        return value;
      }
      const parser = parsers[typeof value];
      if (parser) {
        return new this(...parser(value));
      }
      throw new Error(`unsupported value: ${value}`);
    },
    fromParser(parser) {
      const mantissa = parser.read(8);
      const b1 = mantissa[0];
      const b2 = mantissa[1];
      const isIOU = b1 & 0x80;
      const isPositive = b1 & 0x40;
      const sign = isPositive ? '+' : '-';
      if (isIOU) {
        mantissa[0] = 0;
        const currency = parser.readType(Currency);
        const issuer = parser.readType(AccountID);
        // Exponent is the 8 bits straddling bytes 0-1, biased by 97.
        const exponent = ((b1 & 0x3F) << 2) + ((b2 & 0xff) >> 6) - 97;
        mantissa[1] &= 0x3F;
        // decimal.js won't accept e notation with hex
        const value = new Decimal(sign + bytesToHex(mantissa), 16)
          .times('1e' + exponent);
        return new this(value, currency, issuer);
      }
      // Native XRP: the remaining bits are a drop count.
      mantissa[0] &= 0x3F;
      const drops = new Decimal(sign + bytesToHex(mantissa), 16);
      const xrpValue = drops.dividedBy('1e6');
      return new this(xrpValue, Currency.XRP);
    }
  },
  isNative() {
    return this.currency.isNative();
  },
  // Mantissa as a UInt64: the value scaled to an integer by -exponent().
  mantissa() {
    return new UInt64(
      new BN(this.value.times('1e' + -this.exponent()).abs().toString()));
  },
  isZero() {
    return this.value.isZero();
  },
  exponent() {
    return this.isNative() ? -6 : this.value.e - 15;
  },
  // JSON value: drops for XRP, plain decimal string for IOUs.
  valueString() {
    return (this.isNative() ? this.value.times('1e6') : this.value)
      .toString();
  },
  toBytesSink(sink) {
    const isNative = this.isNative();
    const notNegative = !this.value.isNegative();
    const mantissa = this.mantissa().toBytes();
    if (isNative) {
      // XRP: drops with the 0x40 bit set when non-negative.
      mantissa[0] |= notNegative ? 0x40 : 0;
      sink.put(mantissa);
    } else {
      mantissa[0] |= 0x80;
      if (!this.isZero()) {
        if (notNegative) {
          mantissa[0] |= 0x40;
        }
        // Store exponent + 97 across the top of bytes 0 and 1.
        const exponent = this.value.e - 15;
        const exponentByte = 97 + exponent;
        mantissa[0] |= (exponentByte >>> 2);
        mantissa[1] |= (exponentByte & 0x03) << 6;
      }
      sink.put(mantissa);
      this.currency.toBytesSink(sink);
      this.issuer.toBytesSink(sink);
    }
  },
  toJSON() {
    const valueString = this.valueString();
    if (this.isNative()) {
      return valueString;
    }
    return {
      value: valueString,
      currency: this.currency.toJSON(),
      issuer: this.issuer.toJSON()
    };
  }
});
module.exports = {
  Amount
};

View File

@@ -0,0 +1,31 @@
'use strict';
const makeClass = require('../utils/make-class');
const {parseBytes} = require('../utils/bytes-utils');
const {SerializedType} = require('./serialized-type');
const Blob = makeClass({
mixins: SerializedType,
Blob(bytes) {
if (bytes) {
this._bytes = parseBytes(bytes, Uint8Array);
} else {
this._bytes = new Uint8Array(0);
}
},
statics: {
fromParser(parser, hint) {
return new this(parser.read(hint));
},
from(value) {
if (value instanceof this) {
return value;
}
return new this(value);
}
}
});
module.exports = {
Blob
};

View File

@@ -0,0 +1,94 @@
'use strict';
const _ = require('lodash');
const makeClass = require('../utils/make-class');
const {slice} = require('../utils/bytes-utils');
const {Hash160} = require('./hash-160');
// 3-character uppercase alphanumeric ISO-style currency code.
const ISO_REGEX = /^[A-Z0-9]{3}$/;
// 40 uppercase hex chars = the full 20-byte currency code.
const HEX_REGEX = /^[A-F0-9]{40}$/;
// Expand a 3-char code into the 160-bit currency layout: the code
// occupies bytes 12-14, everything else is zero. 'XRP' maps to the
// all-zero code.
function isoToBytes(iso) {
  const bytes = new Uint8Array(20);
  if (iso === 'XRP') {
    return bytes;
  }
  const codes = iso.split('').map((char) => char.charCodeAt(0));
  bytes.set(codes, 12);
  return bytes;
}
// NOTE(review): only the length is checked, not ISO_REGEX (left here
// disabled in the original) — any 3-char string, including lowercase or
// symbols, is accepted. Confirm intent before tightening.
function isISOCode(val) {
  return val.length === 3;
}
// True for exactly 40 uppercase hex characters (a full 20-byte code).
function isHex(val) {
  return /^[A-F0-9]{40}$/.test(val);
}
// A string repr is either a 3-char code or 40 hex chars.
function isStringRepr(val) {
  if (!_.isString(val)) {
    return false;
  }
  return isISOCode(val) || isHex(val);
}
// Anything array-like with exactly 20 entries counts as raw bytes.
function isBytesArray(val) {
  const {length} = val;
  return length === 20;
}
// Valid currency representations: string form or a 20-byte array.
function isValidRepr(val) {
  if (isStringRepr(val)) {
    return true;
  }
  return isBytesArray(val);
}
// Convert any accepted representation to bytes. After validation the
// value's length is 3 (ISO code), 20 (raw bytes) or 40 (hex string);
// only the 3-char case needs expansion here.
function bytesFromRepr(val) {
  if (!isValidRepr(val)) {
    throw new Error(`Unsupported Currency repr: ${val}`);
  }
  return val.length === 3 ? isoToBytes(val) : val;
}
// Saved Hash160 prototype so toJSON can fall back to hex output.
const $uper = Hash160.prototype;
// 160-bit currency code, with special handling for 3-letter ISO-style
// codes (stored in bytes 12-14) and the all-zero native XRP code.
const Currency = makeClass({
  inherits: Hash160,
  getters: ['isNative', 'iso'],
  statics: {
    init() {
      this.XRP = new this(new Uint8Array(20));
    },
    from(val) {
      return val instanceof this ? val : new this(bytesFromRepr(val));
    }
  },
  Currency(bytes) {
    Hash160.call(this, bytes);
    this.classify();
  },
  classify() {
    // We only have a non null iso() property available if the currency can be
    // losslessly represented by the 3 letter iso code. If none is available a
    // hex encoding of the full 20 bytes is the canonical representation.
    let onlyISO = true;
    const bytes = this._bytes;
    const code = slice(this._bytes, 12, 15, Array);
    const iso = code.map(c => String.fromCharCode(c)).join('');
    for (let i = bytes.length - 1; i >= 0; i--) {
      // Any non-zero byte outside the 12-14 window disqualifies ISO form.
      if (bytes[i] !== 0 && !(i === 12 || i === 13 || i === 14)) {
        onlyISO = false;
        break;
      }
    }
    const lossLessISO = onlyISO && iso !== 'XRP' && ISO_REGEX.test(iso);
    this._isNative = onlyISO && _.isEqual(code, [0, 0, 0]);
    this._iso = this._isNative ? 'XRP' : lossLessISO ? iso : null;
  },
  toJSON() {
    if (this.iso()) {
      return this.iso();
    }
    return $uper.toJSON.call(this);
  }
});
module.exports = {
  Currency
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
// 128-bit (16-byte) hash.
const Hash128 = makeClass({
  inherits: Hash,
  statics: {width: 16}
});
module.exports = {
  Hash128
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
// 160-bit (20-byte) hash; base for AccountID and Currency.
const Hash160 = makeClass({
  inherits: Hash,
  statics: {width: 20}
});
module.exports = {
  Hash160
};

View File

@@ -0,0 +1,18 @@
'use strict';
const makeClass = require('../utils/make-class');
const {Hash} = require('./hash');
// 256-bit (32-byte) hash, with a shared all-zero constant used by the
// SHAMap code for empty branches.
const Hash256 = makeClass({
  inherits: Hash,
  statics: {
    width: 32,
    init() {
      this.ZERO_256 = new this(new Uint8Array(this.width));
    }
  }
});
module.exports = {
  Hash256
};

View File

@@ -0,0 +1,48 @@
'use strict';
const assert = require('assert');
const makeClass = require('../utils/make-class');
const {Comparable, SerializedType} = require('./serialized-type');
const {compareBytes, parseBytes} = require('../utils/bytes-utils');
// Base class for fixed-width hashes; subclasses set `statics.width`.
const Hash = makeClass({
  Hash(bytes) {
    const width = this.constructor.width;
    // Default to all zeroes when no bytes are supplied.
    this._bytes = bytes ? parseBytes(bytes, Uint8Array) :
      new Uint8Array(width);
    assert.equal(this._bytes.length, width);
  },
  mixins: [Comparable, SerializedType],
  statics: {
    width: NaN,
    from(value) {
      if (value instanceof this) {
        return value;
      }
      return new this(parseBytes(value));
    },
    fromParser(parser, hint) {
      return new this(parser.read(hint || this.width));
    }
  },
  // Lexicographic byte comparison; backs the Comparable mixin.
  compareTo(other) {
    return compareBytes(this._bytes, this.constructor.from(other)._bytes);
  },
  toString() {
    return this.toHex();
  },
  // The half-byte of the hash at `depth`, used to pick SHAMap branches.
  nibblet(depth) {
    const byte_ix = depth > 0 ? (depth / 2) | 0 : 0;
    let b = this._bytes[byte_ix];
    if (depth % 2 === 0) {
      b = (b & 0xF0) >>> 4;
    } else {
      b = b & 0x0F;
    }
    return b;
  }
});
module.exports = {
  Hash
};

View File

@@ -0,0 +1,47 @@
'use strict';
const enums = require('../enums');
const {Field} = enums;
const {AccountID} = require('./account-id');
const {Amount} = require('./amount');
const {Blob} = require('./blob');
const {Currency} = require('./currency');
const {Hash128} = require('./hash-128');
const {Hash160} = require('./hash-160');
const {Hash256} = require('./hash-256');
const {PathSet} = require('./path-set');
const {STArray} = require('./st-array');
const {STObject} = require('./st-object');
const {UInt16} = require('./uint-16');
const {UInt32} = require('./uint-32');
const {UInt64} = require('./uint-64');
const {UInt8} = require('./uint-8');
const {Vector256} = require('./vector-256');
// All serialized types by name, as referenced from definitions.json.
const coreTypes = {
  AccountID,
  Amount,
  Blob,
  Currency,
  Hash128,
  Hash160,
  Hash256,
  PathSet,
  STArray,
  STObject,
  UInt8,
  UInt16,
  UInt32,
  UInt64,
  Vector256
};
// Link every field to its type's class so parser/serializer code can
// dispatch on field.associatedType.
Field.values.forEach(field => {
  field.associatedType = coreTypes[field.type];
});
// Override these three with their richer enum types.
Field.TransactionType.associatedType = enums.TransactionType;
Field.TransactionResult.associatedType = enums.TransactionResult;
Field.LedgerEntryType.associatedType = enums.LedgerEntryType;
module.exports = coreTypes;

View File

@@ -0,0 +1,114 @@
'use strict';
/* eslint-disable no-unused-expressions */
const makeClass = require('../utils/make-class');
const {SerializedType, ensureArrayLikeIs} = require('./serialized-type');
const {Currency} = require('./currency');
const {AccountID} = require('./account-id');
// Wire markers for path sets, plus the hop-presence bit flags.
const PATHSET_END_BYTE = 0x00;
const PATH_SEPARATOR_BYTE = 0xFF;
const TYPE_ACCOUNT = 0x01;
const TYPE_CURRENCY = 0x10;
const TYPE_ISSUER = 0x20;
// One step in a payment path. The type byte is a bitfield recording
// which of the three optional parts (account, currency, issuer) are set.
const Hop = makeClass({
  statics: {
    from(value) {
      if (value instanceof this) {
        return value;
      }
      const hop = new Hop();
      value.issuer && (hop.issuer = AccountID.from(value.issuer));
      value.account && (hop.account = AccountID.from(value.account));
      value.currency && (hop.currency = Currency.from(value.currency));
      return hop;
    },
    // Read only the parts the type bitfield says are present, in
    // account, currency, issuer order.
    parse(parser, type) {
      const hop = new Hop();
      (type & TYPE_ACCOUNT) && (hop.account = AccountID.fromParser(parser));
      (type & TYPE_CURRENCY) && (hop.currency = Currency.fromParser(parser));
      (type & TYPE_ISSUER) && (hop.issuer = AccountID.fromParser(parser));
      return hop;
    }
  },
  toJSON() {
    const type = this.type();
    const ret = {type};
    (type & TYPE_ACCOUNT) && (ret.account = this.account.toJSON());
    (type & TYPE_ISSUER) && (ret.issuer = this.issuer.toJSON());
    (type & TYPE_CURRENCY) && (ret.currency = this.currency.toJSON());
    return ret;
  },
  // Recompute the presence bitfield from which parts are set.
  type() {
    let type = 0;
    this.issuer && (type += TYPE_ISSUER);
    this.account && (type += TYPE_ACCOUNT);
    this.currency && (type += TYPE_CURRENCY);
    return type;
  }
});
// A path is simply an array of Hops.
const Path = makeClass({
  inherits: Array,
  statics: {
    from(value) {
      return ensureArrayLikeIs(Path, value).withChildren(Hop);
    }
  },
  toJSON() {
    return this.map(k => k.toJSON());
  }
});
// A set of alternative payment paths. On the wire, paths are separated
// by 0xFF and the whole set is terminated by 0x00.
const PathSet = makeClass({
  mixins: SerializedType,
  inherits: Array,
  statics: {
    from(value) {
      return ensureArrayLikeIs(PathSet, value).withChildren(Path);
    },
    fromParser(parser) {
      const pathSet = new this();
      let path;
      while (!parser.end()) {
        const type = parser.readUInt8();
        if (type === PATHSET_END_BYTE) {
          break;
        }
        if (type === PATH_SEPARATOR_BYTE) {
          // Close the current path; the next hop starts a new one.
          path = null;
          continue;
        }
        if (!path) {
          path = new Path();
          pathSet.push(path);
        }
        path.push(Hop.parse(parser, type));
      }
      return pathSet;
    }
  },
  toJSON() {
    return this.map(k => k.toJSON());
  },
  toBytesSink(sink) {
    let n = 0;
    this.forEach((path) => {
      if (n++ !== 0) {
        sink.put([PATH_SEPARATOR_BYTE]);
      }
      path.forEach((hop) => {
        // Each hop: presence byte, then present parts in fixed order.
        sink.put([hop.type()]);
        hop.account && (hop.account.toBytesSink(sink));
        hop.currency && (hop.currency.toBytesSink(sink));
        hop.issuer && (hop.issuer.toBytesSink(sink));
      });
    });
    sink.put([PATHSET_END_BYTE]);
  }
});
module.exports = {
  PathSet
};

View File

@@ -0,0 +1,66 @@
'use strict';
const {bytesToHex, slice} = require('../utils/bytes-utils');
const {BytesList} = require('../serdes/binary-serializer');
// Mixin providing relational operators. The host type must implement
// compareTo(other) returning a negative, zero, or positive number.
const Comparable = {
  lt(other) {
    return this.compareTo(other) < 0;
  },
  eq(other) {
    return this.compareTo(other) === 0;
  },
  gt(other) {
    return this.compareTo(other) > 0;
  },
  gte(other) {
    return this.compareTo(other) >= 0;
  },
  lte(other) {
    return this.compareTo(other) <= 0;
  }
};
// Mixin giving serialized types hex/JSON/string views derived from
// their byte serialization.
const SerializedType = {
  toBytesSink(sink) {
    sink.put(this._bytes);
  },
  toHex() {
    return bytesToHex(this.toBytes());
  },
  toBytes() {
    // Fast path: types that hold raw bytes return a copy directly;
    // otherwise serialize through a temporary BytesList.
    if (this._bytes) {
      return slice(this._bytes);
    }
    const bl = new BytesList();
    this.toBytesSink(bl);
    return bl.toBytes();
  },
  toJSON() {
    return this.toHex();
  },
  toString() {
    return this.toHex();
  }
};
/**
 * Coerce an array-like into `Type` (an Array subclass), converting each
 * element with `Child.from`. If `arrayLike` already is a `Type`
 * instance it is returned unchanged.
 */
function ensureArrayLikeIs(Type, arrayLike) {
  return {
    withChildren(Child) {
      if (arrayLike instanceof Type) {
        return arrayLike;
      }
      const coerced = new Type();
      for (const element of Array.prototype.slice.call(arrayLike)) {
        coerced.push(Child.from(element));
      }
      return coerced;
    }
  };
}
// Shared mixins/helpers for all serialized types.
module.exports = {
  ensureArrayLikeIs,
  SerializedType,
  Comparable
};

View File

@@ -0,0 +1,40 @@
'use strict';
const makeClass = require('../utils/make-class');
const {ensureArrayLikeIs, SerializedType} = require('./serialized-type');
const {Field} = require('../enums');
const {STObject} = require('./st-object');
const {ArrayEndMarker} = Field;
// STArray: an array of single-field STObjects, the binary format's
// repeated-field container.
const STArray = makeClass({
  mixins: SerializedType,
  inherits: Array,
  statics: {
    // Read objects until the ArrayEndMarker field (or the parser is
    // exhausted). Each entry is wrapped in a one-field STObject.
    fromParser(parser) {
      const array = new STArray();
      while (!parser.end()) {
        const field = parser.readField();
        if (field === ArrayEndMarker) {
          break;
        }
        const outer = new STObject();
        // Field objects are used directly as property keys; presumably
        // they stringify to the field name - confirm against enums.
        outer[field] = parser.readFieldValue(field);
        array.push(outer);
      }
      return array;
    },
    // Coerce any array-like of object-like entries into an STArray.
    from(value) {
      return ensureArrayLikeIs(STArray, value).withChildren(STObject);
    }
  },
  toJSON() {
    return this.map((v) => v.toJSON());
  },
  // Note: the end marker is written by the enclosing serializer, not here.
  toBytesSink(sink) {
    this.forEach(so => so.toBytesSink(sink));
  }
});
module.exports = {
  STArray
};

View File

@@ -0,0 +1,67 @@
'use strict';
// const assert = require('assert');
const _ = require('lodash');
const makeClass = require('../utils/make-class');
const {Field} = require('../enums');
const {BinarySerializer} = require('../serdes/binary-serializer');
const {ObjectEndMarker} = Field;
const {SerializedType} = require('./serialized-type');
// STObject: an object keyed by Field, the core composite serialized type.
const STObject = makeClass({
  mixins: SerializedType,
  statics: {
    // Parse fields until ObjectEndMarker or, when `hint` is a number,
    // until that many bytes past the starting position are consumed.
    fromParser(parser, hint) {
      const end = typeof hint === 'number' ? parser.pos() + hint : null;
      const so = new this();
      while (!parser.end(end)) {
        const field = parser.readField();
        if (field === ObjectEndMarker) {
          break;
        }
        // Field objects are used directly as property keys.
        so[field] = parser.readFieldValue(field);
      }
      return so;
    },
    // Build an STObject from a plain JSON-ish object. Keys that name a
    // known Field are converted via that field's associated type; any
    // unknown key is copied through untouched.
    from(value) {
      if (value instanceof this) {
        return value;
      }
      if (typeof value === 'object') {
        return _.transform(value, (so, val, key) => {
          const field = Field[key];
          if (field) {
            so[field] = field.associatedType.from(val);
          } else {
            so[key] = val;
          }
        }, new this());
      }
      throw new Error(`${value} is unsupported`);
    }
  },
  // The subset of own keys that correspond to known Fields.
  fieldKeys() {
    return Object.keys(this).map((k) => Field[k]).filter(Boolean);
  },
  toJSON() {
    // NOTE(review): no accumulator is passed to _.transform, so lodash
    // derives one from `this` - verify it yields a plain object here.
    return _.transform(this, (result, value, key) => {
      result[key] = value && value.toJSON ? value.toJSON() : value;
    });
  },
  // Write all serializable fields in canonical (ordinal) order.
  // `filter` lets callers restrict output, e.g. to signing fields only.
  toBytesSink(sink, filter = () => true) {
    const serializer = new BinarySerializer(sink);
    const fields = this.fieldKeys();
    const sorted = _.sortBy(fields, 'ordinal');
    sorted.filter(filter).forEach((field) => {
      const value = this[field];
      // Some fields are never serialized regardless of the filter.
      if (!field.isSerialized) {
        return;
      }
      serializer.writeFieldAndValue(field, value);
    });
  }
});
module.exports = {
  STObject
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
// 16-bit (2-byte) unsigned integer serialized type.
const UInt16 = makeClass({
  inherits: UInt,
  statics: {width: 2}
});
module.exports = {
  UInt16
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
// 32-bit (4-byte) unsigned integer serialized type.
const UInt32 = makeClass({
  inherits: UInt,
  statics: {width: 4}
});
module.exports = {
  UInt32
};

View File

@@ -0,0 +1,52 @@
'use strict';
const assert = require('assert');
const BN = require('bn.js');
const makeClass = require('../utils/make-class');
const {bytesToHex, parseBytes, serializeUIntN}
= require('../utils/bytes-utils');
const {UInt} = require('./uint');
// Exactly 16 uppercase hex characters - the canonical string form.
const HEX_REGEX = /^[A-F0-9]{16}$/;
// 64-bit unsigned integer. Too wide for a JS number, so the value is
// stored as 8 big-endian bytes and exposed as a BN on demand.
const UInt64 = makeClass({
  inherits: UInt,
  statics: {width: 8},
  // Accepts a non-negative number, a BN, a 16-char hex string, or an
  // 8-byte sequence.
  UInt64(arg = 0) {
    const argType = typeof arg;
    if (argType === 'number') {
      assert(arg >= 0);
      this._bytes = new Uint8Array(8);
      // Write the number into the low 4 bytes. NOTE(review): values
      // >= 2^32 are silently truncated by the 32-bit shifts inside
      // serializeUIntN - confirm callers never pass such numbers.
      this._bytes.set(serializeUIntN(arg, 4), 4);
    } else if (arg instanceof BN) {
      this._bytes = parseBytes(arg.toArray('be', 8), Uint8Array);
      this._toBN = arg; // pre-seed the cached toBN() value
    } else {
      if (argType === 'string') {
        if (!HEX_REGEX.test(arg)) {
          throw new Error(`${arg} is not a valid UInt64 hex string`);
        }
      }
      this._bytes = parseBytes(arg, Uint8Array);
    }
    assert(this._bytes.length === 8);
  },
  // JSON form is the uppercase hex string, not a number.
  toJSON() {
    return bytesToHex(this._bytes);
  },
  // valueOf returns a BN, so UInt.compareTo takes the BN path.
  valueOf() {
    return this.toBN();
  },
  cached: {
    // Memoized by makeClass under this._toBN.
    toBN() {
      return new BN(this._bytes);
    }
  },
  toBytes() {
    return this._bytes;
  }
});
module.exports = {
  UInt64
};

View File

@@ -0,0 +1,13 @@
'use strict';
const makeClass = require('../utils/make-class');
const {UInt} = require('./uint');
// 8-bit (1-byte) unsigned integer serialized type.
const UInt8 = makeClass({
  inherits: UInt,
  statics: {width: 1}
});
module.exports = {
  UInt8
};

View File

@@ -0,0 +1,63 @@
'use strict';
const assert = require('assert');
const BN = require('bn.js');
const makeClass = require('../utils/make-class');
const {Comparable, SerializedType} = require('./serialized-type');
const {serializeUIntN} = require('../utils/bytes-utils');
// Largest value representable in N bytes, indexed by byte width (0-4).
const MAX_VALUES = [0, 0xFF, 0xFFFF, 0xFFFFFF, 0xFFFFFFFF];
// Classic three-way comparison: -1, 0, or 1.
function signum(a, b) {
  if (a < b) {
    return -1;
  }
  return a === b ? 0 : 1;
}
const UInt = makeClass({
mixins: [Comparable, SerializedType],
UInt(val = 0) {
const max = MAX_VALUES[this.constructor.width];
if (val < 0 || !(val <= max)) {
throw new Error(`${val} not in range 0 <= $val <= ${max}`);
}
this.val = val;
},
statics: {
width: 0,
fromParser(parser) {
const val = this.width > 4 ? parser.read(this.width) :
parser.readUIntN(this.width);
return new this(val);
},
from(val) {
return val instanceof this ? val : new this(val);
}
},
toJSON() {
return this.val;
},
valueOf() {
return this.val;
},
compareTo(other) {
const thisValue = this.valueOf();
const otherValue = other.valueOf();
if (thisValue instanceof BN) {
return otherValue instanceof BN ?
thisValue.cmp(otherValue) :
thisValue.cmpn(otherValue);
} else if (otherValue instanceof BN) {
return -other.compareTo(this);
}
assert(typeof otherValue === 'number');
return signum(thisValue, otherValue);
},
toBytesSink(sink) {
sink.put(this.toBytes());
},
toBytes() {
return serializeUIntN(this.val, this.constructor.width);
}
});
module.exports = {
UInt
};

View File

@@ -0,0 +1,34 @@
'use strict';
const makeClass = require('../utils/make-class');
const {Hash256} = require('./hash-256');
const {ensureArrayLikeIs, SerializedType} = require('./serialized-type');
const Vector256 = makeClass({
mixins: SerializedType,
inherits: Array,
statics: {
fromParser(parser, hint) {
const vector256 = new this();
const bytes = hint !== null ? hint : parser.size() - parser.pos();
const hashes = bytes / 32;
for (let i = 0; i < hashes; i++) {
vector256.push(Hash256.fromParser(parser));
}
return vector256;
},
from(value) {
return ensureArrayLikeIs(Vector256, value).withChildren(Hash256);
}
},
toBytesSink(sink) {
this.forEach(h => h.toBytesSink(sink));
},
toJSON() {
return this.map((hash) => hash.toJSON());
}
});
module.exports = {
Vector256
};

View File

@@ -0,0 +1,115 @@
'use strict';
const assert = require('assert');
// Classic three-way comparison: -1, 0, or 1.
function signum(a, b) {
  if (a < b) {
    return -1;
  }
  return a === b ? 0 : 1;
}
// Bidirectional hex lookup tables, built once at module load:
//   hexLookup['A']  -> 10        (single uppercase nibble)
//   hexLookup['0A'] -> 10        (two-char uppercase byte)
//   reverseHexLookup[10] -> '0A' (byte -> two-char uppercase hex)
const hexLookup = (function() {
  const forward = {};
  const reverse = forward.reverse = new Array(256);
  const nibbleChar = (n) => n.toString(16).toUpperCase();
  for (let hi = 0; hi < 16; hi++) {
    forward[nibbleChar(hi)] = hi;
    for (let lo = 0; lo < 16; lo++) {
      const byte = (hi << 4) | lo;
      const pair = nibbleChar(hi) + nibbleChar(lo);
      forward[pair] = byte;
      reverse[byte] = pair;
    }
  }
  return forward;
}());
const reverseHexLookup = hexLookup.reverse;
// Render a byte sequence as an uppercase hex string.
function bytesToHex(sequence) {
  const out = new Array(sequence.length);
  for (let ix = 0; ix < sequence.length; ix++) {
    out[ix] = reverseHexLookup[sequence[ix]];
  }
  return out.join('');
}
// Map a one- or two-character uppercase hex string to its byte value,
// throwing on anything not present in the lookup table.
function byteForHex(hex) {
  const value = hexLookup[hex];
  if (value !== undefined) {
    return value;
  }
  throw new Error(`\`${hex}\` is not a valid hex representation of a byte`);
}
// Coerce `val` (uppercase hex string, Array, Uint8Array, or other
// array-like of bytes) into an instance of `Output` (Array by default).
function parseBytes(val, Output = Array) {
  // NOTE: an empty string is falsy, so it is rejected here as well.
  if (!val || val.length === undefined) {
    throw new Error(`${val} is not a sequence`);
  }
  if (typeof val === 'string') {
    // Odd-length hex gets an implicit leading nibble: e.g. 'ABC'
    // parses as [0x0A, 0xBC].
    const start = val.length % 2;
    const res = new Output((val.length + start) / 2);
    // Walk the string backwards, two characters per output byte.
    for (let i = val.length, to = res.length - 1; to >= start; i -= 2, to--) {
      res[to] = byteForHex(val.slice(i - 2, i));
    }
    if (start === 1) {
      // The leftover single leading character becomes the first byte.
      res[0] = byteForHex(val[0]);
    }
    return res;
  } else if (val instanceof Output) {
    return val; // already the right type: no copy is made
  } else if (Output === Uint8Array) {
    return new Output(val); // the Uint8Array constructor copies contents
  }
  // Generic element-by-element copy for any other output type.
  const res = new Output(val.length);
  for (let i = val.length - 1; i >= 0; i--) {
    res[i] = val[i];
  }
  return res;
}
// Serialize `val` as `width` big-endian bytes (width <= 4; the 32-bit
// shift operators truncate anything larger).
function serializeUIntN(val, width) {
  const out = new Uint8Array(width);
  for (let ix = width - 1; ix >= 0; ix--) {
    out[ix] = (val >>> ((width - 1 - ix) * 8)) & 0xff;
  }
  return out;
}
// Lexicographic three-way comparison of two equal-length byte sequences.
function compareBytes(a, b) {
  assert(a.length === b.length);
  for (let ix = 0; ix < a.length; ix++) {
    const diff = signum(a[ix], b[ix]);
    if (diff) {
      // First differing byte decides the ordering.
      return diff;
    }
  }
  return 0;
}
// Copying slice with negative-index support; the result is built with
// `Output` (defaults to the input's own constructor).
function slice(val, startIx = 0, endIx = val.length, Output = val.constructor) {
  /* eslint-disable no-param-reassign*/
  if (startIx < 0) {
    startIx += val.length;
  }
  if (endIx < 0) {
    endIx += val.length;
  }
  /* eslint-enable no-param-reassign*/
  const out = new Output(endIx - startIx);
  for (let ix = startIx; ix < endIx; ix++) {
    out[ix - startIx] = val[ix];
  }
  return out;
}
// Byte/hex helpers shared across the codec.
module.exports = {
  parseBytes,
  bytesToHex,
  slice,
  compareBytes,
  serializeUIntN
};

View File

@@ -0,0 +1,85 @@
'use strict';
const _ = require('lodash');
const inherits = require('inherits');
// Iterate an object's own enumerable keys as (value, key) pairs.
// A null/undefined `obj` is treated as empty (no calls made).
function forEach(obj, func) {
  for (const key of Object.keys(obj || {})) {
    func(obj[key], key);
  }
}
// Return `val` unchanged if it is already an array; otherwise wrap it.
function ensureArray(val) {
  if (Array.isArray(val)) {
    return val;
  }
  return [val];
}
// Build a constructor function from a declarative definition object.
// Recognized definition keys:
//   inherits - parent constructor (prototype chain + statics inherited)
//   mixins   - object or array of objects defaulted onto the prototype
//   statics  - properties assigned onto the constructor itself
//   getters  - names k exposed as methods k() returning this._k
//   virtuals - methods that throw 'unimplemented' until overridden
//   methods  - explicit prototype methods
//   cached   - methods whose first result is memoized on this._<name>
// Any definition key starting with an upper-case letter names the
// constructor function itself; all other function-valued keys become
// prototype methods.
module.exports = function makeClass(klass_, definition_) {
  const definition = definition_ || klass_;
  let klass = typeof klass_ === 'function' ? klass_ : null;
  if (klass === null) {
    // Locate the constructor: the first upper-cased key in the definition.
    for (const k in definition) {
      if (k[0].match(/[A-Z]/)) {
        klass = definition[k];
        break;
      }
    }
  }
  const parent = definition.inherits;
  if (parent) {
    if (klass === null) {
      // No explicit constructor: default to invoking the parent's.
      klass = function() {
        parent.apply(this, arguments);
      };
    }
    inherits(klass, parent);
    _.defaults(klass, parent); // inherit static members as well
  }
  if (klass === null) {
    klass = function() {};
  }
  const proto = klass.prototype;
  // Install `original` (or `wrapper` in its place) under original.name.
  function addFunc(original, wrapper) {
    proto[original.name] = wrapper || original;
  }
  (definition.getters || []).forEach(k => {
    const key = '_' + k;
    proto[k] = function() {
      return this[key];
    };
  });
  forEach(definition.virtuals, f => {
    addFunc(f, function() {
      throw new Error('unimplemented');
    });
  });
  forEach(definition.methods, addFunc);
  // Any other function-valued key (except the constructor) is a method.
  forEach(definition, f => {
    if (_.isFunction(f) && f !== klass) {
      addFunc(f);
    }
  });
  _.assign(klass, definition.statics);
  if (typeof klass.init === 'function') {
    klass.init(); // one-time class-level initialization hook
  }
  forEach(definition.cached, f => {
    const key = '_' + f.name;
    addFunc(f, function() {
      // Memoize: compute once, then reuse this._<name> on later calls.
      let value = this[key];
      if (value === undefined) {
        value = this[key] = f.call(this);
      }
      return value;
    });
  });
  if (definition.mixins) {
    const mixins = {};
    // Right-most in the list win
    ensureArray(definition.mixins).reverse().forEach(o => {
      _.defaults(mixins, o);
    });
    _.defaults(proto, mixins);
  }
  return klass;
};