Added Linting with ESLint (#72)

* added linting, but currently the linting will fail on all files that have not been refactored.
This commit is contained in:
Nathan Nichols
2020-06-26 07:14:31 -07:00
parent a930b9413c
commit ede5b254e3
39 changed files with 2410 additions and 1667 deletions

View File

@@ -0,0 +1,4 @@
node_modules
dist
.github
coverage

View File

@@ -1,196 +0,0 @@
# ESLint documentation can be found at http://eslint.org/docs/
env:
browser: true
node: true
amd: false
mocha: true
jasmine: false
es6: true
rules:
no-alert: 2
no-array-constructor: 2
no-arrow-condition: 0
no-bitwise: 0
no-caller: 2
no-case-declarations: 2
no-catch-shadow: 2
no-class-assign: 2
no-cond-assign: [2, 'always']
no-console: 0
no-const-assign: 2
no-constant-condition: 2
no-continue: 0
no-control-regex: 2
no-debugger: 2
no-delete-var: 2
no-div-regex: 0
no-dupe-class-members: 2
no-dupe-keys: 2
no-dupe-args: 2
no-duplicate-case: 2
no-else-return: 0
no-empty: 2
no-empty-character-class: 2
no-empty-pattern: 2
no-eq-null: 2
no-eval: 2
no-ex-assign: 2
no-extend-native: 2
no-extra-bind: 2
no-extra-boolean-cast: 2
no-extra-parens: [2, 'functions']
no-extra-semi: 2
no-fallthrough: 2
no-floating-decimal: 0
no-func-assign: 2
no-implicit-coercion: 2
no-implied-eval: 2
no-inline-comments: 0
no-inner-declarations: [2, 'functions']
no-invalid-regexp: 2
no-invalid-this: 0 # this rule would cause problems with mocha test cases
no-irregular-whitespace: 2
no-iterator: 2
no-label-var: 2
no-labels: 2
no-lone-blocks: 2
no-lonely-if: 2
no-loop-func: 2
no-mixed-requires: [0, false]
no-mixed-spaces-and-tabs: [2, false]
linebreak-style: [2, 'unix']
no-multi-spaces: 2
no-multi-str: 2
no-multiple-empty-lines: [2, {max: 2}]
no-native-reassign: 2
no-negated-condition: 0
no-negated-in-lhs: 2
no-nested-ternary: 0
no-new: 2
no-new-func: 2
no-new-object: 2
no-new-require: 0
no-new-wrappers: 2
no-obj-calls: 2
no-octal: 2
no-octal-escape: 2
no-param-reassign: 2
no-path-concat: 0
no-plusplus: 0
no-process-env: 0
no-process-exit: 0
no-proto: 2
no-redeclare: 2
no-regex-spaces: 2
no-restricted-modules: 0
no-restricted-syntax: 0
no-return-assign: 2
no-script-url: 2
no-self-compare: 2
no-sequences: 2
no-shadow: 2
no-shadow-restricted-names: 2
no-spaced-func: 2
no-sparse-arrays: 2
no-sync: 0
no-ternary: 0
no-trailing-spaces: 2
no-this-before-super: 2
no-throw-literal: 2
no-undef: 2
no-undef-init: 2
no-undefined: 0
no-unexpected-multiline: 2
no-underscore-dangle: 0
no-unneeded-ternary: 0
no-unreachable: 2
no-unused-expressions: 2
no-unused-vars: [2, {vars: 'all', args: 'all', argsIgnorePattern: '^_'}]
no-use-before-define: 2
no-useless-call: 2
no-useless-concat: 0
no-void: 2
no-var: 2
no-warning-comments: [0, {terms: ['todo', 'fixme', 'xxx'], location: 'start'}]
no-with: 2
no-magic-numbers: 0
array-bracket-spacing: [2, 'never']
arrow-body-style: 0
arrow-parens: [2, 'as-needed']
arrow-spacing: 2
accessor-pairs: 2
block-scoped-var: 2
block-spacing: 2
brace-style: 2
callback-return: 0
camelcase: 0
comma-dangle: 2
comma-spacing: 2
comma-style: 2
complexity: [0, 11]
computed-property-spacing: 2
consistent-return: 2
consistent-this: [2, 'self']
constructor-super: 2
curly: [2, 'all']
default-case: 0
dot-location: [2, 'property']
dot-notation: [2, {allowKeywords: true}]
eol-last: 2
eqeqeq: 2
func-names: 0
func-style: [2, 'declaration', {"allowArrowFunctions": true}]
generator-star-spacing: 2
global-require: 0
guard-for-in: 0
handle-callback-err: 2
id-length: 0
indent: [2, 2, {SwitchCase: 1}]
init-declarations: 0
jsx-quotes: 0
key-spacing: [2, {beforeColon: false, afterColon: true}]
keyword-spacing: 2
lines-around-comment: 0
max-depth: [0, 4]
max-len: [2, 80]
max-nested-callbacks: [0, 2]
max-params: [0, 4]
max-statements: [0, 10]
new-cap: 2
new-parens: 2
newline-after-var: 0
object-curly-spacing: [2, 'never']
object-shorthand: 0
one-var: [2, 'never']
operator-assignment: [0, 'always']
operator-linebreak: 0
padded-blocks: 0
prefer-arrow-callback: 0
prefer-const: 2
prefer-spread: 2
prefer-reflect: 0
prefer-template: 0
quote-props: 0
quotes: [2, 'single']
radix: 2
id-match: 0
require-jsdoc: 0
require-yield: 0
semi-spacing: 2
sort-vars: 0
space-before-blocks: 2
space-before-function-paren: [2, 'never']
space-in-parens: 2
space-infix-ops: 2
space-unary-ops: [2, {words: true, nonwords: false}]
spaced-comment: 2
strict: [2, 'global']
use-isnan: 2
valid-jsdoc: 2
valid-typeof: 2
vars-on-top: 0
wrap-iife: 0
wrap-regex: 0
yoda: [2, 'never']
parser: babel-eslint

View File

@@ -0,0 +1,100 @@
module.exports = {
root: true,
parser: '@typescript-eslint/parser', // Make ESLint compatible with TypeScript
parserOptions: {
// Enable linting rules with type information from our tsconfig
tsconfigRootDir: __dirname,
project: ['./tsconfig.eslint.json'],
sourceType: 'module', // Allow the use of imports / ES modules
ecmaFeatures: {
impliedStrict: true, // Enable global strict mode
},
},
// Specify global variables that are predefined
env: {
browser: true, // Enable browser global variables
node: true, // Enable node global variables & Node.js scoping
es2020: true, // Add all ECMAScript 2020 globals and automatically set the ecmaVersion parser option to ES2020
jest: true, // Add Jest testing global variables (this project tests with Jest, not Mocha)
},
plugins: [
'@typescript-eslint', // Add some TypeScript specific rules, and disable rules covered by the typechecker
'import', // Add rules that help validate proper imports
'prettier', // Allows running prettier as an ESLint rule, and reporting differences as individual linting issues
],
extends: [
// ESLint recommended rules
'eslint:recommended',
// Add TypeScript-specific rules, and disable rules covered by typechecker
'plugin:@typescript-eslint/eslint-recommended',
'plugin:@typescript-eslint/recommended',
// Add rules for import/export syntax
'plugin:import/errors',
'plugin:import/warnings',
'plugin:import/typescript',
// Add rules that specifically require type information using our tsconfig
'plugin:@typescript-eslint/recommended-requiring-type-checking',
// Enable Prettier for ESLint --fix, and disable rules that conflict with Prettier
// (prettier configs must come last so they can override formatting rules above)
'prettier/@typescript-eslint',
'plugin:prettier/recommended',
],
// rules: {
// // This rule is about explicitly using `return undefined` when a function returns any non-undefined object.
// // However, since we're using TypeScript, it will yell at us if a function is not allowed to return `undefined` in its signature, so we don't need this rule.
// "consistent-return": "off",
// },
overrides: [
// Overrides for all test files
{
files: 'test/**/*.ts',
rules: {
// For our test files, the pattern has been to have unnamed functions
'func-names': 'off',
// Using non-null assertions (obj!.property) cancels the benefits of the strict null-checking mode, but these are test files, so we don't care.
'@typescript-eslint/no-non-null-assertion': 'off',
// For some test files, we shadow testing constants with function parameter names
'no-shadow': 'off',
// Some of our test files declare helper classes alongside the tests
'max-classes-per-file': 'off',
},
},
{
files: '**/*.ts',
rules: {
// Allow unused variables in our files when explicitly prepended with `_`.
'@typescript-eslint/no-unused-vars': [
'error',
{ argsIgnorePattern: '^_' },
],
// These rules are deprecated, but we have an old config that enables it
'@typescript-eslint/camelcase': 'off',
'@typescript-eslint/ban-ts-ignore': 'off',
// These rules are actually disabled in @xpring-eng/eslint-config-base/loose at the moment
'@typescript-eslint/no-unsafe-call': 'off',
'@typescript-eslint/no-unsafe-member-access': 'off',
'@typescript-eslint/no-unsafe-assignment': 'off',
},
},
{
files: ['src/XRP/default-xrp-client.ts'],
rules: {
// This is actually a good rule to have enabled, but for the XRPClient, we define a helper error message class in the same file
'max-classes-per-file': 'off',
},
},
],
}

View File

@@ -23,14 +23,22 @@
"@types/jest": "^26.0.0", "@types/jest": "^26.0.0",
"@types/lodash": "^4.14.155", "@types/lodash": "^4.14.155",
"@types/node": "^14.0.10", "@types/node": "^14.0.10",
"@typescript-eslint/eslint-plugin": "^3.2.0",
"@typescript-eslint/parser": "^3.2.0",
"eslint": "^7.2.0",
"eslint-config-prettier": "^6.11.0",
"eslint-plugin-import": "^2.21.1",
"eslint-plugin-mocha": "^7.0.1",
"eslint-plugin-prettier": "^3.1.3",
"prettier": "^2.0.4",
"jest": "^26.0.1", "jest": "^26.0.1",
"typescript": "^3.9.5" "typescript": "^3.9.5"
}, },
"scripts": { "scripts": {
"compile": "tsc && cp ./src/enums/definitions.json ./dist/enums", "compile": "tsc && cp ./src/enums/definitions.json ./dist/enums",
"prepare": "npm run compile && npm test", "prepare": "npm run compile && npm test",
"lint" : "echo 'no linting for this PR is available'", "test": "jest --coverage=true",
"test": "jest" "lint": "eslint . --ext .ts --fix"
}, },
"repository": { "repository": {
"type": "git", "type": "git",

View File

@@ -1,25 +1,25 @@
/* eslint-disable func-style */ /* eslint-disable func-style */
import { BN } from 'bn.js'; import { BN } from "bn.js";
import { coreTypes } from './types'; import { coreTypes } from "./types";
const { HashPrefix } = require('./hash-prefixes'); const { HashPrefix } = require("./hash-prefixes");
const {BinaryParser} = require('./serdes/binary-parser'); const { BinaryParser } = require("./serdes/binary-parser");
const {BinarySerializer, BytesList} = require('./serdes/binary-serializer'); const { BinarySerializer, BytesList } = require("./serdes/binary-serializer");
const {bytesToHex, slice, parseBytes} = require('./utils/bytes-utils'); const { bytesToHex, slice, parseBytes } = require("./utils/bytes-utils");
const {sha512Half, transactionID} = require('./hashes'); const { sha512Half, transactionID } = require("./hashes");
const makeParser = bytes => new BinaryParser(bytes); const makeParser = (bytes) => new BinaryParser(bytes);
const readJSON = parser => parser.readType(coreTypes.STObject).toJSON(); const readJSON = (parser) => parser.readType(coreTypes.STObject).toJSON();
const binaryToJSON = bytes => readJSON(makeParser(bytes)); const binaryToJSON = (bytes) => readJSON(makeParser(bytes));
function serializeObject(object, opts = <any>{}) { function serializeObject(object, opts = <any>{}) {
const {prefix, suffix, signingFieldsOnly = false} = opts; const { prefix, suffix, signingFieldsOnly = false } = opts;
const bytesList = new BytesList(); const bytesList = new BytesList();
if (prefix) { if (prefix) {
bytesList.put(prefix); bytesList.put(prefix);
} }
const filter = signingFieldsOnly ? f => f.isSigningField : undefined; const filter = signingFieldsOnly ? (f) => f.isSigningField : undefined;
coreTypes.STObject.from(object).toBytesSink(bytesList, filter); coreTypes.STObject.from(object).toBytesSink(bytesList, filter);
if (suffix) { if (suffix) {
bytesList.put(suffix); bytesList.put(suffix);
@@ -28,26 +28,26 @@ function serializeObject(object, opts = <any>{}) {
} }
function signingData(tx, prefix = HashPrefix.transactionSig) { function signingData(tx, prefix = HashPrefix.transactionSig) {
return serializeObject(tx, {prefix, signingFieldsOnly: true}); return serializeObject(tx, { prefix, signingFieldsOnly: true });
} }
function signingClaimData(claim) { function signingClaimData(claim) {
const prefix = HashPrefix.paymentChannelClaim const prefix = HashPrefix.paymentChannelClaim;
const channel = coreTypes.Hash256.from(claim.channel).toBytes() const channel = coreTypes.Hash256.from(claim.channel).toBytes();
const amount = new coreTypes.UInt64(new BN(claim.amount)).toBytes(); const amount = new coreTypes.UInt64(new BN(claim.amount)).toBytes();
const bytesList = new BytesList(); const bytesList = new BytesList();
bytesList.put(prefix) bytesList.put(prefix);
bytesList.put(channel) bytesList.put(channel);
bytesList.put(amount) bytesList.put(amount);
return bytesList.toBytes() return bytesList.toBytes();
} }
function multiSigningData(tx, signingAccount) { function multiSigningData(tx, signingAccount) {
const prefix = HashPrefix.transactionMultiSig; const prefix = HashPrefix.transactionMultiSig;
const suffix = coreTypes.AccountID.from(signingAccount).toBytes(); const suffix = coreTypes.AccountID.from(signingAccount).toBytes();
return serializeObject(tx, {prefix, suffix, signingFieldsOnly: true}); return serializeObject(tx, { prefix, suffix, signingFieldsOnly: true });
} }
export { export {
@@ -65,5 +65,5 @@ export {
binaryToJSON, binaryToJSON,
sha512Half, sha512Half,
transactionID, transactionID,
slice slice,
}; };

View File

@@ -1,13 +1,12 @@
import { Enums } from './enums'; import { Enums } from "./enums";
const {Field} = Enums.Field; const { Field } = Enums.Field;
const types = require('./types'); const types = require("./types");
const binary = require('./binary'); const binary = require("./binary");
const {ShaMap} = require('./shamap'); const { ShaMap } = require("./shamap");
const ledgerHashes = require('./ledger-hashes'); const ledgerHashes = require("./ledger-hashes");
const hashes = require('./hashes'); const hashes = require("./hashes");
const quality = require('./quality'); const quality = require("./quality");
const {HashPrefix} = require('./hash-prefixes'); const { HashPrefix } = require("./hash-prefixes");
export { export {
hashes, hashes,
@@ -18,5 +17,5 @@ export {
Field, Field,
HashPrefix, HashPrefix,
ShaMap, ShaMap,
types types,
} };

View File

@@ -1,125 +1,138 @@
import { makeClass } from './../utils/make-class' import { makeClass } from "./../utils/make-class";
const assert = require('assert') const assert = require("assert");
const _ = require('lodash') const _ = require("lodash");
const { parseBytes, serializeUIntN } = require('./../utils/bytes-utils') const { parseBytes, serializeUIntN } = require("./../utils/bytes-utils");
const enums = require('./definitions.json') const enums = require("./definitions.json");
function transformWith (func, obj) { function transformWith(func, obj) {
return _.transform(obj, func) return _.transform(obj, func);
} }
function biMap (obj, valueKey) { function biMap(obj, valueKey) {
return _.transform(obj, (result, value, key) => { return _.transform(obj, (result, value, key) => {
result[key] = value result[key] = value;
result[value[valueKey]] = value result[value[valueKey]] = value;
}) });
} }
const EnumType = makeClass({ const EnumType = makeClass(
EnumType (definition) { {
_.assign(this, definition) EnumType(definition) {
// At minimum _.assign(this, definition);
assert(this.bytes instanceof Uint8Array) // At minimum
assert(typeof this.ordinal === 'number') assert(this.bytes instanceof Uint8Array);
assert(typeof this.name === 'string') assert(typeof this.ordinal === "number");
}, assert(typeof this.name === "string");
toString () {
return this.name
},
toJSON () {
return this.name
},
toBytesSink (sink) {
sink.put(this.bytes)
},
statics: {
ordinalByteWidth: 1,
fromParser (parser) {
return this.from(parser.readUIntN(this.ordinalByteWidth))
}, },
from (val) { toString() {
const ret = val instanceof this ? val : this[val] return this.name;
if (!ret) {
throw new Error(
`${val} is not a valid name or ordinal for ${this.enumName}`)
}
return ret
}, },
valuesByName () { toJSON() {
return _.transform(this.initVals, (result, ordinal, name) => { return this.name;
const bytes = serializeUIntN(ordinal, this.ordinalByteWidth)
const type = new this({ name, ordinal, bytes })
result[name] = type
})
}, },
init () { toBytesSink(sink) {
const mapped = this.valuesByName() sink.put(this.bytes);
_.assign(this, biMap(mapped, 'ordinal')) },
this.values = _.values(mapped) statics: {
return this ordinalByteWidth: 1,
} fromParser(parser) {
} return this.from(parser.readUIntN(this.ordinalByteWidth));
}, undefined) },
from(val) {
const ret = val instanceof this ? val : this[val];
if (!ret) {
throw new Error(
`${val} is not a valid name or ordinal for ${this.enumName}`
);
}
return ret;
},
valuesByName() {
return _.transform(this.initVals, (result, ordinal, name) => {
const bytes = serializeUIntN(ordinal, this.ordinalByteWidth);
const type = new this({ name, ordinal, bytes });
result[name] = type;
});
},
init() {
const mapped = this.valuesByName();
_.assign(this, biMap(mapped, "ordinal"));
this.values = _.values(mapped);
return this;
},
},
},
undefined
);
function makeEnum (name, definition) { function makeEnum(name, definition) {
return makeClass({ return makeClass(
inherits: EnumType, {
statics: _.assign(definition, { enumName: name }) inherits: EnumType,
}, undefined) statics: _.assign(definition, { enumName: name }),
},
undefined
);
} }
function makeEnums (to, definition, name) { function makeEnums(to, definition, name) {
to[name] = makeEnum(name, definition) to[name] = makeEnum(name, definition);
} }
const Enums = transformWith(makeEnums, { const Enums = transformWith(makeEnums, {
Type: { Type: {
initVals: enums.TYPES initVals: enums.TYPES,
}, },
LedgerEntryType: { LedgerEntryType: {
initVals: enums.LEDGER_ENTRY_TYPES, ordinalByteWidth: 2 initVals: enums.LEDGER_ENTRY_TYPES,
ordinalByteWidth: 2,
}, },
TransactionType: { TransactionType: {
initVals: enums.TRANSACTION_TYPES, ordinalByteWidth: 2 initVals: enums.TRANSACTION_TYPES,
ordinalByteWidth: 2,
}, },
TransactionResult: { TransactionResult: {
initVals: enums.TRANSACTION_RESULTS, ordinalByteWidth: 1 initVals: enums.TRANSACTION_RESULTS,
} ordinalByteWidth: 1,
}) },
});
Enums.Field = makeClass({ Enums.Field = makeClass(
inherits: EnumType, {
statics: { inherits: EnumType,
enumName: 'Field', statics: {
initVals: enums.FIELDS, enumName: "Field",
valuesByName () { initVals: enums.FIELDS,
const fields = _.map(this.initVals, ([name, definition]) => { valuesByName() {
const type = Enums.Type[definition.type] const fields = _.map(this.initVals, ([name, definition]) => {
const bytes = this.header(type.ordinal, definition.nth) const type = Enums.Type[definition.type];
const ordinal = type.ordinal << 16 | definition.nth const bytes = this.header(type.ordinal, definition.nth);
const extra = { ordinal, name, type, bytes } const ordinal = (type.ordinal << 16) | definition.nth;
return new this(_.assign(definition, extra)) const extra = { ordinal, name, type, bytes };
}) return new this(_.assign(definition, extra));
return _.keyBy(fields, 'name') });
}, return _.keyBy(fields, "name");
header (type, nth) { },
const name = nth header(type, nth) {
const header = <any>[] const name = nth;
const push = header.push.bind(header) const header = <any>[];
if (type < 16) { const push = header.push.bind(header);
if (name < 16) { if (type < 16) {
push(type << 4 | name) if (name < 16) {
push((type << 4) | name);
} else {
push(type << 4, name);
}
} else if (name < 16) {
push(name, type);
} else { } else {
push(type << 4, name) push(0, type, name);
} }
} else if (name < 16) { return parseBytes(header, Uint8Array);
push(name, type) },
} else { },
push(0, type, name) },
} undefined
return parseBytes(header, Uint8Array) );
}
}
}, undefined)
export { Enums } export { Enums };

View File

@@ -96,39 +96,39 @@ const input = {
tecDUPLICATE: 149, tecDUPLICATE: 149,
tecKILLED: 150, tecKILLED: 150,
tecHAS_OBLIGATIONS: 151, tecHAS_OBLIGATIONS: 151,
tecTOO_SOON: 152 tecTOO_SOON: 152,
} };
let startingFromTemBADSENDXRPPATHS = -284 let startingFromTemBADSENDXRPPATHS = -284;
let startingFromTefFAILURE = -199 let startingFromTefFAILURE = -199;
let startingFromTerRETRY = -99 let startingFromTerRETRY = -99;
const tesSUCCESS = 0 const tesSUCCESS = 0;
let startingFromTecCLAIM = 100 let startingFromTecCLAIM = 100;
const startingFromTecDIRFULL = 121 const startingFromTecDIRFULL = 121;
let previousKey = 'tem' let previousKey = "tem";
Object.keys(input).forEach(key => { Object.keys(input).forEach((key) => {
if (key.substring(0, 3) !== previousKey.substring(0, 3)) { if (key.substring(0, 3) !== previousKey.substring(0, 3)) {
console.log() console.log();
previousKey = key previousKey = key;
} }
if (key.substring(0, 3) === 'tem') { if (key.substring(0, 3) === "tem") {
console.log(` "${key}": ${startingFromTemBADSENDXRPPATHS++},`) console.log(` "${key}": ${startingFromTemBADSENDXRPPATHS++},`);
} else if (key.substring(0, 3) === 'tef') { } else if (key.substring(0, 3) === "tef") {
console.log(` "${key}": ${startingFromTefFAILURE++},`) console.log(` "${key}": ${startingFromTefFAILURE++},`);
} else if (key.substring(0, 3) === 'ter') { } else if (key.substring(0, 3) === "ter") {
console.log(` "${key}": ${startingFromTerRETRY++},`) console.log(` "${key}": ${startingFromTerRETRY++},`);
} else if (key.substring(0, 3) === 'tes') { } else if (key.substring(0, 3) === "tes") {
console.log(` "${key}": ${tesSUCCESS},`) console.log(` "${key}": ${tesSUCCESS},`);
} else if (key.substring(0, 3) === 'tec') { } else if (key.substring(0, 3) === "tec") {
if (key === 'tecDIR_FULL') { if (key === "tecDIR_FULL") {
startingFromTecCLAIM = startingFromTecDIRFULL startingFromTecCLAIM = startingFromTecDIRFULL;
} }
console.log(` "${key}": ${startingFromTecCLAIM++},`) console.log(` "${key}": ${startingFromTecCLAIM++},`);
} }
}) });

View File

@@ -1,31 +1,29 @@
import { serializeUIntN } from './utils/bytes-utils'; import { serializeUIntN } from "./utils/bytes-utils";
function bytes(uint32) { function bytes(uint32) {
return serializeUIntN(uint32, 4); return serializeUIntN(uint32, 4);
} }
const HashPrefix = { const HashPrefix = {
transactionID: bytes(0x54584E00), transactionID: bytes(0x54584e00),
// transaction plus metadata // transaction plus metadata
transaction: bytes(0x534E4400), transaction: bytes(0x534e4400),
// account state // account state
accountStateEntry: bytes(0x4D4C4E00), accountStateEntry: bytes(0x4d4c4e00),
// inner node in tree // inner node in tree
innerNode: bytes(0x4D494E00), innerNode: bytes(0x4d494e00),
// ledger master data for signing // ledger master data for signing
ledgerHeader: bytes(0x4C575200), ledgerHeader: bytes(0x4c575200),
// inner transaction to sign // inner transaction to sign
transactionSig: bytes(0x53545800), transactionSig: bytes(0x53545800),
// inner transaction to sign // inner transaction to sign
transactionMultiSig: bytes(0x534D5400), transactionMultiSig: bytes(0x534d5400),
// validation for signing // validation for signing
validation: bytes(0x56414C00), validation: bytes(0x56414c00),
// proposal for signing // proposal for signing
proposal: bytes(0x50525000), proposal: bytes(0x50525000),
// payment channel claim // payment channel claim
paymentChannelClaim: bytes(0x434C4D00) paymentChannelClaim: bytes(0x434c4d00),
}; };
export { export { HashPrefix };
HashPrefix
};

View File

@@ -1,43 +1,44 @@
import { makeClass } from './utils/make-class'; import { makeClass } from "./utils/make-class";
import { HashPrefix } from './hash-prefixes'; import { HashPrefix } from "./hash-prefixes";
import { coreTypes } from './types'; import { coreTypes } from "./types";
import { parseBytes } from './utils/bytes-utils'; import { parseBytes } from "./utils/bytes-utils";
import * as createHash from 'create-hash'; import * as createHash from "create-hash";
const Sha512Half = makeClass({ const Sha512Half = makeClass(
Sha512Half() { {
this.hash = createHash('sha512'); Sha512Half() {
}, this.hash = createHash("sha512");
statics: { },
statics: {
put(bytes) {
return new this().put(bytes);
},
},
put(bytes) { put(bytes) {
return new this().put(bytes); this.hash.update(parseBytes(bytes, Buffer));
} return this;
},
finish256() {
const bytes = this.hash.digest();
return bytes.slice(0, 32);
},
finish() {
return new coreTypes.Hash256(this.finish256());
},
}, },
put(bytes) { undefined
this.hash.update(parseBytes(bytes, Buffer)); );
return this;
},
finish256() {
const bytes = this.hash.digest();
return bytes.slice(0, 32);
},
finish() {
return new coreTypes.Hash256(this.finish256());
}
}, undefined);
function sha512Half(...args) { function sha512Half(...args) {
const hash = new Sha512Half(); const hash = new Sha512Half();
args.forEach(a => hash.put(a)); args.forEach((a) => hash.put(a));
return parseBytes(hash.finish256(), Uint8Array); return parseBytes(hash.finish256(), Uint8Array);
} }
function transactionID(serialized) { function transactionID(serialized) {
return new coreTypes.Hash256(sha512Half(HashPrefix.transactionID, serialized)); return new coreTypes.Hash256(
sha512Half(HashPrefix.transactionID, serialized)
);
} }
export { export { Sha512Half, sha512Half, transactionID };
Sha512Half,
sha512Half,
transactionID
};

View File

@@ -1,64 +1,65 @@
import {strict as assert} from 'assert'; import { strict as assert } from "assert";
import { quality, binary } from './coretypes'; import { quality, binary } from "./coretypes";
import { coreTypes } from './types'; import { coreTypes } from "./types";
const { bytesToHex, const {
signingData, bytesToHex,
signingClaimData, signingData,
multiSigningData, signingClaimData,
binaryToJSON, multiSigningData,
serializeObject, binaryToJSON,
BinaryParser } = binary; serializeObject,
BinaryParser,
} = binary;
function decodeLedgerData(binary) { function decodeLedgerData(binary) {
assert(typeof binary === 'string', 'binary must be a hex string'); assert(typeof binary === "string", "binary must be a hex string");
const parser = new BinaryParser(binary) const parser = new BinaryParser(binary);
return { return {
ledger_index: parser.readUInt32(), ledger_index: parser.readUInt32(),
total_coins: parser.readType(coreTypes.UInt64).valueOf().toString(), total_coins: parser.readType(coreTypes.UInt64).valueOf().toString(),
parent_hash: parser.readType(coreTypes.Hash256).toHex(), parent_hash: parser.readType(coreTypes.Hash256).toHex(),
transaction_hash: parser.readType(coreTypes.Hash256).toHex(), transaction_hash: parser.readType(coreTypes.Hash256).toHex(),
account_hash: parser.readType(coreTypes.Hash256).toHex(), account_hash: parser.readType(coreTypes.Hash256).toHex(),
parent_close_time: parser.readUInt32(), parent_close_time: parser.readUInt32(),
close_time: parser.readUInt32(), close_time: parser.readUInt32(),
close_time_resolution: parser.readUInt8(), close_time_resolution: parser.readUInt8(),
close_flags: parser.readUInt8() close_flags: parser.readUInt8(),
} };
} }
function decode(binary) { function decode(binary) {
assert(typeof binary === 'string', 'binary must be a hex string'); assert(typeof binary === "string", "binary must be a hex string");
return binaryToJSON(binary); return binaryToJSON(binary);
} }
function encode(json) { function encode(json) {
assert(typeof json === 'object'); assert(typeof json === "object");
return bytesToHex(serializeObject(json)); return bytesToHex(serializeObject(json));
} }
function encodeForSigning(json) { function encodeForSigning(json) {
assert(typeof json === 'object'); assert(typeof json === "object");
return bytesToHex(signingData(json)); return bytesToHex(signingData(json));
} }
function encodeForSigningClaim(json) { function encodeForSigningClaim(json) {
assert(typeof json === 'object'); assert(typeof json === "object");
return bytesToHex(signingClaimData(json)); return bytesToHex(signingClaimData(json));
} }
function encodeForMultisigning(json, signer) { function encodeForMultisigning(json, signer) {
assert(typeof json === 'object'); assert(typeof json === "object");
assert.equal(json.SigningPubKey, ''); assert.equal(json.SigningPubKey, "");
return bytesToHex(multiSigningData(json, signer)); return bytesToHex(multiSigningData(json, signer));
} }
function encodeQuality(value) { function encodeQuality(value) {
assert(typeof value === 'string'); assert(typeof value === "string");
return bytesToHex(quality.encode(value)); return bytesToHex(quality.encode(value));
} }
function decodeQuality(value) { function decodeQuality(value) {
assert(typeof value === 'string'); assert(typeof value === "string");
return quality.decode(value).toString(); return quality.decode(value).toString();
} }
@@ -70,5 +71,5 @@ module.exports = {
encodeForMultisigning, encodeForMultisigning,
encodeQuality, encodeQuality,
decodeQuality, decodeQuality,
decodeLedgerData decodeLedgerData,
}; };

View File

@@ -1,16 +1,16 @@
import * as _ from 'lodash' import * as _ from "lodash";
import { BN } from 'bn.js'; import { BN } from "bn.js";
import { strict as assert } from 'assert'; import { strict as assert } from "assert";
import { coreTypes } from './types'; import { coreTypes } from "./types";
const { STObject, Hash256 } = coreTypes; const { STObject, Hash256 } = coreTypes;
import { ShaMap } from './shamap'; import { ShaMap } from "./shamap";
import { HashPrefix } from './hash-prefixes'; import { HashPrefix } from "./hash-prefixes";
import { Sha512Half } from './hashes'; import { Sha512Half } from "./hashes";
import { BinarySerializer, serializeObject } from './binary'; import { BinarySerializer, serializeObject } from "./binary";
function computeHash(itemizer, itemsJson) { function computeHash(itemizer, itemsJson) {
const map = new ShaMap(); const map = new ShaMap();
itemsJson.forEach(item => map.addItem(...itemizer(item))); itemsJson.forEach((item) => map.addItem(...itemizer(item)));
return map.hash(); return map.hash();
} }
@@ -25,7 +25,7 @@ function transactionItem(json) {
const serializer = new BinarySerializer(sink); const serializer = new BinarySerializer(sink);
serializer.writeLengthEncoded(STObject.from(json)); serializer.writeLengthEncoded(STObject.from(json));
serializer.writeLengthEncoded(STObject.from(json.metaData)); serializer.writeLengthEncoded(STObject.from(json.metaData));
} },
}; };
return [index, item]; return [index, item];
} }
@@ -39,7 +39,7 @@ function entryItem(json) {
}, },
toBytesSink(sink) { toBytesSink(sink) {
sink.put(bytes); sink.put(bytes);
} },
}; };
return [index, item]; return [index, item];
} }
@@ -65,8 +65,4 @@ function ledgerHash(header) {
return hash.finish(); return hash.finish();
} }
export { export { accountStateHash, transactionTreeHash, ledgerHash };
accountStateHash,
transactionTreeHash,
ledgerHash
};

View File

@@ -1,13 +1,16 @@
const Decimal = require('decimal.js'); const Decimal = require("decimal.js");
import { bytesToHex, slice, parseBytes } from './utils/bytes-utils'; import { bytesToHex, slice, parseBytes } from "./utils/bytes-utils";
import { coreTypes } from './types'; import { coreTypes } from "./types";
import { BN } from 'bn.js'; import { BN } from "bn.js";
module.exports = { module.exports = {
encode(arg) { encode(arg) {
const quality = arg instanceof Decimal ? arg : new Decimal(arg); const quality = arg instanceof Decimal ? arg : new Decimal(arg);
const exponent = quality.e - 15; const exponent = quality.e - 15;
const qualityString = quality.times('1e' + -exponent).abs().toString(); const qualityString = quality
.times("1e" + -exponent)
.abs()
.toString();
const bytes = new coreTypes.UInt64(new BN(qualityString)).toBytes(); const bytes = new coreTypes.UInt64(new BN(qualityString)).toBytes();
bytes[0] = exponent + 100; bytes[0] = exponent + 100;
return bytes; return bytes;
@@ -15,7 +18,7 @@ module.exports = {
decode(arg) { decode(arg) {
const bytes = slice(parseBytes(arg), -8); const bytes = slice(parseBytes(arg), -8);
const exponent = bytes[0] - 100; const exponent = bytes[0] - 100;
const mantissa = new Decimal('0x' + bytesToHex(slice(bytes, 1))); const mantissa = new Decimal("0x" + bytesToHex(slice(bytes, 1)));
return mantissa.times('1e' + exponent); return mantissa.times("1e" + exponent);
} },
}; };

View File

@@ -1,98 +1,101 @@
import { strict as assert } from 'assert' import { strict as assert } from "assert";
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { Enums } from '../enums' import { Enums } from "../enums";
import { slice, parseBytes } from '../utils/bytes-utils' import { slice, parseBytes } from "../utils/bytes-utils";
const BinaryParser = makeClass({ const BinaryParser = makeClass(
BinaryParser (buf) { {
this._buf = parseBytes(buf, Uint8Array) BinaryParser(buf) {
this._length = this._buf.length this._buf = parseBytes(buf, Uint8Array);
this._cursor = 0 this._length = this._buf.length;
this._cursor = 0;
},
skip(n) {
this._cursor += n;
},
read(n, to = Uint8Array) {
const start = this._cursor;
const end = this._cursor + n;
assert(end <= this._buf.length);
this._cursor = end;
return slice(this._buf, start, end, to);
},
readUIntN(n) {
return this.read(n, Array).reduce((a, b) => (a << 8) | b) >>> 0;
},
readUInt8() {
return this._buf[this._cursor++];
},
readUInt16() {
return this.readUIntN(2);
},
readUInt32() {
return this.readUIntN(4);
},
pos() {
return this._cursor;
},
size() {
return this._buf.length;
},
end(customEnd) {
const cursor = this.pos();
return (
cursor >= this._length || (customEnd !== null && cursor >= customEnd)
);
},
readVL() {
return this.read(this.readVLLength());
},
readVLLength() {
const b1 = this.readUInt8();
if (b1 <= 192) {
return b1;
} else if (b1 <= 240) {
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
} else if (b1 <= 254) {
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error("Invalid varint length indicator");
},
readFieldOrdinal() {
const tagByte = this.readUInt8();
const type = (tagByte & 0xf0) >>> 4 || this.readUInt8();
const nth = tagByte & 0x0f || this.readUInt8();
return (type << 16) | nth;
},
readField() {
return Enums.Field.from(this.readFieldOrdinal());
},
readType(type) {
return type.fromParser(this);
},
typeForField(field) {
return field.associatedType;
},
readFieldValue(field) {
const kls = this.typeForField(field);
if (!kls) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
const sizeHint = field.isVLEncoded ? this.readVLLength() : null;
const value = kls.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `
);
}
return value;
},
readFieldAndValue() {
const field = this.readField();
return [field, this.readFieldValue(field)];
},
}, },
skip (n) { undefined
this._cursor += n );
},
read (n, to = Uint8Array) {
const start = this._cursor
const end = this._cursor + n
assert(end <= this._buf.length)
this._cursor = end
return slice(this._buf, start, end, to)
},
readUIntN (n) {
return this.read(n, Array).reduce((a, b) => a << 8 | b) >>> 0
},
readUInt8 () {
return this._buf[this._cursor++]
},
readUInt16 () {
return this.readUIntN(2)
},
readUInt32 () {
return this.readUIntN(4)
},
pos () {
return this._cursor
},
size () {
return this._buf.length
},
end (customEnd) {
const cursor = this.pos()
return (cursor >= this._length) || (customEnd !== null &&
cursor >= customEnd)
},
readVL () {
return this.read(this.readVLLength())
},
readVLLength () {
const b1 = this.readUInt8()
if (b1 <= 192) {
return b1
} else if (b1 <= 240) {
const b2 = this.readUInt8()
return 193 + (b1 - 193) * 256 + b2
} else if (b1 <= 254) {
const b2 = this.readUInt8()
const b3 = this.readUInt8()
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3
}
throw new Error('Invalid varint length indicator')
},
readFieldOrdinal () {
const tagByte = this.readUInt8()
const type = (tagByte & 0xF0) >>> 4 || this.readUInt8()
const nth = tagByte & 0x0F || this.readUInt8()
return type << 16 | nth
},
readField () {
return Enums.Field.from(this.readFieldOrdinal())
},
readType (type) {
return type.fromParser(this)
},
typeForField (field) {
return field.associatedType
},
readFieldValue (field) {
const kls = this.typeForField(field)
if (!kls) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`)
}
const sizeHint = field.isVLEncoded ? this.readVLLength() : null
const value = kls.fromParser(this, sizeHint)
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `)
}
return value
},
readFieldAndValue () {
const field = this.readField()
return [field, this.readFieldValue(field)]
}
}, undefined)
export { export { BinaryParser };
BinaryParser
}

View File

@@ -1,107 +1,110 @@
import { strict as assert } from 'assert' import { strict as assert } from "assert";
import { parseBytes, bytesToHex } from '../utils/bytes-utils' import { parseBytes, bytesToHex } from "../utils/bytes-utils";
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { Enums } from '../enums' import { Enums } from "../enums";
const BytesSink = { const BytesSink = {
put (/* bytesSequence */) { put(/* bytesSequence */) {
// any hex string or any object with a `length` and where 0 <= [ix] <= 255 // any hex string or any object with a `length` and where 0 <= [ix] <= 255
} },
} };
const BytesList = makeClass({ const BytesList = makeClass(
implementing: BytesSink, {
BytesList () { implementing: BytesSink,
this.arrays = [] BytesList() {
this.length = 0 this.arrays = [];
this.length = 0;
},
put(bytesArg) {
const bytes = parseBytes(bytesArg, Uint8Array);
this.length += bytes.length;
this.arrays.push(bytes);
return this;
},
toBytesSink(sink) {
this.arrays.forEach((arr) => {
sink.put(arr);
});
},
toBytes() {
const concatenated = new Uint8Array(this.length);
let pointer = 0;
this.arrays.forEach((arr) => {
concatenated.set(arr, pointer);
pointer += arr.length;
});
return concatenated;
},
toHex() {
return bytesToHex(this.toBytes());
},
}, },
put (bytesArg) { undefined
const bytes = parseBytes(bytesArg, Uint8Array) );
this.length += bytes.length
this.arrays.push(bytes)
return this
},
toBytesSink (sink) {
this.arrays.forEach(arr => {
sink.put(arr)
})
},
toBytes () {
const concatenated = new Uint8Array(this.length)
let pointer = 0
this.arrays.forEach(arr => {
concatenated.set(arr, pointer)
pointer += arr.length
})
return concatenated
},
toHex () {
return bytesToHex(this.toBytes())
}
}, undefined)
const BinarySerializer = makeClass({ const BinarySerializer = makeClass(
BinarySerializer (sink) { {
this.sink = sink BinarySerializer(sink) {
}, this.sink = sink;
write (value) { },
value.toBytesSink(this.sink) write(value) {
}, value.toBytesSink(this.sink);
put (bytes) { },
this.sink.put(bytes) put(bytes) {
}, this.sink.put(bytes);
writeType (type, value) { },
this.write(type.from(value)) writeType(type, value) {
}, this.write(type.from(value));
writeBytesList (bl) { },
bl.toBytesSink(this.sink) writeBytesList(bl) {
}, bl.toBytesSink(this.sink);
encodeVL (len) { },
let length = len encodeVL(len) {
const lenBytes = new Uint8Array(4) let length = len;
if (length <= 192) { const lenBytes = new Uint8Array(4);
lenBytes[0] = length if (length <= 192) {
return lenBytes.subarray(0, 1) lenBytes[0] = length;
} else if (length <= 12480) { return lenBytes.subarray(0, 1);
length -= 193 } else if (length <= 12480) {
lenBytes[0] = 193 + (length >>> 8) length -= 193;
lenBytes[1] = length & 0xff lenBytes[0] = 193 + (length >>> 8);
return lenBytes.subarray(0, 2) lenBytes[1] = length & 0xff;
} else if (length <= 918744) { return lenBytes.subarray(0, 2);
length -= 12481 } else if (length <= 918744) {
lenBytes[0] = 241 + (length >>> 16) length -= 12481;
lenBytes[1] = (length >> 8) & 0xff lenBytes[0] = 241 + (length >>> 16);
lenBytes[2] = length & 0xff lenBytes[1] = (length >> 8) & 0xff;
return lenBytes.subarray(0, 3) lenBytes[2] = length & 0xff;
} return lenBytes.subarray(0, 3);
throw new Error('Overflow error')
},
writeFieldAndValue (field, _value) {
const sink = this.sink
const value = field.associatedType.from(_value)
assert(value.toBytesSink, field)
sink.put(field.bytes)
if (field.isVLEncoded) {
this.writeLengthEncoded(value)
} else {
value.toBytesSink(sink)
if (field.type === Enums.Type.STObject) {
sink.put(Enums.Field.ObjectEndMarker.bytes)
} else if (field.type === Enums.Type.STArray) {
sink.put(Enums.Field.ArrayEndMarker.bytes)
} }
} throw new Error("Overflow error");
}, },
writeLengthEncoded (value) { writeFieldAndValue(field, _value) {
const bytes = new BytesList() const sink = this.sink;
value.toBytesSink(bytes) const value = field.associatedType.from(_value);
this.put(this.encodeVL(bytes.length)) assert(value.toBytesSink, field);
this.writeBytesList(bytes) sink.put(field.bytes);
}
}, undefined)
export { if (field.isVLEncoded) {
BytesList, this.writeLengthEncoded(value);
BinarySerializer } else {
} value.toBytesSink(sink);
if (field.type === Enums.Type.STObject) {
sink.put(Enums.Field.ObjectEndMarker.bytes);
} else if (field.type === Enums.Type.STArray) {
sink.put(Enums.Field.ArrayEndMarker.bytes);
}
}
},
writeLengthEncoded(value) {
const bytes = new BytesList();
value.toBytesSink(bytes);
this.put(this.encodeVL(bytes.length));
this.writeBytesList(bytes);
},
},
undefined
);
export { BytesList, BinarySerializer };

View File

@@ -1,108 +1,118 @@
import { strict as assert } from 'assert' import { strict as assert } from "assert";
import { makeClass } from './utils/make-class'; import { makeClass } from "./utils/make-class";
import { coreTypes } from './types'; import { coreTypes } from "./types";
import { HashPrefix } from './hash-prefixes'; import { HashPrefix } from "./hash-prefixes";
import { Sha512Half } from './hashes'; import { Sha512Half } from "./hashes";
const ShaMapNode = makeClass({ const ShaMapNode = makeClass(
virtuals: { {
hashPrefix() {}, virtuals: {
isLeaf() {}, hashPrefix() {},
isInner() {} isLeaf() {},
isInner() {},
},
cached: {
hash() {
const hasher = Sha512Half.put(this.hashPrefix());
this.toBytesSink(hasher);
return hasher.finish();
},
},
}, },
cached: { undefined
hash() { );
const hasher = Sha512Half.put(this.hashPrefix());
this.toBytesSink(hasher);
return hasher.finish();
}
}
}, undefined);
const ShaMapLeaf = makeClass({ const ShaMapLeaf = makeClass(
inherits: ShaMapNode, {
ShaMapLeaf(index, item) { inherits: ShaMapNode,
ShaMapNode.call(this); ShaMapLeaf(index, item) {
this.index = index; ShaMapNode.call(this);
this.item = item; this.index = index;
this.item = item;
},
isLeaf() {
return true;
},
isInner() {
return false;
},
hashPrefix() {
return this.item.hashPrefix();
},
toBytesSink(sink) {
this.item.toBytesSink(sink);
this.index.toBytesSink(sink);
},
}, },
isLeaf() { undefined
return true; );
},
isInner() {
return false;
},
hashPrefix() {
return this.item.hashPrefix();
},
toBytesSink(sink) {
this.item.toBytesSink(sink);
this.index.toBytesSink(sink);
}
}, undefined);
const $uper = ShaMapNode.prototype; const $uper = ShaMapNode.prototype;
const ShaMapInner = makeClass({ const ShaMapInner = makeClass(
inherits: ShaMapNode, {
ShaMapInner(depth = 0) { inherits: ShaMapNode,
ShaMapNode.call(this); ShaMapInner(depth = 0) {
this.depth = depth; ShaMapNode.call(this);
this.slotBits = 0; this.depth = depth;
this.branches = Array(16); this.slotBits = 0;
this.branches = Array(16);
},
isInner() {
return true;
},
isLeaf() {
return false;
},
hashPrefix() {
return HashPrefix.innerNode;
},
setBranch(slot, branch) {
this.slotBits = this.slotBits | (1 << slot);
this.branches[slot] = branch;
},
empty() {
return this.slotBits === 0;
},
hash() {
if (this.empty()) {
return coreTypes.Hash256.ZERO_256;
}
return $uper.hash.call(this);
},
toBytesSink(sink) {
for (let i = 0; i < this.branches.length; i++) {
const branch = this.branches[i];
const hash = branch ? branch.hash() : coreTypes.Hash256.ZERO_256;
hash.toBytesSink(sink);
}
},
addItem(index, item, leaf) {
assert(index instanceof coreTypes.Hash256);
const nibble = index.nibblet(this.depth);
const existing = this.branches[nibble];
if (!existing) {
this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
} else if (existing.isLeaf()) {
const newInner = new ShaMapInner(this.depth + 1);
newInner.addItem(existing.index, null, existing);
newInner.addItem(index, item, leaf);
this.setBranch(nibble, newInner);
} else if (existing.isInner()) {
existing.addItem(index, item, leaf);
} else {
assert(false);
}
},
}, },
isInner() { undefined
return true; );
},
isLeaf() {
return false;
},
hashPrefix() {
return HashPrefix.innerNode;
},
setBranch(slot, branch) {
this.slotBits = this.slotBits | (1 << slot);
this.branches[slot] = branch;
},
empty() {
return this.slotBits === 0;
},
hash() {
if (this.empty()) {
return coreTypes.Hash256.ZERO_256;
}
return $uper.hash.call(this);
},
toBytesSink(sink) {
for (let i = 0; i < this.branches.length; i++) {
const branch = this.branches[i];
const hash = branch ? branch.hash() : coreTypes.Hash256.ZERO_256;
hash.toBytesSink(sink);
}
},
addItem(index, item, leaf) {
assert(index instanceof coreTypes.Hash256);
const nibble = index.nibblet(this.depth);
const existing = this.branches[nibble];
if (!existing) {
this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
} else if (existing.isLeaf()) {
const newInner = new ShaMapInner(this.depth + 1);
newInner.addItem(existing.index, null, existing);
newInner.addItem(index, item, leaf);
this.setBranch(nibble, newInner);
} else if (existing.isInner()) {
existing.addItem(index, item, leaf);
} else {
assert(false);
}
}
}, undefined);
const ShaMap = makeClass({ const ShaMap = makeClass(
inherits: ShaMapInner {
}, undefined); inherits: ShaMapInner,
},
undefined
);
export { export { ShaMap };
ShaMap
};

View File

@@ -1,42 +1,45 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
const { decodeAccountID, encodeAccountID } = require('ripple-address-codec') const { decodeAccountID, encodeAccountID } = require("ripple-address-codec");
const { Hash160 } = require('./hash-160') const { Hash160 } = require("./hash-160");
const AccountID = makeClass({ const AccountID = makeClass(
AccountID (bytes) { {
Hash160.call(this, bytes) AccountID(bytes) {
}, Hash160.call(this, bytes);
inherits: Hash160,
statics: {
from (value) {
return value instanceof this ? value
: /^r/.test(value) ? this.fromBase58(value)
: new this(value)
}, },
cache: {}, inherits: Hash160,
fromCache (base58) { statics: {
let cached = this.cache[base58] from(value) {
if (!cached) { return value instanceof this
cached = this.cache[base58] = this.fromBase58(base58) ? value
} : /^r/.test(value)
return cached ? this.fromBase58(value)
: new this(value);
},
cache: {},
fromCache(base58) {
let cached = this.cache[base58];
if (!cached) {
cached = this.cache[base58] = this.fromBase58(base58);
}
return cached;
},
fromBase58(value) {
const acc = new this(decodeAccountID(value));
acc._toBase58 = value;
return acc;
},
},
toJSON() {
return this.toBase58();
},
cached: {
toBase58() {
return encodeAccountID(this._bytes);
},
}, },
fromBase58 (value) {
const acc = new this(decodeAccountID(value))
acc._toBase58 = value
return acc
}
}, },
toJSON () { undefined
return this.toBase58() );
},
cached: {
toBase58 () {
return encodeAccountID(this._bytes)
}
}
}, undefined)
export { export { AccountID };
AccountID
}

View File

@@ -1,31 +1,31 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
const _ = require('lodash') const _ = require("lodash");
const assert = require('assert') const assert = require("assert");
const BN = require('bn.js') const BN = require("bn.js");
const Decimal = require('decimal.js') const Decimal = require("decimal.js");
const { SerializedType } = require('./serialized-type') const { SerializedType } = require("./serialized-type");
const { bytesToHex } = require('../utils/bytes-utils') const { bytesToHex } = require("../utils/bytes-utils");
const { Currency } = require('./currency') const { Currency } = require("./currency");
const { AccountID } = require('./account-id') const { AccountID } = require("./account-id");
const { UInt64 } = require('./uint-64') const { UInt64 } = require("./uint-64");
const MIN_IOU_EXPONENT = -96 const MIN_IOU_EXPONENT = -96;
const MAX_IOU_EXPONENT = 80 const MAX_IOU_EXPONENT = 80;
const MAX_IOU_PRECISION = 16 const MAX_IOU_PRECISION = 16;
const MIN_IOU_MANTISSA = '1000' + '0000' + '0000' + '0000' // 16 digits const MIN_IOU_MANTISSA = "1000" + "0000" + "0000" + "0000"; // 16 digits
const MAX_IOU_MANTISSA = '9999' + '9999' + '9999' + '9999' // .. const MAX_IOU_MANTISSA = "9999" + "9999" + "9999" + "9999"; // ..
const MAX_IOU = new Decimal(`${MAX_IOU_MANTISSA}e${MAX_IOU_EXPONENT}`) const MAX_IOU = new Decimal(`${MAX_IOU_MANTISSA}e${MAX_IOU_EXPONENT}`);
const MIN_IOU = new Decimal(`${MIN_IOU_MANTISSA}e${MIN_IOU_EXPONENT}`) const MIN_IOU = new Decimal(`${MIN_IOU_MANTISSA}e${MIN_IOU_EXPONENT}`);
const DROPS_PER_XRP = new Decimal('1e6') const DROPS_PER_XRP = new Decimal("1e6");
const MAX_NETWORK_DROPS = new Decimal('1e17') const MAX_NETWORK_DROPS = new Decimal("1e17");
const MIN_XRP = new Decimal('1e-6') const MIN_XRP = new Decimal("1e-6");
const MAX_XRP = MAX_NETWORK_DROPS.dividedBy(DROPS_PER_XRP) const MAX_XRP = MAX_NETWORK_DROPS.dividedBy(DROPS_PER_XRP);
// Never use exponential form // Never use exponential form
Decimal.config({ Decimal.config({
toExpPos: MAX_IOU_EXPONENT + MAX_IOU_PRECISION, toExpPos: MAX_IOU_EXPONENT + MAX_IOU_PRECISION,
toExpNeg: MIN_IOU_EXPONENT - MAX_IOU_PRECISION toExpNeg: MIN_IOU_EXPONENT - MAX_IOU_PRECISION,
}) });
const AMOUNT_PARAMETERS_DESCRIPTION = ` const AMOUNT_PARAMETERS_DESCRIPTION = `
Native values must be described in drops, a million of which equal one XRP. Native values must be described in drops, a million of which equal one XRP.
@@ -44,173 +44,183 @@ ${MAX_IOU.toString()}
And the smallest: And the smallest:
${MIN_IOU.toString()} ${MIN_IOU.toString()}
` `;
function isDefined (val) { function isDefined(val) {
return !_.isUndefined(val) return !_.isUndefined(val);
} }
function raiseIllegalAmountError (value) { function raiseIllegalAmountError(value) {
throw new Error(`${value.toString()} is an illegal amount\n` + throw new Error(
AMOUNT_PARAMETERS_DESCRIPTION) `${value.toString()} is an illegal amount\n` + AMOUNT_PARAMETERS_DESCRIPTION
);
} }
const parsers = { const parsers = {
string (str) { string(str) {
// Using /^\d+$/ here fixes #31 // Using /^\d+$/ here fixes #31
if (!str.match(/^\d+$/)) { if (!str.match(/^\d+$/)) {
raiseIllegalAmountError(str) raiseIllegalAmountError(str);
} }
return [new Decimal(str).dividedBy(DROPS_PER_XRP), Currency.XRP] return [new Decimal(str).dividedBy(DROPS_PER_XRP), Currency.XRP];
}, },
object (object) { object(object) {
assert(isDefined(object.currency), 'currency must be defined') assert(isDefined(object.currency), "currency must be defined");
assert(isDefined(object.issuer), 'issuer must be defined') assert(isDefined(object.issuer), "issuer must be defined");
return [new Decimal(object.value), return [
new Decimal(object.value),
Currency.from(object.currency), Currency.from(object.currency),
AccountID.from(object.issuer)] AccountID.from(object.issuer),
} ];
}
const Amount = makeClass({
Amount (value, currency, issuer, validate = true) {
this.value = value || new Decimal('0')
this.currency = currency || Currency.XRP
this.issuer = issuer || null
if (validate) {
this.assertValueIsValid()
}
}, },
mixins: SerializedType, };
statics: {
from (value) { const Amount = makeClass(
if (value instanceof this) { {
return value Amount(value, currency, issuer, validate = true) {
this.value = value || new Decimal("0");
this.currency = currency || Currency.XRP;
this.issuer = issuer || null;
if (validate) {
this.assertValueIsValid();
} }
const parser = parsers[typeof value]
if (parser) {
return new this(...parser(value))
}
throw new Error(`unsupported value: ${value}`)
}, },
fromParser (parser) { mixins: SerializedType,
const mantissa = parser.read(8) statics: {
const b1 = mantissa[0] from(value) {
const b2 = mantissa[1] if (value instanceof this) {
return value;
const isIOU = b1 & 0x80
const isPositive = b1 & 0x40
const sign = isPositive ? '' : '-'
if (isIOU) {
mantissa[0] = 0
const currency = parser.readType(Currency)
const issuer = parser.readType(AccountID)
const exponent = ((b1 & 0x3F) << 2) + ((b2 & 0xff) >> 6) - 97
mantissa[1] &= 0x3F
// decimal.js won't accept e notation with hex
const value = new Decimal(`${sign}0x${bytesToHex(mantissa)}`)
.times('1e' + exponent)
return new this(value, currency, issuer, false)
}
mantissa[0] &= 0x3F
const drops = new Decimal(`${sign}0x${bytesToHex(mantissa)}`)
const xrpValue = drops.dividedBy(DROPS_PER_XRP)
return new this(xrpValue, Currency.XRP, null, false)
}
},
assertValueIsValid () {
// zero is always a valid amount value
if (!this.isZero()) {
if (this.isNative()) {
const abs = this.value.abs()
if (abs.lt(MIN_XRP) || abs.gt(MAX_XRP)) {
// value is in XRP scale, but show the value in canonical json form
raiseIllegalAmountError(this.value.times(DROPS_PER_XRP))
} }
this.verifyNoDecimal(this.value) // This is a secondary fix for #31 const parser = parsers[typeof value];
} else { if (parser) {
const p = this.value.precision() return new this(...parser(value));
const e = this.exponent()
if (p > MAX_IOU_PRECISION ||
e > MAX_IOU_EXPONENT ||
e < MIN_IOU_EXPONENT) {
raiseIllegalAmountError(this.value)
} }
} throw new Error(`unsupported value: ${value}`);
} },
}, fromParser(parser) {
isNative () { const mantissa = parser.read(8);
return this.currency.isNative() const b1 = mantissa[0];
}, const b2 = mantissa[1];
mantissa () {
// This is a tertiary fix for #31
const integerNumberString = this.verifyNoDecimal()
return new UInt64( const isIOU = b1 & 0x80;
new BN(integerNumberString)) const isPositive = b1 & 0x40;
}, const sign = isPositive ? "" : "-";
verifyNoDecimal () {
const integerNumberString = this.value
.times('1e' + -this.exponent()).abs().toString()
// Ensure that the value (after being multiplied by the exponent)
// does not contain a decimal. From the bn.js README:
// "decimals are not supported in this library."
// eslint-disable-next-line max-len
// https://github.com/indutny/bn.js/blob/9cb459f044853b46615464eea1a3ddfc7006463b/README.md
if (integerNumberString.indexOf('.') !== -1) {
raiseIllegalAmountError(integerNumberString)
}
return integerNumberString
},
isZero () {
return this.value.isZero()
},
exponent () {
return this.isNative() ? -6 : this.value.e - 15
},
valueString () {
return (this.isNative() ? this.value.times(DROPS_PER_XRP) : this.value)
.toString()
},
toBytesSink (sink) {
const isNative = this.isNative()
const notNegative = !this.value.isNegative()
const mantissa = this.mantissa().toBytes()
if (isNative) { if (isIOU) {
mantissa[0] |= notNegative ? 0x40 : 0 mantissa[0] = 0;
sink.put(mantissa) const currency = parser.readType(Currency);
} else { const issuer = parser.readType(AccountID);
mantissa[0] |= 0x80 const exponent = ((b1 & 0x3f) << 2) + ((b2 & 0xff) >> 6) - 97;
mantissa[1] &= 0x3f;
// decimal.js won't accept e notation with hex
const value = new Decimal(`${sign}0x${bytesToHex(mantissa)}`).times(
"1e" + exponent
);
return new this(value, currency, issuer, false);
}
mantissa[0] &= 0x3f;
const drops = new Decimal(`${sign}0x${bytesToHex(mantissa)}`);
const xrpValue = drops.dividedBy(DROPS_PER_XRP);
return new this(xrpValue, Currency.XRP, null, false);
},
},
assertValueIsValid() {
// zero is always a valid amount value
if (!this.isZero()) { if (!this.isZero()) {
if (notNegative) { if (this.isNative()) {
mantissa[0] |= 0x40 const abs = this.value.abs();
if (abs.lt(MIN_XRP) || abs.gt(MAX_XRP)) {
// value is in XRP scale, but show the value in canonical json form
raiseIllegalAmountError(this.value.times(DROPS_PER_XRP));
}
this.verifyNoDecimal(this.value); // This is a secondary fix for #31
} else {
const p = this.value.precision();
const e = this.exponent();
if (
p > MAX_IOU_PRECISION ||
e > MAX_IOU_EXPONENT ||
e < MIN_IOU_EXPONENT
) {
raiseIllegalAmountError(this.value);
}
} }
const exponent = this.value.e - 15
const exponentByte = 97 + exponent
mantissa[0] |= (exponentByte >>> 2)
mantissa[1] |= (exponentByte & 0x03) << 6
} }
sink.put(mantissa) },
this.currency.toBytesSink(sink) isNative() {
this.issuer.toBytesSink(sink) return this.currency.isNative();
} },
}, mantissa() {
toJSON () { // This is a tertiary fix for #31
const valueString = this.valueString() const integerNumberString = this.verifyNoDecimal();
if (this.isNative()) {
return valueString
}
return {
value: valueString,
currency: this.currency.toJSON(),
issuer: this.issuer.toJSON()
}
}
}, undefined)
export { return new UInt64(new BN(integerNumberString));
Amount },
} verifyNoDecimal() {
const integerNumberString = this.value
.times("1e" + -this.exponent())
.abs()
.toString();
// Ensure that the value (after being multiplied by the exponent)
// does not contain a decimal. From the bn.js README:
// "decimals are not supported in this library."
// eslint-disable-next-line max-len
// https://github.com/indutny/bn.js/blob/9cb459f044853b46615464eea1a3ddfc7006463b/README.md
if (integerNumberString.indexOf(".") !== -1) {
raiseIllegalAmountError(integerNumberString);
}
return integerNumberString;
},
isZero() {
return this.value.isZero();
},
exponent() {
return this.isNative() ? -6 : this.value.e - 15;
},
valueString() {
return (this.isNative()
? this.value.times(DROPS_PER_XRP)
: this.value
).toString();
},
toBytesSink(sink) {
const isNative = this.isNative();
const notNegative = !this.value.isNegative();
const mantissa = this.mantissa().toBytes();
if (isNative) {
mantissa[0] |= notNegative ? 0x40 : 0;
sink.put(mantissa);
} else {
mantissa[0] |= 0x80;
if (!this.isZero()) {
if (notNegative) {
mantissa[0] |= 0x40;
}
const exponent = this.value.e - 15;
const exponentByte = 97 + exponent;
mantissa[0] |= exponentByte >>> 2;
mantissa[1] |= (exponentByte & 0x03) << 6;
}
sink.put(mantissa);
this.currency.toBytesSink(sink);
this.issuer.toBytesSink(sink);
}
},
toJSON() {
const valueString = this.valueString();
if (this.isNative()) {
return valueString;
}
return {
value: valueString,
currency: this.currency.toJSON(),
issuer: this.issuer.toJSON(),
};
},
},
undefined
);
export { Amount };

View File

@@ -1,29 +1,30 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { parseBytes } from '../utils/bytes-utils' import { parseBytes } from "../utils/bytes-utils";
import { SerializedType } from './serialized-type' import { SerializedType } from "./serialized-type";
const Blob = makeClass({ const Blob = makeClass(
mixins: SerializedType, {
Blob (bytes) { mixins: SerializedType,
if (bytes) { Blob(bytes) {
this._bytes = parseBytes(bytes, Uint8Array) if (bytes) {
} else { this._bytes = parseBytes(bytes, Uint8Array);
this._bytes = new Uint8Array(0) } else {
} this._bytes = new Uint8Array(0);
},
statics: {
fromParser (parser, hint) {
return new this(parser.read(hint))
},
from (value) {
if (value instanceof this) {
return value
} }
return new this(value) },
} statics: {
} fromParser(parser, hint) {
}, undefined) return new this(parser.read(hint));
},
from(value) {
if (value instanceof this) {
return value;
}
return new this(value);
},
},
},
undefined
);
export { export { Blob };
Blob
}

View File

@@ -1,92 +1,93 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
const _ = require('lodash') const _ = require("lodash");
const { slice } = require('../utils/bytes-utils') const { slice } = require("../utils/bytes-utils");
const { Hash160 } = require('./hash-160') const { Hash160 } = require("./hash-160");
const ISO_REGEX = /^[A-Z0-9]{3}$/ const ISO_REGEX = /^[A-Z0-9]{3}$/;
const HEX_REGEX = /^[A-F0-9]{40}$/ const HEX_REGEX = /^[A-F0-9]{40}$/;
function isoToBytes (iso) { function isoToBytes(iso) {
const bytes = new Uint8Array(20) const bytes = new Uint8Array(20);
if (iso !== 'XRP') { if (iso !== "XRP") {
const isoBytes = iso.split('').map(c => c.charCodeAt(0)) const isoBytes = iso.split("").map((c) => c.charCodeAt(0));
bytes.set(isoBytes, 12) bytes.set(isoBytes, 12);
} }
return bytes return bytes;
} }
function isISOCode (val) { function isISOCode(val) {
return val.length === 3 // ISO_REGEX.test(val); return val.length === 3; // ISO_REGEX.test(val);
} }
function isHex (val) { function isHex(val) {
return HEX_REGEX.test(val) return HEX_REGEX.test(val);
} }
function isStringRepr (val) { function isStringRepr(val) {
return _.isString(val) && (isISOCode(val) || isHex(val)) return _.isString(val) && (isISOCode(val) || isHex(val));
} }
function isBytesArray (val) { function isBytesArray(val) {
return val.length === 20 return val.length === 20;
} }
function isValidRepr (val) { function isValidRepr(val) {
return isStringRepr(val) || isBytesArray(val) return isStringRepr(val) || isBytesArray(val);
} }
function bytesFromRepr (val) { function bytesFromRepr(val) {
if (isValidRepr(val)) { if (isValidRepr(val)) {
// We assume at this point that we have an object with a length, either 3, // We assume at this point that we have an object with a length, either 3,
// 20 or 40. // 20 or 40.
return val.length === 3 ? isoToBytes(val) : val return val.length === 3 ? isoToBytes(val) : val;
} }
throw new Error(`Unsupported Currency repr: ${val}`) throw new Error(`Unsupported Currency repr: ${val}`);
} }
const $uper = Hash160.prototype const $uper = Hash160.prototype;
const Currency = makeClass({ const Currency = makeClass(
inherits: Hash160, {
getters: ['isNative', 'iso'], inherits: Hash160,
statics: { getters: ["isNative", "iso"],
init () { statics: {
this.XRP = new this(new Uint8Array(20)) init() {
this.XRP = new this(new Uint8Array(20));
},
from(val) {
return val instanceof this ? val : new this(bytesFromRepr(val));
},
}, },
from (val) { Currency(bytes) {
return val instanceof this ? val : new this(bytesFromRepr(val)) Hash160.call(this, bytes);
} this.classify();
}, },
Currency (bytes) { classify() {
Hash160.call(this, bytes) // We only have a non null iso() property available if the currency can be
this.classify() // losslessly represented by the 3 letter iso code. If none is available a
}, // hex encoding of the full 20 bytes is the canonical representation.
classify () { let onlyISO = true;
// We only have a non null iso() property available if the currency can be
// losslessly represented by the 3 letter iso code. If none is available a
// hex encoding of the full 20 bytes is the canonical representation.
let onlyISO = true
const bytes = this._bytes const bytes = this._bytes;
const code = slice(this._bytes, 12, 15, Array) const code = slice(this._bytes, 12, 15, Array);
const iso = code.map(c => String.fromCharCode(c)).join('') const iso = code.map((c) => String.fromCharCode(c)).join("");
for (let i = bytes.length - 1; i >= 0; i--) { for (let i = bytes.length - 1; i >= 0; i--) {
if (bytes[i] !== 0 && !(i === 12 || i === 13 || i === 14)) { if (bytes[i] !== 0 && !(i === 12 || i === 13 || i === 14)) {
onlyISO = false onlyISO = false;
break break;
}
} }
} const lossLessISO = onlyISO && iso !== "XRP" && ISO_REGEX.test(iso);
const lossLessISO = onlyISO && iso !== 'XRP' && ISO_REGEX.test(iso) this._isNative = onlyISO && _.isEqual(code, [0, 0, 0]);
this._isNative = onlyISO && _.isEqual(code, [0, 0, 0]) this._iso = this._isNative ? "XRP" : lossLessISO ? iso : null;
this._iso = this._isNative ? 'XRP' : lossLessISO ? iso : null },
toJSON() {
if (this.iso()) {
return this.iso();
}
return $uper.toJSON.call(this);
},
}, },
toJSON () { undefined
if (this.iso()) { );
return this.iso()
}
return $uper.toJSON.call(this)
}
}, undefined)
export { export { Currency };
Currency
}

View File

@@ -1,11 +1,12 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { Hash } from './hash' import { Hash } from "./hash";
const Hash128 = makeClass({ const Hash128 = makeClass(
inherits: Hash, {
statics: { width: 16 } inherits: Hash,
}, undefined) statics: { width: 16 },
},
undefined
);
export { export { Hash128 };
Hash128
}

View File

@@ -1,11 +1,12 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
const { Hash } = require('./hash') const { Hash } = require("./hash");
const Hash160 = makeClass({ const Hash160 = makeClass(
inherits: Hash, {
statics: { width: 20 } inherits: Hash,
}, undefined) statics: { width: 20 },
},
undefined
);
export { export { Hash160 };
Hash160
}

View File

@@ -1,16 +1,17 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { Hash } from './hash' import { Hash } from "./hash";
const Hash256 = makeClass({ const Hash256 = makeClass(
inherits: Hash, {
statics: { inherits: Hash,
width: 32, statics: {
init () { width: 32,
this.ZERO_256 = new this(new Uint8Array(this.width)) init() {
} this.ZERO_256 = new this(new Uint8Array(this.width));
} },
}, undefined) },
},
undefined
);
export { export { Hash256 };
Hash256
}

View File

@@ -1,46 +1,48 @@
import * as assert from 'assert' import * as assert from "assert";
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { Comparable, SerializedType } from './serialized-type' import { Comparable, SerializedType } from "./serialized-type";
import { compareBytes, parseBytes } from '../utils/bytes-utils' import { compareBytes, parseBytes } from "../utils/bytes-utils";
const Hash = makeClass({ const Hash = makeClass(
Hash (bytes) { {
const width = this.constructor.width Hash(bytes) {
this._bytes = bytes ? parseBytes(bytes, Uint8Array) const width = this.constructor.width;
: new Uint8Array(width) this._bytes = bytes
assert.equal(this._bytes.length, width) ? parseBytes(bytes, Uint8Array)
}, : new Uint8Array(width);
mixins: [Comparable, SerializedType], assert.equal(this._bytes.length, width);
statics: { },
width: NaN, mixins: [Comparable, SerializedType],
from (value) { statics: {
if (value instanceof this) { width: NaN,
return value from(value) {
} if (value instanceof this) {
return new this(parseBytes(value)) return value;
}
return new this(parseBytes(value));
},
fromParser(parser, hint) {
return new this(parser.read(hint || this.width));
},
},
compareTo(other) {
return compareBytes(this._bytes, this.constructor.from(other)._bytes);
},
toString() {
return this.toHex();
},
nibblet(depth) {
const byteIx = depth > 0 ? (depth / 2) | 0 : 0;
let b = this._bytes[byteIx];
if (depth % 2 === 0) {
b = (b & 0xf0) >>> 4;
} else {
b = b & 0x0f;
}
return b;
}, },
fromParser (parser, hint) {
return new this(parser.read(hint || this.width))
}
}, },
compareTo (other) { undefined
return compareBytes(this._bytes, this.constructor.from(other)._bytes) );
},
toString () {
return this.toHex()
},
nibblet (depth) {
const byteIx = depth > 0 ? (depth / 2) | 0 : 0
let b = this._bytes[byteIx]
if (depth % 2 === 0) {
b = (b & 0xF0) >>> 4
} else {
b = b & 0x0F
}
return b
}
}, undefined)
export { export { Hash };
Hash
}

View File

@@ -1,20 +1,20 @@
import { Enums } from '../enums' import { Enums } from "../enums";
import { AccountID } from './account-id' import { AccountID } from "./account-id";
import { Amount } from './amount' import { Amount } from "./amount";
import { Blob } from './blob' import { Blob } from "./blob";
const Field = Enums.Field const Field = Enums.Field;
const { Currency } = require('./currency') const { Currency } = require("./currency");
const { Hash128 } = require('./hash-128') const { Hash128 } = require("./hash-128");
const { Hash160 } = require('./hash-160') const { Hash160 } = require("./hash-160");
const { Hash256 } = require('./hash-256') const { Hash256 } = require("./hash-256");
const { PathSet } = require('./path-set') const { PathSet } = require("./path-set");
const { STArray } = require('./st-array') const { STArray } = require("./st-array");
const { STObject } = require('./st-object') const { STObject } = require("./st-object");
const { UInt16 } = require('./uint-16') const { UInt16 } = require("./uint-16");
const { UInt32 } = require('./uint-32') const { UInt32 } = require("./uint-32");
const { UInt64 } = require('./uint-64') const { UInt64 } = require("./uint-64");
const { UInt8 } = require('./uint-8') const { UInt8 } = require("./uint-8");
const { Vector256 } = require('./vector-256') const { Vector256 } = require("./vector-256");
const coreTypes = { const coreTypes = {
AccountID, AccountID,
@@ -31,15 +31,15 @@ const coreTypes = {
UInt16, UInt16,
UInt32, UInt32,
UInt64, UInt64,
Vector256 Vector256,
} };
Field.values.forEach(field => { Field.values.forEach((field) => {
field.associatedType = coreTypes[field.type] field.associatedType = coreTypes[field.type];
}) });
Field.TransactionType.associatedType = Enums.TransactionType Field.TransactionType.associatedType = Enums.TransactionType;
Field.TransactionResult.associatedType = Enums.TransactionResult Field.TransactionResult.associatedType = Enums.TransactionResult;
Field.LedgerEntryType.associatedType = Enums.LedgerEntryType Field.LedgerEntryType.associatedType = Enums.LedgerEntryType;
export { coreTypes } export { coreTypes };

View File

@@ -1,113 +1,120 @@
/* eslint-disable no-unused-expressions */ /* eslint-disable no-unused-expressions */
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
const { SerializedType, ensureArrayLikeIs } = require('./serialized-type') const { SerializedType, ensureArrayLikeIs } = require("./serialized-type");
const { Currency } = require('./currency') const { Currency } = require("./currency");
const { AccountID } = require('./account-id') const { AccountID } = require("./account-id");
const PATHSET_END_BYTE = 0x00 const PATHSET_END_BYTE = 0x00;
const PATH_SEPARATOR_BYTE = 0xFF const PATH_SEPARATOR_BYTE = 0xff;
const TYPE_ACCOUNT = 0x01 const TYPE_ACCOUNT = 0x01;
const TYPE_CURRENCY = 0x10 const TYPE_CURRENCY = 0x10;
const TYPE_ISSUER = 0x20 const TYPE_ISSUER = 0x20;
const Hop = makeClass({ const Hop = makeClass(
statics: { {
from (value) { statics: {
if (value instanceof this) { from(value) {
return value if (value instanceof this) {
} return value;
const hop = new Hop() }
value.issuer && (hop.issuer = AccountID.from(value.issuer)) const hop = new Hop();
value.account && (hop.account = AccountID.from(value.account)) value.issuer && (hop.issuer = AccountID.from(value.issuer));
value.currency && (hop.currency = Currency.from(value.currency)) value.account && (hop.account = AccountID.from(value.account));
return hop value.currency && (hop.currency = Currency.from(value.currency));
return hop;
},
parse(parser, type) {
const hop = new Hop();
type & TYPE_ACCOUNT && (hop.account = AccountID.fromParser(parser));
type & TYPE_CURRENCY && (hop.currency = Currency.fromParser(parser));
type & TYPE_ISSUER && (hop.issuer = AccountID.fromParser(parser));
return hop;
},
}, },
parse (parser, type) { toJSON() {
const hop = new Hop(); const type = this.type();
(type & TYPE_ACCOUNT) && (hop.account = AccountID.fromParser(parser)); const ret = <any>{};
(type & TYPE_CURRENCY) && (hop.currency = Currency.fromParser(parser)); type & TYPE_ACCOUNT && (ret.account = this.account.toJSON());
(type & TYPE_ISSUER) && (hop.issuer = AccountID.fromParser(parser)) type & TYPE_ISSUER && (ret.issuer = this.issuer.toJSON());
return hop type & TYPE_CURRENCY && (ret.currency = this.currency.toJSON());
} return ret;
}, },
toJSON () { type() {
const type = this.type() let type = 0;
const ret = <any>{}; this.issuer && (type += TYPE_ISSUER);
(type & TYPE_ACCOUNT) && (ret.account = this.account.toJSON()); this.account && (type += TYPE_ACCOUNT);
(type & TYPE_ISSUER) && (ret.issuer = this.issuer.toJSON()); this.currency && (type += TYPE_CURRENCY);
(type & TYPE_CURRENCY) && (ret.currency = this.currency.toJSON()) return type;
return ret
},
type () {
let type = 0
this.issuer && (type += TYPE_ISSUER)
this.account && (type += TYPE_ACCOUNT)
this.currency && (type += TYPE_CURRENCY)
return type
}
}, undefined)
const Path = makeClass({
inherits: Array,
statics: {
from (value) {
return ensureArrayLikeIs(Path, value).withChildren(Hop)
}
},
toJSON () {
return this.map(k => k.toJSON())
}
}, undefined)
const PathSet = makeClass({
mixins: SerializedType,
inherits: Array,
statics: {
from (value) {
return ensureArrayLikeIs(PathSet, value).withChildren(Path)
}, },
fromParser (parser) {
const pathSet = new this()
let path
while (!parser.end()) {
const type = parser.readUInt8()
if (type === PATHSET_END_BYTE) {
break
}
if (type === PATH_SEPARATOR_BYTE) {
path = null
continue
}
if (!path) {
path = new Path()
pathSet.push(path)
}
path.push(Hop.parse(parser, type))
}
return pathSet
}
}, },
toJSON () { undefined
return this.map(k => k.toJSON()) );
},
toBytesSink (sink) {
let n = 0
this.forEach(path => {
if (n++ !== 0) {
sink.put([PATH_SEPARATOR_BYTE])
}
path.forEach(hop => {
sink.put([hop.type()])
hop.account && (hop.account.toBytesSink(sink))
hop.currency && (hop.currency.toBytesSink(sink))
hop.issuer && (hop.issuer.toBytesSink(sink))
})
})
sink.put([PATHSET_END_BYTE])
}
}, undefined)
export { const Path = makeClass(
PathSet {
} inherits: Array,
statics: {
from(value) {
return ensureArrayLikeIs(Path, value).withChildren(Hop);
},
},
toJSON() {
return this.map((k) => k.toJSON());
},
},
undefined
);
const PathSet = makeClass(
{
mixins: SerializedType,
inherits: Array,
statics: {
from(value) {
return ensureArrayLikeIs(PathSet, value).withChildren(Path);
},
fromParser(parser) {
const pathSet = new this();
let path;
while (!parser.end()) {
const type = parser.readUInt8();
if (type === PATHSET_END_BYTE) {
break;
}
if (type === PATH_SEPARATOR_BYTE) {
path = null;
continue;
}
if (!path) {
path = new Path();
pathSet.push(path);
}
path.push(Hop.parse(parser, type));
}
return pathSet;
},
},
toJSON() {
return this.map((k) => k.toJSON());
},
toBytesSink(sink) {
let n = 0;
this.forEach((path) => {
if (n++ !== 0) {
sink.put([PATH_SEPARATOR_BYTE]);
}
path.forEach((hop) => {
sink.put([hop.type()]);
hop.account && hop.account.toBytesSink(sink);
hop.currency && hop.currency.toBytesSink(sink);
hop.issuer && hop.issuer.toBytesSink(sink);
});
});
sink.put([PATHSET_END_BYTE]);
},
},
undefined
);
export { PathSet };

View File

@@ -1,64 +1,60 @@
import { BytesList } from '../serdes/binary-serializer' import { BytesList } from "../serdes/binary-serializer";
const { bytesToHex, slice } = require('../utils/bytes-utils') const { bytesToHex, slice } = require("../utils/bytes-utils");
const Comparable = { const Comparable = {
lt (other) { lt(other) {
return this.compareTo(other) < 0 return this.compareTo(other) < 0;
}, },
eq (other) { eq(other) {
return this.compareTo(other) === 0 return this.compareTo(other) === 0;
}, },
gt (other) { gt(other) {
return this.compareTo(other) > 0 return this.compareTo(other) > 0;
}, },
gte (other) { gte(other) {
return this.compareTo(other) > -1 return this.compareTo(other) > -1;
}, },
lte (other) { lte(other) {
return this.compareTo(other) < 1 return this.compareTo(other) < 1;
} },
} };
const SerializedType = { const SerializedType = {
toBytesSink (sink) { toBytesSink(sink) {
sink.put(this._bytes) sink.put(this._bytes);
}, },
toHex () { toHex() {
return bytesToHex(this.toBytes()) return bytesToHex(this.toBytes());
}, },
toBytes () { toBytes() {
if (this._bytes) { if (this._bytes) {
return slice(this._bytes) return slice(this._bytes);
} }
const bl = new BytesList() const bl = new BytesList();
this.toBytesSink(bl) this.toBytesSink(bl);
return bl.toBytes() return bl.toBytes();
}, },
toJSON () { toJSON() {
return this.toHex() return this.toHex();
}, },
toString () { toString() {
return this.toHex() return this.toHex();
} },
} };
function ensureArrayLikeIs (Type, arrayLike) { function ensureArrayLikeIs(Type, arrayLike) {
return { return {
withChildren (Child) { withChildren(Child) {
if (arrayLike instanceof Type) { if (arrayLike instanceof Type) {
return arrayLike return arrayLike;
} }
const obj = new Type() const obj = new Type();
for (let i = 0; i < arrayLike.length; i++) { for (let i = 0; i < arrayLike.length; i++) {
obj.push(Child.from(arrayLike[i])) obj.push(Child.from(arrayLike[i]));
} }
return obj return obj;
} },
} };
} }
export { export { ensureArrayLikeIs, SerializedType, Comparable };
ensureArrayLikeIs,
SerializedType,
Comparable
}

View File

@@ -1,38 +1,39 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { ensureArrayLikeIs, SerializedType } from './serialized-type' import { ensureArrayLikeIs, SerializedType } from "./serialized-type";
import { Enums } from '../enums' import { Enums } from "../enums";
import { STObject } from './st-object' import { STObject } from "./st-object";
const { ArrayEndMarker } = Enums.Field const { ArrayEndMarker } = Enums.Field;
const STArray = makeClass({ const STArray = makeClass(
mixins: SerializedType, {
inherits: Array, mixins: SerializedType,
statics: { inherits: Array,
fromParser (parser) { statics: {
const array = new STArray() fromParser(parser) {
while (!parser.end()) { const array = new STArray();
const field = parser.readField() while (!parser.end()) {
if (field === ArrayEndMarker) { const field = parser.readField();
break if (field === ArrayEndMarker) {
break;
}
const outer = new STObject();
outer[field] = parser.readFieldValue(field);
array.push(outer);
} }
const outer = new STObject() return array;
outer[field] = parser.readFieldValue(field) },
array.push(outer) from(value) {
} return ensureArrayLikeIs(STArray, value).withChildren(STObject);
return array },
},
toJSON() {
return this.map((v) => v.toJSON());
},
toBytesSink(sink) {
this.forEach((so) => so.toBytesSink(sink));
}, },
from (value) {
return ensureArrayLikeIs(STArray, value).withChildren(STObject)
}
}, },
toJSON () { undefined
return this.map(v => v.toJSON()) );
},
toBytesSink (sink) {
this.forEach(so => so.toBytesSink(sink))
}
}, undefined)
export { export { STArray };
STArray
}

View File

@@ -1,66 +1,77 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { Enums } from '../enums' import { Enums } from "../enums";
const _ = require('lodash') const _ = require("lodash");
const { BinarySerializer } = require('../serdes/binary-serializer') const { BinarySerializer } = require("../serdes/binary-serializer");
const { ObjectEndMarker } = Enums.Field const { ObjectEndMarker } = Enums.Field;
const { SerializedType } = require('./serialized-type') const { SerializedType } = require("./serialized-type");
const STObject = makeClass({ const STObject = makeClass(
mixins: SerializedType, {
statics: { mixins: SerializedType,
fromParser (parser, hint) { statics: {
const end = typeof hint === 'number' ? parser.pos() + hint : null fromParser(parser, hint) {
const so = new this() const end = typeof hint === "number" ? parser.pos() + hint : null;
while (!parser.end(end)) { const so = new this();
const field = parser.readField() while (!parser.end(end)) {
if (field === ObjectEndMarker) { const field = parser.readField();
break if (field === ObjectEndMarker) {
} break;
so[field] = parser.readFieldValue(field)
}
return so
},
from (value) {
if (value instanceof this) {
return value
}
if (typeof value === 'object') {
return _.transform(value, (so, val, key) => {
const field = Enums.Field[key]
if (field) {
so[field] = field.associatedType.from(val)
} else {
so[key] = val
} }
}, new this()) so[field] = parser.readFieldValue(field);
} }
throw new Error(`${value} is unsupported`) return so;
} },
from(value) {
if (value instanceof this) {
return value;
}
if (typeof value === "object") {
return _.transform(
value,
(so, val, key) => {
const field = Enums.Field[key];
if (field) {
so[field] = field.associatedType.from(val);
} else {
so[key] = val;
}
},
new this()
);
}
throw new Error(`${value} is unsupported`);
},
},
fieldKeys() {
return Object.keys(this)
.map((k) => Enums.Field[k])
.filter(Boolean);
},
toJSON() {
// Otherwise seemingly result will have same prototype as `this`
const accumulator = {}; // of only `own` properties
return _.transform(
this,
(result, value, key) => {
result[key] = value && value.toJSON ? value.toJSON() : value;
},
accumulator
);
},
toBytesSink(sink, filter = () => true) {
const serializer = new BinarySerializer(sink);
const fields = this.fieldKeys();
const sorted = _.sortBy(fields, "ordinal");
sorted.filter(filter).forEach((field) => {
const value = this[field];
if (!field.isSerialized) {
return;
}
serializer.writeFieldAndValue(field, value);
});
},
}, },
fieldKeys () { undefined
return Object.keys(this).map(k => Enums.Field[k]).filter(Boolean) );
},
toJSON () {
// Otherwise seemingly result will have same prototype as `this`
const accumulator = {} // of only `own` properties
return _.transform(this, (result, value, key) => {
result[key] = value && value.toJSON ? value.toJSON() : value
}, accumulator)
},
toBytesSink (sink, filter = () => true) {
const serializer = new BinarySerializer(sink)
const fields = this.fieldKeys()
const sorted = _.sortBy(fields, 'ordinal')
sorted.filter(filter).forEach(field => {
const value = this[field]
if (!field.isSerialized) {
return
}
serializer.writeFieldAndValue(field, value)
})
}
}, undefined)
export { export { STObject };
STObject
}

View File

@@ -1,11 +1,12 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { UInt } from './uint' import { UInt } from "./uint";
const UInt16 = makeClass({ const UInt16 = makeClass(
inherits: UInt, {
statics: { width: 2 } inherits: UInt,
}, undefined) statics: { width: 2 },
},
undefined
);
export { export { UInt16 };
UInt16
}

View File

@@ -1,11 +1,12 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { UInt } from './uint' import { UInt } from "./uint";
const UInt32 = makeClass({ const UInt32 = makeClass(
inherits: UInt, {
statics: { width: 4 } inherits: UInt,
}, undefined) statics: { width: 4 },
},
undefined
);
export { export { UInt32 };
UInt32
}

View File

@@ -1,49 +1,50 @@
import { strict as assert } from 'assert' import { strict as assert } from "assert";
import { BN } from 'bn.js' import { BN } from "bn.js";
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { bytesToHex, parseBytes, serializeUIntN } from '../utils/bytes-utils' import { bytesToHex, parseBytes, serializeUIntN } from "../utils/bytes-utils";
import { UInt } from './uint' import { UInt } from "./uint";
const HEX_REGEX = /^[A-F0-9]{16}$/ const HEX_REGEX = /^[A-F0-9]{16}$/;
const UInt64 = makeClass({ const UInt64 = makeClass(
inherits: UInt, {
statics: { width: 8 }, inherits: UInt,
UInt64 (arg : any = 0) { statics: { width: 8 },
const argType = typeof arg UInt64(arg: any = 0) {
if (argType === 'number') { const argType = typeof arg;
assert(arg >= 0) if (argType === "number") {
this._bytes = new Uint8Array(8) assert(arg >= 0);
this._bytes.set(serializeUIntN(arg, 4), 4) this._bytes = new Uint8Array(8);
} else if (arg instanceof BN) { this._bytes.set(serializeUIntN(arg, 4), 4);
this._bytes = parseBytes(arg.toArray('be', 8), Uint8Array) } else if (arg instanceof BN) {
this._toBN = arg this._bytes = parseBytes(arg.toArray("be", 8), Uint8Array);
} else { this._toBN = arg;
if (argType === 'string') { } else {
if (!HEX_REGEX.test(arg)) { if (argType === "string") {
throw new Error(`${arg} is not a valid UInt64 hex string`) if (!HEX_REGEX.test(arg)) {
throw new Error(`${arg} is not a valid UInt64 hex string`);
}
} }
this._bytes = parseBytes(arg, Uint8Array);
} }
this._bytes = parseBytes(arg, Uint8Array) assert(this._bytes.length === 8);
} },
assert(this._bytes.length === 8) toJSON() {
return bytesToHex(this._bytes);
},
valueOf() {
return this.toBN();
},
cached: {
toBN() {
return new BN(this._bytes);
},
},
toBytes() {
return this._bytes;
},
}, },
toJSON () { undefined
return bytesToHex(this._bytes) );
},
valueOf () {
return this.toBN()
},
cached: {
toBN () {
return new BN(this._bytes)
}
},
toBytes () {
return this._bytes
}
}, undefined)
export { export { UInt64 };
UInt64
}

View File

@@ -1,11 +1,12 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
import { UInt } from './uint' import { UInt } from "./uint";
const UInt8 = makeClass({ const UInt8 = makeClass(
inherits: UInt, {
statics: { width: 1 } inherits: UInt,
}, undefined) statics: { width: 1 },
},
undefined
);
export { export { UInt8 };
UInt8
}

View File

@@ -1,61 +1,64 @@
import { strict as assert } from 'assert' import { strict as assert } from "assert";
import { BN } from 'bn.js' import { BN } from "bn.js";
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
const { Comparable, SerializedType } = require('./serialized-type') const { Comparable, SerializedType } = require("./serialized-type");
const { serializeUIntN } = require('../utils/bytes-utils') const { serializeUIntN } = require("../utils/bytes-utils");
const MAX_VALUES = [0, 255, 65535, 16777215, 4294967295] const MAX_VALUES = [0, 255, 65535, 16777215, 4294967295];
function signum (a, b) { function signum(a, b) {
return a < b ? -1 : a === b ? 0 : 1 return a < b ? -1 : a === b ? 0 : 1;
} }
const UInt = makeClass({ const UInt = makeClass(
mixins: [Comparable, SerializedType], {
UInt (val = 0) { mixins: [Comparable, SerializedType],
const max = MAX_VALUES[this.constructor.width] UInt(val = 0) {
if (val < 0 || !(val <= max)) { const max = MAX_VALUES[this.constructor.width];
throw new Error(`${val} not in range 0 <= $val <= ${max}`) if (val < 0 || !(val <= max)) {
} throw new Error(`${val} not in range 0 <= $val <= ${max}`);
this.val = val }
}, this.val = val;
statics: { },
width: 0, statics: {
fromParser (parser) { width: 0,
const val = this.width > 4 ? parser.read(this.width) fromParser(parser) {
: parser.readUIntN(this.width) const val =
return new this(val) this.width > 4
? parser.read(this.width)
: parser.readUIntN(this.width);
return new this(val);
},
from(val) {
return val instanceof this ? val : new this(val);
},
},
toJSON() {
return this.val;
},
valueOf() {
return this.val;
},
compareTo(other) {
const thisValue = this.valueOf();
const otherValue = other.valueOf();
if (thisValue instanceof BN) {
return otherValue instanceof BN
? thisValue.cmp(otherValue)
: thisValue.cmpn(otherValue);
} else if (otherValue instanceof BN) {
return -other.compareTo(this);
}
assert(typeof otherValue === "number");
return signum(thisValue, otherValue);
},
toBytesSink(sink) {
sink.put(this.toBytes());
},
toBytes() {
return serializeUIntN(this.val, this.constructor.width);
}, },
from (val) {
return val instanceof this ? val : new this(val)
}
}, },
toJSON () { undefined
return this.val );
},
valueOf () {
return this.val
},
compareTo (other) {
const thisValue = this.valueOf()
const otherValue = other.valueOf()
if (thisValue instanceof BN) {
return otherValue instanceof BN
? thisValue.cmp(otherValue)
: thisValue.cmpn(otherValue)
} else if (otherValue instanceof BN) {
return -other.compareTo(this)
}
assert(typeof otherValue === 'number')
return signum(thisValue, otherValue)
},
toBytesSink (sink) {
sink.put(this.toBytes())
},
toBytes () {
return serializeUIntN(this.val, this.constructor.width)
}
}, undefined)
export { export { UInt };
UInt
}

View File

@@ -1,32 +1,33 @@
import { makeClass } from '../utils/make-class' import { makeClass } from "../utils/make-class";
const { Hash256 } = require('./hash-256') const { Hash256 } = require("./hash-256");
const { ensureArrayLikeIs, SerializedType } = require('./serialized-type') const { ensureArrayLikeIs, SerializedType } = require("./serialized-type");
const Vector256 = makeClass({ const Vector256 = makeClass(
mixins: SerializedType, {
inherits: Array, mixins: SerializedType,
statics: { inherits: Array,
fromParser (parser, hint) { statics: {
const vector256 = new this() fromParser(parser, hint) {
const bytes = hint !== null ? hint : parser.size() - parser.pos() const vector256 = new this();
const hashes = bytes / 32 const bytes = hint !== null ? hint : parser.size() - parser.pos();
for (let i = 0; i < hashes; i++) { const hashes = bytes / 32;
vector256.push(Hash256.fromParser(parser)) for (let i = 0; i < hashes; i++) {
} vector256.push(Hash256.fromParser(parser));
return vector256 }
return vector256;
},
from(value) {
return ensureArrayLikeIs(Vector256, value).withChildren(Hash256);
},
},
toBytesSink(sink) {
this.forEach((h) => h.toBytesSink(sink));
},
toJSON() {
return this.map((hash) => hash.toJSON());
}, },
from (value) {
return ensureArrayLikeIs(Vector256, value).withChildren(Hash256)
}
}, },
toBytesSink (sink) { undefined
this.forEach(h => h.toBytesSink(sink)) );
},
toJSON () {
return this.map(hash => hash.toJSON())
}
}, undefined)
export { export { Vector256 };
Vector256
}

View File

@@ -1,113 +1,107 @@
import { strict as assert } from 'assert' import { strict as assert } from "assert";
function signum (a, b) { function signum(a, b) {
return a < b ? -1 : a === b ? 0 : 1 return a < b ? -1 : a === b ? 0 : 1;
} }
const hexLookup = (function () { const hexLookup = (function () {
const res = <any>{} const res = <any>{};
const reverse = res.reverse = new Array(256) const reverse = (res.reverse = new Array(256));
for (let i = 0; i < 16; i++) { for (let i = 0; i < 16; i++) {
const char = i.toString(16).toUpperCase() const char = i.toString(16).toUpperCase();
res[char] = i res[char] = i;
for (let j = 0; j < 16; j++) { for (let j = 0; j < 16; j++) {
const char2 = j.toString(16).toUpperCase() const char2 = j.toString(16).toUpperCase();
const byte = (i << 4) + j const byte = (i << 4) + j;
const byteHex = char + char2 const byteHex = char + char2;
res[byteHex] = byte res[byteHex] = byte;
reverse[byte] = byteHex reverse[byte] = byteHex;
} }
} }
return res return res;
}()) })();
const reverseHexLookup = hexLookup.reverse const reverseHexLookup = hexLookup.reverse;
function bytesToHex (sequence) { function bytesToHex(sequence) {
const buf = Array(sequence.length) const buf = Array(sequence.length);
for (let i = sequence.length - 1; i >= 0; i--) { for (let i = sequence.length - 1; i >= 0; i--) {
buf[i] = reverseHexLookup[sequence[i]] buf[i] = reverseHexLookup[sequence[i]];
} }
return buf.join('') return buf.join("");
} }
function byteForHex (hex) { function byteForHex(hex) {
const byte = hexLookup[hex] const byte = hexLookup[hex];
if (byte === undefined) { if (byte === undefined) {
throw new Error(`\`${hex}\` is not a valid hex representation of a byte`) throw new Error(`\`${hex}\` is not a valid hex representation of a byte`);
} }
return byte return byte;
} }
function parseBytes (val, Output = <any>Array) { function parseBytes(val, Output = <any>Array) {
if (!val || val.length === undefined) { if (!val || val.length === undefined) {
throw new Error(`${val} is not a sequence`) throw new Error(`${val} is not a sequence`);
} }
if (typeof val === 'string') { if (typeof val === "string") {
const start = val.length % 2 const start = val.length % 2;
const res = new Output((val.length + start) / 2) const res = new Output((val.length + start) / 2);
for (let i = val.length, to = res.length - 1; to >= start; i -= 2, to--) { for (let i = val.length, to = res.length - 1; to >= start; i -= 2, to--) {
res[to] = byteForHex(val.slice(i - 2, i)) res[to] = byteForHex(val.slice(i - 2, i));
} }
if (start === 1) { if (start === 1) {
res[0] = byteForHex(val[0]) res[0] = byteForHex(val[0]);
} }
return res return res;
} else if (val instanceof Output) { } else if (val instanceof Output) {
return val return val;
} else if (Output === Uint8Array) { } else if (Output === Uint8Array) {
return new Output(val) return new Output(val);
} }
const res = new Output(val.length) const res = new Output(val.length);
for (let i = val.length - 1; i >= 0; i--) { for (let i = val.length - 1; i >= 0; i--) {
res[i] = val[i] res[i] = val[i];
} }
return res return res;
} }
function serializeUIntN (val, width) { function serializeUIntN(val, width) {
const newBytes = new Uint8Array(width) const newBytes = new Uint8Array(width);
const lastIx = width - 1 const lastIx = width - 1;
for (let i = 0; i < width; i++) { for (let i = 0; i < width; i++) {
newBytes[lastIx - i] = (val >>> (i * 8) & 0xff) newBytes[lastIx - i] = (val >>> (i * 8)) & 0xff;
} }
return newBytes return newBytes;
} }
function compareBytes (a, b) { function compareBytes(a, b) {
assert(a.length === b.length) assert(a.length === b.length);
for (let i = 0; i < a.length; i++) { for (let i = 0; i < a.length; i++) {
const cmp = signum(a[i], b[i]) const cmp = signum(a[i], b[i]);
if (cmp !== 0) { if (cmp !== 0) {
return cmp return cmp;
} }
} }
return 0 return 0;
} }
function slice (val, startIx = 0, endIx = val.length, Output = val.constructor) { function slice(val, startIx = 0, endIx = val.length, Output = val.constructor) {
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */
if (startIx < 0) { if (startIx < 0) {
startIx += val.length startIx += val.length;
} }
if (endIx < 0) { if (endIx < 0) {
endIx += val.length endIx += val.length;
} }
/* eslint-enable no-param-reassign */ /* eslint-enable no-param-reassign */
const len = endIx - startIx const len = endIx - startIx;
const res = new Output(len) const res = new Output(len);
for (let i = endIx - 1; i >= startIx; i--) { for (let i = endIx - 1; i >= startIx; i--) {
res[i - startIx] = val[i] res[i - startIx] = val[i];
} }
return res return res;
} }
export { export { parseBytes, bytesToHex, slice, compareBytes, serializeUIntN };
parseBytes,
bytesToHex,
slice,
compareBytes,
serializeUIntN
}

View File

@@ -1,83 +1,85 @@
import _ = require('lodash'); import _ = require("lodash");
const inherits = require('inherits') const inherits = require("inherits");
function forEach (obj, func) { function forEach(obj, func) {
Object.keys(obj || {}).forEach(k => { Object.keys(obj || {}).forEach((k) => {
func(obj[k], k) func(obj[k], k);
}) });
} }
function ensureArray (val) { function ensureArray(val) {
return Array.isArray(val) ? val : [val] return Array.isArray(val) ? val : [val];
} }
export function makeClass (klass_, definition_) { export function makeClass(klass_, definition_) {
const definition = definition_ || klass_ const definition = definition_ || klass_;
let klass = typeof klass_ === 'function' ? klass_ : null let klass = typeof klass_ === "function" ? klass_ : null;
if (klass === null) { if (klass === null) {
for (const k in definition) { for (const k in definition) {
if (k[0].match(/[A-Z]/)) { if (k[0].match(/[A-Z]/)) {
klass = definition[k] klass = definition[k];
break break;
} }
} }
} }
const parent = definition.inherits const parent = definition.inherits;
if (parent) { if (parent) {
if (klass === null) { if (klass === null) {
klass = function () { klass = function () {
parent.apply(this, arguments) parent.apply(this, arguments);
} };
} }
inherits(klass, parent) inherits(klass, parent);
_.defaults(klass, parent) _.defaults(klass, parent);
} }
if (klass === null) { if (klass === null) {
klass = function () {} klass = function () {};
} }
const proto = klass.prototype const proto = klass.prototype;
function addFunc (original, name, wrapper) { function addFunc(original, name, wrapper) {
proto[name] = wrapper || original proto[name] = wrapper || original;
} }
(definition.getters || []).forEach(k => { (definition.getters || []).forEach((k) => {
const key = '_' + k const key = "_" + k;
proto[k] = function () { proto[k] = function () {
return this[key] return this[key];
} };
}) });
forEach(definition.virtuals, (f, n) => { forEach(definition.virtuals, (f, n) => {
addFunc(f, n, function () { addFunc(f, n, function () {
throw new Error('unimplemented') throw new Error("unimplemented");
}) });
}) });
forEach(definition.methods, addFunc) forEach(definition.methods, addFunc);
forEach(definition, (f, n) => { forEach(definition, (f, n) => {
if (_.isFunction(f) && f !== klass) { if (_.isFunction(f) && f !== klass) {
addFunc(f, n, undefined) addFunc(f, n, undefined);
} }
}) });
_.assign(klass, definition.statics) _.assign(klass, definition.statics);
if (typeof klass.init === 'function') { if (typeof klass.init === "function") {
klass.init() klass.init();
} }
forEach(definition.cached, (f, n) => { forEach(definition.cached, (f, n) => {
const key = '_' + n const key = "_" + n;
addFunc(f, n, function () { addFunc(f, n, function () {
let value = this[key] let value = this[key];
if (value === undefined) { if (value === undefined) {
value = this[key] = f.call(this) value = this[key] = f.call(this);
} }
return value return value;
}) });
}) });
if (definition.mixins) { if (definition.mixins) {
const mixins = {} const mixins = {};
// Right-most in the list win // Right-most in the list win
ensureArray(definition.mixins).reverse().forEach(o => { ensureArray(definition.mixins)
_.defaults(mixins, o) .reverse()
}) .forEach((o) => {
_.defaults(proto, mixins) _.defaults(mixins, o);
});
_.defaults(proto, mixins);
} }
return klass return klass;
}; }

View File

@@ -0,0 +1,7 @@
// We need this file to run ESLint on our tests
// https://github.com/typescript-eslint/typescript-eslint/blob/master/packages/parser/README.md#configuration
{
// extend your base config so you don't have to redefine your compilerOptions
"extends": "./tsconfig.json",
"include": ["src/**/*.ts"]
}

File diff suppressed because it is too large Load Diff