Refactored ./src/serdes (#79)

Refactored `serdes` to use TypeScript Classes and documented functionality.
This commit is contained in:
Nathan Nichols
2020-07-06 16:16:32 -05:00
parent 7da60d30b5
commit 485ec4e924
13 changed files with 361 additions and 231 deletions

View File

@@ -35,7 +35,7 @@
"typescript": "^3.9.5" "typescript": "^3.9.5"
}, },
"scripts": { "scripts": {
"compile": "tsc && cp ./src/definitions/definitions.json ./dist/definitions", "compile": "tsc && cp ./src/enums/definitions.json ./dist/enums",
"prepare": "npm run compile && npm test", "prepare": "npm run compile && npm test",
"test": "jest --coverage=true", "test": "jest --coverage=true",
"lint": "eslint . --ext .ts --fix" "lint": "eslint . --ext .ts --fix"

View File

@@ -4,7 +4,7 @@ import {
LedgerEntryType, LedgerEntryType,
Type, Type,
TransactionResult, TransactionResult,
} from "./definitions"; } from "./enums";
const types = require("./types"); const types = require("./types");
const binary = require("./binary"); const binary = require("./binary");
const { ShaMap } = require("./shamap"); const { ShaMap } = require("./shamap");

View File

@@ -9,7 +9,7 @@ const TRANSACTION_RESULT_WIDTH = 1;
/* /*
* @brief: Serialize a field based on type_code and Field.nth * @brief: Serialize a field based on type_code and Field.nth
*/ */
function fieldHeader(type: number, nth: number): Uint8Array { function fieldHeader(type: number, nth: number): Buffer {
const header: Array<number> = []; const header: Array<number> = [];
if (type < 16) { if (type < 16) {
if (nth < 16) { if (nth < 16) {
@@ -22,7 +22,7 @@ function fieldHeader(type: number, nth: number): Uint8Array {
} else { } else {
header.push(0, type, nth); header.push(0, type, nth);
} }
return new Uint8Array(header); return Buffer.from(header);
} }
/* /*
@@ -69,7 +69,7 @@ class BytesLookup {
} }
/* /*
* type FieldInfo is the type of the objects constaining information about each field in definitions.json * type FieldInfo is the type of the objects containing information about each field in definitions.json
*/ */
interface FieldInfo { interface FieldInfo {
nth: number; nth: number;
@@ -87,12 +87,13 @@ interface FieldInstance {
readonly type: Bytes; readonly type: Bytes;
readonly ordinal: number; readonly ordinal: number;
readonly name: string; readonly name: string;
readonly header: Uint8Array; readonly header: Buffer;
readonly associatedType: any; readonly associatedType: any;
} }
function buildField([name, info]: [string, FieldInfo]): FieldInstance { function buildField([name, info]: [string, FieldInfo]): FieldInstance {
const typeOrdinal = enums.TYPES[info.type]; const typeOrdinal = enums.TYPES[info.type];
const field = fieldHeader(typeOrdinal, info.nth);
return { return {
name: name, name: name,
nth: info.nth, nth: info.nth,
@@ -101,13 +102,13 @@ function buildField([name, info]: [string, FieldInfo]): FieldInstance {
isSigningField: info.isSigningField, isSigningField: info.isSigningField,
ordinal: (typeOrdinal << 16) | info.nth, ordinal: (typeOrdinal << 16) | info.nth,
type: new Bytes(info.type, typeOrdinal, TYPE_WIDTH), type: new Bytes(info.type, typeOrdinal, TYPE_WIDTH),
header: fieldHeader(typeOrdinal, info.nth), header: field,
associatedType: undefined, // For later assignment in ./types/index.js associatedType: undefined, // For later assignment in ./types/index.js
}; };
} }
/* /*
* @brief: The collection of all fields as defined in definitons.json * @brief: The collection of all fields as defined in definitions.json
*/ */
class FieldLookup { class FieldLookup {
constructor(fields: Array<[string, FieldInfo]>) { constructor(fields: Array<[string, FieldInfo]>) {
@@ -137,4 +138,11 @@ const TransactionResult = new BytesLookup(
); );
const Field = new FieldLookup(enums.FIELDS as Array<[string, FieldInfo]>); const Field = new FieldLookup(enums.FIELDS as Array<[string, FieldInfo]>);
export { Field, Type, LedgerEntryType, TransactionResult, TransactionType }; export {
Field,
FieldInstance,
Type,
LedgerEntryType,
TransactionResult,
TransactionType,
};

View File

@@ -1,103 +1,179 @@
import { strict as assert } from "assert"; import * as assert from "assert";
import { makeClass } from "../utils/make-class"; import { Field, FieldInstance } from "../enums";
import { Field } from "../definitions";
import { slice, parseBytes } from "../utils/bytes-utils";
const BinaryParser = makeClass( /**
{ * BinaryParser is used to compute fields and values from a HexString
BinaryParser(buf) { */
this._buf = parseBytes(buf, Uint8Array); class BinaryParser {
this._length = this._buf.length; private bytes: Buffer;
this._cursor = 0;
}, /**
skip(n) { * Initialize bytes to a hex string
this._cursor += n; *
}, * @param hexBytes a hex string
read(n, to = Uint8Array) { */
const start = this._cursor; constructor(hexBytes: string) {
const end = this._cursor + n; this.bytes = Buffer.from(hexBytes, "hex");
assert(end <= this._buf.length); }
this._cursor = end;
return slice(this._buf, start, end, to); /**
}, * Consume the first n bytes of the BinaryParser
readUIntN(n) { *
return this.read(n, Array).reduce((a, b) => (a << 8) | b) >>> 0; * @param n the number of bytes to skip
}, */
readUInt8() { skip(n: number): void {
return this._buf[this._cursor++]; assert(n <= this.bytes.byteLength);
}, this.bytes = this.bytes.slice(n);
readUInt16() { }
return this.readUIntN(2);
}, /**
readUInt32() { * read the first n bytes from the BinaryParser
return this.readUIntN(4); *
}, * @param n The number of bytes to read
pos() { * @return The bytes
return this._cursor; */
}, read(n: number): Buffer {
size() { assert(n <= this.bytes.byteLength);
return this._buf.length;
}, const slice = this.bytes.slice(0, n);
end(customEnd) { this.skip(n);
const cursor = this.pos(); return slice;
return ( }
cursor >= this._length || (customEnd !== null && cursor >= customEnd)
/**
* Read an integer of given size
*
* @param n The number of bytes to read
* @return The number represented by those bytes
*/
readUIntN(n: number): number {
assert(0 < n && n <= 4, "invalid n");
return this.read(n).reduce((a, b) => (a << 8) | b) >>> 0;
}
readUInt8(): number {
return this.readUIntN(1);
}
readUInt16(): number {
return this.readUIntN(2);
}
readUInt32(): number {
return this.readUIntN(4);
}
size(): number {
return this.bytes.byteLength;
}
end(customEnd?: number): boolean {
const length = this.bytes.byteLength;
return length === 0 || (customEnd !== undefined && length <= customEnd);
}
/**
* Reads variable length encoded bytes
*
* @return The variable length bytes
*/
readVariableLength(): Buffer {
return this.read(this.readVariableLengthLength());
}
/**
* Reads the length of the variable length encoded bytes
*
* @return The length of the variable length encoded bytes
*/
readVariableLengthLength(): number {
const b1 = this.readUInt8();
if (b1 <= 192) {
return b1;
} else if (b1 <= 240) {
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
} else if (b1 <= 254) {
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error("Invalid variable length indicator");
}
/**
* Reads the field ordinal from the BinaryParser
*
* @return Field ordinal
*/
readFieldOrdinal(): number {
const tagByte = this.readUInt8();
const type = (tagByte & 0xf0) >>> 4 || this.readUInt8();
const nth = tagByte & 0x0f || this.readUInt8();
return (type << 16) | nth;
}
/**
* Read the field from the BinaryParser
*
* @return The field represented by the bytes at the head of the BinaryParser
*/
readField(): FieldInstance {
return Field.fromString(this.readFieldOrdinal().toString());
}
/**
* Read a given type from the BinaryParser
*
* @param type The type that you want to read from the BinaryParser
* @return The instance of that type read from the BinaryParser
*/
readType(type) {
return type.fromParser(this);
}
/**
* Get the type associated with a given field
*
   * @param field The field that you want to get the type of
* @return The type associated with the given field
*/
typeForField(field: FieldInstance) {
return field.associatedType;
}
/**
* Read value of the type specified by field from the BinaryParser
*
* @param field The field that you want to get the associated value for
* @return The value associated with the given field
*/
readFieldValue(field: FieldInstance) {
const type = this.typeForField(field);
if (!type) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
const sizeHint = field.isVariableLengthEncoded
? this.readVariableLengthLength()
: null;
const value = type.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `
); );
}, }
readVL() { return value;
return this.read(this.readVLLength()); }
},
readVLLength() { /**
const b1 = this.readUInt8(); * Get the next field and value from the BinaryParser
if (b1 <= 192) { *
return b1; * @return The field and value
} else if (b1 <= 240) { */
const b2 = this.readUInt8(); readFieldAndValue() {
return 193 + (b1 - 193) * 256 + b2; const field = this.readField();
} else if (b1 <= 254) { return [field, this.readFieldValue(field)];
const b2 = this.readUInt8(); }
const b3 = this.readUInt8(); }
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error("Invalid varint length indicator");
},
readFieldOrdinal() {
const tagByte = this.readUInt8();
const type = (tagByte & 0xf0) >>> 4 || this.readUInt8();
const nth = tagByte & 0x0f || this.readUInt8();
return (type << 16) | nth;
},
readField() {
return Field.fromString(this.readFieldOrdinal().toString());
},
readType(type) {
return type.fromParser(this);
},
typeForField(field) {
return field.associatedType;
},
readFieldValue(field) {
const kls = this.typeForField(field);
if (!kls) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
const sizeHint = field.isVariableLengthEncoded
? this.readVLLength()
: null;
const value = kls.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `
);
}
return value;
},
readFieldAndValue() {
const field = this.readField();
return [field, this.readFieldValue(field)];
},
},
undefined
);
export { BinaryParser }; export { BinaryParser };

View File

@@ -1,110 +1,157 @@
import { strict as assert } from "assert"; import * as assert from "assert";
import { parseBytes, bytesToHex } from "../utils/bytes-utils"; import { Field, FieldInstance } from "../enums";
import { makeClass } from "../utils/make-class";
import { Field } from "../definitions";
const BytesSink = { /**
put(/* bytesSequence */) { * Bytes list is a collection of buffer objects
// any hex string or any object with a `length` and where 0 <= [ix] <= 255 */
}, class BytesList {
}; private bytesArray: Array<Buffer> = [];
const BytesList = makeClass( /**
{ * Get the total number of bytes in the BytesList
implementing: BytesSink, *
BytesList() { * @return the number of bytes
this.arrays = []; */
this.length = 0; public getLength(): number {
}, return Buffer.concat(this.bytesArray).byteLength;
put(bytesArg) { }
const bytes = parseBytes(bytesArg, Uint8Array);
this.length += bytes.length;
this.arrays.push(bytes);
return this;
},
toBytesSink(sink) {
this.arrays.forEach((arr) => {
sink.put(arr);
});
},
toBytes() {
const concatenated = new Uint8Array(this.length);
let pointer = 0;
this.arrays.forEach((arr) => {
concatenated.set(arr, pointer);
pointer += arr.length;
});
return concatenated;
},
toHex() {
return bytesToHex(this.toBytes());
},
},
undefined
);
const BinarySerializer = makeClass( /**
{ * Put bytes in the BytesList
BinarySerializer(sink) { *
this.sink = sink; * @param bytesArg A Buffer
}, * @return this BytesList
write(value) { */
value.toBytesSink(this.sink); public put(bytesArg: Buffer): BytesList {
}, const bytes = Buffer.from(bytesArg); // Temporary, to catch instances of Uint8Array being passed in
put(bytes) { this.bytesArray.push(bytes);
this.sink.put(bytes); return this;
}, }
writeType(type, value) {
this.write(type.from(value)); /**
}, * Write this BytesList to the back of another bytes list
writeBytesList(bl) { *
bl.toBytesSink(this.sink); * @param list The BytesList to write to
}, */
encodeVL(len) { public toBytesSink(list: BytesList): void {
let length = len; list.put(this.toBytes());
const lenBytes = new Uint8Array(4); }
if (length <= 192) {
lenBytes[0] = length; public toBytes(): Buffer {
return lenBytes.subarray(0, 1); return Buffer.concat(this.bytesArray);
} else if (length <= 12480) { }
length -= 193;
lenBytes[0] = 193 + (length >>> 8); toHex(): string {
lenBytes[1] = length & 0xff; return this.toBytes().toString("hex").toUpperCase();
return lenBytes.subarray(0, 2); }
} else if (length <= 918744) { }
length -= 12481;
lenBytes[0] = 241 + (length >>> 16); /**
lenBytes[1] = (length >> 8) & 0xff; * BinarySerializer is used to write fields and values to buffers
lenBytes[2] = length & 0xff; */
return lenBytes.subarray(0, 3); class BinarySerializer {
private sink: BytesList = new BytesList();
constructor(sink: BytesList) {
this.sink = sink;
}
/**
* Write a value to this BinarySerializer
*
* @param value a SerializedType value
*/
write(value): void {
value.toBytesSink(this.sink);
}
/**
* Write bytes to this BinarySerializer
*
* @param bytes the bytes to write
*/
put(bytes: Buffer): void {
this.sink.put(bytes);
}
/**
* Write a value of a given type to this BinarySerializer
*
* @param type the type to write
* @param value a value of that type
*/
writeType(type, value): void {
this.write(type.from(value));
}
/**
* Write BytesList to this BinarySerializer
*
* @param bl BytesList to write to BinarySerializer
*/
writeBytesList(bl: BytesList): void {
bl.toBytesSink(this.sink);
}
/**
* Calculate the header of Variable Length encoded bytes
*
* @param length the length of the bytes
*/
private encodeVariableLength(length: number): Buffer {
const lenBytes = Buffer.alloc(3);
if (length <= 192) {
lenBytes[0] = length;
return lenBytes.slice(0, 1);
} else if (length <= 12480) {
length -= 193;
lenBytes[0] = 193 + (length >>> 8);
lenBytes[1] = length & 0xff;
return lenBytes.slice(0, 2);
} else if (length <= 918744) {
length -= 12481;
lenBytes[0] = 241 + (length >>> 16);
lenBytes[1] = (length >> 8) & 0xff;
lenBytes[2] = length & 0xff;
return lenBytes.slice(0, 3);
}
throw new Error("Overflow error");
}
/**
* Write field and value to BinarySerializer
*
* @param field field to write to BinarySerializer
* @param value value to write to BinarySerializer
*/
writeFieldAndValue(field: FieldInstance, value): void {
const associatedValue = field.associatedType.from(value);
assert(associatedValue.toBytesSink, field.name);
this.sink.put(field.header);
if (field.isVariableLengthEncoded) {
this.writeLengthEncoded(associatedValue);
} else {
associatedValue.toBytesSink(this.sink);
if (field.type.name === "STObject") {
this.sink.put(Field["ObjectEndMarker"].header);
} else if (field.type.name === "STArray") {
this.sink.put(Field["ArrayEndMarker"].header);
} }
throw new Error("Overflow error"); }
}, }
writeFieldAndValue(field, _value) {
const sink = this.sink;
const value = field.associatedType.from(_value);
assert(value.toBytesSink, field);
sink.put(field.header);
if (field.isVariableLengthEncoded) { /**
this.writeLengthEncoded(value); * Write a variable length encoded value to the BinarySerializer
} else { *
value.toBytesSink(sink); * @param value length encoded value to write to BytesList
if (field.type.name === "STObject") { */
sink.put(Field["ObjectEndMarker"].header); public writeLengthEncoded(value): void {
} else if (field.type.name === "STArray") { const bytes = new BytesList();
sink.put(Field["ArrayEndMarker"].header); value.toBytesSink(bytes);
} this.put(this.encodeVariableLength(bytes.getLength()));
} this.writeBytesList(bytes);
}, }
writeLengthEncoded(value) { }
const bytes = new BytesList();
value.toBytesSink(bytes);
this.put(this.encodeVL(bytes.length));
this.writeBytesList(bytes);
},
},
undefined
);
export { BytesList, BinarySerializer }; export { BytesList, BinarySerializer };

View File

@@ -3,7 +3,7 @@ import {
TransactionResult, TransactionResult,
TransactionType, TransactionType,
LedgerEntryType, LedgerEntryType,
} from "../definitions"; } from "../enums";
import { AccountID } from "./account-id"; import { AccountID } from "./account-id";
import { Amount } from "./amount"; import { Amount } from "./amount";
import { Blob } from "./blob"; import { Blob } from "./blob";

View File

@@ -1,5 +1,5 @@
import { makeClass } from "../utils/make-class"; import { makeClass } from "../utils/make-class";
import { Field } from "../definitions"; import { Field } from "../enums";
const _ = require("lodash"); const _ = require("lodash");
const { BinarySerializer } = require("../serdes/binary-serializer"); const { BinarySerializer } = require("../serdes/binary-serializer");
const { SerializedType } = require("./serialized-type"); const { SerializedType } = require("./serialized-type");

View File

@@ -8,7 +8,7 @@ const { encodeAccountID } = require('ripple-address-codec')
const { binary } = require('../dist/coretypes') const { binary } = require('../dist/coretypes')
const { Amount, Hash160 } = coreTypes const { Amount, Hash160 } = coreTypes
const { makeParser, readJSON } = binary const { makeParser, readJSON } = binary
const { Field, TransactionType } = require('./../dist/definitions') const { Field, TransactionType } = require('./../dist/enums')
const { parseHexOnly, hexOnly, loadFixture } = require('./utils') const { parseHexOnly, hexOnly, loadFixture } = require('./utils')
const { bytesToHex } = require('../dist/utils/bytes-utils') const { bytesToHex } = require('../dist/utils/bytes-utils')
const fixtures = loadFixture('data-driven-tests.json') const fixtures = loadFixture('data-driven-tests.json')
@@ -37,13 +37,12 @@ function basicApiTests () {
const bytes = parseHexOnly('00,01020304,0506', Uint8Array) const bytes = parseHexOnly('00,01020304,0506', Uint8Array)
test('can read slices of bytes', () => { test('can read slices of bytes', () => {
const parser = makeParser(bytes) const parser = makeParser(bytes)
expect(parser.pos()).toBe(0) expect(parser.bytes instanceof Buffer).toBe(true)
expect(parser._buf instanceof Uint8Array).toBe(true)
const read1 = parser.read(1) const read1 = parser.read(1)
expect(read1 instanceof Uint8Array).toBe(true) expect(read1 instanceof Buffer).toBe(true)
expect(read1).toEqual(Uint8Array.from([0])) expect(read1).toEqual(Buffer.from([0]))
expect(parser.read(4)).toEqual(Uint8Array.from([1, 2, 3, 4])) expect(parser.read(4)).toEqual(Buffer.from([1, 2, 3, 4]))
expect(parser.read(2)).toEqual(Uint8Array.from([5, 6])) expect(parser.read(2)).toEqual(Buffer.from([5, 6]))
expect(() => parser.read(1)).toThrow() expect(() => parser.read(1)).toThrow()
}) })
test('can read a Uint32 at full', () => { test('can read a Uint32 at full', () => {
@@ -108,12 +107,12 @@ function transactionParsingTests () {
expect(parser.readField()).toEqual(Field.Fee) expect(parser.readField()).toEqual(Field.Fee)
expect(parser.read(8)).not.toEqual([]) expect(parser.read(8)).not.toEqual([])
expect(parser.readField()).toEqual(Field.SigningPubKey) expect(parser.readField()).toEqual(Field.SigningPubKey)
expect(parser.readVLLength()).toBe(33) expect(parser.readVariableLengthLength()).toBe(33)
expect(bytesToHex(parser.read(33))).toEqual(tx_json.SigningPubKey) expect(bytesToHex(parser.read(33))).toEqual(tx_json.SigningPubKey)
expect(parser.readField()).toEqual(Field.TxnSignature) expect(parser.readField()).toEqual(Field.TxnSignature)
expect(bytesToHex(parser.readVL())).toEqual(tx_json.TxnSignature) expect(bytesToHex(parser.readVariableLength())).toEqual(tx_json.TxnSignature)
expect(parser.readField()).toEqual(Field.Account) expect(parser.readField()).toEqual(Field.Account)
expect(encodeAccountID(parser.readVL())).toEqual(tx_json.Account) expect(encodeAccountID(parser.readVariableLength())).toEqual(tx_json.Account)
expect(parser.end()).toBe(true) expect(parser.end()).toBe(true)
}) })

View File

@@ -52,18 +52,18 @@ const PaymentChannel = {
} }
function bytesListTest () { function bytesListTest () {
const list = new BytesList().put([0]).put([2, 3]).put([4, 5]) const list = new BytesList().put(Buffer.from([0])).put(Buffer.from([2, 3])).put(Buffer.from([4, 5]))
test('is an Array<Uint8Array>', function () { test('is an Array<Buffer>', function () {
expect(Array.isArray(list.arrays)).toBe(true) expect(Array.isArray(list.bytesArray)).toBe(true)
expect(list.arrays[0] instanceof Uint8Array).toBe(true) expect(list.bytesArray[0] instanceof Buffer).toBe(true)
}) })
test('keeps track of the length itself', function () { test('keeps track of the length itself', function () {
expect(list).toHaveLength(5) expect(list.getLength()).toBe(5)
}) })
test('can join all arrays into one via toBytes', function () { test('can join all arrays into one via toBytes', function () {
const joined = list.toBytes() const joined = list.toBytes()
expect(joined).toHaveLength(5) expect(joined).toHaveLength(5)
expect(joined).toEqual(Uint8Array.from([0, 2, 3, 4, 5])) expect(joined).toEqual(Buffer.from([0, 2, 3, 4, 5]))
}) })
} }
@@ -95,7 +95,7 @@ function check (type, n, expected) {
return return
} }
serializer.writeType(type, n) serializer.writeType(type, n)
expect(bl.toBytes()).toEqual(Uint8Array.from(expected)) expect(bl.toBytes()).toEqual(Buffer.from(expected))
}) })
} }

View File

@@ -943,7 +943,7 @@
"name": "TickSize", "name": "TickSize",
"nth_of_type": 16, "nth_of_type": 16,
"type": 16, "type": 16,
"expected_hex": "01010" "expected_hex": "001010"
} }
], ],
"whole_objects": [ "whole_objects": [