Refactored ./src/serdes (#79)

Refactored `serdes` to use TypeScript Classes and documented functionality.
Nathan Nichols
2020-07-06 16:16:32 -05:00
parent 7da60d30b5
commit 485ec4e924
13 changed files with 361 additions and 231 deletions

View File

@@ -35,7 +35,7 @@
"typescript": "^3.9.5"
},
"scripts": {
"compile": "tsc && cp ./src/definitions/definitions.json ./dist/definitions",
"compile": "tsc && cp ./src/enums/definitions.json ./dist/enums",
"prepare": "npm run compile && npm test",
"test": "jest --coverage=true",
"lint": "eslint . --ext .ts --fix"

View File

@@ -4,7 +4,7 @@ import {
LedgerEntryType,
Type,
TransactionResult,
} from "./definitions";
} from "./enums";
const types = require("./types");
const binary = require("./binary");
const { ShaMap } = require("./shamap");

View File

@@ -9,7 +9,7 @@ const TRANSACTION_RESULT_WIDTH = 1;
/*
* @brief: Serialize a field based on type_code and Field.nth
*/
function fieldHeader(type: number, nth: number): Uint8Array {
function fieldHeader(type: number, nth: number): Buffer {
const header: Array<number> = [];
if (type < 16) {
if (nth < 16) {
@@ -22,7 +22,7 @@ function fieldHeader(type: number, nth: number): Uint8Array {
} else {
header.push(0, type, nth);
}
return new Uint8Array(header);
return Buffer.from(header);
}
/*
@@ -69,7 +69,7 @@ class BytesLookup {
}
/*
* type FieldInfo is the type of the objects constaining information about each field in definitions.json
* type FieldInfo is the type of the objects containing information about each field in definitions.json
*/
interface FieldInfo {
nth: number;
@@ -87,12 +87,13 @@ interface FieldInstance {
readonly type: Bytes;
readonly ordinal: number;
readonly name: string;
readonly header: Uint8Array;
readonly header: Buffer;
readonly associatedType: any;
}
function buildField([name, info]: [string, FieldInfo]): FieldInstance {
const typeOrdinal = enums.TYPES[info.type];
const field = fieldHeader(typeOrdinal, info.nth);
return {
name: name,
nth: info.nth,
@@ -101,13 +102,13 @@ function buildField([name, info]: [string, FieldInfo]): FieldInstance {
isSigningField: info.isSigningField,
ordinal: (typeOrdinal << 16) | info.nth,
type: new Bytes(info.type, typeOrdinal, TYPE_WIDTH),
header: fieldHeader(typeOrdinal, info.nth),
header: field,
associatedType: undefined, // For later assignment in ./types/index.js
};
}
/*
* @brief: The collection of all fields as defined in definitons.json
* @brief: The collection of all fields as defined in definitions.json
*/
class FieldLookup {
constructor(fields: Array<[string, FieldInfo]>) {
@@ -137,4 +138,11 @@ const TransactionResult = new BytesLookup(
);
const Field = new FieldLookup(enums.FIELDS as Array<[string, FieldInfo]>);
export { Field, Type, LedgerEntryType, TransactionResult, TransactionType };
export {
Field,
FieldInstance,
Type,
LedgerEntryType,
TransactionResult,
TransactionType,
};
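For reference, here is a minimal sketch of the header-packing rule that `fieldHeader` implements. Only the final `header.push(0, type, nth)` branch is visible verbatim in the hunk above; the other branches are assumptions based on the surrounding code. The type code and field index `nth` pack into a single byte when both fit in a nibble, and spill into two or three bytes otherwise; the parser's `readFieldOrdinal` reverses this and combines the result as `(type << 16) | nth`.

```ts
// Sketch of the packing performed by fieldHeader(type, nth).
// Branches other than the final one are assumed, not shown in the hunk above.
function fieldHeaderSketch(type: number, nth: number): Buffer {
  const header: number[] = [];
  if (type < 16 && nth < 16) {
    header.push((type << 4) | nth); // both fit in one nibble: a single byte
  } else if (type < 16) {
    header.push(type << 4, nth);    // small type, large nth: two bytes
  } else if (nth < 16) {
    header.push(nth, type);         // large type, small nth: two bytes
  } else {
    header.push(0, type, nth);      // both >= 16: three bytes
  }
  return Buffer.from(header);
}

// TickSize is type 16, nth 16, so its header is the three bytes 00 10 10 --
// which is why the fixture's expected_hex changes to "001010" at the end of
// this commit.
console.log(fieldHeaderSketch(16, 16).toString("hex")); // "001010"
console.log(fieldHeaderSketch(2, 2).toString("hex"));   // "22" (single-byte header)
```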

View File

@@ -1,103 +1,179 @@
import { strict as assert } from "assert";
import { makeClass } from "../utils/make-class";
import { Field } from "../definitions";
import { slice, parseBytes } from "../utils/bytes-utils";
import * as assert from "assert";
import { Field, FieldInstance } from "../enums";
const BinaryParser = makeClass(
{
BinaryParser(buf) {
this._buf = parseBytes(buf, Uint8Array);
this._length = this._buf.length;
this._cursor = 0;
},
skip(n) {
this._cursor += n;
},
read(n, to = Uint8Array) {
const start = this._cursor;
const end = this._cursor + n;
assert(end <= this._buf.length);
this._cursor = end;
return slice(this._buf, start, end, to);
},
readUIntN(n) {
return this.read(n, Array).reduce((a, b) => (a << 8) | b) >>> 0;
},
readUInt8() {
return this._buf[this._cursor++];
},
readUInt16() {
return this.readUIntN(2);
},
readUInt32() {
return this.readUIntN(4);
},
pos() {
return this._cursor;
},
size() {
return this._buf.length;
},
end(customEnd) {
const cursor = this.pos();
return (
cursor >= this._length || (customEnd !== null && cursor >= customEnd)
/**
* BinaryParser is used to compute fields and values from a HexString
*/
class BinaryParser {
private bytes: Buffer;
/**
* Initialize bytes from a hex string
*
* @param hexBytes a hex string
*/
constructor(hexBytes: string) {
this.bytes = Buffer.from(hexBytes, "hex");
}
/**
* Consume the first n bytes of the BinaryParser
*
* @param n the number of bytes to skip
*/
skip(n: number): void {
assert(n <= this.bytes.byteLength);
this.bytes = this.bytes.slice(n);
}
/**
* read the first n bytes from the BinaryParser
*
* @param n The number of bytes to read
* @return The bytes
*/
read(n: number): Buffer {
assert(n <= this.bytes.byteLength);
const slice = this.bytes.slice(0, n);
this.skip(n);
return slice;
}
/**
* Read an integer of given size
*
* @param n The number of bytes to read
* @return The number represented by those bytes
*/
readUIntN(n: number): number {
assert(0 < n && n <= 4, "invalid n");
return this.read(n).reduce((a, b) => (a << 8) | b) >>> 0;
}
readUInt8(): number {
return this.readUIntN(1);
}
readUInt16(): number {
return this.readUIntN(2);
}
readUInt32(): number {
return this.readUIntN(4);
}
size(): number {
return this.bytes.byteLength;
}
end(customEnd?: number): boolean {
const length = this.bytes.byteLength;
return length === 0 || (customEnd !== undefined && length <= customEnd);
}
/**
* Reads variable length encoded bytes
*
* @return The variable length bytes
*/
readVariableLength(): Buffer {
return this.read(this.readVariableLengthLength());
}
/**
* Reads the length of the variable length encoded bytes
*
* @return The length of the variable length encoded bytes
*/
readVariableLengthLength(): number {
const b1 = this.readUInt8();
if (b1 <= 192) {
return b1;
} else if (b1 <= 240) {
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
} else if (b1 <= 254) {
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error("Invalid variable length indicator");
}
/**
* Reads the field ordinal from the BinaryParser
*
* @return Field ordinal
*/
readFieldOrdinal(): number {
const tagByte = this.readUInt8();
const type = (tagByte & 0xf0) >>> 4 || this.readUInt8();
const nth = tagByte & 0x0f || this.readUInt8();
return (type << 16) | nth;
}
/**
* Read the field from the BinaryParser
*
* @return The field represented by the bytes at the head of the BinaryParser
*/
readField(): FieldInstance {
return Field.fromString(this.readFieldOrdinal().toString());
}
/**
* Read a given type from the BinaryParser
*
* @param type The type that you want to read from the BinaryParser
* @return The instance of that type read from the BinaryParser
*/
readType(type) {
return type.fromParser(this);
}
/**
* Get the type associated with a given field
*
* @param field The field that you want to get the type of
* @return The type associated with the given field
*/
typeForField(field: FieldInstance) {
return field.associatedType;
}
/**
* Read value of the type specified by field from the BinaryParser
*
* @param field The field that you want to get the associated value for
* @return The value associated with the given field
*/
readFieldValue(field: FieldInstance) {
const type = this.typeForField(field);
if (!type) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
const sizeHint = field.isVariableLengthEncoded
? this.readVariableLengthLength()
: null;
const value = type.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `
);
},
readVL() {
return this.read(this.readVLLength());
},
readVLLength() {
const b1 = this.readUInt8();
if (b1 <= 192) {
return b1;
} else if (b1 <= 240) {
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
} else if (b1 <= 254) {
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error("Invalid varint length indicator");
},
readFieldOrdinal() {
const tagByte = this.readUInt8();
const type = (tagByte & 0xf0) >>> 4 || this.readUInt8();
const nth = tagByte & 0x0f || this.readUInt8();
return (type << 16) | nth;
},
readField() {
return Field.fromString(this.readFieldOrdinal().toString());
},
readType(type) {
return type.fromParser(this);
},
typeForField(field) {
return field.associatedType;
},
readFieldValue(field) {
const kls = this.typeForField(field);
if (!kls) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
const sizeHint = field.isVariableLengthEncoded
? this.readVLLength()
: null;
const value = kls.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error(
`fromParser for (${field.name}, ${field.type.name}) -> undefined `
);
}
return value;
},
readFieldAndValue() {
const field = this.readField();
return [field, this.readFieldValue(field)];
},
},
undefined
);
}
return value;
}
/**
* Get the next field and value from the BinaryParser
*
* @return The field and value
*/
readFieldAndValue() {
const field = this.readField();
return [field, this.readFieldValue(field)];
}
}
export { BinaryParser };
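A minimal usage sketch of the new class, with an assumed module path and a made-up payload. The hex string starts with `73`, the one-byte field header for SigningPubKey (type 7 = Blob, nth 3), followed by the variable-length prefix `21` (0x21 = 33 bytes), mirroring the test expectations further down:

```ts
import { BinaryParser } from "./binary-parser"; // path assumed

// Field header for SigningPubKey, a 1-byte length prefix, then 33 dummy bytes.
const parser = new BinaryParser("73" + "21" + "AA".repeat(33));

console.log(parser.readFieldOrdinal() === ((7 << 16) | 3)); // true
const blob = parser.readVariableLength(); // reads the length prefix, then 33 bytes
console.log(blob.length);                 // 33
console.log(parser.end());                // true -- every byte has been consumed
```

Note that `skip` and `read` now slice the internal Buffer from the front, so `size()` shrinks as bytes are consumed and `end()` simply checks whether anything is left.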

View File

@@ -1,110 +1,157 @@
import { strict as assert } from "assert";
import { parseBytes, bytesToHex } from "../utils/bytes-utils";
import { makeClass } from "../utils/make-class";
import { Field } from "../definitions";
import * as assert from "assert";
import { Field, FieldInstance } from "../enums";
const BytesSink = {
put(/* bytesSequence */) {
// any hex string or any object with a `length` and where 0 <= [ix] <= 255
},
};
/**
* Bytes list is a collection of buffer objects
*/
class BytesList {
private bytesArray: Array<Buffer> = [];
const BytesList = makeClass(
{
implementing: BytesSink,
BytesList() {
this.arrays = [];
this.length = 0;
},
put(bytesArg) {
const bytes = parseBytes(bytesArg, Uint8Array);
this.length += bytes.length;
this.arrays.push(bytes);
return this;
},
toBytesSink(sink) {
this.arrays.forEach((arr) => {
sink.put(arr);
});
},
toBytes() {
const concatenated = new Uint8Array(this.length);
let pointer = 0;
this.arrays.forEach((arr) => {
concatenated.set(arr, pointer);
pointer += arr.length;
});
return concatenated;
},
toHex() {
return bytesToHex(this.toBytes());
},
},
undefined
);
/**
* Get the total number of bytes in the BytesList
*
* @return the number of bytes
*/
public getLength(): number {
return Buffer.concat(this.bytesArray).byteLength;
}
const BinarySerializer = makeClass(
{
BinarySerializer(sink) {
this.sink = sink;
},
write(value) {
value.toBytesSink(this.sink);
},
put(bytes) {
this.sink.put(bytes);
},
writeType(type, value) {
this.write(type.from(value));
},
writeBytesList(bl) {
bl.toBytesSink(this.sink);
},
encodeVL(len) {
let length = len;
const lenBytes = new Uint8Array(4);
if (length <= 192) {
lenBytes[0] = length;
return lenBytes.subarray(0, 1);
} else if (length <= 12480) {
length -= 193;
lenBytes[0] = 193 + (length >>> 8);
lenBytes[1] = length & 0xff;
return lenBytes.subarray(0, 2);
} else if (length <= 918744) {
length -= 12481;
lenBytes[0] = 241 + (length >>> 16);
lenBytes[1] = (length >> 8) & 0xff;
lenBytes[2] = length & 0xff;
return lenBytes.subarray(0, 3);
/**
* Put bytes in the BytesList
*
* @param bytesArg A Buffer
* @return this BytesList
*/
public put(bytesArg: Buffer): BytesList {
const bytes = Buffer.from(bytesArg); // Temporary, to catch instances of Uint8Array being passed in
this.bytesArray.push(bytes);
return this;
}
/**
* Write this BytesList to the back of another bytes list
*
* @param list The BytesList to write to
*/
public toBytesSink(list: BytesList): void {
list.put(this.toBytes());
}
public toBytes(): Buffer {
return Buffer.concat(this.bytesArray);
}
toHex(): string {
return this.toBytes().toString("hex").toUpperCase();
}
}
/**
* BinarySerializer is used to write fields and values to buffers
*/
class BinarySerializer {
private sink: BytesList = new BytesList();
constructor(sink: BytesList) {
this.sink = sink;
}
/**
* Write a value to this BinarySerializer
*
* @param value a SerializedType value
*/
write(value): void {
value.toBytesSink(this.sink);
}
/**
* Write bytes to this BinarySerializer
*
* @param bytes the bytes to write
*/
put(bytes: Buffer): void {
this.sink.put(bytes);
}
/**
* Write a value of a given type to this BinarySerializer
*
* @param type the type to write
* @param value a value of that type
*/
writeType(type, value): void {
this.write(type.from(value));
}
/**
* Write BytesList to this BinarySerializer
*
* @param bl BytesList to write to BinarySerializer
*/
writeBytesList(bl: BytesList): void {
bl.toBytesSink(this.sink);
}
/**
* Calculate the header of Variable Length encoded bytes
*
* @param length the length of the bytes
*/
private encodeVariableLength(length: number): Buffer {
const lenBytes = Buffer.alloc(3);
if (length <= 192) {
lenBytes[0] = length;
return lenBytes.slice(0, 1);
} else if (length <= 12480) {
length -= 193;
lenBytes[0] = 193 + (length >>> 8);
lenBytes[1] = length & 0xff;
return lenBytes.slice(0, 2);
} else if (length <= 918744) {
length -= 12481;
lenBytes[0] = 241 + (length >>> 16);
lenBytes[1] = (length >> 8) & 0xff;
lenBytes[2] = length & 0xff;
return lenBytes.slice(0, 3);
}
throw new Error("Overflow error");
}
/**
* Write field and value to BinarySerializer
*
* @param field field to write to BinarySerializer
* @param value value to write to BinarySerializer
*/
writeFieldAndValue(field: FieldInstance, value): void {
const associatedValue = field.associatedType.from(value);
assert(associatedValue.toBytesSink, field.name);
this.sink.put(field.header);
if (field.isVariableLengthEncoded) {
this.writeLengthEncoded(associatedValue);
} else {
associatedValue.toBytesSink(this.sink);
if (field.type.name === "STObject") {
this.sink.put(Field["ObjectEndMarker"].header);
} else if (field.type.name === "STArray") {
this.sink.put(Field["ArrayEndMarker"].header);
}
throw new Error("Overflow error");
},
writeFieldAndValue(field, _value) {
const sink = this.sink;
const value = field.associatedType.from(_value);
assert(value.toBytesSink, field);
sink.put(field.header);
}
}
if (field.isVariableLengthEncoded) {
this.writeLengthEncoded(value);
} else {
value.toBytesSink(sink);
if (field.type.name === "STObject") {
sink.put(Field["ObjectEndMarker"].header);
} else if (field.type.name === "STArray") {
sink.put(Field["ArrayEndMarker"].header);
}
}
},
writeLengthEncoded(value) {
const bytes = new BytesList();
value.toBytesSink(bytes);
this.put(this.encodeVL(bytes.length));
this.writeBytesList(bytes);
},
},
undefined
);
/**
* Write a variable length encoded value to the BinarySerializer
*
* @param value length encoded value to write to BytesList
*/
public writeLengthEncoded(value): void {
const bytes = new BytesList();
value.toBytesSink(bytes);
this.put(this.encodeVariableLength(bytes.getLength()));
this.writeBytesList(bytes);
}
}
export { BytesList, BinarySerializer };
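And a short sketch of the serializer side, again with an assumed import path. `BytesList` accumulates Buffers and flattens them on demand; `BinarySerializer` writes into such a sink, and `writeLengthEncoded` prefixes whatever the value serializes to with the 1-3 byte header produced by `encodeVariableLength`. Since a `BytesList` has a `toBytesSink` method of its own, it can stand in for a value here:

```ts
import { BytesList, BinarySerializer } from "./binary-serializer"; // path assumed

const list = new BytesList()
  .put(Buffer.from([0xde, 0xad]))
  .put(Buffer.from([0xbe, 0xef]));
console.log(list.getLength()); // 4
console.log(list.toHex());     // "DEADBEEF"

// Length-prefix the accumulated bytes: 4 <= 192, so the prefix is the single
// byte 0x04.
const sink = new BytesList();
new BinarySerializer(sink).writeLengthEncoded(list);
console.log(sink.toHex()); // "04DEADBEEF"
```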

View File

@@ -3,7 +3,7 @@ import {
TransactionResult,
TransactionType,
LedgerEntryType,
} from "../definitions";
} from "../enums";
import { AccountID } from "./account-id";
import { Amount } from "./amount";
import { Blob } from "./blob";

View File

@@ -1,5 +1,5 @@
import { makeClass } from "../utils/make-class";
import { Field } from "../definitions";
import { Field } from "../enums";
const _ = require("lodash");
const { BinarySerializer } = require("../serdes/binary-serializer");
const { SerializedType } = require("./serialized-type");

View File

@@ -8,7 +8,7 @@ const { encodeAccountID } = require('ripple-address-codec')
const { binary } = require('../dist/coretypes')
const { Amount, Hash160 } = coreTypes
const { makeParser, readJSON } = binary
const { Field, TransactionType } = require('./../dist/definitions')
const { Field, TransactionType } = require('./../dist/enums')
const { parseHexOnly, hexOnly, loadFixture } = require('./utils')
const { bytesToHex } = require('../dist/utils/bytes-utils')
const fixtures = loadFixture('data-driven-tests.json')
@@ -37,13 +37,12 @@ function basicApiTests () {
const bytes = parseHexOnly('00,01020304,0506', Uint8Array)
test('can read slices of bytes', () => {
const parser = makeParser(bytes)
expect(parser.pos()).toBe(0)
expect(parser._buf instanceof Uint8Array).toBe(true)
expect(parser.bytes instanceof Buffer).toBe(true)
const read1 = parser.read(1)
expect(read1 instanceof Uint8Array).toBe(true)
expect(read1).toEqual(Uint8Array.from([0]))
expect(parser.read(4)).toEqual(Uint8Array.from([1, 2, 3, 4]))
expect(parser.read(2)).toEqual(Uint8Array.from([5, 6]))
expect(read1 instanceof Buffer).toBe(true)
expect(read1).toEqual(Buffer.from([0]))
expect(parser.read(4)).toEqual(Buffer.from([1, 2, 3, 4]))
expect(parser.read(2)).toEqual(Buffer.from([5, 6]))
expect(() => parser.read(1)).toThrow()
})
test('can read a Uint32 at full', () => {
@@ -108,12 +107,12 @@ function transactionParsingTests () {
expect(parser.readField()).toEqual(Field.Fee)
expect(parser.read(8)).not.toEqual([])
expect(parser.readField()).toEqual(Field.SigningPubKey)
expect(parser.readVLLength()).toBe(33)
expect(parser.readVariableLengthLength()).toBe(33)
expect(bytesToHex(parser.read(33))).toEqual(tx_json.SigningPubKey)
expect(parser.readField()).toEqual(Field.TxnSignature)
expect(bytesToHex(parser.readVL())).toEqual(tx_json.TxnSignature)
expect(bytesToHex(parser.readVariableLength())).toEqual(tx_json.TxnSignature)
expect(parser.readField()).toEqual(Field.Account)
expect(encodeAccountID(parser.readVL())).toEqual(tx_json.Account)
expect(encodeAccountID(parser.readVariableLength())).toEqual(tx_json.Account)
expect(parser.end()).toBe(true)
})

View File

@@ -52,18 +52,18 @@ const PaymentChannel = {
}
function bytesListTest () {
const list = new BytesList().put([0]).put([2, 3]).put([4, 5])
test('is an Array<Uint8Array>', function () {
expect(Array.isArray(list.arrays)).toBe(true)
expect(list.arrays[0] instanceof Uint8Array).toBe(true)
const list = new BytesList().put(Buffer.from([0])).put(Buffer.from([2, 3])).put(Buffer.from([4, 5]))
test('is an Array<Buffer>', function () {
expect(Array.isArray(list.bytesArray)).toBe(true)
expect(list.bytesArray[0] instanceof Buffer).toBe(true)
})
test('keeps track of the length itself', function () {
expect(list).toHaveLength(5)
expect(list.getLength()).toBe(5)
})
test('can join all arrays into one via toBytes', function () {
const joined = list.toBytes()
expect(joined).toHaveLength(5)
expect(joined).toEqual(Uint8Array.from([0, 2, 3, 4, 5]))
expect(joined).toEqual(Buffer.from([0, 2, 3, 4, 5]))
})
}
@@ -95,7 +95,7 @@ function check (type, n, expected) {
return
}
serializer.writeType(type, n)
expect(bl.toBytes()).toEqual(Uint8Array.from(expected))
expect(bl.toBytes()).toEqual(Buffer.from(expected))
})
}

View File

@@ -943,7 +943,7 @@
"name": "TickSize",
"nth_of_type": 16,
"type": 16,
"expected_hex": "01010"
"expected_hex": "001010"
}
],
"whole_objects": [