Refactor ./src/enums (#73)

* rewrote enums in TS
* changed folder name to src/definitions
This commit is contained in:
Nathan Nichols
2020-06-30 14:29:29 -07:00
parent ede5b254e3
commit cd0eb2b623
14 changed files with 189 additions and 177 deletions

View File

@@ -1,5 +1,10 @@
import { Enums } from "./enums";
const { Field } = Enums.Field;
import {
Field,
TransactionType,
LedgerEntryType,
Type,
TransactionResult,
} from "./definitions";
const types = require("./types");
const binary = require("./binary");
const { ShaMap } = require("./shamap");
@@ -12,9 +17,12 @@ export {
hashes,
binary,
ledgerHashes,
Enums,
quality,
Field,
TransactionType,
LedgerEntryType,
Type,
TransactionResult,
quality,
HashPrefix,
ShaMap,
types,

View File

@@ -2,11 +2,11 @@
## Types
TODO
These are the [types](https://xrpl.org/serialization.html#type-list) associated with a given Serialization Field
## Ledger Entry Types
TODO
Each ledger's state tree contains [ledger objects](https://xrpl.org/ledger-object-types.html), which represent all settings, balances, and relationships in the shared ledger.
## Fields

View File

@@ -0,0 +1,140 @@
import { serializeUIntN } from "../utils/bytes-utils";
import * as enums from "./definitions.json";
// Byte widths used when serializing each kind of ordinal (passed as the
// ordinalWidth of the corresponding BytesLookup below).
const TYPE_WIDTH = 2;
const LEDGER_ENTRY_WIDTH = 2;
const TRANSACTION_TYPE_WIDTH = 2;
const TRANSACTION_RESULT_WIDTH = 1;
/*
 * @brief: Serialize a field based on type_code and Field.nth
 *
 * Each code is packed into a nibble when it fits (< 16); otherwise it
 * occupies its own trailing byte, with a zero placeholder when both overflow.
 */
function fieldHeader(type: number, nth: number): Uint8Array {
  const bytes: Array<number> = [];
  if (type < 16 && nth < 16) {
    bytes.push((type << 4) | nth);
  } else if (type < 16) {
    bytes.push(type << 4, nth);
  } else if (nth < 16) {
    bytes.push(nth, type);
  } else {
    bytes.push(0, type, nth);
  }
  return Uint8Array.from(bytes);
}
/*
 * @brief: Bytes, name, and ordinal representing one type, ledger_type, transaction type, or result
 */
class Bytes {
  // Fixed-width byte encoding of `ordinal` (computed once via serializeUIntN).
  readonly bytes: Uint8Array;

  constructor(
    readonly name: string,
    readonly ordinal: number,
    readonly ordinalWidth: number
  ) {
    this.bytes = serializeUIntN(ordinal, ordinalWidth);
  }

  // JSON form is the symbolic name, not the numeric ordinal.
  toJSON(): string {
    return this.name;
  }

  // Write the encoded ordinal into a byte sink.
  toBytesSink(sink): void {
    sink.put(this.bytes);
  }
}
/*
 * @brief: Collection of Bytes objects, mapping bidirectionally
 */
class BytesLookup {
  constructor(types: { [key: string]: number }, readonly ordinalWidth: number) {
    // Index every entry twice — by name and by its ordinal rendered as a
    // string — so lookups work in either direction.
    for (const [name, ordinal] of Object.entries(types)) {
      const entry = new Bytes(name, ordinal, ordinalWidth);
      this[name] = entry;
      this[ordinal.toString()] = entry;
    }
  }

  // Resolve a name or ordinal-string to its Bytes entry; Bytes pass through.
  from(value: Bytes | string): Bytes {
    if (value instanceof Bytes) {
      return value;
    }
    return this[value] as Bytes;
  }

  // Read ordinalWidth bytes from the parser and resolve them to an entry.
  fromParser(parser): Bytes {
    return this.from(parser.readUIntN(this.ordinalWidth).toString());
  }
}
/*
 * type FieldInfo is the type of the objects containing information about each field in definitions.json
 */
interface FieldInfo {
  // Field code within its type (becomes the low 16 bits of the ordinal).
  nth: number;
  // Whether the field's value is length-prefixed when serialized.
  isVLEncoded: boolean;
  isSerialized: boolean;
  isSigningField: boolean;
  // Name of the field's type; must be a key of enums.TYPES.
  type: string;
}
/*
 * A fully-resolved serialization field built from a FieldInfo entry.
 */
interface FieldInstance {
  readonly nth: number;
  readonly isVariableLengthEncoded: boolean;
  readonly isSerialized: boolean;
  readonly isSigningField: boolean;
  // The field's type as a Bytes value (name + ordinal + encoding).
  readonly type: Bytes;
  // Composite key: (type ordinal << 16) | nth.
  readonly ordinal: number;
  readonly name: string;
  // Encoded field header (see fieldHeader).
  readonly header: Uint8Array;
  // Assigned after construction by ./types/index.js; untyped until then.
  readonly associatedType: any;
}
/*
 * Construct one FieldInstance from a [name, info] entry of definitions.json.
 */
function buildField([name, info]: [string, FieldInfo]): FieldInstance {
  const typeCode = enums.TYPES[info.type];
  return {
    name,
    nth: info.nth,
    isVariableLengthEncoded: info.isVLEncoded,
    isSerialized: info.isSerialized,
    isSigningField: info.isSigningField,
    // Type code in the high 16 bits, field code in the low 16 bits.
    ordinal: (typeCode << 16) | info.nth,
    type: new Bytes(info.type, typeCode, TYPE_WIDTH),
    header: fieldHeader(typeCode, info.nth),
    associatedType: undefined, // For later assignment in ./types/index.js
  };
}
/*
 * @brief: The collection of all fields as defined in definitions.json
 */
class FieldLookup {
  constructor(fields: Array<[string, FieldInfo]>) {
    // Each field is reachable both by name and by its ordinal (as a string).
    for (const entry of fields) {
      const field = buildField(entry);
      this[field.name] = field;
      this[field.ordinal.toString()] = field;
    }
  }

  fromString(value: string): FieldInstance {
    return this[value] as FieldInstance;
  }
}
// Lookup tables built from definitions.json, each exposing both name-keyed
// and ordinal-keyed access to its entries.
const Type = new BytesLookup(enums.TYPES, TYPE_WIDTH);
const LedgerEntryType = new BytesLookup(
  enums.LEDGER_ENTRY_TYPES,
  LEDGER_ENTRY_WIDTH
);
const TransactionType = new BytesLookup(
  enums.TRANSACTION_TYPES,
  TRANSACTION_TYPE_WIDTH
);
const TransactionResult = new BytesLookup(
  enums.TRANSACTION_RESULTS,
  TRANSACTION_RESULT_WIDTH
);
// All serialization fields, keyed by name and by composite ordinal.
const Field = new FieldLookup(enums.FIELDS as Array<[string, FieldInfo]>);
export { Field, Type, LedgerEntryType, TransactionResult, TransactionType };

View File

@@ -1,138 +0,0 @@
import { makeClass } from "./../utils/make-class";
const assert = require("assert");
const _ = require("lodash");
const { parseBytes, serializeUIntN } = require("./../utils/bytes-utils");
const enums = require("./definitions.json");
// Thin wrapper over lodash `_.transform` that takes the iteratee first, so it
// reads naturally at the `Enums` construction site below.
function transformWith(func, obj) {
  return _.transform(obj, func);
}
// Builds a bidirectional map: each value is reachable both by its original
// key and by value[valueKey] (e.g. by name and by ordinal).
function biMap(obj, valueKey) {
  return _.transform(obj, (result, value, key) => {
    result[key] = value;
    result[value[valueKey]] = value;
  });
}
// Base pseudo-class (built with the project's makeClass helper) for all enum
// values. Every member carries a name, a numeric ordinal, and the ordinal's
// serialized bytes; the class object itself doubles as a lookup table after
// init() runs.
const EnumType = makeClass(
  {
    // Instance initializer: copies the definition's properties onto `this`
    // and validates the minimum required shape.
    EnumType(definition) {
      _.assign(this, definition);
      // At minimum
      assert(this.bytes instanceof Uint8Array);
      assert(typeof this.ordinal === "number");
      assert(typeof this.name === "string");
    },
    toString() {
      return this.name;
    },
    toJSON() {
      return this.name;
    },
    // Writes the serialized ordinal into a byte sink.
    toBytesSink(sink) {
      sink.put(this.bytes);
    },
    statics: {
      // Default serialized width in bytes; overridden per enum in makeEnums.
      ordinalByteWidth: 1,
      fromParser(parser) {
        return this.from(parser.readUIntN(this.ordinalByteWidth));
      },
      // Accepts an existing instance, a name, or an ordinal; throws for
      // values that are not registered on the class.
      from(val) {
        const ret = val instanceof this ? val : this[val];
        if (!ret) {
          throw new Error(
            `${val} is not a valid name or ordinal for ${this.enumName}`
          );
        }
        return ret;
      },
      // Builds one instance per name/ordinal pair of `initVals`.
      valuesByName() {
        return _.transform(this.initVals, (result, ordinal, name) => {
          const bytes = serializeUIntN(ordinal, this.ordinalByteWidth);
          const type = new this({ name, ordinal, bytes });
          result[name] = type;
        });
      },
      // NOTE(review): presumably invoked by makeClass after class creation —
      // installs name->instance and ordinal->instance lookups on the class
      // itself and collects all members into `values`.
      init() {
        const mapped = this.valuesByName();
        _.assign(this, biMap(mapped, "ordinal"));
        this.values = _.values(mapped);
        return this;
      },
    },
  },
  undefined
);
// Creates a concrete enum class inheriting EnumType, tagging it with its
// name (used in the error message of EnumType.from).
function makeEnum(name, definition) {
  return makeClass(
    {
      inherits: EnumType,
      statics: _.assign(definition, { enumName: name }),
    },
    undefined
  );
}
// `_.transform` iteratee: installs a generated enum class on the accumulator.
function makeEnums(to, definition, name) {
  to[name] = makeEnum(name, definition);
}
// The four simple enums, built from definitions.json. Field is richer and is
// attached separately below.
const Enums = transformWith(makeEnums, {
  Type: {
    initVals: enums.TYPES,
  },
  LedgerEntryType: {
    initVals: enums.LEDGER_ENTRY_TYPES,
    ordinalByteWidth: 2,
  },
  TransactionType: {
    initVals: enums.TRANSACTION_TYPES,
    ordinalByteWidth: 2,
  },
  TransactionResult: {
    initVals: enums.TRANSACTION_RESULTS,
    ordinalByteWidth: 1,
  },
});
// Field is a richer enum: each member also carries its type, its encoded
// header bytes, and a composite ordinal used for canonical field ordering.
Enums.Field = makeClass(
  {
    inherits: EnumType,
    statics: {
      enumName: "Field",
      initVals: enums.FIELDS,
      // Overrides EnumType.valuesByName: FIELDS is an array of
      // [name, definition] pairs rather than a name->ordinal map.
      valuesByName() {
        const fields = _.map(this.initVals, ([name, definition]) => {
          const type = Enums.Type[definition.type];
          const bytes = this.header(type.ordinal, definition.nth);
          // Type ordinal in the high 16 bits, field code in the low 16 bits.
          const ordinal = (type.ordinal << 16) | definition.nth;
          const extra = { ordinal, name, type, bytes };
          return new this(_.assign(definition, extra));
        });
        return _.keyBy(fields, "name");
      },
      // Encodes the field header: each code packs into a nibble when < 16,
      // otherwise takes a trailing byte of its own.
      header(type, nth) {
        const name = nth;
        const header = <any>[];
        const push = header.push.bind(header);
        if (type < 16) {
          if (name < 16) {
            push((type << 4) | name);
          } else {
            push(type << 4, name);
          }
        } else if (name < 16) {
          push(name, type);
        } else {
          push(0, type, name);
        }
        return parseBytes(header, Uint8Array);
      },
    },
  },
  undefined
);
export { Enums };

View File

@@ -1,6 +1,6 @@
import { strict as assert } from "assert";
import { makeClass } from "../utils/make-class";
import { Enums } from "../enums";
import { Field } from "../definitions";
import { slice, parseBytes } from "../utils/bytes-utils";
const BinaryParser = makeClass(
@@ -68,7 +68,7 @@ const BinaryParser = makeClass(
return (type << 16) | nth;
},
readField() {
return Enums.Field.from(this.readFieldOrdinal());
return Field.fromString(this.readFieldOrdinal().toString());
},
readType(type) {
return type.fromParser(this);
@@ -81,7 +81,9 @@ const BinaryParser = makeClass(
if (!kls) {
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
const sizeHint = field.isVLEncoded ? this.readVLLength() : null;
const sizeHint = field.isVariableLengthEncoded
? this.readVLLength()
: null;
const value = kls.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error(

View File

@@ -1,7 +1,7 @@
import { strict as assert } from "assert";
import { parseBytes, bytesToHex } from "../utils/bytes-utils";
import { makeClass } from "../utils/make-class";
import { Enums } from "../enums";
import { Field } from "../definitions";
const BytesSink = {
put(/* bytesSequence */) {
@@ -84,16 +84,16 @@ const BinarySerializer = makeClass(
const sink = this.sink;
const value = field.associatedType.from(_value);
assert(value.toBytesSink, field);
sink.put(field.bytes);
sink.put(field.header);
if (field.isVLEncoded) {
if (field.isVariableLengthEncoded) {
this.writeLengthEncoded(value);
} else {
value.toBytesSink(sink);
if (field.type === Enums.Type.STObject) {
sink.put(Enums.Field.ObjectEndMarker.bytes);
} else if (field.type === Enums.Type.STArray) {
sink.put(Enums.Field.ArrayEndMarker.bytes);
if (field.type.name === "STObject") {
sink.put(Field["ObjectEndMarker"].header);
} else if (field.type.name === "STArray") {
sink.put(Field["ArrayEndMarker"].header);
}
}
},

View File

@@ -1,8 +1,12 @@
import { Enums } from "../enums";
import {
Field,
TransactionResult,
TransactionType,
LedgerEntryType,
} from "../definitions";
import { AccountID } from "./account-id";
import { Amount } from "./amount";
import { Blob } from "./blob";
const Field = Enums.Field;
const { Currency } = require("./currency");
const { Hash128 } = require("./hash-128");
const { Hash160 } = require("./hash-160");
@@ -34,12 +38,12 @@ const coreTypes = {
Vector256,
};
Field.values.forEach((field) => {
field.associatedType = coreTypes[field.type];
Object.values(Field).forEach((field) => {
field.associatedType = coreTypes[field.type.name];
});
Field.TransactionType.associatedType = Enums.TransactionType;
Field.TransactionResult.associatedType = Enums.TransactionResult;
Field.LedgerEntryType.associatedType = Enums.LedgerEntryType;
Field["TransactionType"].associatedType = TransactionType;
Field["TransactionResult"].associatedType = TransactionResult;
Field["LedgerEntryType"].associatedType = LedgerEntryType;
export { coreTypes };

View File

@@ -1,8 +1,6 @@
import { makeClass } from "../utils/make-class";
import { ensureArrayLikeIs, SerializedType } from "./serialized-type";
import { Enums } from "../enums";
import { STObject } from "./st-object";
const { ArrayEndMarker } = Enums.Field;
const STArray = makeClass(
{
@@ -13,11 +11,11 @@ const STArray = makeClass(
const array = new STArray();
while (!parser.end()) {
const field = parser.readField();
if (field === ArrayEndMarker) {
if (field.name === "ArrayEndMarker") {
break;
}
const outer = new STObject();
outer[field] = parser.readFieldValue(field);
outer[field.name] = parser.readFieldValue(field);
array.push(outer);
}
return array;

View File

@@ -1,8 +1,7 @@
import { makeClass } from "../utils/make-class";
import { Enums } from "../enums";
import { Field } from "../definitions";
const _ = require("lodash");
const { BinarySerializer } = require("../serdes/binary-serializer");
const { ObjectEndMarker } = Enums.Field;
const { SerializedType } = require("./serialized-type");
const STObject = makeClass(
@@ -14,10 +13,10 @@ const STObject = makeClass(
const so = new this();
while (!parser.end(end)) {
const field = parser.readField();
if (field === ObjectEndMarker) {
if (field.name === "ObjectEndMarker") {
break;
}
so[field] = parser.readFieldValue(field);
so[field.name] = parser.readFieldValue(field);
}
return so;
},
@@ -29,9 +28,9 @@ const STObject = makeClass(
return _.transform(
value,
(so, val, key) => {
const field = Enums.Field[key];
const field = Field[key];
if (field) {
so[field] = field.associatedType.from(val);
so[field.name] = field.associatedType.from(val);
} else {
so[key] = val;
}
@@ -44,7 +43,7 @@ const STObject = makeClass(
},
fieldKeys() {
return Object.keys(this)
.map((k) => Enums.Field[k])
.map((k) => Field[k])
.filter(Boolean);
},
toJSON() {
@@ -63,7 +62,7 @@ const STObject = makeClass(
const fields = this.fieldKeys();
const sorted = _.sortBy(fields, "ordinal");
sorted.filter(filter).forEach((field) => {
const value = this[field];
const value = this[field.name];
if (!field.isSerialized) {
return;
}