diff --git a/.gitignore b/.gitignore
index 5e11828..bf9d950 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 .env
 .DS_Store
 /node_modules
+/store
diff --git a/index.mjs b/index.mjs
new file mode 100644
index 0000000..024ef76
--- /dev/null
+++ b/index.mjs
@@ -0,0 +1,47 @@
+import { XrplClient } from 'xrpl-client'
+import { createDirectory } from './lib/createDirectory.mjs'
+import { onValidation } from './lib/onValidation.mjs'
+// import { onLedger } from './lib/onLedger.mjs'
+// import { onTransaction } from './lib/onTransaction.mjs'
+import 'dotenv/config'
+import assert from 'assert'
+
+assert(process.env?.NODES, 'ENV var missing: NODES, containing: a comma separated list of websocket endpoints')
+
+await createDirectory('store')
+
+process.env.NODES.split(',').map(h => h.trim())
+  .map(h => new XrplClient(h)).map(async c => {
+    await c.ready()
+
+    /**
+     * TODO: Auto disconnect if no messages for X
+     */
+
+    c.send({ command: "subscribe", streams: [
+      "validations",
+      // "ledger",
+      // "transactions",
+      // "transactions_proposed"
+    ] })
+
+    c.on("validation", validation => onValidation({
+      connectionUrl: c.getState()?.server?.uri,
+      networkId: c.getState()?.server?.networkId,
+      validation,
+    }))
+
+    // c.on("ledger", ledger => onLedger({
+    //   connectionUrl: c.getState()?.server?.uri,
+    //   networkId: c.getState()?.server?.networkId,
+    //   ledger,
+    //   connection: c,
+    // }))
+
+    // c.on("transaction", transaction => onTransaction({
+    //   connectionUrl: c.getState()?.server?.uri,
+    //   networkId: c.getState()?.server?.networkId,
+    //   transaction,
+    //   connection: c,
+    // }))
+  })
diff --git a/lib/createDirectory.mjs b/lib/createDirectory.mjs
new file mode 100644
index 0000000..13ee304
--- /dev/null
+++ b/lib/createDirectory.mjs
@@ -0,0 +1,22 @@
+import { URL } from 'url'
+import { mkdir, stat } from 'fs'
+
+const createDirectory = dir => new Promise((resolve, reject) => {
+  const dirToCreate = new URL('../' + dir, import.meta.url).pathname
+  stat(dirToCreate, staterr => {
+    if (staterr) {
+      mkdir(dirToCreate, { recursive: true }, mkerr => {
+        if (mkerr) {
+          return reject(new Error('Error creating Store directory: ' + staterr?.message + ' » ' + mkerr?.message))
+        }
+        resolve(dirToCreate)
+      })
+    } else {
+      resolve(dirToCreate)
+    }
+  })
+})
+
+export {
+  createDirectory,
+}
diff --git a/lib/fetchUnl.mjs b/lib/fetchUnl.mjs
new file mode 100644
index 0000000..94eccc9
--- /dev/null
+++ b/lib/fetchUnl.mjs
@@ -0,0 +1,138 @@
+/**
+ * Richard Holland, XRPL Labs, 2021
+ * Originally part of https://github.com/RichardAH/xpop-generator/tree/master
+ */
+
+import elliptic from 'elliptic'
+const ed25519 = elliptic.eddsa('ed25519')
+import fetch from 'node-fetch'
+import address from 'ripple-address-codec'
+
+const fetchUnl = (url, master_public_key) => {
+  return new Promise(async (resolve, reject) => {
+    const codec = { address, }
+    const assert = (c, m) => {
+      if (!c) reject("Invalid manifest: " + (m ? m : ""));
+    }
+
+    const parse_manifest = buf => {
+      let man = {}
+      let upto = 0
+
+      let verify_fields = [Buffer.from('MAN\x00', 'utf-8')];
+      let last_signing = 0;
+
+      // sequence number
+      assert(buf[upto++] == 0x24, "Missing Sequence Number")
+      man['Sequence'] = (buf[upto] << 24) + (buf[upto+1] << 16) + (buf[upto+2] << 8) + buf[upto+3]
+      upto += 4
+
+      // public key
+      assert(buf[upto++] == 0x71, "Missing Public Key") // type 7 = VL, 1 = PublicKey
+      assert(buf[upto++] == 33, "Missing Public Key size") // one byte size
+      man['PublicKey'] = buf.slice(upto, upto + 33).toString('hex')
+      upto += 33
+
+      // signing public key
+      assert(buf[upto++] == 0x73, "Missing Signing Public Key") // type 7 = VL, 3 = SigningPubKey
+      assert(buf[upto++] == 33, "Missing Signing Public Key size") // one byte size
+      man['SigningPubKey'] = buf.slice(upto, upto + 33).toString('hex')
+      upto += 33
+
+      // signature
+      verify_fields.push(buf.slice(last_signing, upto))
+      assert(buf[upto++] == 0x76, "Missing Signature") // type 7 = VL, 6 = Signature
+      let signature_size = buf[upto++];
+      man['Signature'] = buf.slice(upto, upto + signature_size).toString('hex')
+      upto += signature_size
+      last_signing = upto
+
+      // domain field | optional
+      if (buf[upto] == 0x77) {
+        upto++
+        let domain_size = buf[upto++]
+        man['Domain'] = buf.slice(upto, upto + domain_size).toString('utf-8')
+        upto += domain_size
+      }
+
+      // master signature
+      verify_fields.push(buf.slice(last_signing, upto))
+      assert(buf[upto++] == 0x70, "Missing Master Signature lead byte") // type 7 = VL, 0 = uncommon field
+      assert(buf[upto++] == 0x12, "Missing Master Signature follow byte") // uncommon field 0x12 = master signature
+      let master_size = buf[upto++];
+      man['MasterSignature'] = buf.slice(upto, upto + master_size).toString('hex')
+      upto += master_size
+      last_signing = upto // here in case more fields ever added below
+
+      assert(upto == buf.length, "Extra bytes after end of manifest")
+
+      // for signature verification
+      man.without_signing_fields = Buffer.concat(verify_fields)
+      return man;
+    }
+
+    const unlData = await fetch(url)
+    const json = await unlData.json()
+
+    // initial json validation
+    assert(json.public_key !== undefined, "public key missing from vl")
+    assert(json.signature !== undefined, "signature missing from vl")
+    assert(json.version !== undefined, "version missing from vl")
+    assert(json.manifest !== undefined, "manifest missing from vl")
+    assert(json.blob !== undefined, "blob missing from vl")
+    assert(json.version == 1, "vl version != 1")
+
+    // check key is recognised
+    if (master_public_key)
+      assert(json.public_key.toUpperCase() == master_public_key.toUpperCase(),
+        "Provided VL key does not match")
+    else
+      master_public_key = json.public_key.toUpperCase()
+
+    // parse blob
+    let blob = Buffer.from(json.blob, 'base64')
+
+    // parse manifest
+    const manifest = parse_manifest(Buffer.from(json.manifest, 'base64'))
+
+    // verify manifest signature and payload signature
+    const master_key = ed25519.keyFromPublic(master_public_key.slice(2), 'hex')
+    assert(master_key.verify(manifest.without_signing_fields, manifest.MasterSignature),
+      "Master signature in master manifest does not match vl key")
+    let signing_key = ed25519.keyFromPublic(manifest.SigningPubKey.slice(2), 'hex')
+    assert(signing_key.verify(blob.toString('hex'), json.signature),
+      "Payload signature in manifest failed verification")
+    blob = JSON.parse(blob)
+
+    assert(blob.validators !== undefined, "validators missing from blob")
+
+    // parse manifests inside blob (actual validator list)
+    let unl = {}
+    for (let idx in blob.validators) {
+      assert(blob.validators[idx].manifest !== undefined,
+        "validators list in blob contains invalid entry (missing manifest)")
+      assert(blob.validators[idx].validation_public_key !== undefined,
+        "validators list in blob contains invalid entry (missing validation public key)")
+
+      let manifest = parse_manifest(Buffer.from(blob.validators[idx].manifest, 'base64'))
+
+      // verify signature
+      signing_key = ed25519.keyFromPublic(blob.validators[idx].validation_public_key.slice(2), 'hex')
+
+      assert(signing_key.verify(manifest.without_signing_fields, manifest.MasterSignature),
+        "Validation manifest " + idx + " signature verification failed")
+
+      blob.validators[idx].validation_public_key = Buffer.from(blob.validators[idx].validation_public_key, 'hex')
+      blob.validators[idx].manifest = manifest
+
+      let nodepub = codec.address.encodeNodePublic(Buffer.from(manifest.SigningPubKey, 'hex'))
+      unl[nodepub] = manifest.SigningPubKey
+    }
+
+    resolve({unl: {...unl}, vl: json})
+  })
+}
+
+export {
+  fetchUnl,
+}
diff --git a/lib/ledgerIndexToFolders.mjs b/lib/ledgerIndexToFolders.mjs
new file mode 100644
index 0000000..6ec950d
--- /dev/null
+++ b/lib/ledgerIndexToFolders.mjs
@@ -0,0 +1,9 @@
+const ledgerIndexToFolders = ledgerIndex => {
+  return Math.floor(ledgerIndex / Math.pow(10, 6)) + '/' +
+    Math.floor((ledgerIndex % Math.pow(10, 6)) / Math.pow(10, 3)) + '/' +
+    ledgerIndex % Math.pow(10, 6) % Math.pow(10, 3)
+}
+
+export {
+  ledgerIndexToFolders
+}
diff --git a/lib/onValidation.mjs b/lib/onValidation.mjs
new file mode 100644
index 0000000..b59a2ad
--- /dev/null
+++ b/lib/onValidation.mjs
@@ -0,0 +1,75 @@
+import { writeFile } from 'fs'
+import { createDirectory } from './createDirectory.mjs'
+import 'dotenv/config'
+import { unlData } from './unlData.mjs'
+import { ledgerIndexToFolders } from './ledgerIndexToFolders.mjs'
+
+const lastSeenValidations = []
+let lastCreatedLedgerDir
+
+const onValidation = async ({
+  connectionUrl,
+  networkId,
+  validation,
+}) => {
+  /**
+   * Only proceed if the pubkey is on preferred UNL & reported by node with expected network ID
+   */
+  if (unlData.hosts.indexOf(validation.validation_public_key) > -1 && networkId === unlData.networkid) {
+    unlData.refresh()
+
+    const lastSeenKey = `${validation.ledger_index} @ ${validation.validation_public_key}`
+
+    /**
+     * Do not process & write same validation received from multiple nodes twice
+     */
+    if (lastSeenValidations.indexOf(lastSeenKey) < 0) {
+
+      lastSeenValidations.unshift(lastSeenKey)
+      lastSeenValidations.length = unlData.hosts.length * 10
+
+      /**
+       * Write to ledger index folder
+       */
+      const relativeStoreDir = 'store/' + networkId + '/' + ledgerIndexToFolders(validation.ledger_index)
+      const storeDir = new URL('../' + relativeStoreDir, import.meta.url).pathname
+
+      if (lastCreatedLedgerDir !== validation.ledger_index) {
+        await createDirectory(relativeStoreDir)
+        lastCreatedLedgerDir = validation.ledger_index
+
+        writeFile(storeDir + '/vl.json', Buffer.from(JSON.stringify(unlData.data), 'utf8'), err => {
+          if (err) {
+            console.log('Error writing file @ ' + storeDir)
+          }
+        })
+      }
+
+      /**
+       * Debug output
+       */
+      console.log(
+        networkId,
+        validation.ledger_index,
+        relativeStoreDir,
+        validation.validation_public_key,
+        // validation.validated_hash, // parent hash
+        validation.ledger_hash, // ledger_hash
+        connectionUrl,
+      )
+
+      /**
+       * Create validation file
+       */
+      writeFile(storeDir + '/' + validation.validation_public_key + '.json', Buffer.from(JSON.stringify(validation), 'utf8'), err => {
+        if (err) {
+          console.log('Error writing file @ ' + storeDir)
+        }
+      })
+    }
+  }
+}
+
+export {
+  onValidation,
+}
diff --git a/lib/unlData.mjs b/lib/unlData.mjs
new file mode 100644
index 0000000..0973a96
--- /dev/null
+++ b/lib/unlData.mjs
@@ -0,0 +1,61 @@
+import { fetchUnl } from './fetchUnl.mjs'
+import 'dotenv/config'
+import assert from 'assert'
+
+const unlCacheTimeSec = 120
+
+class UNL {
+  data = {}
+  hosts = []
+  networkid = 0
+  updated = null
+  fetching = false
+
+  constructor () {
+    assert(process.env?.UNLURL, 'ENV var missing: UNLURL, containing: the URL of the Validator List')
+    assert(process.env?.UNLKEY, 'ENV var missing: UNLKEY, containing: the signing (pub) key for the Validator List')
+    assert(process.env?.NETWORKID, 'ENV var missing: NETWORKID, containing: the network ID (int), e.g. 0 for mainnet')
+
+    this.networkid = Number(process.env.NETWORKID)
+
+    this.fetch()
+  }
+
+  async refresh () {
+    const timeDiffSec = Math.floor((new Date() - this.updated) / 1000)
+
+    if (timeDiffSec > unlCacheTimeSec && !this.fetching) {
+      return await this.fetch()
+    }
+
+    return Promise.resolve()
+  }
+
+  async fetch () {
+    this.fetching = true
+
+    try {
+      const unl = await fetchUnl(process.env.UNLURL, process.env.UNLKEY)
+      const unlHosts = Object.keys(unl.unl)
+      console.log('Fetched UNL', process.env.UNLURL, 'found validators', unlHosts.length)
+
+      if (unlHosts.length > 1) {
+        this.data = unl
+        this.hosts = unlHosts
+        this.updated = new Date()
+      }
+    } catch (e) {
+      this.fetching = false
+      return e
+    }
+
+    this.fetching = false
+    return
+  }
+}
+
+const unlData = new UNL()
+
+export {
+  unlData,
+}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..5ac633a
--- /dev/null
+++ b/package.json
@@ -0,0 +1,24 @@
+{
+  "name": "xrplp-validation-msg-store",
+  "version": "1.0.0",
+  "description": "Store XRPL Proof of Validation messages",
+  "main": "index.mjs",
+  "scripts": {
+    "dev": "nodemon ."
+  },
+  "author": "Wietse Wind",
+  "license": "MIT",
+  "nodemonConfig": {
+    "ignore": [
+      "store/*"
+    ]
+  },
+  "dependencies": {
+    "dotenv": "^16.3.1",
+    "ed25519": "^0.0.5",
+    "elliptic": "^6.5.4",
+    "node-fetch": "^3.3.2",
+    "ripple-address-codec": "^4.3.0",
+    "xrpl-client": "^2.2.0"
+  }
+}
diff --git a/store/.gitkeep b/store/.gitkeep
new file mode 100644
index 0000000..e69de29
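
Note: below is a minimal sketch (not part of the commit above) of reading the resulting store back, assuming the layout written by lib/onValidation.mjs: store/<networkId>/<ledgerIndexToFolders(ledger_index)>/ containing vl.json plus one <validation_public_key>.json per validator. The file name readStore.mjs, its placement in the repository root, and the example arguments are illustrative only.

// readStore.mjs — illustrative sketch, assumes this file lives next to index.mjs in the repo root
import { readdir, readFile } from 'fs/promises'
import { ledgerIndexToFolders } from './lib/ledgerIndexToFolders.mjs'

const readStoredLedger = async (networkId, ledgerIndex) => {
  // e.g. ledger index 82000123 on network 0 resolves to store/0/82/0/123/
  const dir = new URL('./store/' + networkId + '/' + ledgerIndexToFolders(ledgerIndex) + '/', import.meta.url)
  const files = await readdir(dir)

  // Map each stored JSON file (vl.json and <validation_public_key>.json) to its parsed contents
  const entries = await Promise.all(files.map(async file => [
    file.replace(/\.json$/, ''),
    JSON.parse(await readFile(new URL(file, dir), 'utf8')),
  ]))

  return Object.fromEntries(entries)
}

// Example usage (ledger index and network ID are illustrative):
console.log(await readStoredLedger(0, 82000123))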