Mirror of https://github.com/Xahau/Validation-Ledger-Tx-Store-to-xPOP.git (synced 2025-11-28 15:45:51 +00:00)
Beta: add webserver (socket, events, directory listing), transaction ordering, etc.
lib/events/ledgerReady.mjs (new file, 91 lines)
@@ -0,0 +1,91 @@
const ledgers = {}

const externalResolvablePromise = () => {
  let _resolve
  const meta = {
    resolved: false,
  }
  const promise = new Promise(resolve => {
    _resolve = (r) => {
      meta.resolved = true
      return resolve(r)
    }
  })

  return { promise, resolve: _resolve, meta, }
}

/**
 * Register that one element of a ledger is ready, or count an incoming validation.
 * @param {number} ledger - Ledger Index
 * @param {('ledger_binary_transactions'|'ledger_info'|'vl'|'validation')} readyElement - The element that just became available
 */

const ledgerReady = async (ledger, readyElement) => {
  // console.log('LedgerReady', ledger, readyElement)
  const ledgerIndexString = String(ledger)

  if (!ledgers?.[ledgerIndexString]) {
    const ledger_binary_transactions = externalResolvablePromise()
    const ledger_info = externalResolvablePromise()
    const vl = externalResolvablePromise()

    const ready = Promise.all([
      ledger_binary_transactions.promise,
      ledger_info.promise,
      vl.promise,
    ])

    Object.assign(ledgers, {
      [ledgerIndexString]: {
        ledger_binary_transactions,
        ledger_info,
        vl,
        validation: 0,
        ready,
      }
    })

    // Set timeout to clean up
    setTimeout(() => {
      // console.log('Cleaning up', ledgerIndexString)
      if (ledgers?.[ledgerIndexString]) {
        ledgers?.[ledgerIndexString]?.ledger_binary_transactions?.resolve(false)
        ledgers?.[ledgerIndexString]?.ledger_info?.resolve(false)
        ledgers?.[ledgerIndexString]?.vl?.resolve(false)
      }

      // Drop the reference so it can be garbage collected
      setTimeout(() => {
        if (ledgers?.[ledgerIndexString]) delete ledgers[ledgerIndexString]
      }, 50)
    }, 20_000)
  }

  if (
    readyElement === 'ledger_binary_transactions'
    || readyElement === 'ledger_info'
    || readyElement === 'vl'
  ) {
    ledgers[ledgerIndexString][readyElement].resolve(new Date() / 1000)
  }
  if (readyElement === 'validation') {
    ledgers[ledgerIndexString][readyElement]++
  }
}

const waitForLedgerReady = ledgerIndex => {
  return ledgers?.[String(ledgerIndex)]?.ready
}

const isLedgerReady = ledgerIndex => {
  return ledgers?.[String(ledgerIndex)]?.ledger_binary_transactions.meta.resolved
    && ledgers?.[String(ledgerIndex)]?.ledger_info.meta.resolved
    && ledgers?.[String(ledgerIndex)]?.vl.meta.resolved
}

export {
  ledgerReady,
  isLedgerReady,
  waitForLedgerReady,
}
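The externally resolvable promise is the coordination primitive here: each ledger gets three promises that other modules resolve independently, and `ready` fires once all three have. A minimal usage sketch, with a made-up ledger index (in the real flow the three `ledgerReady` calls come from `onLedger` and `onValidation` as files hit disk; `waitForLedgerReady` returns undefined if no event was seen yet for that index):

import { ledgerReady, isLedgerReady, waitForLedgerReady } from './lib/events/ledgerReady.mjs'

const ledgerIndex = 1234567 // hypothetical ledger index

// Each persisted artifact resolves its promise:
ledgerReady(ledgerIndex, 'ledger_binary_transactions')
ledgerReady(ledgerIndex, 'ledger_info')
ledgerReady(ledgerIndex, 'vl')

// `ready` is the Promise.all over the three; entries are epoch seconds,
// or false when the 20s cleanup timeout fired first:
const timestamps = await waitForLedgerReady(ledgerIndex)
console.log(isLedgerReady(ledgerIndex), timestamps) // true [ ..., ..., ... ]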
@@ -2,6 +2,8 @@ import { writeFile } from 'fs'
import { ledgerIndexToFolders } from './ledgerIndexToFolders.mjs'
import { computeBinaryTransactionHash } from './computeBinaryTransactionHash.mjs'
import { dirExists } from './dirExists.mjs'
import { ledgerReady, waitForLedgerReady } from './events/ledgerReady.mjs'
import { onTransaction } from './onTransaction.mjs'
import 'dotenv/config'

const obtainedHumanReadableLedgers = []
@@ -18,7 +20,7 @@ const onLedger = async ({
  const storeDir = new URL('../' + relativeStoreDir, import.meta.url).pathname

  if (await dirExists(storeDir)) {
    const ledgerData = [
      ...(
        obtainedBinaryTxLedgers.indexOf(ledger.ledger_index) < 0
          ? [
@@ -37,6 +39,9 @@ const onLedger = async ({
            connection.send({
              command: 'ledger',
              ledger_index: ledger.ledger_index,
              transactions: true,
              expand: true,
              binary: false,
            })
          ]
          : []),
@@ -71,6 +76,8 @@ const onLedger = async ({
            writeFile(storeDir + '/ledger_binary_transactions.json', Buffer.from(JSON.stringify(results.ledger), 'utf8'), err => {
              if (err) {
                console.log('Error writing file @ ' + storeDir)
              } else {
                ledgerReady(results.ledger_index, 'ledger_binary_transactions')
              }
            })
          }
@@ -87,16 +94,54 @@ const onLedger = async ({

            console.log('Obtained ledger (JSON object)', relativeStoreDir, results.ledger_index, 'Hash', results.ledger.ledger_hash)

            writeFile(storeDir + '/ledger_info.json', Buffer.from(JSON.stringify({ ...results.ledger, transactions: undefined, }), 'utf8'), err => {
              if (err) {
                console.log('Error writing file @ ' + storeDir)
              } else {
                ledgerReady(ledger.ledger_index, 'ledger_info')
              }
            })

          }
        }
      }))
    }

    return results.ledger
  }))

  /**
   * Deal with transactions & fire events
   */
  waitForLedgerReady(ledger.ledger_index).then(async () => {
    if (ledgerData.length > 0) {
      const [binary, json] = await Promise.all(ledgerData)
      const sequentiallyMappedLedgerTxEvents = (json?.transactions || []).map(tx => {
        return {
          validated: true,
          ledger_index: ledger.ledger_index,
          transaction: tx,
        }
      })
        .sort((a, b) => a.transaction.Sequence - b.transaction.Sequence)
        .reduce((promiseChain, current) => {
          return promiseChain.then(() => {
            // console.log(' » Tx events: Processing', current.transaction.Sequence)
            return onTransaction({
              networkId,
              transaction: current,
              connection,
            })
          }).then(() => {
            // console.log(' » Tx events: Done ', current.transaction.Sequence)
          })
        }, Promise.resolve())

      sequentiallyMappedLedgerTxEvents.then(() => {
        // console.log(' « « « « All transactions in ledger processed', ledger.ledger_index)
      });
    }
  })

        }
      }
    }
  }
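The `.reduce(...)` into a promise chain above is what implements the commit's "tx ordering": each `onTransaction` call starts only after the previous one has settled. The same pattern in isolation, with made-up items and handler:

// Process items strictly one-by-one, in Sequence order.
const processSequentially = (items, handler) => items
  .sort((a, b) => a.Sequence - b.Sequence)
  .reduce(
    (chain, item) => chain.then(() => handler(item)),
    Promise.resolve(),
  )

// Hypothetical usage: logs 1, then 2, awaiting each handler in turn.
await processSequentially(
  [{ Sequence: 2 }, { Sequence: 1 }],
  async tx => console.log('Processing', tx.Sequence),
)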
@@ -1,9 +1,12 @@
import { writeFile } from 'fs'
import { dirExists } from './dirExists.mjs'
import { ledgerIndexToFolders } from './ledgerIndexToFolders.mjs'
import { xpopGenerate } from './xpopGenerate.mjs'
import { waitForLedgerReady } from './events/ledgerReady.mjs'
import { emit } from '../bin/webserver.mjs'
import 'dotenv/config'

const xpopBinaryDir = new URL('../store/xpop', import.meta.url).pathname
const lastSeenTransactions = []

const fields = (process.env?.FIELDSREQUIRED || '')
@@ -21,52 +24,94 @@ const onTransaction = async ({
  networkId,
  transaction,
}) => {
  if (transaction?.validated) {
    const { transaction: tx } = transaction

    if (tx.hash && lastSeenTransactions.indexOf(tx.hash) < 0) {
      lastSeenTransactions.unshift(tx.hash)
      lastSeenTransactions.length = 3000

      const validTx = hasRequiredFields(tx)
      if (!process.env?.NOELIGIBLEFULLTXLOG) {
        console.log('TX', tx.hash, validTx)
      }

      if (validTx && transaction?.ledger_index) {
        const relativeStorDir = 'store/' + networkId + '/' + ledgerIndexToFolders(transaction.ledger_index)
        const storeDir = new URL('../' + relativeStorDir, import.meta.url).pathname

        console.log('xPOP eligible', relativeStorDir, process.env?.NOELIGIBLEFULLTXLOG ? tx.hash : tx)

        if (await dirExists(storeDir)) {
          const wroteTxFile = await new Promise(resolve => {
            writeFile(storeDir + '/tx_' + tx.hash + '.json', Buffer.from(JSON.stringify(transaction), 'utf8'), err => {
              if (err) {
                console.log('Error writing file @ ' + storeDir)
                resolve(false)
              }
              resolve(true)
            })
          })

          if (wroteTxFile) {
            await waitForLedgerReady(transaction.ledger_index)
            /**
             * TX all ready, written to filesystem, ...
             * By now the `onLedger` routine has had time to fetch & store
             * the ledger data (awaited above), so we'll try to generate
             * an xPOP.
             */
            const xpopBinary = await xpopGenerate({
              ledgerIndex: transaction.ledger_index,
              networkId,
              txHash: tx.hash,
            })

            if (await dirExists(xpopBinaryDir)) {
              const xpopWritten = await new Promise(resolve => {
                writeFile(xpopBinaryDir + '/' + tx.hash, Buffer.from(xpopBinary, 'utf8'), err => {
                  if (err) {
                    console.log('Error writing binary XPOP', err)
                    resolve(false)
                  } else {
                    console.log('Wrote binary xPOP: ' + xpopBinaryDir + '/' + tx.hash)
                    resolve(true)
                  }
                })
              })

              if (xpopWritten) {
                console.log(' ### EMIT XPOP READY FOR', tx?.Account, Number(tx.Sequence), tx.hash)

                return await emit({
                  account: tx?.Account,
                  sequence: tx.Sequence,
                  origin: {
                    tx: tx.hash,
                    networkId: networkId,
                    ledgerIndex: transaction.ledger_index,
                    burn: tx?.Fee,
                  },
                  destination: {
                    networkId: tx?.OperationLimit,
                  },
                  ...(
                    process.env?.URL_PREFIX
                      ? {
                          xpop: {
                            binary: `${process.env.URL_PREFIX}/xpop/${tx.hash}`,
                            source: `${process.env.URL_PREFIX}/${networkId}/${ledgerIndexToFolders(transaction.ledger_index)}/`,
                            blob: xpopBinary,
                          }
                        }
                      : {}
                  )
                })
              }
            }
          }
        }
      }
    }
  }
}

export {
@@ -3,6 +3,7 @@ import { createDirectory } from './createDirectory.mjs'
import 'dotenv/config'
import { unlData } from './unlData.mjs'
import { ledgerIndexToFolders } from './ledgerIndexToFolders.mjs'
import { ledgerReady } from './events/ledgerReady.mjs'

const lastSeenValidations = []
let lastCreatedLedgerDir
@@ -34,6 +35,8 @@ const onValidation = async ({
  const relativeStorDir = 'store/' + networkId + '/' + ledgerIndexToFolders(validation.ledger_index)
  const storeDir = new URL('../' + relativeStorDir, import.meta.url).pathname

  ledgerReady(validation.ledger_index, 'validation')

  if (lastCreatedLedgerDir !== validation.ledger_index) {
    await createDirectory(relativeStorDir)
    lastCreatedLedgerDir = validation.ledger_index
@@ -41,6 +44,8 @@ const onValidation = async ({
    writeFile(storeDir + '/vl.json', Buffer.from(JSON.stringify(unlData.data), 'utf8'), err => {
      if (err) {
        console.log('Error writing file @ ' + storeDir)
      } else {
        ledgerReady(validation.ledger_index, 'vl')
      }
    })
  }
lib/xpop/v1.mjs (new file, 395 lines)
@@ -0,0 +1,395 @@
import assert from 'assert'
import crypto from 'crypto'

const xpop = async ({
  vl,
  ledger: {
    json,
    binary,
  },
  validations,
  tx,
}) => {
  const proof = create_proof(binary?.transactions, tx?.transaction?.hash)
  const computed_transactions_root = hash_proof(proof)

  const { tx_blob: blob, meta } = binary?.transactions?.filter(bintx => bintx?.tx_id === tx?.transaction?.hash)?.[0]

  const computed_ledger_hash = hash_ledger(
    json.ledger_index,
    json.total_coins,
    json.parent_hash,
    computed_transactions_root,
    json.account_hash,
    json.parent_close_time,
    json.close_time,
    json.close_time_resolution,
    json.close_flags,
  )

  const data = validations.reduce((a, b) => Object.assign(a, { [b.validation_public_key]: b.data.toString('hex'), }), {})

  assert(computed_ledger_hash === json.ledger_hash, 'Invalid ledger hash computed vs. closed ledger')

  const xpopObj = {
    ledger: {
      index: Number(json?.ledger_index),
      coins: json?.total_coins,
      phash: json?.parent_hash,
      txroot: computed_transactions_root,
      acroot: json?.account_hash,
      pclose: json?.parent_close_time,
      close: json?.close_time,
      cres: json?.close_time_resolution,
      flags: json?.close_flags,
    },
    validation: {
      data,
      unl: vl.vl,
    },
    transaction: {
      blob,
      meta,
      proof,
    }
  }

  return JSON.stringify(xpopObj)
}

export {
  xpop,
}

/**
 * The helper functions below are adapted from @RichardAH's xpop-generator:
 * https://github.com/RichardAH/xpop-generator/blob/master/pov.js
 */
const make_vl_bytes = len =>
{
  const report_error = e => { console.error(e) }
  if (typeof(len) != 'number')
  {
    report_error("non-numerical length passed to make_vl_bytes")
    return false
  }

  len = Math.ceil(len)

  if (len <= 192)
  {
    let b1 = len.toString(16)
    return (b1.length == 1 ? '0' + b1 : b1).toUpperCase()
  }
  else if (len <= 12480)
  {
    let b1 = Math.floor((len - 193) / 256 + 193)
    let b2 = len - 193 - 256 * (b1 - 193)
    b1 = b1.toString(16)
    b2 = b2.toString(16)
    return ((b1.length == 1 ? '0' + b1 : b1) +
            (b2.length == 1 ? '0' + b2 : b2)).toUpperCase()
  }
  else if (len <= 918744)
  {
    let b1 = Math.floor((len - 12481) / 65536 + 241)
    let b2 = Math.floor((len - 12481 - 65536 * (b1 - 241)) / 256)
    let b3 = len - 12481 - 65536 * (b1 - 241) - 256 * b2
    b1 = b1.toString(16)
    b2 = b2.toString(16)
    b3 = b3.toString(16)
    return ((b1.length == 1 ? '0' + b1 : b1) +
            (b2.length == 1 ? '0' + b2 : b2) +
            (b3.length == 1 ? '0' + b3 : b3)).toUpperCase()
  }
  else
  {
    report_error("cannot generate vl for length = " + len + ", too large")
    return false
  }
}
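These branches implement the XRPL/Xahau variable-length (VL) prefix encoding: lengths up to 192 take one byte, up to 12480 two bytes, and up to 918744 three bytes. A few spot checks of the range boundaries, assuming the function above:

// Spot checks for the three VL-encoding ranges:
console.log(make_vl_bytes(1))     // '01'     - single byte
console.log(make_vl_bytes(192))   // 'C0'     - largest single-byte length
console.log(make_vl_bytes(193))   // 'C100'   - first two-byte length
console.log(make_vl_bytes(12481)) // 'F10000' - first three-byte length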

const sha512h = b =>
{
  if (typeof(b) == 'string')
    b = Buffer.from(b, 'hex')
  return crypto.createHash('sha512').update(b).digest().slice(0, 32).toString('hex').toUpperCase()
}

const prefix_LWR = '4C575200'
const prefix_SND = '534E4400'
const prefix_MIN = '4D494E00'
const prefix_TXN = '54584E00'
const hex = {0:'0', 1:'1', 2:'2', 3:'3', 4:'4', 5:'5', 6:'6', 7:'7',
             8:'8', 9:'9',10:'A',11:'B',12:'C',13:'D',14:'E',15:'F'}

const numToHex = (n, size) =>
{
  if (typeof(n) != 'string')
    n = n.toString(16)
  n = '0'.repeat((size*2)-n.length) + n
  return n
}

const hash_ledger =
  (ledger_index, total_coins,
   parent_hash, transaction_hash, account_hash,
   parent_close_time, close_time, close_time_resolution, close_flags) =>
{
  if (typeof(parent_hash) != 'string')
    parent_hash = parent_hash.toString('hex')

  if (typeof(transaction_hash) != 'string')
    transaction_hash = transaction_hash.toString('hex')

  if (typeof(account_hash) != 'string')
    account_hash = account_hash.toString('hex')

  if (typeof(ledger_index) == 'string')
    ledger_index = BigInt(ledger_index)

  if (typeof(total_coins) == 'string')
    total_coins = BigInt(total_coins)

  if (typeof(parent_close_time) == 'string')
    parent_close_time = BigInt(parent_close_time)

  if (typeof(close_time) == 'string')
    close_time = BigInt(close_time)

  if (typeof(close_time_resolution) == 'string')
    close_time_resolution = BigInt(close_time_resolution)

  if (typeof(close_flags) == 'string')
    close_flags = BigInt(close_flags)

  const payload =
    prefix_LWR +
    numToHex(ledger_index, 4) +
    numToHex(total_coins, 8) +
    parent_hash +
    transaction_hash +
    account_hash +
    numToHex(parent_close_time, 4) +
    numToHex(close_time, 4) +
    numToHex(close_time_resolution, 1) +
    numToHex(close_flags, 1).toUpperCase()

  return crypto.createHash('sha512').
    update(Buffer.from(payload, 'hex')).
    digest().
    slice(0,32).
    toString('hex').
    toUpperCase()
}
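hash_ledger reproduces a closed ledger header hash: the LWR prefix followed by the fixed-width header fields, through the first half of SHA-512. xpop() above feeds it the fields of ledger_info.json and compares the result to ledger_hash. A hypothetical call, with made-up field values in place of a real ledger_info.json:

// Hypothetical values; in practice these come from ledger_info.json.
const computed = hash_ledger(
  '1234567',           // ledger_index (string, coerced to BigInt)
  '99999999999999999', // total_coins
  'AB'.repeat(32),     // parent_hash
  'CD'.repeat(32),     // transaction_hash (the computed transactions root)
  'EF'.repeat(32),     // account_hash
  '741815471',         // parent_close_time
  '741815472',         // close_time
  10,                  // close_time_resolution
  0,                   // close_flags
)
console.log(computed)  // 64-char uppercase hex, compared against ledger_hash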

const compute_tree = (tree, depth=0) =>
{
  const nullhash = '0'.repeat(64)

  let hasher = crypto.createHash('sha512')
  hasher.update(Buffer.from(prefix_MIN, 'hex'))
  for (let i = 0; i < 16; ++i)
  {
    let nibble = hex[i]
    let to_append = ''
    if (tree.children[nibble] === undefined)
      to_append = nullhash
    else if (Object.keys(tree.children[nibble].children).length == 0)
      to_append = tree.children[nibble].hash
    else
      to_append = compute_tree(tree.children[nibble], depth+1)

    hasher.update(Buffer.from(to_append, 'hex'))
  }

  tree.hash = hasher.digest().slice(0,32).toString('hex').toUpperCase()
  return tree.hash
}

const hash_txn = txn =>
{
  if (typeof(txn) != 'string')
    txn = txn.toString('hex')
  return sha512h(prefix_TXN + txn)
}

const hash_txn_and_meta = (txn, meta) =>
{
  if (typeof(txn) != 'string')
    txn = txn.toString('hex')
  if (typeof(meta) != 'string')
    meta = meta.toString('hex')
  const vl1 = make_vl_bytes(txn.length/2)
  const vl2 = make_vl_bytes(meta.length/2)
  return sha512h(prefix_SND + vl1 + txn + vl2 + meta + hash_txn(txn))
}

const report_error = e =>
{
  throw(e)
  //console.error(e)
}

const create_tree = txns =>
{
  let root = {children: {}, hash: null, key: '0'.repeat(64)}

  // pass one: populate
  for (let k = 0; k < txns.length; ++k)
  {
    const txn = txns[k].tx_blob
    const meta = txns[k].meta

    const hash = hash_txn(txn)

    let node = root
    let upto = 0

    let error = true
    while (upto < hash.length)
    {
      let nibble = hash[upto]

      if (!(nibble in node.children))
      {
        node.children[nibble] = {
          children: {},
          hash: hash_txn_and_meta(txn, meta),
          key : hash
        }
        error = false
        break
      }
      else if (Object.keys(node.children[nibble].children).length == 0)
      {
        // create a new node
        let oldnode = node.children[nibble]
        let newnibble = oldnode.key[upto+1]
        node.children[nibble] = {children: {}, hash: null, key: hash.slice(0,upto+1)}
        node.children[nibble].children[newnibble] = oldnode
        node = node.children[nibble]
        upto++
        continue
      }
      else
      {
        node = node.children[nibble]
        upto++
        continue
      }
    }

    if (error)
    {
      report_error(error)
      return false
    }
  }

  // pass two: recursively compute hashes
  compute_tree(root)

  return root
}

// generate the proof
// pass valid merkle tree and the canonical txn hash as key
const create_proof_from_tree = (tree, key, upto = 0) =>
{
  if (tree === undefined)
    return false

  tree = tree.children

  if (tree === undefined)
    return false

  let proof = []

  let n = parseInt(key[upto], 16)

  for (let i = 0; i < 16; ++i)
  {
    const h = hex[i]
    if (i == n)
    {
      if (tree[h] === undefined)
        return false
      else if (tree[h].key == key)
        proof.push(tree[h].hash)
      else
      {
        let retval = create_proof_from_tree(tree[h], key, upto+1)
        if (!retval)
          return false
        proof.push(retval)
      }
    }
    else if (tree[h] === undefined)
      proof.push('0'.repeat(64))
    else
      proof.push(tree[h].hash)
  }
  return proof
}

const create_proof = (txns, key) =>
{
  const tree = create_tree(txns)
  if (!tree)
    return false
  return create_proof_from_tree(tree, key, 0)
}

const hash_proof = (proof) =>
{
  if (proof === undefined)
    return false

  let hasher = crypto.createHash('sha512')
  hasher.update(Buffer.from(prefix_MIN, 'hex'))
  for (let i = 0; i < 16; ++i)
  {
    if (proof[i] === undefined)
      return false
    else if (typeof(proof[i]) == 'string')
      hasher.update(Buffer.from(proof[i], 'hex'))
    else
      hasher.update(Buffer.from(hash_proof(proof[i]), 'hex'))
  }
  return hasher.digest().slice(0,32).toString('hex').toUpperCase()
}

const verify_proof = (root_hash, proof) =>
{
  if (typeof(root_hash) != 'string' || typeof(proof) != 'object')
    return false

  return root_hash.toUpperCase() == hash_proof(proof)
}

const proof_contains = (proof, tx_blob, meta, already_computed = false) =>
{
  if (proof === undefined)
    return false

  const hash = (already_computed ? already_computed : hash_txn_and_meta(tx_blob, meta))

  for (let i = 0; i < 16; ++i)
  {
    if (proof[i] === undefined)
      return false

    if (proof[i] == hash)
      return true

    if (typeof(proof[i]) == 'object' && proof_contains(proof[i], null, null, hash))
      return true
  }

  return false
}

lib/xpopGenerate.mjs (new file, 87 lines)
@@ -0,0 +1,87 @@
import assert from 'assert'
import { xpop } from './xpop/v1.mjs'
import { writeFile, readFile, readdir } from 'fs'
import { ledgerIndexToFolders } from '../lib/ledgerIndexToFolders.mjs'
import { dirExists } from '../lib/dirExists.mjs'

const cat = async file => {
  return new Promise((resolve, reject) => {
    readFile(new URL('../' + file, import.meta.url).pathname, (err, data) => {
      if (err) {
        reject(err)
      }
      if (data) resolve(data)
    })
  })
}

const catjson = async file => {
  const buffer = await cat(file)
  return JSON.parse(buffer.toString())
}

const xpopGenerate = async ({
  ledgerIndex,
  networkId,
  txHash
}) => {
  console.log('Generating XPOP for', ledgerIndex, txHash)

  const relativeStorDir = 'store/' + networkId + '/' + ledgerIndexToFolders(ledgerIndex)
  const storeDir = new URL('../' + relativeStorDir, import.meta.url).pathname

  if (await dirExists(storeDir)) {
    try {
      const files = await new Promise((resolve, reject) => {
        readdir(storeDir, (err, contents) => {
          if (err) reject(err)
          if (contents) resolve(contents)
        })
      })

      const validationFiles = files.filter(f => f.match(/^validation_n[a-zA-Z0-9]{10,}.json$/))

      assert(files.indexOf('ledger_binary_transactions.json') > -1, 'Missing ledger binary transactions')
      assert(files.indexOf('ledger_info.json') > -1, 'Missing ledger info')
      assert(files.indexOf('vl.json') > -1, 'Missing UNL info')
      assert(files.indexOf('tx_' + txHash + '.json') > -1, 'Missing TX ' + txHash)
      assert(validationFiles.length > 0, 'Validations missing')

      const [
        vl,
        json,
        binary,
        validations,
        tx
      ] = await Promise.all([
        catjson(relativeStorDir + '/vl.json'),
        catjson(relativeStorDir + '/ledger_info.json'),
        catjson(relativeStorDir + '/ledger_binary_transactions.json'),
        Promise.all(validationFiles.map(f => catjson(relativeStorDir + '/' + f))),
        catjson(relativeStorDir + '/tx_' + txHash + '.json'),
      ])

      const xpopJson = await xpop({ vl, ledger: { json, binary, }, validations, tx, })
      const xpopFilename = 'xpop_' + txHash + '.json'

      writeFile(storeDir + '/' + xpopFilename, Buffer.from(xpopJson, 'utf8'), err => {
        if (err) {
          console.log(' !!!->> Error writing xpop-file @ ' + storeDir)
        } else {
          console.log(' ---->> xPOP stored @ ' + relativeStorDir + '/' + xpopFilename + ', strlen: ' + xpopJson.length)
        }
      })

      return Buffer.from(xpopJson, 'utf-8').toString('hex')
    } catch (e) {
      console.log(e)
      throw new Error('Not all files required for xPOP generation found')
    }
  } else {
    throw new Error('xPOP source data storage dir missing: ' + relativeStorDir)
  }
}

export {
  xpopGenerate,
}
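xpopGenerate returns the hex-encoded JSON xPOP, the same blob onTransaction writes to store/xpop/<txhash> and emits over the websocket. A hypothetical invocation, with made-up identifiers, assuming all source files (vl.json, ledger_info.json, ledger_binary_transactions.json, tx_<hash>.json, validations) already exist on disk:

// Hypothetical inputs for illustration only:
const hexBlob = await xpopGenerate({
  ledgerIndex: 1234567,
  networkId: 21337,
  txHash: 'C0FFEE'.padEnd(64, '0'), // placeholder 64-char tx hash
})

// The blob is hex of the JSON document assembled by xpop() in lib/xpop/v1.mjs:
const xpopJson = JSON.parse(Buffer.from(hexBlob, 'hex').toString('utf-8'))
console.log(xpopJson.ledger.index, xpopJson.transaction.proof)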