Mirror of https://github.com/Xahau/xahaud.git (synced 2026-01-11 18:25:16 +00:00)

Compare commits: fix-ips-fi...export (9 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 8329d78f32 |  |
|  | bf4579c1d1 |  |
|  | 73e099eb23 |  |
|  | 2e311b4259 |  |
|  | 7c8e940091 |  |
|  | 9b90c50789 |  |
|  | a18e2cb2c6 |  |
|  | be5f425122 |  |
|  | fc6f4762da |  |
@@ -350,7 +350,10 @@ enum hook_return_code : int64_t {
    MEM_OVERLAP = -43,  // one or more specified buffers are the same memory
    TOO_MANY_STATE_MODIFICATIONS = -44,  // more than 5000 modified state
                                         // entires in the combined hook chains
-   TOO_MANY_NAMESPACES = -45
+   TOO_MANY_NAMESPACES = -45,
+   EXPORT_FAILURE = -46,
+   TOO_MANY_EXPORTED_TXN = -47,
};

enum ExitType : uint8_t {
@@ -364,6 +367,7 @@ const uint16_t max_state_modifications = 256;
const uint8_t max_slots = 255;
const uint8_t max_nonce = 255;
const uint8_t max_emit = 255;
const uint8_t max_export = 4;
const uint8_t max_params = 16;
const double fee_base_multiplier = 1.1f;
@@ -469,6 +473,13 @@ static const APIWhitelist import_whitelist_1{
    // clang-format on
};

static const APIWhitelist import_whitelist_2{
    // clang-format off
    HOOK_API_DEFINITION(I64, xport, (I32, I32)),
    HOOK_API_DEFINITION(I64, xport_reserve, (I32)),
    // clang-format on
};

#undef HOOK_API_DEFINITION
#undef I32
#undef I64
@@ -1034,6 +1034,12 @@ validateGuards(
        {
            // PASS, this is a version 1 api
        }
        else if (rulesVersion & 0x04U &&
            hook_api::import_whitelist_2.find(import_name) !=
                hook_api::import_whitelist_2.end())
        {
            // PASS, this is an export api
        }
        else
        {
            GUARDLOG(hook::log::IMPORT_ILLEGAL)
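For orientation, `rulesVersion` is a small bitmask of guard-rule revisions. Below is a minimal sketch of how the call sites later in this compare (Change::activateXahauGenesis and SetHook::validateHookSetEntry) compose it; bit 0x04 is what gates the export whitelist checked above. The helper name is illustrative, not part of the diff.

// Sketch only: mirrors the flag arithmetic at the call sites shown later in
// this compare; makeRulesVersion is a hypothetical helper name.
#include <cstdint>

std::uint32_t
makeRulesVersion(bool hooksUpdate1, bool fix20250131, bool exportEnabled)
{
    return (hooksUpdate1 ? 1u : 0u) +    // featureHooksUpdate1
           (fix20250131 ? 2u : 0u) +     // fix20250131
           (exportEnabled ? 4u : 0u);    // featureExport: allows xport/xport_reserve
}

This also explains the guard-tool change further down, where the hard-coded rules version passed to validateGuards moves from 3 (bits 1+2) to 7 (bits 1+2+4).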
@@ -406,6 +406,17 @@ DECLARE_HOOK_FUNCTION(
    uint32_t slot_no_tx,
    uint32_t slot_no_meta);

DECLARE_HOOK_FUNCTION(
    int64_t,
    xport,
    uint32_t write_ptr,
    uint32_t write_len,
    uint32_t read_ptr,
    uint32_t read_len);
DECLARE_HOOK_FUNCTION(
    int64_t,
    xport_reserve,
    uint32_t count);
/*
DECLARE_HOOK_FUNCTION(int64_t, str_find, uint32_t hread_ptr,
uint32_t hread_len, uint32_t nread_ptr, uint32_t nread_len, uint32_t mode,
@@ -485,6 +496,8 @@ struct HookResult

    std::queue<std::shared_ptr<ripple::Transaction>>
        emittedTxn{};  // etx stored here until accept/rollback
    std::queue<std::shared_ptr<ripple::Transaction>>
        exportedTxn{};
    HookStateMap& stateMap;
    uint16_t changedStateCount = 0;
    std::map<
@@ -541,6 +554,7 @@ struct HookContext
    uint16_t ledger_nonce_counter{0};
    int64_t expected_etxn_count{-1};  // make this a 64bit int so the uint32
                                      // from the hookapi cant overflow it
    int64_t expected_export_count{-1};
    std::map<ripple::uint256, bool> nonce_used{};
    uint32_t generation =
        0;  // used for caching, only generated when txn_generation is called
@@ -877,6 +891,9 @@ public:
        ADD_HOOK_FUNCTION(meta_slot, ctx);
        ADD_HOOK_FUNCTION(xpop_slot, ctx);

        ADD_HOOK_FUNCTION(xport, ctx);
        ADD_HOOK_FUNCTION(xport_reserve, ctx);

        /*
        ADD_HOOK_FUNCTION(str_find, ctx);
        ADD_HOOK_FUNCTION(str_replace, ctx);
@@ -79,7 +79,7 @@ main(int argc, char** argv)

    close(fd);

-   auto result = validateGuards(hook, std::cout, "", 3);
+   auto result = validateGuards(hook, std::cout, "", 7);

    if (!result)
    {
@@ -1971,6 +1971,8 @@ hook::finalizeHookResult(
    // directory) if we are allowed to
    std::vector<std::pair<uint256 /* txnid */, uint256 /* emit nonce */>>
        emission_txnid;
    std::vector<uint256 /* txnid */>
        exported_txnid;

    if (doEmit)
    {
@@ -2026,6 +2028,58 @@ hook::finalizeHookResult(
                }
            }
        }

        DBG_PRINTF("exported txn count: %d\n", hookResult.exportedTxn.size());
        for (; hookResult.exportedTxn.size() > 0; hookResult.exportedTxn.pop())
        {
            auto& tpTrans = hookResult.exportedTxn.front();
            auto& id = tpTrans->getID();
            JLOG(j.trace()) << "HookExport[" << HR_ACC() << "]: " << id;

            // exported txns must be marked bad by the hash router to ensure under
            // no circumstances they will enter consensus on *this* chain.
            applyCtx.app.getHashRouter().setFlags(id, SF_BAD);

            std::shared_ptr<const ripple::STTx> ptr =
                tpTrans->getSTransaction();

            auto exportedId = keylet::exportedTxn(id);
            auto sleExported = applyCtx.view().peek(exportedId);

            if (!sleExported)
            {
                exported_txnid.emplace_back(id);

                sleExported = std::make_shared<SLE>(exportedId);

                // RH TODO: add a new constructor to STObject to avoid this
                // serder thing
                ripple::Serializer s;
                ptr->add(s);
                SerialIter sit(s.slice());

                sleExported->emplace_back(ripple::STObject(sit, sfExportedTxn));
                auto page = applyCtx.view().dirInsert(
                    keylet::exportedDir(), exportedId, [&](SLE::ref sle) {
                        (*sle)[sfFlags] = lsfEmittedDir;
                    });

                if (page)
                {
                    (*sleExported)[sfOwnerNode] = *page;
                    applyCtx.view().insert(sleExported);
                }
                else
                {
                    JLOG(j.warn())
                        << "HookError[" << HR_ACC() << "]: "
                        << "Export Directory full when trying to insert "
                        << id;
                    return tecDIR_FULL;
                }
            }
        }
    }

    bool const fixV2 = applyCtx.view().rules().enabled(fixXahauV2);
@@ -2052,6 +2106,12 @@ hook::finalizeHookResult(
        meta.setFieldU16(
            sfHookEmitCount,
            emission_txnid.size());  // this will never wrap, hard limit
        if (applyCtx.view().rules().enabled(featureExport))
        {
            meta.setFieldU16(
                sfHookExportCount,
                exported_txnid.size());
        }
        meta.setFieldU16(sfHookExecutionIndex, exec_index);
        meta.setFieldU16(sfHookStateChangeCount, hookResult.changedStateCount);
        meta.setFieldH256(sfHookHash, hookResult.hookHash);
@@ -3888,6 +3948,27 @@ DEFINE_HOOK_FUNCTION(int64_t, etxn_reserve, uint32_t count)
    HOOK_TEARDOWN();
}

DEFINE_HOOK_FUNCTION(int64_t, xport_reserve, uint32_t count)
{
    HOOK_SETUP();  // populates memory_ctx, memory, memory_length, applyCtx,
                   // hookCtx on current stack

    if (hookCtx.expected_export_count > -1)
        return ALREADY_SET;

    if (count < 1)
        return TOO_SMALL;

    if (count > hook_api::max_export)
        return TOO_BIG;

    hookCtx.expected_export_count = count;

    return count;

    HOOK_TEARDOWN();
}

// Compute the burden of an emitted transaction based on a number of factors
DEFINE_HOOK_FUNCNARG(int64_t, etxn_burden)
{
@@ -6156,6 +6237,92 @@ DEFINE_HOOK_FUNCTION(

    HOOK_TEARDOWN();
}

DEFINE_HOOK_FUNCTION(
    int64_t,
    xport,
    uint32_t write_ptr,
    uint32_t write_len,
    uint32_t read_ptr,
    uint32_t read_len)
{
    HOOK_SETUP();

    if (NOT_IN_BOUNDS(read_ptr, read_len, memory_length))
        return OUT_OF_BOUNDS;

    if (NOT_IN_BOUNDS(write_ptr, write_len, memory_length))
        return OUT_OF_BOUNDS;

    if (write_len < 32)
        return TOO_SMALL;

    auto& app = hookCtx.applyCtx.app;

    if (hookCtx.expected_export_count < 0)
        return PREREQUISITE_NOT_MET;

    if (hookCtx.result.exportedTxn.size() >= hookCtx.expected_export_count)
        return TOO_MANY_EXPORTED_TXN;

    ripple::Blob blob{memory + read_ptr, memory + read_ptr + read_len};

    std::shared_ptr<STTx const> stpTrans;
    try
    {
        stpTrans = std::make_shared<STTx const>(
            SerialIter{memory + read_ptr, read_len});
    }
    catch (std::exception& e)
    {
        JLOG(j.trace()) << "HookExport[" << HC_ACC() << "]: Failed " << e.what()
                        << "\n";
        return EXPORT_FAILURE;
    }

    if (!stpTrans->isFieldPresent(sfAccount) ||
        stpTrans->getAccountID(sfAccount) != hookCtx.result.account)
    {
        JLOG(j.trace()) << "HookExport[" << HC_ACC()
                        << "]: Attempted to export a txn that's not for this Hook's Account ID.";
        return EXPORT_FAILURE;
    }

    std::string reason;
    auto tpTrans = std::make_shared<Transaction>(stpTrans, reason, app);
    // RHTODO: is this needed or wise? VVV
    if (tpTrans->getStatus() != NEW)
    {
        JLOG(j.trace()) << "HookExport[" << HC_ACC()
                        << "]: tpTrans->getStatus() != NEW";
        return EXPORT_FAILURE;
    }
    auto const& txID = tpTrans->getID();

    if (txID.size() > write_len)
        return TOO_SMALL;

    if (NOT_IN_BOUNDS(write_ptr, txID.size(), memory_length))
        return OUT_OF_BOUNDS;

    auto const write_txid = [&]() -> int64_t {
        WRITE_WASM_MEMORY_AND_RETURN(
            write_ptr,
            txID.size(),
            txID.data(),
            txID.size(),
            memory,
            memory_length);
    };

    int64_t result = write_txid();

    if (result == 32)
        hookCtx.result.exportedTxn.push(tpTrans);

    return result;
    HOOK_TEARDOWN();
}
/*

DEFINE_HOOK_FUNCTION(
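A minimal guest-side sketch of how a hook might drive the two calls above, assuming the usual hook C ABI (host functions imported as extern "C", pointers passed as 32-bit wasm offsets); tx_blob/tx_len stand in for a serialized transaction prepared elsewhere in the hook, and try_export is a hypothetical name, not part of the diff.

// Sketch only: reserve first, then export, and expect the 32-byte transaction
// ID to be written into the output buffer on success.
#include <cstdint>

extern "C" {
int64_t xport_reserve(uint32_t count);
int64_t xport(
    uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
}

static uint8_t txid[32];

int64_t
try_export(uint32_t tx_blob, uint32_t tx_len)
{
    // Without a prior reservation xport returns PREREQUISITE_NOT_MET; at most
    // hook_api::max_export (4) exports may be reserved per execution.
    if (xport_reserve(1) < 0)
        return -1;

    // Returns 32 (bytes written) on success, or a negative hook_return_code
    // such as EXPORT_FAILURE or TOO_MANY_EXPORTED_TXN.
    return xport((uint32_t)(uintptr_t)txid, sizeof(txid), tx_blob, tx_len);
}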
@@ -599,6 +599,13 @@ public:
        return validatorKeys_.publicKey;
    }

    ValidatorKeys const&
    getValidatorKeys() const override
    {
        return validatorKeys_;
    }

    NetworkOPs&
    getOPs() override
    {
@@ -240,7 +240,8 @@ public:

    virtual PublicKey const&
    getValidationPublicKey() const = 0;

    virtual ValidatorKeys const&
    getValidatorKeys() const = 0;
    virtual Resource::Manager&
    getResourceManager() = 0;
    virtual PathRequests&
@@ -471,10 +471,6 @@ ManifestCache::applyManifest(Manifest m)

    auto masterKey = m.masterKey;
    map_.emplace(std::move(masterKey), std::move(m));

-   // Increment sequence to invalidate cached manifest messages
-   seq_++;
-
    return ManifestDisposition::accepted;
}
@@ -27,6 +27,8 @@
#include <ripple/protocol/Feature.h>
#include <ripple/protocol/jss.h>
#include <ripple/protocol/st.h>
#include <ripple/app/misc/ValidatorKeys.h>
#include <ripple/protocol/Sign.h>
#include <algorithm>
#include <limits>
#include <numeric>
@@ -1539,6 +1541,247 @@ TxQ::accept(Application& app, OpenView& view)
        }
    }

    // Inject exported transactions/signatures, if any
    if (view.rules().enabled(featureExport))
    {
        do
        {
            // if we're not a validator we do nothing here
            if (app.getValidationPublicKey().empty())
                break;

            auto const& keys = app.getValidatorKeys();

            if (keys.configInvalid())
                break;

            // and if we're not on the UNLReport we also do nothing

            auto const unlRep = view.read(keylet::UNLReport());
            if (!unlRep || !unlRep->isFieldPresent(sfActiveValidators))
            {
                // nothing to do without a unlreport object
                break;
            }

            bool found = false;
            auto const& avs = unlRep->getFieldArray(sfActiveValidators);
            for (auto const& av : avs)
            {
                if (PublicKey(av[sfPublicKey]) == keys.masterPublicKey)
                {
                    found = true;
                    break;
                }
            }

            if (!found)
                break;

            // execution to here means we're a validator and on the UNLReport

            AccountID signingAcc = calcAccountID(keys.publicKey);

            Keylet const exportedDirKeylet{keylet::exportedDir()};
            if (dirIsEmpty(view, exportedDirKeylet))
                break;

            std::shared_ptr<SLE const> sleDirNode{};
            unsigned int uDirEntry{0};
            uint256 dirEntry{beast::zero};

            if (!cdirFirst(
                    view,
                    exportedDirKeylet.key,
                    sleDirNode,
                    uDirEntry,
                    dirEntry))
                break;

            do
            {
                Keylet const itemKeylet{ltCHILD, dirEntry};
                auto sleItem = view.read(itemKeylet);
                if (!sleItem)
                {
                    // Directory node has an invalid index. Bail out.
                    JLOG(j_.warn())
                        << "ExportedTxn processing: directory node in ledger "
                        << view.seq()
                        << " has index to object that is missing: "
                        << to_string(dirEntry);

                    // RH TODO: if this ever happens the entry should be
                    // gracefully removed (somehow)
                    continue;
                }

                LedgerEntryType const nodeType{
                    safe_cast<LedgerEntryType>((*sleItem)[sfLedgerEntryType])};

                if (nodeType != ltEXPORTED_TXN)
                {
                    JLOG(j_.warn())
                        << "ExportedTxn processing: emitted directory contained "
                           "non ltEMITTED_TXN type";
                    // RH TODO: if this ever happens the entry should be
                    // gracefully removed (somehow)
                    continue;
                }

                JLOG(j_.info()) << "Processing exported txn: " << *sleItem;

                auto const& exported =
                    const_cast<ripple::STLedgerEntry&>(*sleItem)
                        .getField(sfExportedTxn)
                        .downcast<STObject>();

                auto const& txnHash = sleItem->getFieldH256(sfTransactionHash);

                auto exportedLgrSeq = exported.getFieldU32(sfLedgerSequence);

                auto const seq = view.seq();

                if (exportedLgrSeq == seq)
                {
                    // this shouldn't happen, but do nothing
                    continue;
                }

                if (exportedLgrSeq < seq - 1)
                {
                    // all old entries need to be turned into Export transactions so they can be removed
                    // from the directory

                    // in the previous ledger all the ExportSign transactions were executed, and one-by-one
                    // added the validators' signatures to the ltEXPORTED_TXN's sfSigners array.
                    // now we need to collect these together and place them inside the ExportedTxn blob
                    // and publish the blob in the Export transaction type.

                    STArray signers = sleItem->getFieldArray(sfSigners);

                    auto s = std::make_shared<ripple::Serializer>();
                    exported.add(*s);
                    SerialIter sitTrans(s->slice());
                    try
                    {
                        auto stpTrans =
                            std::make_shared<STTx>(std::ref(sitTrans));

                        if (!stpTrans->isFieldPresent(sfAccount) ||
                            stpTrans->getAccountID(sfAccount) == beast::zero)
                        {
                            JLOG(j_.warn()) << "Hook: Export failure: "
                                            << "sfAccount missing or zero.";
                            // RH TODO: if this ever happens the entry should be
                            // gracefully removed (somehow)
                            continue;
                        }

                        // RH TODO: should we force remove signingpubkey here?

                        stpTrans->setFieldArray(sfSigners, signers);

                        Blob const& blob = stpTrans->getSerializer().peekData();

                        STTx exportTx(ttEXPORT, [&](auto& obj) {
                            obj.setFieldVL(sfExportedTxn, blob);
                            obj.setFieldU32(sfLedgerSequence, seq);
                            obj.setFieldH256(sfTransactionHash, txnHash);
                            obj.setFieldArray(sfSigners, signers);
                        });

                        // submit to the ledger
                        {
                            uint256 txID = exportTx.getTransactionID();
                            auto s = std::make_shared<ripple::Serializer>();
                            exportTx.add(*s);
                            app.getHashRouter().setFlags(txID, SF_PRIVATE2);
                            app.getHashRouter().setFlags(txID, SF_EMITTED);
                            view.rawTxInsert(txID, std::move(s), nullptr);
                            ledgerChanged = true;
                        }
                    }
                    catch (std::exception& e)
                    {
                        JLOG(j_.warn())
                            << "ExportedTxn Processing: Failure: " << e.what()
                            << "\n";
                    }

                    continue;
                }

                // this ledger is the one after the exported txn was added to the directory
                // so generate the export sign txns

                auto s = std::make_shared<ripple::Serializer>();
                exported.add(*s);
                SerialIter sitTrans(s->slice());
                try
                {
                    auto const& stpTrans =
                        std::make_shared<STTx const>(std::ref(sitTrans));

                    if (!stpTrans->isFieldPresent(sfAccount) ||
                        stpTrans->getAccountID(sfAccount) == beast::zero)
                    {
                        JLOG(j_.warn()) << "Hook: Export failure: "
                                        << "sfAccount missing or zero.";
                        // RH TODO: if this ever happens the entry should be
                        // gracefully removed (somehow)
                        continue;
                    }

                    auto seq = view.info().seq;
                    auto txnHash = stpTrans->getTransactionID();

                    Serializer s =
                        buildMultiSigningData(*stpTrans, signingAcc);

                    auto multisig = ripple::sign(keys.publicKey, keys.secretKey, s.slice());

                    STTx exportSignTx(ttEXPORT_SIGN, [&](auto& obj) {
                        obj.set(([&]() {
                            auto inner = std::make_unique<STObject>(sfSigner);
                            inner->setFieldVL(sfSigningPubKey, keys.publicKey);
                            inner->setAccountID(sfAccount, signingAcc);
                            inner->setFieldVL(sfTxnSignature, multisig);
                            return inner;
                        })());
                        obj.setFieldU32(sfLedgerSequence, seq);
                        obj.setFieldH256(sfTransactionHash, txnHash);
                    });

                    // submit to the ledger
                    {
                        uint256 txID = exportSignTx.getTransactionID();
                        auto s = std::make_shared<ripple::Serializer>();
                        exportSignTx.add(*s);
                        app.getHashRouter().setFlags(txID, SF_PRIVATE2);
                        app.getHashRouter().setFlags(txID, SF_EMITTED);
                        view.rawTxInsert(txID, std::move(s), nullptr);
                        ledgerChanged = true;
                    }
                }
                catch (std::exception& e)
                {
                    JLOG(j_.warn())
                        << "ExportedTxn Processing: Failure: " << e.what()
                        << "\n";
                }

            } while (cdirNext(
                view, exportedDirKeylet.key, sleDirNode, uDirEntry, dirEntry));

        } while (0);
    }

    // Inject emitted transactions if any
    if (view.rules().enabled(featureHooks))
        do
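Condensed, the per-entry decision in the loop above is a function of how old the ltEXPORTED_TXN entry is relative to the current ledger. A self-contained restatement follows; the enum and function names are illustrative, not from the diff.

// Sketch only: ledger N writes the ltEXPORTED_TXN entry (finalizeHookResult),
// ledger N+1 has each validator inject a ttEXPORT_SIGN carrying its signature,
// and from ledger N+2 the collected signatures are bundled into a ttEXPORT so
// the entry can be removed from the exported directory.
#include <cstdint>

enum class ExportAction { None, InjectExportSign, InjectExport };

ExportAction
chooseExportAction(std::uint32_t exportedLgrSeq, std::uint32_t currentSeq)
{
    if (exportedLgrSeq == currentSeq)
        return ExportAction::None;          // created this ledger: nothing to do yet
    if (exportedLgrSeq < currentSeq - 1)
        return ExportAction::InjectExport;  // signatures collected: wrap into ttEXPORT
    return ExportAction::InjectExportSign;  // first ledger after creation: add our signature
}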
@@ -96,6 +96,13 @@ Change::preflight(PreflightContext const& ctx)
        }
    }

    if ((ctx.tx.getTxnType() == ttEXPORT_SIGN || ctx.tx.getTxnType() == ttEXPORT) &&
        !ctx.rules.enabled(featureExport))
    {
        JLOG(ctx.j.warn()) << "Change: Export not enabled";
        return temDISABLED;
    }

    return tesSUCCESS;
}
@@ -154,6 +161,8 @@ Change::preclaim(PreclaimContext const& ctx)
        case ttAMENDMENT:
        case ttUNL_MODIFY:
        case ttEMIT_FAILURE:
        case ttEXPORT:
        case ttEXPORT_SIGN:
            return tesSUCCESS;
        case ttUNL_REPORT: {
            if (!ctx.tx.isFieldPresent(sfImportVLKey) ||
@@ -209,6 +218,11 @@ Change::doApply()
            return applyEmitFailure();
        case ttUNL_REPORT:
            return applyUNLReport();
        case ttEXPORT:
            return applyExport();
        case ttEXPORT_SIGN:
            return applyExportSign();

        default:
            assert(0);
            return tefFAILURE;
@@ -606,7 +620,8 @@ Change::activateXahauGenesis()
        loggerStream,
        "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
        (ctx_.view().rules().enabled(featureHooksUpdate1) ? 1 : 0) +
-           (ctx_.view().rules().enabled(fix20250131) ? 2 : 0));
+           (ctx_.view().rules().enabled(fix20250131) ? 2 : 0) +
+           (ctx_.view().rules().enabled(featureExport) ? 4 : 0));

    if (!result)
    {
@@ -1072,6 +1087,80 @@ Change::applyEmitFailure()
    return tesSUCCESS;
}

TER
Change::applyExport()
{
    uint256 txnID(ctx_.tx.getFieldH256(sfTransactionHash));
    do
    {
        JLOG(j_.info()) << "HookExport[" << txnID
                        << "]: ttExport exporting transaction";

        auto key = keylet::exportedTxn(txnID);

        auto const& sle = view().peek(key);

        if (!sle)
        {
            // most likely explanation is that this was somehow a double-up, so just ignore
            JLOG(j_.warn())
                << "HookError[" << txnID << "]: ttExport could not find exported txn in ledger";
            break;
        }

        if (!view().dirRemove(
                keylet::exportedDir(),
                sle->getFieldU64(sfOwnerNode),
                key,
                false))
        {
            JLOG(j_.fatal()) << "HookError[" << txnID
                             << "]: ttExport (Change) tefBAD_LEDGER";
            return tefBAD_LEDGER;
        }

        view().erase(sle);
    } while (0);
    return tesSUCCESS;
}

TER
Change::applyExportSign()
{
    uint256 txnID(ctx_.tx.getFieldH256(sfTransactionHash));
    do
    {
        JLOG(j_.info()) << "HookExport[" << txnID
                        << "]: ttExportSign adding signature to transaction";

        auto key = keylet::exportedTxn(txnID);

        auto const& sle = view().peek(key);

        if (!sle)
        {
            // most likely explanation is that this was somehow a double-up, so just ignore
            JLOG(j_.warn())
                << "HookError[" << txnID << "]: ttExportSign could not find exported txn in ledger";
            break;
        }

        // grab the signer object off the txn
        STObject signerObj = const_cast<ripple::STTx&>(ctx_.tx)
                                 .getField(sfSigner)
                                 .downcast<STObject>();

        // append it to the signers field in the ledger object
        STArray signers = sle->getFieldArray(sfSigners);
        signers.push_back(signerObj);
        sle->setFieldArray(sfSigners, signers);

        // done
        view().update(sle);
    } while (0);
    return tesSUCCESS;
}

TER
Change::applyUNLModify()
{
@@ -74,6 +74,12 @@ private:
    TER
    applyEmitFailure();

    TER
    applyExport();

    TER
    applyExportSign();

    TER
    applyUNLReport();
};
@@ -37,9 +37,12 @@
#include <charconv>
#include <iostream>
#include <vector>
#include <ripple/app/hook/applyHook.h>

namespace ripple {

static const uint256 shadowTicketNamespace = uint256::fromVoid("RESERVED NAMESPACE SHADOW TICKET");

TxConsequences
Import::makeTxConsequences(PreflightContext const& ctx)
{
@@ -197,7 +200,7 @@ Import::preflight(PreflightContext const& ctx)
    if (!stpTrans || !meta)
        return temMALFORMED;

-   if (stpTrans->isFieldPresent(sfTicketSequence))
+   if (stpTrans->isFieldPresent(sfTicketSequence) && !ctx.rules.enabled(featureExport))
    {
        JLOG(ctx.j.warn()) << "Import: cannot use TicketSequence XPOP.";
        return temMALFORMED;
@@ -888,6 +891,26 @@ Import::preclaim(PreclaimContext const& ctx)
        return tefINTERNAL;
    }

    bool const hasTicket = stpTrans->isFieldPresent(sfTicketSequence);

    if (hasTicket)
    {
        if (!ctx.view.rules().enabled(featureExport))
            return tefINTERNAL;

        auto const acc = stpTrans->getAccountID(sfAccount);
        uint256 const seq = uint256(stpTrans->getFieldU32(sfTicketSequence));

        // check if there is a shadow ticket, and if not we won't allow
        // the txn to pass into consensus

        if (!ctx.view.exists(keylet::hookState(acc, seq, shadowTicketNamespace)))
        {
            JLOG(ctx.j.warn()) << "Import: attempted to import a txn without shadow ticket.";
            return telSHADOW_TICKET_REQUIRED;  // tel code to avoid consensus/forward without SF_BAD
        }
    }

    auto const& sle = ctx.view.read(keylet::account(ctx.tx[sfAccount]));

    auto const tt = stpTrans->getTxnType();
@@ -928,13 +951,17 @@ Import::preclaim(PreclaimContext const& ctx)
        } while (0);
    }

-   if (sle && sle->isFieldPresent(sfImportSequence))
+   if (!hasTicket)
    {
-       uint32_t sleImportSequence = sle->getFieldU32(sfImportSequence);
-
-       // replay attempt
-       if (sleImportSequence >= stpTrans->getFieldU32(sfSequence))
-           return tefPAST_IMPORT_SEQ;
+       if (sle && sle->isFieldPresent(sfImportSequence))
+       {
+           uint32_t sleImportSequence = sle->getFieldU32(sfImportSequence);
+
+           // replay attempt
+           if (sleImportSequence >= stpTrans->getFieldU32(sfSequence))
+               return tefPAST_IMPORT_SEQ;
+       }
    }

    // when importing for the first time the fee must be zero
@@ -1242,7 +1269,11 @@ Import::doApply()
    auto const id = ctx_.tx[sfAccount];
    auto sle = view().peek(keylet::account(id));

-   if (sle && sle->getFieldU32(sfImportSequence) >= importSequence)
+   std::optional<uint256> ticket;
+   if (stpTrans->isFieldPresent(sfTicketSequence))
+       ticket = uint256(stpTrans->getFieldU32(sfTicketSequence));
+
+   if (sle && !ticket.has_value() && sle->getFieldU32(sfImportSequence) >= importSequence)
    {
        // make double sure import seq hasn't passed
        JLOG(ctx_.journal.warn()) << "Import: ImportSequence passed";
@@ -1335,8 +1366,24 @@ Import::doApply()
        }
    }

-   sle->setFieldU32(sfImportSequence, importSequence);
+   if (!ticket.has_value())
+       sle->setFieldU32(sfImportSequence, importSequence);

    sle->setFieldAmount(sfBalance, finalBal);

    if (ticket.has_value())
    {
        auto sleTicket = view().peek(keylet::hookState(id, *ticket, shadowTicketNamespace));
        if (!sleTicket)
            return tefINTERNAL;

        TER result = hook::setHookState(ctx_, id, shadowTicketNamespace, *ticket, {});
        if (result != tesSUCCESS)
            return result;

        // RHUPTO: ticketseq billing?
    }

    if (create)
    {
@@ -491,7 +491,8 @@ SetHook::validateHookSetEntry(SetHookCtx& ctx, STObject const& hookSetObj)
        logger,
        hsacc,
        (ctx.rules.enabled(featureHooksUpdate1) ? 1 : 0) +
-           (ctx.rules.enabled(fix20250131) ? 2 : 0));
+           (ctx.rules.enabled(fix20250131) ? 2 : 0) +
+           (ctx.rules.enabled(featureExport) ? 4 : 0));

    if (ctx.j.trace())
    {
@@ -374,6 +374,8 @@ invoke_calculateBaseFee(ReadView const& view, STTx const& tx)
        case ttUNL_MODIFY:
        case ttUNL_REPORT:
        case ttEMIT_FAILURE:
        case ttEXPORT_SIGN:
        case ttEXPORT:
            return Change::calculateBaseFee(view, tx);
        case ttNFTOKEN_MINT:
            return NFTokenMint::calculateBaseFee(view, tx);
@@ -544,6 +546,8 @@ invoke_apply(ApplyContext& ctx)
        case ttFEE:
        case ttUNL_MODIFY:
        case ttUNL_REPORT:
        case ttEXPORT:
        case ttEXPORT_SIGN:
        case ttEMIT_FAILURE: {
            Change p(ctx);
            return p();
@@ -484,61 +484,44 @@ OverlayImpl::start()
    m_peerFinder->setConfig(config);
    m_peerFinder->start();

-   auto addIps = [this](std::vector<std::string> ips, bool fixed) {
+   auto addIps = [&](std::vector<std::string> bootstrapIps) -> void {
        beast::Journal const& j = app_.journal("Overlay");
-       for (auto& ip : ips)
+       for (auto& ip : bootstrapIps)
        {
            std::size_t pos = ip.find('#');
            if (pos != std::string::npos)
                ip.erase(pos);

-           JLOG(j.trace())
-               << "Found " << (fixed ? "fixed" : "bootstrap") << " IP: " << ip;
+           JLOG(j.trace()) << "Found boostrap IP: " << ip;
        }

        m_resolver.resolve(
-           ips,
-           [this, fixed](
-               std::string const& name,
+           bootstrapIps,
+           [&](std::string const& name,
                std::vector<beast::IP::Endpoint> const& addresses) {
-               beast::Journal const& j = app_.journal("Overlay");
-               std::string const base("config: ");
-
-               std::vector<beast::IP::Endpoint> eps;
-               eps.reserve(addresses.size());
+               std::vector<std::string> ips;
+               ips.reserve(addresses.size());
                for (auto const& addr : addresses)
                {
-                   auto ep = addr.port() == 0 ? addr.at_port(DEFAULT_PEER_PORT)
-                                              : addr;
-                   JLOG(j.trace())
-                       << "Parsed " << (fixed ? "fixed" : "bootstrap")
-                       << " IP: " << ep;
-                   eps.push_back(ep);
+                   std::string addrStr = addr.port() == 0
+                       ? to_string(addr.at_port(DEFAULT_PEER_PORT))
+                       : to_string(addr);
+                   JLOG(j.trace()) << "Parsed boostrap IP: " << addrStr;
+                   ips.push_back(addrStr);
                }

-               if (eps.empty())
-                   return;
-
-               if (fixed)
-               {
-                   m_peerFinder->addFixedPeer(base + name, eps);
-               }
-               else
-               {
-                   std::vector<std::string> strs;
-                   strs.reserve(eps.size());
-                   for (auto const& ep : eps)
-                       strs.push_back(to_string(ep));
-                   m_peerFinder->addFallbackStrings(base + name, strs);
-               }
+               std::string const base("config: ");
+               if (!ips.empty())
+                   m_peerFinder->addFallbackStrings(base + name, ips);
            });
    };

-   if (!app_.config().IPS.empty())
-       addIps(app_.config().IPS, false);
+   addIps(app_.config().IPS);

-   if (!app_.config().IPS_FIXED.empty())
-       addIps(app_.config().IPS_FIXED, true);
+   addIps(app_.config().IPS_FIXED);

    auto const timer = std::make_shared<Timer>(*this);
    std::lock_guard lock(mutex_);
@@ -74,7 +74,7 @@ namespace detail {
// Feature.cpp. Because it's only used to reserve storage, and determine how
// large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than
// the actual number of amendments. A LogicError on startup will verify this.
-static constexpr std::size_t numFeatures = 90;
+static constexpr std::size_t numFeatures = 91;

/** Amendments that this server supports and the default voting behavior.
    Whether they are enabled depends on the Rules defined in the validated
@@ -378,6 +378,7 @@ extern uint256 const fixInvalidTxFlags;
extern uint256 const featureExtendedHookState;
extern uint256 const fixCronStacking;
extern uint256 const fixHookAPI20251128;
extern uint256 const featureExport;
}  // namespace ripple

#endif
@@ -56,9 +56,15 @@ namespace keylet {
Keylet const&
emittedDir() noexcept;

Keylet const&
exportedDir() noexcept;

Keylet
emittedTxn(uint256 const& id) noexcept;

Keylet
exportedTxn(uint256 const& id) noexcept;

Keylet
hookDefinition(uint256 const& hash) noexcept;
@@ -260,6 +260,8 @@ enum LedgerEntryType : std::uint16_t
        \sa keylet::emitted
    */
    ltEMITTED_TXN = 'E',

    ltEXPORTED_TXN = 0x4578,  // Ex (exported transaction)
};
// clang-format off
@@ -318,7 +320,8 @@ enum LedgerSpecificFlags {
    // ltDIR_NODE
    lsfNFTokenBuyOffers = 0x00000001,
    lsfNFTokenSellOffers = 0x00000002,
    lsfEmittedDir = 0x00000004,
    lsfExportedDir = 0x00000008,

    // ltNFTOKEN_OFFER
    lsfSellNFToken = 0x00000001,
@@ -355,6 +355,7 @@ extern SF_UINT16 const sfHookEmitCount;
extern SF_UINT16 const sfHookExecutionIndex;
extern SF_UINT16 const sfHookApiVersion;
extern SF_UINT16 const sfHookStateScale;
extern SF_UINT16 const sfHookExportCount;

// 32-bit integers (common)
extern SF_UINT32 const sfNetworkID;
@@ -595,6 +596,7 @@ extern SField const sfSigner;
extern SField const sfMajority;
extern SField const sfDisabledValidator;
extern SField const sfEmittedTxn;
extern SField const sfExportedTxn;
extern SField const sfHookExecution;
extern SField const sfHookDefinition;
extern SField const sfHookParameter;
@@ -67,6 +67,7 @@ enum TELcodes : TERUnderlyingType {
    telNON_LOCAL_EMITTED_TXN,
    telIMPORT_VL_KEY_NOT_RECOGNISED,
    telCAN_NOT_QUEUE_IMPORT,
    telSHADOW_TICKET_REQUIRED,
};

//------------------------------------------------------------------------------
@@ -149,6 +149,12 @@ enum TxType : std::uint16_t
    ttURITOKEN_CREATE_SELL_OFFER = 48,
    ttURITOKEN_CANCEL_SELL_OFFER = 49,

    /* A pseudo-txn containing an exported transaction plus signatures from the validators */
    ttEXPORT = 90,

    /* A pseudo-txn containing a validator's signature for an export transaction */
    ttEXPORT_SIGN = 91,

    /* A pseudo-txn alarm signal for invoking a hook, emitted by validators after alarm set conditions are met */
    ttCRON = 92,
@@ -484,6 +484,7 @@ REGISTER_FIX (fixInvalidTxFlags, Supported::yes, VoteBehavior::De
REGISTER_FEATURE(ExtendedHookState, Supported::yes, VoteBehavior::DefaultNo);
REGISTER_FIX (fixCronStacking, Supported::yes, VoteBehavior::DefaultYes);
REGISTER_FIX (fixHookAPI20251128, Supported::yes, VoteBehavior::DefaultYes);
REGISTER_FEATURE(Export, Supported::yes, VoteBehavior::DefaultNo);

// The following amendments are obsolete, but must remain supported
// because they could potentially get enabled.
@@ -66,6 +66,8 @@ enum class LedgerNameSpace : std::uint16_t {
    HOOK_DEFINITION = 'D',
    EMITTED_TXN = 'E',
    EMITTED_DIR = 'F',
    EXPORTED_TXN = 0x4578,  // Ex
    EXPORTED_DIR = 0x4564,  // Ed
    NFTOKEN_OFFER = 'q',
    NFTOKEN_BUY_OFFERS = 'h',
    NFTOKEN_SELL_OFFERS = 'i',
@@ -147,6 +149,14 @@ emittedDir() noexcept
    return ret;
}

Keylet const&
exportedDir() noexcept
{
    static Keylet const ret{
        ltDIR_NODE, indexHash(LedgerNameSpace::EXPORTED_DIR)};
    return ret;
}

Keylet
hookStateDir(AccountID const& id, uint256 const& ns) noexcept
{
@@ -159,6 +169,12 @@ emittedTxn(uint256 const& id) noexcept
    return {ltEMITTED_TXN, indexHash(LedgerNameSpace::EMITTED_TXN, id)};
}

Keylet
exportedTxn(uint256 const& id) noexcept
{
    return {ltEXPORTED_TXN, indexHash(LedgerNameSpace::EXPORTED_TXN, id)};
}

Keylet
hook(AccountID const& id) noexcept
{
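A short sketch of how the new keylets might be used to look up a pending exported transaction, mirroring the lookups already shown in Change::applyExport above; view is assumed to be any ripple::ReadView, and findExportedTxn is an illustrative helper name.

// Sketch only: the per-entry keylet is ltEXPORTED_TXN keyed by the exporting
// txn's hash; the owner directory of all pending entries is keylet::exportedDir().
#include <ripple/ledger/ReadView.h>
#include <ripple/protocol/Indexes.h>

namespace ripple {

std::shared_ptr<SLE const>
findExportedTxn(ReadView const& view, uint256 const& txnID)
{
    return view.read(keylet::exportedTxn(txnID));
}

}  // namespace ripple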
@@ -380,6 +380,15 @@ LedgerFormats::LedgerFormats()
            {sfPreviousTxnLgrSeq, soeREQUIRED}
        },
        commonFields);

    add(jss::ExportedTxn,
        ltEXPORTED_TXN,
        {
            {sfExportedTxn, soeOPTIONAL},
            {sfOwnerNode, soeREQUIRED},
            {sfLedgerSequence, soeREQUIRED},
        },
        commonFields);

    // clang-format on
}
@@ -103,6 +103,7 @@ CONSTRUCT_TYPED_SFIELD(sfHookEmitCount, "HookEmitCount", UINT16,
CONSTRUCT_TYPED_SFIELD(sfHookExecutionIndex, "HookExecutionIndex", UINT16, 19);
CONSTRUCT_TYPED_SFIELD(sfHookApiVersion, "HookApiVersion", UINT16, 20);
CONSTRUCT_TYPED_SFIELD(sfHookStateScale, "HookStateScale", UINT16, 21);
CONSTRUCT_TYPED_SFIELD(sfHookExportCount, "HookExportCount", UINT16, 22);

// 32-bit integers (common)
CONSTRUCT_TYPED_SFIELD(sfNetworkID, "NetworkID", UINT32, 1);
@@ -361,6 +362,7 @@ CONSTRUCT_UNTYPED_SFIELD(sfImportVLKey, "ImportVLKey", OBJECT,
CONSTRUCT_UNTYPED_SFIELD(sfHookEmission, "HookEmission", OBJECT, 93);
CONSTRUCT_UNTYPED_SFIELD(sfMintURIToken, "MintURIToken", OBJECT, 92);
CONSTRUCT_UNTYPED_SFIELD(sfAmountEntry, "AmountEntry", OBJECT, 91);
CONSTRUCT_UNTYPED_SFIELD(sfExportedTxn, "ExportedTxn", OBJECT, 90);

// array of objects
// ARRAY/1 is reserved for end of array
@@ -141,6 +141,7 @@ transResults()
        MAKE_ERROR(telNON_LOCAL_EMITTED_TXN, "Emitted transaction cannot be applied because it was not generated locally."),
        MAKE_ERROR(telIMPORT_VL_KEY_NOT_RECOGNISED, "Import vl key was not recognized."),
        MAKE_ERROR(telCAN_NOT_QUEUE_IMPORT, "Import transaction was not able to be directly applied and cannot be queued."),
        MAKE_ERROR(telSHADOW_TICKET_REQUIRED, "The imported transaction uses a TicketSequence but no shadow ticket exists."),
        MAKE_ERROR(temMALFORMED, "Malformed transaction."),
        MAKE_ERROR(temBAD_AMOUNT, "Can only send positive amounts."),
        MAKE_ERROR(temBAD_CURRENCY, "Malformed: Bad currency."),
@@ -490,6 +490,26 @@ TxFormats::TxFormats()
            {sfStartTime, soeOPTIONAL},
        },
        commonFields);

    add(jss::ExportSign,
        ttEXPORT_SIGN,
        {
            {sfSigner, soeREQUIRED},
            {sfLedgerSequence, soeREQUIRED},
            {sfTransactionHash, soeREQUIRED},
        },
        commonFields);

    add(jss::Export,
        ttEXPORT,
        {
            {sfTransactionHash, soeREQUIRED},
            {sfExportedTxn, soeREQUIRED},
            {sfSigners, soeREQUIRED},
            {sfLedgerSequence, soeREQUIRED},
        },
        commonFields);
}

TxFormats const&
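To make the new ExportSign format concrete, here is a hedged sketch of constructing a ttEXPORT_SIGN pseudo-transaction with exactly the fields required above; it follows the construction used in the TxQ::accept hunk earlier, and pubKey/signingAcc/signature are placeholders for values a validator would compute. The function name is illustrative, not part of the diff.

// Sketch only, not part of the diff.
#include <ripple/protocol/STTx.h>
#include <memory>

namespace ripple {

STTx
makeExportSign(
    PublicKey const& pubKey,
    AccountID const& signingAcc,
    Buffer const& signature,
    std::uint32_t ledgerSeq,
    uint256 const& txnHash)
{
    return STTx(ttEXPORT_SIGN, [&](auto& obj) {
        auto inner = std::make_unique<STObject>(sfSigner);
        inner->setFieldVL(sfSigningPubKey, pubKey);
        inner->setAccountID(sfAccount, signingAcc);
        inner->setFieldVL(sfTxnSignature, signature);
        obj.set(std::move(inner));
        obj.setFieldU32(sfLedgerSequence, ledgerSeq);
        obj.setFieldH256(sfTransactionHash, txnHash);
    });
}

}  // namespace ripple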
@@ -140,6 +140,9 @@ JSS(HookState); // ledger type.
JSS(HookStateData); // field.
JSS(HookStateKey); // field.
JSS(EmittedTxn); // ledger type.
JSS(ExportedTxn);
JSS(Export);
JSS(ExportSign);
JSS(SignerList); // ledger type.
JSS(SignerListSet); // transaction type.
JSS(SigningPubKey); // field.