Merge branch 'tmp-conan' into sync-1.12.0-conan

tequ
2025-04-29 16:34:04 +09:00
13 changed files with 58 additions and 65 deletions

View File

@@ -32,6 +32,7 @@ target_link_libraries(ripple_boost
     Boost::program_options
     Boost::regex
     Boost::system
+    Boost::iostreams
     Boost::thread)
 if(Boost_COMPILER)
     target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
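Boost::iostreams becomes a link dependency of `ripple_boost` here; the catalogue compression fields later in this diff (`CATALOGUE_COMPRESS_LEVEL_MASK`) suggest it backs stream compression. A minimal, hypothetical sketch of the kind of usage that needs this target linked; the file name and compression level are illustrative only:

```cpp
#include <boost/iostreams/filter/zlib.hpp>
#include <boost/iostreams/filtering_stream.hpp>
#include <fstream>

int main()
{
    // Compress whatever is written to `out` before it reaches the file.
    std::ofstream file("catalogue.bin", std::ios::binary);
    boost::iostreams::filtering_ostream out;
    out.push(boost::iostreams::zlib_compressor(
        boost::iostreams::zlib_params(6)));  // level 0-9
    out.push(file);
    out << "ledger payload";
}   // destructors flush the compressor, then the file
```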

View File

@@ -498,15 +498,11 @@ RCLConsensus::Adaptor::doAccept(
     for (auto const& item : *result.txns.map_)
     {
-#ifndef DEBUG
         try
         {
-#endif
             retriableTxs.insert(
                 std::make_shared<STTx const>(SerialIter{item.slice()}));
             JLOG(j_.debug()) << "    Tx: " << item.key();
-#ifndef DEBUG
         }
         catch (std::exception const& ex)
         {
@@ -514,7 +510,6 @@ RCLConsensus::Adaptor::doAccept(
             JLOG(j_.warn())
                 << "    Tx: " << item.key() << " throws: " << ex.what();
         }
-#endif
     }

     auto built = buildLCL(
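The same `#ifndef DEBUG` removal recurs below in BuildLedger, applyTransaction, preflight, preclaim, and doApply. The guards compiled the try/catch only when `DEBUG` was not defined, so debug builds let exceptions escape (useful under a debugger) while release builds caught and logged them; with the guards gone, both build types take the same guarded path. A minimal sketch of the pattern being deleted (`risky` is a placeholder):

```cpp
#include <exception>
#include <iostream>
#include <stdexcept>

void risky() { throw std::runtime_error("boom"); }

void guardedCall()
{
// Before this commit, the handler existed only when DEBUG was undefined:
#ifndef DEBUG
    try
    {
#endif
        risky();  // in a DEBUG build, a throw here propagated uncaught
#ifndef DEBUG
    }
    catch (std::exception const& ex)
    {
        std::cerr << ex.what() << '\n';  // non-DEBUG builds log and continue
    }
#endif
}
```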

View File

@@ -311,10 +311,10 @@ Ledger::Ledger(
     Family& family,
     SHAMap const& baseState)
     : mImmutable(false)
-    , info_(info)
     , txMap_(SHAMapType::TRANSACTION, family)
     , stateMap_(baseState, true)
     , rules_{config.features}
+    , info_(info)
     , j_(beast::Journal(beast::Journal::getNullSink()))
 {
 }
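Moving `info_(info)` after `rules_` presumably matches the members' declaration order in the class: initializer lists always run in declaration order regardless of how they are written, and GCC/Clang warn about the mismatch via -Wreorder. A self-contained illustration with a hypothetical struct:

```cpp
struct Example
{
    int first;
    int second;

    // Data members are initialized in declaration order (first, then
    // second), not in the order written below, so GCC/Clang emit
    // -Wreorder here; worse, `first` reads `second` before it exists.
    Example() : second(1), first(second + 1) {}
};
```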

View File

@@ -116,10 +116,8 @@ applyTransactions(
     {
         auto const txid = it->first.getTXID();

-#ifndef DEBUG
         try
         {
-#endif
             if (pass == 0 && built->txExists(txid))
             {
                 it = txns.erase(it);
@@ -142,7 +140,6 @@ applyTransactions(
                 case ApplyResult::Retry:
                     ++it;
             }
-#ifndef DEBUG
         }
         catch (std::exception const& ex)
         {
@@ -151,7 +148,6 @@ applyTransactions(
             failed.insert(txid);
             it = txns.erase(it);
         }
-#endif
     }

     JLOG(j.debug()) << (certainRetry ? "Pass: " : "Final pass: ") << pass

View File

@@ -44,8 +44,7 @@ convertBlobsToTxResult(
         auto tr = std::make_shared<Transaction>(txn, reason, app);
-        auto metaset =
-            std::make_shared<TxMeta>(tr->getID(), tr->getLedger(), rawMeta);
+        auto metaset = std::make_shared<TxMeta>(tr->getID(), ledger_index, rawMeta);

         // if properly formed meta is available we can use it to generate ctid
         if (metaset->getAsObject().isFieldPresent(sfTransactionIndex))

View File

@@ -314,22 +314,21 @@ SetRemarks::preclaim(PreclaimContext const& ctx)
 TER
 SetRemarks::doApply()
 {
-    auto j = ctx_.journal;
     Sandbox sb(&ctx_.view());

     auto const sle = sb.read(keylet::account(account_));
     if (!sle)
-        return terNO_ACCOUNT;
+        return tefINTERNAL;

     auto const objID = ctx_.tx[sfObjectID];
     auto sleO = sb.peek(keylet::unchecked(objID));
     if (!sleO)
-        return terNO_ACCOUNT;
+        return tefINTERNAL;

     std::optional<AccountID> issuer = getRemarksIssuer(sleO);
     if (!issuer || *issuer != account_)
-        return tecNO_PERMISSION;
+        return tefINTERNAL;

     auto const& remarksTxn = ctx_.tx.getFieldArray(sfRemarks);
@@ -401,7 +400,7 @@ SetRemarks::doApply()
     }

     if (newRemarks.size() > 32)
-        return tecTOO_MANY_REMARKS;
+        return tefINTERNAL;

     if (newRemarks.empty() && sleO->isFieldPresent(sfRemarks))
         sleO->makeFieldAbsent(sfRemarks);
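In the rippled/Xahau result-code convention, `ter` codes mean retry, `tec` codes claim a fee, and `tef` codes mark failures that should never occur once preclaim has passed. Switching these `doApply` re-checks to `tefINTERNAL` treats them as internal invariants, on the assumption that `preclaim` (whose hunk header appears above) already rejects the user-facing cases. A hedged sketch of that split, reusing names from the diff; the preclaim body is assumed, not shown here:

```cpp
// Sketch only: user-facing rejections live in preclaim.
TER
SetRemarks::preclaim(PreclaimContext const& ctx)
{
    if (!ctx.view.read(keylet::account(ctx.tx[sfAccount])))
        return terNO_ACCOUNT;
    return tesSUCCESS;
}

TER
SetRemarks::doApply()
{
    Sandbox sb(&ctx_.view());
    // Anything preclaim already guaranteed is an internal error here.
    auto const sle = sb.read(keylet::account(account_));
    if (!sle)
        return tefINTERNAL;
    // ... apply the remarks ...
    return tesSUCCESS;
}
```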

View File

@@ -168,10 +168,8 @@ applyTransaction(
JLOG(j.debug()) << "TXN " << txn.getTransactionID() JLOG(j.debug()) << "TXN " << txn.getTransactionID()
<< (retryAssured ? "/retry" : "/final"); << (retryAssured ? "/retry" : "/final");
#ifndef DEBUG
try try
{ {
#endif
auto const result = apply(app, view, txn, flags, j); auto const result = apply(app, view, txn, flags, j);
if (result.second) if (result.second)
{ {
@@ -191,14 +189,12 @@ applyTransaction(
JLOG(j.debug()) << "Transaction retry: " << transHuman(result.first); JLOG(j.debug()) << "Transaction retry: " << transHuman(result.first);
return ApplyResult::Retry; return ApplyResult::Retry;
#ifndef DEBUG
} }
catch (std::exception const& ex) catch (std::exception const& ex)
{ {
JLOG(j.warn()) << "Throws: " << ex.what(); JLOG(j.warn()) << "Throws: " << ex.what();
return ApplyResult::Fail; return ApplyResult::Fail;
} }
#endif
} }
} // namespace ripple } // namespace ripple

View File

@@ -668,19 +668,15 @@ preflight(
 {
     PreflightContext const pfctx(app, tx, rules, flags, j);

-#ifndef DEBUG
     try
     {
-#endif
         return {pfctx, invoke_preflight(pfctx)};
-#ifndef DEBUG
     }
     catch (std::exception const& e)
     {
         JLOG(j.fatal()) << "apply: " << e.what();
         return {pfctx, {tefEXCEPTION, TxConsequences{tx}}};
     }
-#endif
 }

 PreclaimResult
@@ -717,21 +713,17 @@ preclaim(
             preflightResult.j);
     }

-#ifndef DEBUG
     try
     {
-#endif
         if (!isTesSuccess(ctx->preflightResult))
             return {*ctx, ctx->preflightResult};
         return {*ctx, invoke_preclaim(*ctx)};
-#ifndef DEBUG
     }
     catch (std::exception const& e)
     {
         JLOG(ctx->j.fatal()) << "apply: " << e.what();
         return {*ctx, tefEXCEPTION};
     }
-#endif
 }

 XRPAmount
@@ -755,10 +747,8 @@ doApply(PreclaimResult const& preclaimResult, Application& app, OpenView& view)
         // info to recover.
         return {tefEXCEPTION, false};
     }

-#ifndef DEBUG
     try
     {
-#endif
         if (!preclaimResult.likelyToClaimFee)
             return {preclaimResult.ter, false};
@@ -771,14 +761,12 @@ doApply(PreclaimResult const& preclaimResult, Application& app, OpenView& view)
             preclaimResult.flags,
             preclaimResult.j);
         return invoke_apply(ctx);
-#ifndef DEBUG
     }
     catch (std::exception const& e)
     {
         JLOG(preclaimResult.j.fatal()) << "apply: " << e.what();
         return {tefEXCEPTION, false};
     }
-#endif
 }

 } // namespace ripple
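With the guards removed, every stage (preflight, preclaim, doApply) now unconditionally converts a thrown exception into `tefEXCEPTION` rather than terminating a debug build. A simplified sketch of that shape, assuming rippled's types and the `invoke_apply` helper named in the hunk; construction of `ctx` is elided:

```cpp
// Simplified shape, not the verbatim function above.
std::pair<TER, bool>
doApplyGuarded(ApplyContext& ctx, beast::Journal j)
{
    try
    {
        return invoke_apply(ctx);  // may throw deep inside a transactor
    }
    catch (std::exception const& e)
    {
        JLOG(j.fatal()) << "apply: " << e.what();
        return {tefEXCEPTION, false};  // surfaced as a result code
    }
}
```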

View File

@@ -69,7 +69,7 @@ static constexpr uint16_t CATALOGUE_VERSION_MASK =
     0x00FF;  // Lower 8 bits for version
 static constexpr uint16_t CATALOGUE_COMPRESS_LEVEL_MASK =
     0x0F00;  // Bits 8-11: compression level
-static constexpr uint16_t CATALOGUE_RESERVED_MASK =
+[[maybe_unused]] static constexpr uint16_t CATALOGUE_RESERVED_MASK =
     0xF000;  // Bits 12-15: reserved

 std::string
@@ -229,7 +229,7 @@ class CatalogueSizePredictor
 private:
     uint32_t minLedger_;
     uint32_t maxLedger_;
-    uint64_t headerSize_;
+    [[maybe_unused]] uint64_t headerSize_;

     // Keep track of actual bytes
     uint64_t totalBytesWritten_;
@@ -246,9 +246,9 @@ public:
         : minLedger_(minLedger)
         , maxLedger_(maxLedger)
         , headerSize_(headerSize)
-        , processedLedgers_(0)
         , totalBytesWritten_(headerSize)
         , firstLedgerSize_(0)
+        , processedLedgers_(0)
     {
     }
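`[[maybe_unused]]` (C++17) tells the compiler an entity is deliberately kept even when no code currently reads it, which silences -Wunused-* diagnostics under -Werror; the initializer reorder at the bottom quiets -Wreorder the same way as the Ledger constructor change above. A tiny self-contained example:

```cpp
// Compiles cleanly under -Wall -Werror even though nothing uses these.
[[maybe_unused]] static constexpr unsigned kReservedMask = 0xF000;

struct Predictor
{
    [[maybe_unused]] unsigned long long headerSize_;  // kept, but unread
};
```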

View File

@@ -396,6 +396,8 @@ private:
return "SetRegularKey"; return "SetRegularKey";
if (inp == "HookSet") if (inp == "HookSet")
return "SetHook"; return "SetHook";
if (inp == "RemarksSet")
return "SetRemarks";
return inp; return inp;
}; };
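This extends what appears to be a transaction-type name normalizer (only its tail is visible, so the lambda's shape is inferred), mapping the legacy `RemarksSet` spelling to the canonical `SetRemarks` just as `HookSet` maps to `SetHook`. A standalone version of the visible logic:

```cpp
#include <string>

// Inferred shape; the enclosing class and the other mappings are not
// fully visible in this hunk.
std::string normalizeTxType(std::string const& inp)
{
    if (inp == "HookSet")
        return "SetHook";
    if (inp == "RemarksSet")
        return "SetRemarks";
    return inp;  // already canonical
}
```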

View File

@@ -1301,10 +1301,8 @@ SHAMap::serializeToStream(
     std::size_t nodeCount = 0;

-    auto serializeLeaf = [&stream,
-                          &localBytesWritten,
-                          flushThreshold,
-                          &tryFlush](SHAMapLeafNode const& node) -> bool {
+    auto serializeLeaf = [&stream, &localBytesWritten, &tryFlush](
+                             SHAMapLeafNode const& node) -> bool {
         // write the node type
         auto t = node.getType();
         stream.write(reinterpret_cast<char const*>(&t), 1);
@@ -1335,10 +1333,8 @@ SHAMap::serializeToStream(
         return !stream.fail();
     };

-    auto serializeRemovedLeaf = [&stream,
-                                 &localBytesWritten,
-                                 flushThreshold,
-                                 &tryFlush](uint256 const& key) -> bool {
+    auto serializeRemovedLeaf =
+        [&stream, &localBytesWritten, &tryFlush](uint256 const& key) -> bool {
         // to indicate a node is removed it is written with a removal type
         auto t = SHAMapNodeType::tnREMOVE;
         stream.write(reinterpret_cast<char const*>(&t), 1);
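Both lambdas had captured `flushThreshold` by value without using it in their bodies, which Clang flags with -Wunused-lambda-capture (fatal under -Werror); presumably the `tryFlush` helper captures the threshold itself. A minimal reproduction of the warning and its fix:

```cpp
int main()
{
    int flushThreshold = 4096;

    // warning: lambda capture 'flushThreshold' is not used
    // [-Wunused-lambda-capture]
    auto bad = [flushThreshold](int n) { return n > 0; };

    // Fixed: list only the captures the body actually uses.
    auto good = [](int n) { return n > 0; };

    return bad(flushThreshold) && good(1) ? 0 : 1;
}
```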

View File

@@ -5469,7 +5469,11 @@ class Import_test : public beast::unit_test::suite
         // burn 100'000 coins
         {
             test::jtx::Env env{
-                *this, network::makeNetworkVLConfig(21337, keys)};
+                *this,
+                network::makeNetworkVLConfig(21337, keys),
+                nullptr,
+                beast::severities::kDisabled,
+            };

             auto const envCoins = env.current()->info().drops;
             BEAST_EXPECT(envCoins == 100'000'000'000'000'000);
@@ -5509,7 +5513,11 @@ class Import_test : public beast::unit_test::suite
         // burn all coins
         {
             test::jtx::Env env{
-                *this, network::makeNetworkVLConfig(21337, keys)};
+                *this,
+                network::makeNetworkVLConfig(21337, keys),
+                nullptr,
+                beast::severities::kDisabled,
+            };

             auto const envCoins = env.current()->info().drops;
             BEAST_EXPECT(envCoins == 100'000'000'000'000'000);
@@ -5549,7 +5557,11 @@ class Import_test : public beast::unit_test::suite
         // burn no coins
         {
             test::jtx::Env env{
-                *this, network::makeNetworkVLConfig(21337, keys)};
+                *this,
+                network::makeNetworkVLConfig(21337, keys),
+                nullptr,
+                beast::severities::kDisabled,
+            };

             auto const envCoins = env.current()->info().drops;
             BEAST_EXPECT(envCoins == 100'000'000'000'000'000);
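The two extra constructor arguments quiet these tests: in rippled's jtx harness the Env constructor accepts an optional logs object and a journal severity threshold, so passing `nullptr` and `beast::severities::kDisabled` suppresses log output entirely (the exact overload is assumed from these call sites). Annotated form of the call:

```cpp
test::jtx::Env env{
    *this,                                      // owning unit-test suite
    network::makeNetworkVLConfig(21337, keys),  // config for network id 21337
    nullptr,                                    // no custom logs object
    beast::severities::kDisabled,               // silence all journal output
};
```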

View File

@@ -317,7 +317,8 @@ class Catalogue_test : public beast::unit_test::suite
             Env loadEnv{
                 *this,
                 test::jtx::envconfig(test::jtx::port_increment, 3),
-                features};
+                features,
+            };

             // Now load the catalogue
             Json::Value params{Json::objectValue};
@@ -400,18 +401,8 @@ class Catalogue_test : public beast::unit_test::suite
                 sourceLedger->info().accepted == loadedLedger->info().accepted);

             // Check SLE counts
-            std::size_t sourceCount = 0;
-            std::size_t loadedCount = 0;
-
-            for (auto const& sle : sourceLedger->sles)
-            {
-                sourceCount++;
-            }
-
-            for (auto const& sle : loadedLedger->sles)
-            {
-                loadedCount++;
-            }
+            std::size_t sourceCount = std::ranges::distance(sourceLedger->sles);
+            std::size_t loadedCount = std::ranges::distance(loadedLedger->sles);

             BEAST_EXPECT(sourceCount == loadedCount);
@@ -511,7 +502,9 @@ class Catalogue_test : public beast::unit_test::suite
                     cfg->NETWORK_ID = 123;
                     return cfg;
                 }),
-                features};
+                features,
+            };

             prepareLedgerData(env1, 5);

             // Create catalogue with network ID 123
@@ -535,7 +528,8 @@ class Catalogue_test : public beast::unit_test::suite
                     cfg->NETWORK_ID = 456;
                     return cfg;
                 }),
-                features};
+                features,
+            };

             {
                 Json::Value params{Json::objectValue};
@@ -558,7 +552,15 @@ class Catalogue_test : public beast::unit_test::suite
         using namespace test::jtx;

         // Create environment and test data
-        Env env{*this, envconfig(), features};
+        Env env{
+            *this,
+            envconfig(),
+            features,
+            nullptr,
+            beast::severities::kDisabled,
+        };

         prepareLedgerData(env, 3);

         boost::filesystem::path tempDir =
@@ -649,7 +651,14 @@ class Catalogue_test : public beast::unit_test::suite
         using namespace test::jtx;

         // Create environment and test data
-        Env env{*this, envconfig(), features};
+        Env env{
+            *this,
+            envconfig(),
+            features,
+            nullptr,
+            beast::severities::kDisabled,
+        };

         prepareLedgerData(env, 3);

         boost::filesystem::path tempDir =
@@ -826,7 +835,7 @@ class Catalogue_test : public beast::unit_test::suite
         {
             auto result = env.client().invoke(
                 "catalogue_status", Json::objectValue)[jss::result];
-            std::cout << to_string(result) << "\n";
+            // std::cout << to_string(result) << "\n";
             BEAST_EXPECT(result[jss::job_status] == "no_job_running");
         }
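The SLE-count hunk above replaces two hand-rolled counting loops with `std::ranges::distance` (C++20), which walks any range and returns its element count, and also sidesteps the unused-variable warning for `sle`. A self-contained example:

```cpp
#include <forward_list>
#include <iterator>

int main()
{
    // Counts elements of any range, even one without a size() member.
    std::forward_list<int> entries{1, 2, 3, 4};
    auto const n = std::ranges::distance(entries);  // 4
    return n == 4 ? 0 : 1;
}
```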