Merge branch 'sync-2.0.0-conan' into sync-2.0.1-conan

tequ committed 2025-05-06 14:58:18 +09:00
15 changed files with 107 additions and 97 deletions

View File

@@ -1,4 +1,4 @@
name: Release - SH Runner
name: Build using Docker
on:
push:
@@ -11,38 +11,53 @@ concurrency:
cancel-in-progress: true
env:
DEBUG_CONTAINERS: 1
REMOVE_CONTAINERS: 0
DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP: 1
jobs:
build-and-test:
checkout:
runs-on: [self-hosted, vanity]
outputs:
checkout_path: ${{ steps.vars.outputs.checkout_path }}
steps:
- name: Prepare workspace
- name: Prepare checkout path
id: vars
run: |
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
CHECKOUT_PATH="${SAFE_BRANCH}-${{ github.sha }}"
echo "CHECKOUT_PATH=${CHECKOUT_PATH}" >> $GITHUB_ENV
mkdir -p "$CHECKOUT_PATH"
echo "checkout_path=${CHECKOUT_PATH}" >> "$GITHUB_OUTPUT"
- name: Checkout code
uses: actions/checkout@v4
- uses: actions/checkout@v4
with:
path: ${{ env.CHECKOUT_PATH }}
path: ${{ steps.vars.outputs.checkout_path }}
clean: true
fetch-depth: 2
fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
checkpatterns:
runs-on: [self-hosted, vanity]
needs: checkout
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Check for suspicious patterns
run: /bin/bash suspicious_patterns.sh
build:
runs-on: [self-hosted, vanity]
needs: [checkpatterns, checkout]
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Set Cleanup Script Path
run: |
echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV
- name: Build using Docker
working-directory: ${{ env.CHECKOUT_PATH }}
run: /bin/bash release-builder.sh
- name: Stop Container (Cleanup after build)
- name: Stop Container (Cleanup)
if: always()
working-directory: ${{ env.CHECKOUT_PATH }}
run: |
echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
/bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
@@ -56,30 +71,35 @@ jobs:
echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
fi
if [[ "${DEBUG_CONTAINERS}" == "1" ]]; then
if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
echo "🔍 Checking for leftover containers..."
BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")
CONTAINER_NAME="xahaud_cached_builder_${{ github.workflow }}-${{ github.ref_name }}"
if [[ -n "$BUILD_CONTAINERS" && "${REMOVE_CONTAINERS}" == "1" ]]; then
if [[ -n "$BUILD_CONTAINERS" ]]; then
echo "⚠️ WARNING: Some build containers are still running"
echo "$BUILD_CONTAINERS"
echo "Attempting to stop build containers.."
echo "Stopping container: $CONTAINER_NAME"
docker stop "$CONTAINER_NAME" || echo "Failed to stop container: $CONTAINER_NAME"
echo "Removing container: $CONTAINER_NAME"
docker rm -f "$CONTAINER_NAME" || echo "Failed to remove container: $CONTAINER_NAME"
echo "✅ Build container stopped and removed"
else
echo "✅ No build containers found"
fi
fi
- name: Run unit tests
working-directory: ${{ env.CHECKOUT_PATH }}
tests:
runs-on: [self-hosted, vanity]
needs: [build, checkout]
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Unit tests
run: /bin/bash docker-unit-tests.sh
cleanup:
runs-on: [self-hosted, vanity]
needs: [tests, checkout]
if: always()
steps:
- name: Cleanup workspace
if: always()
run: |
CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}"
echo "Cleaning workspace for ${CHECKOUT_PATH}"
rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}"
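
Note: this rewrite splits the old single build-and-test job into separate checkout, checkpatterns, build, tests, and cleanup jobs. Since variables written to $GITHUB_ENV do not cross job boundaries, the per-branch checkout path is now published as a step output and consumed through needs.checkout.outputs. A minimal sketch of that output-passing pattern (job names and runner labels here are illustrative, not from the workflow):

    jobs:
      checkout:
        runs-on: ubuntu-latest
        outputs:
          checkout_path: ${{ steps.vars.outputs.checkout_path }}
        steps:
          - id: vars
            run: echo "checkout_path=build-${GITHUB_SHA}" >> "$GITHUB_OUTPUT"
      build:
        runs-on: ubuntu-latest
        needs: checkout
        steps:
          - run: echo "building in ${{ needs.checkout.outputs.checkout_path }}"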

View File

@@ -34,6 +34,7 @@ target_link_libraries(ripple_boost
Boost::program_options
Boost::regex
Boost::system
Boost::iostreams
Boost::thread)
if(Boost_COMPILER)
target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
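
Note: Boost::iostreams joins the interface link list, presumably for the compressed catalogue stream I/O touched later in this commit. Since iostreams is a compiled Boost library, a hedged reminder: the component must also be requested at find_package time, or the imported target will not be defined. Sketch with an illustrative target name:

    find_package(Boost REQUIRED COMPONENTS iostreams)  # component list abridged
    add_library(example_boost INTERFACE)               # illustrative target
    target_link_libraries(example_boost INTERFACE Boost::iostreams)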

View File

@@ -498,15 +498,11 @@ RCLConsensus::Adaptor::doAccept(
for (auto const& item : *result.txns.map_)
{
#ifndef DEBUG
try
{
#endif
retriableTxs.insert(
std::make_shared<STTx const>(SerialIter{item.slice()}));
JLOG(j_.debug()) << " Tx: " << item.key();
#ifndef DEBUG
}
catch (std::exception const& ex)
{
@@ -514,7 +510,6 @@ RCLConsensus::Adaptor::doAccept(
JLOG(j_.warn())
<< " Tx: " << item.key() << " throws: " << ex.what();
}
#endif
}
auto built = buildLCL(
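
Note: this hunk (and the matching hunks in applyTransactions, applyTransaction, preflight, preclaim, and doApply further down) removes the #ifndef DEBUG guards, so the try/catch is now compiled unconditionally. Previously a debug build compiled the handler out so a debugger could trap at the throw site; after this change, debug and release builds both catch and log the exception. A minimal sketch of the pattern, with processItem and j as hypothetical stand-ins:

    // Before: handler compiled out in debug builds.
    #ifndef DEBUG
        try
        {
    #endif
            processItem(item);  // hypothetical; stands in for the insert/apply call
    #ifndef DEBUG
        }
        catch (std::exception const& ex)
        {
            JLOG(j.warn()) << "Tx throws: " << ex.what();
        }
    #endif

    // After: the same try/catch, unconditional in every build type.
        try
        {
            processItem(item);
        }
        catch (std::exception const& ex)
        {
            JLOG(j.warn()) << "Tx throws: " << ex.what();
        }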

View File

@@ -311,10 +311,10 @@ Ledger::Ledger(
Family& family,
SHAMap const& baseState)
: mImmutable(false)
, info_(info)
, txMap_(SHAMapType::TRANSACTION, family)
, stateMap_(baseState, true)
, rules_{config.features}
, info_(info)
, j_(beast::Journal(beast::Journal::getNullSink()))
{
}
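
Note: moving info_(info) after rules_ makes the initializer list match the members' declaration order, which silences GCC/Clang's -Wreorder. The list order is cosmetic; initialization always follows declaration order, which is exactly why the warning exists. A self-contained sketch of the hazard:

    struct S
    {
        int a_;
        int b_;
        // -Wreorder: a_ is declared first, so a_(b_ + 1) runs before b_(1)
        // and reads an uninitialized b_, despite the order written here.
        S() : b_(1), a_(b_ + 1) {}
    };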

View File

@@ -116,10 +116,8 @@ applyTransactions(
{
auto const txid = it->first.getTXID();
#ifndef DEBUG
try
{
#endif
if (pass == 0 && built->txExists(txid))
{
it = txns.erase(it);
@@ -142,7 +140,6 @@ applyTransactions(
case ApplyResult::Retry:
++it;
}
#ifndef DEBUG
}
catch (std::exception const& ex)
{
@@ -151,7 +148,6 @@ applyTransactions(
failed.insert(txid);
it = txns.erase(it);
}
#endif
}
JLOG(j.debug()) << (certainRetry ? "Pass: " : "Final pass: ") << pass

View File

@@ -44,8 +44,7 @@ convertBlobsToTxResult(
auto tr = std::make_shared<Transaction>(txn, reason, app);
auto metaset =
std::make_shared<TxMeta>(tr->getID(), tr->getLedger(), rawMeta);
auto metaset = std::make_shared<TxMeta>(tr->getID(), ledger_index, rawMeta);
// if properly formed meta is available we can use it to generate ctid
if (metaset->getAsObject().isFieldPresent(sfTransactionIndex))

View File

@@ -314,22 +314,21 @@ SetRemarks::preclaim(PreclaimContext const& ctx)
TER
SetRemarks::doApply()
{
auto j = ctx_.journal;
Sandbox sb(&ctx_.view());
auto const sle = sb.read(keylet::account(account_));
if (!sle)
return terNO_ACCOUNT;
return tefINTERNAL;
auto const objID = ctx_.tx[sfObjectID];
auto sleO = sb.peek(keylet::unchecked(objID));
if (!sleO)
return terNO_ACCOUNT;
return tefINTERNAL;
std::optional<AccountID> issuer = getRemarksIssuer(sleO);
if (!issuer || *issuer != account_)
return tecNO_PERMISSION;
return tefINTERNAL;
auto const& remarksTxn = ctx_.tx.getFieldArray(sfRemarks);
@@ -401,7 +400,7 @@ SetRemarks::doApply()
}
if (newRemarks.size() > 32)
return tecTOO_MANY_REMARKS;
return tefINTERNAL;
if (newRemarks.empty() && sleO->isFieldPresent(sfRemarks))
sleO->makeFieldAbsent(sfRemarks);
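
Note: the failure codes change from terNO_ACCOUNT and tecNO_PERMISSION to tefINTERNAL. preclaim has already rejected a missing account, a missing object, a non-issuer caller, and an oversized remarks array, so hitting any of those conditions again inside doApply can only mean a broken internal invariant. A hedged, fragmentary sketch of that convention:

    // In preclaim: user-facing validation gets a user-facing code.
    if (!ctx.view.read(keylet::account(id)))
        return terNO_ACCOUNT;

    // In doApply: the same condition recurring is an internal error.
    if (!sb.read(keylet::account(account_)))
        return tefINTERNAL;  // preclaim already guaranteed the account exists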

View File

@@ -168,10 +168,8 @@ applyTransaction(
JLOG(j.debug()) << "TXN " << txn.getTransactionID()
<< (retryAssured ? "/retry" : "/final");
#ifndef DEBUG
try
{
#endif
auto const result = apply(app, view, txn, flags, j);
if (result.second)
{
@@ -191,14 +189,12 @@ applyTransaction(
JLOG(j.debug()) << "Transaction retry: " << transHuman(result.first);
return ApplyResult::Retry;
#ifndef DEBUG
}
catch (std::exception const& ex)
{
JLOG(j.warn()) << "Throws: " << ex.what();
return ApplyResult::Fail;
}
#endif
}
} // namespace ripple

View File

@@ -392,19 +392,15 @@ preflight(
{
PreflightContext const pfctx(app, tx, rules, flags, j);
#ifndef DEBUG
try
{
#endif
return {pfctx, invoke_preflight(pfctx)};
#ifndef DEBUG
}
catch (std::exception const& e)
{
JLOG(j.fatal()) << "apply: " << e.what();
return {pfctx, {tefEXCEPTION, TxConsequences{tx}}};
}
#endif
}
PreclaimResult
@@ -441,21 +437,17 @@ preclaim(
preflightResult.j);
}
#ifndef DEBUG
try
{
#endif
if (!isTesSuccess(ctx->preflightResult))
return {*ctx, ctx->preflightResult};
return {*ctx, invoke_preclaim(*ctx)};
#ifndef DEBUG
}
catch (std::exception const& e)
{
JLOG(ctx->j.fatal()) << "apply: " << e.what();
return {*ctx, tefEXCEPTION};
}
#endif
}
XRPAmount
@@ -479,10 +471,8 @@ doApply(PreclaimResult const& preclaimResult, Application& app, OpenView& view)
// info to recover.
return {tefEXCEPTION, false};
}
#ifndef DEBUG
try
{
#endif
if (!preclaimResult.likelyToClaimFee)
return {preclaimResult.ter, false};
@@ -495,14 +485,12 @@ doApply(PreclaimResult const& preclaimResult, Application& app, OpenView& view)
preclaimResult.flags,
preclaimResult.j);
return invoke_apply(ctx);
#ifndef DEBUG
}
catch (std::exception const& e)
{
JLOG(preclaimResult.j.fatal()) << "apply: " << e.what();
return {tefEXCEPTION, false};
}
#endif
}
} // namespace ripple

View File

@@ -69,7 +69,7 @@ static constexpr uint16_t CATALOGUE_VERSION_MASK =
0x00FF; // Lower 8 bits for version
static constexpr uint16_t CATALOGUE_COMPRESS_LEVEL_MASK =
0x0F00; // Bits 8-11: compression level
static constexpr uint16_t CATALOGUE_RESERVED_MASK =
[[maybe_unused]] static constexpr uint16_t CATALOGUE_RESERVED_MASK =
0xF000; // Bits 12-15: reserved
std::string
@@ -229,7 +229,7 @@ class CatalogueSizePredictor
private:
uint32_t minLedger_;
uint32_t maxLedger_;
uint64_t headerSize_;
[[maybe_unused]] uint64_t headerSize_;
// Keep track of actual bytes
uint64_t totalBytesWritten_;
@@ -246,9 +246,9 @@ public:
: minLedger_(minLedger)
, maxLedger_(maxLedger)
, headerSize_(headerSize)
, processedLedgers_(0)
, totalBytesWritten_(headerSize)
, firstLedgerSize_(0)
, processedLedgers_(0)
{
}
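
Note: two more warning fixes. [[maybe_unused]] (C++17) keeps the reserved mask and the stored header size without tripping unused-variable diagnostics, and the constructor's initializer list is reordered to match the members' declaration order, avoiding -Wreorder just as in Ledger.cpp above. The attribute in isolation:

    // Deliberately retained but never read; the attribute silences the warning.
    [[maybe_unused]] static constexpr uint16_t RESERVED_MASK = 0xF000;  // illustrative name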

View File

@@ -396,6 +396,8 @@ private:
return "SetRegularKey";
if (inp == "HookSet")
return "SetHook";
if (inp == "RemarksSet")
return "SetRemarks";
return inp;
};

View File

@@ -1301,10 +1301,8 @@ SHAMap::serializeToStream(
std::size_t nodeCount = 0;
auto serializeLeaf = [&stream,
&localBytesWritten,
flushThreshold,
&tryFlush](SHAMapLeafNode const& node) -> bool {
auto serializeLeaf = [&stream, &localBytesWritten, &tryFlush](
SHAMapLeafNode const& node) -> bool {
// write the node type
auto t = node.getType();
stream.write(reinterpret_cast<char const*>(&t), 1);
@@ -1335,10 +1333,8 @@ SHAMap::serializeToStream(
return !stream.fail();
};
auto serializeRemovedLeaf = [&stream,
&localBytesWritten,
flushThreshold,
&tryFlush](uint256 const& key) -> bool {
auto serializeRemovedLeaf =
[&stream, &localBytesWritten, &tryFlush](uint256 const& key) -> bool {
// to indicate a node is removed it is written with a removal type
auto t = SHAMapNodeType::tnREMOVE;
stream.write(reinterpret_cast<char const*>(&t), 1);
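
Note: besides the reformatting, both lambdas drop flushThreshold from their capture lists; neither body reads it, which Clang reports under -Wunused-lambda-capture. A self-contained illustration:

    #include <iostream>

    int main()
    {
        int x = 1, flushThreshold = 4096;
        // auto f = [x, flushThreshold] { return x; };  // -Wunused-lambda-capture
        auto f = [x] { return x; };  // fixed: capture only what the body uses
        std::cout << f() + flushThreshold << '\n';
    }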

View File

@@ -5469,7 +5469,11 @@ class Import_test : public beast::unit_test::suite
// burn 100'000 coins
{
test::jtx::Env env{
*this, network::makeNetworkVLConfig(21337, keys)};
*this,
network::makeNetworkVLConfig(21337, keys),
nullptr,
beast::severities::kDisabled,
};
auto const envCoins = env.current()->info().drops;
BEAST_EXPECT(envCoins == 100'000'000'000'000'000);
@@ -5509,7 +5513,11 @@ class Import_test : public beast::unit_test::suite
// burn all coins
{
test::jtx::Env env{
*this, network::makeNetworkVLConfig(21337, keys)};
*this,
network::makeNetworkVLConfig(21337, keys),
nullptr,
beast::severities::kDisabled,
};
auto const envCoins = env.current()->info().drops;
BEAST_EXPECT(envCoins == 100'000'000'000'000'000);
@@ -5549,7 +5557,11 @@ class Import_test : public beast::unit_test::suite
// burn no coins
{
test::jtx::Env env{
*this, network::makeNetworkVLConfig(21337, keys)};
*this,
network::makeNetworkVLConfig(21337, keys),
nullptr,
beast::severities::kDisabled,
};
auto const envCoins = env.current()->info().drops;
BEAST_EXPECT(envCoins == 100'000'000'000'000'000);
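
Note: each Env in these burn tests gains two trailing arguments: a null Logs pointer (fall back to the default implementation) and a beast::severities::kDisabled threshold. kDisabled is the highest severity, so nothing is logged and these deliberately noisy tests run quietly; Catalogue_test below gets the same treatment. The pattern, hedged to the overloads these tests already use:

    test::jtx::Env env{
        *this,
        network::makeNetworkVLConfig(21337, keys),
        nullptr,                       // default-constructed Logs
        beast::severities::kDisabled,  // highest threshold: log nothing
    };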

View File

@@ -317,7 +317,8 @@ class Catalogue_test : public beast::unit_test::suite
Env loadEnv{
*this,
test::jtx::envconfig(test::jtx::port_increment, 3),
features};
features,
};
// Now load the catalogue
Json::Value params{Json::objectValue};
@@ -400,18 +401,8 @@ class Catalogue_test : public beast::unit_test::suite
sourceLedger->info().accepted == loadedLedger->info().accepted);
// Check SLE counts
std::size_t sourceCount = 0;
std::size_t loadedCount = 0;
for (auto const& sle : sourceLedger->sles)
{
sourceCount++;
}
for (auto const& sle : loadedLedger->sles)
{
loadedCount++;
}
std::size_t sourceCount = std::ranges::distance(sourceLedger->sles);
std::size_t loadedCount = std::ranges::distance(loadedLedger->sles);
BEAST_EXPECT(sourceCount == loadedCount);
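
Note: std::ranges::distance (C++20) replaces the two hand-rolled counting loops, and with them the otherwise-unused loop variable sle. A self-contained example:

    #include <cassert>
    #include <iterator>
    #include <vector>

    int main()
    {
        std::vector<int> v{1, 2, 3};
        // Counts the elements of any range: constant time when the range
        // knows its size, a single pass otherwise. No manual loop needed.
        auto const n = std::ranges::distance(v);
        assert(n == 3);
    }
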
@@ -511,7 +502,8 @@ class Catalogue_test : public beast::unit_test::suite
cfg->NETWORK_ID = 123;
return cfg;
}),
features};
features,
};
prepareLedgerData(env1, 5);
// Create catalogue with network ID 123
@@ -535,7 +527,8 @@ class Catalogue_test : public beast::unit_test::suite
cfg->NETWORK_ID = 456;
return cfg;
}),
features};
features,
};
{
Json::Value params{Json::objectValue};
@@ -558,7 +551,13 @@ class Catalogue_test : public beast::unit_test::suite
using namespace test::jtx;
// Create environment and test data
Env env{*this, envconfig(), features};
Env env{
*this,
envconfig(),
features,
nullptr,
beast::severities::kDisabled,
};
prepareLedgerData(env, 3);
boost::filesystem::path tempDir =
@@ -649,7 +648,13 @@ class Catalogue_test : public beast::unit_test::suite
using namespace test::jtx;
// Create environment and test data
Env env{*this, envconfig(), features};
Env env{
*this,
envconfig(),
features,
nullptr,
beast::severities::kDisabled,
};
prepareLedgerData(env, 3);
boost::filesystem::path tempDir =
@@ -826,7 +831,7 @@ class Catalogue_test : public beast::unit_test::suite
{
auto result = env.client().invoke(
"catalogue_status", Json::objectValue)[jss::result];
std::cout << to_string(result) << "\n";
// std::cout << to_string(result) << "\n";
BEAST_EXPECT(result[jss::job_status] == "no_job_running");
}

View File

@@ -8,13 +8,18 @@ files_changed=$(git diff --name-only --relative HEAD~1 HEAD)
# Loop through each file and search for the patterns
for file in $files_changed; do
# Skip if the file is Import_test.cpp (exact filename match regardless of path)
if [[ "$(basename "$file")" == "Import_test.cpp" ]]; then
continue
fi
# Construct the absolute path
absolute_path="$repo_root/$file"
# Check if the file exists (it might have been deleted)
if [ -f "$absolute_path" ]; then
# Search the file for the given patterns
grep_output=$(grep -n -E '(([^rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]|^)(s|p)[rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]{25,60}([^(]|$)))|([^A-Fa-f0-9](02|03|ED)[A-Fa-f0-9]{64})' "$absolute_path")
# Search the file for the given patterns, but exclude lines containing 'public_key'
grep_output=$(grep -n -E '(([^rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]|^)(s|p)[rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]{25,60}([^(]|$)))|([^A-Fa-f0-9](02|03|ED)[A-Fa-f0-9]{64})' "$absolute_path" | grep -v "public_key")
# Check if grep found any matches
if [ ! -z "$grep_output" ]; then
@@ -25,7 +30,3 @@ for file in $files_changed; do
fi
fi
done
# If the loop completes without finding any suspicious patterns
echo "Success: No suspicious patterns found in the diff."
exit 0
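
Note on the added | grep -v "public_key" stage: a pipeline's exit status is that of its last command, and grep -v exits non-zero only when nothing survives the filter, so the assignment still yields an empty string (and the script still reports success) when a file has no matches. A self-contained sketch of the filter, with an illustrative pattern and file name:

    #!/bin/bash
    # Illustrative; the real script derives the pattern and file list from the diff.
    pattern='[^A-Fa-f0-9](02|03|ED)[A-Fa-f0-9]{64}'
    grep_output=$(grep -n -E "$pattern" "some_file.cpp" | grep -v "public_key")
    if [ ! -z "$grep_output" ]; then
        echo "Suspicious lines found:"
        echo "$grep_output"
    fi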