diff --git a/.github/workflows/build-in-docker.yml b/.github/workflows/build-in-docker.yml index 621ea43cc..9d959bbc9 100644 --- a/.github/workflows/build-in-docker.yml +++ b/.github/workflows/build-in-docker.yml @@ -1,4 +1,4 @@ -name: Release - SH Runner +name: Build using Docker on: push: @@ -11,38 +11,53 @@ concurrency: cancel-in-progress: true env: - DEBUG_CONTAINERS: 1 - REMOVE_CONTAINERS: 0 + DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP: 1 jobs: - build-and-test: + checkout: runs-on: [self-hosted, vanity] + outputs: + checkout_path: ${{ steps.vars.outputs.checkout_path }} steps: - - name: Prepare workspace + - name: Prepare checkout path + id: vars run: | SAFE_BRANCH=$(echo "${{ github.ref_name }}" | sed -e 's/[^a-zA-Z0-9._-]/-/g') CHECKOUT_PATH="${SAFE_BRANCH}-${{ github.sha }}" - echo "CHECKOUT_PATH=${CHECKOUT_PATH}" >> $GITHUB_ENV - mkdir -p "$CHECKOUT_PATH" + echo "checkout_path=${CHECKOUT_PATH}" >> "$GITHUB_OUTPUT" - - name: Checkout code - uses: actions/checkout@v4 + - uses: actions/checkout@v4 with: - path: ${{ env.CHECKOUT_PATH }} + path: ${{ steps.vars.outputs.checkout_path }} clean: true - fetch-depth: 2 + fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history + checkpatterns: + runs-on: [self-hosted, vanity] + needs: checkout + defaults: + run: + working-directory: ${{ needs.checkout.outputs.checkout_path }} + steps: + - name: Check for suspicious patterns + run: /bin/bash suspicious_patterns.sh + + build: + runs-on: [self-hosted, vanity] + needs: [checkpatterns, checkout] + defaults: + run: + working-directory: ${{ needs.checkout.outputs.checkout_path }} + steps: - name: Set Cleanup Script Path run: | echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV - name: Build using Docker - working-directory: ${{ env.CHECKOUT_PATH }} run: /bin/bash release-builder.sh - - name: Stop Container (Cleanup after build) + - name: Stop Container (Cleanup) if: always() - working-directory: ${{ env.CHECKOUT_PATH }} run: | echo "Running cleanup script: $JOB_CLEANUP_SCRIPT" /bin/bash -e -x "$JOB_CLEANUP_SCRIPT" @@ -56,30 +71,35 @@ jobs: echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT" fi - if [[ "${DEBUG_CONTAINERS}" == "1" ]]; then + if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then echo "🔍 Checking for leftover containers..." BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "") - CONTAINER_NAME="xahaud_cached_builder_${{ github.workflow }}-${{ github.ref_name }}" - if [[ -n "$BUILD_CONTAINERS" && "${REMOVE_CONTAINERS}" == "1" ]]; then + + if [[ -n "$BUILD_CONTAINERS" ]]; then echo "⚠️ WARNING: Some build containers are still running" echo "$BUILD_CONTAINERS" - echo "Attempting to stop build containers.." - echo "Stopping container: $CONTAINER_NAME" - docker stop "$CONTAINER_NAME" || echo "Failed to stop container: $CONTAINER_NAME" - echo "Removing container: $CONTAINER_NAME" - docker rm -f "$CONTAINER_NAME" || echo "Failed to remove container: $CONTAINER_NAME" - echo "✅ Build container stopped and removed" else echo "✅ No build containers found" fi fi - - name: Run unit tests - working-directory: ${{ env.CHECKOUT_PATH }} + tests: + runs-on: [self-hosted, vanity] + needs: [build, checkout] + defaults: + run: + working-directory: ${{ needs.checkout.outputs.checkout_path }} + steps: + - name: Unit tests run: /bin/bash docker-unit-tests.sh + cleanup: + runs-on: [self-hosted, vanity] + needs: [tests, checkout] + if: always() + steps: - name: Cleanup workspace - if: always() run: | + CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}" echo "Cleaning workspace for ${CHECKOUT_PATH}" rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}" diff --git a/Builds/CMake/conan/Boost.cmake b/Builds/CMake/conan/Boost.cmake index d73cc2797..fe5412cf1 100644 --- a/Builds/CMake/conan/Boost.cmake +++ b/Builds/CMake/conan/Boost.cmake @@ -34,6 +34,7 @@ target_link_libraries(ripple_boost Boost::program_options Boost::regex Boost::system + Boost::iostreams Boost::thread) if(Boost_COMPILER) target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking) diff --git a/src/ripple/app/consensus/RCLConsensus.cpp b/src/ripple/app/consensus/RCLConsensus.cpp index 681645606..4fb0c5275 100644 --- a/src/ripple/app/consensus/RCLConsensus.cpp +++ b/src/ripple/app/consensus/RCLConsensus.cpp @@ -498,15 +498,11 @@ RCLConsensus::Adaptor::doAccept( for (auto const& item : *result.txns.map_) { -#ifndef DEBUG try { -#endif retriableTxs.insert( std::make_shared(SerialIter{item.slice()})); JLOG(j_.debug()) << " Tx: " << item.key(); - -#ifndef DEBUG } catch (std::exception const& ex) { @@ -514,7 +510,6 @@ RCLConsensus::Adaptor::doAccept( JLOG(j_.warn()) << " Tx: " << item.key() << " throws: " << ex.what(); } -#endif } auto built = buildLCL( diff --git a/src/ripple/app/ledger/Ledger.cpp b/src/ripple/app/ledger/Ledger.cpp index 345c3bb28..b013b35a0 100644 --- a/src/ripple/app/ledger/Ledger.cpp +++ b/src/ripple/app/ledger/Ledger.cpp @@ -311,10 +311,10 @@ Ledger::Ledger( Family& family, SHAMap const& baseState) : mImmutable(false) - , info_(info) , txMap_(SHAMapType::TRANSACTION, family) , stateMap_(baseState, true) , rules_{config.features} + , info_(info) , j_(beast::Journal(beast::Journal::getNullSink())) { } diff --git a/src/ripple/app/ledger/impl/BuildLedger.cpp b/src/ripple/app/ledger/impl/BuildLedger.cpp index fbdf9945f..56feda066 100644 --- a/src/ripple/app/ledger/impl/BuildLedger.cpp +++ b/src/ripple/app/ledger/impl/BuildLedger.cpp @@ -116,10 +116,8 @@ applyTransactions( { auto const txid = it->first.getTXID(); -#ifndef DEBUG try { -#endif if (pass == 0 && built->txExists(txid)) { it = txns.erase(it); @@ -142,7 +140,6 @@ applyTransactions( case ApplyResult::Retry: ++it; } -#ifndef DEBUG } catch (std::exception const& ex) { @@ -151,7 +148,6 @@ applyTransactions( failed.insert(txid); it = txns.erase(it); } -#endif } JLOG(j.debug()) << (certainRetry ? "Pass: " : "Final pass: ") << pass diff --git a/src/ripple/app/misc/impl/AccountTxPaging.cpp b/src/ripple/app/misc/impl/AccountTxPaging.cpp index f06c439f4..1babbc7ce 100644 --- a/src/ripple/app/misc/impl/AccountTxPaging.cpp +++ b/src/ripple/app/misc/impl/AccountTxPaging.cpp @@ -44,8 +44,7 @@ convertBlobsToTxResult( auto tr = std::make_shared(txn, reason, app); - auto metaset = - std::make_shared(tr->getID(), tr->getLedger(), rawMeta); + auto metaset = std::make_shared(tr->getID(), ledger_index, rawMeta); // if properly formed meta is available we can use it to generate ctid if (metaset->getAsObject().isFieldPresent(sfTransactionIndex)) diff --git a/src/ripple/app/tx/impl/SetRemarks.cpp b/src/ripple/app/tx/impl/SetRemarks.cpp index 30cc83e8b..4a63141f3 100644 --- a/src/ripple/app/tx/impl/SetRemarks.cpp +++ b/src/ripple/app/tx/impl/SetRemarks.cpp @@ -314,22 +314,21 @@ SetRemarks::preclaim(PreclaimContext const& ctx) TER SetRemarks::doApply() { - auto j = ctx_.journal; Sandbox sb(&ctx_.view()); auto const sle = sb.read(keylet::account(account_)); if (!sle) - return terNO_ACCOUNT; + return tefINTERNAL; auto const objID = ctx_.tx[sfObjectID]; auto sleO = sb.peek(keylet::unchecked(objID)); if (!sleO) - return terNO_ACCOUNT; + return tefINTERNAL; std::optional issuer = getRemarksIssuer(sleO); if (!issuer || *issuer != account_) - return tecNO_PERMISSION; + return tefINTERNAL; auto const& remarksTxn = ctx_.tx.getFieldArray(sfRemarks); @@ -401,7 +400,7 @@ SetRemarks::doApply() } if (newRemarks.size() > 32) - return tecTOO_MANY_REMARKS; + return tefINTERNAL; if (newRemarks.empty() && sleO->isFieldPresent(sfRemarks)) sleO->makeFieldAbsent(sfRemarks); diff --git a/src/ripple/app/tx/impl/apply.cpp b/src/ripple/app/tx/impl/apply.cpp index 3e369f06a..a39249a4d 100644 --- a/src/ripple/app/tx/impl/apply.cpp +++ b/src/ripple/app/tx/impl/apply.cpp @@ -168,10 +168,8 @@ applyTransaction( JLOG(j.debug()) << "TXN " << txn.getTransactionID() << (retryAssured ? "/retry" : "/final"); -#ifndef DEBUG try { -#endif auto const result = apply(app, view, txn, flags, j); if (result.second) { @@ -191,14 +189,12 @@ applyTransaction( JLOG(j.debug()) << "Transaction retry: " << transHuman(result.first); return ApplyResult::Retry; -#ifndef DEBUG } catch (std::exception const& ex) { JLOG(j.warn()) << "Throws: " << ex.what(); return ApplyResult::Fail; } -#endif } } // namespace ripple diff --git a/src/ripple/app/tx/impl/applySteps.cpp b/src/ripple/app/tx/impl/applySteps.cpp index f302a5731..243d56786 100644 --- a/src/ripple/app/tx/impl/applySteps.cpp +++ b/src/ripple/app/tx/impl/applySteps.cpp @@ -392,19 +392,15 @@ preflight( { PreflightContext const pfctx(app, tx, rules, flags, j); -#ifndef DEBUG try { -#endif return {pfctx, invoke_preflight(pfctx)}; -#ifndef DEBUG } catch (std::exception const& e) { JLOG(j.fatal()) << "apply: " << e.what(); return {pfctx, {tefEXCEPTION, TxConsequences{tx}}}; } -#endif } PreclaimResult @@ -441,21 +437,17 @@ preclaim( preflightResult.j); } -#ifndef DEBUG try { -#endif if (!isTesSuccess(ctx->preflightResult)) return {*ctx, ctx->preflightResult}; return {*ctx, invoke_preclaim(*ctx)}; -#ifndef DEBUG } catch (std::exception const& e) { JLOG(ctx->j.fatal()) << "apply: " << e.what(); return {*ctx, tefEXCEPTION}; } -#endif } XRPAmount @@ -479,10 +471,8 @@ doApply(PreclaimResult const& preclaimResult, Application& app, OpenView& view) // info to recover. return {tefEXCEPTION, false}; } -#ifndef DEBUG try { -#endif if (!preclaimResult.likelyToClaimFee) return {preclaimResult.ter, false}; @@ -495,14 +485,12 @@ doApply(PreclaimResult const& preclaimResult, Application& app, OpenView& view) preclaimResult.flags, preclaimResult.j); return invoke_apply(ctx); -#ifndef DEBUG } catch (std::exception const& e) { JLOG(preclaimResult.j.fatal()) << "apply: " << e.what(); return {tefEXCEPTION, false}; } -#endif } } // namespace ripple diff --git a/src/ripple/rpc/handlers/Catalogue.cpp b/src/ripple/rpc/handlers/Catalogue.cpp index de025de1a..8ca4425d6 100644 --- a/src/ripple/rpc/handlers/Catalogue.cpp +++ b/src/ripple/rpc/handlers/Catalogue.cpp @@ -69,7 +69,7 @@ static constexpr uint16_t CATALOGUE_VERSION_MASK = 0x00FF; // Lower 8 bits for version static constexpr uint16_t CATALOGUE_COMPRESS_LEVEL_MASK = 0x0F00; // Bits 8-11: compression level -static constexpr uint16_t CATALOGUE_RESERVED_MASK = +[[maybe_unused]] static constexpr uint16_t CATALOGUE_RESERVED_MASK = 0xF000; // Bits 12-15: reserved std::string @@ -229,7 +229,7 @@ class CatalogueSizePredictor private: uint32_t minLedger_; uint32_t maxLedger_; - uint64_t headerSize_; + [[maybe_unused]] uint64_t headerSize_; // Keep track of actual bytes uint64_t totalBytesWritten_; @@ -246,9 +246,9 @@ public: : minLedger_(minLedger) , maxLedger_(maxLedger) , headerSize_(headerSize) - , processedLedgers_(0) , totalBytesWritten_(headerSize) , firstLedgerSize_(0) + , processedLedgers_(0) { } diff --git a/src/ripple/rpc/handlers/ServerDefinitions.cpp b/src/ripple/rpc/handlers/ServerDefinitions.cpp index 0138266b5..09c0cebc6 100644 --- a/src/ripple/rpc/handlers/ServerDefinitions.cpp +++ b/src/ripple/rpc/handlers/ServerDefinitions.cpp @@ -396,6 +396,8 @@ private: return "SetRegularKey"; if (inp == "HookSet") return "SetHook"; + if (inp == "RemarksSet") + return "SetRemarks"; return inp; }; diff --git a/src/ripple/shamap/impl/SHAMap.cpp b/src/ripple/shamap/impl/SHAMap.cpp index 972cc64fb..46c61bbc4 100644 --- a/src/ripple/shamap/impl/SHAMap.cpp +++ b/src/ripple/shamap/impl/SHAMap.cpp @@ -1301,10 +1301,8 @@ SHAMap::serializeToStream( std::size_t nodeCount = 0; - auto serializeLeaf = [&stream, - &localBytesWritten, - flushThreshold, - &tryFlush](SHAMapLeafNode const& node) -> bool { + auto serializeLeaf = [&stream, &localBytesWritten, &tryFlush]( + SHAMapLeafNode const& node) -> bool { // write the node type auto t = node.getType(); stream.write(reinterpret_cast(&t), 1); @@ -1335,10 +1333,8 @@ SHAMap::serializeToStream( return !stream.fail(); }; - auto serializeRemovedLeaf = [&stream, - &localBytesWritten, - flushThreshold, - &tryFlush](uint256 const& key) -> bool { + auto serializeRemovedLeaf = + [&stream, &localBytesWritten, &tryFlush](uint256 const& key) -> bool { // to indicate a node is removed it is written with a removal type auto t = SHAMapNodeType::tnREMOVE; stream.write(reinterpret_cast(&t), 1); diff --git a/src/test/app/Import_test.cpp b/src/test/app/Import_test.cpp index 09ffd4a02..2bb56fbe9 100644 --- a/src/test/app/Import_test.cpp +++ b/src/test/app/Import_test.cpp @@ -5469,7 +5469,11 @@ class Import_test : public beast::unit_test::suite // burn 100'000 coins { test::jtx::Env env{ - *this, network::makeNetworkVLConfig(21337, keys)}; + *this, + network::makeNetworkVLConfig(21337, keys), + nullptr, + beast::severities::kDisabled, + }; auto const envCoins = env.current()->info().drops; BEAST_EXPECT(envCoins == 100'000'000'000'000'000); @@ -5509,7 +5513,11 @@ class Import_test : public beast::unit_test::suite // burn all coins { test::jtx::Env env{ - *this, network::makeNetworkVLConfig(21337, keys)}; + *this, + network::makeNetworkVLConfig(21337, keys), + nullptr, + beast::severities::kDisabled, + }; auto const envCoins = env.current()->info().drops; BEAST_EXPECT(envCoins == 100'000'000'000'000'000); @@ -5549,7 +5557,11 @@ class Import_test : public beast::unit_test::suite // burn no coins { test::jtx::Env env{ - *this, network::makeNetworkVLConfig(21337, keys)}; + *this, + network::makeNetworkVLConfig(21337, keys), + nullptr, + beast::severities::kDisabled, + }; auto const envCoins = env.current()->info().drops; BEAST_EXPECT(envCoins == 100'000'000'000'000'000); diff --git a/src/test/rpc/Catalogue_test.cpp b/src/test/rpc/Catalogue_test.cpp index c1be1a0c5..9cafebd07 100644 --- a/src/test/rpc/Catalogue_test.cpp +++ b/src/test/rpc/Catalogue_test.cpp @@ -317,7 +317,8 @@ class Catalogue_test : public beast::unit_test::suite Env loadEnv{ *this, test::jtx::envconfig(test::jtx::port_increment, 3), - features}; + features, + }; // Now load the catalogue Json::Value params{Json::objectValue}; @@ -400,18 +401,8 @@ class Catalogue_test : public beast::unit_test::suite sourceLedger->info().accepted == loadedLedger->info().accepted); // Check SLE counts - std::size_t sourceCount = 0; - std::size_t loadedCount = 0; - - for (auto const& sle : sourceLedger->sles) - { - sourceCount++; - } - - for (auto const& sle : loadedLedger->sles) - { - loadedCount++; - } + std::size_t sourceCount = std::ranges::distance(sourceLedger->sles); + std::size_t loadedCount = std::ranges::distance(loadedLedger->sles); BEAST_EXPECT(sourceCount == loadedCount); @@ -511,7 +502,8 @@ class Catalogue_test : public beast::unit_test::suite cfg->NETWORK_ID = 123; return cfg; }), - features}; + features, + }; prepareLedgerData(env1, 5); // Create catalogue with network ID 123 @@ -535,7 +527,8 @@ class Catalogue_test : public beast::unit_test::suite cfg->NETWORK_ID = 456; return cfg; }), - features}; + features, + }; { Json::Value params{Json::objectValue}; @@ -558,7 +551,13 @@ class Catalogue_test : public beast::unit_test::suite using namespace test::jtx; // Create environment and test data - Env env{*this, envconfig(), features}; + Env env{ + *this, + envconfig(), + features, + nullptr, + beast::severities::kDisabled, + }; prepareLedgerData(env, 3); boost::filesystem::path tempDir = @@ -649,7 +648,13 @@ class Catalogue_test : public beast::unit_test::suite using namespace test::jtx; // Create environment and test data - Env env{*this, envconfig(), features}; + Env env{ + *this, + envconfig(), + features, + nullptr, + beast::severities::kDisabled, + }; prepareLedgerData(env, 3); boost::filesystem::path tempDir = @@ -826,7 +831,7 @@ class Catalogue_test : public beast::unit_test::suite { auto result = env.client().invoke( "catalogue_status", Json::objectValue)[jss::result]; - std::cout << to_string(result) << "\n"; + // std::cout << to_string(result) << "\n"; BEAST_EXPECT(result[jss::job_status] == "no_job_running"); } diff --git a/suspicious_patterns.sh b/suspicious_patterns.sh index 6010c2494..39287a6a1 100755 --- a/suspicious_patterns.sh +++ b/suspicious_patterns.sh @@ -8,13 +8,18 @@ files_changed=$(git diff --name-only --relative HEAD~1 HEAD) # Loop through each file and search for the patterns for file in $files_changed; do + # Skip if the file is Import_test.cpp (exact filename match regardless of path) + if [[ "$(basename "$file")" == "Import_test.cpp" ]]; then + continue + fi + # Construct the absolute path absolute_path="$repo_root/$file" # Check if the file exists (it might have been deleted) if [ -f "$absolute_path" ]; then - # Search the file for the given patterns - grep_output=$(grep -n -E '(([^rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]|^)(s|p)[rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]{25,60}([^(]|$)))|([^A-Fa-f0-9](02|03|ED)[A-Fa-f0-9]{64})' "$absolute_path") + # Search the file for the given patterns, but exclude lines containing 'public_key' + grep_output=$(grep -n -E '(([^rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]|^)(s|p)[rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]{25,60}([^(]|$)))|([^A-Fa-f0-9](02|03|ED)[A-Fa-f0-9]{64})' "$absolute_path" | grep -v "public_key") # Check if grep found any matches if [ ! -z "$grep_output" ]; then @@ -25,7 +30,3 @@ for file in $files_changed; do fi fi done - -# If the loop completes without finding any suspicious patterns -echo "Success: No suspicious patterns found in the diff." -exit 0 \ No newline at end of file