Merge branch 'sync-2.0.0-conan' into sync-2.0.1-conan

tequ
2025-05-06 14:58:18 +09:00
15 changed files with 107 additions and 97 deletions

View File

@@ -1,4 +1,4 @@
-name: Release - SH Runner
+name: Build using Docker
 on:
   push:
@@ -11,38 +11,53 @@ concurrency:
   cancel-in-progress: true
 env:
-  DEBUG_CONTAINERS: 1
-  REMOVE_CONTAINERS: 0
+  DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP: 1
 jobs:
-  build-and-test:
+  checkout:
     runs-on: [self-hosted, vanity]
+    outputs:
+      checkout_path: ${{ steps.vars.outputs.checkout_path }}
     steps:
-      - name: Prepare workspace
+      - name: Prepare checkout path
+        id: vars
         run: |
           SAFE_BRANCH=$(echo "${{ github.ref_name }}" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
           CHECKOUT_PATH="${SAFE_BRANCH}-${{ github.sha }}"
-          echo "CHECKOUT_PATH=${CHECKOUT_PATH}" >> $GITHUB_ENV
-          mkdir -p "$CHECKOUT_PATH"
-      - name: Checkout code
-        uses: actions/checkout@v4
+          echo "checkout_path=${CHECKOUT_PATH}" >> "$GITHUB_OUTPUT"
+      - uses: actions/checkout@v4
         with:
-          path: ${{ env.CHECKOUT_PATH }}
+          path: ${{ steps.vars.outputs.checkout_path }}
           clean: true
-          fetch-depth: 2
+          fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
+  checkpatterns:
+    runs-on: [self-hosted, vanity]
+    needs: checkout
+    defaults:
+      run:
+        working-directory: ${{ needs.checkout.outputs.checkout_path }}
+    steps:
+      - name: Check for suspicious patterns
+        run: /bin/bash suspicious_patterns.sh
+  build:
+    runs-on: [self-hosted, vanity]
+    needs: [checkpatterns, checkout]
+    defaults:
+      run:
+        working-directory: ${{ needs.checkout.outputs.checkout_path }}
+    steps:
       - name: Set Cleanup Script Path
         run: |
           echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV
       - name: Build using Docker
-        working-directory: ${{ env.CHECKOUT_PATH }}
         run: /bin/bash release-builder.sh
-      - name: Stop Container (Cleanup after build)
+      - name: Stop Container (Cleanup)
         if: always()
-        working-directory: ${{ env.CHECKOUT_PATH }}
         run: |
           echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
           /bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
@@ -56,30 +71,35 @@ jobs:
echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT" echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
fi fi
if [[ "${DEBUG_CONTAINERS}" == "1" ]]; then if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
echo "🔍 Checking for leftover containers..." echo "🔍 Checking for leftover containers..."
BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "") BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")
CONTAINER_NAME="xahaud_cached_builder_${{ github.workflow }}-${{ github.ref_name }}"
if [[ -n "$BUILD_CONTAINERS" && "${REMOVE_CONTAINERS}" == "1" ]]; then if [[ -n "$BUILD_CONTAINERS" ]]; then
echo "⚠️ WARNING: Some build containers are still running" echo "⚠️ WARNING: Some build containers are still running"
echo "$BUILD_CONTAINERS" echo "$BUILD_CONTAINERS"
echo "Attempting to stop build containers.."
echo "Stopping container: $CONTAINER_NAME"
docker stop "$CONTAINER_NAME" || echo "Failed to stop container: $CONTAINER_NAME"
echo "Removing container: $CONTAINER_NAME"
docker rm -f "$CONTAINER_NAME" || echo "Failed to remove container: $CONTAINER_NAME"
echo "✅ Build container stopped and removed"
else else
echo "✅ No build containers found" echo "✅ No build containers found"
fi fi
fi fi
- name: Run unit tests tests:
working-directory: ${{ env.CHECKOUT_PATH }} runs-on: [self-hosted, vanity]
needs: [build, checkout]
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Unit tests
run: /bin/bash docker-unit-tests.sh run: /bin/bash docker-unit-tests.sh
cleanup:
runs-on: [self-hosted, vanity]
needs: [tests, checkout]
if: always()
steps:
- name: Cleanup workspace - name: Cleanup workspace
if: always()
run: | run: |
CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}"
echo "Cleaning workspace for ${CHECKOUT_PATH}" echo "Cleaning workspace for ${CHECKOUT_PATH}"
rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}" rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}"

View File

@@ -34,6 +34,7 @@ target_link_libraries(ripple_boost
   Boost::program_options
   Boost::regex
   Boost::system
+  Boost::iostreams
   Boost::thread)
 if(Boost_COMPILER)
   target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
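Note: the added Boost::iostreams link is presumably needed by the compressed catalogue I/O touched elsewhere in this commit. As a minimal sketch of the kind of code that pulls in that dependency (illustrative only — the file name and compression level are assumptions, not code from this repository):

    #include <boost/iostreams/filter/zlib.hpp>
    #include <boost/iostreams/filtering_stream.hpp>
    #include <fstream>

    int main()
    {
        std::ofstream file("catalogue.bin", std::ios::binary);

        // Bytes written to `out` are zlib-compressed (level 6 here) before
        // reaching the underlying file stream; this filter chain is what
        // requires linking Boost::iostreams (and zlib).
        boost::iostreams::filtering_ostream out;
        out.push(boost::iostreams::zlib_compressor(
            boost::iostreams::zlib_params(6)));
        out.push(file);

        out << "ledger data...";
        return 0;
    }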

View File

@@ -498,15 +498,11 @@ RCLConsensus::Adaptor::doAccept(
         for (auto const& item : *result.txns.map_)
         {
-#ifndef DEBUG
             try
             {
-#endif
                 retriableTxs.insert(
                     std::make_shared<STTx const>(SerialIter{item.slice()}));
                 JLOG(j_.debug()) << " Tx: " << item.key();
-#ifndef DEBUG
             }
             catch (std::exception const& ex)
             {
@@ -514,7 +510,6 @@ RCLConsensus::Adaptor::doAccept(
                 JLOG(j_.warn())
                     << " Tx: " << item.key() << " throws: " << ex.what();
             }
-#endif
         }
         auto built = buildLCL(
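Note: this file, the BuildLedger, applyTransaction, and preflight/preclaim/doApply hunks below all drop the same `#ifndef DEBUG` / `#endif` guards, so the exception handler is now compiled into debug builds as well. A minimal sketch of the before/after pattern (generic code, not taken from this repository):

    #include <iostream>
    #include <stdexcept>

    // Before: the try/catch only existed when DEBUG was not defined, so a
    // debug build let the exception escape instead of logging and moving on.
    void processOld(int item)
    {
    #ifndef DEBUG
        try
        {
    #endif
            if (item < 0)
                throw std::runtime_error("bad item");
            std::cout << "processed " << item << "\n";
    #ifndef DEBUG
        }
        catch (std::exception const& ex)
        {
            std::cout << "skipping item: " << ex.what() << "\n";
        }
    #endif
    }

    // After: the guard is unconditional, so debug and release builds handle a
    // throwing item the same way.
    void processNew(int item)
    {
        try
        {
            if (item < 0)
                throw std::runtime_error("bad item");
            std::cout << "processed " << item << "\n";
        }
        catch (std::exception const& ex)
        {
            std::cout << "skipping item: " << ex.what() << "\n";
        }
    }

    int main()
    {
        processOld(1);
        processNew(-1);
        return 0;
    }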

View File

@@ -311,10 +311,10 @@ Ledger::Ledger(
     Family& family,
     SHAMap const& baseState)
     : mImmutable(false)
-    , info_(info)
     , txMap_(SHAMapType::TRANSACTION, family)
     , stateMap_(baseState, true)
     , rules_{config.features}
+    , info_(info)
     , j_(beast::Journal(beast::Journal::getNullSink()))
 {
 }
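Note: moving `info_(info)` in the initializer list does not change when the member is constructed — members are always initialized in declaration order — it only makes the written order match, which is what silences -Wreorder. A small generic sketch of that warning (member names illustrative, not this codebase):

    #include <string>
    #include <utility>

    class LedgerLike
    {
        bool immutable_;
        std::string rules_;
        std::string info_;  // declared after rules_

    public:
        // Listing info_ before rules_ here would trigger -Wreorder, because
        // rules_ is still constructed first regardless of the written order.
        // Writing the list in declaration order keeps the code honest about
        // what actually happens.
        LedgerLike(std::string info, std::string rules)
            : immutable_(false)
            , rules_(std::move(rules))
            , info_(std::move(info))
        {
        }
    };

    int main()
    {
        LedgerLike l("info", "rules");
        (void)l;
        return 0;
    }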

View File

@@ -116,10 +116,8 @@ applyTransactions(
         {
             auto const txid = it->first.getTXID();
-#ifndef DEBUG
             try
             {
-#endif
                 if (pass == 0 && built->txExists(txid))
                 {
                     it = txns.erase(it);
@@ -142,7 +140,6 @@ applyTransactions(
                     case ApplyResult::Retry:
                         ++it;
                 }
-#ifndef DEBUG
             }
             catch (std::exception const& ex)
             {
@@ -151,7 +148,6 @@ applyTransactions(
                 failed.insert(txid);
                 it = txns.erase(it);
             }
-#endif
         }
         JLOG(j.debug()) << (certainRetry ? "Pass: " : "Final pass: ") << pass

View File

@@ -44,8 +44,7 @@ convertBlobsToTxResult(
     auto tr = std::make_shared<Transaction>(txn, reason, app);
-    auto metaset =
-        std::make_shared<TxMeta>(tr->getID(), tr->getLedger(), rawMeta);
+    auto metaset = std::make_shared<TxMeta>(tr->getID(), ledger_index, rawMeta);
     // if properly formed meta is available we can use it to generate ctid
     if (metaset->getAsObject().isFieldPresent(sfTransactionIndex))

View File

@@ -314,22 +314,21 @@ SetRemarks::preclaim(PreclaimContext const& ctx)
 TER
 SetRemarks::doApply()
 {
-    auto j = ctx_.journal;
     Sandbox sb(&ctx_.view());
     auto const sle = sb.read(keylet::account(account_));
     if (!sle)
-        return terNO_ACCOUNT;
+        return tefINTERNAL;
     auto const objID = ctx_.tx[sfObjectID];
     auto sleO = sb.peek(keylet::unchecked(objID));
     if (!sleO)
-        return terNO_ACCOUNT;
+        return tefINTERNAL;
     std::optional<AccountID> issuer = getRemarksIssuer(sleO);
     if (!issuer || *issuer != account_)
-        return tecNO_PERMISSION;
+        return tefINTERNAL;
     auto const& remarksTxn = ctx_.tx.getFieldArray(sfRemarks);
@@ -401,7 +400,7 @@ SetRemarks::doApply()
     }
     if (newRemarks.size() > 32)
-        return tecTOO_MANY_REMARKS;
+        return tefINTERNAL;
     if (newRemarks.empty() && sleO->isFieldPresent(sfRemarks))
         sleO->makeFieldAbsent(sfRemarks);

View File

@@ -168,10 +168,8 @@ applyTransaction(
JLOG(j.debug()) << "TXN " << txn.getTransactionID() JLOG(j.debug()) << "TXN " << txn.getTransactionID()
<< (retryAssured ? "/retry" : "/final"); << (retryAssured ? "/retry" : "/final");
#ifndef DEBUG
try try
{ {
#endif
auto const result = apply(app, view, txn, flags, j); auto const result = apply(app, view, txn, flags, j);
if (result.second) if (result.second)
{ {
@@ -191,14 +189,12 @@ applyTransaction(
JLOG(j.debug()) << "Transaction retry: " << transHuman(result.first); JLOG(j.debug()) << "Transaction retry: " << transHuman(result.first);
return ApplyResult::Retry; return ApplyResult::Retry;
#ifndef DEBUG
} }
catch (std::exception const& ex) catch (std::exception const& ex)
{ {
JLOG(j.warn()) << "Throws: " << ex.what(); JLOG(j.warn()) << "Throws: " << ex.what();
return ApplyResult::Fail; return ApplyResult::Fail;
} }
#endif
} }
} // namespace ripple } // namespace ripple

View File

@@ -392,19 +392,15 @@ preflight(
 {
     PreflightContext const pfctx(app, tx, rules, flags, j);
-#ifndef DEBUG
     try
     {
-#endif
         return {pfctx, invoke_preflight(pfctx)};
-#ifndef DEBUG
     }
     catch (std::exception const& e)
     {
         JLOG(j.fatal()) << "apply: " << e.what();
         return {pfctx, {tefEXCEPTION, TxConsequences{tx}}};
     }
-#endif
 }
 PreclaimResult
@@ -441,21 +437,17 @@ preclaim(
             preflightResult.j);
     }
-#ifndef DEBUG
     try
     {
-#endif
         if (!isTesSuccess(ctx->preflightResult))
             return {*ctx, ctx->preflightResult};
         return {*ctx, invoke_preclaim(*ctx)};
-#ifndef DEBUG
     }
     catch (std::exception const& e)
     {
         JLOG(ctx->j.fatal()) << "apply: " << e.what();
         return {*ctx, tefEXCEPTION};
     }
-#endif
 }
 XRPAmount
@@ -479,10 +471,8 @@ doApply(PreclaimResult const& preclaimResult, Application& app, OpenView& view)
         // info to recover.
         return {tefEXCEPTION, false};
     }
-#ifndef DEBUG
     try
     {
-#endif
         if (!preclaimResult.likelyToClaimFee)
             return {preclaimResult.ter, false};
@@ -495,14 +485,12 @@ doApply(PreclaimResult const& preclaimResult, Application& app, OpenView& view)
             preclaimResult.flags,
             preclaimResult.j);
         return invoke_apply(ctx);
-#ifndef DEBUG
     }
     catch (std::exception const& e)
    {
         JLOG(preclaimResult.j.fatal()) << "apply: " << e.what();
         return {tefEXCEPTION, false};
     }
-#endif
 }
 } // namespace ripple

View File

@@ -69,7 +69,7 @@ static constexpr uint16_t CATALOGUE_VERSION_MASK =
     0x00FF; // Lower 8 bits for version
 static constexpr uint16_t CATALOGUE_COMPRESS_LEVEL_MASK =
     0x0F00; // Bits 8-11: compression level
-static constexpr uint16_t CATALOGUE_RESERVED_MASK =
+[[maybe_unused]] static constexpr uint16_t CATALOGUE_RESERVED_MASK =
     0xF000; // Bits 12-15: reserved
 std::string
@@ -229,7 +229,7 @@ class CatalogueSizePredictor
 private:
     uint32_t minLedger_;
     uint32_t maxLedger_;
-    uint64_t headerSize_;
+    [[maybe_unused]] uint64_t headerSize_;
     // Keep track of actual bytes
     uint64_t totalBytesWritten_;
@@ -246,9 +246,9 @@ public:
         : minLedger_(minLedger)
         , maxLedger_(maxLedger)
         , headerSize_(headerSize)
-        , processedLedgers_(0)
         , totalBytesWritten_(headerSize)
         , firstLedgerSize_(0)
+        , processedLedgers_(0)
     {
     }
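Note: `[[maybe_unused]]` (C++17) marks a declaration that may legitimately go unread, which suppresses unused-variable and unused-field warnings without deleting the constant or member. A generic sketch (not code from this repository):

    #include <cstdint>

    // Documents the header layout even though nothing reads it yet; without
    // the attribute some builds warn about an unused constant.
    [[maybe_unused]] static constexpr std::uint16_t RESERVED_MASK = 0xF000;

    class Predictor
    {
        // Stored for possible future use; clang's -Wunused-private-field
        // would otherwise flag it.
        [[maybe_unused]] std::uint64_t headerSize_;
        std::uint64_t totalBytes_;

    public:
        explicit Predictor(std::uint64_t headerSize)
            : headerSize_(headerSize), totalBytes_(headerSize)
        {
        }

        std::uint64_t
        total() const
        {
            return totalBytes_;
        }
    };

    int main()
    {
        return Predictor(64).total() == 64 ? 0 : 1;
    }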

View File

@@ -396,6 +396,8 @@ private:
return "SetRegularKey"; return "SetRegularKey";
if (inp == "HookSet") if (inp == "HookSet")
return "SetHook"; return "SetHook";
if (inp == "RemarksSet")
return "SetRemarks";
return inp; return inp;
}; };

View File

@@ -1301,10 +1301,8 @@ SHAMap::serializeToStream(
     std::size_t nodeCount = 0;
-    auto serializeLeaf = [&stream,
-                          &localBytesWritten,
-                          flushThreshold,
-                          &tryFlush](SHAMapLeafNode const& node) -> bool {
+    auto serializeLeaf = [&stream, &localBytesWritten, &tryFlush](
+                             SHAMapLeafNode const& node) -> bool {
         // write the node type
         auto t = node.getType();
         stream.write(reinterpret_cast<char const*>(&t), 1);
@@ -1335,10 +1333,8 @@ SHAMap::serializeToStream(
         return !stream.fail();
     };
-    auto serializeRemovedLeaf = [&stream,
-                                 &localBytesWritten,
-                                 flushThreshold,
-                                 &tryFlush](uint256 const& key) -> bool {
+    auto serializeRemovedLeaf =
+        [&stream, &localBytesWritten, &tryFlush](uint256 const& key) -> bool {
         // to indicate a node is removed it is written with a removal type
         auto t = SHAMapNodeType::tnREMOVE;
         stream.write(reinterpret_cast<char const*>(&t), 1);
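Note: both lambdas drop the `flushThreshold` capture because the bodies never read it; unused captures are flagged by clang's -Wunused-lambda-capture, and removing them also lets the capture list reflow onto one line. A generic sketch (names illustrative):

    #include <iostream>
    #include <sstream>
    #include <string>

    int main()
    {
        std::ostringstream stream;
        std::size_t bytesWritten = 0;
        std::size_t flushThreshold = 4096;

        // Adding flushThreshold to this capture list would be flagged by
        // -Wunused-lambda-capture, since the body never uses it.
        auto serialize = [&stream, &bytesWritten](std::string const& node) -> bool {
            stream << node;
            bytesWritten += node.size();
            return !stream.fail();
        };

        serialize("leaf");
        std::cout << bytesWritten << " bytes, threshold " << flushThreshold << "\n";
        return 0;
    }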

View File

@@ -5469,7 +5469,11 @@ class Import_test : public beast::unit_test::suite
         // burn 100'000 coins
         {
             test::jtx::Env env{
-                *this, network::makeNetworkVLConfig(21337, keys)};
+                *this,
+                network::makeNetworkVLConfig(21337, keys),
+                nullptr,
+                beast::severities::kDisabled,
+            };
             auto const envCoins = env.current()->info().drops;
             BEAST_EXPECT(envCoins == 100'000'000'000'000'000);
@@ -5509,7 +5513,11 @@ class Import_test : public beast::unit_test::suite
         // burn all coins
         {
             test::jtx::Env env{
-                *this, network::makeNetworkVLConfig(21337, keys)};
+                *this,
+                network::makeNetworkVLConfig(21337, keys),
+                nullptr,
+                beast::severities::kDisabled,
+            };
             auto const envCoins = env.current()->info().drops;
             BEAST_EXPECT(envCoins == 100'000'000'000'000'000);
@@ -5549,7 +5557,11 @@ class Import_test : public beast::unit_test::suite
         // burn no coins
         {
             test::jtx::Env env{
-                *this, network::makeNetworkVLConfig(21337, keys)};
+                *this,
+                network::makeNetworkVLConfig(21337, keys),
+                nullptr,
+                beast::severities::kDisabled,
+            };
             auto const envCoins = env.current()->info().drops;
             BEAST_EXPECT(envCoins == 100'000'000'000'000'000);

View File

@@ -317,7 +317,8 @@ class Catalogue_test : public beast::unit_test::suite
             Env loadEnv{
                 *this,
                 test::jtx::envconfig(test::jtx::port_increment, 3),
-                features};
+                features,
+            };
             // Now load the catalogue
             Json::Value params{Json::objectValue};
@@ -400,18 +401,8 @@ class Catalogue_test : public beast::unit_test::suite
             sourceLedger->info().accepted == loadedLedger->info().accepted);
         // Check SLE counts
-        std::size_t sourceCount = 0;
-        std::size_t loadedCount = 0;
-        for (auto const& sle : sourceLedger->sles)
-        {
-            sourceCount++;
-        }
-        for (auto const& sle : loadedLedger->sles)
-        {
-            loadedCount++;
-        }
+        std::size_t sourceCount = std::ranges::distance(sourceLedger->sles);
+        std::size_t loadedCount = std::ranges::distance(loadedLedger->sles);
         BEAST_EXPECT(sourceCount == loadedCount);
@@ -511,7 +502,8 @@ class Catalogue_test : public beast::unit_test::suite
                     cfg->NETWORK_ID = 123;
                     return cfg;
                 }),
-            features};
+            features,
+        };
         prepareLedgerData(env1, 5);
         // Create catalogue with network ID 123
@@ -535,7 +527,8 @@ class Catalogue_test : public beast::unit_test::suite
                     cfg->NETWORK_ID = 456;
                     return cfg;
                 }),
-            features};
+            features,
+        };
         {
             Json::Value params{Json::objectValue};
@@ -558,7 +551,13 @@ class Catalogue_test : public beast::unit_test::suite
         using namespace test::jtx;
         // Create environment and test data
-        Env env{*this, envconfig(), features};
+        Env env{
+            *this,
+            envconfig(),
+            features,
+            nullptr,
+            beast::severities::kDisabled,
+        };
         prepareLedgerData(env, 3);
         boost::filesystem::path tempDir =
@@ -649,7 +648,13 @@ class Catalogue_test : public beast::unit_test::suite
         using namespace test::jtx;
         // Create environment and test data
-        Env env{*this, envconfig(), features};
+        Env env{
+            *this,
+            envconfig(),
+            features,
+            nullptr,
+            beast::severities::kDisabled,
+        };
         prepareLedgerData(env, 3);
         boost::filesystem::path tempDir =
@@ -826,7 +831,7 @@ class Catalogue_test : public beast::unit_test::suite
         {
             auto result = env.client().invoke(
                 "catalogue_status", Json::objectValue)[jss::result];
-            std::cout << to_string(result) << "\n";
+            // std::cout << to_string(result) << "\n";
             BEAST_EXPECT(result[jss::job_status] == "no_job_running");
         }
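Note: `std::ranges::distance` (C++20) counts the elements of any iterable range, so the two hand-written counting loops above collapse to one line each. A small generic sketch of the substitution (container type illustrative):

    #include <iostream>
    #include <iterator>
    #include <list>

    int main()
    {
        std::list<int> sles{1, 2, 3, 4};

        // Hand-rolled counting, as the test previously did:
        std::size_t manualCount = 0;
        for (auto const& sle : sles)
        {
            (void)sle;
            ++manualCount;
        }

        // Equivalent single call; iterates when the range cannot report its
        // size directly.
        auto rangeCount = std::ranges::distance(sles);

        std::cout << manualCount << " == " << rangeCount << "\n";
        return 0;
    }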

View File

@@ -8,13 +8,18 @@ files_changed=$(git diff --name-only --relative HEAD~1 HEAD)
 # Loop through each file and search for the patterns
 for file in $files_changed; do
+    # Skip if the file is Import_test.cpp (exact filename match regardless of path)
+    if [[ "$(basename "$file")" == "Import_test.cpp" ]]; then
+        continue
+    fi
     # Construct the absolute path
     absolute_path="$repo_root/$file"
     # Check if the file exists (it might have been deleted)
     if [ -f "$absolute_path" ]; then
-        # Search the file for the given patterns
-        grep_output=$(grep -n -E '(([^rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]|^)(s|p)[rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]{25,60}([^(]|$)))|([^A-Fa-f0-9](02|03|ED)[A-Fa-f0-9]{64})' "$absolute_path")
+        # Search the file for the given patterns, but exclude lines containing 'public_key'
+        grep_output=$(grep -n -E '(([^rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]|^)(s|p)[rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz]{25,60}([^(]|$)))|([^A-Fa-f0-9](02|03|ED)[A-Fa-f0-9]{64})' "$absolute_path" | grep -v "public_key")
         # Check if grep found any matches
         if [ ! -z "$grep_output" ]; then
@@ -25,7 +30,3 @@ for file in $files_changed; do
         fi
     fi
 done
-# If the loop completes without finding any suspicious patterns
-echo "Success: No suspicious patterns found in the diff."
-exit 0