Compare commits

3 Commits

2dd31f8662  Nicholas Dudfield  2026-02-09 15:09:11 +07:00
fix: make subscription webhook delivery synchronous to prevent connection exhaustion

    RPCSub::sendThread() was passing the application's main io_service to
    RPCCall::fromNetwork(), which posts async HTTP operations and returns
    immediately. This caused sendThread() to fire unbounded concurrent
    connections, exhausting file descriptors when endpoints return errors.

    Use a local io_service with .run() (same pattern as rpcClient()) so each
    delivery blocks until completion or timeout. Also catch all exceptions
    to prevent the mSending flag from getting stuck, and log errors at warn
    level.

    Ref: XRPLF/rippled#6341

12e1afb694  tequ  2026-01-28 13:14:40 +10:00
Enhance dependency export process in GitHub Action to check for existing exports before executing. (#660)

c355ad9971  tequ  2026-01-27 19:30:50 +10:00
update mise-action to use cmake as aqua:Kitware/CMake (#671)
8 changed files with 65 additions and 68 deletions
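
The sendThread() fix in 2dd31f8662 uses the same blocking pattern that rpcClient() already uses (see the blocking-request markers in the RPCCall.cpp hunk below): post the async work to a function-local io_service and call run() so the caller blocks until every outstanding handler has completed. A minimal sketch of the difference, using plain Boost.Asio with a steady_timer standing in for the async HTTP request; deliverAsync/deliverBlocking are illustrative names, not functions from the codebase, and the rippled HTTPClient/RPCCall types are not reproduced here:

    #include <boost/asio.hpp>

    #include <chrono>
    #include <iostream>
    #include <memory>

    // Fire-and-forget: handlers are posted onto a long-lived, shared
    // io_service that some other thread is running, and this function
    // returns immediately. A loop calling it can therefore have an
    // unbounded number of operations in flight at once.
    void
    deliverAsync(boost::asio::io_service& shared)
    {
        auto timer = std::make_shared<boost::asio::steady_timer>(
            shared, std::chrono::seconds(1));
        timer->async_wait([timer](boost::system::error_code const&) {
            std::cout << "delivery finished\n";
        });
        // Still outstanding when we return.
    }

    // Blocking: a local io_service plus run() keeps the caller here until
    // every handler posted to it has run (or timed out), so each sender
    // thread has at most one delivery in flight.
    void
    deliverBlocking()
    {
        boost::asio::io_service local;
        boost::asio::steady_timer timer(local, std::chrono::seconds(1));
        timer.async_wait([](boost::system::error_code const&) {
            std::cout << "delivery finished\n";
        });
        local.run();  // blocks until there are no more outstanding async calls
    }

This is exactly the shape of the RPCSub.cpp change below: the shared m_io_service argument is replaced by a function-local io_service, followed by io_service.run().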

View File

@@ -134,10 +134,17 @@ runs:
     - name: Export custom recipes
       shell: bash
       run: |
-        conan export external/snappy --version 1.1.10 --user xahaud --channel stable
-        conan export external/soci --version 4.0.3 --user xahaud --channel stable
-        conan export external/wasmedge --version 0.11.2 --user xahaud --channel stable
+        # Export snappy if not already exported
+        conan list snappy/1.1.10@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
+          conan export external/snappy --version 1.1.10 --user xahaud --channel stable
+        # Export soci if not already exported
+        conan list soci/4.0.3@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
+          conan export external/soci --version 4.0.3 --user xahaud --channel stable
+        # Export wasmedge if not already exported
+        conan list wasmedge/0.11.2@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
+          conan export external/wasmedge --version 0.11.2 --user xahaud --channel stable
     - name: Install dependencies
       shell: bash
       env:

View File

@@ -43,14 +43,22 @@ jobs:
       # To isolate environments for each Runner, instead of installing globally with brew,
       # use mise to isolate environments for each Runner directory.
       - name: Setup toolchain (mise)
-        uses: jdx/mise-action@v2
+        uses: jdx/mise-action@v3.6.1
         with:
           cache: false
+          install: true
+          mise_toml: |
+            [tools]
+            cmake = "3.23.1"
+            python = "3.12"
+            pipx = "latest"
+            conan = "2"
+            ninja = "latest"
+            ccache = "latest"
       - name: Install tools via mise
         run: |
           mise install
-          mise use cmake@3.23.1 python@3.12 pipx@latest conan@2 ninja@latest ccache@latest
           mise reshim
           echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"

View File

@@ -24,7 +24,6 @@
 #include <ripple/app/misc/AmendmentTable.h>
 #include <ripple/app/misc/NetworkOPs.h>
 #include <ripple/app/tx/impl/Change.h>
-#include <ripple/app/tx/impl/SetHook.h>
 #include <ripple/app/tx/impl/SetSignerList.h>
 #include <ripple/app/tx/impl/XahauGenesis.h>
 #include <ripple/basics/Log.h>
@@ -584,6 +583,10 @@ Change::activateXahauGenesis()
         SetSignerList::removeFromLedger(ctx_.app, sb, accid, j_);
     // Step 4: install genesis hooks
+    sle->setFieldU32(
+        sfOwnerCount, sle->getFieldU32(sfOwnerCount) + genesis_hooks.size());
+    sb.update(sle);
     if (sb.exists(keylet::hook(accid)))
     {
         JLOG(j_.warn()) << "featureXahauGenesis genesis account already has "
@@ -594,7 +597,6 @@ Change::activateXahauGenesis()
     {
         ripple::STArray hooks{sfHooks, static_cast<int>(genesis_hooks.size())};
         int hookCount = 0;
-        uint32_t hookReserve = 0;
         for (auto const& [hookOn, wasmBytes, params] : genesis_hooks)
         {
@@ -700,14 +702,8 @@ Change::activateXahauGenesis()
             }
             hooks.push_back(hookObj);
-            hookReserve += SetHook::computeHookReserve(hookObj);
         }
-        sle->setFieldU32(
-            sfOwnerCount, sle->getFieldU32(sfOwnerCount) + hookReserve);
-        sb.update(sle);
         auto sle = std::make_shared<SLE>(keylet::hook(accid));
         sle->setFieldArray(sfHooks, hooks);
         sle->setAccountID(sfAccount, accid);
@@ -748,8 +744,6 @@ Change::activateXahauGenesis()
         ripple::STArray hooks{sfHooks, 1};
         STObject hookObj{sfHook};
         hookObj.setFieldH256(sfHookHash, governHash);
-        uint32_t hookReserve = 0;
         // parameters
         {
            std::vector<STObject> vec;
@@ -765,7 +759,6 @@ Change::activateXahauGenesis()
                 sfHookParameters, STArray(vec, sfHookParameters));
         }
-        hookReserve += SetHook::computeHookReserve(hookObj);
         hooks.push_back(hookObj);
         auto sle = std::make_shared<SLE>(hookKL);
@@ -792,8 +785,7 @@ Change::activateXahauGenesis()
         sle->setAccountID(sfRegularKey, noAccount());
         sle->setFieldU32(sfFlags, lsfDisableMaster);
-        sle->setFieldU32(
-            sfOwnerCount, sle->getFieldU32(sfOwnerCount) + hookReserve);
+        sle->setFieldU32(sfOwnerCount, sle->getFieldU32(sfOwnerCount) + 1);
         sb.update(sle);
     }
 }

View File

@@ -1173,23 +1173,6 @@ updateHookParameters(
     return tesSUCCESS;
 }
-uint32_t
-SetHook::computeHookReserve(STObject const& hookObj)
-{
-    if (!hookObj.isFieldPresent(sfHookHash))
-        return 0;
-    int reserve{1};
-    if (hookObj.isFieldPresent(sfHookParameters))
-        reserve += hookObj.getFieldArray(sfHookParameters).size();
-    if (hookObj.isFieldPresent(sfHookGrants))
-        reserve += hookObj.getFieldArray(sfHookGrants).size();
-    return reserve;
-};
 struct KeyletComparator
 {
     bool
@@ -1854,14 +1837,28 @@ SetHook::setHook()
     int oldHookReserve = 0;
     int newHookReserve = 0;
+    auto const computeHookReserve = [](STObject const& hookObj) -> int {
+        if (!hookObj.isFieldPresent(sfHookHash))
+            return 0;
+        int reserve{1};
+        if (hookObj.isFieldPresent(sfHookParameters))
+            reserve += hookObj.getFieldArray(sfHookParameters).size();
+        if (hookObj.isFieldPresent(sfHookGrants))
+            reserve += hookObj.getFieldArray(sfHookGrants).size();
+        return reserve;
+    };
     for (int i = 0; i < hook::maxHookChainLength(); ++i)
     {
         if (oldHooks && i < oldHookCount)
-            oldHookReserve +=
-                SetHook::computeHookReserve(((*oldHooks).get())[i]);
+            oldHookReserve += computeHookReserve(((*oldHooks).get())[i]);
         if (i < newHooks.size())
-            newHookReserve += SetHook::computeHookReserve(newHooks[i]);
+            newHookReserve += computeHookReserve(newHooks[i]);
     }
     reserveDelta = newHookReserve - oldHookReserve;

View File

@@ -91,9 +91,6 @@ public:
     static HookSetValidation
     validateHookSetEntry(SetHookCtx& ctx, STObject const& hookSetObj);
-    static uint32_t
-    computeHookReserve(STObject const& hookObj);
 private:
     TER
     setHook();

View File

@@ -1805,6 +1805,7 @@ rpcClient(
     }
     {
+        //@@start blocking-request
         boost::asio::io_service isService;
         RPCCall::fromNetwork(
             isService,
@@ -1828,6 +1829,7 @@ rpcClient(
             headers);
         isService.run(); // This blocks until there are no more
                          // outstanding async calls.
+        //@@end blocking-request
     }
     if (jvOutput.isMember("result"))
     {
@@ -1946,6 +1948,7 @@ fromNetwork(
     // HTTP call?
     auto constexpr RPC_NOTIFY = 30s;
+    //@@start async-request
     HTTPClient::request(
         bSSL,
         io_service,
@@ -1970,6 +1973,7 @@ fromNetwork(
             std::placeholders::_3,
             j),
         j);
+    //@@end async-request
 }
 } // namespace RPCCall

View File

@@ -156,13 +156,19 @@ private:
         // Send outside of the lock.
         if (bSend)
         {
-            // XXX Might not need this in a try.
             try
             {
                 JLOG(j_.info()) << "RPCCall::fromNetwork: " << mIp;
+                // Use a local io_service so the HTTP call blocks
+                // until completion (or timeout). Without this,
+                // fromNetwork() posts async ops to the app's
+                // io_service and returns immediately, causing
+                // unbounded concurrent connections that exhaust
+                // file descriptors when endpoints are failing.
+                boost::asio::io_service io_service;
                 RPCCall::fromNetwork(
-                    m_io_service,
+                    io_service,
                     mIp,
                     mPort,
                     mUsername,
@@ -173,12 +179,17 @@ private:
                     mSSL,
                     true,
                     logs_);
+                io_service.run();
             }
             catch (const std::exception& e)
             {
-                JLOG(j_.info())
+                JLOG(j_.warn())
                     << "RPCCall::fromNetwork exception: " << e.what();
             }
+            catch (...)
+            {
+                JLOG(j_.warn()) << "RPCCall::fromNetwork unknown exception";
+            }
         }
     } while (bSend);
 }

View File

@@ -139,9 +139,7 @@ struct XahauGenesis_test : public beast::unit_test::suite
             false, // means the calling test already burned some of the genesis
         bool skipTests = false,
         bool const testFlag = false,
-        bool const badNetID = false,
-        uint32_t const expectedOwnerCount =
-            10 /** testFlag ? 10 : 14 (default) */)
+        bool const badNetID = false)
     {
         using namespace jtx;
@@ -249,10 +247,7 @@ struct XahauGenesis_test : public beast::unit_test::suite
         BEAST_EXPECT(
             genesisAccRoot->getFieldAmount(sfBalance) ==
             XahauGenesis::GenesisAmount);
-        BEAST_EXPECT(
-            genesisAccRoot->getFieldU32(sfOwnerCount) == !testFlag
-                ? expectedOwnerCount
-                : 14);
+        BEAST_EXPECT(genesisAccRoot->getFieldU32(sfOwnerCount) == 2);
         // ensure the definitions are correctly set
         {
@@ -588,14 +583,7 @@ struct XahauGenesis_test : public beast::unit_test::suite
                 toBase58(t), membersStr);
         }
-        activate(
-            __LINE__,
-            env,
-            true,
-            false,
-            true,
-            {},
-            3 /* IRR,IRD,IMC */ + members.size() + tables.size());
+        activate(__LINE__, env, true, false, true);
         env.close();
         env.close();
@@ -2247,8 +2235,6 @@ struct XahauGenesis_test : public beast::unit_test::suite
         BEAST_EXPECT(!!hookLE);
         uint256 const ns = beast::zero;
         uint8_t mc = 0;
-        uint8_t paramsCount = 0;
         if (hookLE)
         {
             auto const hooksArray = hookLE->getFieldArray(sfHooks);
@@ -2256,9 +2242,6 @@ struct XahauGenesis_test : public beast::unit_test::suite
                 hooksArray.size() == 1 &&
                 hooksArray[0].getFieldH256(sfHookHash) == governHookHash);
-            paramsCount =
-                hooksArray[0].getFieldArray(sfHookParameters).size();
            for (Account const* m : members)
            {
                auto const mVec = vecFromAcc(*m);
@@ -2325,9 +2308,7 @@ struct XahauGenesis_test : public beast::unit_test::suite
         BEAST_EXPECT(!!root);
         if (root)
         {
-            BEAST_EXPECT(
-                root->getFieldU32(sfOwnerCount) ==
-                mc * 2 + 2 + paramsCount);
+            BEAST_EXPECT(root->getFieldU32(sfOwnerCount) == mc * 2 + 2);
             BEAST_EXPECT(root->getFieldU32(sfFlags) & lsfDisableMaster);
             BEAST_EXPECT(root->getAccountID(sfRegularKey) == noAccount());
         }