Mirror of https://github.com/XRPLF/rippled.git, synced 2025-11-23 04:25:51 +00:00

Compare commits (2 commits): pratik/use... and a1q123456/...
Commits: e5158182f7, 5ee23d92dd

Changed files include .github/scripts/strategy-matrix/generate.py (vendored, 110 lines)
@@ -156,107 +156,15 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
        # Add the configuration to the list, with the most unique fields first,
        # so that they are easier to identify in the GitHub Actions UI, as long
        # names get truncated.
        # Add Address and Thread (both coupled with UB) sanitizers when the distro is bookworm.
        if os['distro_version'] == 'bookworm' and f'{os["compiler_name"]}-{os["compiler_version"]}' in {'gcc-15', 'clang-20'}:
            extra_warning_flags = ''
            linker_relocation_flags = ''
            linker_flags = ''
            cxx_flags += ' -DBOOST_USE_TSAN -DBOOST_USE_UBSAN -DBOOST_USE_UCONTEXT'

            # Use the large code model to avoid relocation errors with large binaries.
            # Only for x86-64 (amd64) - ARM64 doesn't support -mcmodel=large.
            if architecture['platform'] == 'linux/amd64' and os['compiler_name'] == 'gcc':
                # Add -mcmodel=large and -fno-PIC to both compiler AND linker flags.
                # This is needed because sanitizers create very large binaries;
                # the large model removes the 2GB limitation that the medium model has.
                cxx_flags += ' -mcmodel=large -fno-PIC'
                linker_relocation_flags += ' -mcmodel=large -fno-PIC'

            # Create the default sanitizer flags.
            sanitizers_flags = 'undefined,float-divide-by-zero'

            if os['compiler_name'] == 'gcc':
                sanitizers_flags = f'{sanitizers_flags},signed-integer-overflow'
                # Suppress false positive stringop-overflow warnings in GCC.
                extra_warning_flags += ' -Wno-stringop-overflow'
                # Disable the mold, gold and lld linkers; use the default linker
                # (bfd/ld), which is more lenient with mixed code models.
                cmake_args += ' -Duse_mold=OFF -Duse_gold=OFF -Duse_lld=OFF'
                # Add linker flags for the sanitizers.
                linker_flags += f' -DCMAKE_EXE_LINKER_FLAGS="{linker_relocation_flags} -fsanitize=address,{sanitizers_flags}"'
                linker_flags += f' -DCMAKE_SHARED_LINKER_FLAGS="{linker_relocation_flags} -fsanitize=address,{sanitizers_flags}"'
            elif os['compiler_name'] == 'clang':
                sanitizers_flags = f'{sanitizers_flags},signed-integer-overflow,unsigned-integer-overflow'
                linker_flags += f' -DCMAKE_EXE_LINKER_FLAGS="-fsanitize=address,{sanitizers_flags}"'
                linker_flags += f' -DCMAKE_SHARED_LINKER_FLAGS="-fsanitize=address,{sanitizers_flags}"'

            # Sanitizers recommend a minimum of -O1 for reasonable performance.
            if "-O0" in cxx_flags:
                cxx_flags = cxx_flags.replace("-O0", "-O1")
            else:
                cxx_flags += " -O1"

            # First create the config for ASan.
            cmake_args_flags = f'{cmake_args} -DCMAKE_CXX_FLAGS="-fsanitize=address,{sanitizers_flags} -fno-omit-frame-pointer {cxx_flags} {extra_warning_flags}" {linker_flags}'

            # Add the config with ASan.
            configurations.append({
                'config_name': config_name + "_asan",
                'cmake_args': cmake_args_flags,
                'cmake_target': cmake_target,
                'build_only': build_only,
                'build_type': build_type,
                'os': os,
                'architecture': architecture,
                'sanitizers': 'Address'
            })

            linker_flags = ''
            # Update the config for TSan.
            # gcc doesn't support atomic_thread_fence with TSan, so suppress the warning.
            # TSan also doesn't work well with -mcmodel=large and the bfd linker.
            if os['compiler_name'] == 'gcc':
                extra_warning_flags += ' -Wno-tsan'
                cxx_flags = cxx_flags.replace('-mcmodel=large', '-mcmodel=medium')
                linker_relocation_flags = linker_relocation_flags.replace('-mcmodel=large', '-mcmodel=medium')
                # Add linker flags for the sanitizers.
                linker_flags += f' -DCMAKE_EXE_LINKER_FLAGS="{linker_relocation_flags} -fsanitize=thread,{sanitizers_flags}"'
                linker_flags += f' -DCMAKE_SHARED_LINKER_FLAGS="{linker_relocation_flags} -fsanitize=thread,{sanitizers_flags}"'
            elif os['compiler_name'] == 'clang':
                cxx_flags += ' -fsanitize-blacklist=$GITHUB_WORKSPACE/external/sanitizer-blacklist.txt'
                linker_flags += f' -DCMAKE_EXE_LINKER_FLAGS="-fsanitize=thread,{sanitizers_flags}"'
                linker_flags += f' -DCMAKE_SHARED_LINKER_FLAGS="-fsanitize=thread,{sanitizers_flags}"'

            # Note: We use the $GITHUB_WORKSPACE environment variable, which is expanded by the shell
            # before CMake processes it. This ensures the compiler receives an absolute path.
            # CMAKE_SOURCE_DIR won't work here because it's inside the CMAKE_CXX_FLAGS string.
            cmake_args_flags = f'{cmake_args} -DCMAKE_CXX_FLAGS="-fsanitize=thread,{sanitizers_flags} -fno-omit-frame-pointer {cxx_flags} {extra_warning_flags}" {linker_flags}'

            configurations.append({
                'config_name': config_name + "_tsan",
                'cmake_args': cmake_args_flags,
                'cmake_target': cmake_target,
                'build_only': build_only,
                'build_type': build_type,
                'os': os,
                'architecture': architecture,
                'sanitizers': 'Thread'
            })
        else:
            if cxx_flags:
                cmake_args_flags = f'{cmake_args} -DCMAKE_CXX_FLAGS={cxx_flags}'
            else:
                cmake_args_flags = f'{cmake_args}'
            configurations.append({
                'config_name': config_name,
                'cmake_args': cmake_args_flags,
                'cmake_target': cmake_target,
                'build_only': build_only,
                'build_type': build_type,
                'os': os,
                'architecture': architecture
            })
        configurations.append({
            'config_name': config_name,
            'cmake_args': cmake_args,
            'cmake_target': cmake_target,
            'build_only': build_only,
            'build_type': build_type,
            'os': os,
            'architecture': architecture,
        })

    return configurations

@@ -130,7 +130,7 @@ jobs:
            --target "${CMAKE_TARGET}"

      - name: Upload rippled artifact (Linux)
        if: ${{ github.repository_owner == 'XRPLF' && runner.os == 'Linux' }}
        if: ${{ runner.os == 'Linux' }}
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        env:
          BUILD_DIR: ${{ inputs.build_dir }}

@@ -17,7 +17,7 @@ find_dependency (Boost
  chrono
  container
  context
  coroutine2
  coroutine
  date_time
  filesystem
  program_options

@@ -2,7 +2,7 @@ find_package(Boost 1.82 REQUIRED
  COMPONENTS
    chrono
    container
    context
    coroutine
    date_time
    filesystem
    json
@@ -20,7 +20,7 @@ target_link_libraries(xrpl_boost
  Boost::headers
  Boost::chrono
  Boost::container
  Boost::context
  Boost::coroutine
  Boost::date_time
  Boost::filesystem
  Boost::json

@@ -1 +0,0 @@
include(sanitizers)
@@ -1,69 +0,0 @@
include(default)
{% set compiler, version, compiler_exe = detect_api.detect_default_compiler() %}
{% set default_sanitizer_flags = "undefined,float-divide-by-zero,signed-integer-overflow" %}
{% set sanitizers = os.getenv("SANITIZERS") %}

[settings]

[conf]

{% if sanitizers == "Address" or sanitizers == "Thread" %}
user.package:sanitizers={{ sanitizers }}
tools.info.package_id:confs+=["user.package:sanitizers"]
{% endif %}

{% if compiler == "gcc" %}

{% set asan_sanitizer_flags = "-fsanitize=address,"~default_sanitizer_flags~" -mcmodel=large -fno-PIC" %}
{% set tsan_sanitizer_flags = "-fsanitize=thread,"~default_sanitizer_flags~" -mcmodel=medium -fno-PIC" %}

{% if sanitizers == "Address" %}
tools.build:cxxflags+=['{{asan_sanitizer_flags}} -fno-omit-frame-pointer -O1 -Wno-stringop-overflow -DBOOST_USE_ASAN -DBOOST_USE_UBSAN -DBOOST_USE_UCONTEXT']
tools.build:sharedlinkflags+=['{{asan_sanitizer_flags}}']
tools.build:exelinkflags+=['{{asan_sanitizer_flags}}']
tools.cmake.cmaketoolchain:extra_variables={"use_mold": "OFF", "use_gold": "OFF", "use_lld": "OFF"}
tools.build:defines+=["BOOST_USE_ASAN", "BOOST_USE_UBSAN", "BOOST_USE_UCONTEXT"]

{% elif sanitizers == "Thread" %}
tools.build:cxxflags+=['{{tsan_sanitizer_flags}} -fno-omit-frame-pointer -O1 -Wno-stringop-overflow -Wno-tsan -DBOOST_USE_TSAN -DBOOST_USE_UBSAN -DBOOST_USE_UCONTEXT']
tools.build:sharedlinkflags+=['{{tsan_sanitizer_flags}}']
tools.build:exelinkflags+=['{{tsan_sanitizer_flags}}']
tools.build:defines+=["BOOST_USE_TSAN", "BOOST_USE_UBSAN", "BOOST_USE_UCONTEXT"]

{% endif %}

{% elif compiler == "apple-clang" or compiler == "clang" %}

{% set asan_sanitizer_flags = "-fsanitize=address,"~default_sanitizer_flags~",unsigned-integer-overflow" %}
{% set tsan_sanitizer_flags = "-fsanitize=thread,"~default_sanitizer_flags~",unsigned-integer-overflow" %}
{% if sanitizers == "Address" %}
tools.build:cxxflags+=['{{asan_sanitizer_flags}} -fno-omit-frame-pointer -O1 -DBOOST_USE_ASAN -DBOOST_USE_UBSAN -DBOOST_USE_UCONTEXT']
tools.build:sharedlinkflags+=['{{asan_sanitizer_flags}}']
tools.build:exelinkflags+=['{{asan_sanitizer_flags}}']
tools.build:defines+=["BOOST_USE_ASAN", "BOOST_USE_UBSAN", "BOOST_USE_UCONTEXT"]

{% elif sanitizers == "Thread" %}
tools.build:cxxflags+=['{{tsan_sanitizer_flags}} -fno-omit-frame-pointer -O1 -DBOOST_USE_TSAN -DBOOST_USE_UBSAN -DBOOST_USE_UCONTEXT']
tools.build:sharedlinkflags+=['{{tsan_sanitizer_flags}}']
tools.build:exelinkflags+=['{{tsan_sanitizer_flags}}']
tools.build:defines+=["BOOST_USE_TSAN", "BOOST_USE_UBSAN", "BOOST_USE_UCONTEXT"]

{% endif %}

{% endif %}

[options]
{% if compiler == "gcc" or compiler == "apple-clang" or compiler == "clang" %}
{% if sanitizers == "Address" or sanitizers == "Thread" %}
boost/*:without_context=False
boost/*:without_stacktrace=True
boost/*:without_coroutine2=False

{% if sanitizers == "Address" %}
boost/*:extra_b2_flags="context-impl=ucontext address-sanitizer=norecover undefined-sanitizer=norecover --with-coroutine2"
{% elif sanitizers == "Thread" %}
boost/*:extra_b2_flags="context-impl=ucontext thread-sanitizer=norecover undefined-sanitizer=norecover --with-coroutine2"
{% endif %}

{% endif %}
{% endif %}
@@ -102,7 +102,6 @@ class Xrpl(ConanFile):
        self.options['boost'].visibility = 'global'
        if self.settings.compiler in ['clang', 'gcc']:
            self.options['boost'].without_cobalt = True
            self.options['boost'].without_coroutine2 = False

    def requirements(self):
        # Conan 2 requires transitive headers to be specified
@@ -173,8 +172,7 @@ class Xrpl(ConanFile):
            'boost::headers',
            'boost::chrono',
            'boost::container',
            'boost::context',
            'boost::coroutine2',
            'boost::coroutine',
            'boost::date_time',
            'boost::filesystem',
            'boost::json',

@@ -65,11 +65,9 @@ XRPL_FIX (UniversalNumber, Supported::yes, VoteBehavior::DefaultNo
XRPL_FEATURE(XRPFees, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FEATURE(DisallowIncoming, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (RemoveNFTokenAutoTrustLine, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(FlowSortStrands, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(RequireFullyCanonicalSig, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(DeletableAccounts, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(Checks, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(Flow, Supported::yes, VoteBehavior::DefaultYes)

// The following amendments are obsolete, but must remain supported
// because they could potentially get enabled.
@@ -124,7 +122,9 @@ XRPL_RETIRE_FEATURE(Escrow)
XRPL_RETIRE_FEATURE(EnforceInvariants)
XRPL_RETIRE_FEATURE(ExpandedSignerList)
XRPL_RETIRE_FEATURE(FeeEscalation)
XRPL_RETIRE_FEATURE(Flow)
XRPL_RETIRE_FEATURE(FlowCross)
XRPL_RETIRE_FEATURE(FlowSortStrands)
XRPL_RETIRE_FEATURE(HardenedValidations)
XRPL_RETIRE_FEATURE(ImmediateOfferKilled)
XRPL_RETIRE_FEATURE(MultiSign)

@@ -267,9 +267,8 @@ public:
        // strand dry until the liquidity is actually used)

        // The implementation allows any single step to consume at most 1000
        // offers. With the `FlowSortStrands` feature enabled, if the total
        // number of offers consumed by all the steps combined exceeds 1500, the
        // payment stops.
        // offers. If the total number of offers consumed by all the steps
        // combined exceeds 1500, the payment stops.
        {
            Env env(*this, features);

@@ -457,16 +456,12 @@ public:
        // below the limit. However, if all the offers are consumed it would
        // create a tecOVERSIZE error.

        // The featureFlowSortStrands introduces a way of tracking the total
        // number of consumed offers; with this feature the transaction no
        // longer fails with a tecOVERSIZE error.
        // The implementation allows any single step to consume at most 1000
        // offers. With the `FlowSortStrands` feature enabled, if the total
        // number of offers consumed by all the steps combined exceeds 1500, the
        // payment stops. Since the first set of offers consumes 998 offers, the
        // second set will consume 998, which is not over the limit and the
        // payment stops. So 2*998, or 1996 is the expected value when
        // `FlowSortStrands` is enabled.
        // offers. If the total number of offers consumed by all the steps
        // combined exceeds 1500, the payment stops. Since the first set of
        // offers consumes 998 offers, the second set will consume 998, which is
        // not over the limit and the payment stops. So 2*998, or 1996 is the
        // expected value.
        n_offers(env, 998, alice, XRP(1.00), USD(1));
        n_offers(env, 998, alice, XRP(0.99), USD(1));
        n_offers(env, 998, alice, XRP(0.98), USD(1));
@@ -474,22 +469,12 @@ public:
        n_offers(env, 998, alice, XRP(0.96), USD(1));
        n_offers(env, 998, alice, XRP(0.95), USD(1));

        bool const withSortStrands = features[featureFlowSortStrands];

        auto const expectedTER = [&]() -> TER {
            if (!withSortStrands)
                return TER{tecOVERSIZE};
            return tesSUCCESS;
        }();
        auto const expectedTER = tesSUCCESS;

        env(offer(bob, USD(8000), XRP(8000)), ter(expectedTER));
        env.close();

        auto const expectedUSD = [&] {
            if (!withSortStrands)
                return USD(0);
            return USD(1996);
        }();
        auto const expectedUSD = USD(1996);

        env.require(balance(bob, expectedUSD));
    }
@@ -507,9 +492,7 @@ public:
        using namespace jtx;
        auto const sa = testable_amendments();
        testAll(sa);
        testAll(sa - featureFlowSortStrands);
        testAll(sa - featurePermissionedDEX);
        testAll(sa - featureFlowSortStrands - featurePermissionedDEX);
    }
};

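The offer-counting comments in the hunk above can be sanity-checked with a small standalone program. This is only an illustration of the arithmetic described in the test (998-offer sets, a 1000-offer per-step cap, a 1500-offer total cap, hence 2*998 = 1996); the constant names below are made up for the sketch and are not rippled identifiers.

    // Standalone sketch of the offer-consumption arithmetic from the comments above.
    #include <cassert>

    int main()
    {
        constexpr int perStepLimit = 1000;  // a single step may consume at most 1000 offers
        constexpr int totalLimit = 1500;    // the payment stops once the combined total exceeds this
        constexpr int setSize = 998;        // each set of offers created via n_offers()

        static_assert(setSize <= perStepLimit, "each set fits in one step");

        int consumed = 0;
        consumed += setSize;             // first set: 998 consumed, still under 1500, keep going
        assert(consumed <= totalLimit);
        consumed += setSize;             // second set: 1996 consumed, now over 1500, so the payment stops
        assert(consumed > totalLimit);
        assert(consumed == 1996);        // matches the USD(1996) the test expects bob to receive
        return 0;
    }
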
@@ -4330,7 +4330,7 @@ public:
        Account const ellie("ellie");
        Account const fiona("fiona");

        constexpr int ledgersInQueue = 10;
        constexpr int ledgersInQueue = 20;
        auto cfg = makeConfig(
            {{"minimum_txn_in_ledger_standalone", "1"},
             {"ledgers_in_queue", std::to_string(ledgersInQueue)},

@@ -798,16 +798,18 @@ public:

        {
            // an Env FeatureBitset has *only* those features
            Env env{*this, FeatureBitset{featureDynamicMPT | featureFlow}};
            Env env{
                *this, FeatureBitset{featureDynamicMPT | featureTokenEscrow}};
            BEAST_EXPECT(env.app().config().features.size() == 2);
            foreachFeature(supported, [&](uint256 const& f) {
                bool const has = (f == featureDynamicMPT || f == featureFlow);
                bool const has =
                    (f == featureDynamicMPT || f == featureTokenEscrow);
                this->BEAST_EXPECT(has == hasFeature(env, f));
            });
        }

        auto const missingSomeFeatures =
            testable_amendments() - featureDynamicMPT - featureFlow;
            testable_amendments() - featureDynamicMPT - featureTokenEscrow;
        BEAST_EXPECT(missingSomeFeatures.count() == (supported.count() - 2));
        {
            // an Env supported_features_except is missing *only* those features
@@ -815,7 +817,8 @@ public:
            BEAST_EXPECT(
                env.app().config().features.size() == (supported.count() - 2));
            foreachFeature(supported, [&](uint256 const& f) {
                bool hasnot = (f == featureDynamicMPT || f == featureFlow);
                bool hasnot =
                    (f == featureDynamicMPT || f == featureTokenEscrow);
                this->BEAST_EXPECT(hasnot != hasFeature(env, f));
            });
        }
@@ -828,7 +831,9 @@ public:
            Env env{
                *this,
                FeatureBitset{
                    featureDynamicMPT, featureFlow, *neverSupportedFeat}};
                    featureDynamicMPT,
                    featureTokenEscrow,
                    *neverSupportedFeat}};

            // this app will have just 2 supported amendments and
            // one additional never supported feature flag
@@ -836,7 +841,7 @@ public:
            BEAST_EXPECT(hasFeature(env, *neverSupportedFeat));

            foreachFeature(supported, [&](uint256 const& f) {
                bool has = (f == featureDynamicMPT || f == featureFlow);
                bool has = (f == featureDynamicMPT || f == featureTokenEscrow);
                this->BEAST_EXPECT(has == hasFeature(env, f));
            });
        }
@@ -856,7 +861,8 @@ public:
                (supported.count() - 2 + 1));
            BEAST_EXPECT(hasFeature(env, *neverSupportedFeat));
            foreachFeature(supported, [&](uint256 const& f) {
                bool hasnot = (f == featureDynamicMPT || f == featureFlow);
                bool hasnot =
                    (f == featureDynamicMPT || f == featureTokenEscrow);
                this->BEAST_EXPECT(hasnot != hasFeature(env, f));
            });
        }

@@ -123,7 +123,7 @@ class Feature_test : public beast::unit_test::suite
        BEAST_EXPECT(
            featureToName(fixRemoveNFTokenAutoTrustLine) ==
            "fixRemoveNFTokenAutoTrustLine");
        BEAST_EXPECT(featureToName(featureFlow) == "Flow");
        BEAST_EXPECT(featureToName(featureBatch) == "Batch");
        BEAST_EXPECT(
            featureToName(featureDeletableAccounts) == "DeletableAccounts");
        BEAST_EXPECT(

@@ -1103,7 +1103,7 @@ class LedgerEntry_test : public beast::unit_test::suite
            checkErrorValue(
                jrr[jss::result],
                "malformedAuthorizedCredentials",
                "Invalid field 'authorized_credentials', array empty.");
                "Invalid field 'authorized_credentials', not array.");
        }

        {
@@ -1144,7 +1144,7 @@ class LedgerEntry_test : public beast::unit_test::suite
            checkErrorValue(
                jrr[jss::result],
                "malformedAuthorizedCredentials",
                "Invalid field 'authorized_credentials', array too long.");
                "Invalid field 'authorized_credentials', not array.");
        }
    }

@@ -1584,6 +1584,8 @@ static RPCCallTestData const rpcCallTestArray[] = {
     "EUR/rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
     "rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
     "ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789",
     "junk",  // Note: indexing bug in parseBookOffers() requires junk
              // param.
     "200",
    },
    RPCCallTestData::no_exception,
@@ -1595,6 +1597,7 @@ static RPCCallTestData const rpcCallTestArray[] = {
    "issuer" : "rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
    "ledger_hash" : "ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789",
    "limit" : 200,
    "proof" : true,
    "taker_gets" : {
       "currency" : "EUR",
       "issuer" : "rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA"
@@ -1614,8 +1617,8 @@ static RPCCallTestData const rpcCallTestArray[] = {
     "EUR/rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
     "rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
     "ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789",
     "junk",  // Note: indexing bug in parseBookOffers() requires junk param.
     "200",
     "0",
     "MyMarker"},
    RPCCallTestData::no_exception,
    R"({
@@ -1627,6 +1630,7 @@ static RPCCallTestData const rpcCallTestArray[] = {
    "ledger_hash" : "ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789",
    "limit" : 200,
    "marker" : "MyMarker",
    "proof" : true,
    "taker_gets" : {
       "currency" : "EUR",
       "issuer" : "rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA"
@@ -1661,8 +1665,8 @@ static RPCCallTestData const rpcCallTestArray[] = {
     "EUR/rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
     "rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
     "ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789",
     "junk",  // Note: indexing bug in parseBookOffers() requires junk param.
     "200",
     "0",
     "MyMarker",
     "extra"},
    RPCCallTestData::no_exception,
@@ -1766,19 +1770,12 @@ static RPCCallTestData const rpcCallTestArray[] = {
     "EUR/rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
     "rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA",
     "ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789",
     "junk",  // Note: indexing bug in parseBookOffers() requires junk
              // param.
     "not_a_number",
    },
    RPCCallTestData::no_exception,
    R"({
    "method" : "book_offers",
    "params" : [
       {
          "error" : "invalidParams",
          "error_code" : 31,
          "error_message" : "Invalid field 'limit'."
       }
    ]
 })"},
    RPCCallTestData::bad_cast,
    R"()"},

    // can_delete
    // ------------------------------------------------------------------

@@ -43,15 +43,6 @@ RippleCalc::rippleCalculate(
    PaymentSandbox flowSB(&view);
    auto j = l.journal("Flow");

    if (!view.rules().enabled(featureFlow))
    {
        // The new payment engine was enabled several years ago. New transactions
        // should never use the old rules. Assume this is a replay.
        j.fatal()
            << "Old payment rules are required for this transaction. Assuming "
               "this is a replay and running with the new rules.";
    }

    {
        bool const defaultPaths =
            !pInputs ? true : pInputs->defaultPathsAllowed;

@@ -433,7 +433,7 @@ public:
        // add the strands in `next_` to `cur_`, sorted by theoretical quality.
        // Best quality first.
        cur_.clear();
        if (v.rules().enabled(featureFlowSortStrands) && !next_.empty())
        if (!next_.empty())
        {
            std::vector<std::pair<Quality, Strand const*>> strandQuals;
            strandQuals.reserve(next_.size());
@@ -719,46 +719,16 @@ flow(
                continue;
            }

            if (baseView.rules().enabled(featureFlowSortStrands))
            {
                XRPL_ASSERT(!best, "ripple::flow : best is unset");
                if (!f.inactive)
                    activeStrands.push(strand);
                best.emplace(f.in, f.out, std::move(*f.sandbox), *strand, q);
                activeStrands.pushRemainingCurToNext(strandIndex + 1);
                break;
            }

            activeStrands.push(strand);

            if (!best || best->quality < q ||
                (best->quality == q && best->out < f.out))
            {
                // If this strand is inactive (because it consumed too many
                // offers) and ends up having the best quality, remove it
                // from the activeStrands. If it doesn't end up having the
                // best quality, keep it active.

                if (f.inactive)
                {
                    // This should be `nextSize`, not `size`. This issue is
                    // fixed in featureFlowSortStrands.
                    markInactiveOnUse = activeStrands.size() - 1;
                }
                else
                {
                    markInactiveOnUse.reset();
                }

                best.emplace(f.in, f.out, std::move(*f.sandbox), *strand, q);
            }
            XRPL_ASSERT(!best, "ripple::flow : best is unset");
            if (!f.inactive)
                activeStrands.push(strand);
            best.emplace(f.in, f.out, std::move(*f.sandbox), *strand, q);
            activeStrands.pushRemainingCurToNext(strandIndex + 1);
            break;
        }

        bool const shouldBreak = [&] {
            if (baseView.rules().enabled(featureFlowSortStrands))
                return !best || offersConsidered >= maxOffersToConsider;
            return !best;
        }();
        bool const shouldBreak =
            !best || offersConsidered >= maxOffersToConsider;

        if (best)
        {

@@ -16,16 +16,18 @@ JobQueue::Coro::Coro(
    , type_(type)
    , name_(name)
    , running_(false)
    , coro_([this, fn = std::forward<F>(f)](
                boost::coroutines2::asymmetric_coroutine<void>::push_type&
                    do_yield) {
        yield_ = &do_yield;
        yield();
        fn(shared_from_this());
    , coro_(
          [this, fn = std::forward<F>(f)](
              boost::coroutines::asymmetric_coroutine<void>::push_type&
                  do_yield) {
              yield_ = &do_yield;
              yield();
              fn(shared_from_this());
#ifndef NDEBUG
        finished_ = true;
              finished_ = true;
#endif
    })
          },
          boost::coroutines::attributes(megabytes(1)))
{
}

@@ -9,7 +9,7 @@
#include <xrpl/basics/LocalValue.h>
#include <xrpl/json/json_value.h>

#include <boost/coroutine2/all.hpp>
#include <boost/coroutine/all.hpp>

#include <set>

@@ -50,8 +50,8 @@ public:
        std::mutex mutex_;
        std::mutex mutex_run_;
        std::condition_variable cv_;
        boost::coroutines2::asymmetric_coroutine<void>::pull_type coro_;
        boost::coroutines2::asymmetric_coroutine<void>::push_type* yield_;
        boost::coroutines::asymmetric_coroutine<void>::pull_type coro_;
        boost::coroutines::asymmetric_coroutine<void>::push_type* yield_;
#ifndef NDEBUG
        bool finished_ = false;
#endif
@@ -334,7 +334,7 @@ private:
    other requests while the RPC command completes its work asynchronously.

    postCoro() creates a Coro object. When the Coro ctor is called, and its
    coro_ member is initialized (a boost::coroutines2::pull_type), execution
    coro_ member is initialized (a boost::coroutines::pull_type), execution
    automatically passes to the coroutine, which we don't want at this point,
    since we are still in the handler thread context. It's important to note
    here that construction of a boost pull_type automatically passes execution to

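The class comment above hinges on one Boost behaviour: constructing an asymmetric coroutine's pull_type immediately runs the coroutine body until its first yield. Below is a minimal, self-contained sketch of that hand-off using the Boost.Coroutine library this change switches to. It is not rippled code; it only assumes Boost.Coroutine (and its Boost.Context dependency) is available to include and link.

    #include <boost/coroutine/asymmetric_coroutine.hpp>
    #include <iostream>

    int main()
    {
        using coro_t = boost::coroutines::asymmetric_coroutine<void>;

        // Constructing the pull_type runs the lambda until do_yield() is called,
        // which is exactly why the Coro constructor above yields right away.
        coro_t::pull_type source([](coro_t::push_type& do_yield) {
            std::cout << "1: runs during pull_type construction\n";
            do_yield();  // suspend; control returns to the code constructing 'source'
            std::cout << "3: runs when source() resumes the coroutine\n";
        });

        std::cout << "2: pull_type constructor has returned\n";
        source();  // resume past the first yield
        return 0;
    }
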
@@ -332,31 +332,15 @@ private:

        if (jvParams.size() >= 5)
        {
            try
            {
                int iLimit = jvParams[4u].asInt();
            int iLimit = jvParams[5u].asInt();

                if (iLimit > 0)
                    jvRequest[jss::limit] = iLimit;
            }
            catch (std::exception const&)
            {
                return RPC::invalid_field_error(jss::limit);
            }
            if (iLimit > 0)
                jvRequest[jss::limit] = iLimit;
        }

        if (jvParams.size() >= 6)
        if (jvParams.size() >= 6 && jvParams[5u].asInt())
        {
            try
            {
                int bProof = jvParams[5u].asInt();
                if (bProof)
                    jvRequest[jss::proof] = true;
            }
            catch (std::exception const&)
            {
                return RPC::invalid_field_error(jss::proof);
            }
            jvRequest[jss::proof] = true;
        }

        if (jvParams.size() == 7)

@@ -16,6 +16,8 @@
#include <xrpl/protocol/STXChainBridge.h>
#include <xrpl/protocol/jss.h>

#include <functional>

namespace ripple {

static Expected<uint256, Json::Value>
@@ -176,41 +178,18 @@
parseAuthorizeCredentials(Json::Value const& jv)
{
    if (!jv.isArray())
    {
        return LedgerEntryHelpers::invalidFieldError(
            "malformedAuthorizedCredentials",
            jss::authorized_credentials,
            "array");
    }

    std::uint32_t const n = jv.size();
    if (n > maxCredentialsArraySize)
    {
        return Unexpected(LedgerEntryHelpers::malformedError(
            "malformedAuthorizedCredentials",
            "Invalid field '" + std::string(jss::authorized_credentials) +
                "', array too long."));
    }

    if (n == 0)
    {
        return Unexpected(LedgerEntryHelpers::malformedError(
            "malformedAuthorizedCredentials",
            "Invalid field '" + std::string(jss::authorized_credentials) +
                "', array empty."));
    }

    STArray arr(sfAuthorizeCredentials, n);
    STArray arr(sfAuthorizeCredentials, jv.size());
    for (auto const& jo : jv)
    {
        if (!jo.isObject())
        {
            return LedgerEntryHelpers::invalidFieldError(
                "malformedAuthorizedCredentials",
                jss::authorized_credentials,
                "array");
        }

        if (auto const value = LedgerEntryHelpers::hasRequired(
                jo,
                {jss::issuer, jss::credential_type},
@@ -281,6 +260,13 @@ parseDepositPreauth(Json::Value const& dp, Json::StaticString const fieldName)
    auto const arr = parseAuthorizeCredentials(ac);
    if (!arr.has_value())
        return Unexpected(arr.error());
    if (arr->empty() || (arr->size() > maxCredentialsArraySize))
    {
        return LedgerEntryHelpers::invalidFieldError(
            "malformedAuthorizedCredentials",
            jss::authorized_credentials,
            "array");
    }

    auto const& sorted = credentials::makeSorted(arr.value());
    if (sorted.empty())

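For context, here is a hedged sketch of the authorized_credentials shape that parseAuthorizeCredentials() accepts, based only on the checks visible in these hunks: the field must be a non-empty array of objects no longer than maxCredentialsArraySize, and each object needs an issuer and a credential_type. The account and the hex credential type below are placeholders, not real data, and the helper name is invented for the sketch.

    #include <xrpl/json/json_value.h>

    // Builds an authorized_credentials array in the shape the parser checks for.
    Json::Value
    makeAuthorizedCredentials()
    {
        Json::Value credentials{Json::arrayValue};

        Json::Value entry{Json::objectValue};
        entry["issuer"] = "rnUy2SHTrB9DubsPmkJZUXTf5FcNDGrYEA";  // placeholder account
        entry["credential_type"] = "4B5943";                     // placeholder hex blob
        credentials.append(entry);

        // An empty array, an over-long array, or a non-object element is rejected
        // with the "malformedAuthorizedCredentials" error shown above.
        return credentials;
    }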