Compare commits

...

3 Commits

Author SHA1 Message Date
Bart
856470203a ci: Trigger clio pipeline on PRs targeting release branches (#6080)
This change triggers the Clio pipeline on PRs that target any of the `release*` branches (in addition to the `master` branch), as opposed to only the `release` branch.
2025-11-25 21:03:17 +00:00
Jingchen
b124c9f7e3 refactor: Retire Flow and FlowSortStrands amendments (#6054)
Amendments activated for more than 2 years can be retired. This change retires the Flow and FlowSortStrands amendments.
2025-11-25 14:21:17 +00:00
Bart
b550dc00ed ci: Remove missing commits check (#6077)
This change removes the CI check for missing commits, as well as a stray path to the publish-docs workflow that isn't used in the on-trigger workflow.
2025-11-24 21:43:39 -05:00
11 changed files with 48 additions and 170 deletions

View File

@@ -122,7 +122,7 @@ jobs:
needs: needs:
- should-run - should-run
- build-test - build-test
if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }} if: ${{ needs.should-run.outputs.go == 'true' && (startsWith(github.base_ref, 'release') || github.base_ref == 'master') }}
uses: ./.github/workflows/reusable-notify-clio.yml uses: ./.github/workflows/reusable-notify-clio.yml
secrets: secrets:
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }} clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}

View File

@@ -14,9 +14,7 @@ on:
- "master" - "master"
paths: paths:
# These paths are unique to `on-trigger.yml`. # These paths are unique to `on-trigger.yml`.
- ".github/workflows/reusable-check-missing-commits.yml"
- ".github/workflows/on-trigger.yml" - ".github/workflows/on-trigger.yml"
- ".github/workflows/publish-docs.yml"
# Keep the paths below in sync with those in `on-pr.yml`. # Keep the paths below in sync with those in `on-pr.yml`.
- ".github/actions/build-deps/**" - ".github/actions/build-deps/**"
@@ -63,10 +61,6 @@ defaults:
shell: bash shell: bash
jobs: jobs:
check-missing-commits:
if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
uses: ./.github/workflows/reusable-check-missing-commits.yml
build-test: build-test:
uses: ./.github/workflows/reusable-build-test.yml uses: ./.github/workflows/reusable-build-test.yml
strategy: strategy:

View File

@@ -1,62 +0,0 @@
# This workflow checks that all commits in the "master" branch are also in the
# "release" and "develop" branches, and that all commits in the "release" branch
# are also in the "develop" branch.
name: Check for missing commits
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-missing-commits
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
check:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
fetch-depth: 0
- name: Check for missing commits
env:
MESSAGE: |
If you are reading this, then the commits indicated above are missing
from the "develop" and/or "release" branch. Do a reverse-merge as soon
as possible. See CONTRIBUTING.md for instructions.
run: |
set -o pipefail
# Branches are ordered by how "canonical" they are. Every commit in one
# branch should be in all the branches behind it.
order=(master release develop)
branches=()
for branch in "${order[@]}"; do
# Check that the branches exist so that this job will work on forked
# repos, which don't necessarily have master and release branches.
echo "Checking if ${branch} exists."
if git ls-remote --exit-code --heads origin \
refs/heads/${branch} > /dev/null; then
branches+=(origin/${branch})
fi
done
prior=()
for branch in "${branches[@]}"; do
if [[ ${#prior[@]} -ne 0 ]]; then
echo "Checking ${prior[@]} for commits missing from ${branch}."
git log --oneline --no-merges "${prior[@]}" \
^$branch | tee -a "missing-commits.txt"
echo
fi
prior+=("${branch}")
done
if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
echo "${MESSAGE}"
exit 1
fi

View File

@@ -64,8 +64,6 @@ XRPL_FEATURE(Clawback, Supported::yes, VoteBehavior::DefaultNo
XRPL_FIX (UniversalNumber, Supported::yes, VoteBehavior::DefaultNo) XRPL_FIX (UniversalNumber, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FEATURE(XRPFees, Supported::yes, VoteBehavior::DefaultNo) XRPL_FEATURE(XRPFees, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (RemoveNFTokenAutoTrustLine, Supported::yes, VoteBehavior::DefaultYes) XRPL_FIX (RemoveNFTokenAutoTrustLine, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(FlowSortStrands, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(Flow, Supported::yes, VoteBehavior::DefaultYes)
// The following amendments are obsolete, but must remain supported // The following amendments are obsolete, but must remain supported
// because they could potentially get enabled. // because they could potentially get enabled.
@@ -124,7 +122,9 @@ XRPL_RETIRE_FEATURE(Escrow)
XRPL_RETIRE_FEATURE(EnforceInvariants) XRPL_RETIRE_FEATURE(EnforceInvariants)
XRPL_RETIRE_FEATURE(ExpandedSignerList) XRPL_RETIRE_FEATURE(ExpandedSignerList)
XRPL_RETIRE_FEATURE(FeeEscalation) XRPL_RETIRE_FEATURE(FeeEscalation)
XRPL_RETIRE_FEATURE(Flow)
XRPL_RETIRE_FEATURE(FlowCross) XRPL_RETIRE_FEATURE(FlowCross)
XRPL_RETIRE_FEATURE(FlowSortStrands)
XRPL_RETIRE_FEATURE(HardenedValidations) XRPL_RETIRE_FEATURE(HardenedValidations)
XRPL_RETIRE_FEATURE(ImmediateOfferKilled) XRPL_RETIRE_FEATURE(ImmediateOfferKilled)
XRPL_RETIRE_FEATURE(MultiSign) XRPL_RETIRE_FEATURE(MultiSign)

View File

@@ -267,9 +267,8 @@ public:
// strand dry until the liquidity is actually used) // strand dry until the liquidity is actually used)
// The implementation allows any single step to consume at most 1000 // The implementation allows any single step to consume at most 1000
// offers. With the `FlowSortStrands` feature enabled, if the total // offers. If the total number of offers consumed by all the steps
// number of offers consumed by all the steps combined exceeds 1500, the // combined exceeds 1500, the payment stops.
// payment stops.
{ {
Env env(*this, features); Env env(*this, features);
@@ -457,16 +456,12 @@ public:
// below the limit. However, if all the offers are consumed it would // below the limit. However, if all the offers are consumed it would
// create a tecOVERSIZE error. // create a tecOVERSIZE error.
// The featureFlowSortStrands introduces a way of tracking the total
// number of consumed offers; with this feature the transaction no
// longer fails with a tecOVERSIZE error.
// The implementation allows any single step to consume at most 1000 // The implementation allows any single step to consume at most 1000
// offers. With the `FlowSortStrands` feature enabled, if the total // offers. If the total number of offers consumed by all the steps
// number of offers consumed by all the steps combined exceeds 1500, the // combined exceeds 1500, the payment stops. Since the first set of
// payment stops. Since the first set of offers consumes 998 offers, the // offers consumes 998 offers, the second set will consume 998, which is
// second set will consume 998, which is not over the limit and the // not over the limit and the payment stops. So 2*998, or 1996 is the
// payment stops. So 2*998, or 1996 is the expected value when // expected value.
// `FlowSortStrands` is enabled.
n_offers(env, 998, alice, XRP(1.00), USD(1)); n_offers(env, 998, alice, XRP(1.00), USD(1));
n_offers(env, 998, alice, XRP(0.99), USD(1)); n_offers(env, 998, alice, XRP(0.99), USD(1));
n_offers(env, 998, alice, XRP(0.98), USD(1)); n_offers(env, 998, alice, XRP(0.98), USD(1));
@@ -474,24 +469,10 @@ public:
n_offers(env, 998, alice, XRP(0.96), USD(1)); n_offers(env, 998, alice, XRP(0.96), USD(1));
n_offers(env, 998, alice, XRP(0.95), USD(1)); n_offers(env, 998, alice, XRP(0.95), USD(1));
bool const withSortStrands = features[featureFlowSortStrands]; env(offer(bob, USD(8000), XRP(8000)), ter(tesSUCCESS));
auto const expectedTER = [&]() -> TER {
if (!withSortStrands)
return TER{tecOVERSIZE};
return tesSUCCESS;
}();
env(offer(bob, USD(8000), XRP(8000)), ter(expectedTER));
env.close(); env.close();
auto const expectedUSD = [&] { env.require(balance(bob, USD(1996)));
if (!withSortStrands)
return USD(0);
return USD(1996);
}();
env.require(balance(bob, expectedUSD));
} }
void void
@@ -507,9 +488,7 @@ public:
using namespace jtx; using namespace jtx;
auto const sa = testable_amendments(); auto const sa = testable_amendments();
testAll(sa); testAll(sa);
testAll(sa - featureFlowSortStrands);
testAll(sa - featurePermissionedDEX); testAll(sa - featurePermissionedDEX);
testAll(sa - featureFlowSortStrands - featurePermissionedDEX);
} }
}; };

View File

@@ -1054,7 +1054,7 @@ public:
// Charlie - queue a transaction, with a higher fee // Charlie - queue a transaction, with a higher fee
// than default // than default
env(noop(charlie), fee(15), queued); env(noop(charlie), fee(15), queued);
checkMetrics(*this, env, 6, initQueueMax, 4, 3); checkMetrics(*this, env, 6, initQueueMax, 4, 3, 257);
BEAST_EXPECT(env.seq(alice) == aliceSeq); BEAST_EXPECT(env.seq(alice) == aliceSeq);
BEAST_EXPECT(env.seq(bob) == bobSeq); BEAST_EXPECT(env.seq(bob) == bobSeq);
@@ -4330,7 +4330,7 @@ public:
Account const ellie("ellie"); Account const ellie("ellie");
Account const fiona("fiona"); Account const fiona("fiona");
constexpr int ledgersInQueue = 20; constexpr int ledgersInQueue = 30;
auto cfg = makeConfig( auto cfg = makeConfig(
{{"minimum_txn_in_ledger_standalone", "1"}, {{"minimum_txn_in_ledger_standalone", "1"},
{"ledgers_in_queue", std::to_string(ledgersInQueue)}, {"ledgers_in_queue", std::to_string(ledgersInQueue)},

View File

@@ -798,16 +798,18 @@ public:
{ {
// a Env FeatureBitset has *only* those features // a Env FeatureBitset has *only* those features
Env env{*this, FeatureBitset{featureDynamicMPT | featureFlow}}; Env env{
*this, FeatureBitset{featureDynamicMPT | featureTokenEscrow}};
BEAST_EXPECT(env.app().config().features.size() == 2); BEAST_EXPECT(env.app().config().features.size() == 2);
foreachFeature(supported, [&](uint256 const& f) { foreachFeature(supported, [&](uint256 const& f) {
bool const has = (f == featureDynamicMPT || f == featureFlow); bool const has =
(f == featureDynamicMPT || f == featureTokenEscrow);
this->BEAST_EXPECT(has == hasFeature(env, f)); this->BEAST_EXPECT(has == hasFeature(env, f));
}); });
} }
auto const missingSomeFeatures = auto const missingSomeFeatures =
testable_amendments() - featureDynamicMPT - featureFlow; testable_amendments() - featureDynamicMPT - featureTokenEscrow;
BEAST_EXPECT(missingSomeFeatures.count() == (supported.count() - 2)); BEAST_EXPECT(missingSomeFeatures.count() == (supported.count() - 2));
{ {
// a Env supported_features_except is missing *only* those features // a Env supported_features_except is missing *only* those features
@@ -815,7 +817,8 @@ public:
BEAST_EXPECT( BEAST_EXPECT(
env.app().config().features.size() == (supported.count() - 2)); env.app().config().features.size() == (supported.count() - 2));
foreachFeature(supported, [&](uint256 const& f) { foreachFeature(supported, [&](uint256 const& f) {
bool hasnot = (f == featureDynamicMPT || f == featureFlow); bool hasnot =
(f == featureDynamicMPT || f == featureTokenEscrow);
this->BEAST_EXPECT(hasnot != hasFeature(env, f)); this->BEAST_EXPECT(hasnot != hasFeature(env, f));
}); });
} }
@@ -828,7 +831,9 @@ public:
Env env{ Env env{
*this, *this,
FeatureBitset{ FeatureBitset{
featureDynamicMPT, featureFlow, *neverSupportedFeat}}; featureDynamicMPT,
featureTokenEscrow,
*neverSupportedFeat}};
// this app will have just 2 supported amendments and // this app will have just 2 supported amendments and
// one additional never supported feature flag // one additional never supported feature flag
@@ -836,7 +841,7 @@ public:
BEAST_EXPECT(hasFeature(env, *neverSupportedFeat)); BEAST_EXPECT(hasFeature(env, *neverSupportedFeat));
foreachFeature(supported, [&](uint256 const& f) { foreachFeature(supported, [&](uint256 const& f) {
bool has = (f == featureDynamicMPT || f == featureFlow); bool has = (f == featureDynamicMPT || f == featureTokenEscrow);
this->BEAST_EXPECT(has == hasFeature(env, f)); this->BEAST_EXPECT(has == hasFeature(env, f));
}); });
} }
@@ -856,7 +861,8 @@ public:
(supported.count() - 2 + 1)); (supported.count() - 2 + 1));
BEAST_EXPECT(hasFeature(env, *neverSupportedFeat)); BEAST_EXPECT(hasFeature(env, *neverSupportedFeat));
foreachFeature(supported, [&](uint256 const& f) { foreachFeature(supported, [&](uint256 const& f) {
bool hasnot = (f == featureDynamicMPT || f == featureFlow); bool hasnot =
(f == featureDynamicMPT || f == featureTokenEscrow);
this->BEAST_EXPECT(hasnot != hasFeature(env, f)); this->BEAST_EXPECT(hasnot != hasFeature(env, f));
}); });
} }

View File

@@ -1117,7 +1117,7 @@ class GetAmendments_test : public beast::unit_test::suite
// There should be at least 3 amendments. Don't do exact comparison // There should be at least 3 amendments. Don't do exact comparison
// to avoid maintenance as more amendments are added in the future. // to avoid maintenance as more amendments are added in the future.
BEAST_EXPECT(i == 254); BEAST_EXPECT(i == 254);
BEAST_EXPECT(majorities.size() >= 3); BEAST_EXPECT(majorities.size() >= 2);
// None of the amendments should be enabled yet. // None of the amendments should be enabled yet.
auto enableds = getEnabledAmendments(*env.closed()); auto enableds = getEnabledAmendments(*env.closed());
@@ -1135,7 +1135,7 @@ class GetAmendments_test : public beast::unit_test::suite
break; break;
} }
BEAST_EXPECT(i == 255); BEAST_EXPECT(i == 255);
BEAST_EXPECT(enableds.size() >= 3); BEAST_EXPECT(enableds.size() >= 2);
} }
void void

View File

@@ -123,7 +123,7 @@ class Feature_test : public beast::unit_test::suite
BEAST_EXPECT( BEAST_EXPECT(
featureToName(fixRemoveNFTokenAutoTrustLine) == featureToName(fixRemoveNFTokenAutoTrustLine) ==
"fixRemoveNFTokenAutoTrustLine"); "fixRemoveNFTokenAutoTrustLine");
BEAST_EXPECT(featureToName(featureFlow) == "Flow"); BEAST_EXPECT(featureToName(featureBatch) == "Batch");
BEAST_EXPECT(featureToName(featureDID) == "DID"); BEAST_EXPECT(featureToName(featureDID) == "DID");
BEAST_EXPECT( BEAST_EXPECT(
featureToName(fixIncludeKeyletFields) == "fixIncludeKeyletFields"); featureToName(fixIncludeKeyletFields) == "fixIncludeKeyletFields");
@@ -183,16 +183,16 @@ class Feature_test : public beast::unit_test::suite
using namespace test::jtx; using namespace test::jtx;
Env env{*this}; Env env{*this};
auto jrr = env.rpc("feature", "Flow")[jss::result]; auto jrr = env.rpc("feature", "fixAMMOverflowOffer")[jss::result];
BEAST_EXPECTS(jrr[jss::status] == jss::success, "status"); BEAST_EXPECTS(jrr[jss::status] == jss::success, "status");
jrr.removeMember(jss::status); jrr.removeMember(jss::status);
BEAST_EXPECT(jrr.size() == 1); BEAST_EXPECT(jrr.size() == 1);
BEAST_EXPECT( BEAST_EXPECT(
jrr.isMember("740352F2412A9909880C23A559FCECEDA3BE2126FED62FC7660D6" jrr.isMember("12523DF04B553A0B1AD74F42DDB741DE8DC06A03FC089A0EF197E"
"28A06927F11")); "2A87F1D8107"));
auto feature = *(jrr.begin()); auto feature = *(jrr.begin());
BEAST_EXPECTS(feature[jss::name] == "Flow", "name"); BEAST_EXPECTS(feature[jss::name] == "fixAMMOverflowOffer", "name");
BEAST_EXPECTS(!feature[jss::enabled].asBool(), "enabled"); BEAST_EXPECTS(!feature[jss::enabled].asBool(), "enabled");
BEAST_EXPECTS( BEAST_EXPECTS(
feature[jss::vetoed].isBool() && !feature[jss::vetoed].asBool(), feature[jss::vetoed].isBool() && !feature[jss::vetoed].asBool(),
@@ -200,7 +200,7 @@ class Feature_test : public beast::unit_test::suite
BEAST_EXPECTS(feature[jss::supported].asBool(), "supported"); BEAST_EXPECTS(feature[jss::supported].asBool(), "supported");
// feature names are case-sensitive - expect error here // feature names are case-sensitive - expect error here
jrr = env.rpc("feature", "flow")[jss::result]; jrr = env.rpc("feature", "fMM")[jss::result];
BEAST_EXPECT(jrr[jss::error] == "badFeature"); BEAST_EXPECT(jrr[jss::error] == "badFeature");
BEAST_EXPECT(jrr[jss::error_message] == "Feature unknown or invalid."); BEAST_EXPECT(jrr[jss::error_message] == "Feature unknown or invalid.");
} }
@@ -419,9 +419,9 @@ class Feature_test : public beast::unit_test::suite
break; break;
} }
// There should be at least 3 amendments. Don't do exact comparison // There should be at least 2 amendments. Don't do exact comparison
// to avoid maintenance as more amendments are added in the future. // to avoid maintenance as more amendments are added in the future.
BEAST_EXPECT(majorities.size() >= 3); BEAST_EXPECT(majorities.size() >= 2);
std::map<std::string, VoteBehavior> const& votes = std::map<std::string, VoteBehavior> const& votes =
ripple::detail::supportedAmendments(); ripple::detail::supportedAmendments();
@@ -476,8 +476,8 @@ class Feature_test : public beast::unit_test::suite
testcase("Veto"); testcase("Veto");
using namespace test::jtx; using namespace test::jtx;
Env env{*this, FeatureBitset{featureFlow}}; Env env{*this, FeatureBitset{featurePriceOracle}};
constexpr char const* featureName = "Flow"; constexpr char const* featureName = "fixAMMOverflowOffer";
auto jrr = env.rpc("feature", featureName)[jss::result]; auto jrr = env.rpc("feature", featureName)[jss::result];
if (!BEAST_EXPECTS(jrr[jss::status] == jss::success, "status")) if (!BEAST_EXPECTS(jrr[jss::status] == jss::success, "status"))

View File

@@ -43,15 +43,6 @@ RippleCalc::rippleCalculate(
PaymentSandbox flowSB(&view); PaymentSandbox flowSB(&view);
auto j = l.journal("Flow"); auto j = l.journal("Flow");
if (!view.rules().enabled(featureFlow))
{
// The new payment engine was enabled several years ago. New transaction
// should never use the old rules. Assume this is a replay
j.fatal()
<< "Old payment rules are required for this transaction. Assuming "
"this is a replay and running with the new rules.";
}
{ {
bool const defaultPaths = bool const defaultPaths =
!pInputs ? true : pInputs->defaultPathsAllowed; !pInputs ? true : pInputs->defaultPathsAllowed;

View File

@@ -433,7 +433,7 @@ public:
// add the strands in `next_` to `cur_`, sorted by theoretical quality. // add the strands in `next_` to `cur_`, sorted by theoretical quality.
// Best quality first. // Best quality first.
cur_.clear(); cur_.clear();
if (v.rules().enabled(featureFlowSortStrands) && !next_.empty()) if (!next_.empty())
{ {
std::vector<std::pair<Quality, Strand const*>> strandQuals; std::vector<std::pair<Quality, Strand const*>> strandQuals;
strandQuals.reserve(next_.size()); strandQuals.reserve(next_.size());
@@ -719,46 +719,16 @@ flow(
continue; continue;
} }
if (baseView.rules().enabled(featureFlowSortStrands)) XRPL_ASSERT(!best, "ripple::flow : best is unset");
{ if (!f.inactive)
XRPL_ASSERT(!best, "ripple::flow : best is unset"); activeStrands.push(strand);
if (!f.inactive) best.emplace(f.in, f.out, std::move(*f.sandbox), *strand, q);
activeStrands.push(strand); activeStrands.pushRemainingCurToNext(strandIndex + 1);
best.emplace(f.in, f.out, std::move(*f.sandbox), *strand, q); break;
activeStrands.pushRemainingCurToNext(strandIndex + 1);
break;
}
activeStrands.push(strand);
if (!best || best->quality < q ||
(best->quality == q && best->out < f.out))
{
// If this strand is inactive (because it consumed too many
// offers) and ends up having the best quality, remove it
// from the activeStrands. If it doesn't end up having the
// best quality, keep it active.
if (f.inactive)
{
// This should be `nextSize`, not `size`. This issue is
// fixed in featureFlowSortStrands.
markInactiveOnUse = activeStrands.size() - 1;
}
else
{
markInactiveOnUse.reset();
}
best.emplace(f.in, f.out, std::move(*f.sandbox), *strand, q);
}
} }
bool const shouldBreak = [&] { bool const shouldBreak =
if (baseView.rules().enabled(featureFlowSortStrands)) !best || offersConsidered >= maxOffersToConsider;
return !best || offersConsidered >= maxOffersToConsider;
return !best;
}();
if (best) if (best)
{ {