Compare commits


166 Commits

Author SHA1 Message Date
JCW
b5ee925be3 Rebuild all dependencies 2025-07-28 17:25:53 +01:00
JCW
7a8836c202 Rebuild all dependencies 2025-07-28 17:25:44 +01:00
JCW
386c0befe1 Skip rhel as the docker image is not working 2025-07-28 14:23:04 +01:00
JCW
76b34f7d82 Fix windows ENV:Path not found error 2025-07-28 14:00:05 +01:00
JCW
46073d7f87 Fix pipeline 2025-07-28 13:53:33 +01:00
Bart Thomee
30b64cb162 Only test RHEL 2025-07-28 08:30:26 -04:00
Bart Thomee
237d67b724 Run on all Linux, MacOS, and Windows variants 2025-07-27 18:38:09 -04:00
Bart Thomee
9bc08b25ed Run on all Linux, MacOS, and Windows variants 2025-07-27 18:36:20 -04:00
Bart Thomee
626f846ed6 Disable linux jobs, run only macos and windows 2025-07-27 18:19:40 -04:00
Bart Thomee
73f086b548 Disable linux jobs, run only macos and windows 2025-07-27 18:05:18 -04:00
Bart Thomee
c7578b00f3 Use self-hosted runners 2025-07-27 18:01:10 -04:00
Bart Thomee
e0a0d7c49d Use self-hosted runners 2025-07-27 17:59:59 -04:00
Bart Thomee
344df580e2 Add Ubuntu and MacOS 2025-07-27 17:05:47 -04:00
Bart Thomee
ed79654388 Add Ubuntu and MacOS 2025-07-27 17:04:12 -04:00
Bart Thomee
7125a6bd24 Add Ubuntu and MacOS 2025-07-27 17:00:10 -04:00
Bart Thomee
087a6044fd Add Ubuntu and MacOS 2025-07-27 16:59:13 -04:00
Bart Thomee
e02c49e7df Combine installing dependencies and building to optimize runtime 2025-07-27 15:15:39 -04:00
Bart Thomee
e1914cd8ef Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 15:01:46 -04:00
Bart Thomee
88ef67e73a Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:57:08 -04:00
Bart Thomee
06163a6d75 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:53:37 -04:00
Bart Thomee
d46c05e1b7 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:44:47 -04:00
Bart Thomee
2b3e131ec8 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:43:36 -04:00
Bart Thomee
9924bcc0d2 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:42:07 -04:00
Bart Thomee
195ac8ac46 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:38:52 -04:00
Bart Thomee
2c862a4060 Repo/org vars, unlike secrets, are not accessible in composite actions or reusable workflows 2025-07-27 14:30:27 -04:00
Bart Thomee
c6c836b82b Repo/org vars, unlike secrets, are not accessible in composite actions or reusable workflows 2025-07-27 14:29:22 -04:00
Bart Thomee
46d18c50f8 Re-enable installing dependencies and building+testing 2025-07-27 14:14:05 -04:00
Bart Thomee
ca1c2134ab Debug outputs 2025-07-27 14:09:50 -04:00
Bart Thomee
ea4f39baae Debug outputs 2025-07-27 14:08:27 -04:00
Bart Thomee
1291b574b6 Debug outputs 2025-07-27 14:06:44 -04:00
Bart Thomee
d05f6c1f90 Debug outputs 2025-07-27 14:05:23 -04:00
Bart Thomee
bd70aad699 Debug outputs 2025-07-27 14:04:49 -04:00
Bart Thomee
a5217e433d Bypass generate-matrix job 2025-07-27 12:53:52 -04:00
Bart Thomee
b869902453 Bypass generate-matrix job 2025-07-27 12:49:22 -04:00
Bart Thomee
b6ff27f51f Debug outputs 2025-07-27 12:46:31 -04:00
Bart Thomee
17b6235cc0 Debug outputs 2025-07-27 12:43:53 -04:00
Bart Thomee
355438b29c Debug outputs 2025-07-27 12:43:22 -04:00
Bart Thomee
0b44deb4e4 Debug outputs 2025-07-27 12:38:21 -04:00
Bart Thomee
b3637549f3 Fix strategy matrix outputs 2025-07-27 12:27:59 -04:00
Bart Thomee
a6c63dbc0c Fix workflow vs. action 2025-07-27 12:18:35 -04:00
Bart Thomee
7c9aad6e9f Fix workflow vs. action 2025-07-27 12:12:52 -04:00
Bart Thomee
5c4c658c38 Fix workflow vs. action 2025-07-27 12:08:13 -04:00
Bart Thomee
51a04d8c11 Use different concurrency groups for reusable workflows 2025-07-27 12:06:16 -04:00
Bart Thomee
74930b1bfe Use different concurrency groups for reusable workflows 2025-07-27 12:05:20 -04:00
Bart Thomee
a9d7393c42 Fix workflow vs. action 2025-07-27 12:01:12 -04:00
Bart Thomee
346666695b Fix workflow vs. action 2025-07-27 12:00:02 -04:00
Bart Thomee
a3a5e57e18 env is unavailable to pass to reusable workflows in 'with' 2025-07-27 11:53:43 -04:00
Bart Thomee
7acf9f517d Secrets cannot have types 2025-07-27 11:15:38 -04:00
Bart Thomee
b260565373 env is unavailable to pass to reusable workflows in 'with' 2025-07-27 11:13:17 -04:00
Bart Thomee
4fc0c9c1f2 env is unavailable to pass to reusable workflows in 'with' 2025-07-27 11:11:49 -04:00
Bart Thomee
b698085d8d env is unavailable to pass to reusable workflows in 'with' 2025-07-27 11:03:58 -04:00
Bart Thomee
6b34810222 Temporarily remove comments to check if they confuse GitHub 2025-07-27 10:57:49 -04:00
Bart Thomee
323a3ed4b6 Use workflow_call 2025-07-27 10:51:24 -04:00
Bart Thomee
915b4568f3 Use workflow_call 2025-07-27 10:50:31 -04:00
Bart Thomee
4e7480125f Use workflow_call 2025-07-27 10:49:00 -04:00
Bart Thomee
d250365900 Use workflow_call 2025-07-27 10:47:30 -04:00
Bart Thomee
8030670edf Use workflow_call 2025-07-27 10:42:53 -04:00
Bart Thomee
521cf7f157 Temporarily comment out ARM as Clang builds fail 2025-07-26 20:55:37 -04:00
Bart Thomee
e5d99954ca Temporarily comment out ARM as Clang builds fail 2025-07-26 20:54:45 -04:00
Bart Thomee
71336d8f19 Composite actions must have a shell defined in each step 2025-07-26 19:13:06 -04:00
Bart Thomee
d283eb1287 Remove lsb_release check 2025-07-26 19:10:03 -04:00
Bart Thomee
8d9c1719c1 Use jq to generate the strategy matrix 2025-07-26 19:08:37 -04:00
Bart Thomee
e6535d64cb Use jq to generate the strategy matrix 2025-07-26 19:06:21 -04:00
Bart Thomee
15ffa2c71d Chomp newlines and use different random generator 2025-07-26 15:46:02 -04:00
Bart Thomee
a8753ebed1 Use different delimiter per output variable 2025-07-26 15:42:36 -04:00
Bart Thomee
9659fc8e05 Work around tr: write error 2025-07-26 15:39:57 -04:00
Bart Thomee
86c0c4cf48 Rename image to container 2025-07-26 15:29:26 -04:00
Bart Thomee
1e29ebde08 Rename image to container 2025-07-26 15:28:05 -04:00
Bart Thomee
4779fb28fb Revamp CI workflows 2025-07-26 15:17:11 -04:00
Bronek Kozicki
7179ce9c58 Build options cleanup (#5581)
As we no longer support old compiler versions, we are bringing back some warnings by removing the `-Wno-...` options that are no longer relevant.
2025-07-25 15:48:22 -04:00
Bart
921aef9934 Updates Conan dependencies: Boost 1.86 (#5264) 2025-07-25 11:54:02 -04:00
Bronek Kozicki
e7a7bb83c1 VaultWithdraw destination account bugfix (#5572)
#5224 added (among other things) a `VaultWithdraw` transaction that allows setting the recipient of the withdrawn funds in the `Destination` transaction field. This technically turns this transaction into a payment, and in some respects the implementation does follow payment rules, e.g. enforcement of `lsfRequireDestTag` or `lsfDepositAuth`, or that an MPT transfer has a destination `MPToken`. However, for IOUs it missed verification that the destination account has a trust line to the asset issuer. Since the default behavior of `accountSendIOU` is to create this trust line (if missing), this is what `VaultWithdraw` currently does. This is incorrect, since the `Destination` might not be interested in holding the asset in question; this basically enables spammy transfers. This change therefore removes the automatic creation of a trust line to the `Destination` account in `VaultWithdraw`.
2025-07-25 13:53:25 +00:00
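A minimal standalone sketch of the rule change described in the VaultWithdraw fix above, using hypothetical types and a hypothetical return code (the real check lives in rippled's VaultWithdraw transactor and may differ):

#include <set>
#include <string>
#include <utility>

// Hypothetical stand-ins for rippled's TER codes and account/trust-line types.
enum TER { tesSUCCESS, tecNO_LINE };
using AccountID = std::string;
using TrustLines = std::set<std::pair<AccountID, AccountID>>;  // (holder, issuer)

TER
checkWithdrawDestination(
    TrustLines const& lines,
    AccountID const& destination,
    AccountID const& issuer)
{
    // The issuer never needs a trust line to its own asset. For any other
    // destination, a missing trust line now fails the withdrawal instead of
    // being created implicitly, preventing spammy transfers.
    if (destination != issuer && lines.count({destination, issuer}) == 0)
        return tecNO_LINE;
    return tesSUCCESS;
}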
Bart
5c2a3a2779 refactor: Update rocksdb (#5568)
This change updates RocksDB to its latest version. RocksDB is backward-compatible, so even though this is a major version bump, databases created with previous versions will continue to function.

The external RocksDB folder is removed, as the latest version available via Conan Center no longer needs custom patches.
2025-07-24 14:53:14 -04:00
Bronek Kozicki
b2960b9e7f Switch instrumentation workflow to use dependencies (#5607)
Before `XRPLF/ci` images, we did not have a `dependencies:` job for clang-16, so `instrumentation:` had to build its own dependencies. Now we have clang-16 Conan dependencies built in a separate job that can be used.
2025-07-24 09:20:50 -04:00
Bronek Kozicki
5713f9782a chore: Rename conan profile to default (#5599)
This change renames the `libxrpl` profile to `default` to make it more usable.
2025-07-24 10:35:47 +00:00
Chenna Keshava B S
60e340d356 Include network_id in validations and subscription stream responses (#5579)
This change includes `network_id` data in the validations and ledger subscription stream responses, as well as unit tests to validate the response fields. Fixes #4783
2025-07-23 17:53:18 +00:00
Bronek Kozicki
80d82c5b2b Add support for DomainID in MPTokenIssuance transactions (#5509)
This change adds support for `DomainID` to existing transactions `MPTokenIssuanceCreate` and `MPTokenIssuanceSet`.

In #5224 `DomainID` was added as an access control mechanism for `SingleAssetVault`. The actual implementation of this feature lies in `MPToken` and `MPTokenIssuance`, hence it makes sense to enable the use of `DomainID` also in `MPTokenIssuanceCreate` and `MPTokenIssuanceSet`, following the same rules as in Vault:

* `MPTokenIssuanceCreate` and `MPTokenIssuanceSet` can only set `DomainID` if flag `MPTRequireAuth` is set.
* `MPTokenIssuanceCreate` requires that `DomainID` be a non-zero uint256 number.
* `MPTokenIssuanceSet` allows `DomainID` to be zero (or empty) in which case it will remove `DomainID` from the `MPTokenIssuance` object.

The change is amendment-gated by `SingleAssetVault`. This is a non-breaking change because the `SingleAssetVault` amendment is `Supported::no`, i.e. at this moment considered a work in progress, and cannot be enabled on the network.
2025-07-23 13:21:30 -04:00
Vlad
433eeabfa5 chore: Remove unused code after flow cross retirement (#5575)
After the `FlowCross` amendment was retired (#5562), there was still some unused code left. This change removes the remaining remnants.
2025-07-23 13:57:51 +00:00
Jingchen
faa781b71f Remove obsolete owner pays fee feature and XRPL_ABANDON stanza (#5550)
If a feature was never voted on then it is safe to remove.
2025-07-23 13:27:41 +00:00
Valentin Balaschenko
c233df720a refactor: Makes HashRouter flags more type-safe (#5371)
This change addresses issue #5336: Refactor HashRouter flags to be more type-safe.

* Switched numeric flags to an enum type.
* Updated unit tests.
2025-07-23 12:03:12 +00:00
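As an illustration of the refactor described above, here is a minimal sketch of numeric flags replaced by a scoped enum; the flag names and values are hypothetical, not HashRouter's actual set:

#include <cstdint>
#include <type_traits>

// Before: plain integers, so any int could be passed where a flag was expected.
// After: a scoped enum; unrelated integers no longer convert implicitly.
enum class HashRouterFlags : std::uint32_t {
    UNDEFINED = 0x00,
    BAD = 0x02,    // known-invalid item
    SAVED = 0x04,  // already persisted
};

constexpr HashRouterFlags
operator|(HashRouterFlags lhs, HashRouterFlags rhs)
{
    using U = std::underlying_type_t<HashRouterFlags>;
    return static_cast<HashRouterFlags>(static_cast<U>(lhs) | static_cast<U>(rhs));
}

constexpr bool
any(HashRouterFlags flags)
{
    return static_cast<std::uint32_t>(flags) != 0;
}

// setFlags(hash, 42) no longer compiles; only named flags are accepted:
// setFlags(hash, HashRouterFlags::BAD | HashRouterFlags::SAVED);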
Bronek Kozicki
7ff4f79d30 Fix clang-format CI job (#5598)
For jobs running in containers, $GITHUB_WORKSPACE and ${{ github.workspace }} might not be the same directory. The actions/checkout step is supposed to check out into `$GITHUB_WORKSPACE` and then add it to safe.directory (see instructions at https://github.com/actions/checkout), but that's apparently not happening for some container images. We can't be sure what is actually happening, so we preemptively add both directories to `safe.directory`. See also the GitHub issue opened in 2022 that still has not been resolved: https://github.com/actions/runner/issues/2058.
2025-07-23 10:44:18 +00:00
Luc des Trois Maisons
60909655d3 Restructure beast::rngfill (#5563)
The current implementation of rngfill is prone to false warnings from GCC about array bounds violations. Looking at the code, the implementation naively manipulates both the bytes count and the buffer pointer directly to ensure the trailing memcpy doesn't overrun the buffer. As expressed, there is a data dependency on both fields between loop iterations.

Now, ideally, an optimizing compiler would realize that these dependencies were unnecessary and end up restructuring its intermediate representation into a functionally equivalent form with them absent. However, the point at which this occurs may be disjoint from when warning analyses are performed, potentially rendering them more difficult to determine precisely.

In addition, it may also consume a portion of the budget the optimizer has allocated to attempting to improve a translation unit's performance. Given this is a function template which requires context-sensitive instantiation, this code would be more prone than most to being inlined, with a decrease in optimization budget corresponding to the effort the optimizer has already expended, having already optimized one or more calling functions. Thus, the scope for impacting the ultimate quality of the generated code is elevated.

For this change, we rearrange things so that the location and contents of each memcpy can be computed independently, relying on a simple loop iteration counter as the only changing input between iterations.
2025-07-22 11:42:43 -04:00
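A minimal sketch of the restructuring described above, assuming a generic random engine: each memcpy's destination is computed from a plain loop counter, so no pointer or byte count is mutated across iterations (beast::rngfill itself differs in details):

#include <cstdint>
#include <cstring>
#include <random>

template <class Generator>
void
rngfill(void* buffer, std::size_t bytes, Generator& g)
{
    using result_type = typename Generator::result_type;
    constexpr std::size_t chunk = sizeof(result_type);
    auto* out = static_cast<std::uint8_t*>(buffer);

    std::size_t const full = bytes / chunk;
    for (std::size_t i = 0; i != full; ++i)
    {
        result_type const v = g();
        std::memcpy(out + i * chunk, &v, chunk);  // offset derived from counter only
    }

    // Trailing partial chunk, if any; its location is also counter-derived.
    if (std::size_t const rem = bytes % chunk)
    {
        result_type const v = g();
        std::memcpy(out + full * chunk, &v, rem);
    }
}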
Bronek Kozicki
03e46cd026 Remove include(default) from libxrpl profile (#5587)
Remove `include(default)` from `conan/profiles/libxrpl`. This means that we will now rely on compiler workarounds stored elsewhere, e.g. in global.conf.
2025-07-21 14:03:53 +00:00
Vito Tumas
e95683a0fb refactor: Change boost::shared_mutex to std::shared_mutex (#5576)
This change reverts the usage of boost::shared_mutex back to std::shared_mutex. The change was originally introduced as a workaround for a bug in glibc 2.28 and older versions, which could cause threads using std::shared_mutex to stall. This issue primarily affected Ubuntu 18.04 and earlier distributions, which we no longer support.
2025-07-21 13:14:22 +00:00
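The two mutex types are drop-in compatible for the common reader/writer pattern; a minimal sketch of the code shape affected by the revert (illustrative, not an actual rippled class):

#include <mutex>
#include <shared_mutex>

class Cache
{
    mutable std::shared_mutex mutex_;  // was boost::shared_mutex
    int value_ = 0;

public:
    int
    read() const
    {
        std::shared_lock lock(mutex_);  // many concurrent readers
        return value_;
    }

    void
    write(int v)
    {
        std::unique_lock lock(mutex_);  // exclusive writer
        value_ = v;
    }
};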
Jingchen
13353ae36d Fix macos runner (#5585)
This change fixes the MacOS pipeline issue by limiting GitHub to choosing among the existing runners, ensuring the new experimental runners are excluded until they are ready.
2025-07-21 12:22:32 +00:00
Chenna Keshava B S
1a40f18bdd Remove the type filter from "ledger" RPC command (#4934)
This issue was reported on the Javascript client library: XRPLF/xrpl.js#2611

The type filter (note: as of the latest version of rippled, the type parameter is deprecated) does not work as expected. This PR removes the type filter from the ledger command.
2025-07-18 17:58:46 +00:00
Bart
90e6380383 refactor: Update date, libarchive, nudb, openssl, sqlite3, xxhash packages (#5567)
This PR updates several dependencies to their latest versions. Not all dependencies have been updated, as some need to be patched and some require additional code changes due to backward incompatibilities introduced by the version bump.
2025-07-18 16:55:15 +00:00
Vlad
8bfaa7fe0a test: Run unit tests regardless of 'Supported' amendment status (#5537) 2025-07-16 11:47:54 +00:00
Vlad
c9135a63cd Retire Flow Cross amendment (#5562)
The FlowCross amendment is now permanently enabled, so all code branches that have this amendment disabled are removed.
2025-07-16 06:53:13 -04:00
Michael Legleux
452263eaa5 chore: Update CI to use Conan 2 (#5556)
This is a minimally invasive update to use Conan 2 provided by our new build images.
2025-07-15 22:17:22 +00:00
yinyiqian1
8aa94ea09a fixAMMClawbackRounding: adjust last holder's LPToken balance (#5513)
Due to rounding, the LPTokenBalance of the last LP might not match the LP's trustline balance. This was fixed for `AMMWithdraw` in `fixAMMv1_1` by adjusting the LPTokenBalance to be the same as the trustline balance. Since `AMMClawback` is also performing a withdrawal, we need to adjust LPTokenBalance in `AMMClawback` as well.

This change includes:
1. Refactored the `verifyAndAdjustLPTokenBalance` function in `AMMUtils`, which both `AMMWithdraw` and `AMMClawback` call to adjust LPTokenBalance.
2. Added the unit test `testLastHolderLPTokenBalance` to test the scenario.
3. Modified the existing unit tests for `fixAMMClawbackRounding`.
2025-07-11 20:03:28 +00:00
Bronek Kozicki
258ba71363 chore: Add gcc-12 workaround (#5554)
This change silences a dummy warning, which is breaking builds with GCC 12 (but not newer versions of GCC) in release mode only.
2025-07-11 18:57:09 +00:00
Shawn Xie
b8626ea3c6 Add MPT related txns into issuer's account history (#5530)
Currently there is no easy way to track MPT-related transactions for the issuer. This change allows MPT transactions to show up in the issuer's AccountTx RPC (to align with how IOUs work).
2025-07-11 17:50:03 +00:00
Vlad
6534757d85 chore: Remove unused headers (#5526) 2025-07-10 18:15:42 +00:00
Denis Angell
8e94ea3154 fix: add allowTrustLineLocking flag for account_info (#5525)
* Update the `account_info` API so that the `allowTrustLineLocking` flag is included in the response.
* The proposed `TokenEscrow` amendment added an `allowTrustLineLocking` flag in the `AccountRoot` object.
* In the API response, under `account_flags`, there is now an `allowTrustLineLocking` field with a boolean (`true` or `false`) value.
* For reference, the XLS-85 Token-Enabled Escrows implementation can be found in https://github.com/XRPLF/rippled/pull/5185
2025-07-10 16:29:51 +00:00
Bronek Kozicki
b113190563 Downgrade required CMake version for Antithesis SDK (#5548)
The current version was copied from `antithesis-sdk-cpp` but there is no logical reason to require this specific version of CMake. This change downgrades the version to make the project build with older CMake versions.
2025-07-10 11:46:02 -04:00
Ayaz Salikhov
358b7f50a7 fix: Link with boost libraries explicitly (#5546)
Having `boost::boost` in `self.requires` makes clio link with all Boost libraries. Among these are several Boost stacktrace backends which, when all linked in together, violate the ODR.
This change fixes the problem.
2025-07-10 06:14:27 -04:00
Bronek Kozicki
f47e2f4e82 chore: Fix compilation error with clang-20 and cleanup (#5543)
Removes clutter for old compilers, defaults to non-unity builds in CMake to match conanfile.py, and works around clang-20 compilation errors.
2025-07-09 17:47:34 +00:00
Bronek Kozicki
a7eea9546f test: Remove circular jtx.h dependencies (#5544)
Circular includes in header files can yield unpredictable results.
2025-07-09 08:43:11 -04:00
Jingchen
9874d47d7f Decouple CredentialHelpers from xrpld/app/tx (#5487)
This PR refactors `CredentialHelpers` and removes some unnecessary dependencies as a step toward modularization.

The ledger component is almost independent except that it references `MPTokenAuthorize` and `CredentialHelpers.h`, and the latter further references `Transactor.h`. This PR partially clears the path to modularizing the ledger component and decouples `CredentialHelpers` from xrpld.
2025-07-03 14:27:37 +00:00
Mayukha Vadari
c2f3e2e263 fix: crash when trace-logging in tests (#5529)
This PR fixes a crash in tests when the test `Env` is run at trace/debug log level.

This issue only affects tests, and only when logging at trace/debug level, so it is really only relevant during rippled development, and does not affect production servers.
2025-07-02 19:10:25 +00:00
Vlad
e18f27f5f7 test: switch some unit tests to doctest (#5383)
This change moves some tests from the current unit tests that are compiled into the rippled binary to using the doctest framework.
2025-06-26 19:35:31 +00:00
Jingchen
df6daf0d8f Add XRPL_ABANDON and use it to abandon OwnerPaysFee (#5510) 2025-06-26 12:09:05 -04:00
Jingchen
e9d46f0bfc Remove OwnerPaysFee as it's never fully supported (#5435)
The OwnerPaysFee amendment was never fully supported, and this change removes the feature to the extent possible.
2025-06-24 18:56:58 +00:00
Bart
42fd74b77b Removes release notes from codebase (#5508) 2025-06-24 13:10:00 -04:00
tequ
c55ea56c5e Add nftoken_id, nftoken_ids, offer_id to meta for transaction stream (#5230) 2025-06-24 09:02:22 -04:00
Michael Legleux
1e01cd34f7 Set version to 2.5.0 2025-06-23 10:13:01 -07:00
Alex Kremer
e2fa5c1b7c chore: Change libXRPL check conan remote to dev (#5482)
This change aligns the Conan remote used by the libXRPL Clio compatibility check workflow with the recent changes applied to Clio.
2025-06-20 17:02:16 +00:00
Ed Hennis
fc0984d286 Require a message on "Application::signalStop" (#5255)
This change adds a message parameter to Application::signalStop for extra context.
2025-06-20 16:24:34 +00:00
Valentin Balaschenko
8b3dcd41f7 refactor: Change getNodeFat Missing Node State Tree error into warning (#5455) 2025-06-20 15:44:42 +00:00
Denis Angell
8f2f5310e2 Fix: Improve error handling in Batch RPC response (#5503) 2025-06-18 17:46:45 -04:00
Michael Legleux
edb4f0342c Set version to 2.5.0-rc2 2025-06-11 17:10:45 -07:00
yinyiqian1
ea17abb92a fix: Ensure delegate tests do not silently fail with batch (#5476)
The tests that ensure `tfInnerBatchTxn` won't block delegated transactions silently fail in `Delegate_test.cpp`. This change removes these cases from that file and adds them to `Batch_test.cpp` instead where they do not silently fail, because there the batch delegate results are explicitly checked. Moving them to that file further avoids refactoring many helper functions.
2025-06-11 13:21:24 +08:00
Mayukha Vadari
35a40a8e62 fix: Improve multi-sign usage of simulate (#5479)
This change allows users to submit simulate requests from a multi-sign account without needing to specify the accounts that are doing the multi-signing, and fixes an error with simulate that allowed double-"signed" (both single-sign and multi-sign public keys are provided) transactions.
2025-06-10 14:47:27 +08:00
Ed Hennis
d494bf45b2 refactor: Collapse some split log messages into one (#5347)
Multi-line log messages are hard to work with. Writing this handful of related messages as one message should make the log a tiny bit easier to manage.
2025-06-06 16:01:02 +00:00
Vlad
8bf4a5cbff chore: Remove external project build cores division (#5475)
The CMake statements that make it seem as if the number of cores used to build external project dependencies is halved don't actually do anything. This change removes these statements.
2025-06-05 13:37:30 +00:00
Denis Angell
58c2c82a30 fix: Amendment-guard TokenEscrow preclaim and expand tests (#5473)
This change amendment-guards the preclaim for `TokenEscrow`, as well as expands tests to increase code coverage.
2025-06-05 12:54:45 +00:00
Michael Legleux
11edaa441d Set version to 2.5.0-rc1 (#5472) 2025-06-04 17:55:23 +00:00
yinyiqian1
a5e953b191 fix: Add tecNO_DELEGATE_PERMISSION and fix flags (#5465)
* Adds `tecNO_DELEGATE_PERMISSION` for unauthorized transactions sent by a delegated account.
* Returns `tecNO_TARGET` instead of `terNO_ACCOUNT` for the `DelegateSet` transaction if the delegated account does not exist.
* Fixes the `tfFullyCanonicalSig` and `tfInnerBatchTxn` transaction-blocking issue by adding `tfUniversal` to the permission-related masks in `txFlags.h`.
2025-06-03 22:20:29 +00:00
Mark Travis
506ae12a8c Increase network i/o capacity (#5464)
The change increases the default network I/O worker thread pool size from 2 to 6. This will improve stability, as worker thread saturation correlates to desyncs, particularly on high-traffic peers, such as hubs.
2025-06-03 21:33:09 +00:00
Ayaz Salikhov
0310c5cbe0 fix: Specify transitive_headers when building with Conan 2 (#5462)
To be able to consume `rippled` in Conan 2, the recipe should specify transitive_headers for external libraries that are present in the exported header files. This change retains compatibility with Conan 1, where this flag was not present.
2025-06-03 17:33:32 +00:00
Denis Angell
053e1af7ff Add support for XLS-85 Token Escrow (#5185)
- Specification: https://github.com/XRPLF/XRPL-Standards/pull/272
- Amendment: `TokenEscrow`
- Enables escrowing of IOU and MPT tokens in addition to native XRP.
- Allows accounts to lock issued tokens (IOU/MPT) in escrow objects, with support for freeze, authorization, and transfer rates.
- Adds new ledger fields (`sfLockedAmount`, `sfIssuerNode`, etc.) to track locked balances for IOU and MPT escrows.
- Updates EscrowCreate, EscrowFinish, and EscrowCancel transaction logic to support IOU and MPT assets, including proper handling of trustlines and MPT authorization, transfer rates, and locked balances.
- Enforces invariant checks for escrowed IOU/MPT amounts.
- Extends GatewayBalances RPC to report locked (escrowed) balances.
2025-06-03 12:51:55 -04:00
Vlad
7e24adbdd0 fix: Address NFT interactions with trustlines (#5297)
The changes are focused on fixing NFT transactions bypassing the trustline authorization requirement and potential invariant violation when interacting with deep frozen trustlines.
2025-06-02 16:13:20 +00:00
Gregory Tsipenyuk
621df422a7 fix: Add AMMv1_3 amendment (#5203)
* Add AMM bid/create/deposit/swap/withdraw/vote invariants:
  - Deposit, Withdrawal invariants: `sqrt(asset1Balance * asset2Balance) >= LPTokens`.
  - Bid: `sqrt(asset1Balance * asset2Balance) > LPTokens` and the pool balances don't change.
  - Create: `sqrt(asset1Balance * asset2Balance) == LPTokens`.
  - Swap: `asset1BalanceAfter * asset2BalanceAfter >= asset1BalanceBefore * asset2BalanceBefore`
     and `LPTokens` don't change.
  - Vote: `LPTokens` and pool balances don't change.
  - All AMM and swap transactions: amounts and tokens are greater than zero, except on withdrawal if all tokens
    are withdrawn.
* Add AMM deposit and withdraw rounding to ensure AMM invariant:
  - On deposit, tokens out are rounded downward and deposit amount is rounded upward.
  - On withdrawal, tokens in are rounded upward and withdrawal amount is rounded downward.
* Add Order Book Offer invariant to verify consumed amounts. Consumed amounts are less than the offer.
* Fix Bid validation. `AuthAccount` can't have duplicate accounts or the submitter account.
2025-06-02 09:52:10 -04:00
Shawn Xie
0a34b5c691 Add support for XLS-81 Permissioned DEX (#5404)
Modified transactions:
- OfferCreate
- Payment

Modified RPCs:
- book_changes
- subscribe
- book_offers
- ripple_path_find
- path_find

Spec: https://github.com/XRPLF/XRPL-Standards/pull/281
2025-05-30 13:24:48 -04:00
Matt Mankins
e0bc3dd51f docs: update example keyserver host in SECURITY.md (#5460) 2025-05-30 08:46:08 -04:00
Bronek Kozicki
dacecd24ba Fix unit build error (#5459)
This change fixes a build error caused by a `using namespace` statement inside a namespace scope.
2025-05-29 20:53:31 +00:00
Mayukha Vadari
05105743e9 chore[tests]: improve env.meta usage (#5457)
This commit makes the ledger close in env.meta conditional on whether it has already been closed (i.e., whether the current ledger has no transactions in it). This change will make it a bit easier to use, as it will still work if you close the ledger outside of this usage. Previously, if you accidentally closed the ledger outside of the meta function, it would segfault, and it was incredibly difficult to debug.
2025-05-29 16:28:09 +00:00
Bronek Kozicki
9e1fe9a85e Fix: Improve handling of expired credentials in VaultDeposit (#5452)
This change returns `tecEXPIRED` from VaultDeposit to allow the Transactor to remove the expired credentials.
2025-05-28 10:28:18 -04:00
Vito Tumas
d71ce51901 feat: improve squelching configuration (#5438)
This commit introduces the following changes:
* Renames the `vp_enable` config option to `vp_base_squelch_enable` to enable squelching for validators.
* Removes `vp_squelch` config option which was used to configure whether to send squelch messages to peers or not. With this flag removed, if squelching is enabled, squelch messages will be sent. This was an option used for debugging.
* Introduces a temporary `vp_base_squelch_max_trusted_peers` config option to change the max number of peers who are selected as validator message sources. This is a temporary option, which will be removed once a good value is found.
* Adds a traffic counter to count the number of times peers ignored squelch messages and kept sending messages for squelched validators.
* Moves the decision whether squelching is enabled and ready into Slot.h.
2025-05-28 06:30:03 -04:00
Michael Legleux
be668ee26d chore: Update CPP ref source (#5453) 2025-05-27 20:46:25 +00:00
Bart
cae5294b4e chore: Rename docs job (#5398) 2025-05-27 20:03:23 +00:00
Elliot.
cd777f79ef docs: add -j $(nproc) to BUILD.md (#5288)
This improves build times.
2025-05-27 19:11:13 +00:00
Valentin Balaschenko
8b9e21e3f5 docs: Update build instructions for Ubuntu 22.04+ (#5292) 2025-05-27 18:32:25 +00:00
Denis Angell
2a61aee562 Add Batch feature (XLS-56) (#5060)
- Specification: [XRPLF/XRPL-Standards 56](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-0056d-batch/README.md)
- Amendment: `Batch`
- Implements execution of multiple transactions within a single batch transaction with four execution modes: `tfAllOrNothing`, `tfOnlyOne`, `tfUntilFailure`, and `tfIndependent`.
- Enables atomic multi-party transactions where multiple accounts can participate in a single batch, with up to 8 inner transactions and 8 batch signers per batch transaction.
- Inner transactions use `tfInnerBatchTxn` flag with zero fees, no signature, and empty signing public key.
- Inner transactions are applied after the outer batch succeeds via the `applyBatchTransactions` function in apply.cpp.
- Network layer prevents relay of transactions with `tfInnerBatchTxn` flag - each peer applies inner transactions locally from the batch.
- Batch transactions are excluded from AccountDelegate permissions but inner transactions retain full delegation support.
- Metadata includes `ParentBatchID` linking inner transactions to their containing batch for traceability and auditing.
- Extended STTx with batch-specific signature verification methods and added protocol structures (`sfRawTransactions`, `sfBatchSigners`).
2025-05-23 19:53:53 +00:00
Bronek Kozicki
40ce8a8833 fix: Fix pseudo-account ID calculation (#5447)
Before #5224, the pseudo-account ID was calculated using a prefix expressed as a `std::uint16_t`. The refactoring that moved the pseudo-account ID calculation to View.cpp had accidentally changed the prefix type to `int` (deduced from `auto i = 0`), which in turn changed the length of the input to `sha512Half` from 2 bytes to 4, altering the result.

This resulted in the function calculating a different pseudo-account ID after the refactoring, breaking the ledger. This impacts AMMCreate, even when the `SingleAssetVault` amendment is not active. This change restores the prefix type to `std::uint16_t`.
2025-05-23 14:05:36 +00:00
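A tiny self-contained illustration of the type change described above: `auto i = 0` deduces `int`, so a byte-oriented hash such as `sha512Half` would see 4 bytes of input instead of the 2 bytes of a `std::uint16_t`, changing the digest:

#include <cstdint>
#include <iostream>

int
main()
{
    auto i = 0;           // deduced as int: 4 bytes (the accidental change)
    std::uint16_t p = 0;  // the original, and now restored, type: 2 bytes

    // A hash over the raw bytes of these prefixes consumes inputs of
    // different lengths, so the resulting pseudo-account IDs differ.
    std::cout << sizeof(i) << " vs " << sizeof(p) << '\n';  // prints "4 vs 2"
}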
Bronek Kozicki
7713ff8c5c Add codecov badge, raise .codecov.yml thresholds (#5428) 2025-05-22 14:43:41 +00:00
Olek
70371a4344 Fix initializer list initialization for GCC-15 (#5443) 2025-05-21 13:28:18 -04:00
Bronek Kozicki
e514de76ed Add single asset vault (XLS-65d) (#5224)
- Specification: XRPLF/XRPL-Standards#239
- Amendment: `SingleAssetVault`
- Implements a vault feature used to store a fungible asset (XRP, IOU, or MPT, but not NFT) and to receive shares in the vault (an MPT) in exchange.
- A vault can be private or public.
- A private vault can use permissioned domains, subject to the `PermissionedDomains` amendment.
- Shares can be exchanged back into the asset with `VaultWithdraw`.
- Permissions on the asset in the vault are transitively applied to shares in the vault.
- Issuer of the asset in the vault can clawback with `VaultClawback`.
- Extended `MPTokenIssuance` with `DomainID`, used by the permissioned domain on the vault shares.

Co-authored-by: John Freeman <jfreeman08@gmail.com>
2025-05-20 14:06:41 -04:00
Bart
dd62cfcc22 fix: Update path in CODEOWNERS (#5440) 2025-05-20 15:24:07 +00:00
Michael Legleux
09690f1b38 Set version to 2.5.0-b1 2025-05-18 20:39:18 +01:00
Valentin Balaschenko
380ba9f1c1 Fix: Resolve slow test on macOS pipeline (#5392)
Using std::barrier performs extremely poorly (~1 hour vs ~1 minute to run the test suite) in certain macOS environments.
To unblock our macOS CI pipeline, std::barrier has been replaced with a custom mutex-based barrier (Barrier) that significantly improves performance without compromising correctness.
2025-05-16 10:31:51 +00:00
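A minimal sketch of a reusable mutex-and-condition-variable barrier of the kind described (the class in the PR may differ in interface and details):

#include <condition_variable>
#include <cstddef>
#include <mutex>

class Barrier
{
    std::mutex mutex_;
    std::condition_variable cv_;
    std::size_t const count_;
    std::size_t waiting_ = 0;
    std::size_t generation_ = 0;

public:
    explicit Barrier(std::size_t count) : count_(count)
    {
    }

    void
    arrive_and_wait()
    {
        std::unique_lock lock(mutex_);
        auto const gen = generation_;
        if (++waiting_ == count_)
        {
            // Last arrival releases everyone and starts a new generation,
            // making the barrier reusable.
            ++generation_;
            waiting_ = 0;
            cv_.notify_all();
            return;
        }
        cv_.wait(lock, [&] { return gen != generation_; });
    }
};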
brettmollin
c3e9380fb4 fix: Update validators-example.txt fix xrplf example URL (#5384) 2025-05-16 09:49:14 +00:00
Jingchen
e3ebc253fa fix: Ensure that coverage file generation is atomic. (#5426)
When unit tests run in parallel, multiple threads can write into one file, which can corrupt the output files so that gcovr won't be able to parse them. This change adds -fprofile-update=atomic as instructed by https://gcc.gnu.org/bugzilla/show_bug.cgi?id=68080.
2025-05-12 14:54:01 +00:00
Bart
c6c7c84355 Configure CODEOWNERS for changes to RPC code (#5266)
To ensure changes to any RPC-related code are compatible with other services, such as Clio, the RPC team will be required to review them.
2025-05-12 12:42:03 +00:00
yinyiqian1
28f50cb7cf fix: enable LedgerStateFix for delegation (#5427) 2025-05-10 10:36:11 -04:00
Vito Tumas
3e152fec74 refactor: use east const convention (#5409)
This change refactors the codebase to use the "east const convention", and adds a clang-format rule to follow this convention.
2025-05-08 11:00:42 +00:00
yinyiqian1
2db2791805 Add PermissionDelegation feature (#5354)
This change implements the account permission delegation described in XLS-75d, see https://github.com/XRPLF/XRPL-Standards/pull/257.

* Introduces transaction-level and granular permissions that can be delegated to other accounts.
* Adds `DelegateSet` transaction to grant specified permissions to another account.
* Adds the `ltDelegate` ledger object to maintain the permission list for a delegating/delegated account pair.
* Adds an optional `Delegate` field in common fields, allowing a delegated account to send transactions on behalf of the delegating account within the granted permission scope. The `Account` field remains the delegating account; the `Delegate` field specifies the delegated account. The transaction is signed by the delegated account.
2025-05-08 06:14:02 -04:00
Vito Tumas
9ec2d7f8ff Enable passive squelching (#5358)
This change updates the squelching logic to accept squelch messages for untrusted validators. As a result, servers will also squelch untrusted validator messages, reducing the duplicate traffic they generate.

In particular:
* Updates the squelch message handling logic to squelch messages for all validators, not only trusted ones.
* Updates the logic to send squelch messages to peers that don't squelch themselves.
* Increases the threshold for the number of messages that a peer has to deliver to be considered a candidate source of validator messages.
2025-05-02 11:01:45 -04:00
Ed Hennis
4a084ce34c Improve transaction relay logic (#4985)
Combines four related changes:
1. "Decrease `shouldRelay` limit to 30s." Pretty self-explanatory. Currently, the limit is 5 minutes, by which point the `HashRouter` entry could have expired, making this transaction look brand new (and thus causing it to be relayed back to peers which have sent it to us recently).
2. "Give a transaction more chances to be retried." This will put a transaction into `LedgerMaster`'s held transactions if the transaction gets a `ter`, `tel`, or `tef` result. The old behavior was just `ter`.
     * Additionally, to prevent a transaction from being repeatedly held indefinitely, it must meet some extra conditions. (Documented in a comment in the code.)
3. "Pop all transactions with sequential sequences, or tickets." When a transaction is processed successfully, currently, one held transaction for the same account (if any) will be popped out of the held transactions list, and queued up for the next transaction batch. This change pops all transactions for the account, but only if they have sequential sequences (for non-ticket transactions) or use a ticket. This issue was identified from interactions with @mtrippled's #4504, which was merged, but unfortunately reverted later by #4852. When the batches were spaced out, it could potentially take a very long time for a large number of held transactions for an account to get processed through. However, whether batched or not, this change will help get held transactions cleared out, particularly if a missing earlier transaction is what held them up.
4. "Process held transactions through existing NetworkOPs batching." In the current processing, at the end of each consensus round, all held transactions are directly applied to the open ledger, then the held list is reset. This bypasses all of the logic in `NetworkOPs::apply` which, among other things, broadcasts successful transactions to peers. This means that the transaction may not get broadcast to peers for a really long time (5 minutes in the current implementation, or 30 seconds with this first commit). If the node is a bottleneck (either due to network configuration, or because the transaction was submitted locally), the transaction may not be seen by any other nodes or validators before it expires or causes other problems.
2025-05-01 13:58:18 -04:00
Vito Tumas
3502df2174 fix: Replaces random endpoint resolution with sequential (#5365)
This change addresses an issue where `rippled` attempts to connect to an IPv6 address, even when the local network lacks IPv6 support, resulting in a "Network is unreachable" error.

The fix replaces the custom endpoint selection logic with `boost::asio::async_connect`, which sequentially attempts to connect to the available endpoints until one succeeds or all fail.
2025-04-28 15:38:55 -04:00
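A minimal standalone sketch of the sequential-connect approach, assuming a plain TCP client against a hypothetical host; rippled's overlay wiring is more involved:

#include <boost/asio.hpp>
#include <iostream>

int
main()
{
    namespace asio = boost::asio;
    asio::io_context io;
    asio::ip::tcp::resolver resolver(io);
    asio::ip::tcp::socket socket(io);

    // Resolution may return both IPv6 and IPv4 endpoints.
    auto const endpoints = resolver.resolve("example.com", "443");

    // async_connect tries each endpoint in turn until one succeeds, so an
    // unreachable IPv6 address no longer aborts the whole attempt.
    asio::async_connect(
        socket,
        endpoints,
        [](boost::system::error_code const& ec,
           asio::ip::tcp::endpoint const& ep) {
            if (!ec)
                std::cout << "connected to " << ep << '\n';
            else
                std::cout << "all endpoints failed: " << ec.message() << '\n';
        });

    io.run();
}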
Vlad
fa1e25abef chore: Small clarification to lsfDefaultRipple comment (#5410) 2025-04-25 15:21:27 +00:00
Denis Angell
217ba8dd4d fix: CTID to use correct ledger_index (#5408) 2025-04-24 10:24:10 -04:00
Ed Hennis
405f4613d8 chore: Run CI on PRs that are Ready or have the "DraftRunCI" label (#5400)
- Avoids costly overhead for idle PRs where the CI results don't add any
  value.
2025-04-11 22:20:59 +00:00
Mayukha Vadari
cba512068b refactor: Clean up test logging to make it easier to search (#5396)
This PR replaces the word `failed` with `failure` in any test names and renames some test files to fix MSVC warnings, so that it is easier to search through the test output to find tests that failed.
2025-04-11 09:07:42 +00:00
Valentin Balaschenko
1c99ea23d1 Temporary disable automatic triggering macOS pipeline (#5397)
We temporarily disable running unit tests on macOS on the CI pipeline while we are investigating the delays.
2025-04-10 21:58:29 +02:00
Denis Angell
c4308b216f fix: Adds CTID to RPC tx and updates error (#4738)
This change fixes a number of issues involved with CTID:
* CTID is not present on all RPC tx transactions.
* rpcWRONG_NETWORK is missing from ErrorCodes.cpp
2025-04-10 12:38:52 +00:00
Wietse Wind
aafd2d8525 Fix: admin RPC webhook queue limit removal and timeout reduction (#5163)
When using subscribe on the admin RPC port to send webhooks for the transaction stream to a backend, on large(r) ledgers the endpoint receives fewer HTTP POSTs with TX information than the number of transactions in the ledger. This change removes the hardcoded queue length to avoid dropping TX notifications for the admin-only command. In addition, the per-request TTL for outgoing RPC HTTP calls has been reduced from 10 minutes to 30 seconds.
2025-04-10 06:37:24 +00:00
Denis Angell
a574ec6023 fix: fixPayChanV1 (#4717)
This change introduces a new fix amendment (`fixPayChanV1`) that prevents the creation of new `PaymentChannelCreate` transactions with a `CancelAfter` time less than the current ledger time. It piggybacks on fix1571.

Once the amendment is activated, creating a new `PaymentChannel` will require that if you specify the `CancelAfter` time/value, that value must be greater than or equal to the current ledger time.

Currently users can create a payment channel where the `CancelAfter` time is before the current ledger time. This results in the payment channel being immediately closed on the next PaymentChannel transaction.
2025-04-09 22:08:44 +00:00
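A minimal sketch of the gating logic described, with hypothetical names and return codes (the actual check lives in the PayChan transactor's preclaim and may use different codes):

#include <cstdint>

// Hypothetical stand-ins for rippled's TER codes.
enum TER { tesSUCCESS, temBAD_EXPIRATION };

TER
preclaimChannelCreate(
    bool fixPayChanV1Enabled,  // is the amendment active?
    std::uint32_t closeTime,   // current ledger close time
    bool hasCancelAfter,       // was CancelAfter specified?
    std::uint32_t cancelAfter)
{
    // After the amendment, a specified CancelAfter must be greater than or
    // equal to the current ledger time; otherwise the channel would be
    // closable by the very next PaymentChannel transaction.
    if (fixPayChanV1Enabled && hasCancelAfter && cancelAfter < closeTime)
        return temBAD_EXPIRATION;
    return tesSUCCESS;
}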
Mayukha Vadari
e429455f4d refactor(trivial): reorganize ledger entry tests and helper functions (#5376)
This PR splits out `ledger_entry` tests into its own file (`LedgerEntry_test.cpp`) and alphabetizes the helper functions in `LedgerEntry.cpp`. These commits were split out of #5237 to make that PR a little more manageable, since these basic trivial changes are most of the diff. There is no code change, just moving code around.
2025-04-09 17:02:03 +00:00
Vito Tumas
7692eeb9a0 Instrument proposal, validation and transaction messages (#5348)
Adds metric counters for the following P2P message types:

* Untrusted proposal and validation messages
* Duplicate proposal, validation and transaction messages
2025-04-09 15:33:17 +02:00
Bronek Kozicki
a099f5a804 Remove UNREACHABLE from NetworkOPsImp::processTrustedProposal (#5387)
It’s possible for this to happen legitimately if a set of peers, including a validator, are connected in a cycle, and the latency and message processing time between those peers is significantly less than the latency between the validator and the last peer. It’s unlikely in the real world, but obviously easy to simulate with Antithesis.
2025-04-08 14:43:34 +00:00
Michael Legleux
ca0bc767fe fix: Use the build image from ghcr.io (#5390)
The CI pipelines are constantly hitting Docker Hub's public rate limits since we increased the number of jobs we're running. This change switches over to images hosted in GitHub's registry.
2025-04-05 02:24:31 +00:00
Mayukha Vadari
4ba9288935 fix: disable channel_authorize when signing_support is disabled (#5385) 2025-04-05 01:08:34 +00:00
Valentin Balaschenko
e923ec6d36 Fix to correct memory ordering for compare_exchange_weak and wait in the intrusive reference counting logic (#5381)
This change addresses a memory ordering assertion failure observed on one of the Windows test machines during the IntrusiveShared_test suite.
2025-04-04 18:21:17 +00:00
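A minimal sketch of the pattern involved: `compare_exchange_weak` takes separate success and failure orderings, and the failure ordering must not be a release ordering; the intrusive reference-counting code in the PR is more involved:

#include <atomic>

// Increment a reference count only if it is currently non-zero.
bool
tryIncrementIfNonZero(std::atomic<int>& refCount)
{
    int expected = refCount.load(std::memory_order_acquire);
    while (expected != 0)
    {
        if (refCount.compare_exchange_weak(
                expected,
                expected + 1,
                std::memory_order_acq_rel,   // ordering on success
                std::memory_order_acquire))  // on failure: must not be release
            return true;
        // On failure, expected has been reloaded; loop and retry.
    }
    return false;
}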
Vlad
851d99d99e fix: uint128 ambiguousness breaking macos unity build (#5386) 2025-04-04 08:28:33 -04:00
623 changed files with 42406 additions and 19522 deletions

.clang-format

@@ -94,3 +94,4 @@ SpacesInSquareBrackets: false
 Standard: Cpp11
 TabWidth: 8
 UseTab: Never
+QualifierAlignment: Right

.codecov.yml

@@ -7,13 +7,13 @@ comment:
   show_carryforward_flags: false
 coverage:
-  range: "60..80"
+  range: "70..85"
   precision: 1
   round: nearest
   status:
     project:
       default:
-        target: 60%
+        target: 75%
         threshold: 2%
     patch:
       default:

.github/CODEOWNERS (new file)

@@ -0,0 +1,8 @@
# Allow anyone to review any change by default.
*
# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers

.github/actions/build-test/action.yml (new file)

@@ -0,0 +1,76 @@
name: Build and Test (Linux and MacOS)
inputs:
  build_dir:
    description: 'The directory where to build.'
    required: true
    type: string
  build_type:
    description: 'The build type to use.'
    required: true
    type: string
  cmake_args:
    description: 'Additional arguments to pass to CMake.'
    required: false
    type: string
  cmake_generator:
    description: 'The CMake generator to use for the build.'
    required: true
    type: string
  cmake_target:
    description: 'The CMake target to build.'
    required: true
    type: string
  os:
    description: 'A string representing which operating system is used.'
    required: true
    type: choice
    options:
      - Linux
      - MacOS
      - Windows
# Install the Conan profiles and log into the specified remote. We first remove
# the remote if it already exists, which can occur on self-hosted runners where
# the workspace is not cleaned up between runs.
runs:
  using: composite
  steps:
    - name: Configure CMake
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        cmake \
          ${{ inputs.cmake_generator && format('-G "{0}"', inputs.cmake_generator) || '' }} \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
          -Dtests=TRUE \
          -Dxrpld=TRUE \
          ${{ inputs.cmake_args }} \
          ..
    - name: Build the binary
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        cmake --build . \
          --config ${{ inputs.build_type }} \
          --parallel $(nproc) \
          --target ${{ inputs.cmake_target }}
    - name: Check linking
      if: inputs.os == 'Linux'
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        ldd ./rippled
        if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
          echo 'The binary is statically linked.'
        else
          echo 'The binary is dynamically linked.'
          exit 1
        fi
    - name: Test the binary
      shell: bash
      working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'Windows' && inputs.build_type || '' }}
      run: |
        ./rippled --unittest --unittest-jobs $(nproc)
        ctest -j $(nproc) --output-on-failure

.github/actions/build/action.yml (deleted)

@@ -1,34 +0,0 @@
name: build
inputs:
  generator:
    default: null
  configuration:
    required: true
  cmake-args:
    default: null
  cmake-target:
    default: all
# An implicit input is the environment variable `build_dir`.
runs:
  using: composite
  steps:
    - name: configure
      shell: bash
      run: |
        cd ${build_dir}
        cmake \
          ${{ inputs.generator && format('-G "{0}"', inputs.generator) || '' }} \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
          -Dtests=TRUE \
          -Dxrpld=TRUE \
          ${{ inputs.cmake-args }} \
          ..
    - name: build
      shell: bash
      run: |
        cmake \
          --build ${build_dir} \
          --config ${{ inputs.configuration }} \
          --parallel ${NUM_PROCESSORS:-$(nproc)} \
          --target ${{ inputs.cmake-target }}

.github/actions/configure-conan/action.yml (new file)

@@ -0,0 +1,51 @@
name: Configure Conan
inputs:
  conan_global_conf:
    description: 'The contents of the global Conan configuration file.'
    required: true
    type: string
  conan_remote_name:
    description: 'The name of the Conan remote to use.'
    required: true
    type: string
  conan_remote_url:
    description: 'The URL of the Conan remote to use.'
    required: true
    type: string
  conan_remote_username:
    description: 'The username for logging into the Conan remote.'
    required: true
    type: string
  conan_remote_password:
    description: 'The password for logging into the Conan remote.'
    required: true
    type: string
# Install the Conan profiles and log into the specified remote. We first remove
# the remote if it already exists, which can occur on self-hosted runners where
# the workspace is not cleaned up between runs.
runs:
  using: composite
  steps:
    - name: Install Conan profile
      shell: bash
      run: |
        echo "${{ inputs.conan_global_conf }}" >> $(conan config home)/global.conf
        conan config install conan/profiles/default -tf $(conan config home)/profiles/
        echo "Installed Conan profile:"
        conan profile show
    - name: Add Conan remote
      shell: bash
      run: |
        if conan remote list | grep -q '${{ inputs.conan_remote_name }}'; then
          conan remote remove ${{ inputs.conan_remote_name }}
          echo "Removed Conan remote '${{ inputs.conan_remote_name }}'."
        fi
        conan remote add --index 0 ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
        echo "Added new conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
    - name: Log into Conan remote
      shell: bash
      run: |
        conan remote login ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_username }} --password "${{ inputs.conan_remote_password }}"
        conan remote list-users

.github/actions/dependencies/action.yml (deleted)

@@ -1,57 +0,0 @@
name: dependencies
inputs:
  configuration:
    required: true
# An implicit input is the environment variable `build_dir`.
runs:
  using: composite
  steps:
    - name: unlock Conan
      shell: bash
      run: conan remove --locks
    - name: export custom recipes
      shell: bash
      run: |
        conan config set general.revisions_enabled=1
        conan export external/snappy snappy/1.1.10@
        conan export external/rocksdb rocksdb/9.7.3@
        conan export external/soci soci/4.0.3@
        conan export external/nudb nudb/2.0.8@
    - name: add Ripple Conan remote
      shell: bash
      run: |
        conan remote list
        conan remote remove ripple || true
        # Do not quote the URL. An empty string will be accepted (with
        # a non-fatal warning), but a missing argument will not.
        conan remote add ripple ${{ env.CONAN_URL }} --insert 0
    - name: try to authenticate to Ripple Conan remote
      id: remote
      shell: bash
      run: |
        # `conan user` implicitly uses the environment variables
        # CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
        # https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
        # https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
        # https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
        echo outcome=$(conan user --remote ripple --password >&2 \
          && echo success || echo failure) | tee ${GITHUB_OUTPUT}
    - name: list missing binaries
      id: binaries
      shell: bash
      # Print the list of dependencies that would need to be built locally.
      # A non-empty list means we have "failed" to cache binaries remotely.
      run: |
        echo missing=$(conan info . --build missing --settings build_type=${{ inputs.configuration }} --json 2>/dev/null | grep '^\[') | tee ${GITHUB_OUTPUT}
    - name: install dependencies
      shell: bash
      run: |
        mkdir ${build_dir}
        cd ${build_dir}
        conan install \
          --output-folder . \
          --build missing \
          --options tests=True \
          --options xrpld=True \
          --settings build_type=${{ inputs.configuration }} \
          ..

.github/actions/install-dependencies/action.yml (new file)

@@ -0,0 +1,37 @@
name: Install-dependencies
inputs:
  build_dir:
    description: 'The directory where to build.'
    required: true
    type: string
  build_type:
    description: 'The build type to use.'
    required: true
    type: string
  conan_remote_name:
    description: 'The name of the Conan remote to use.'
    required: true
    type: string
# Install the Conan profiles and log into the specified remote. We first remove
# the remote if it already exists, which can occur on self-hosted runners where
# the workspace is not cleaned up between runs.
runs:
  using: composite
  steps:
    - name: Install Conan dependencies
      shell: bash
      run: |
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}
        conan install \
          --output-folder . \
          --build * \
          --options:host "&:tests=True" \
          --options:host "&:xrpld=True" \
          --settings:all build_type=${{ inputs.build_type }} \
          ..
    - name: Upload Conan dependencies
      shell: bash
      run: conan upload '*' --confirm --remote ${{ inputs.conan_remote_name }}

.github/workflows/build-debian.yml (new file)

@@ -0,0 +1,225 @@
# This workflow builds and tests the binary on various Debian configurations.
name: Debian
on:
  workflow_call:
    inputs:
      build_dir:
        description: 'The directory where to build.'
        required: false
        type: string
        default: '.build'
      conan_remote_name:
        description: 'The name of the Conan remote to use.'
        required: true
        type: string
      conan_remote_url:
        description: 'The URL of the Conan remote to use.'
        required: true
        type: string
    secrets:
      conan_remote_username:
        description: 'The username for logging into the Conan remote.'
        required: true
      conan_remote_password:
        description: 'The password for logging into the Conan remote.'
        required: true
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-debian
  cancel-in-progress: true
defaults:
  run:
    shell: bash
env:
  # Global configuration for Conan. This is used to set the number of parallel
  # downloads, uploads, and build jobs. The verbosity is set to verbose to
  # provide more information during the build process.
  CONAN_GLOBAL_CONF: |
    core.download:parallel={{ os.cpu_count() }}
    core.upload:parallel={{ os.cpu_count() }}
    tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
    tools.build:verbosity=verbose
    tools.compilation:verbosity=verbose
  # GitHub does not allow us to specify a reusable matrix strategy, so to avoid
  # duplication, we define it here using environment variables and create the
  # matrix in the first job. The matrix defined below should be kept in sync
  # with https://github.com/XRPLF/ci/blob/main/.github/workflows/debian.yml.
  STRATEGY_MATRIX_ARCHITECTURE: >-
    [
      {
        "platform": "linux/amd64",
        "runner": ["self-hosted", "Linux", "X64", "devbox"]
      },
      {
        "platform": "linux/arm64",
        "runner": ["self-hosted", "Linux", "ARM64", "devbox"]
      }
    ]
  STRATEGY_MATRIX_OS: >-
    [
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "gcc",
        "compiler_version": "12"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "gcc",
        "compiler_version": "13"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "gcc",
        "compiler_version": "14"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "clang",
        "compiler_version": "16"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "clang",
        "compiler_version": "17"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "clang",
        "compiler_version": "18"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "clang",
        "compiler_version": "19"
      }
    ]
  STRATEGY_MATRIX_BUILD_TYPE: >-
    [
      "Debug",
      "Release"
    ]
  STRATEGY_MATRIX_CMAKE_ARGS: >-
    [
      "-DUnity=OFF",
      "-DUnity=ON"
    ]
  # STRATEGY_MATRIX_ARCHITECTURE: >-
  # [
  #   {
  #     "platform": "linux/amd64",
  #     "runner": ["self-hosted", "Linux", "X64"]
  #   }
  # ]
  # STRATEGY_MATRIX_OS: >-
  # [
  #   {
  #     "distro": "debian",
  #     "release": "bookworm",
  #     "compiler_name": "gcc",
  #     "compiler_version": "12"
  #   }
  # ]
  # STRATEGY_MATRIX_BUILD_TYPE: >-
  # [
  #   "Release"
  # ]
  # STRATEGY_MATRIX_CMAKE_ARGS: >-
  # [
  #   "-DUnity=ON"
  # ]
jobs:
  # Generate the strategy matrix and expose environment variables to be used by
  # following jobs. Exposing env vars this way is needed as they cannot be
  # directly passed as inputs to reusable workflows (although they can be passed
  # as inputs to actions).
  generate-outputs:
    runs-on: ubuntu-latest
    steps:
      - name: Generate outputs
        id: generate
        run: |
          echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
    outputs:
      conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
      strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
      strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
      strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
      strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
  # Install and cache the dependencies, and then build and test the binary using
  # various configurations.
  build-test:
    needs:
      - generate-outputs
    runs-on: ${{ matrix.architecture.runner }}
    container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
        os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
        build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
        cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check configuration
        shell: bash
        run: |
          echo "Checking path"
          echo ${PATH} | tr ':' '\n'
          echo "Checking environment variables."
          env | sort
      - name: Check versions
        shell: bash
        run: |
          echo "Checking CMake version."
          cmake --version
          echo "Checking compiler version."
          ${CC} --version
          echo "Checking Conan version."
          conan --version
          echo "Checking Ninja version."
          ninja --version
      - name: Configure Conan
        uses: ./.github/actions/configure-conan
        with:
          conan_global_conf: ${{ inputs.conan_global_conf }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
          conan_remote_username: ${{ secrets.conan_remote_username }}
          conan_remote_password: ${{ secrets.conan_remote_password }}
      - name: Install dependencies
        uses: ./.github/actions/install-dependencies
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
      - name: Build and test the binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_generator: 'Ninja'
          cmake_target: 'all'
          os: 'Linux'

.github/workflows/build-macos.yml (new file)

@@ -0,0 +1,160 @@
# This workflow builds and tests the binary on various MacOS configurations.
name: MacOS
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-macos
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"runner": ["self-hosted", "macOS", "ARM64", "devbox"]
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=OFF",
"-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"]
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
clang --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ inputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'MacOS'
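
As the comments in these workflows explain, each matrix lives in an env var as a JSON document: the generate-outputs job compacts it with jq and writes it to $GITHUB_OUTPUT, and the build-test job re-parses it with fromJson. A minimal local sketch of the jq step, assuming only bash and jq:

```
STRATEGY_MATRIX_BUILD_TYPE='[
"Debug",
"Release"
]'
# jq -c validates the JSON and compacts it onto a single line, which the
# name=value format written to $GITHUB_OUTPUT by echo requires. jq also
# exits non-zero on malformed JSON (e.g. unquoted strings), so a syntax
# slip in one of these env blocks fails the job before any build starts.
echo "strategy_matrix_build_type=$(jq -c <<< "${STRATEGY_MATRIX_BUILD_TYPE}")"
# -> strategy_matrix_build_type=["Debug","Release"]
```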

.github/workflows/build-rhel.yml

@@ -0,0 +1,202 @@
# This workflow builds and tests the binary on various Red Hat Enterprise Linux
# configurations.
name: RHEL
on:
workflow_call:
inputs:
build_dir:
description: 'The directory in which to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-rhel
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job. The matrix defined below should be kept in sync
# with https://github.com/XRPLF/ci/blob/main/.github/workflows/rhel.yml.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"platform": "linux/amd64",
"runner": ["self-hosted", "Linux", "X64", "devbox"]
},
{
"platform": "linux/arm64",
"runner": ["self-hosted", "Linux", "ARM64", "devbox"]
}
]
STRATEGY_MATRIX_OS: >-
[
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "clang",
"compiler_version": "any"
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "platform": "linux/amd64",
# "runner": ["self-hosted", "Linux", "X64"]
# }
# ]
# STRATEGY_MATRIX_OS: >-
# [
# {
# "distro": "rhel",
# "release": "9.6",
# "compiler_name": "gcc",
# "compiler_version": "13"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
${CC} --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ inputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'Linux'
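
Each matrix combination resolves the container template above to a concrete image tag; for example, the first OS entry paired with gcc 13 expands to (illustrative):

```
# matrix.os = { "distro": "rhel", "release": "9.6", "compiler_name": "gcc", "compiler_version": "13" }
# container: ghcr.io/xrplf/ci/rhel-9.6:gcc-13
```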

.github/workflows/build-ubuntu.yml

@@ -0,0 +1,225 @@
# This workflow builds and tests the binary on various Ubuntu configurations.
name: Ubuntu
on:
workflow_call:
inputs:
build_dir:
description: 'The directory in which to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-ubuntu
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job. The matrix defined below should be kept in sync
# with https://github.com/XRPLF/ci/blob/main/.github/workflows/ubuntu.yml.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"platform": "linux/amd64",
"runner": [self-hosted, Linux, X64, devbox]
},
{
"platform": "linux/arm64",
"runner": [self-hosted, Linux, ARM64, devbox]
}
]
STRATEGY_MATRIX_OS: >-
[
{
"distro": "ubuntu",
"release": "jammy",
"compiler_name": "gcc",
"compiler_version": "12"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "16"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "17"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "18"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "19"
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "platform": "linux/amd64",
# "runner": ["self-hosted", "Linux", "X64"]
# }
# ]
# STRATEGY_MATRIX_OS: >-
# [
# {
# "distro": "ubuntu",
# "release": "jammy",
# "compiler_name": "gcc",
# "compiler_version": "12"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
${CC} --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ inputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'Linux'

.github/workflows/build-windows.yml

@@ -0,0 +1,159 @@
# This workflow builds and tests the binary on various Windows configurations.
name: Windows
on:
workflow_call:
inputs:
build_dir:
description: 'The directory in which to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-windows
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"runner": ["self-hosted", "Windows", "devbox"]
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "runner": "windows-latest"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Debug"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: pwsh
run: |
echo "Checking path"
$env:PATH -split ';' | Sort-Object
echo "Checking environment variables."
- name: choose Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: 3.13
- name: install Conan
run: pip install wheel conan
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking Conan version."
conan --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ inputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Visual Studio 17 2022'
cmake_target: 'install'
os: 'Windows'
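
Note two differences from the Linux and MacOS jobs: Visual Studio is a multi-config generator, so the build type is selected at build time rather than configure time, and binaries land in a per-configuration subdirectory, which is consistent with the removed windows.yml later in this diff invoking ${build_dir}/<configuration>/rippled. A sketch of the distinction:

```
# Single-config generator (Linux/MacOS jobs): build type fixed at configure time.
cmake -S . -B .build -G Ninja -DCMAKE_BUILD_TYPE=Release
cmake --build .build --target all

# Multi-config generator (Windows job): build type chosen at build time, with
# outputs under a per-configuration subdirectory such as .build/Release/.
cmake -S . -B .build -G "Visual Studio 17 2022"
cmake --build .build --config Release --target install
```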

.github/workflows/check-clang-format.yml

@@ -0,0 +1,57 @@
# This workflow checks if the code is formatted according to the .clang-format
# rules.
name: Clang Format
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-clang-format
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
clang-format:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-clang-format
steps:
# The $GITHUB_WORKSPACE and ${{ github.workspace }} might not point to the
# same directory for jobs running in containers. The actions/checkout step
# is *supposed* to check out into $GITHUB_WORKSPACE and then add it to
# safe.directory (see instructions at https://github.com/actions/checkout)
# but that is apparently not happening for some container images. We
# therefore preemptively add both directories to safe.directory. See also
# https://github.com/actions/runner/issues/2058 for more details.
- name: Configure git safe.directory
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory ${{ github.workspace }}
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check version
run: clang-format --version
- name: Format code
run: find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format -i {} +
- name: Check for differences
env:
MESSAGE: |
One or more files did not conform to the formatting specified in
.clang-format. Maybe you did not run 'git-clang-format' or
'clang-format' before committing, or your version of clang-format
has an incompatibility with the one used here (see the "Check
version" step above).
Run 'git-clang-format --extensions cpp,h,hpp,ipp develop' in your
repo, and then commit and push the changes.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the files that changed to give the contributor a hint about
# what to expect when running git-clang-format on their own machine.
git status
echo "${MESSAGE}"
exit 1
fi

.github/workflows/check-levelization.yml

@@ -0,0 +1,46 @@
# This workflow checks if the dependencies between the modules are correctly
# indexed.
name: Levelization
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-levelization
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
levelization:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Check for differences
env:
MESSAGE: |
The dependency relationships between the modules in rippled have
changed, which may be an improvement or a regression.
A rule of thumb is that if your changes caused something to be
removed from loops.txt, it's probably an improvement, while if
something was added, it's probably a regression.
Run './Builds/levelization/levelization.sh' in your repo, and then
commit and push the changes. See Builds/levelization/README.md for
more info.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the differences to give the contributor a hint about what to
# expect when running levelization on their own machine.
git diff
echo "${MESSAGE}"
exit 1
fi

View File

@@ -1,26 +1,28 @@
name: Check libXRPL compatibility with Clio
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
CONAN_URL: https://conan.ripplex.io
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
on:
pull_request:
paths:
- 'src/libxrpl/protocol/BuildInfo.cpp'
- '.github/workflows/libxrpl.yml'
- '.github/workflows/check-libxrpl.yml'
types: [opened, reopened, synchronize, ready_for_review]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
publish:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
name: Publish libXRPL
outputs:
outcome: ${{ steps.upload.outputs.outcome }}
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.channel.outputs.channel }}
runs-on: [self-hosted, heavy]
container: rippleci/rippled-build-ubuntu:aaf5e3e
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
steps:
- name: Wait for essential checks to succeed
uses: lewagon/wait-on-check-action@v1.3.4
@@ -31,18 +33,15 @@ jobs:
repo-token: ${{ secrets.GITHUB_TOKEN }}
wait-interval: 10
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Generate channel
id: channel
shell: bash
run: |
echo channel="clio/pr_${{ github.event.pull_request.number }}" | tee ${GITHUB_OUTPUT}
- name: Export new package
shell: bash
run: |
conan export . ${{ steps.channel.outputs.channel }}
- name: Add Ripple Conan remote
shell: bash
run: |
conan remote list
conan remote remove ripple || true
@@ -50,13 +49,11 @@ jobs:
conan remote add ripple ${{ env.CONAN_URL }} --insert 0
- name: Parse new version
id: version
shell: bash
run: |
echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
| awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
- name: Try to authenticate to Ripple Conan remote
id: remote
shell: bash
run: |
# `conan user` implicitly uses the environment variables CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
# https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
@@ -67,7 +64,6 @@ jobs:
- name: Upload new package
id: upload
if: (steps.remote.outputs.outcome == 'success')
shell: bash
run: |
echo "conan upload version ${{ steps.version.outputs.version }} on channel ${{ steps.channel.outputs.channel }}"
echo outcome=$(conan upload xrpl/${{ steps.version.outputs.version }}@${{ steps.channel.outputs.channel }} --remote ripple --confirm >&2 \
@@ -81,7 +77,6 @@ jobs:
steps:
- name: Notify Clio about new version
if: (needs.publish.outputs.outcome == 'success')
shell: bash
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \

.github/workflows/check-libxrpl.yml

@@ -1,59 +0,0 @@
name: clang-format
on: [push, pull_request]
jobs:
check:
runs-on: ubuntu-24.04
env:
CLANG_VERSION: 18
steps:
- uses: actions/checkout@v4
- name: Install clang-format
run: |
codename=$( lsb_release --codename --short )
sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
EOF
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
sudo apt-get update
sudo apt-get install clang-format-${CLANG_VERSION}
- name: Format first-party sources
run: find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format-${CLANG_VERSION} -i {} +
- name: Check for differences
id: assert
run: |
set -o pipefail
git diff --exit-code | tee "clang-format.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: clang-format.patch
if-no-files-found: ignore
path: clang-format.patch
- name: What happened?
if: failure() && steps.assert.outcome == 'failure'
env:
PREAMBLE: |
If you are reading this, you are looking at a failed Github Actions
job. That means you pushed one or more files that did not conform
to the formatting specified in .clang-format. That may be because
you neglected to run 'git clang-format' or 'clang-format' before
committing, or that your version of clang-format has an
incompatibility with the one on this
machine, which is:
SUGGESTION: |
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run 'git-clang-format --extensions cpp,h,hpp,ipp develop'
in your repo, commit, and push.
run: |
echo "${PREAMBLE}"
clang-format-${CLANG_VERSION} --version
echo "${SUGGESTION}"
exit 1

.github/workflows/documentation.yml

@@ -0,0 +1,53 @@
name: Documentation
# TODO: Use `workflow_run` to trigger this workflow after checks have completed.
# This can only be done if the `checks` workflow already exists on the default
# branch (i.e. `develop`), so we cannot do this yet.
# See https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#workflow_run.
on:
push:
branches:
- develop
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
doxygen:
runs-on: ubuntu-latest
permissions:
contents: write
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
run: |
echo "Checking CMake version."
cmake --version
echo "Checking Doxygen version."
doxygen --version
- name: Build documentation
run: |
mkdir build
cd build
cmake -Donly_docs=TRUE ..
cmake --build . --target docs --parallel $(nproc)
- name: Publish documentation
uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: build/docs/html


@@ -1,37 +0,0 @@
name: Build and publish Doxygen documentation
# To test this workflow, push your changes to your fork's `develop` branch.
on:
push:
branches:
- develop
- doxygen
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
job:
runs-on: ubuntu-latest
permissions:
contents: write
container: rippleci/rippled-build-ubuntu:aaf5e3e
steps:
- name: checkout
uses: actions/checkout@v4
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
cmake --version
doxygen --version
env | sort
- name: build
run: |
mkdir build
cd build
cmake -Donly_docs=TRUE ..
cmake --build . --target docs --parallel $(nproc)
- name: publish
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: build/docs/html

.github/workflows/levelization.yml

@@ -1,49 +0,0 @@
name: levelization
on: [push, pull_request]
jobs:
check:
runs-on: ubuntu-latest
env:
CLANG_VERSION: 10
steps:
- uses: actions/checkout@v4
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Check for differences
id: assert
run: |
set -o pipefail
git diff --exit-code | tee "levelization.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: levelization.patch
if-no-files-found: ignore
path: levelization.patch
- name: What happened?
if: failure() && steps.assert.outcome == 'failure'
env:
MESSAGE: |
If you are reading this, you are looking at a failed Github
Actions job. That means you changed the dependency relationships
between the modules in rippled. That may be an improvement or a
regression. This check doesn't judge.
A rule of thumb, though, is that if your changes caused
something to be removed from loops.txt, that's probably an
improvement. If something was added, it's probably a regression.
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run './Builds/levelization/levelization.sh' in your repo,
commit, and push.
See Builds/levelization/README.md for more info.
run: |
echo "${MESSAGE}"
exit 1


@@ -1,94 +0,0 @@
name: macos
on:
pull_request:
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows, instrumentation)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- 'ci/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
strategy:
matrix:
platform:
- macos
generator:
- Ninja
configuration:
- Release
runs-on: [self-hosted, macOS]
env:
# The `build` action requires these variables.
build_dir: .build
NUM_PROCESSORS: 12
steps:
- name: checkout
uses: actions/checkout@v4
- name: install Conan
run: |
brew install conan@1
echo '/opt/homebrew/opt/conan@1/bin' >> $GITHUB_PATH
- name: install Ninja
if: matrix.generator == 'Ninja'
run: brew install ninja
- name: install python
run: |
if which python > /dev/null 2>&1; then
echo "Python executable exists"
else
brew install python@3.13
ln -s /opt/homebrew/bin/python3 /opt/homebrew/bin/python
fi
- name: install cmake
run: |
if which cmake > /dev/null 2>&1; then
echo "cmake executable exists"
else
brew install cmake
fi
- name: install nproc
run: |
brew install coreutils
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
python --version
conan --version
cmake --version
nproc --version
echo -n "nproc returns: "
nproc
- name: configure Conan
run : |
conan profile new default --detect || true
conan profile update settings.compiler.cppstd=20 default
- name: build dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: ${{ matrix.generator }}
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
n=$(nproc)
echo "Using $n test jobs"
${build_dir}/rippled --unittest --unittest-jobs $n

.github/workflows/main.yml

@@ -0,0 +1,90 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as MacOS and Windows.
name: Main
# This workflow is triggered on every push to the repository, including pull
# requests. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label. As GitHub Actions does not support such `if`
# conditions here, the individual jobs will check the pull request state and
# labels to determine whether to run or not. The workflows called by the jobs
# may also have their own conditions to partially or completely skip execution
# as needed.
on: push
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
# check-clang-format:
# if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
# uses: ./.github/workflows/check-clang-format.yml
#
# check-levelization:
# if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
# uses: ./.github/workflows/check-levelization.yml
debian:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-debian.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
# rhel:
## needs:
## - check-clang-format
## - check-levelization
# uses: ./.github/workflows/build-rhel.yml
# with:
# conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
# conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
# secrets:
# conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
# conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
ubuntu:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-ubuntu.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
macos:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-macos.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
windows:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-windows.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}


@@ -1,60 +1,66 @@
name: missing-commits
name: Check for missing commits
# TODO: Use `workflow_run` to trigger this workflow after checks have completed.
# This can only be done if the `checks` workflow already exists on the default
# branch (i.e. `develop`), so we cannot do this yet.
# See https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#workflow_run.
on:
push:
branches:
# Only check that the branches are up to date when updating the
# relevant branches.
- develop
- release
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
up_to_date:
runs-on: ubuntu-24.04
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- name: Check for missing commits
id: commits
env:
SUGGESTION: |
MESSAGE: |
If you are reading this, then the commits indicated above are
missing from "develop" and/or "release". Do a reverse-merge
as soon as possible. See CONTRIBUTING.md for instructions.
If you are reading this, then the commits indicated above are missing
from the "develop" and/or "release" branch. Do a reverse-merge as soon
as possible. See CONTRIBUTING.md for instructions.
run: |
set -o pipefail
# Branches ordered by how "canonical" they are. Every commit in
# one branch should be in all the branches behind it
order=( master release develop )
# Branches are ordered by how "canonical" they are. Every commit in one
# branch should be in all the branches behind it.
order=(master release develop)
branches=()
for branch in "${order[@]}"
do
# Check that the branches exist so that this job will work on
# forked repos, which don't necessarily have master and
# release branches.
for branch in "${order[@]}"; do
# Check that the branches exist so that this job will work on forked
# repos, which don't necessarily have master and release branches.
echo "Checking if ${branch} exists."
if git ls-remote --exit-code --heads origin \
refs/heads/${branch} > /dev/null
then
branches+=( origin/${branch} )
refs/heads/${branch} > /dev/null; then
branches+=(origin/${branch})
fi
done
prior=()
for branch in "${branches[@]}"
do
if [[ ${#prior[@]} -ne 0 ]]
then
echo "Checking ${prior[@]} for commits missing from ${branch}"
for branch in "${branches[@]}"; do
if [[ ${#prior[@]} -ne 0 ]]; then
echo "Checking ${prior[@]} for commits missing from ${branch}."
git log --oneline --no-merges "${prior[@]}" \
^$branch | tee -a "missing-commits.txt"
echo
fi
prior+=( "${branch}" )
prior+=("${branch}")
done
if [[ $( cat missing-commits.txt | wc -l ) -ne 0 ]]
then
echo "${SUGGESTION}"
if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
echo "${MESSAGE}"
exit 1
fi
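
Because the branches are ordered most-canonical first, the final loop iteration performs the complete check: commits reachable from master or release but absent from develop. A standalone equivalent, for running the same check locally:

```
# Any output means a reverse-merge into develop is overdue.
git log --oneline --no-merges origin/master origin/release ^origin/develop
```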


@@ -1,440 +0,0 @@
name: nix
on:
pull_request:
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- "ci/**"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# This workflow has multiple job matrixes.
# They can be considered phases because most of the matrices ("test",
# "coverage", "conan", ) depend on the first ("dependencies").
#
# The first phase has a job in the matrix for each combination of
# variables that affects dependency ABI:
# platform, compiler, and configuration.
# It creates a GitHub artifact holding the Conan profile,
# and builds and caches binaries for all the dependencies.
# If an Artifactory remote is configured, they are cached there.
# If not, they are added to the GitHub artifact.
# GitHub's "cache" action has a size limit (10 GB) that is too small
# to hold the binaries if they are built locally.
# We must use the "{upload,download}-artifact" actions instead.
#
# The remaining phases have a job in the matrix for each test
# configuration. They install dependency binaries from the cache,
# whichever was used, and build and test rippled.
#
# "instrumentation" is independent, but is included here because it also
# builds on linux in the same "on:" conditions.
jobs:
dependencies:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
- clang
configuration:
- Debug
- Release
include:
- compiler: gcc
profile:
version: 11
cc: /usr/bin/gcc
cxx: /usr/bin/g++
- compiler: clang
profile:
version: 14
cc: /usr/bin/clang-14
cxx: /usr/bin/clang++-14
runs-on: [self-hosted, heavy]
container: rippleci/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: checkout
uses: actions/checkout@v4
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
lsb_release -a || true
${{ matrix.profile.cc }} --version
conan --version
cmake --version
env | sort
- name: configure Conan
run: |
conan profile new default --detect
conan profile update settings.compiler.cppstd=20 default
conan profile update settings.compiler=${{ matrix.compiler }} default
conan profile update settings.compiler.version=${{ matrix.profile.version }} default
conan profile update settings.compiler.libcxx=libstdc++11 default
conan profile update env.CC=${{ matrix.profile.cc }} default
conan profile update env.CXX=${{ matrix.profile.cxx }} default
conan profile update conf.tools.build:compiler_executables='{"c": "${{ matrix.profile.cc }}", "cpp": "${{ matrix.profile.cxx }}"}' default
- name: archive profile
# Create this archive before dependencies are added to the local cache.
run: tar -czf conan.tar -C ~/.conan .
- name: build dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
with:
configuration: ${{ matrix.configuration }}
- name: upload archive
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
path: conan.tar
if-no-files-found: error
test:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
- clang
configuration:
- Debug
- Release
cmake-args:
-
- "-Dunity=ON"
needs: dependencies
runs-on: [self-hosted, heavy]
container: rippleci/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: download cache
uses: actions/download-artifact@v4
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@v4
- name: dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
${build_dir}/rippled --unittest --unittest-jobs $(nproc)
reference-fee-test:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
configuration:
- Debug
cmake-args:
- "-DUNIT_TEST_REFERENCE_FEE=200"
- "-DUNIT_TEST_REFERENCE_FEE=1000"
needs: dependencies
runs-on: [self-hosted, heavy]
container: rippleci/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: download cache
uses: actions/download-artifact@v4
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@v4
- name: dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
${build_dir}/rippled --unittest --unittest-jobs $(nproc)
coverage:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
configuration:
- Debug
needs: dependencies
runs-on: [self-hosted, heavy]
container: rippleci/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: download cache
uses: actions/download-artifact@v4
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: install gcovr
run: pip install "gcovr>=7,<8"
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
gcovr --version
env | sort
ls ~/.conan
- name: checkout
uses: actions/checkout@v4
- name: dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: >-
-Dassert=TRUE
-Dwerr=TRUE
-Dcoverage=ON
-Dcoverage_format=xml
-DCODE_COVERAGE_VERBOSE=ON
-DCMAKE_CXX_FLAGS="-O0"
-DCMAKE_C_FLAGS="-O0"
cmake-target: coverage
- name: move coverage report
shell: bash
run: |
mv "${build_dir}/coverage.xml" ./
- name: archive coverage report
uses: actions/upload-artifact@v4
with:
name: coverage.xml
path: coverage.xml
retention-days: 30
- name: upload coverage report
uses: wandalen/wretry.action@v1.4.10
with:
action: codecov/codecov-action@v4.5.0
with: |
files: coverage.xml
fail_ci_if_error: true
disable_search: true
verbose: true
plugin: noop
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 210000 # in milliseconds
conan:
needs: dependencies
runs-on: [self-hosted, heavy]
container: rippleci/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
configuration: Release
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: download cache
uses: actions/download-artifact@v4
with:
name: linux-gcc-${{ env.configuration }}
- name: extract cache
run: |
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@v4
- name: dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
with:
configuration: ${{ env.configuration }}
- name: export
run: |
version=$(conan inspect --raw version .)
reference="xrpl/${version}@local/test"
conan remove -f ${reference} || true
conan export . local/test
echo "reference=${reference}" >> "${GITHUB_ENV}"
- name: build
run: |
cd tests/conan
mkdir ${build_dir}
cd ${build_dir}
conan install .. --output-folder . \
--require-override ${reference} --build missing
cmake .. \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=./build/${configuration}/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${configuration}
cmake --build .
./example | grep '^[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+'
# NOTE we are not using dependencies built above because it lags with
# compiler versions. Instrumentation requires clang version 16 or
# later
instrumentation-build:
env:
CLANG_RELEASE: 16
strategy:
fail-fast: false
runs-on: [self-hosted, heavy]
container: debian:bookworm
steps:
- name: install prerequisites
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update
apt-get install --yes --no-install-recommends \
clang-${CLANG_RELEASE} clang++-${CLANG_RELEASE} \
python3-pip python-is-python3 make cmake git wget
apt-get clean
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-${CLANG_RELEASE} 100 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-${CLANG_RELEASE}
update-alternatives --auto clang
pip install --no-cache --break-system-packages "conan<2"
- name: checkout
uses: actions/checkout@v4
- name: prepare environment
run: |
mkdir ${GITHUB_WORKSPACE}/.build
echo "SOURCE_DIR=$GITHUB_WORKSPACE" >> $GITHUB_ENV
echo "BUILD_DIR=$GITHUB_WORKSPACE/.build" >> $GITHUB_ENV
echo "CC=/usr/bin/clang" >> $GITHUB_ENV
echo "CXX=/usr/bin/clang++" >> $GITHUB_ENV
- name: configure Conan
run: |
conan profile new --detect default
conan profile update settings.compiler=clang default
conan profile update settings.compiler.version=${CLANG_RELEASE} default
conan profile update settings.compiler.libcxx=libstdc++11 default
conan profile update settings.compiler.cppstd=20 default
conan profile update options.rocksdb=False default
conan profile update \
'conf.tools.build:compiler_executables={"c": "/usr/bin/clang", "cpp": "/usr/bin/clang++"}' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
conan export external/snappy snappy/1.1.10@
conan export external/soci soci/4.0.3@
- name: build dependencies
run: |
cd ${BUILD_DIR}
conan install ${SOURCE_DIR} \
--output-folder ${BUILD_DIR} \
--install-folder ${BUILD_DIR} \
--build missing \
--settings build_type=Debug
- name: build with instrumentation
run: |
cd ${BUILD_DIR}
cmake -S ${SOURCE_DIR} -B ${BUILD_DIR} \
-Dvoidstar=ON \
-Dtests=ON \
-Dxrpld=ON \
-DCMAKE_BUILD_TYPE=Debug \
-DSECP256K1_BUILD_BENCHMARK=OFF \
-DSECP256K1_BUILD_TESTS=OFF \
-DSECP256K1_BUILD_EXHAUSTIVE_TESTS=OFF \
-DCMAKE_TOOLCHAIN_FILE=${BUILD_DIR}/build/generators/conan_toolchain.cmake
cmake --build . --parallel $(nproc)
- name: verify instrumentation enabled
run: |
cd ${BUILD_DIR}
./rippled --version | grep libvoidstar
- name: run unit tests
run: |
cd ${BUILD_DIR}
./rippled -u --unittest-jobs $(( $(nproc)/4 ))


@@ -1,97 +0,0 @@
name: windows
on:
pull_request:
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows, instrumentation)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- 'ci/**'
# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
strategy:
fail-fast: false
matrix:
version:
- generator: Visual Studio 17 2022
runs-on: windows-2022
configuration:
- type: Release
tests: true
- type: Debug
# Skip running unit tests on debug builds, because they
# take an unreasonable amount of time
tests: false
runtime: d
runs-on: ${{ matrix.version.runs-on }}
env:
build_dir: .build
steps:
- name: checkout
uses: actions/checkout@v4
- name: choose Python
uses: actions/setup-python@v5
with:
python-version: 3.9
- name: learn Python cache directory
id: pip-cache
shell: bash
run: |
python -m pip install --upgrade pip
echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
- name: restore Python cache directory
uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
- name: install Conan
run: pip install wheel 'conan<2'
- name: check environment
run: |
dir env:
$env:PATH -split ';'
python --version
conan --version
cmake --version
- name: configure Conan
shell: bash
run: |
conan profile new default --detect
conan profile update settings.compiler.cppstd=20 default
conan profile update \
settings.compiler.runtime=MT${{ matrix.configuration.runtime }} \
default
- name: build dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
with:
configuration: ${{ matrix.configuration.type }}
- name: build
uses: ./.github/actions/build
with:
generator: '${{ matrix.version.generator }}'
configuration: ${{ matrix.configuration.type }}
# Hard code for now. Move to the matrix if varied options are needed
cmake-args: '-Dassert=TRUE -Dwerr=TRUE -Dreporting=OFF -Dunity=ON'
cmake-target: install
- name: test
shell: bash
if: ${{ matrix.configuration.tests }}
run: |
${build_dir}/${{ matrix.configuration.type }}/rippled --unittest \
--unittest-jobs $(nproc)


@@ -1,6 +1,6 @@
# .pre-commit-config.yaml
repos:
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v18.1.3
rev: v18.1.8
hooks:
- id: clang-format


@@ -83,9 +83,17 @@ The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conve
The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead.
## XRP Ledger server version 2.5.0
As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu).
### Additions and bugfixes in 2.5.0
- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled.
## XRP Ledger server version 2.4.0
As of 2025-01-28, version 2.4.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu).
[Version 2.4.0](https://github.com/XRPLF/rippled/releases/tag/2.4.0) was released on March 4, 2025.
### Additions and bugfixes in 2.4.0


@@ -167,43 +167,18 @@ It does not explicitly link the C++ standard library,
which allows you to statically link it with GCC, if you want.
```
# Conan 1.x
conan export external/snappy snappy/1.1.10@
# Conan 2.x
conan export --version 1.1.10 external/snappy
```
Export our [Conan recipe for RocksDB](./external/rocksdb).
It does not override paths to dependencies when building with Visual Studio.
```
# Conan 1.x
conan export external/rocksdb rocksdb/9.7.3@
# Conan 2.x
conan export --version 9.7.3 external/rocksdb
```
Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.
```
# Conan 1.x
conan export external/soci soci/4.0.3@
# Conan 2.x
conan export --version 4.0.3 external/soci
```
Export our [Conan recipe for NuDB](./external/nudb).
It fixes some source files to add missing `#include`s.
```
# Conan 1.x
conan export external/nudb nudb/2.0.8@
# Conan 2.x
conan export --version 2.0.8 external/nudb
```
### Build and Test
1. Create a build directory and move into it.
@@ -288,7 +263,7 @@ It fixes some source files to add missing `#include`s.
Single-config generators:
```
cmake --build .
cmake --build . -j $(nproc)
```
Multi-config generators:
@@ -395,18 +370,13 @@ and can be helpful for detecting `#include` omissions.
## Troubleshooting
### Conan
After any updates or changes to dependencies, you may need to do the following:
1. Remove your build directory.
2. Remove the Conan cache:
```
rm -rf ~/.conan/data
```
4. Re-run [conan install](#build-and-test).
2. Remove the Conan cache: `conan remove "*" -c`
3. Re-run [conan install](#build-and-test).
### 'protobuf/port_def.inc' file not found
@@ -424,54 +394,6 @@ For example, if you want to build Debug:
1. For conan install, pass `--settings build_type=Debug`
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
### no std::result_of
If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.
```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
```
### call to 'async_teardown' is ambiguous
If you are compiling with an early version of Clang 16, then you might hit
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
header][7]. You can workaround it by adding this preprocessor definition:
```
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
```
### recompile with -fPIC
If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:
```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```
Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.
```
conan install --build boost ...
```
## Add a Dependency
If you want to experiment with a new package, follow these steps:

View File

@@ -72,15 +72,15 @@ It generates many files of [results](results):
desired as described above. In a perfect repo, this file will be
empty.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../.github/workflows/levelization.yml) to validate
Github workflow](../../.github/workflows/check-levelization.yml) to validate
that nothing changed.
* [`ordering.txt`](results/ordering.txt): A list showing relationships
between modules where there are no loops as they actually exist, as
opposed to how they are desired as described above.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../.github/workflows/levelization.yml) to validate
Github workflow](../../.github/workflows/check-levelization.yml) to validate
that nothing changed.
* [`levelization.yml`](../../.github/workflows/levelization.yml)
* [`levelization.yml`](../../.github/workflows/check-levelization.yml)
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or

View File

@@ -132,6 +132,7 @@ test.shamap > xrpl.protocol
test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
xrpl.json > xrpl.basics
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json

View File

@@ -90,6 +90,11 @@ set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
set(SECP256K1_INSTALL TRUE)
set(SECP256K1_BUILD_BENCHMARK FALSE)
set(SECP256K1_BUILD_TESTS FALSE)
set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
set(SECP256K1_BUILD_EXAMPLES FALSE)
add_subdirectory(external/secp256k1)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(external/ed25519-donna)
@@ -144,3 +149,8 @@ set(PROJECT_EXPORT_SET RippleExports)
include(RippledCore)
include(RippledInstall)
include(RippledValidatorKeys)
if(tests)
include(CTest)
add_subdirectory(src/tests/libxrpl)
endif()

View File

@@ -1,3 +1,5 @@
[![codecov](https://codecov.io/gh/XRPLF/rippled/graph/badge.svg?token=WyFr5ajq3O)](https://codecov.io/gh/XRPLF/rippled)
# The XRP Ledger
The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.

File diff suppressed because it is too large

View File

@@ -83,7 +83,7 @@ To report a qualifying bug, please send a detailed report to:
|Long Key ID | `0xCD49A0AFC57929BE` |
|Fingerprint | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE` |
The full PGP key for this address, which is also available on several key servers (e.g. on [keys.gnupg.net](https://keys.gnupg.net)), is:
The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
```
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt

View File

@@ -26,7 +26,7 @@
#
# Examples:
# https://vl.ripple.com
# https://vl.xrplf.org
# https://unl.xrplf.org
# http://127.0.0.1:8000
# file:///etc/opt/ripple/vl.txt
#

View File

@@ -98,6 +98,9 @@
# 2024-04-03, Bronek Kozicki
# - add support for output formats: jacoco, clover, lcov
#
# 2025-05-12, Jingchen Wu
# - add -fprofile-update=atomic to ensure atomic profile generation
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
@@ -200,15 +203,27 @@ set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag)
include(CheckCCompilerFlag)
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if(HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
include(CheckCCompilerFlag)
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if(HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
check_cxx_compiler_flag(-fprofile-update HAVE_cxx_fprofile_update)
if(HAVE_cxx_fprofile_update)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
endif()
check_c_compiler_flag(-fprofile-update HAVE_c_fprofile_update)
if(HAVE_c_fprofile_update)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
endif()
endif()
set(CMAKE_Fortran_FLAGS_COVERAGE

View File

@@ -90,28 +90,15 @@ if (MSVC)
-errorreport:none
-machine:X64)
else ()
# HACK : because these need to come first, before any warning demotion
string (APPEND CMAKE_CXX_FLAGS " -Wall -Wdeprecated")
if (wextra)
string (APPEND CMAKE_CXX_FLAGS " -Wextra -Wno-unused-parameter")
endif ()
# not MSVC
target_compile_options (common
INTERFACE
-Wall
-Wdeprecated
$<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
$<$<BOOL:${werr}>:-Werror>
$<$<COMPILE_LANGUAGE:CXX>:
-frtti
-Wnon-virtual-dtor
>
-Wno-sign-compare
-Wno-char-subscripts
-Wno-format
-Wno-unused-local-typedefs
-fstack-protector
$<$<BOOL:${is_gcc}>:
-Wno-unused-but-set-variable
-Wno-deprecated
>
-Wno-sign-compare
-Wno-unused-but-set-variable
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
# tweak gcc optimization for debug
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>

View File

@@ -53,9 +53,9 @@ set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake")
file(WRITE
"${download_script}"
"file(DOWNLOAD \
http://upload.cppreference.com/mwiki/images/b/b2/html_book_20190607.zip \
https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \
${CMAKE_BINARY_DIR}/docs/cppreference.zip \
EXPECTED_HASH MD5=82b3a612d7d35a83e3cb1195a63689ab \
EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \
)\n \
execute_process( \
COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \

View File

@@ -2,16 +2,6 @@
convenience variables and sanity checks
#]===================================================================]
include(ProcessorCount)
if (NOT ep_procs)
ProcessorCount(ep_procs)
if (ep_procs GREATER 1)
# never use more than half of cores for EP builds
math (EXPR ep_procs "${ep_procs} / 2")
message (STATUS "Using ${ep_procs} cores for ExternalProject builds.")
endif ()
endif ()
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)

View File

@@ -18,7 +18,7 @@ if(tests)
endif()
endif()
option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
option(unity "Creates a build using UNITY support in cmake." OFF)
if(unity)
if(NOT is_ci)
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")

View File

@@ -2,7 +2,6 @@ find_package(Boost 1.82 REQUIRED
COMPONENTS
chrono
container
context
coroutine
date_time
filesystem
@@ -24,7 +23,7 @@ endif()
target_link_libraries(ripple_boost
INTERFACE
Boost::boost
Boost::headers
Boost::chrono
Boost::container
Boost::coroutine

cmake/xrpl_add_test.cmake Normal file
View File

@@ -0,0 +1,41 @@
include(isolate_headers)
function(xrpl_add_test name)
set(target ${PROJECT_NAME}.test.${name})
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
)
add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})
isolate_headers(
${target}
"${CMAKE_SOURCE_DIR}"
"${CMAKE_SOURCE_DIR}/tests/${name}"
PRIVATE
)
# Make sure the test isn't optimized away in unity builds
set_target_properties(${target} PROPERTIES
UNITY_BUILD_MODE GROUP
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
add_test(NAME ${target} COMMAND ${target})
set_tests_properties(
${target} PROPERTIES
FIXTURES_REQUIRED ${target}_fixture
)
add_test(
NAME ${target}.build
COMMAND
${CMAKE_COMMAND}
--build ${CMAKE_BINARY_DIR}
--config $<CONFIG>
--target ${target}
)
set_tests_properties(${target}.build PROPERTIES
FIXTURES_SETUP ${target}_fixture
)
endfunction()

conan/profiles/default Normal file
View File

@@ -0,0 +1,34 @@
{% set os = detect_api.detect_os() %}
{% set arch = detect_api.detect_arch() %}
{% set compiler, version, compiler_exe = detect_api.detect_default_compiler() %}
{% set compiler_version = version %}
{% if os == "Linux" %}
{% set compiler_version = detect_api.default_compiler_version(compiler, version) %}
{% endif %}
[settings]
os={{ os }}
arch={{ arch }}
build_type=Debug
compiler={{compiler}}
compiler.version={{ compiler_version }}
compiler.cppstd=20
{% if os == "Windows" %}
compiler.runtime=static
{% else %}
compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
{% endif %}
[conf]
{% if compiler == "clang" and compiler_version >= 19 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "apple-clang" and compiler_version >= 17 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags=['-Wno-restrict']
{% endif %}
[tool_requires]
!cmake/*: cmake/[>=3 <4]
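For readability, the `[conf]` conditionals above can be restated as a plain-Python sketch (compiler names and version thresholds are copied from the Jinja template; nothing beyond it is implied):

```
def extra_cxxflags(compiler, version):
    """Mirror the [conf] gating in the default profile."""
    if compiler == "clang" and version >= 19:
        return ["-Wno-missing-template-arg-list-after-template-kw"]
    if compiler == "apple-clang" and version >= 17:
        return ["-Wno-missing-template-arg-list-after-template-kw"]
    if compiler == "gcc" and version < 13:
        return ["-Wno-restrict"]
    return []

assert extra_cxxflags("gcc", 12) == ["-Wno-restrict"]
assert extra_cxxflags("clang", 19) == ["-Wno-missing-template-arg-list-after-template-kw"]
assert extra_cxxflags("gcc", 14) == []
```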

View File

@@ -1,4 +1,4 @@
from conan import ConanFile
from conan import ConanFile, __version__ as conan_version
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
import re
@@ -24,18 +24,20 @@ class Xrpl(ConanFile):
}
requires = [
'date/3.0.3',
'grpc/1.50.1',
'libarchive/3.7.6',
'nudb/2.0.8',
'openssl/1.1.1v',
'libarchive/3.8.1',
'nudb/2.0.9',
'openssl/1.1.1w',
'soci/4.0.3',
'xxhash/0.8.2',
'zlib/1.3.1',
]
test_requires = [
'doctest/2.4.11',
]
tool_requires = [
'protobuf/3.21.9',
'protobuf/3.21.12',
]
default_options = {
@@ -87,26 +89,31 @@ class Xrpl(ConanFile):
}
def set_version(self):
path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
regex = r'versionString\s?=\s?\"(.*)\"'
with open(path, 'r') as file:
matches = (re.search(regex, line) for line in file)
match = next(m for m in matches if m)
self.version = match.group(1)
if self.version is None:
path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
regex = r'versionString\s?=\s?\"(.*)\"'
with open(path, encoding='utf-8') as file:
matches = (re.search(regex, line) for line in file)
match = next(m for m in matches if m)
self.version = match.group(1)
def configure(self):
if self.settings.compiler == 'apple-clang':
self.options['boost'].visibility = 'global'
def requirements(self):
self.requires('boost/1.83.0', force=True)
# Conan 2 requires transitive headers to be specified
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
self.requires('boost/1.86.0', force=True, **transitive_headers_opt)
self.requires('date/3.0.4', **transitive_headers_opt)
self.requires('lz4/1.10.0', force=True)
self.requires('protobuf/3.21.9', force=True)
self.requires('sqlite3/3.47.0', force=True)
self.requires('protobuf/3.21.12', force=True)
self.requires('sqlite3/3.49.1', force=True)
if self.options.jemalloc:
self.requires('jemalloc/5.3.0')
if self.options.rocksdb:
self.requires('rocksdb/9.7.3')
self.requires('rocksdb/10.0.1')
self.requires('xxhash/0.8.3', **transitive_headers_opt)
exports_sources = (
'CMakeLists.txt',
@@ -161,7 +168,17 @@ class Xrpl(ConanFile):
# `include/`, not `include/ripple/proto/`.
libxrpl.includedirs = ['include', 'include/ripple/proto']
libxrpl.requires = [
'boost::boost',
'boost::headers',
'boost::chrono',
'boost::container',
'boost::coroutine',
'boost::date_time',
'boost::filesystem',
'boost::json',
'boost::program_options',
'boost::regex',
'boost::system',
'boost::thread',
'date::date',
'grpc::grpc++',
'libarchive::libarchive',
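As an aside on the `set_version` logic above: it scans `BuildInfo.cpp` for the first line matching the `versionString` regex. A standalone sketch of that extraction (the sample line is invented for illustration):

```
import re

# Invented sample of the line set_version() looks for in BuildInfo.cpp.
sample = 'char const* const versionString = "2.5.0"'

regex = r'versionString\s?=\s?\"(.*)\"'
match = re.search(regex, sample)
assert match is not None
assert match.group(1) == "2.5.0"
```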

View File

@@ -23,7 +23,7 @@ direction.
```
apt update
apt install --yes curl git libssl-dev python3.10-dev python3-pip make g++-11 libprotobuf-dev protobuf-compiler
apt install --yes curl git libssl-dev pipx python3.10-dev python3-pip make g++-11 libprotobuf-dev protobuf-compiler
curl --location --remote-name \
"https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1.tar.gz"
@@ -35,7 +35,8 @@ make --jobs $(nproc)
make install
cd ..
pip3 install 'conan<2'
pipx install 'conan<2'
pipx ensurepath
```
[1]: https://github.com/thejohnfreeman/rippled-docker/blob/master/ubuntu-22.04/install.sh

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.25)
cmake_minimum_required(VERSION 3.18)
# Note, version set explicitly by rippled project
project(antithesis-sdk-cpp VERSION 0.4.4 LANGUAGES CXX)

View File

@@ -17,6 +17,9 @@ add_library(ed25519 STATIC
)
add_library(ed25519::ed25519 ALIAS ed25519)
target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)
if(NOT MSVC)
target_compile_options(ed25519 PRIVATE -Wno-implicit-fallthrough)
endif()
include(GNUInstallDirs)

View File

@@ -1,10 +0,0 @@
sources:
"2.0.8":
url: "https://github.com/CPPAlliance/NuDB/archive/2.0.8.tar.gz"
sha256: "9b71903d8ba111cd893ab064b9a8b6ac4124ed8bd6b4f67250205bc43c7f13a8"
patches:
"2.0.8":
- patch_file: "patches/2.0.8-0001-add-include-stdexcept-for-msvc.patch"
patch_description: "Fix build for MSVC by including stdexcept"
patch_type: "portability"
patch_source: "https://github.com/cppalliance/NuDB/pull/100/files"

View File

@@ -1,72 +0,0 @@
import os
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get
from conan.tools.layout import basic_layout
required_conan_version = ">=1.52.0"
class NudbConan(ConanFile):
name = "nudb"
description = "A fast key/value insert-only database for SSD drives in C++11"
license = "BSL-1.0"
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/CPPAlliance/NuDB"
topics = ("header-only", "KVS", "insert-only")
package_type = "header-library"
settings = "os", "arch", "compiler", "build_type"
no_copy_source = True
@property
def _min_cppstd(self):
return 11
def export_sources(self):
export_conandata_patches(self)
def layout(self):
basic_layout(self, src_folder="src")
def requirements(self):
self.requires("boost/1.83.0")
def package_id(self):
self.info.clear()
def validate(self):
if self.settings.compiler.cppstd:
check_min_cppstd(self, self._min_cppstd)
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def build(self):
apply_conandata_patches(self)
def package(self):
copy(self, "LICENSE*",
dst=os.path.join(self.package_folder, "licenses"),
src=self.source_folder)
copy(self, "*",
dst=os.path.join(self.package_folder, "include"),
src=os.path.join(self.source_folder, "include"))
def package_info(self):
self.cpp_info.bindirs = []
self.cpp_info.libdirs = []
self.cpp_info.set_property("cmake_target_name", "NuDB")
self.cpp_info.set_property("cmake_target_aliases", ["NuDB::nudb"])
self.cpp_info.set_property("cmake_find_mode", "both")
self.cpp_info.components["core"].set_property("cmake_target_name", "nudb")
self.cpp_info.components["core"].names["cmake_find_package"] = "nudb"
self.cpp_info.components["core"].names["cmake_find_package_multi"] = "nudb"
self.cpp_info.components["core"].requires = ["boost::thread", "boost::system"]
# TODO: to remove in conan v2 once cmake_find_package_* generators removed
self.cpp_info.names["cmake_find_package"] = "NuDB"
self.cpp_info.names["cmake_find_package_multi"] = "NuDB"

View File

@@ -1,24 +0,0 @@
diff --git a/include/nudb/detail/stream.hpp b/include/nudb/detail/stream.hpp
index 6c07bf1..e0ce8ed 100644
--- a/include/nudb/detail/stream.hpp
+++ b/include/nudb/detail/stream.hpp
@@ -14,6 +14,7 @@
#include <cstdint>
#include <cstring>
#include <memory>
+#include <stdexcept>
namespace nudb {
namespace detail {
diff --git a/include/nudb/impl/context.ipp b/include/nudb/impl/context.ipp
index beb7058..ffde0b3 100644
--- a/include/nudb/impl/context.ipp
+++ b/include/nudb/impl/context.ipp
@@ -9,6 +9,7 @@
#define NUDB_IMPL_CONTEXT_IPP
#include <nudb/detail/store_base.hpp>
+#include <stdexcept>
namespace nudb {

View File

@@ -1,12 +0,0 @@
sources:
"9.7.3":
url: "https://github.com/facebook/rocksdb/archive/refs/tags/v9.7.3.tar.gz"
sha256: "acfabb989cbfb5b5c4d23214819b059638193ec33dad2d88373c46448d16d38b"
patches:
"9.7.3":
- patch_file: "patches/9.x.x-0001-exclude-thirdparty.patch"
patch_description: "Do not include thirdparty.inc"
patch_type: "portability"
- patch_file: "patches/9.7.3-0001-memory-leak.patch"
patch_description: "Fix a leak of obsolete blob files left open until DB::Close()"
patch_type: "portability"

View File

@@ -1,235 +0,0 @@
import os
import glob
import shutil
from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, collect_libs, copy, export_conandata_patches, get, rm, rmdir
from conan.tools.microsoft import check_min_vs, is_msvc, is_msvc_static_runtime
from conan.tools.scm import Version
required_conan_version = ">=1.53.0"
class RocksDBConan(ConanFile):
name = "rocksdb"
description = "A library that provides an embeddable, persistent key-value store for fast storage"
license = ("GPL-2.0-only", "Apache-2.0")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/facebook/rocksdb"
topics = ("database", "leveldb", "facebook", "key-value")
package_type = "library"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"lite": [True, False],
"with_gflags": [True, False],
"with_snappy": [True, False],
"with_lz4": [True, False],
"with_zlib": [True, False],
"with_zstd": [True, False],
"with_tbb": [True, False],
"with_jemalloc": [True, False],
"enable_sse": [False, "sse42", "avx2"],
"use_rtti": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"lite": False,
"with_snappy": False,
"with_lz4": False,
"with_zlib": False,
"with_zstd": False,
"with_gflags": False,
"with_tbb": False,
"with_jemalloc": False,
"enable_sse": False,
"use_rtti": False,
}
@property
def _min_cppstd(self):
return "11" if Version(self.version) < "8.8.1" else "17"
@property
def _compilers_minimum_version(self):
return {} if self._min_cppstd == "11" else {
"apple-clang": "10",
"clang": "7",
"gcc": "7",
"msvc": "191",
"Visual Studio": "15",
}
def export_sources(self):
export_conandata_patches(self)
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
if self.settings.arch != "x86_64":
del self.options.with_tbb
if self.settings.build_type == "Debug":
self.options.use_rtti = True # Rtti are used in asserts for debug mode...
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def layout(self):
cmake_layout(self, src_folder="src")
def requirements(self):
if self.options.with_gflags:
self.requires("gflags/2.2.2")
if self.options.with_snappy:
self.requires("snappy/1.1.10")
if self.options.with_lz4:
self.requires("lz4/1.10.0")
if self.options.with_zlib:
self.requires("zlib/[>=1.2.11 <2]")
if self.options.with_zstd:
self.requires("zstd/1.5.6")
if self.options.get_safe("with_tbb"):
self.requires("onetbb/2021.12.0")
if self.options.with_jemalloc:
self.requires("jemalloc/5.3.0")
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, self._min_cppstd)
minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), False)
if minimum_version and Version(self.settings.compiler.version) < minimum_version:
raise ConanInvalidConfiguration(
f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support."
)
if self.settings.arch not in ["x86_64", "ppc64le", "ppc64", "mips64", "armv8"]:
raise ConanInvalidConfiguration("Rocksdb requires 64 bits")
check_min_vs(self, "191")
if self.version == "6.20.3" and \
self.settings.os == "Linux" and \
self.settings.compiler == "gcc" and \
Version(self.settings.compiler.version) < "5":
raise ConanInvalidConfiguration("Rocksdb 6.20.3 is not compilable with gcc <5.") # See https://github.com/facebook/rocksdb/issues/3522
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.variables["FAIL_ON_WARNINGS"] = False
tc.variables["WITH_TESTS"] = False
tc.variables["WITH_TOOLS"] = False
tc.variables["WITH_CORE_TOOLS"] = False
tc.variables["WITH_BENCHMARK_TOOLS"] = False
tc.variables["WITH_FOLLY_DISTRIBUTED_MUTEX"] = False
if is_msvc(self):
tc.variables["WITH_MD_LIBRARY"] = not is_msvc_static_runtime(self)
tc.variables["ROCKSDB_INSTALL_ON_WINDOWS"] = self.settings.os == "Windows"
tc.variables["ROCKSDB_LITE"] = self.options.lite
tc.variables["WITH_GFLAGS"] = self.options.with_gflags
tc.variables["WITH_SNAPPY"] = self.options.with_snappy
tc.variables["WITH_LZ4"] = self.options.with_lz4
tc.variables["WITH_ZLIB"] = self.options.with_zlib
tc.variables["WITH_ZSTD"] = self.options.with_zstd
tc.variables["WITH_TBB"] = self.options.get_safe("with_tbb", False)
tc.variables["WITH_JEMALLOC"] = self.options.with_jemalloc
tc.variables["ROCKSDB_BUILD_SHARED"] = self.options.shared
tc.variables["ROCKSDB_LIBRARY_EXPORTS"] = self.settings.os == "Windows" and self.options.shared
tc.variables["ROCKSDB_DLL" ] = self.settings.os == "Windows" and self.options.shared
tc.variables["USE_RTTI"] = self.options.use_rtti
if not bool(self.options.enable_sse):
tc.variables["PORTABLE"] = True
tc.variables["FORCE_SSE42"] = False
elif self.options.enable_sse == "sse42":
tc.variables["PORTABLE"] = True
tc.variables["FORCE_SSE42"] = True
elif self.options.enable_sse == "avx2":
tc.variables["PORTABLE"] = False
tc.variables["FORCE_SSE42"] = False
# not available yet in CCI
tc.variables["WITH_NUMA"] = False
tc.generate()
deps = CMakeDeps(self)
if self.options.with_jemalloc:
deps.set_property("jemalloc", "cmake_file_name", "JeMalloc")
deps.set_property("jemalloc", "cmake_target_name", "JeMalloc::JeMalloc")
if self.options.with_zstd:
deps.set_property("zstd", "cmake_target_name", "zstd::zstd")
deps.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def _remove_static_libraries(self):
rm(self, "rocksdb.lib", os.path.join(self.package_folder, "lib"))
for lib in glob.glob(os.path.join(self.package_folder, "lib", "*.a")):
if not lib.endswith(".dll.a"):
os.remove(lib)
def _remove_cpp_headers(self):
for path in glob.glob(os.path.join(self.package_folder, "include", "rocksdb", "*")):
if path != os.path.join(self.package_folder, "include", "rocksdb", "c.h"):
if os.path.isfile(path):
os.remove(path)
else:
shutil.rmtree(path)
def package(self):
copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
copy(self, "LICENSE*", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
cmake = CMake(self)
cmake.install()
if self.options.shared:
self._remove_static_libraries()
self._remove_cpp_headers() # Force stable ABI for shared libraries
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig"))
def package_info(self):
cmake_target = "rocksdb-shared" if self.options.shared else "rocksdb"
self.cpp_info.set_property("cmake_file_name", "RocksDB")
self.cpp_info.set_property("cmake_target_name", f"RocksDB::{cmake_target}")
# TODO: back to global scope in conan v2 once cmake_find_package* generators removed
self.cpp_info.components["librocksdb"].libs = collect_libs(self)
if self.settings.os == "Windows":
self.cpp_info.components["librocksdb"].system_libs = ["shlwapi", "rpcrt4"]
if self.options.shared:
self.cpp_info.components["librocksdb"].defines = ["ROCKSDB_DLL"]
elif self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["librocksdb"].system_libs = ["pthread", "m"]
if self.options.lite:
self.cpp_info.components["librocksdb"].defines.append("ROCKSDB_LITE")
# TODO: to remove in conan v2 once cmake_find_package* generators removed
self.cpp_info.names["cmake_find_package"] = "RocksDB"
self.cpp_info.names["cmake_find_package_multi"] = "RocksDB"
self.cpp_info.components["librocksdb"].names["cmake_find_package"] = cmake_target
self.cpp_info.components["librocksdb"].names["cmake_find_package_multi"] = cmake_target
self.cpp_info.components["librocksdb"].set_property("cmake_target_name", f"RocksDB::{cmake_target}")
if self.options.with_gflags:
self.cpp_info.components["librocksdb"].requires.append("gflags::gflags")
if self.options.with_snappy:
self.cpp_info.components["librocksdb"].requires.append("snappy::snappy")
if self.options.with_lz4:
self.cpp_info.components["librocksdb"].requires.append("lz4::lz4")
if self.options.with_zlib:
self.cpp_info.components["librocksdb"].requires.append("zlib::zlib")
if self.options.with_zstd:
self.cpp_info.components["librocksdb"].requires.append("zstd::zstd")
if self.options.get_safe("with_tbb"):
self.cpp_info.components["librocksdb"].requires.append("onetbb::onetbb")
if self.options.with_jemalloc:
self.cpp_info.components["librocksdb"].requires.append("jemalloc::jemalloc")
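The `enable_sse` handling in `generate()` above folds one option into two CMake variables; a small pure-Python restatement of that mapping (values copied from the recipe):

```
def sse_variables(enable_sse):
    """Return (PORTABLE, FORCE_SSE42) as the recipe computed them."""
    if not enable_sse:         # enable_sse=False
        return True, False     # portable build, SSE4.2 not required
    if enable_sse == "sse42":
        return True, True      # portable, but require SSE4.2
    if enable_sse == "avx2":
        return False, False    # non-portable: tune for the host CPU
    raise ValueError(enable_sse)

assert sse_variables(False) == (True, False)
assert sse_variables("sse42") == (True, True)
assert sse_variables("avx2") == (False, False)
```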

View File

@@ -1,319 +0,0 @@
diff --git a/HISTORY.md b/HISTORY.md
index 36d472229..05ad1a202 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,6 +1,10 @@
# Rocksdb Change Log
> NOTE: Entries for next release do not go here. Follow instructions in `unreleased_history/README.txt`
+## 9.7.4 (10/31/2024)
+### Bug Fixes
+* Fix a leak of obsolete blob files left open until DB::Close(). This bug was introduced in version 9.4.0.
+
## 9.7.3 (10/16/2024)
### Behavior Changes
* OPTIONS file to be loaded by remote worker is now preserved so that it does not get purged by the primary host. A similar technique as how we are preserving new SST files from getting purged is used for this. min_options_file_numbers_ is tracked like pending_outputs_ is tracked.
diff --git a/db/blob/blob_file_cache.cc b/db/blob/blob_file_cache.cc
index 5f340aadf..1b9faa238 100644
--- a/db/blob/blob_file_cache.cc
+++ b/db/blob/blob_file_cache.cc
@@ -42,6 +42,7 @@ Status BlobFileCache::GetBlobFileReader(
assert(blob_file_reader);
assert(blob_file_reader->IsEmpty());
+ // NOTE: sharing same Cache with table_cache
const Slice key = GetSliceForKey(&blob_file_number);
assert(cache_);
@@ -98,4 +99,13 @@ Status BlobFileCache::GetBlobFileReader(
return Status::OK();
}
+void BlobFileCache::Evict(uint64_t blob_file_number) {
+ // NOTE: sharing same Cache with table_cache
+ const Slice key = GetSliceForKey(&blob_file_number);
+
+ assert(cache_);
+
+ cache_.get()->Erase(key);
+}
+
} // namespace ROCKSDB_NAMESPACE
diff --git a/db/blob/blob_file_cache.h b/db/blob/blob_file_cache.h
index 740e67ada..6858d012b 100644
--- a/db/blob/blob_file_cache.h
+++ b/db/blob/blob_file_cache.h
@@ -36,6 +36,15 @@ class BlobFileCache {
uint64_t blob_file_number,
CacheHandleGuard<BlobFileReader>* blob_file_reader);
+ // Called when a blob file is obsolete to ensure it is removed from the cache
+ // to avoid effectively leaking the open file and associated memory
+ void Evict(uint64_t blob_file_number);
+
+ // Used to identify cache entries for blob files (not normally useful)
+ static const Cache::CacheItemHelper* GetHelper() {
+ return CacheInterface::GetBasicHelper();
+ }
+
private:
using CacheInterface =
BasicTypedCacheInterface<BlobFileReader, CacheEntryRole::kMisc>;
diff --git a/db/column_family.h b/db/column_family.h
index e4b7adde8..86637736a 100644
--- a/db/column_family.h
+++ b/db/column_family.h
@@ -401,6 +401,7 @@ class ColumnFamilyData {
SequenceNumber earliest_seq);
TableCache* table_cache() const { return table_cache_.get(); }
+ BlobFileCache* blob_file_cache() const { return blob_file_cache_.get(); }
BlobSource* blob_source() const { return blob_source_.get(); }
// See documentation in compaction_picker.h
diff --git a/db/db_impl/db_impl.cc b/db/db_impl/db_impl.cc
index 261593423..06573ac2e 100644
--- a/db/db_impl/db_impl.cc
+++ b/db/db_impl/db_impl.cc
@@ -659,8 +659,9 @@ Status DBImpl::CloseHelper() {
// We need to release them before the block cache is destroyed. The block
// cache may be destroyed inside versions_.reset(), when column family data
// list is destroyed, so leaving handles in table cache after
- // versions_.reset() may cause issues.
- // Here we clean all unreferenced handles in table cache.
+ // versions_.reset() may cause issues. Here we clean all unreferenced handles
+ // in table cache, and (for certain builds/conditions) assert that no obsolete
+ // files are hanging around unreferenced (leak) in the table/blob file cache.
// Now we assume all user queries have finished, so only version set itself
// can possibly hold the blocks from block cache. After releasing unreferenced
// handles here, only handles held by version set left and inside
@@ -668,6 +669,9 @@ Status DBImpl::CloseHelper() {
// time a handle is released, we erase it from the cache too. By doing that,
// we can guarantee that after versions_.reset(), table cache is empty
// so the cache can be safely destroyed.
+#ifndef NDEBUG
+ TEST_VerifyNoObsoleteFilesCached(/*db_mutex_already_held=*/true);
+#endif // !NDEBUG
table_cache_->EraseUnRefEntries();
for (auto& txn_entry : recovered_transactions_) {
@@ -3227,6 +3231,8 @@ Status DBImpl::MultiGetImpl(
s = Status::Aborted();
break;
}
+ // This could be a long-running operation
+ ROCKSDB_THREAD_YIELD_HOOK();
}
// Post processing (decrement reference counts and record statistics)
diff --git a/db/db_impl/db_impl.h b/db/db_impl/db_impl.h
index 5e4fa310b..ccc0abfa7 100644
--- a/db/db_impl/db_impl.h
+++ b/db/db_impl/db_impl.h
@@ -1241,9 +1241,14 @@ class DBImpl : public DB {
static Status TEST_ValidateOptions(const DBOptions& db_options) {
return ValidateOptions(db_options);
}
-
#endif // NDEBUG
+ // In certain configurations, verify that the table/blob file cache only
+ // contains entries for live files, to check for effective leaks of open
+ // files. This can only be called when purging of obsolete files has
+ // "settled," such as during parts of DB Close().
+ void TEST_VerifyNoObsoleteFilesCached(bool db_mutex_already_held) const;
+
// persist stats to column family "_persistent_stats"
void PersistStats();
diff --git a/db/db_impl/db_impl_debug.cc b/db/db_impl/db_impl_debug.cc
index 790a50d7a..67f5b4aaf 100644
--- a/db/db_impl/db_impl_debug.cc
+++ b/db/db_impl/db_impl_debug.cc
@@ -9,6 +9,7 @@
#ifndef NDEBUG
+#include "db/blob/blob_file_cache.h"
#include "db/column_family.h"
#include "db/db_impl/db_impl.h"
#include "db/error_handler.h"
@@ -328,5 +329,49 @@ size_t DBImpl::TEST_EstimateInMemoryStatsHistorySize() const {
InstrumentedMutexLock l(&const_cast<DBImpl*>(this)->stats_history_mutex_);
return EstimateInMemoryStatsHistorySize();
}
+
+void DBImpl::TEST_VerifyNoObsoleteFilesCached(
+ bool db_mutex_already_held) const {
+ // This check is somewhat expensive and obscure to make a part of every
+ // unit test in every build variety. Thus, we only enable it for ASAN builds.
+ if (!kMustFreeHeapAllocations) {
+ return;
+ }
+
+ std::optional<InstrumentedMutexLock> l;
+ if (db_mutex_already_held) {
+ mutex_.AssertHeld();
+ } else {
+ l.emplace(&mutex_);
+ }
+
+ std::vector<uint64_t> live_files;
+ for (auto cfd : *versions_->GetColumnFamilySet()) {
+ if (cfd->IsDropped()) {
+ continue;
+ }
+ // Sneakily add both SST and blob files to the same list
+ cfd->current()->AddLiveFiles(&live_files, &live_files);
+ }
+ std::sort(live_files.begin(), live_files.end());
+
+ auto fn = [&live_files](const Slice& key, Cache::ObjectPtr, size_t,
+ const Cache::CacheItemHelper* helper) {
+ if (helper != BlobFileCache::GetHelper()) {
+ // Skip non-blob files for now
+ // FIXME: diagnose and fix the leaks of obsolete SST files revealed in
+ // unit tests.
+ return;
+ }
+ // See TableCache and BlobFileCache
+ assert(key.size() == sizeof(uint64_t));
+ uint64_t file_number;
+ GetUnaligned(reinterpret_cast<const uint64_t*>(key.data()), &file_number);
+ // Assert file is in sorted live_files
+ assert(
+ std::binary_search(live_files.begin(), live_files.end(), file_number));
+ };
+ table_cache_->ApplyToAllEntries(fn, {});
+}
} // namespace ROCKSDB_NAMESPACE
#endif // NDEBUG
diff --git a/db/db_iter.cc b/db/db_iter.cc
index e02586377..bf4749eb9 100644
--- a/db/db_iter.cc
+++ b/db/db_iter.cc
@@ -540,6 +540,8 @@ bool DBIter::FindNextUserEntryInternal(bool skipping_saved_key,
} else {
iter_.Next();
}
+ // This could be a long-running operation due to tombstones, etc.
+ ROCKSDB_THREAD_YIELD_HOOK();
} while (iter_.Valid());
valid_ = false;
diff --git a/db/table_cache.cc b/db/table_cache.cc
index 71fc29c32..8a5be75e8 100644
--- a/db/table_cache.cc
+++ b/db/table_cache.cc
@@ -164,6 +164,7 @@ Status TableCache::GetTableReader(
}
Cache::Handle* TableCache::Lookup(Cache* cache, uint64_t file_number) {
+ // NOTE: sharing same Cache with BlobFileCache
Slice key = GetSliceForFileNumber(&file_number);
return cache->Lookup(key);
}
@@ -179,6 +180,7 @@ Status TableCache::FindTable(
size_t max_file_size_for_l0_meta_pin, Temperature file_temperature) {
PERF_TIMER_GUARD_WITH_CLOCK(find_table_nanos, ioptions_.clock);
uint64_t number = file_meta.fd.GetNumber();
+ // NOTE: sharing same Cache with BlobFileCache
Slice key = GetSliceForFileNumber(&number);
*handle = cache_.Lookup(key);
TEST_SYNC_POINT_CALLBACK("TableCache::FindTable:0",
diff --git a/db/version_builder.cc b/db/version_builder.cc
index ed8ab8214..c98f53f42 100644
--- a/db/version_builder.cc
+++ b/db/version_builder.cc
@@ -24,6 +24,7 @@
#include <vector>
#include "cache/cache_reservation_manager.h"
+#include "db/blob/blob_file_cache.h"
#include "db/blob/blob_file_meta.h"
#include "db/dbformat.h"
#include "db/internal_stats.h"
@@ -744,12 +745,9 @@ class VersionBuilder::Rep {
return Status::Corruption("VersionBuilder", oss.str());
}
- // Note: we use C++11 for now but in C++14, this could be done in a more
- // elegant way using generalized lambda capture.
- VersionSet* const vs = version_set_;
- const ImmutableCFOptions* const ioptions = ioptions_;
-
- auto deleter = [vs, ioptions](SharedBlobFileMetaData* shared_meta) {
+ auto deleter = [vs = version_set_, ioptions = ioptions_,
+ bc = cfd_ ? cfd_->blob_file_cache()
+ : nullptr](SharedBlobFileMetaData* shared_meta) {
if (vs) {
assert(ioptions);
assert(!ioptions->cf_paths.empty());
@@ -758,6 +756,9 @@ class VersionBuilder::Rep {
vs->AddObsoleteBlobFile(shared_meta->GetBlobFileNumber(),
ioptions->cf_paths.front().path);
}
+ if (bc) {
+ bc->Evict(shared_meta->GetBlobFileNumber());
+ }
delete shared_meta;
};
@@ -766,7 +767,7 @@ class VersionBuilder::Rep {
blob_file_number, blob_file_addition.GetTotalBlobCount(),
blob_file_addition.GetTotalBlobBytes(),
blob_file_addition.GetChecksumMethod(),
- blob_file_addition.GetChecksumValue(), deleter);
+ blob_file_addition.GetChecksumValue(), std::move(deleter));
mutable_blob_file_metas_.emplace(
blob_file_number, MutableBlobFileMetaData(std::move(shared_meta)));
diff --git a/db/version_set.h b/db/version_set.h
index 9336782b1..024f869e7 100644
--- a/db/version_set.h
+++ b/db/version_set.h
@@ -1514,7 +1514,6 @@ class VersionSet {
void GetLiveFilesMetaData(std::vector<LiveFileMetaData>* metadata);
void AddObsoleteBlobFile(uint64_t blob_file_number, std::string path) {
- // TODO: Erase file from BlobFileCache?
obsolete_blob_files_.emplace_back(blob_file_number, std::move(path));
}
diff --git a/include/rocksdb/version.h b/include/rocksdb/version.h
index 2a19796b8..0afa2cab1 100644
--- a/include/rocksdb/version.h
+++ b/include/rocksdb/version.h
@@ -13,7 +13,7 @@
// minor or major version number planned for release.
#define ROCKSDB_MAJOR 9
#define ROCKSDB_MINOR 7
-#define ROCKSDB_PATCH 3
+#define ROCKSDB_PATCH 4
// Do not use these. We made the mistake of declaring macros starting with
// double underscore. Now we have to live with our choice. We'll deprecate these
diff --git a/port/port.h b/port/port.h
index 13aa56d47..141716e5b 100644
--- a/port/port.h
+++ b/port/port.h
@@ -19,3 +19,19 @@
#elif defined(OS_WIN)
#include "port/win/port_win.h"
#endif
+
+#ifdef OS_LINUX
+// A temporary hook into long-running RocksDB threads to support modifying their
+// priority etc. This should become a public API hook once the requirements
+// are better understood.
+extern "C" void RocksDbThreadYield() __attribute__((__weak__));
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+ { \
+ if (RocksDbThreadYield) { \
+ RocksDbThreadYield(); \
+ } \
+ }
+#else
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+ {}
+#endif

View File

@@ -1,30 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 93b884d..b715cb6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -106,14 +106,9 @@ endif()
include(CMakeDependentOption)
if(MSVC)
- option(WITH_GFLAGS "build with GFlags" OFF)
option(WITH_XPRESS "build with windows built in compression" OFF)
- option(ROCKSDB_SKIP_THIRDPARTY "skip thirdparty.inc" OFF)
-
- if(NOT ROCKSDB_SKIP_THIRDPARTY)
- include(${CMAKE_CURRENT_SOURCE_DIR}/thirdparty.inc)
- endif()
-else()
+endif()
+if(TRUE)
if(CMAKE_SYSTEM_NAME MATCHES "FreeBSD" AND NOT CMAKE_SYSTEM_NAME MATCHES "kFreeBSD")
# FreeBSD has jemalloc as default malloc
# but it does not have all the jemalloc files in include/...
@@ -126,7 +121,7 @@ else()
endif()
endif()
- if(MINGW)
+ if(MSVC OR MINGW)
option(WITH_GFLAGS "build with GFlags" OFF)
else()
option(WITH_GFLAGS "build with GFlags" ON)

View File

@@ -1,40 +0,0 @@
sources:
"1.1.10":
url: "https://github.com/google/snappy/archive/1.1.10.tar.gz"
sha256: "49d831bffcc5f3d01482340fe5af59852ca2fe76c3e05df0e67203ebbe0f1d90"
"1.1.9":
url: "https://github.com/google/snappy/archive/1.1.9.tar.gz"
sha256: "75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7"
"1.1.8":
url: "https://github.com/google/snappy/archive/1.1.8.tar.gz"
sha256: "16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f"
"1.1.7":
url: "https://github.com/google/snappy/archive/1.1.7.tar.gz"
sha256: "3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4"
patches:
"1.1.10":
- patch_file: "patches/1.1.10-0001-fix-inlining-failure.patch"
patch_description: "disable inlining for compilation error"
patch_type: "portability"
- patch_file: "patches/1.1.9-0002-no-Werror.patch"
patch_description: "disable 'warning as error' options"
patch_type: "portability"
- patch_file: "patches/1.1.10-0003-fix-clobber-list-older-llvm.patch"
patch_description: "disable inline asm on apple-clang"
patch_type: "portability"
- patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
patch_description: "remove 'disable rtti'"
patch_type: "conan"
"1.1.9":
- patch_file: "patches/1.1.9-0001-fix-inlining-failure.patch"
patch_description: "disable inlining for compilation error"
patch_type: "portability"
- patch_file: "patches/1.1.9-0002-no-Werror.patch"
patch_description: "disable 'warning as error' options"
patch_type: "portability"
- patch_file: "patches/1.1.9-0003-fix-clobber-list-older-llvm.patch"
patch_description: "disable inline asm on apple-clang"
patch_type: "portability"
- patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
patch_description: "remove 'disable rtti'"
patch_type: "conan"

View File

@@ -1,89 +0,0 @@
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.scm import Version
import os
required_conan_version = ">=1.54.0"
class SnappyConan(ConanFile):
name = "snappy"
description = "A fast compressor/decompressor"
topics = ("google", "compressor", "decompressor")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/google/snappy"
license = "BSD-3-Clause"
package_type = "library"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
}
def export_sources(self):
export_conandata_patches(self)
def config_options(self):
if self.settings.os == 'Windows':
del self.options.fPIC
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def layout(self):
cmake_layout(self, src_folder="src")
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, 11)
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.variables["SNAPPY_BUILD_TESTS"] = False
if Version(self.version) >= "1.1.8":
tc.variables["SNAPPY_FUZZING_BUILD"] = False
tc.variables["SNAPPY_REQUIRE_AVX"] = False
tc.variables["SNAPPY_REQUIRE_AVX2"] = False
tc.variables["SNAPPY_INSTALL"] = True
if Version(self.version) >= "1.1.9":
tc.variables["SNAPPY_BUILD_BENCHMARKS"] = False
tc.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
cmake = CMake(self)
cmake.install()
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
def package_info(self):
self.cpp_info.set_property("cmake_file_name", "Snappy")
self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
# TODO: back to global scope in conan v2 once cmake_find_package* generators removed
self.cpp_info.components["snappylib"].libs = ["snappy"]
if not self.options.shared:
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["snappylib"].system_libs.append("m")
# TODO: to remove in conan v2 once cmake_find_package* generators removed
self.cpp_info.names["cmake_find_package"] = "Snappy"
self.cpp_info.names["cmake_find_package_multi"] = "Snappy"
self.cpp_info.components["snappylib"].names["cmake_find_package"] = "snappy"
self.cpp_info.components["snappylib"].names["cmake_find_package_multi"] = "snappy"
self.cpp_info.components["snappylib"].set_property("cmake_target_name", "Snappy::snappy")

View File

@@ -1,13 +0,0 @@
diff --git a/snappy-stubs-internal.h b/snappy-stubs-internal.h
index 1548ed7..3b4a9f3 100644
--- a/snappy-stubs-internal.h
+++ b/snappy-stubs-internal.h
@@ -100,7 +100,7 @@
// Inlining hints.
#if HAVE_ATTRIBUTE_ALWAYS_INLINE
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#else
#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#endif // HAVE_ATTRIBUTE_ALWAYS_INLINE

View File

@@ -1,13 +0,0 @@
diff --git a/snappy.cc b/snappy.cc
index d414718..e4efb59 100644
--- a/snappy.cc
+++ b/snappy.cc
@@ -1132,7 +1132,7 @@ inline size_t AdvanceToNextTagX86Optimized(const uint8_t** ip_p, size_t* tag) {
size_t literal_len = *tag >> 2;
size_t tag_type = *tag;
bool is_literal;
-#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__)
+#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
// TODO clang misses the fact that the (c & 3) already correctly
// sets the zero flag.
asm("and $3, %k[tag_type]\n\t"

View File

@@ -1,14 +0,0 @@
Fixes the following error:
error: inlining failed in call to always_inline size_t snappy::AdvanceToNextTag(const uint8_t**, size_t*): function body can be overwritten at link time
--- snappy-stubs-internal.h
+++ snappy-stubs-internal.h
@@ -100,7 +100,7 @@
// Inlining hints.
#ifdef HAVE_ATTRIBUTE_ALWAYS_INLINE
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#else
#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#endif

View File

@@ -1,12 +0,0 @@
--- CMakeLists.txt
+++ CMakeLists.txt
@@ -69,7 +69,7 @@
- # Use -Werror for clang only.
+if(0)
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
if(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
endif(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
endif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
-
+endif()

View File

@@ -1,12 +0,0 @@
asm clobbers do not work for clang < 9 and apple-clang < 11 (found by SpaceIm)
--- snappy.cc
+++ snappy.cc
@@ -1026,7 +1026,7 @@
size_t literal_len = *tag >> 2;
size_t tag_type = *tag;
bool is_literal;
-#if defined(__GNUC__) && defined(__x86_64__)
+#if defined(__GNUC__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
// TODO clang misses the fact that the (c & 3) already correctly
// sets the zero flag.
asm("and $3, %k[tag_type]\n\t"

View File

@@ -1,20 +0,0 @@
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -53,8 +53,6 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
add_definitions(-D_HAS_EXCEPTIONS=0)
# Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Use -Wall for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
@@ -78,8 +76,6 @@ endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
# Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make

View File

@@ -1,12 +0,0 @@
sources:
"4.0.3":
url: "https://github.com/SOCI/soci/archive/v4.0.3.tar.gz"
sha256: "4b1ff9c8545c5d802fbe06ee6cd2886630e5c03bf740e269bb625b45cf934928"
patches:
"4.0.3":
- patch_file: "patches/0001-Remove-hardcoded-INSTALL_NAME_DIR-for-relocatable-li.patch"
patch_description: "Generate relocatable libraries on MacOS"
patch_type: "portability"
- patch_file: "patches/0002-Fix-soci_backend.patch"
patch_description: "Fix variable names for dependencies"
patch_type: "conan"

View File

@@ -1,212 +0,0 @@
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.microsoft import is_msvc
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration
import os
required_conan_version = ">=1.55.0"
class SociConan(ConanFile):
name = "soci"
homepage = "https://github.com/SOCI/soci"
url = "https://github.com/conan-io/conan-center-index"
description = "The C++ Database Access Library "
topics = ("mysql", "odbc", "postgresql", "sqlite3")
license = "BSL-1.0"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"empty": [True, False],
"with_sqlite3": [True, False],
"with_db2": [True, False],
"with_odbc": [True, False],
"with_oracle": [True, False],
"with_firebird": [True, False],
"with_mysql": [True, False],
"with_postgresql": [True, False],
"with_boost": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"empty": False,
"with_sqlite3": False,
"with_db2": False,
"with_odbc": False,
"with_oracle": False,
"with_firebird": False,
"with_mysql": False,
"with_postgresql": False,
"with_boost": False,
}
def export_sources(self):
export_conandata_patches(self)
def layout(self):
cmake_layout(self, src_folder="src")
def config_options(self):
if self.settings.os == "Windows":
self.options.rm_safe("fPIC")
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def requirements(self):
if self.options.with_sqlite3:
self.requires("sqlite3/3.47.0")
if self.options.with_odbc and self.settings.os != "Windows":
self.requires("odbc/2.3.11")
if self.options.with_mysql:
self.requires("libmysqlclient/8.1.0")
if self.options.with_postgresql:
self.requires("libpq/15.5")
if self.options.with_boost:
self.requires("boost/1.83.0")
@property
def _minimum_compilers_version(self):
return {
"Visual Studio": "14",
"gcc": "4.8",
"clang": "3.8",
"apple-clang": "8.0"
}
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, 11)
compiler = str(self.settings.compiler)
compiler_version = Version(self.settings.compiler.version.value)
if compiler not in self._minimum_compilers_version:
self.output.warning("{} recipe lacks information about the {} compiler support.".format(self.name, self.settings.compiler))
elif compiler_version < self._minimum_compilers_version[compiler]:
raise ConanInvalidConfiguration("{} requires a {} version >= {}".format(self.name, compiler, compiler_version))
prefix = "Dependencies for"
message = "not configured in this conan package."
if self.options.with_db2:
# self.requires("db2/0.0.0") # TODO add support for db2
raise ConanInvalidConfiguration("{} DB2 {} ".format(prefix, message))
if self.options.with_oracle:
# self.requires("oracle_db/0.0.0") # TODO add support for oracle
raise ConanInvalidConfiguration("{} ORACLE {} ".format(prefix, message))
if self.options.with_firebird:
# self.requires("firebird/0.0.0") # TODO add support for firebird
raise ConanInvalidConfiguration("{} firebird {} ".format(prefix, message))
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.variables["SOCI_SHARED"] = self.options.shared
tc.variables["SOCI_STATIC"] = not self.options.shared
tc.variables["SOCI_TESTS"] = False
tc.variables["SOCI_CXX11"] = True
tc.variables["SOCI_EMPTY"] = self.options.empty
tc.variables["WITH_SQLITE3"] = self.options.with_sqlite3
tc.variables["WITH_DB2"] = self.options.with_db2
tc.variables["WITH_ODBC"] = self.options.with_odbc
tc.variables["WITH_ORACLE"] = self.options.with_oracle
tc.variables["WITH_FIREBIRD"] = self.options.with_firebird
tc.variables["WITH_MYSQL"] = self.options.with_mysql
tc.variables["WITH_POSTGRESQL"] = self.options.with_postgresql
tc.variables["WITH_BOOST"] = self.options.with_boost
tc.generate()
deps = CMakeDeps(self)
deps.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
copy(self, "LICENSE_1_0.txt", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)
cmake = CMake(self)
cmake.install()
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
def package_info(self):
self.cpp_info.set_property("cmake_file_name", "SOCI")
target_suffix = "" if self.options.shared else "_static"
lib_prefix = "lib" if is_msvc(self) and not self.options.shared else ""
version = Version(self.version)
lib_suffix = "_{}_{}".format(version.major, version.minor) if self.settings.os == "Windows" else ""
# soci_core
self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
if self.options.with_boost:
self.cpp_info.components["soci_core"].requires.append("boost::boost")
# soci_empty
if self.options.empty:
self.cpp_info.components["soci_empty"].set_property("cmake_target_name", "SOCI::soci_empty{}".format(target_suffix))
self.cpp_info.components["soci_empty"].libs = ["{}soci_empty{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_empty"].requires = ["soci_core"]
# soci_sqlite3
if self.options.with_sqlite3:
self.cpp_info.components["soci_sqlite3"].set_property("cmake_target_name", "SOCI::soci_sqlite3{}".format(target_suffix))
self.cpp_info.components["soci_sqlite3"].libs = ["{}soci_sqlite3{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_sqlite3"].requires = ["soci_core", "sqlite3::sqlite3"]
# soci_odbc
if self.options.with_odbc:
self.cpp_info.components["soci_odbc"].set_property("cmake_target_name", "SOCI::soci_odbc{}".format(target_suffix))
self.cpp_info.components["soci_odbc"].libs = ["{}soci_odbc{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_odbc"].requires = ["soci_core"]
if self.settings.os == "Windows":
self.cpp_info.components["soci_odbc"].system_libs.append("odbc32")
else:
self.cpp_info.components["soci_odbc"].requires.append("odbc::odbc")
# soci_mysql
if self.options.with_mysql:
self.cpp_info.components["soci_mysql"].set_property("cmake_target_name", "SOCI::soci_mysql{}".format(target_suffix))
self.cpp_info.components["soci_mysql"].libs = ["{}soci_mysql{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_mysql"].requires = ["soci_core", "libmysqlclient::libmysqlclient"]
# soci_postgresql
if self.options.with_postgresql:
self.cpp_info.components["soci_postgresql"].set_property("cmake_target_name", "SOCI::soci_postgresql{}".format(target_suffix))
self.cpp_info.components["soci_postgresql"].libs = ["{}soci_postgresql{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_postgresql"].requires = ["soci_core", "libpq::libpq"]
# TODO: to remove in conan v2 once cmake_find_package* generators removed
self.cpp_info.names["cmake_find_package"] = "SOCI"
self.cpp_info.names["cmake_find_package_multi"] = "SOCI"
self.cpp_info.components["soci_core"].names["cmake_find_package"] = "soci_core{}".format(target_suffix)
self.cpp_info.components["soci_core"].names["cmake_find_package_multi"] = "soci_core{}".format(target_suffix)
if self.options.empty:
self.cpp_info.components["soci_empty"].names["cmake_find_package"] = "soci_empty{}".format(target_suffix)
self.cpp_info.components["soci_empty"].names["cmake_find_package_multi"] = "soci_empty{}".format(target_suffix)
if self.options.with_sqlite3:
self.cpp_info.components["soci_sqlite3"].names["cmake_find_package"] = "soci_sqlite3{}".format(target_suffix)
self.cpp_info.components["soci_sqlite3"].names["cmake_find_package_multi"] = "soci_sqlite3{}".format(target_suffix)
if self.options.with_odbc:
self.cpp_info.components["soci_odbc"].names["cmake_find_package"] = "soci_odbc{}".format(target_suffix)
self.cpp_info.components["soci_odbc"].names["cmake_find_package_multi"] = "soci_odbc{}".format(target_suffix)
if self.options.with_mysql:
self.cpp_info.components["soci_mysql"].names["cmake_find_package"] = "soci_mysql{}".format(target_suffix)
self.cpp_info.components["soci_mysql"].names["cmake_find_package_multi"] = "soci_mysql{}".format(target_suffix)
if self.options.with_postgresql:
self.cpp_info.components["soci_postgresql"].names["cmake_find_package"] = "soci_postgresql{}".format(target_suffix)
self.cpp_info.components["soci_postgresql"].names["cmake_find_package_multi"] = "soci_postgresql{}".format(target_suffix)
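As a worked example of the naming logic in `package_info()` above: SOCI 4.0.3 built static with MSVC on Windows yields a core library named `libsoci_core_4_0`, while the same build on Linux yields `soci_core`. A sketch reproducing that computation (inputs are example values):

```
def core_lib_name(version, os_name, shared, msvc):
    """Reproduce the lib_prefix/lib_suffix logic from the recipe above."""
    major, minor = version.split(".")[:2]
    lib_prefix = "lib" if msvc and not shared else ""
    lib_suffix = "_{}_{}".format(major, minor) if os_name == "Windows" else ""
    return "{}soci_core{}".format(lib_prefix, lib_suffix)

assert core_lib_name("4.0.3", "Windows", shared=False, msvc=True) == "libsoci_core_4_0"
assert core_lib_name("4.0.3", "Linux", shared=False, msvc=False) == "soci_core"
```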

View File

@@ -1,39 +0,0 @@
From d491bf7b5040d314ffd0c6310ba01f78ff44c85e Mon Sep 17 00:00:00 2001
From: Rasmus Thomsen <rasmus.thomsen@dampsoft.de>
Date: Fri, 14 Apr 2023 09:16:29 +0200
Subject: [PATCH] Remove hardcoded INSTALL_NAME_DIR for relocatable libraries
on MacOS
---
cmake/SociBackend.cmake | 2 +-
src/core/CMakeLists.txt | 1 -
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
index 5d4ef0df..39fe1f77 100644
--- a/cmake/SociBackend.cmake
+++ b/cmake/SociBackend.cmake
@@ -171,7 +171,7 @@ macro(soci_backend NAME)
set_target_properties(${THIS_BACKEND_TARGET}
PROPERTIES
SOVERSION ${${PROJECT_NAME}_SOVERSION}
- INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib)
+ )
if(APPLE)
set_target_properties(${THIS_BACKEND_TARGET}
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 3e7deeae..f9eae564 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -59,7 +59,6 @@ if (SOCI_SHARED)
PROPERTIES
VERSION ${SOCI_VERSION}
SOVERSION ${SOCI_SOVERSION}
- INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib
CLEAN_DIRECT_OUTPUT 1)
endif()
--
2.25.1

View File

@@ -1,24 +0,0 @@
diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
index 0a664667..3fa2ed95 100644
--- a/cmake/SociBackend.cmake
+++ b/cmake/SociBackend.cmake
@@ -31,14 +31,13 @@ macro(soci_backend_deps_found NAME DEPS SUCCESS)
if(NOT DEPEND_FOUND)
list(APPEND DEPS_NOT_FOUND ${dep})
else()
- string(TOUPPER "${dep}" DEPU)
- if( ${DEPU}_INCLUDE_DIR )
- list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIR})
+ if( ${dep}_INCLUDE_DIR )
+ list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIR})
endif()
- if( ${DEPU}_INCLUDE_DIRS )
- list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIRS})
+ if( ${dep}_INCLUDE_DIRS )
+ list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIRS})
endif()
- list(APPEND DEPS_LIBRARIES ${${DEPU}_LIBRARIES})
+ list(APPEND DEPS_LIBRARIES ${${dep}_LIBRARIES})
endif()
endforeach()

View File

@@ -367,7 +367,7 @@ get(Section const& section,
}
inline std::string
-get(Section const& section, std::string const& name, const char* defaultValue)
+get(Section const& section, std::string const& name, char const* defaultValue)
{
try
{
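
This hunk, like many below, only moves const from the left of the type to the right; the two spellings declare the same type, so the change is purely stylistic (west const to east const). A minimal sketch confirming the equivalence:

#include <type_traits>

// "const char*" and "char const*" are the same type: pointer to const char.
static_assert(std::is_same_v<const char*, char const*>);

// Not to be confused with a const pointer, which is a different type.
static_assert(!std::is_same_v<char const*, char* const>);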

View File

@@ -25,6 +25,7 @@
#include <cstdint>
#include <cstring>
#include <memory>
namespace ripple {

View File

@@ -55,7 +55,7 @@ lz4Compress(void const* in, std::size_t inSize, BufferFactory&& bf)
auto compressed = bf(outCapacity);
auto compressedSize = LZ4_compress_default(
-reinterpret_cast<const char*>(in),
+reinterpret_cast<char const*>(in),
reinterpret_cast<char*>(compressed),
inSize,
outCapacity);
@@ -89,7 +89,7 @@ lz4Decompress(
Throw<std::runtime_error>("lz4Decompress: integer overflow (output)");
if (LZ4_decompress_safe(
-reinterpret_cast<const char*>(in),
+reinterpret_cast<char const*>(in),
reinterpret_cast<char*>(decompressed),
inSize,
decompressedSize) != decompressedSize)
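
For reference, a self-contained round trip through the same two LZ4 entry points used above, LZ4_compress_default and LZ4_decompress_safe, with output sizing via LZ4_compressBound; the BufferFactory and Throw plumbing is omitted.

#include <lz4.h>

#include <cstring>
#include <string>
#include <vector>

int main()
{
    std::string const in(1000, 'x');
    int const bound = LZ4_compressBound(static_cast<int>(in.size()));
    std::vector<char> compressed(bound);
    // Returns the compressed size, or <= 0 on failure.
    int const csize = LZ4_compress_default(
        in.data(), compressed.data(), static_cast<int>(in.size()), bound);
    if (csize <= 0)
        return 1;
    std::vector<char> out(in.size());
    // Returns the decompressed size, or a negative value on failure.
    int const dsize = LZ4_decompress_safe(
        compressed.data(), out.data(), csize, static_cast<int>(out.size()));
    return (dsize == static_cast<int>(in.size()) &&
            std::memcmp(out.data(), in.data(), in.size()) == 0)
        ? 0
        : 1;
}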

View File

@@ -22,8 +22,18 @@
#include <xrpl/basics/contract.h>
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#endif
#include <boost/outcome.hpp>
#if defined(__clang__)
#pragma clang diagnostic pop
#endif
#include <stdexcept>
namespace ripple {
@@ -93,7 +103,7 @@ public:
{
}
-constexpr const E&
+constexpr E const&
value() const&
{
return val_;
@@ -111,7 +121,7 @@ public:
return std::move(val_);
}
-constexpr const E&&
+constexpr E const&&
value() const&&
{
return std::move(val_);
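
Beyond the const placement, note that these accessors are ref-qualified. A stripped-down sketch (Holder is hypothetical, not the class in this file) of how the const& and const&& overloads are selected:

#include <utility>

template <class E>
class Holder
{
    E val_{};

public:
    // Chosen for lvalues: Holder<int> h; h.value();
    constexpr E const&
    value() const&
    {
        return val_;
    }

    // Chosen for rvalues: std::move(h).value(); allows moving out.
    constexpr E const&&
    value() const&&
    {
        return std::move(val_);
    }
};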

View File

@@ -294,7 +294,7 @@ IntrusiveRefCounts::releaseStrongRef() const
}
if (refCounts.compare_exchange_weak(
-prevIntVal, nextIntVal, std::memory_order_release))
+prevIntVal, nextIntVal, std::memory_order_acq_rel))
{
// Can't be in partial destroy because only decrementing the strong
// count to zero can start a partial destroy, and that can't happen
@@ -351,7 +351,7 @@ IntrusiveRefCounts::addWeakReleaseStrongRef() const
}
}
if (refCounts.compare_exchange_weak(
-prevIntVal, nextIntVal, std::memory_order_release))
+prevIntVal, nextIntVal, std::memory_order_acq_rel))
{
XRPL_ASSERT(
(!(prevIntVal & partialDestroyStartedMask)),
@@ -374,7 +374,7 @@ IntrusiveRefCounts::releaseWeakRef() const
// This case should only be hit if the partialDestroyStartedBit is
// set non-atomically (and even then very rarely). The code is kept
// in case we need to set the flag non-atomically for perf reasons.
-refCounts.wait(prevIntVal, std::memory_order_acq_rel);
+refCounts.wait(prevIntVal, std::memory_order_acquire);
prevIntVal = refCounts.load(std::memory_order_acquire);
prev = RefCountPair{prevIntVal};
}
@@ -382,7 +382,7 @@ IntrusiveRefCounts::releaseWeakRef() const
{
// partial destroy MUST finish before running a full destroy (when
// using weak pointers)
-refCounts.wait(prevIntVal - weakDelta, std::memory_order_acq_rel);
+refCounts.wait(prevIntVal - weakDelta, std::memory_order_acquire);
}
return ReleaseWeakRefAction::destroy;
}
@@ -396,7 +396,7 @@ IntrusiveRefCounts::checkoutStrongRefFromWeak() const noexcept
auto desiredValue = RefCountPair{2, 1}.combinedValue();
while (!refCounts.compare_exchange_weak(
-curValue, desiredValue, std::memory_order_release))
+curValue, desiredValue, std::memory_order_acq_rel))
{
RefCountPair const prev{curValue};
if (!prev.strong)
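
Two distinct fixes are visible here. The success ordering of the compare-exchange operations is strengthened from release to acq_rel, so the winning thread also acquires writes published by other threads' releases. And std::atomic::wait only accepts relaxed, consume, acquire, or seq_cst as its ordering argument, so the previous acq_rel was invalid there and becomes acquire. A minimal sketch of the corrected orderings, with a hypothetical counter standing in for refCounts:

#include <atomic>

std::atomic<int> counter{1};

void sketch()
{
    int prev = counter.load(std::memory_order_acquire);
    // acq_rel on success both publishes this thread's writes and observes
    // others'. The one-argument form used in the real code derives the
    // failure ordering (acquire) from acq_rel automatically; it is spelled
    // out here for clarity.
    while (!counter.compare_exchange_weak(
        prev, prev - 1, std::memory_order_acq_rel, std::memory_order_acquire))
    {
    }
    // wait() takes only relaxed/consume/acquire/seq_cst, so acquire is used.
    // Illustrative only: this call blocks while the value equals prev - 1.
    counter.wait(prev - 1, std::memory_order_acquire);
}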

View File

@@ -26,6 +26,7 @@
#include <boost/beast/core/string.hpp>
#include <boost/filesystem.hpp>
#include <fstream>
#include <map>
#include <memory>
#include <mutex>

View File

@@ -29,7 +29,6 @@
#include <array>
#include <cstdint>
#include <optional>
#include <sstream>
#include <string>
namespace ripple {

View File

@@ -115,7 +115,7 @@ public:
sweep();
bool
-del(const key_type& key, bool valid);
+del(key_type const& key, bool valid);
public:
/** Replace aliased objects with originals.
@@ -134,20 +134,20 @@ public:
template <class R>
bool
canonicalize(
-const key_type& key,
+key_type const& key,
SharedPointerType& data,
R&& replaceCallback);
bool
canonicalize_replace_cache(
-const key_type& key,
+key_type const& key,
SharedPointerType const& data);
bool
-canonicalize_replace_client(const key_type& key, SharedPointerType& data);
+canonicalize_replace_client(key_type const& key, SharedPointerType& data);
SharedPointerType
-fetch(const key_type& key);
+fetch(key_type const& key);
/** Insert the element into the container.
If the key already exists, nothing happens.
@@ -168,7 +168,7 @@ public:
// simply return an iterator.
//
bool
-retrieve(const key_type& key, T& data);
+retrieve(key_type const& key, T& data);
mutex_type&
peekMutex();
@@ -322,10 +322,10 @@ private:
std::string m_name;
// Desired number of cache entries (0 = ignore)
-const int m_target_size;
+int const m_target_size;
// Desired maximum cache age
-const clock_type::duration m_target_age;
+clock_type::duration const m_target_age;
// Number of items cached
int m_cache_count;

View File

@@ -365,7 +365,7 @@ TaggedCache<
SharedPointerType,
Hash,
KeyEqual,
-Mutex>::del(const key_type& key, bool valid)
+Mutex>::del(key_type const& key, bool valid)
{
// Remove from cache, if !valid, remove from map too. Returns true if
// removed from cache
@@ -414,7 +414,7 @@ TaggedCache<
KeyEqual,
Mutex>::
canonicalize(
-const key_type& key,
+key_type const& key,
SharedPointerType& data,
R&& replaceCallback)
{
@@ -509,7 +509,7 @@ TaggedCache<
KeyEqual,
Mutex>::
canonicalize_replace_cache(
-const key_type& key,
+key_type const& key,
SharedPointerType const& data)
{
return canonicalize(
@@ -535,7 +535,7 @@ TaggedCache<
Hash,
KeyEqual,
Mutex>::
-canonicalize_replace_client(const key_type& key, SharedPointerType& data)
+canonicalize_replace_client(key_type const& key, SharedPointerType& data)
{
return canonicalize(key, data, []() { return false; });
}
@@ -558,7 +558,7 @@ TaggedCache<
SharedPointerType,
Hash,
KeyEqual,
-Mutex>::fetch(const key_type& key)
+Mutex>::fetch(key_type const& key)
{
std::lock_guard<mutex_type> l(m_mutex);
auto ret = initialFetch(key, l);
@@ -656,7 +656,7 @@ TaggedCache<
SharedPointerType,
Hash,
KeyEqual,
-Mutex>::retrieve(const key_type& key, T& data)
+Mutex>::retrieve(key_type const& key, T& data)
{
// retrieve the value of the stored data
auto entry = fetch(key);
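
The canonicalize_replace_* overloads above differ only in the callback handed to canonicalize: one replaces the cached entry, the other hands the cached entry back to the caller. A hedged sketch of that delegation pattern, using a hypothetical MiniCache rather than TaggedCache itself:

#include <map>
#include <memory>

template <class Key, class T>
struct MiniCache
{
    std::map<Key, std::shared_ptr<T>> map_;

    // Returns true if the key was already present.
    template <class Replace>
    bool
    canonicalize(Key const& key, std::shared_ptr<T>& data, Replace&& replace)
    {
        auto [it, inserted] = map_.try_emplace(key, data);
        if (!inserted)
        {
            if (replace())
                it->second = data;  // caller's value replaces the cache
            else
                data = it->second;  // cached value replaces the caller's
        }
        return !inserted;
    }

    bool
    canonicalize_replace_cache(Key const& key, std::shared_ptr<T> const& data)
    {
        auto copy = data;
        return canonicalize(key, copy, []() { return true; });
    }

    bool
    canonicalize_replace_client(Key const& key, std::shared_ptr<T>& data)
    {
        return canonicalize(key, data, []() { return false; });
    }
};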

View File

@@ -20,7 +20,6 @@
#ifndef RIPPLE_ALGORITHM_H_INCLUDED
#define RIPPLE_ALGORITHM_H_INCLUDED
#include <iterator>
#include <utility>
namespace ripple {

View File

@@ -374,7 +374,7 @@ public:
}
base_uint&
-operator^=(const base_uint& b)
+operator^=(base_uint const& b)
{
for (int i = 0; i < WIDTH; i++)
data_[i] ^= b.data_[i];
@@ -383,7 +383,7 @@ public:
}
base_uint&
-operator&=(const base_uint& b)
+operator&=(base_uint const& b)
{
for (int i = 0; i < WIDTH; i++)
data_[i] &= b.data_[i];
@@ -392,7 +392,7 @@ public:
}
base_uint&
-operator|=(const base_uint& b)
+operator|=(base_uint const& b)
{
for (int i = 0; i < WIDTH; i++)
data_[i] |= b.data_[i];
@@ -415,11 +415,11 @@ public:
return *this;
}
-const base_uint
+base_uint const
operator++(int)
{
// postfix operator
-const base_uint ret = *this;
+base_uint const ret = *this;
++(*this);
return ret;
@@ -441,11 +441,11 @@ public:
return *this;
}
-const base_uint
+base_uint const
operator--(int)
{
// postfix operator
-const base_uint ret = *this;
+base_uint const ret = *this;
--(*this);
return ret;
@@ -466,7 +466,7 @@ public:
}
base_uint&
-operator+=(const base_uint& b)
+operator+=(base_uint const& b)
{
std::uint64_t carry = 0;
@@ -511,7 +511,7 @@ public:
}
[[nodiscard]] constexpr bool
-parseHex(const char* str)
+parseHex(char const* str)
{
return parseHex(std::string_view{str});
}

View File

@@ -43,7 +43,7 @@ struct less
using result_type = bool;
constexpr bool
-operator()(const T& left, const T& right) const
+operator()(T const& left, T const& right) const
{
return std::less<T>()(left, right);
}
@@ -55,7 +55,7 @@ struct equal_to
using result_type = bool;
constexpr bool
-operator()(const T& left, const T& right) const
+operator()(T const& left, T const& right) const
{
return std::equal_to<T>()(left, right);
}

View File

@@ -24,12 +24,8 @@
#include <xrpl/beast/hash/xxhasher.h>
#include <cstdint>
#include <functional>
#include <mutex>
#include <random>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include <utility>
namespace ripple {

View File

@@ -23,7 +23,6 @@
#include <cstdint>
#include <limits>
#include <optional>
#include <utility>
namespace ripple {
auto constexpr muldiv_max = std::numeric_limits<std::uint64_t>::max();

View File

@@ -52,7 +52,7 @@ template <
typename Value,
typename Hash,
typename Pred = std::equal_to<Key>,
-typename Alloc = std::allocator<std::pair<const Key, Value>>>
+typename Alloc = std::allocator<std::pair<Key const, Value>>>
class partitioned_unordered_map
{
std::size_t partitions_;

View File

@@ -24,10 +24,8 @@
#include <boost/operators.hpp>
#include <functional>
#include <iostream>
#include <type_traits>
#include <utility>
namespace ripple {
@@ -76,13 +74,13 @@ public:
}
bool
-operator<(const tagged_integer& rhs) const noexcept
+operator<(tagged_integer const& rhs) const noexcept
{
return m_value < rhs.m_value;
}
bool
-operator==(const tagged_integer& rhs) const noexcept
+operator==(tagged_integer const& rhs) const noexcept
{
return m_value == rhs.m_value;
}
@@ -144,14 +142,14 @@ public:
}
tagged_integer&
-operator<<=(const tagged_integer& rhs) noexcept
+operator<<=(tagged_integer const& rhs) noexcept
{
m_value <<= rhs.m_value;
return *this;
}
tagged_integer&
-operator>>=(const tagged_integer& rhs) noexcept
+operator>>=(tagged_integer const& rhs) noexcept
{
m_value >>= rhs.m_value;
return *this;

View File

@@ -20,9 +20,6 @@
#ifndef BEAST_CHRONO_ABSTRACT_CLOCK_H_INCLUDED
#define BEAST_CHRONO_ABSTRACT_CLOCK_H_INCLUDED
#include <chrono>
#include <string>
namespace beast {
/** Abstract interface to a clock.

View File

@@ -23,6 +23,8 @@
#include <xrpl/beast/clock/abstract_clock.h>
#include <xrpl/beast/utility/instrumentation.h>
#include <chrono>
namespace beast {
/** Manual clock implementation.

View File

@@ -22,6 +22,7 @@
#include <xrpl/beast/container/aged_container.h>
#include <chrono>
#include <type_traits>
namespace beast {

View File

@@ -20,8 +20,6 @@
#ifndef BEAST_CONTAINER_DETAIL_AGED_ASSOCIATIVE_CONTAINER_H_INCLUDED
#define BEAST_CONTAINER_DETAIL_AGED_ASSOCIATIVE_CONTAINER_H_INCLUDED
#include <type_traits>
namespace beast {
namespace detail {

View File

@@ -33,7 +33,6 @@
#include <algorithm>
#include <functional>
#include <initializer_list>
#include <iterator>
#include <memory>
#include <type_traits>
#include <utility>

View File

@@ -3257,7 +3257,6 @@ operator==(aged_unordered_container<
{
if (size() != other.size())
return false;
-using EqRng = std::pair<const_iterator, const_iterator>;
for (auto iter(cbegin()), last(cend()); iter != last;)
{
auto const& k(extract(*iter));

View File

@@ -29,11 +29,9 @@
#include <charconv>
#include <cstdlib>
#include <iterator>
#include <limits>
#include <string>
#include <type_traits>
#include <typeinfo>
#include <utility>
namespace beast {

View File

@@ -24,14 +24,36 @@
#include <boost/container/flat_set.hpp>
#include <boost/endian/conversion.hpp>
/*
Workaround for overzealous clang warning, which trips on libstdc++ headers
In file included from
/usr/lib/gcc/x86_64-linux-gnu/12/../../../../include/c++/12/bits/stl_algo.h:61:
/usr/lib/gcc/x86_64-linux-gnu/12/../../../../include/c++/12/bits/stl_tempbuf.h:263:8:
error: 'get_temporary_buffer<std::pair<ripple::Quality, const
std::vector<std::unique_ptr<ripple::Step>> *>>' is deprecated
[-Werror,-Wdeprecated-declarations] 263 |
std::get_temporary_buffer<value_type>(_M_original_len));
^
*/
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#endif
#include <functional>
#if defined(__clang__)
#pragma clang diagnostic pop
#endif
#include <array>
#include <chrono>
#include <cstdint>
#include <cstring>
#include <functional>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <system_error>
#include <tuple>

View File

@@ -30,7 +30,7 @@ namespace beast {
template <class Hasher = xxhasher>
struct uhash
{
-explicit uhash() = default;
+uhash() = default;
using result_type = typename Hasher::result_type;
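
Removing explicit from a defaulted default constructor is not purely cosmetic: it re-enables copy-list-initialization. A small sketch (the hasher names are hypothetical):

struct explicit_hasher
{
    explicit explicit_hasher() = default;
};

struct plain_hasher
{
    plain_hasher() = default;
};

int main()
{
    // explicit_hasher e = {};  // ill-formed: copy-list-initialization
    //                          // cannot call an explicit constructor
    plain_hasher p = {};        // OK once the constructor is non-explicit
    (void)p;
    return 0;
}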

View File

@@ -29,11 +29,7 @@
#include <boost/asio/ip/address.hpp>
#include <boost/functional/hash.hpp>
#include <cstdint>
#include <ios>
#include <sstream>
#include <string>
#include <typeinfo>
//------------------------------------------------------------------------------

View File

@@ -24,12 +24,6 @@
#include <boost/asio/ip/address_v4.hpp>
#include <cstdint>
#include <functional>
#include <ios>
#include <string>
#include <utility>
namespace beast {
namespace IP {

View File

@@ -24,12 +24,6 @@
#include <boost/asio/ip/address_v6.hpp>
#include <cstdint>
#include <functional>
#include <ios>
#include <string>
#include <utility>
namespace beast {
namespace IP {

View File

@@ -25,7 +25,6 @@
#include <xrpl/beast/net/IPAddress.h>
#include <cstdint>
#include <ios>
#include <optional>
#include <string>
@@ -215,7 +214,7 @@ namespace std {
template <>
struct hash<::beast::IP::Endpoint>
{
-explicit hash() = default;
+hash() = default;
std::size_t
operator()(::beast::IP::Endpoint const& endpoint) const
@@ -230,7 +229,7 @@ namespace boost {
template <>
struct hash<::beast::IP::Endpoint>
{
-explicit hash() = default;
+hash() = default;
std::size_t
operator()(::beast::IP::Endpoint const& endpoint) const

View File

@@ -28,10 +28,8 @@
#include <algorithm>
#include <cctype>
#include <cstdint>
#include <iterator>
#include <string>
#include <utility>
#include <vector>
namespace beast {

View File

@@ -13,7 +13,6 @@
#include <boost/optional.hpp>
#include <condition_variable>
#include <functional>
#include <mutex>
#include <thread>
#include <vector>

View File

@@ -16,7 +16,6 @@
#include <algorithm>
#include <chrono>
#include <functional>
#include <iomanip>
#include <iostream>
#include <sstream>

View File

@@ -13,7 +13,6 @@
#include <boost/assert.hpp>
#include <mutex>
#include <ostream>
#include <string>
namespace beast {

View File

@@ -31,36 +31,28 @@ namespace beast {
template <class Generator>
void
-rngfill(void* buffer, std::size_t bytes, Generator& g)
+rngfill(void* const buffer, std::size_t const bytes, Generator& g)
{
using result_type = typename Generator::result_type;
+constexpr std::size_t result_size = sizeof(result_type);
-while (bytes >= sizeof(result_type))
+std::uint8_t* const buffer_start = static_cast<std::uint8_t*>(buffer);
+std::size_t const complete_iterations = bytes / result_size;
+std::size_t const bytes_remaining = bytes % result_size;
+for (std::size_t count = 0; count < complete_iterations; ++count)
{
-auto const v = g();
-std::memcpy(buffer, &v, sizeof(v));
-buffer = reinterpret_cast<std::uint8_t*>(buffer) + sizeof(v);
-bytes -= sizeof(v);
+result_type const v = g();
+std::size_t const offset = count * result_size;
+std::memcpy(buffer_start + offset, &v, result_size);
}
-XRPL_ASSERT(
-bytes < sizeof(result_type), "beast::rngfill(void*) : maximum bytes");
-#ifdef __GNUC__
-// gcc 11.1 (falsely) warns about an array-bounds overflow in release mode.
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Warray-bounds"
-#endif
-if (bytes > 0)
+if (bytes_remaining > 0)
{
-auto const v = g();
-std::memcpy(buffer, &v, bytes);
+result_type const v = g();
+std::size_t const offset = complete_iterations * result_size;
+std::memcpy(buffer_start + offset, &v, bytes_remaining);
}
-#ifdef __GNUC__
-#pragma GCC diagnostic pop
-#endif
}
template <
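
A standalone restatement of the rewritten loop, with std::mt19937 as one possible Generator; rngfill_sketch is a hypothetical stand-in for the real template:

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <random>

// Whole words first, then one final partial word; indexing from a fixed
// base pointer avoids arithmetic on the void* parameter.
template <class Generator>
void
rngfill_sketch(void* const buffer, std::size_t const bytes, Generator& g)
{
    using result_type = typename Generator::result_type;
    constexpr std::size_t result_size = sizeof(result_type);
    std::uint8_t* const start = static_cast<std::uint8_t*>(buffer);
    std::size_t const whole = bytes / result_size;
    for (std::size_t i = 0; i < whole; ++i)
    {
        result_type const v = g();
        std::memcpy(start + i * result_size, &v, result_size);
    }
    if (std::size_t const rem = bytes % result_size)
    {
        result_type const v = g();
        std::memcpy(start + whole * result_size, &v, rem);
    }
}

int main()
{
    std::mt19937 g(42);
    std::uint8_t buf[37];
    rngfill_sketch(buf, sizeof(buf), g);
    std::printf("%u\n", static_cast<unsigned>(buf[0]));
}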

Some files were not shown because too many files have changed in this diff.