Compare commits

...

437 Commits

Author SHA1 Message Date
JCW
b5ee925be3 Rebuild all dependencies 2025-07-28 17:25:53 +01:00
JCW
7a8836c202 Rebuild all dependencies 2025-07-28 17:25:44 +01:00
JCW
386c0befe1 Skip rhel as the docker image is not working 2025-07-28 14:23:04 +01:00
JCW
76b34f7d82 Fix windows ENV:Path not found error 2025-07-28 14:00:05 +01:00
JCW
46073d7f87 Fix pipeline 2025-07-28 13:53:33 +01:00
Bart Thomee
30b64cb162 Only test RHEL 2025-07-28 08:30:26 -04:00
Bart Thomee
237d67b724 Run on all Linux, MacOS, and Windows variants 2025-07-27 18:38:09 -04:00
Bart Thomee
9bc08b25ed Run on all Linux, MacOS, and Windows variants 2025-07-27 18:36:20 -04:00
Bart Thomee
626f846ed6 Disable linux jobs, run only macos and windows 2025-07-27 18:19:40 -04:00
Bart Thomee
73f086b548 Disable linux jobs, run only macos and windows 2025-07-27 18:05:18 -04:00
Bart Thomee
c7578b00f3 Use self-hosted runners 2025-07-27 18:01:10 -04:00
Bart Thomee
e0a0d7c49d Use self-hosted runners 2025-07-27 17:59:59 -04:00
Bart Thomee
344df580e2 Add Ubuntu and MacOS 2025-07-27 17:05:47 -04:00
Bart Thomee
ed79654388 Add Ubuntu and MacOS 2025-07-27 17:04:12 -04:00
Bart Thomee
7125a6bd24 Add Ubuntu and MacOS 2025-07-27 17:00:10 -04:00
Bart Thomee
087a6044fd Add Ubuntu and MacOS 2025-07-27 16:59:13 -04:00
Bart Thomee
e02c49e7df Combine installing dependencies and building to optimize runtime 2025-07-27 15:15:39 -04:00
Bart Thomee
e1914cd8ef Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 15:01:46 -04:00
Bart Thomee
88ef67e73a Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:57:08 -04:00
Bart Thomee
06163a6d75 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:53:37 -04:00
Bart Thomee
d46c05e1b7 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:44:47 -04:00
Bart Thomee
2b3e131ec8 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:43:36 -04:00
Bart Thomee
9924bcc0d2 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:42:07 -04:00
Bart Thomee
195ac8ac46 Convert reusable workflows back to actions to avoid shortcomings with concurrency 2025-07-27 14:38:52 -04:00
Bart Thomee
2c862a4060 Repo/org vars, unlike secrets, are not accessible in composite actions or reusable workflows 2025-07-27 14:30:27 -04:00
Bart Thomee
c6c836b82b Repo/org vars, unlike secrets, are not accessible in composite actions or reusable workflows 2025-07-27 14:29:22 -04:00
Bart Thomee
46d18c50f8 Re-enable installing dependencies and building+testing 2025-07-27 14:14:05 -04:00
Bart Thomee
ca1c2134ab Debug outputs 2025-07-27 14:09:50 -04:00
Bart Thomee
ea4f39baae Debug outputs 2025-07-27 14:08:27 -04:00
Bart Thomee
1291b574b6 Debug outputs 2025-07-27 14:06:44 -04:00
Bart Thomee
d05f6c1f90 Debug outputs 2025-07-27 14:05:23 -04:00
Bart Thomee
bd70aad699 Debug outputs 2025-07-27 14:04:49 -04:00
Bart Thomee
a5217e433d Bypass generate-matrix job 2025-07-27 12:53:52 -04:00
Bart Thomee
b869902453 Bypass generate-matrix job 2025-07-27 12:49:22 -04:00
Bart Thomee
b6ff27f51f Debug outputs 2025-07-27 12:46:31 -04:00
Bart Thomee
17b6235cc0 Debug outputs 2025-07-27 12:43:53 -04:00
Bart Thomee
355438b29c Debug outputs 2025-07-27 12:43:22 -04:00
Bart Thomee
0b44deb4e4 Debug outputs 2025-07-27 12:38:21 -04:00
Bart Thomee
b3637549f3 Fix strategy matrix outputs 2025-07-27 12:27:59 -04:00
Bart Thomee
a6c63dbc0c Fix workflow vs. action 2025-07-27 12:18:35 -04:00
Bart Thomee
7c9aad6e9f Fix workflow vs. action 2025-07-27 12:12:52 -04:00
Bart Thomee
5c4c658c38 Fix workflow vs. action 2025-07-27 12:08:13 -04:00
Bart Thomee
51a04d8c11 Use different concurrency groups for reusable workflows 2025-07-27 12:06:16 -04:00
Bart Thomee
74930b1bfe Use different concurrency groups for reusable workflows 2025-07-27 12:05:20 -04:00
Bart Thomee
a9d7393c42 Fix workflow vs. action 2025-07-27 12:01:12 -04:00
Bart Thomee
346666695b Fix workflow vs. action 2025-07-27 12:00:02 -04:00
Bart Thomee
a3a5e57e18 env is unavailable to pass to reusable workflows in 'with' 2025-07-27 11:53:43 -04:00
Bart Thomee
7acf9f517d Secrets cannot have types 2025-07-27 11:15:38 -04:00
Bart Thomee
b260565373 env is unavailable to pass to reusable workflows in 'with' 2025-07-27 11:13:17 -04:00
Bart Thomee
4fc0c9c1f2 env is unavailable to pass to reusable workflows in 'with' 2025-07-27 11:11:49 -04:00
Bart Thomee
b698085d8d env is unavailable to pass to reusable workflows in 'with' 2025-07-27 11:03:58 -04:00
Bart Thomee
6b34810222 Temporarily remove comments to check if they confuse GitHub 2025-07-27 10:57:49 -04:00
Bart Thomee
323a3ed4b6 Use workflow_call 2025-07-27 10:51:24 -04:00
Bart Thomee
915b4568f3 Use workflow_call 2025-07-27 10:50:31 -04:00
Bart Thomee
4e7480125f Use workflow_call 2025-07-27 10:49:00 -04:00
Bart Thomee
d250365900 Use workflow_call 2025-07-27 10:47:30 -04:00
Bart Thomee
8030670edf Use workflow_call 2025-07-27 10:42:53 -04:00
Bart Thomee
521cf7f157 Temporarily comment out ARM as Clang builds fail 2025-07-26 20:55:37 -04:00
Bart Thomee
e5d99954ca Temporarily comment out ARM as Clang builds fail 2025-07-26 20:54:45 -04:00
Bart Thomee
71336d8f19 Composite actions must have a shell defined in each step 2025-07-26 19:13:06 -04:00
Bart Thomee
d283eb1287 Remove lsb_release check 2025-07-26 19:10:03 -04:00
Bart Thomee
8d9c1719c1 Use jq to generate the strategy matrix 2025-07-26 19:08:37 -04:00
Bart Thomee
e6535d64cb Use jq to generate the strategy matrix 2025-07-26 19:06:21 -04:00
Bart Thomee
15ffa2c71d Chomp newlines and use different random generator 2025-07-26 15:46:02 -04:00
Bart Thomee
a8753ebed1 Use different delimiter per output variable 2025-07-26 15:42:36 -04:00
Bart Thomee
9659fc8e05 Work around tr: write error 2025-07-26 15:39:57 -04:00
Bart Thomee
86c0c4cf48 Rename image to container 2025-07-26 15:29:26 -04:00
Bart Thomee
1e29ebde08 Rename image to container 2025-07-26 15:28:05 -04:00
Bart Thomee
4779fb28fb Revamp CI workflows 2025-07-26 15:17:11 -04:00
Bronek Kozicki
7179ce9c58 Build options cleanup (#5581)
As we no longer support old compiler versions, we are bringing back some warnings by removing no longer relevant `-Wno-...` options.
2025-07-25 15:48:22 -04:00
Bart
921aef9934 Updates Conan dependencies: Boost 1.86 (#5264) 2025-07-25 11:54:02 -04:00
Bronek Kozicki
e7a7bb83c1 VaultWithdraw destination account bugfix (#5572)
#5224 added (among other things) a `VaultWithdraw` transaction that allows setting the recipient of the withdrawn funds in the `Destination` transaction field. This technically turns this transaction into a payment, and in some respect the implementation does follow payment rules, e.g. enforcement of `lsfRequireDestTag` or `lsfDepositAuth`, or that MPT transfer has destination `MPToken`. However for IOUs, it missed verification that the destination account has a trust line to the asset issuer. Since the default behavior of `accountSendIOU` is to create this trust line (if missing), this is what `VaultWithdraw` currently does. This is incorrect, since the `Destination` might not be interested in holding the asset in question; this basically enables spammy transfers. This change, therefore, removes automatic creation of a trust line to the `Destination` account in `VaultWithdraw`.
2025-07-25 13:53:25 +00:00
Bart
5c2a3a2779 refactor: Update rocksdb (#5568)
This change updates RocksDB to its latest version. RocksDB is backward-compatible, so even though this is a major version bump, databases created with previous versions will continue to function.

The external RocksDB folder is removed, as the latest version available via Conan Center no longer needs custom patches.
2025-07-24 14:53:14 -04:00
Bronek Kozicki
b2960b9e7f Switch instrumentation workflow to use dependencies (#5607)
Before `XRPLF/ci` images, we did not have a `dependencies:` job for clang-16, so `instrumentation:` had to build its own dependencies. Now we have clang-16 Conan dependencies built in a separate job that can be used.
2025-07-24 09:20:50 -04:00
Bronek Kozicki
5713f9782a chore: Rename conan profile to default (#5599)
This change renames the `libxrpl` profile to `default` to make it more usable.
2025-07-24 10:35:47 +00:00
Chenna Keshava B S
60e340d356 Include network_id in validations and subscription stream responses (#5579)
This change includes `network_id` data in the validations and ledger subscription stream responses, as well as unit tests to validate the response fields. Fixes #4783
2025-07-23 17:53:18 +00:00
Bronek Kozicki
80d82c5b2b Add support for DomainID in MPTokenIssuance transactions (#5509)
This change adds support for `DomainID` to existing transactions `MPTokenIssuanceCreate` and `MPTokenIssuanceSet`.

In #5224 `DomainID` was added as an access control mechanism for `SingleAssetVault`. The actual implementation of this feature lies in `MPToken` and `MPTokenIssuance`, hence it makes sense to enable the use of `DomainID` also in `MPTokenIssuanceCreate` and `MPTokenIssuanceSet`, following same rules as in Vault:

* `MPTokenIssuanceCreate` and `MPTokenIssuanceSet` can only set `DomainID` if flag `MPTRequireAuth` is set.
* `MPTokenIssuanceCreate` requires that `DomainID` be a non-zero, uint256 number.
* `MPTokenIssuanceSet` allows `DomainID` to be zero (or empty) in which case it will remove `DomainID` from the `MPTokenIssuance` object.

The change is amendment-gated by `SingleAssetVault`. This is a non-breaking change because `SingleAssetVault` amendment is `Supported::no`, i.e. at this moment considered a work in progress, which cannot be enabled on the network.
2025-07-23 13:21:30 -04:00
Vlad
433eeabfa5 chore: Remove unused code after flow cross retirement (#5575)
After the `FlowCross` amendment was retired (#5562), there was still some unused code left. This change removes the remaining remnants.
2025-07-23 13:57:51 +00:00
Jingchen
faa781b71f Remove obsolete owner pays fee feature and XRPL_ABANDON stanza (#5550)
If a feature was never voted on then it is safe to remove.
2025-07-23 13:27:41 +00:00
Valentin Balaschenko
c233df720a refactor: Makes HashRouter flags more type-safe (#5371)
This change addresses the issue #5336: Refactor HashRouter flags to be more type-safe.

* Switched numeric flags to enum type.
* Updated unit tests
2025-07-23 12:03:12 +00:00
Bronek Kozicki
7ff4f79d30 Fix clang-format CI job (#5598)
For jobs running in containers, $GITHUB_WORKSPACE and ${{ github.workspace }} might not be the same directory. The actions/checkout step is supposed to checkout into `$GITHUB_WORKSPACE` and then add it to safe.directory (see instructions at https://github.com/actions/checkout), but that's apparently not happening for some container images. We can't be sure what is actually happening, so we preemptively add both directories to `safe.directory`. See also the GitHub issue opened in 2022 that still has not been resolved https://github.com/actions/runner/issues/2058.
2025-07-23 10:44:18 +00:00
Luc des Trois Maisons
60909655d3 Restructure beast::rngfill (#5563)
The current implementation of rngfill is prone to false warnings from GCC about array bounds violations. Looking at the code, the implementation naively manipulates both the bytes count and the buffer pointer directly to ensure the trailing memcpy doesn't overrun the buffer. As expressed, there is a data dependency on both fields between loop iterations.

Now, ideally, an optimizing compiler would realize that these dependencies were unnecessary and end up restructuring its intermediate representation into a functionally equivalent form with them absent. However, the point at which this occurs may be disjoint from when warning analyses are performed, potentially rendering them more difficult to determine precisely.

In addition, it may also consume a portion of the budget the optimizer has allocated to attempting to improve a translation unit's performance. Given this is a function template which requires context-sensitive instantiation, this code would be more prone than most to being inlined, with a decrease in optimization budget corresponding to the effort the optimizer has already expended, having already optimized one or more calling functions. Thus, the scope for impacting the ultimate quality of the code generated is elevated.

For this change, we rearrange things so that the location and contents of each memcpy can be computed independently, relying on a simple loop iteration counter as the only changing input between iterations.
2025-07-22 11:42:43 -04:00
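For illustration of the restructuring described in the commit above, here is a minimal sketch (not the actual `beast::rngfill` source) in which each `memcpy`'s destination offset is derived from the loop counter alone, so no pointer or byte count is mutated across iterations; the function name and the generator concept used here are assumptions.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <random>

// Sketch: fill `bytes` of `buffer` from a generator, computing each memcpy's
// destination and size from the iteration index rather than from variables
// mutated between iterations.
template <class Generator>
void
rngfill_sketch(void* buffer, std::size_t bytes, Generator& g)
{
    using result_type = typename Generator::result_type;
    constexpr std::size_t chunk = sizeof(result_type);
    auto* const out = static_cast<std::uint8_t*>(buffer);

    std::size_t const full = bytes / chunk;  // number of whole chunks
    std::size_t const rest = bytes % chunk;  // trailing partial chunk

    for (std::size_t i = 0; i < full; ++i)
    {
        result_type const v = g();
        std::memcpy(out + i * chunk, &v, chunk);  // offset depends only on i
    }
    if (rest != 0)
    {
        result_type const v = g();
        std::memcpy(out + full * chunk, &v, rest);  // partial tail copy
    }
}

int main()
{
    std::mt19937_64 g{42};
    std::uint8_t buf[37];
    rngfill_sketch(buf, sizeof(buf), g);
}
```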
Bronek Kozicki
03e46cd026 Remove include(default) from libxrpl profile (#5587)
Remove `include(default)` from `conan/profiles/libxrpl`. This means that we will now rely on compiler workarounds stored elsewhere e.g. in global.conf.
2025-07-21 14:03:53 +00:00
Vito Tumas
e95683a0fb refactor: Change boost::shared_mutex to std::shared_mutex (#5576)
This change reverts the usage of boost::shared_mutex back to std::shared_mutex. The change was originally introduced as a workaround for a bug in glibc 2.28 and older versions, which could cause threads using std::shared_mutex to stall. This issue primarily affected Ubuntu 18.04 and earlier distributions, which we no longer support.
2025-07-21 13:14:22 +00:00
Jingchen
13353ae36d Fix macos runner (#5585)
This change fixes the MacOS pipeline issue by restricting GitHub to choosing from the existing runners, ensuring the new experimental runners are excluded until they are ready.
2025-07-21 12:22:32 +00:00
Chenna Keshava B S
1a40f18bdd Remove the type filter from "ledger" RPC command (#4934)
This issue was reported on the Javascript client library: XRPLF/xrpl.js#2611

The type filter (Note: as of the latest version of rippled, the type parameter is deprecated) does not work as expected. This PR removes the type filter from the ledger command.
2025-07-18 17:58:46 +00:00
Bart
90e6380383 refactor: Update date, libarchive, nudb, openssl, sqlite3, xxhash packages (#5567)
This PR updates several dependencies to their latest versions. Not all dependencies have been updated, as some need to be patched and some require additional code changes due to backward incompatibilities introduced by the version bump.
2025-07-18 16:55:15 +00:00
Vlad
8bfaa7fe0a test: Run unit tests regardless of 'Supported' amendment status (#5537) 2025-07-16 11:47:54 +00:00
Vlad
c9135a63cd Retire Flow Cross amendment (#5562)
The FlowCross amendment is now permanently enabled, so all code branches that have this amendment disabled are removed.
2025-07-16 06:53:13 -04:00
Michael Legleux
452263eaa5 chore: Update CI to use Conan 2 (#5556)
This is a minimally invasive update to use Conan 2 provided by our new build images.
2025-07-15 22:17:22 +00:00
yinyiqian1
8aa94ea09a fixAMMClawbackRounding: adjust last holder's LPToken balance (#5513)
Due to rounding, the LPTokenBalance of the last LP might not match the LP's trustline balance. This was fixed for `AMMWithdraw` in `fixAMMv1_1` by adjusting the LPTokenBalance to be the same as the trustline balance. Since `AMMClawback` is also performing a withdrawal, we need to adjust LPTokenBalance as well in `AMMClawback`.

This change includes:
1. Refactored the `verifyAndAdjustLPTokenBalance` function in `AMMUtils`, which both `AMMWithdraw` and `AMMClawback` call to adjust LPTokenBalance.
2. Added the unit test `testLastHolderLPTokenBalance` to test the scenario.
3. Modified the existing unit tests for `fixAMMClawbackRounding`.
2025-07-11 20:03:28 +00:00
Bronek Kozicki
258ba71363 chore: Add gcc-12 workaround (#5554)
This change silences a dummy warning, which is breaking builds with GCC 12 (but not newer versions of GCC) in release mode only.
2025-07-11 18:57:09 +00:00
Shawn Xie
b8626ea3c6 Add MPT related txns into issuer's account history (#5530)
Currently there is no easy way to track MPT related transactions for the issuer. This change allows MPT transactions to show up on issuer's AccountTx RPC (to align with how IOUs work).
2025-07-11 17:50:03 +00:00
Vlad
6534757d85 chore: Remove unused headers (#5526) 2025-07-10 18:15:42 +00:00
Denis Angell
8e94ea3154 fix: add allowTrustLineLocking flag for account_info (#5525)
* Update the `account_info` API so that the `allowTrustLineLocking` flag is included in the response.
* The proposed `TokenEscrow` amendment added an `allowTrustLineLocking` flag in the `AccountRoot` object.
* In the API response, under `account_flags`, there is now an `allowTrustLineLocking` field with a boolean (`true` or `false`) value.
* For reference, the XLS-85 Token-Enabled Escrows implementation can be found in https://github.com/XRPLF/rippled/pull/5185
2025-07-10 16:29:51 +00:00
Bronek Kozicki
b113190563 Downgrade required CMake version for Antithesis SDK (#5548)
The current version was copied from `antithesis-sdk-cpp` but there is no logical reason to require this specific version of CMake. This change downgrades the version to make the project build with older CMake versions.
2025-07-10 11:46:02 -04:00
Ayaz Salikhov
358b7f50a7 fix: Link with boost libraries explicitly (#5546)
Having `boost::boost` in `self.requires` makes clio link with all boost libraries. Additionally, several Boost stacktrace backends were all being linked in, which violates ODR.
This change fixes the problem.
2025-07-10 06:14:27 -04:00
Bronek Kozicki
f47e2f4e82 chore: Fix compilation error with clang-20 and cleanup (#5543)
Removes clutter for old compilers, defaults to non-unity builds in CMake to match conanfile.py, and adds a workaround for clang-20 compilation errors.
2025-07-09 17:47:34 +00:00
Bronek Kozicki
a7eea9546f test: Remove circular jtx.h dependencies (#5544)
Circular includes in header files can yield unpredictable results.
2025-07-09 08:43:11 -04:00
Jingchen
9874d47d7f Decouple CredentialHelpers from xrpld/app/tx (#5487)
This PR refactors `CredentialHelpers` and removes some unnecessary dependencies as a step of modularization.

The ledger component is almost independent except that it references `MPTokenAuthorize` and `CredentialHelpers.h`, and the latter further references `Transactor.h`. This PR partially clears the path to modularizing the ledger component and decouples `CredentialHelpers` from xrpld.
2025-07-03 14:27:37 +00:00
Mayukha Vadari
c2f3e2e263 fix: crash when trace-logging in tests (#5529)
This PR fixes a crash in tests when the test `Env` is run at trace/debug log level.

This issue only affects tests, and only when logging at trace/debug level, so it is really only relevant during rippled development and does not affect production servers.
2025-07-02 19:10:25 +00:00
Vlad
e18f27f5f7 test: switch some unit tests to doctest (#5383)
This change moves some tests from the current unit tests that are compiled into the rippled binary to using the doctest framework.
2025-06-26 19:35:31 +00:00
Jingchen
df6daf0d8f Add XRPL_ABANDON and use it to abandon OwnerPaysFee (#5510) 2025-06-26 12:09:05 -04:00
Jingchen
e9d46f0bfc Remove OwnerPaysFee as it's never fully supported (#5435)
The OwnerPaysFee amendment was never fully supported, and this change removes the feature to the extent possible.
2025-06-24 18:56:58 +00:00
Bart
42fd74b77b Removes release notes from codebase (#5508) 2025-06-24 13:10:00 -04:00
tequ
c55ea56c5e Add nftoken_id, nftoken_ids, offer_id to meta for transaction stream (#5230) 2025-06-24 09:02:22 -04:00
Michael Legleux
1e01cd34f7 Set version to 2.5.0 2025-06-23 10:13:01 -07:00
Alex Kremer
e2fa5c1b7c chore: Change libXRPL check conan remote to dev (#5482)
This change aligns the Conan remote used by the libXRPL Clio compatibility check workflow with the recent changes applied to Clio.
2025-06-20 17:02:16 +00:00
Ed Hennis
fc0984d286 Require a message on "Application::signalStop" (#5255)
This change adds a message parameter to Application::signalStop for extra context.
2025-06-20 16:24:34 +00:00
Valentin Balaschenko
8b3dcd41f7 refactor: Change getNodeFat Missing Node State Tree error into warning (#5455) 2025-06-20 15:44:42 +00:00
Denis Angell
8f2f5310e2 Fix: Improve error handling in Batch RPC response (#5503) 2025-06-18 17:46:45 -04:00
Michael Legleux
edb4f0342c Set version to 2.5.0-rc2 2025-06-11 17:10:45 -07:00
yinyiqian1
ea17abb92a fix: Ensure delegate tests do not silently fail with batch (#5476)
The tests that ensure `tfInnerBatchTxn` won't block delegated transactions silently fail in `Delegate_test.cpp`. This change removes these cases from that file and adds them to `Batch_test.cpp` instead where they do not silently fail, because there the batch delegate results are explicitly checked. Moving them to that file further avoids refactoring many helper functions.
2025-06-11 13:21:24 +08:00
Mayukha Vadari
35a40a8e62 fix: Improve multi-sign usage of simulate (#5479)
This change allows users to submit simulate requests from a multi-sign account without needing to specify the accounts that are doing the multi-signing, and fixes an error with simulate that allowed double-"signed" (both single-sign and multi-sign public keys are provided) transactions.
2025-06-10 14:47:27 +08:00
Ed Hennis
d494bf45b2 refactor: Collapse some split log messages into one (#5347)
Multi-line log messages are hard to work with. Writing these handful of related messages as one message should make the log a tiny bit easier to manage.
2025-06-06 16:01:02 +00:00
Vlad
8bf4a5cbff chore: Remove external project build cores division (#5475)
The CMake statements that make it seem as if the number of cores used to build external project dependencies is halved don't actually do anything. This change removes these statements.
2025-06-05 13:37:30 +00:00
Denis Angell
58c2c82a30 fix: Amendment-guard TokenEscrow preclaim and expand tests (#5473)
This change amendment-guards the preclaim for `TokenEscrow`, as well as expands tests to increase code coverage.
2025-06-05 12:54:45 +00:00
Michael Legleux
11edaa441d Set version to 2.5.0-rc1 (#5472) 2025-06-04 17:55:23 +00:00
yinyiqian1
a5e953b191 fix: Add tecNO_DELEGATE_PERMISSION and fix flags (#5465)
* Adds `tecNO_DELEGATE_PERMISSION` for unauthorized transactions sent by a delegated account.
* Returns `tecNO_TARGET` instead of `terNO_ACCOUNT` for the `DelegateSet` transaction if the delegated account does not exist.
* Fixes the issue of `tfFullyCanonicalSig` and `tfInnerBatchTxn` blocking transactions by adding `tfUniversal` to the permission-related masks in `txFlags.h`.
2025-06-03 22:20:29 +00:00
Mark Travis
506ae12a8c Increase network i/o capacity (#5464)
The change increases the default network I/O worker thread pool size from 2 to 6. This will improve stability, as worker thread saturation correlates to desyncs, particularly on high-traffic peers, such as hubs.
2025-06-03 21:33:09 +00:00
Ayaz Salikhov
0310c5cbe0 fix: Specify transitive_headers when building with Conan 2 (#5462)
To be able to consume `rippled` in Conan 2, the recipe should specify transitive_headers for external libraries that are present in the exported header files. This change retains compatibility with Conan 1, where this flag was not present.
2025-06-03 17:33:32 +00:00
Denis Angell
053e1af7ff Add support for XLS-85 Token Escrow (#5185)
- Specification: https://github.com/XRPLF/XRPL-Standards/pull/272
- Amendment: `TokenEscrow`
- Enables escrowing of IOU and MPT tokens in addition to native XRP.
- Allows accounts to lock issued tokens (IOU/MPT) in escrow objects, with support for freeze, authorization, and transfer rates.
- Adds new ledger fields (`sfLockedAmount`, `sfIssuerNode`, etc.) to track locked balances for IOU and MPT escrows.
- Updates EscrowCreate, EscrowFinish, and EscrowCancel transaction logic to support IOU and MPT assets, including proper handling of trustlines and MPT authorization, transfer rates, and locked balances.
- Enforces invariant checks for escrowed IOU/MPT amounts.
- Extends GatewayBalances RPC to report locked (escrowed) balances.
2025-06-03 12:51:55 -04:00
Vlad
7e24adbdd0 fix: Address NFT interactions with trustlines (#5297)
The changes are focused on fixing NFT transactions bypassing the trustline authorization requirement and potential invariant violation when interacting with deep frozen trustlines.
2025-06-02 16:13:20 +00:00
Gregory Tsipenyuk
621df422a7 fix: Add AMMv1_3 amendment (#5203)
* Add AMM bid/create/deposit/swap/withdraw/vote invariants:
  - Deposit, Withdrawal invariants: `sqrt(asset1Balance * asset2Balance) >= LPTokens`.
  - Bid: `sqrt(asset1Balance * asset2Balance) > LPTokens` and the pool balances don't change.
  - Create: `sqrt(asset1Balance * assetBalance2) == LPTokens`.
  - Swap: `asset1BalanceAfter * asset2BalanceAfter >= asset1BalanceBefore * asset2BalanceBefore`
     and `LPTokens` don't change.
  - Vote: `LPTokens` and pool balances don't change.
  - All AMM and swap transactions: amounts and tokens are greater than zero, except on withdrawal if all tokens
    are withdrawn.
* Add AMM deposit and withdraw rounding to ensure AMM invariant:
  - On deposit, tokens out are rounded downward and deposit amount is rounded upward.
  - On withdrawal, tokens in are rounded upward and withdrawal amount is rounded downward.
* Add Order Book Offer invariant to verify consumed amounts. Consumed amounts are less than the offer.
* Fix Bid validation. `AuthAccount` can't have duplicate accounts or the submitter account.
2025-06-02 09:52:10 -04:00
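As a rough numeric illustration of the deposit/withdrawal invariant listed above, the sketch below checks `sqrt(asset1Balance * asset2Balance) >= LPTokens` on plain doubles; rippled's actual invariant checkers operate on its own amount types, so the function name and types here are assumptions.

```cpp
#include <cassert>
#include <cmath>

// Sketch of the AMM deposit/withdraw invariant: the geometric mean of the two
// pool balances must be at least the outstanding LPTokens.
bool
ammInvariantHolds(double asset1Balance, double asset2Balance, double lpTokens)
{
    return std::sqrt(asset1Balance * asset2Balance) >= lpTokens;
}

int main()
{
    // A pool created with 100 x 100 issues 100 LPTokens: sqrt(100 * 100) == 100.
    assert(ammInvariantHolds(100.0, 100.0, 100.0));
    // Rounding the withdrawal amount down keeps the invariant intact:
    // sqrt(90 * 100) ~= 94.87 >= 94 remaining LPTokens.
    assert(ammInvariantHolds(90.0, 100.0, 94.0));
}
```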
Shawn Xie
0a34b5c691 Add support for XLS-81 Permissioned DEX (#5404)
Modified transactions:
- OfferCreate
- Payment

Modified RPCs:
- book_changes
- subscribe
- book_offers
- ripple_path_find
- path_find

Spec: https://github.com/XRPLF/XRPL-Standards/pull/281
2025-05-30 13:24:48 -04:00
Matt Mankins
e0bc3dd51f docs: update example keyserver host in SECURITY.md (#5460) 2025-05-30 08:46:08 -04:00
Bronek Kozicki
dacecd24ba Fix unit build error (#5459)
This change fixes the issue that there is a `using namespace` statement inside a namespace scope.
2025-05-29 20:53:31 +00:00
Mayukha Vadari
05105743e9 chore[tests]: improve env.meta usage (#5457)
This commit makes the ledger close in env.meta conditional on whether the ledger has already been closed (i.e. the current ledger doesn't have any transactions in it). This change will make it a bit easier to use, as it will still work if you close the ledger outside of this usage. Previously, if you accidentally closed the ledger outside of the meta function, it would segfault and was incredibly difficult to debug.
2025-05-29 16:28:09 +00:00
Bronek Kozicki
9e1fe9a85e Fix: Improve handling of expired credentials in VaultDeposit (#5452)
This change returns `tecEXPIRED` from VaultDeposit to allow the Transactor to remove the expired credentials.
2025-05-28 10:28:18 -04:00
Vito Tumas
d71ce51901 feat: improve squelching configuration (#5438)
This commit introduces the following changes:
* Renames the `vp_enable` config option (which enables squelching for validators) to `vp_base_squelch_enable`.
* Removes `vp_squelch` config option which was used to configure whether to send squelch messages to peers or not. With this flag removed, if squelching is enabled, squelch messages will be sent. This was an option used for debugging.
* Introduces a temporary `vp_base_squelch_max_trusted_peers` config option to change the max number of peers who are selected as validator message sources. This is a temporary option, which will be removed once a good value is found.
* Adds a traffic counter to count the number of times peers ignored squelch messages and kept sending messages for squelched validators.
* Moves the decision whether squelching is enabled and ready into Slot.h.
2025-05-28 06:30:03 -04:00
Michael Legleux
be668ee26d chore: Update CPP ref source (#5453) 2025-05-27 20:46:25 +00:00
Bart
cae5294b4e chore: Rename docs job (#5398) 2025-05-27 20:03:23 +00:00
Elliot.
cd777f79ef docs: add -j $(nproc) to BUILD.md (#5288)
This improves build times.
2025-05-27 19:11:13 +00:00
Valentin Balaschenko
8b9e21e3f5 docs: Update build instructions for Ubuntu 22.04+ (#5292) 2025-05-27 18:32:25 +00:00
Denis Angell
2a61aee562 Add Batch feature (XLS-56) (#5060)
- Specification: [XRPLF/XRPL-Standards 56](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-0056d-batch/README.md)
- Amendment: `Batch`
- Implements execution of multiple transactions within a single batch transaction with four execution modes: `tfAllOrNothing`, `tfOnlyOne`, `tfUntilFailure`, and `tfIndependent`.
- Enables atomic multi-party transactions where multiple accounts can participate in a single batch, with up to 8 inner transactions and 8 batch signers per batch transaction.
- Inner transactions use `tfInnerBatchTxn` flag with zero fees, no signature, and empty signing public key.
- Inner transactions are applied after the outer batch succeeds via the `applyBatchTransactions` function in apply.cpp.
- Network layer prevents relay of transactions with `tfInnerBatchTxn` flag - each peer applies inner transactions locally from the batch.
- Batch transactions are excluded from AccountDelegate permissions but inner transactions retain full delegation support.
- Metadata includes `ParentBatchID` linking inner transactions to their containing batch for traceability and auditing.
- Extended STTx with batch-specific signature verification methods and added protocol structures (`sfRawTransactions`, `sfBatchSigners`).
2025-05-23 19:53:53 +00:00
Bronek Kozicki
40ce8a8833 fix: Fix pseudo-account ID calculation (#5447)
Before #5224, the pseudoaccount ID was calculated using a prefix expressed as a `std::uint16_t`. The refactoring to move the pseudoaccount ID calculation to View.cpp had accidentally changed the prefix type to `int` (derived from `auto i = 0`), which in turn changed the length of the input to `sha512Half` from 2 bytes to 4, altering the result.

This resulted in a different ID of the pseudoaccount calculated from the function after the refactoring, breaking the ledger. This impacts AMMCreate, even when the `SingleAssetVault` amendment is not active. This change restores the prefix type to `std::uint16_t`.
2025-05-23 14:05:36 +00:00
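The sketch below illustrates why the prefix type matters for the hash input: an `int` deduced from `auto i = 0` contributes 4 bytes, while the intended `std::uint16_t` contributes 2, so a byte-oriented digest over that input differs even for the same numeric value. `sha512Half` itself is not reproduced here; only the size difference is shown.

```cpp
#include <cstdint>
#include <iostream>

// Sketch: the number of bytes a prefix contributes to a byte-oriented hash
// such as sha512Half depends on its static type, not its numeric value.
int main()
{
    auto prefixAsAuto = 0;          // deduced as int: 4 bytes would be hashed
    std::uint16_t prefixAsU16 = 0;  // 2 bytes hashed, the intended prefix type

    std::cout << "int prefix contributes      " << sizeof(prefixAsAuto)
              << " bytes\n"
              << "uint16_t prefix contributes " << sizeof(prefixAsU16)
              << " bytes\n";
}
```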
Bronek Kozicki
7713ff8c5c Add codecov badge, raise .codecov.yml thresholds (#5428) 2025-05-22 14:43:41 +00:00
Olek
70371a4344 Fix initializer list initialization for GCC-15 (#5443) 2025-05-21 13:28:18 -04:00
Bronek Kozicki
e514de76ed Add single asset vault (XLS-65d) (#5224)
- Specification: XRPLF/XRPL-Standards#239
- Amendment: `SingleAssetVault`
- Implements a vault feature used to store a fungible asset (XRP, IOU, or MPT, but not NFT) and to receive shares in the vault (an MPT) in exchange.
- A vault can be private or public.
- A private vault can use permissioned domains, subject to the `PermissionedDomains` amendment.
- Shares can be exchanged back into asset with `VaultWithdraw`.
- Permissions on the asset in the vault are transitively applied on shares in the vault.
- Issuer of the asset in the vault can clawback with `VaultClawback`.
- Extended `MPTokenIssuance` with `DomainID`, used by the permissioned domain on the vault shares.

Co-authored-by: John Freeman <jfreeman08@gmail.com>
2025-05-20 14:06:41 -04:00
Bart
dd62cfcc22 fix: Update path in CODEOWNERS (#5440) 2025-05-20 15:24:07 +00:00
Michael Legleux
09690f1b38 Set version to 2.5.0-b1 2025-05-18 20:39:18 +01:00
Valentin Balaschenko
380ba9f1c1 Fix: Resolve slow test on macOS pipeline (#5392)
Using std::barrier performs extremely poorly (~1 hour vs ~1 minute to run the test suite) in certain macOS environments.
To unblock our macOS CI pipeline, std::barrier has been replaced with a custom mutex-based barrier (Barrier) that significantly improves performance without compromising correctness.
2025-05-16 10:31:51 +00:00
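A minimal sketch of a mutex/condition_variable barrier of the kind described above, assuming a simple reusable "generation" design; this is illustrative only and not the `Barrier` class from the commit.

```cpp
#include <condition_variable>
#include <cstddef>
#include <mutex>
#include <thread>
#include <vector>

// Sketch: a reusable barrier built on std::mutex + std::condition_variable,
// usable in place of std::barrier for simple "wait for N threads" rendezvous.
class Barrier
{
    std::mutex m_;
    std::condition_variable cv_;
    std::size_t const count_;
    std::size_t waiting_ = 0;
    std::size_t generation_ = 0;

public:
    explicit Barrier(std::size_t count) : count_(count)
    {
    }

    void
    arrive_and_wait()
    {
        std::unique_lock lock(m_);
        auto const gen = generation_;
        if (++waiting_ == count_)
        {
            ++generation_;  // release everyone and reset for reuse
            waiting_ = 0;
            cv_.notify_all();
        }
        else
        {
            cv_.wait(lock, [&] { return gen != generation_; });
        }
    }
};

int main()
{
    Barrier b(4);
    std::vector<std::thread> threads;
    for (int i = 0; i < 4; ++i)
        threads.emplace_back([&b] { b.arrive_and_wait(); });
    for (auto& t : threads)
        t.join();
}
```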
brettmollin
c3e9380fb4 fix: Update validators-example.txt fix xrplf example URL (#5384) 2025-05-16 09:49:14 +00:00
Jingchen
e3ebc253fa fix: Ensure that coverage file generation is atomic. (#5426)
When unit tests run in parallel, multiple threads can write into one file and corrupt it, after which gcovr won't be able to parse the corrupted file. This change adds -fprofile-update=atomic as instructed by https://gcc.gnu.org/bugzilla/show_bug.cgi?id=68080.
2025-05-12 14:54:01 +00:00
Bart
c6c7c84355 Configure CODEOWNERS for changes to RPC code (#5266)
To ensure changes to any RPC-related code are compatible with other services, such as Clio, the RPC team will be required to review them.
2025-05-12 12:42:03 +00:00
yinyiqian1
28f50cb7cf fix: enable LedgerStateFix for delegation (#5427) 2025-05-10 10:36:11 -04:00
Vito Tumas
3e152fec74 refactor: use east const convention (#5409)
This change refactors the codebase to use the "east const convention", and adds a clang-format rule to follow this convention.
2025-05-08 11:00:42 +00:00
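For readers unfamiliar with the convention, a tiny sketch of "east const" placement, where the qualifier sits to the right of what it modifies; the declarations are made up for illustration.

```cpp
#include <string>

// Sketch: the same declarations written in the "east const" style, i.e. the
// qualifier follows what it qualifies ("const int" becomes "int const").
int const answer = 42;
std::string const greeting = "hello";

// A const pointer to a const int reads right-to-left without surprises.
int const* const answerPtr = &answer;

int main()
{
    return *answerPtr == answer ? 0 : 1;
}
```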
yinyiqian1
2db2791805 Add PermissionDelegation feature (#5354)
This change implements the account permission delegation described in XLS-75d, see https://github.com/XRPLF/XRPL-Standards/pull/257.

* Introduces transaction-level and granular permissions that can be delegated to other accounts.
* Adds `DelegateSet` transaction to grant specified permissions to another account.
* Adds `ltDelegate` ledger object to maintain the permission list for delegating/delegated account pair.
* Adds an optional `Delegate` field in common fields, allowing a delegated account to send transactions on behalf of the delegating account within the granted permission scope. The `Account` field remains the delegating account; the `Delegate` field specifies the delegated account. The transaction is signed by the delegated account.
2025-05-08 06:14:02 -04:00
Vito Tumas
9ec2d7f8ff Enable passive squelching (#5358)
This change updates the squelching logic to accept squelch messages for untrusted validators. As a result, servers will also squelch untrusted validator messages, reducing the duplicate traffic they generate.

In particular:
* Updates squelch message handling logic to squelch messages for all validators, not only trusted ones.
* Updates the logic to send squelch messages to peers that don't squelch themselves.
* Increases the threshold for the number of messages that a peer has to deliver to consider it as a candidate for validator messages.
2025-05-02 11:01:45 -04:00
Ed Hennis
4a084ce34c Improve transaction relay logic (#4985)
Combines four related changes:
1. "Decrease `shouldRelay` limit to 30s." Pretty self-explanatory. Currently, the limit is 5 minutes, by which point the `HashRouter` entry could have expired, making this transaction look brand new (and thus causing it to be relayed back to peers which have sent it to us recently).
2.  "Give a transaction more chances to be retried." Will put a transaction into `LedgerMaster`'s held transactions if the transaction gets a `ter`, `tel`, or `tef` result. Old behavior was just `ter`.
     * Additionally, to prevent a transaction from being repeatedly held indefinitely, it must meet some extra conditions. (Documented in a comment in the code.)
3. "Pop all transactions with sequential sequences, or tickets." When a transaction is processed successfully, currently, one held transaction for the same account (if any) will be popped out of the held transactions list, and queued up for the next transaction batch. This change pops all transactions for the account, but only if they have sequential sequences (for non-ticket transactions) or use a ticket. This issue was identified from interactions with @mtrippled's #4504, which was merged, but unfortunately reverted later by #4852. When the batches were spaced out, it could potentially take a very long time for a large number of held transactions for an account to get processed through. However, whether batched or not, this change will help get held transactions cleared out, particularly if a missing earlier transaction is what held them up.
4. "Process held transactions through existing NetworkOPs batching." In the current processing, at the end of each consensus round, all held transactions are directly applied to the open ledger, then the held list is reset. This bypasses all of the logic in `NetworkOPs::apply` which, among other things, broadcasts successful transactions to peers. This means that the transaction may not get broadcast to peers for a really long time (5 minutes in the current implementation, or 30 seconds with this first commit). If the node is a bottleneck (either due to network configuration, or because the transaction was submitted locally), the transaction may not be seen by any other nodes or validators before it expires or causes other problems.
2025-05-01 13:58:18 -04:00
Vito Tumas
3502df2174 fix: Replaces random endpoint resolution with sequential (#5365)
This change addresses an issue where `rippled` attempts to connect to an IPv6 address, even when the local network lacks IPv6 support, resulting in a "Network is unreachable" error.

The fix replaces the custom endpoint selection logic with `boost::async_connect`, which sequentially attempts to connect to available endpoints until one succeeds or all fail.
2025-04-28 15:38:55 -04:00
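A minimal sketch of the sequential-connect approach, using `boost::asio::async_connect` to try each resolved endpoint in order until one succeeds or all fail; the host and port are placeholders, not values from the commit.

```cpp
#include <boost/asio.hpp>
#include <iostream>

// Sketch: resolve a host and let boost::asio::async_connect walk the endpoint
// list sequentially, so an unreachable IPv6 endpoint falls through to IPv4.
int main()
{
    namespace asio = boost::asio;
    asio::io_context io;
    asio::ip::tcp::resolver resolver(io);
    asio::ip::tcp::socket socket(io);

    auto const endpoints = resolver.resolve("example.com", "443");
    asio::async_connect(
        socket,
        endpoints,
        [](boost::system::error_code const& ec,
           asio::ip::tcp::endpoint const& endpoint) {
            if (ec)
                std::cerr << "connect failed: " << ec.message() << "\n";
            else
                std::cout << "connected to " << endpoint << "\n";
        });

    io.run();
}
```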
Vlad
fa1e25abef chore: Small clarification to lsfDefaultRipple comment (#5410) 2025-04-25 15:21:27 +00:00
Denis Angell
217ba8dd4d fix: CTID to use correct ledger_index (#5408) 2025-04-24 10:24:10 -04:00
Ed Hennis
405f4613d8 chore: Run CI on PRs that are Ready or have the "DraftRunCI" label (#5400)
- Avoids costly overhead for idle PRs where the CI results don't add any
  value.
2025-04-11 22:20:59 +00:00
Mayukha Vadari
cba512068b refactor: Clean up test logging to make it easier to search (#5396)
This PR replaces the word `failed` with `failure` in any test names and renames some test files to fix MSVC warnings, so that it is easier to search through the test output to find tests that failed.
2025-04-11 09:07:42 +00:00
Valentin Balaschenko
1c99ea23d1 Temporary disable automatic triggering macOS pipeline (#5397)
We temporarily disable running unit tests on macOS on the CI pipeline while we are investigating the delays.
2025-04-10 21:58:29 +02:00
Denis Angell
c4308b216f fix: Adds CTID to RPC tx and updates error (#4738)
This change fixes a number of issues involved with CTID:
* CTID is not present on all RPC tx transactions.
* rpcWRONG_NETWORK is missing in ErrorCodes.cpp.
2025-04-10 12:38:52 +00:00
Wietse Wind
aafd2d8525 Fix: admin RPC webhook queue limit removal and timeout reduction (#5163)
When using subscribe at the admin RPC port to send webhooks for the transaction stream to a backend, on large(r) ledgers the endpoint receives fewer HTTP POSTs with TX information than the number of transactions in a ledger. This change removes the hardcoded queue length to avoid dropping TX notifications for the admin-only command. In addition, the per-request TTL for outgoing RPC HTTP calls has been reduced from 10 minutes to 30 seconds.
2025-04-10 06:37:24 +00:00
Denis Angell
a574ec6023 fix: fixPayChanV1 (#4717)
This change introduces a new fix amendment (`fixPayChanV1`) that prevents the creation of a new `PaymentChannelCreate` transaction with a `CancelAfter` time less than the current ledger time. It piggybacks off of fix1571.

Once the amendment is activated, creating a new `PaymentChannel` will require that if you specify the `CancelAfter` time/value, that value must be greater than or equal to the current ledger time.

Currently users can create a payment channel where the `CancelAfter` time is before the current ledger time. This results in the payment channel being immediately closed on the next PaymentChannel transaction.
2025-04-09 22:08:44 +00:00
Mayukha Vadari
e429455f4d refactor(trivial): reorganize ledger entry tests and helper functions (#5376)
This PR splits out `ledger_entry` tests into its own file (`LedgerEntry_test.cpp`) and alphabetizes the helper functions in `LedgerEntry.cpp`. These commits were split out of #5237 to make that PR a little more manageable, since these basic trivial changes are most of the diff. There is no code change, just moving code around.
2025-04-09 17:02:03 +00:00
Vito Tumas
7692eeb9a0 Instrument proposal, validation and transaction messages (#5348)
Adds metric counters for the following P2P message types:

* Untrusted proposal and validation messages
* Duplicate proposal, validation and transaction messages
2025-04-09 15:33:17 +02:00
Bronek Kozicki
a099f5a804 Remove UNREACHABLE from NetworkOPsImp::processTrustedProposal (#5387)
It’s possible for this to happen legitimately if a set of peers, including a validator, are connected in a cycle, and the latency and message processing time between those peers is significantly less than the latency between the validator and the last peer. It’s unlikely in the real world, but obviously easy to simulate with Antithesis.
2025-04-08 14:43:34 +00:00
Michael Legleux
ca0bc767fe fix: Use the build image from ghcr.io (#5390)
The CI pipelines have constantly been hitting Docker Hub's public rate limits since we increased the number of jobs we're running. This change switches over to images hosted in GitHub's registry.
2025-04-05 02:24:31 +00:00
Mayukha Vadari
4ba9288935 fix: disable channel_authorize when signing_support is disabled (#5385) 2025-04-05 01:08:34 +00:00
Valentin Balaschenko
e923ec6d36 Fix to correct memory ordering for compare_exchange_weak and wait in the intrusive reference counting logic (#5381)
This change addresses a memory ordering assertion failure observed on one of the Windows test machines during the IntrusiveShared_test suite.
2025-04-04 18:21:17 +00:00
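The sketch below shows the kind of explicit memory ordering involved when pairing `compare_exchange_weak` with an atomic wait: release on the successful exchange and on the publishing store, relaxed on the failed exchange, and acquire on the waiting side. The names are illustrative and not taken from the intrusive reference-counting code.

```cpp
#include <atomic>
#include <cstdio>
#include <thread>

std::atomic<int> counter{0};
std::atomic<bool> published{false};
int payload = 0;

void
producer()
{
    payload = 42;  // plain write, made visible by the release operations below
    int expected = counter.load(std::memory_order_relaxed);
    while (!counter.compare_exchange_weak(
        expected,
        expected + 1,
        std::memory_order_release,   // success: publish prior writes
        std::memory_order_relaxed))  // failure: only reload and retry
    {
    }
    published.store(true, std::memory_order_release);
    published.notify_one();
}

void
consumer()
{
    // C++20 atomic wait; the acquire pairs with the producer's release store.
    published.wait(false, std::memory_order_acquire);
    std::printf("payload=%d counter=%d\n", payload, counter.load());
}

int main()
{
    std::thread t1(producer), t2(consumer);
    t1.join();
    t2.join();
}
```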
Vlad
851d99d99e fix: uint128 ambiguousness breaking macos unity build (#5386) 2025-04-04 08:28:33 -04:00
Bart
f608e653ca Fix undefined uint128_t type on Windows non-unity builds (#5377)
As part of import optimization, a transitive include had been removed that defined `BOOST_COMP_MSVC` on Windows. In unity builds, this definition was pulled in, but in non-unity builds it was not, causing a compilation error. An inspection of the Boost code revealed that we can just gate the statements by `_MSC_VER` instead. A `#pragma message` is added to verify that the statement is only printed on Windows builds.
2025-04-01 11:21:59 -04:00
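A small sketch of the compile-time gating described above, assuming the standard MSVC predefined macro `_MSC_VER`; the message text and the gated definition are illustrative only.

```cpp
#include <iostream>

// Sketch: gate Windows-only statements on the compiler's predefined _MSC_VER
// macro and emit a build-time note, so the note appears only in MSVC builds.
#if defined(_MSC_VER)
#pragma message("MSVC detected: applying Windows-specific workaround")
constexpr bool windowsWorkaround = true;
#else
constexpr bool windowsWorkaround = false;
#endif

int main()
{
    std::cout << "workaround enabled: " << std::boolalpha << windowsWorkaround
              << "\n";
}
```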
Vlad
72e076b694 test: enable compile time param to change reference fee value (#5159)
Adds an extra CI pipeline to perform unit tests using different values for fees.
2025-03-27 23:40:36 +00:00
Bart
6cf37c4abe refactor: Move integration tests from 'examples/' into 'tests/' (#5367)
This change moves `examples/example` into `tests/conan` to make it clear it is an integration test, and adjusts the `conan` CI job accordingly.
2025-03-27 14:49:09 +00:00
Valentin Balaschenko
fc204773d6 Intrusive SHAMap smart pointers for efficient memory use and lock-free synchronization (#5152)
The main goal of this optimisation is memory reduction in SHAMapTreeNodes by introducing intrusive pointers instead of standard std::shared_ptr and std::weak_ptr.
2025-03-25 18:40:25 +00:00
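A compact sketch of intrusive reference counting of the kind described above: the count lives inside the object itself, so the smart pointer is a single raw pointer and no separate control block is allocated. Class and member names are assumptions, not the SHAMap types from the commit.

```cpp
#include <atomic>
#include <cassert>
#include <utility>

// Sketch: an object that carries its own reference count, plus a minimal
// smart pointer that manipulates that embedded count.
class RefCounted
{
    mutable std::atomic<int> refs_{0};

public:
    void
    addRef() const noexcept
    {
        refs_.fetch_add(1, std::memory_order_relaxed);
    }

    void
    release() const noexcept
    {
        if (refs_.fetch_sub(1, std::memory_order_acq_rel) == 1)
            delete this;
    }

protected:
    virtual ~RefCounted() = default;
};

template <class T>
class IntrusivePtr
{
    T* p_ = nullptr;

public:
    IntrusivePtr() = default;
    explicit IntrusivePtr(T* p) : p_(p)
    {
        if (p_)
            p_->addRef();
    }
    IntrusivePtr(IntrusivePtr const& o) : IntrusivePtr(o.p_)
    {
    }
    IntrusivePtr(IntrusivePtr&& o) noexcept : p_(std::exchange(o.p_, nullptr))
    {
    }
    ~IntrusivePtr()
    {
        if (p_)
            p_->release();
    }
    T*
    operator->() const noexcept
    {
        return p_;
    }
};

struct Node : RefCounted
{
    int value = 7;
};

int main()
{
    IntrusivePtr<Node> a(new Node);
    IntrusivePtr<Node> b = a;  // copying just bumps the embedded count
    assert(b->value == 7);
}
```

Weak-pointer support requires additional bookkeeping (for example a combined strong/weak count), which this sketch omits.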
Vlad
2bc5cb240f test: enable unit tests to work with variable reference fee (#5145)
Fix remaining unit tests to be able to process reference fee values other than 10.
2025-03-25 10:31:25 -04:00
Vlad
67028d6ea6 test: enable TxQ unit tests work with variable reference fee (#5118)
In preparation for a potential reference fee change we would like to verify that fee change works as expected. The first step is to fix all unit tests to be able to work with different reference fee values.
2025-03-24 14:56:19 -04:00
Ed Hennis
d22a5057b9 Prevent consensus from getting stuck in the establish phase (#5277)
- Detects if the consensus process is "stalled". If it is, then we can declare a 
  consensus and end successfully even if we do not have 80% agreement on
  our proposal.
  - "Stalled" is defined as:
    - We have a close time consensus
    - Each disputed transaction is individually stalled:
      - It has been in the final "stuck" 95% requirement for at least 2
        (avMIN_ROUNDS) "inner rounds" of phaseEstablish,
      - and either all of the other trusted proposers or this validator, if proposing,
        have had the same vote(s) for at least 4 (avSTALLED_ROUNDS) "inner
        rounds", and at least 80% of the validators (including this one, if
        appropriate) agree about the vote (whether yes or no).
- If we have been in the establish phase for more than 10x the previous
  consensus establish phase's time, then consensus is considered "expired",
  and we will leave the round, which sends a partial validation (indicating
  that the node is moving on without validating). Two restrictions avoid
  prematurely exiting, or having an extended exit in extreme situations.
  - The 10x time is clamped to be within a range of 15s
    (ledgerMAX_CONSENSUS) to 120s (ledgerABANDON_CONSENSUS).
  - If consensus has not had an opportunity to walk through all avalanche
    states (defined as not going through 8 "inner rounds" of phaseEstablish),
    then ConsensusState::Expired is treated as ConsensusState::No.
- When enough nodes leave the round, any remaining nodes will see they've
  fallen behind, and move on, too, generally before hitting the timeout. Any
  validations or partial validations sent during this time will help the
  consensus process bring the nodes back together.
2025-03-20 12:41:44 -04:00
Alex Kremer
75a20194c5 chore: Update link to ripple-binary-codec (#5355)
The link to ripple-binary-codec's definitions.json appears to be outdated. The updated link is also documented here: https://xrpl.org/docs/references/protocol/binary-format#definitions-file
2025-03-19 17:33:23 -04:00
Alex Kremer
7fe81fe62e chore: Add PR number to payload (#5310)
This PR adds one more payload field to the libXRPL compatibility check workflow - the PR number itself.
2025-03-18 17:26:08 +00:00
Bronek Kozicki
345ddc7234 fix: Remove null pointer deref, just do abort (#5338)
This change removes the existing undefined behavior from `LogicError`, so we can be certain that there will always be a stacktrace.

De-referencing a null pointer is an old trick to generate `SIGSEGV`, which would typically also create a stacktrace. However, it is also undefined behaviour, and compilers can do something else. A more robust way to create a stacktrace while crashing the program is to use `std::abort`, which we have also used in this location for a long time. If we combine the two, we might not get the expected behaviour: the null-pointer deref followed by `std::abort`, as handled by certain compiler versions, may not immediately cause a crash. We have observed the stacktrace being wiped instead, the thread being put in an indeterminate state, and then a stacktrace created without any useful information.
2025-03-18 12:45:25 -04:00
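A minimal sketch of the pattern described above: report the error, then call `std::abort()` directly rather than dereferencing a null pointer to force a crash. The logging shown is a stand-in, not rippled's actual `LogicError` machinery.

```cpp
#include <cstdio>
#include <cstdlib>
#include <string>

// Sketch: fail fast with std::abort() after reporting the error, instead of
// relying on a deliberate null-pointer dereference to raise SIGSEGV.
[[noreturn]] void
logicError(std::string const& message)
{
    std::fprintf(stderr, "Logic error: %s\n", message.c_str());
    // A stacktrace would be captured and logged here in the real code.
    std::abort();  // well-defined termination, preserves a usable core/trace
}

int main(int argc, char**)
{
    if (argc > 1)  // avoid aborting on a plain run of the example
        logicError("unexpected state");
    return 0;
}
```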
Bart
d167d4864f refactor: Updates Conan dependencies: RocksDB (#5335)
Updates RocksDB to version 9.7.3, the latest version supported in Conan 1.x. A patch for 9.7.4 that fixes a memory leak is included.
2025-03-18 11:25:48 -04:00
Vlad
bf504912a4 fix: trust line RPC no ripple flag (#5345)
The Trustline RPC `no_ripple` flag gets set depending on the `lsfDefaultRipple` flag, which is not a flag of a trustline but of the account root. The `lsfDefaultRipple` flag does not provide any insight into whether this particular trust line has the `lsfLowNoRipple` or `lsfHighNoRipple` flag set, so it should not be used here at all. This change simplifies the logic.
2025-03-18 09:03:03 -04:00
cyan317
a7fb8ae915 fix: Handle invalid marker parameter in grpc call (#5317)
The `end_marker` is used to limit the range of ledger entries to fetch. If `end_marker` is less than `marker`, a crash can occur. This change adds an additional check.
2025-03-18 08:21:33 -04:00
Sergey Kuznetsov
d9b7a2688f fix: Error message for ledger_entry rpc (#5344)
Changes the error to `malformedAddress` for `permissioned_domain` in the `ledger_entry` rpc, when the account is not a string. This change makes it more clear to a user what is wrong with their request.
2025-03-17 09:14:49 -04:00
Darius Tumas
c0299dba88 Adds hub.xrpl-commons.org as a new Bootstrap Cluster (#5263) 2025-03-17 07:04:46 -04:00
Bronek Kozicki
c3ecdb4746 Rename "deadlock" to "stall" in LoadManager (#5341)
What the LoadManager class does is stall detection, which is not the same as deadlock detection. In the condition of severe CPU starvation, LoadManager will currently intentionally crash rippled reporting `LogicError: Deadlock detected`. This error message is misleading as the condition being detected is not a deadlock. This change fixes and refactors the code in response.
2025-03-14 16:15:09 -04:00
Ed Hennis
c17676a9be refactor: Improve ordering of headers with clang-format (#5343)
Removes all manual header groupings from source and header files by leveraging clang-format options.
2025-03-12 18:33:21 -04:00
Ed Hennis
ed8e32cc92 refactor: Calculate numFeatures automatically (#5324)
Requiring manual updates of numFeatures is an annoying process that is easily forgotten and leads to frequent merge conflicts. This change takes advantage of the `XRPL_FEATURE` and `XRPL_FIX` macros, and adds a new `XRPL_RETIRE` macro to automatically set `numFeatures`.
2025-03-12 17:34:06 -04:00
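To illustrate the counting idea (the macro names mirror the ones mentioned above, but the feature list and mechanism are simplified assumptions), the sketch below re-expands an X-macro feature list so the total is derived automatically instead of being maintained by hand.

```cpp
#include <cstddef>
#include <iostream>

// Sketch: an X-macro list of features. Expanding each entry to "+1" yields a
// numFeatures-style total that never needs manual updating.
#define FEATURE_LIST             \
    XRPL_FEATURE(Batch)          \
    XRPL_FEATURE(DeepFreeze)     \
    XRPL_FIX(AMMv1_3)            \
    XRPL_RETIRE(FlowCross)

#define XRPL_FEATURE(name) +1
#define XRPL_FIX(name) +1
#define XRPL_RETIRE(name) +1
constexpr std::size_t numFeatures = 0 FEATURE_LIST;
#undef XRPL_FEATURE
#undef XRPL_FIX
#undef XRPL_RETIRE

int main()
{
    std::cout << "numFeatures = " << numFeatures << "\n";  // prints 4
}
```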
Bart
2406b28e64 refactor: Remove unused and add missing includes (#5293)
The codebase is filled with includes that are unused, and which thus can be removed. At the same time, the files often do not include all headers that contain the definitions used in those files. This change uses clang-format and clang-tidy to clean up the includes, with minor manual intervention to ensure the code compiles on all platforms.
2025-03-11 14:16:45 -04:00
Michael Legleux
2216e5a13f Set version to 2.4.0 2025-03-06 10:41:58 -08:00
Michael Legleux
5bf3a308d5 Set version to 2.4.0-rc4 2025-03-03 10:31:11 -08:00
Darius Tumas
53ea31c69a chore: Update XRPL Foundation Validator List URL (#5326) 2025-02-28 18:14:01 -05:00
Ed Hennis
c1c2b5bf52 chore: Move "assert" and "werr" flags from "actions/build" (#5325)
- PR #5228 added assert=TRUE and werr=TRUE CMake flags to the
  build/action.yml script which is used by all CI jobs to build rippled,
  ensuring those flags were always set. The assumption was that only the
  CI jobs used that script, so any extra time cost was offset by the
  benefit of the extra checks. That assumption was incorrect. That
  script is used by other downstream projects. Therefore, those flags
  have been moved into the individual CI jobs' "cmake-args" parameter
  passed to build/action.yml. This will have the same effect for CI jobs
  without any side effects.
2025-02-27 20:42:06 -05:00
Mark Travis
af018c7b0b Log detailed correlated consensus data together (#5302)
Combine multiple related debug log data points into a single
message. Allows quick correlation of events that
previously were either not logged or, if logged, strewn
across multiple lines, making correlation difficult.
The Heartbeat Timer and consensus ledger accept processing
each have this capability.

Also guarantees that log entries will be written if the
node is a validator, regardless of log severity level.
Otherwise, the level of these messages is at INFO severity.
2025-02-27 13:02:57 -05:00
Michael Legleux
0a1ca0600f Set version to 2.4.0-rc3 2025-02-26 12:41:15 -08:00
Mark Travis
cd7c62818b fix: Acquire previously failed transaction set from network as new proposal arrives (#5318)
Reset the failure variable.
2025-02-25 20:00:50 -05:00
Bronek Kozicki
37d06bcce8 Fix Replace assert with XRPL_ASSERT (#5312) 2025-02-25 11:43:26 -05:00
Bronek Kozicki
9745718467 fix: Remove 'new parent hash' assert (#5313)
This assert is known to occasionally trigger, without causing errors
downstream. It is replaced with a log message.
2025-02-25 09:14:10 -05:00
Michael Legleux
ab44cc31e2 Set version to 2.4.0-rc2 2025-02-20 15:29:54 -08:00
Ed Hennis
dce3e1efa6 Add logging and improve counting of amendment votes from UNL (#5173)
* Add logging for amendment voting decision process
* When counting "received validations" to determine quorum, count the number of validators actually voting, not the total number of possible votes.
2025-02-20 13:35:04 -05:00
Ed Hennis
159dfb5acb Revert "Reduce duplicate peer traffic for ledger data (#5126)" (#5300)
This reverts commit dd5e6559dd, which introduced a regression causing slow
close times and syncing issues.
A fix will be attempted later.
2025-02-19 18:52:08 -05:00
Bart
844646dc50 docs: Revert peer port to 51235 (#5299)
Reverts the [port_peer] back to the legacy port 51235 rather than to the default port 2459, to avoid potentially inconveniencing existing operators.
2025-02-19 17:14:00 -05:00
Michael Legleux
01fc8f2209 Set version to 2.4.0-rc1 2025-02-18 13:58:56 -08:00
Olek
43e1d4440e fix: Switch Permissioned Domain to Supported::yes (#5287)
Switch Permissioned Domain feature's supported flag from Supported::no to Supported::yes for it to be votable.
2025-02-15 10:08:25 -05:00
Bart
466849efe8 docs: Clarifies default port of hosts (#5290)
The current comment in the example cfg file incorrectly mentions both "may" and "must". This change fixes this comment to clarify that the default port of hosts is 2459 and that specifying it is therefore optional. It further sets the default port to 2459 instead of the legacy 51235.
2025-02-14 21:37:14 -05:00
Mark Travis
db0fad6826 Log proposals and validations (#5291)
Adds detailed log messages for each validation and proposal received from the network.
2025-02-14 20:48:12 -05:00
Ed Hennis
dd5e6559dd Reduce duplicate peer traffic for ledger data (#5126)
- Drop duplicate outgoing TMGetLedger messages per peer
  - Allow a retry after 30s in case of peer or network congestion.
  - Addresses RIPD-1870
  - (Changes levelization. That is not desirable, and will need to be fixed.)
- Drop duplicate incoming TMGetLedger messages per peer
  - Allow a retry after 15s in case of peer or network congestion.
  - The requestCookie is ignored when computing the hash, thus increasing
    the chances of detecting duplicate messages.
  - With duplicate messages, keep track of the different requestCookies
    (or lack of cookie). When work is finally done for a given request,
    send the response to all the peers that are waiting on the request,
    sending one message per peer, including all the cookies and
    a "directResponse" flag indicating the data is intended for the
    sender, too.
  - Addresses RIPD-1871
- Drop duplicate incoming TMLedgerData messages
  - Addresses RIPD-1869
- Improve logging related to ledger acquisition
- Class "CanProcess" to keep track of processing of distinct items

---------

Co-authored-by: Valentin Balaschenko <13349202+vlntb@users.noreply.github.com>
2025-02-14 18:51:51 -05:00
Bart
7c9d652d9b Support canonical ledger entry names (#5271)
This change enhances the filtering in the ledger, ledger_data, and account_objects methods by also supporting filtering by the canonical name of the LedgerEntryType using case-insensitive matching.
2025-02-14 08:12:19 -08:00
Darius Tumas
dc9e6c37fe chore: Update XRPL Foundation public key (#5289)
Following the XRPL Foundation UNL migration a new set of keys was generated.
2025-02-14 06:54:29 -08:00
Ed Hennis
01fe9477f4 refactor: Change recursive_mutex to mutex in DatabaseRotatingImp (#5276)
Rewrites the code so that the lock is not held during the callback. Instead it locks twice, once before, and once after. This is safe due to the structure of the code, but is checked after the second lock. This allows mutex_ to be changed back to a regular mutex.
2025-02-13 14:32:37 -08:00
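A minimal sketch of the lock-twice structure described above: lock to copy what the callback needs, run the callback unlocked, then lock again and re-check before committing. The class and callback here are illustrative stand-ins, not the DatabaseRotatingImp interface.

```cpp
#include <functional>
#include <iostream>
#include <mutex>
#include <string>

// Sketch: instead of holding a (recursive) mutex across a user callback, lock
// to read state, release while the callback runs, then lock again to commit,
// re-checking that the state is still what we expect.
class Rotator
{
    std::mutex mutex_;  // a plain mutex is enough with this structure
    std::string current_ = "db0";

public:
    void
    rotate(std::function<std::string(std::string const&)> const& makeNext)
    {
        std::string snapshot;
        {
            std::lock_guard lock(mutex_);
            snapshot = current_;  // first lock: copy what the callback needs
        }

        std::string const next = makeNext(snapshot);  // callback runs unlocked

        std::lock_guard lock(mutex_);
        if (current_ != snapshot)  // second lock: verify nothing changed
        {
            std::cerr << "state changed during rotation; aborting\n";
            return;
        }
        current_ = next;
    }

    std::string
    name()
    {
        std::lock_guard lock(mutex_);
        return current_;
    }
};

int main()
{
    Rotator r;
    r.rotate([](std::string const& old) { return old + "+rotated"; });
    std::cout << r.name() << "\n";  // prints db0+rotated
}
```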
Bart
97e3dae6f4 fix: Replace charge() by fee_.update() in OnMessage functions (#5269)
In PeerImpl.cpp, if the function is a message handler (onMessage) or called directly from a message handler, then it should use fee_, since the charge function is called when the handler returns (OnMessageEnd). If the function is not a message handler, such as a job queue item, it should continue to use charge.
2025-02-13 08:54:01 -08:00
Elliot Lee
e8e7888a23 docs: ensure build_type and CMAKE_BUILD_TYPE match (#5274) 2025-02-13 07:28:23 -08:00
code0xff
b02b8d016c chore: Fix small typos in protocol files (#5279) 2025-02-13 05:48:48 -08:00
Ed Hennis
a079bac153 chore: Rename missing-commits job, and combine nix job files (#5268)
- Rename the job in missing-commits.yml from "check" to "up_to_date",
  because other jobs named "check" prevent merges, but this one should
  not prevent merges. How else are branches going to get caught up?
- Move the job in instrumentation.yml to nix.yml, but keep it entirely
  independent.
2025-02-12 05:44:03 -08:00
Ed Hennis
3a55a64e1c docs: Add a summary of the git commit message rules (#5283) 2025-02-11 15:50:51 -05:00
Olek
fa5a85439f fix: Amendment to add transaction flag checking functionality for Credentials (#5250)
CredentialCreate / CredentialAccept / CredentialDelete transactions will check sfFlags field in preflight() when the amendment is enabled.
2025-02-10 12:33:37 -08:00
Donovan Hide
81034596a8 fix: Omit superfluous setCurrentThreadName call in GRPCServer.cpp (#5280) 2025-02-10 09:08:36 -08:00
Bronek Kozicki
0968cdf340 fix: Do not allow creating Permissioned Domains if credentials are not enabled (#5275)
If the permissioned domains amendment XLS-80 is enabled before credentials XLS-70, then the permissioned domain users will not be able to match any credentials. The changes here prevent the creation of any permissioned domain objects if credentials are not enabled.
2025-02-07 15:11:29 -08:00
Mayukha Vadari
d9e4009e33 fix: issues in simulate RPC (#5265)
Make `simulate` RPC easier to use:
* Prevent the use of `seed`, `secret`, `seed_hex`, and `passphrase` fields (to avoid confusion with the signing methods).
* Add autofilling of the `NetworkID` field.
2025-02-07 12:17:37 -08:00
Bart
02387fd227 Updates Conan dependencies (#5256)
This PR updates several Conan dependencies:
* boost
* date
* libarchive
* libmysqlclient
* libpq
* lz4
* onetbb
* openssl
* sqlite3
* zlib
* zstd
2025-02-06 13:11:49 -08:00
Shawn Xie
fb3713bc25 Amendment fixFrozenLPTokenTransfer (#5227)
Prohibits LPToken holders from sending LPTokens to others if they have been frozen by one of the assets in the AMM pool.
2025-02-05 10:05:24 -08:00
Ed Hennis
f6d63082c0 Improve git commit hash lookup (#5225)
- Also get the branch name.
- Use rev-parse instead of describe to get a clean hash.
- Return the git hash and branch name in server_info for admin
  connections.
- Include git hash and branch name on separate lines in --version.
2025-02-05 11:36:43 -05:00
Vlad
33e1c42599 Add deep freeze feature (XLS-77d) (#5187)
- spec: XRPLF/XRPL-Standards#220
- amendment: "DeepFreeze"
- implemented deep freeze spec to allow token issuers to prevent currency holders from being able to acquire more of these tokens.
- in combination with normal freeze, deep freeze effectively prevents any trust line balance change of a currency holder (except direct issuer <-> holder payments).
- added 2 new invariant checks to verify that deep freeze cannot be enacted without normal freeze and transfer is not frozen (see the sketch after this list).
- made some fixes to existing freeze handling.
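
A minimal sketch of the first invariant, assuming illustrative flag values (the amendment defines its own flags on the trust line entry):

```cpp
#include <cstdint>

// Illustrative flag bits only; the real amendment defines its own lsf* flags
// on the trust line (RippleState) ledger entry.
constexpr std::uint32_t lsFreeze     = 0x01;
constexpr std::uint32_t lsDeepFreeze = 0x02;

// Invariant: a trust line side may be deep-frozen only if it is also frozen.
bool
deepFreezeRequiresFreeze(std::uint32_t flags)
{
    return (flags & lsDeepFreeze) == 0 || (flags & lsFreeze) != 0;
}
```
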

Co-authored-by: Ed Hennis <ed@ripple.com>
Co-authored-by: Howard Hinnant <howard.hinnant@gmail.com>
2025-01-31 13:40:33 -05:00
Ed Hennis
1b75dc8bcd Set version to 2.4.0-b3 2025-01-29 19:19:26 -05:00
Ed Hennis
3d02580c09 Merge remote-tracking branch 'upstream/master' into merge231
Hotfix: version 2.3.1
  Reduce the peer charges for well-behaved peers
  Update conan in the "nix" CI jobs
2025-01-29 18:11:02 -05:00
Ed Hennis
8458233a31 Set version to 2.3.1 2025-01-29 09:40:31 -05:00
Ed Hennis
cb0ddbf863 Update conan in the "nix" CI jobs 2025-01-29 09:40:30 -05:00
Mayukha Vadari
dcc4581220 Add RPC "simulate" to execute a dry run of a transaction (#5069)
- Spec: https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate
- Also update signing methods to autofill fees better and properly handle transactions that require a non-standard fee.
2025-01-28 19:02:28 -05:00
Olek
50b8f19cb5 Fix CI unit tests (#5196)
- Add retries for rpc client
- Add dynamic port allocation for rpc servers
2025-01-28 10:45:59 -05:00
Valentin Balaschenko
f3e201f983 Set version to 2.3.1-rc1 2025-01-27 19:43:14 -05:00
Valentin Balaschenko
b14c24960b Reduce the peer charges for well-behaved peers:
- Fix an erroneous high fee penalty that peers could incur for sending
  older transactions.
- Update the fees charged for imposing a load on the server.
- Prevent the relaying of internal pseudo-transactions.
  - Before: Pseudo-transactions received from a peer will fail the signature
    check, even if they were requested (using TMGetObjectByHash), because
    they have no signature. This causes the peer to be charged for an
    invalid signature.
  - After: Pseudo-transactions are put into the global cache
    (TransactionMaster) only. If the transaction is not part of
    a TMTransactions batch, the peer is charged an unwanted data fee.
    These fees will not be a problem in the normal course of operations,
    but should dissuade peers from behaving badly by sending a bunch of
    junk.
- Improve logging: include the reason for fees charged to a peer.

Co-authored-by: Ed Hennis <ed@ripple.com>
2025-01-27 19:41:22 -05:00
Michael Legleux
b6e3453f49 Update secp256k1 library to 0.6.0 (#5254) 2025-01-27 19:47:47 +00:00
Ed Hennis
ed4870cdb4 chore: Update Visual Studio CI to VS 2022, and add VS Debug builds (#5240)
* Debug builds do not run tests, because they take too long.
2025-01-24 18:46:47 -05:00
Bronek Kozicki
5fbee8c824 Add [validator_list_threshold] to validators.txt to improve UNL security (#5112) 2025-01-23 18:00:34 -05:00
Bronek Kozicki
3868c04e99 Switch from assert to XRPL_ASSERT (#5245) 2025-01-23 16:56:37 -05:00
tequ
409c1d5aa2 Add missing space character to a log message (#5251) 2025-01-23 15:08:14 -05:00
Bronek Kozicki
20710f5232 Cleanup API-CHANGELOG.md (#5207) 2025-01-23 14:38:18 -05:00
Ed Hennis
870882f567 test: Unit tests to recreate invalid index logic error (#5242)
* One hits the global cache, one does not.
* Also some extra checking.

Co-authored-by: Bronek Kozicki <brok@incorrekt.com>
2025-01-23 13:35:13 -05:00
Ed Hennis
e1e67b2c9e Update branch management and merge / release processes (#5215)
* Has more steps, but allows merges to develop to continue when a
  beta / RC is pending, increasing developer velocity.
* Add a CI job to check that no reverse merges have been missed.
* Add some useful scripts in bin/git:
  * Set up upstreams as expected for safer pushes
  * Squash a bunch of branches
  * Set the version number
2025-01-22 19:02:13 -05:00
Sergey Kuznetsov
eac3abdca9 fix: Error consistency in LedgerEntry::parsePermissionedDomains() (#5252)
Update errors for parsing permissioned domains in the LedgerEntry handler to make them consistent with other parsers.
2025-01-21 13:00:21 -05:00
Ed Hennis
ebd8e63276 Set version to 2.4.0-b2 2025-01-16 16:25:55 -05:00
Ed Hennis
839d17e7bd fix: Use consistent CMake settings for all modules (#5228)
* Resolves an issue introduced in #5111, which inadvertently removed the
  -Wno-maybe-uninitialized compiler option from some xrpl.libxrpl
  modules. This resulted in new "may be used uninitialized" build
  warnings, first noticed in the "protocol" module. When compiling with
  derr=TRUE, those warnings became errors, which made the build fail.
* Github CI actions will build with the assert and werr options turned
  on. This will cause CI jobs to fail if a developer introduces a new
  compiler warning, or causes an assert to fail in release builds.
* Includes the OS and compiler version in the linux dependencies jobs in
  the "check environment" step.
* Translates the `unity` build option into `CMAKE_UNITY_BUILD` setting.
2025-01-16 16:10:30 -05:00
Valentin Balaschenko
7be5c31bc6 Fix levelization script to ignore commented includes (#5194)
Check to ignore single-line comments during dependency analysis.
2025-01-16 15:23:40 -05:00
tequ
9e4a7d5871 Fix the flag processing of NFTokenModify (#5246)
Adds checks for invalid flags.
2025-01-16 10:37:52 -05:00
Mayukha Vadari
ff8b9aa439 Fix failing assert in connect RPC (#5235) 2025-01-14 14:52:38 -05:00
Olek
ccc0889803 Permissioned Domains (XLS-80d) (#5161) 2025-01-10 12:44:14 -05:00
Mayukha Vadari
07f118caec chore: update deprecated Github Actions (#5241) 2025-01-09 16:32:32 -05:00
tequ
58af62f388 XLS-46: DynamicNFT (#5048)
This Amendment adds functionality to update the URI of NFToken objects as described in the XLS-46d: Dynamic Non Fungible Tokens (dNFTs) spec.
2025-01-09 11:22:11 -05:00
rrmanukyan
040cd23e4a chore: add macos dependency installation (#5233)
* python (3.13) and cmake (latest)
2025-01-07 12:08:39 -05:00
Shawn Xie
0324764a83 prefix Uint384 and Uint512 with Hash in server_definitions (#5231) 2025-01-02 16:32:15 -05:00
Mayukha Vadari
679e35fd46 refactor: add rpcName to LEDGER_ENTRY macro (#5202)
The LEDGER_ENTRY macro now takes an additional parameter, which makes it harder to forget to add the new field to jss.h and to the list of account_objects/ledger_data filters.
2025-01-02 11:54:36 -05:00
Ed Hennis
49e0d54c76 Set version to 2.4.0-b1 2024-12-16 18:14:02 -05:00
Michael Legleux
7506852a99 fix: Add header for set_difference (#5197)
Fix `error C2039: 'set_difference': is not a member of 'std'`
2024-12-16 18:01:45 -05:00
Mayukha Vadari
bcbfb04992 fix: allow overlapping types in Expected (#5218)
For example, Expected<std::uint32_t, Json::Value> will now build even though there is an implicit conversion from unsigned int to Json::Value.
2024-12-16 18:00:14 -05:00
Gregory Tsipenyuk
5cd72f2431 Add MPTIssue to STIssue (#5200)
Replace Issue in STIssue with Asset. STIssue with MPTIssue is only used in MPT tests.
Will be used in Vault and in transactions with STIssue fields once MPT is integrated into DEX.
2024-12-16 17:52:48 -05:00
Bronek Kozicki
eabca8439f Antithesis instrumentation improvements (#5213)
* Rename ASSERT to XRPL_ASSERT
* Upgrade to Antithesis SDK 0.4.4, and use new 0.4.4 features
  * automatic cast to bool, like assert
* Add instrumentation workflow to verify build with instrumentation enabled
2024-12-16 17:48:33 -05:00
John Freeman
ea1fffeebf Enforce levelization in libxrpl with CMake (#5111)
Adds two CMake functions:

* add_module(library subdirectory): Declares an OBJECT "library" (a CMake abstraction for a collection of object files) with sources from the given subdirectory of the given library, representing a module. Isolates the module's headers by creating a subdirectory in the build directory, e.g. .build/tmp123, that contains just a symlink, e.g. .build/tmp123/basics, to the module's header directory, e.g. include/xrpl/basics, in the source directory, and putting .build/tmp123 (but not include/xrpl) on the include path of the module sources. This prevents the module sources from including headers not explicitly linked to the module in CMake with target_link_libraries.
* target_link_modules(library scope modules...): Links the library target to each of the module targets, and removes their sources from its source list (so they are not compiled and linked twice).

Uses these functions to separate and explicitly link modules in libxrpl:

    Level 01: beast
    Level 02: basics
    Level 03: json, crypto
    Level 04: protocol
    Level 05: resource, server
2024-12-06 17:54:40 -05:00
Mayukha Vadari
6d58065909 refactor: clean up LedgerEntry.cpp (#5199)
Refactors LedgerEntry to make it easier to read and understand.
2024-12-04 15:33:50 -05:00
Ed Hennis
47b0543461 test: Add more test cases for Base58 parser (#5174)
---------
Co-authored-by: John Freeman <jfreeman08@gmail.com>
2024-12-03 16:13:31 -05:00
Ed Hennis
8215c605b4 test: Check for some unlikely null dereferences in tests (#5004) 2024-12-03 15:03:22 -05:00
Bronek Kozicki
d7e949193f Add Antithesis instrumentation (#5042)
* Copy Antithesis SDK version 0.4.0 to directory external/
* Add build option `voidstar` to enable instrumentation with Antithesis SDK
* Define instrumentation macros ASSERT and UNREACHABLE in terms of regular C assert
* Replace asserts with named ASSERT or UNREACHABLE
* Add UNREACHABLE to LogicError
* Document instrumentation macros in CONTRIBUTING.md
2024-12-03 14:54:44 -05:00
Elliot Lee
f64cf9187a Set version to 2.3.0 2024-11-25 13:27:20 -08:00
Elliot Lee
b54d85d862 refactor(AMMClawback): move tfClawTwoAssets check (#5201)
Move tfClawTwoAssets check to preflight and return
error temINVALID_FLAG

---------

Co-authored-by: yinyiqian1 <yqian@ripple.com>
2024-11-25 13:16:47 -08:00
Elliot Lee
f419c18056 Add a new serialized type: STNumber (#5121)
`STNumber` lets objects and transactions contain multiple fields for
quantities of XRP, IOU, or MPT without duplicating information about the
"issue" (represented by `STIssue`). It is a straightforward serialization of
the `Number` type that uniformly represents those quantities.

---------

Co-authored-by: John Freeman <jfreeman08@gmail.com>
Co-authored-by: Howard Hinnant <howard.hinnant@gmail.com>
2024-11-25 13:16:32 -08:00
Olek
0ec17b6026 fix: check for valid ammID field in amm_info RPC (#5188) 2024-11-18 13:58:25 -05:00
Ed Hennis
838978b869 Set version to 2.3.0-rc2 2024-11-12 18:40:22 -05:00
Mayukha Vadari
8186253707 fix: include index in server_definitions RPC (#5190) 2024-11-12 18:37:15 -05:00
Bronek Kozicki
2316d843d7 Fix ledger_entry crash on invalid credentials request (#5189) 2024-11-12 18:24:52 -05:00
Ed Hennis
9d58f11a60 Set version to 2.3.0-rc1 2024-11-06 17:37:59 -05:00
Shawn Xie
7b18006193 Replace Uint192 with Hash192 in server_definitions response (#5177) 2024-11-06 17:33:16 -05:00
Bronek Kozicki
9e48fc0c83 Fix potential deadlock (#5124)
* 2.2.2 changed functions acquireAsync and NetworkOPsImp::recvValidation to add an item to a collection under lock, unlock, do some work, then lock again to remove the item. It will deadlock if an exception is thrown while adding the item - before unlocking.
* Replace ScopedUnlock with scope_unlock (sketched below).
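
A self-contained sketch of the fixed pattern, using a minimal stand-in for scope_unlock and hypothetical container and work names:

```cpp
#include <mutex>
#include <set>
#include <string>

// Minimal stand-in for scope_unlock: release a held unique_lock for the
// lifetime of this object and re-acquire it on destruction, even if the
// unlocked section throws.
struct scope_unlock
{
    std::unique_lock<std::mutex>& lock_;
    explicit scope_unlock(std::unique_lock<std::mutex>& lock) : lock_(lock)
    {
        lock_.unlock();
    }
    ~scope_unlock()
    {
        lock_.lock();
    }
};

std::mutex mtx;
std::set<std::string> inFlight;  // items currently being worked on

// Register the item under lock, do the slow work unlocked, then clean up under
// lock again. The unique_lock guarantees the mutex is released during stack
// unwinding if anything throws while it is held, which prevents the deadlock
// described above.
template <class Work>
void
process(std::string const& key, Work&& work)
{
    std::unique_lock lock(mtx);
    if (!inFlight.insert(key).second)
        return;  // someone else is already working on this item
    {
        scope_unlock unlocked(lock);  // mutex released here...
        work();                       // (a full version would also erase on throw)
    }                                 // ...and re-acquired here
    inFlight.erase(key);
}
```
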
2024-11-06 17:22:42 -05:00
Olek
8e827e32ac Introduce Credentials support (XLS-70d): (#5103)
Amendment:
    - Credentials
    
    New Transactions:
    - CredentialCreate
    - CredentialAccept
    - CredentialDelete
    
    Modified Transactions:
    - DepositPreauth
    - Payment
    - EscrowFinish
    - PaymentChannelClaim
    - AccountDelete
    
    New Object:
    - Credential

    Modified Object:
    - DepositPreauth
    
    API updates:
    - ledger_entry
    - account_objects
    - ledger_data
    - deposit_authorized
    
    Read full spec: https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0070d-credentials
2024-11-06 17:05:03 -05:00
Gregory Tsipenyuk
c5c0e70e23 Fix token comparison in Payment (#5172)
* Checks only the Currency or MPT Issuance ID part of the Asset object.
* Resolves temREDUNDANT regression detected in testing.
2024-11-06 11:20:30 -05:00
Gregory Tsipenyuk
ec61f5e9d3 Add fixAMMv1_2 amendment (#5176)
* Add reserve check on AMM Withdraw
* Try AMM max offer if changeSpotPriceQuality() fails
2024-11-05 15:06:16 -05:00
Gregory Tsipenyuk
d57cced17b Fix unity build (#5179) 2024-11-05 10:48:02 -05:00
yinyiqian1
54a350be79 Add AMMClawback Transaction (XLS-0073d) (#5142)
Amendment:
- AMMClawback

New Transactions:
- AMMClawback

Modified Transactions:
- AMMCreate
- AMMDeposit
2024-11-04 15:27:57 -05:00
Alloy Networks
d6dbf0e0a6 Add hubs.xrpkuwait.com to bootstrap (#5169) 2024-10-31 18:14:55 -04:00
Valentin Balaschenko
0d887ad815 docs: Add protobuf dependencies to linux setup instructions (#5156) 2024-10-29 16:26:20 -04:00
yinyiqian1
d4a5f8390e fix: reject invalid markers in account_objects RPC calls (#5046) 2024-10-29 16:13:01 -04:00
Bob Conan
ab5d450d3c Update RELEASENOTES.md (#5154)
fix the typo "concensus" -> "consensus"
2024-10-29 15:43:56 -04:00
Gregory Tsipenyuk
23c37fa506 Introduce MPT support (XLS-33d): (#5143)
Amendment:
- MPTokensV1

New Transactions:
- MPTokenIssuanceCreate
- MPTokenIssuanceDestroy
- MPTokenIssuanceSet
- MPTokenAuthorize

Modified Transactions:
- Payment
- Clawback

New Objects:
- MPTokenIssuance
- MPToken

API updates:
- ledger_entry
- account_objects
- ledger_data

Other:
- Add += and -= operators to ValueProxy

Read full spec: https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033d-multi-purpose-tokens

---------
Co-authored-by: Shawn Xie <shawnxie920@gmail.com>
Co-authored-by: Howard Hinnant <howard.hinnant@gmail.com>
Co-authored-by: Ed Hennis <ed@ripple.com>
Co-authored-by: John Freeman <jfreeman08@gmail.com>
2024-10-29 15:19:28 -04:00
John Freeman
63209c2646 Consolidate definitions of fields, objects, transactions, and features (#5122) 2024-10-16 14:02:29 -05:00
John Freeman
f0dabd1446 Ignore reformat when blaming 2024-10-15 18:28:43 -05:00
John Freeman
552377c76f Reformat code with clang-format-18 2024-10-15 18:27:56 -05:00
John Freeman
e7cd03325b Update pre-commit hook 2024-10-15 18:25:14 -05:00
John Freeman
decb3c178e Update clang-format settings 2024-10-15 18:24:22 -05:00
John Freeman
f6d647d6c3 Update clang-format workflow 2024-10-15 18:22:57 -05:00
Chenna Keshava B S
bf4a7b6ce8 Expand Error Message for rpcInternal (#4959)
Validator operators have been confused by the rpcInternal error, which can occur if the server is not running in another process.
2024-10-01 14:09:42 -07:00
Elliot Lee
8e2c85d14d docs: clean up API-CHANGELOG.md (#5064)
Move the newest information to the top, i.e., use reverse chronological order within each of the two sections ("API Versions" and "XRP Ledger server versions")
2024-10-01 14:07:31 -07:00
Elliot Lee
1fbf8da79f Set version to 2.3.0-b4 2024-09-20 14:27:37 -07:00
Denis Angell
a75309919e feat(SQLite): allow configurable database pragma values (#5135)
Make page_size and journal_size_limit configurable values in rippled.cfg
2024-09-20 14:26:21 -07:00
Vlad
0ece395c24 refactor: re-order PRAGMA statements (#5140)
The page_size will soon be made configurable with #5135, making this
re-ordering necessary.

When opening a SQLite connection, a specific set of pragmas is applied via
commonPragmas.

In particular, PRAGMA journal_mode creates the journal file and locks the
page_size; as of this commit, this fixes the page size at the default
value of 4096. Coincidentally, the hardcoded page_size was also 4096, so
no issue was noticed.
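
For illustration only (rippled configures SQLite through SOCI rather than the raw C API shown here), the ordering constraint looks roughly like this:

```cpp
#include <sqlite3.h>

// Illustrative only; error handling omitted. The point is the ordering:
// PRAGMA page_size must be issued before PRAGMA journal_mode, because
// creating the journal file locks the page size in place.
void
openWithPragmas(char const* path)
{
    sqlite3* db = nullptr;
    sqlite3_open(path, &db);

    // Still effective here...
    sqlite3_exec(db, "PRAGMA page_size=4096;", nullptr, nullptr, nullptr);
    // ...and this locks it in (the journal mode shown is just an example).
    sqlite3_exec(db, "PRAGMA journal_mode=WAL;", nullptr, nullptr, nullptr);

    sqlite3_close(db);
}
```
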
2024-09-20 11:04:10 -07:00
Chenna Keshava B S
b6391fe011 fix(book_changes): add "validated" field and reduce RPC latency (#5096)
Update book_changes RPC to reduce latency, add "validated" field, and accept shortcut strings (current, closed, validated) for ledger_index.

`"validated": true` indicates that the transaction has been included in a validated ledger so the result of the transaction is immutable.

Fix #5033

Fix #5034

Fix #5035

Fix #5036

---------

Co-authored-by: Bronek Kozicki <brok@incorrekt.com>
2024-09-19 08:39:10 -07:00
luozexuan
9a6af9c431 chore: fix typos in comments (#5094)
Signed-off-by: luozexuan <fetchcode@139.com>
2024-09-16 13:53:19 -07:00
Elliot Lee
fa1cbb0746 Merge remote-tracking branch 'origin/master' into develop-next 2024-09-14 18:36:46 -07:00
Elliot Lee
68e1be3cf5 Set version to 2.2.3 2024-09-14 13:21:23 -07:00
J. Scott Branson
9abc4868d6 Update SQLite3 max_page_count to match current defaults (#5114)
When rippled initiates a connection to SQLite3, rippled sends a "PRAGMA"
statement defining the maximum number of pages allowed in the database.
Update the max_page_count so it is consistent with the default for newer
versions of SQLite3. Increasing max_page_count is critical for keeping
full history servers online.

Fix #5102
2024-09-14 11:38:25 -07:00
Ed Hennis
23991c99c3 test: Retry RPC commands to try to fix MacOS CI jobs (#5120)
* Retry some failed RPC connections / commands in unit tests
* Remove orphaned `getAccounts` function

Co-authored-by: John Freeman <jfreeman08@gmail.com>
2024-09-11 11:29:06 +01:00
Ed Hennis
cc0177be87 Update Release Notes for 2.2.1 and 2.2.2 2024-09-03 18:37:47 -04:00
Ed Hennis
37b3e96b04 Merge remote-tracking branch 'upstream/master' into upstream--develop
* upstream/master:
  Set version to 2.2.2
  Allow only 1 job queue slot for each validation ledger check
  Allow only 1 job queue slot for acquiring inbound ledger.
  Track latencies of certain code blocks, and log if they take too long
2024-09-03 17:58:54 -04:00
Ed Hennis
85214bdf81 Set version to 2.2.2 2024-08-31 15:12:59 -04:00
Mark Travis
fbbea9e6e2 Allow only 1 job queue slot for each validation ledger check
* refactor filtering of validations to specifically avoid
 concurrent checkAccept() calls for the same validation ledger hash.
* Log when duplicate concurrent validation requests are filtered.
* RAII for containers that track concurrent validation requests (sketched below).
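
A hedged sketch of the RAII tracking idea, with illustrative names rather than the actual rippled containers:

```cpp
#include <cstdint>
#include <mutex>
#include <set>

// The guard claims a "slot" for a validation ledger hash; the destructor
// releases it, so the entry cannot leak on any exit path.
class ScopedTracker
{
    std::mutex& mtx_;
    std::set<std::uint64_t>& active_;
    std::uint64_t hash_;
    bool claimed_ = false;

public:
    ScopedTracker(std::mutex& mtx, std::set<std::uint64_t>& active, std::uint64_t hash)
        : mtx_(mtx), active_(active), hash_(hash)
    {
        std::lock_guard lock(mtx_);
        claimed_ = active_.insert(hash_).second;
    }

    ScopedTracker(ScopedTracker const&) = delete;

    ~ScopedTracker()
    {
        if (claimed_)
        {
            std::lock_guard lock(mtx_);
            active_.erase(hash_);
        }
    }

    // False means another job is already checking this hash; the caller logs
    // the duplicate and skips scheduling a second job queue slot for it.
    bool
    claimed() const
    {
        return claimed_;
    }
};
```
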
2024-08-31 15:12:59 -04:00
Mark Travis
7741483894 Allow only 1 job queue slot for acquiring inbound ledger.
* Log when duplicate concurrent inbound ledger acquisitions are filtered.
* RAII for containers that track concurrent inbound ledger acquisitions.
* Comment on when to asynchronously acquire inbound ledgers, which
   may always be OK, but should have further review.
* Other small logging changes

Co-authored-by: Ed Hennis <ed@ripple.com>
2024-08-31 15:12:22 -04:00
John Freeman
2f432e812c docs: Update options documentation (#5083)
Co-authored-by: Elliot Lee <github.public@intelliot.com>
2024-08-28 17:31:33 -05:00
John Freeman
cad8970a57 refactor: Remove dead headers (#5081) 2024-08-28 14:23:38 -05:00
John Freeman
4d7aed84ec refactor: Remove reporting mode (#5092) 2024-08-28 13:00:50 -05:00
Valentin Balaschenko
00ed7c9424 Track latencies of certain code blocks, and log if they take too long 2024-08-26 19:03:56 -04:00
Ed Hennis
d9bd75e683 chore: Fix documentation generation job: (#5091)
* Add "doxygen" to list of supported branches to allow for testing and
  development.
* Add titles / H1 to some .md files that don't have them.
2024-08-15 17:03:50 -04:00
Ed Hennis
93d8bafb24 chore: libxrpl verification on CI (#5028)
Implements a CI workflow that detects when a new version of libxrpl is
proposed, uploads it to artifactory under the `clio` channel and
notifies Clio's CI to check this newly proposed version.
2024-08-15 12:51:50 -04:00
Scott Schurr
c19a88fee9 Address rare corruption of NFTokenPage linked list (#4945)
* Add fixNFTokenPageLinks amendment:

It was discovered that under rare circumstances the links between
NFTokenPages could be removed.  If this happens, then the
account_objects and account_nfts RPC commands under-report the
NFTokens owned by an account.

The fixNFTokenPageLinks amendment does the following to address
the problem:

- It fixes the underlying problem so no further broken links
  should be created.
- It adds Invariants so, if such damage were introduced in the
  future, an invariant would stop it.
- It adds a new FixLedgerState transaction that repairs
  directories that were damaged in this fashion.
- It adds unit tests for all of it.
2024-08-07 18:14:19 -04:00
Bronek Kozicki
0a331ea72e Factor out Transactor::trapTransaction (#5087) 2024-08-05 12:05:12 -04:00
John Freeman
7d27b11190 Remove shards (#5066) 2024-08-02 20:03:05 -04:00
Bronek Kozicki
eedfec015e Update gcovr EXCLUDE (#5084) 2024-08-02 17:25:44 -04:00
Bronek Kozicki
ffc343a2bc Fix crash inside OverlayImpl loops over ids_ (#5071) 2024-08-02 16:58:05 -04:00
Ed Hennis
e5aa605742 Set version to 2.3.0-b2 2024-07-31 17:14:04 -04:00
Bronek Kozicki
8b181ed818 Merge branch 'master' into develop 2024-07-31 15:12:15 +01:00
Ed Hennis
f5a349558e docs: Document the process for merging pull requests (#5010) 2024-07-30 20:19:03 -04:00
Scott Schurr
b9b75ddcf5 Remove unused constants from resource/Fees.h (#4856) 2024-07-30 12:47:04 -04:00
Mayukha Vadari
a39720e94a fix: change error for invalid feature param in feature RPC (#5063)
* Returns an "Invalid parameters" error if the `feature` parameter is provided and is not a string.
2024-07-30 11:18:25 -04:00
Ed Hennis
2820feb02a Ensure levelization sorting is ASCII-order across platforms (#5072) 2024-07-29 19:02:12 -04:00
Ed Hennis
8fc805d2e2 fix: Fix NuDB build error via Conan patch (#5061)
* Includes updated instructions in BUILD.md.
2024-07-29 18:14:41 -04:00
yinyiqian1
d54151e7c4 Disallow filtering account_objects by unsupported types (#5056)
* `account_objects` returns an invalid field error if `type` is not supported.
  This includes objects an account can't own, or which are unsupported by `account_objects`
* Includes:
  * Amendments
  * Directory Node
  * Fee Settings
  * Ledger Hashes
  * Negative UNL
2024-07-29 16:30:02 -04:00
Scott Schurr
21a0a64648 chore: Add comments to SignerEntries.h (#5059) 2024-07-25 17:12:59 -04:00
Scott Schurr
20707fac4a chore: Rename two files from Directory* to Dir*: (#5058)
The names of the files should reflect the name of the Dir class.

Co-authored-by: Zack Brunson <Zshooter@gmail.com>
Co-authored-by: Ed Hennis <ed@ripple.com>
2024-07-25 14:50:27 -04:00
Ed Hennis
e6ef0fc26c Set version to 2.2.1 2024-07-24 19:37:40 -04:00
John Freeman
c157816017 Use error codes throughout fast Base58 implementation 2024-07-24 19:37:34 -04:00
Mayukha Vadari
eba5d19377 Improve error handling in some RPC commands 2024-07-24 19:25:40 -04:00
Denis Angell
ad14d09a2b Update BUILD.md after PR #5052 (#5067)
* Document the need to specify "xrpld" and "tests" to build and test rippled.
2024-07-23 16:49:16 -04:00
John Freeman
f3bcc651c7 Add xrpld build option and Conan package test (#5052)
* Make xrpld target optional

* Add job to test Conan recipe

* [fold] address review comments

* [fold] Enable tests in workflows

* [fold] Rename with_xrpld option

* [fold] Fix grep expression
2024-07-11 15:04:30 -07:00
dashangcun
e8602b81fa chore: remove repeat words (#5053)
Signed-off-by: dashangcun <jchaodaohang@foxmail.com>
Co-authored-by: dashangcun <jchaodaohang@foxmail.com>
Co-authored-by: Zack Brunson <Zshooter@gmail.com>
2024-07-09 14:05:14 -07:00
yinyiqian1
0f32109993 fix CTID in tx command returns invalidParams on lowercase hex (#5049)
* fix CTID in tx command returns invalidParams on lowercase hex

* test mixed case and change auto to explicit type

* add header cctype because std::tolower is called

* remove unused local variable

* change test case comment from 'lowercase' to 'mixed case'

---------

Co-authored-by: Zack Brunson <Zshooter@gmail.com>
2024-07-05 11:10:54 -07:00
Ed Hennis
a17ccca615 Invariant: prevent a deleted account from leaving (most) artifacts on the ledger. (#4663)
* Add feature / amendment "InvariantsV1_1"

* Adds invariant AccountRootsDeletedClean:

* Checks that a deleted account doesn't leave any directly
  accessible artifacts behind.
* Always tests, but only changes the transaction result if
  featureInvariantsV1_1 is enabled.
* Unit tests.

* Resolves #4638

* [FOLD] Review feedback from @gregtatcam:

* Fix unused variable warning
* Improve Invariant test const correctness

* [FOLD] Review feedback from @mvadari:

* Centralize the account keylet function list, and some optimization

* [FOLD] Some structured binding doesn't work in clang

* [FOLD] Review feedback 2 from @mvadari:

* Clean up and clarify some comments.

* [FOLD] Change InvariantsV1_1 to unsupported

* Will allow multiple PRs to be merged over time using the same amendment.

* fixup! [FOLD] Change InvariantsV1_1 to unsupported

* [FOLD] Update and clarify some comments. No code changes.

* Move CMake directory

* Rearrange sources

* Rewrite includes

* Recompute loops

* Fix merge issue and formatting

---------

Co-authored-by: Pretty Printer <cpp@ripple.com>
2024-07-05 10:27:15 -07:00
Bronek Kozicki
7a1b238035 Bump codecov plugin version to version 4.5.0 (#5055)
This version includes fix https://github.com/codecov/codecov-action/pull/1471
which should end the codecov upload errors due to throttling.
2024-07-02 12:42:56 -07:00
yinyiqian1
e1534a3200 fix "account_nfts" with unassociated marker returning issue (#5045)
* fix "account_nfts" with unassociated marker returning issue

* create unit test for fixing nft page invalid marker not returning error

add more test

change test name

create unit test

* fix "account_nfts" with unassociated marker returning issue

* fix "account_nfts" with unassociated marker returning issue

* fix "account_nfts" with unassociated marker returning issue

* fix "account_nfts" with unassociated marker returning issue

* fix "account_nfts" with unassociated marker returning issue

* fix "account_nfts" with unassociated marker returning issue

* fix "account_nfts" with unassociated marker returning issue

* fix "account_nfts" with unassociated marker returning issue

* [FOLD] accumulated review suggestions

* move BEAST check out of lambda function

---------

Authored-by: Scott Schurr <scott@ripple.com>
2024-07-02 11:58:03 -07:00
Scott Schurr
9fec615dca fixInnerObjTemplate2 amendment (#5047)
* fixInnerObjTemplate2 amendment:

Apply inner object templates to all remaining (non-AMM)
inner objects.

Adds a unit test for applying the template to sfMajorities.
Other remaining inner objects showed no problems having
templates applied.

* Move CMake directory

* Rearrange sources

* Rewrite includes

* Recompute loops

---------

Co-authored-by: Pretty Printer <cpp@ripple.com>
2024-06-27 11:52:02 -07:00
seelabs
ef02893f2f Set version to 2.3.0-b1 2024-06-20 15:30:07 -04:00
John Freeman
7cf4611d7c Ignore restructuring commits (#4997) 2024-06-20 13:57:22 -05:00
Pretty Printer
d028005aa6 Recompute loops (#4997) 2024-06-20 13:57:18 -05:00
Pretty Printer
1d23148e6d Rewrite includes (#4997) 2024-06-20 13:57:16 -05:00
Pretty Printer
e416ee72ca Rearrange sources (#4997) 2024-06-20 13:57:14 -05:00
Pretty Printer
2e902dee53 Move CMake directory (#4997) 2024-06-20 13:57:12 -05:00
John Freeman
f6879da6c9 Add bin/physical.sh (#4997) 2024-06-20 13:57:10 -05:00
John Freeman
ae20a3ad3f Prepare to rearrange sources: (#4997)
- Remove CMake module "MultiConfig".
- Update clang-format configuration, CodeCov configuration,
  levelization script.
- Replace source lists in CMake with globs.
2024-06-20 13:57:03 -05:00
Bronek Kozicki
c706926ee3 Change order of checks in amm_info: (#4924)
* Change order of checks in amm_info

* Change amm_info error message in API version 3

* Change amm_info error tests
2024-06-18 12:55:40 -04:00
Scott Schurr
223e6c7590 Add the fixEnforceNFTokenTrustline amendment: (#4946)
Fix interactions between NFTokenOffers and trust lines.

Since NFTokenAcceptOffer does not check the trust line on which
the issuer receives the transfer fee, if the issuer deletes that
trust line after NFTokenCreateOffer, the trust line is re-created
for the issuer by NFTokenAcceptOffer.  That's fixed.

Resolves #4925.
2024-06-18 02:47:54 -04:00
Chenna Keshava B S
825864032a Replaces the usage of boost::string_view with std::string_view (#4509) 2024-06-17 16:41:03 -04:00
Elliot Lee
06733ec21a docs: explain how to find a clang-format patch generated by CI (#4521) 2024-06-17 15:32:08 -04:00
tequ
9f7c619e4f XLS-52d: NFTokenMintOffer (#4845) 2024-06-14 19:32:25 -04:00
todaymoon
3f5e3212fe chore: remove repeat words (#5041) 2024-06-14 15:36:59 -04:00
Alex Kremer
20d05492d2 Expose all amendments known by libxrpl (#5026) 2024-06-14 14:00:57 -04:00
Scott Schurr
ae7ea33b75 fixReducedOffersV2: prevent offers from blocking order books: (#5032)
Fixes issue #4937.

The fixReducedOffersV1 amendment fixed certain forms of offer
modification that could lead to blocked order books.  Reduced
offers can block order books if the effective quality of the
reduced offer is worse than the quality of the original offer
(from the perspective of the taker). It turns out that, for
small values, the quality of the reduced offer can be
significantly affected by the rounding mode used during
scaling computations.

Issue #4937 identified an additional code path that modified
offers in a way that could lead to blocked order books.  This
commit changes the rounding in that newly located code path so
the quality of the modified offer is never worse than the
quality of the offer as it was originally placed.

It is possible that additional ways of producing blocking
offers will come to light.  Therefore there may be a future
need for a V3 amendment.
2024-06-13 17:57:12 -04:00
Chenna Keshava B S
263e984bf4 Additional unit tests for testing deletion of trust lines (#4886) 2024-06-13 14:44:39 -04:00
Olek
58f3abe3c6 Fix conan typo: (#5044)
Add missing comma in 'exports_sources'
2024-06-12 13:16:54 -04:00
Bronek Kozicki
d576416953 Add new command line option to make replaying transactions easier: (#5027)
* Add trap_tx_hash command line option

This new option can be used only if replay is also enabled. It takes a transaction hash from the ledger loaded for replay, and will cause a specific line to be hit in Transactor.cpp, right before the selected transaction is applied.
2024-06-11 14:34:02 -04:00
John Freeman
e3d1bb271f Fix compatibility with Conan 2.x: (#5001)
Closes #4926, #4990
2024-06-11 13:26:01 -04:00
seelabs
2df635693d Set version to 2.2.0 2024-06-04 17:56:09 -04:00
seelabs
40b4adc9cc Set version to 2.2.0-rc3 2024-05-20 17:46:43 -04:00
Alex Kremer
0c971b4415 Add xrpl.libpp as an exported lib in conan (#5022) 2024-05-20 17:43:31 -04:00
Gregory Tsipenyuk
f2d37da4ca Fix Oracle's token pair deterministic order: (#5021)
Price Oracle data-series logic uses `unordered_map` to update the Oracle object.
This results in different servers disagreeing on the order of that hash table.
Consequently, the generated ledgers will have different hashes.
The fix uses `map` instead to guarantee the order of the token pairs
in the data-series.
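
A small illustration of why the container choice matters here (hypothetical names):

```cpp
#include <cstdint>
#include <map>
#include <string>
#include <utility>

// std::map iterates in key order on every server; std::unordered_map iteration
// order depends on hashing/bucketing details and can differ between processes.
// Anything serialized into a ledger object must iterate deterministically.
using TokenPair = std::pair<std::string, std::string>;  // e.g. {"XRP", "USD"}

void
serializeSeries(std::map<TokenPair, std::uint64_t> const& priceSeries)
{
    for (auto const& [pair, scaledPrice] : priceSeries)
    {
        // Every server walks the pairs in the same (sorted) order, so the
        // resulting Oracle object, and therefore the ledger hash, agrees.
        (void)pair;
        (void)scaledPrice;
    }
}
```
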
2024-05-20 16:33:01 -04:00
seelabs
d5e5c3c220 Set version to 2.2.0-rc2 2024-05-16 15:36:21 -04:00
Gregory Tsipenyuk
15390bedd5 Fix last Liquidity Provider withdrawal:
Due to rounding, the LPTokenBalance of the last
Liquidity Provider (LP) might not match this LP's
trustline balance. This fix sets LPTokenBalance on
the last LP withdrawal to this LP's LPToken trustline
balance.
2024-05-16 13:36:24 -04:00
Gregory Tsipenyuk
7f6a079aa4 Fix offer crossing via single path AMM with transfer fee:
A single-path AMM offer has to factor in the transfer-in rate
when calculating the upper bound quality and the quality function,
because a single-path AMM offer's quality is not constant.
This fix factors in the transfer fee in
BookStep::adjustQualityWithFees().
2024-05-16 13:36:24 -04:00
Gregory Tsipenyuk
2a25f58d40 Fix adjustAmountsByLPTokens():
The fix is for the function to return the actual adjusted
LPTokens and amounts.
2024-05-16 13:32:05 -04:00
Gregory Tsipenyuk
2705109592 Add the fixAMMOfferRounding amendment: (#4983)
* Fix AMM offer rounding and low quality LOB offer blocking AMM:

A single-path AMM offer alongside an account offer on the DEX is always generated
starting with the takerPays side, which is rounded up, and then
the takerGets side, which is rounded down. This rounding ensures that the pool's
product invariant is maintained. However, when one side of the offer
is XRP, this rounding can result in the AMM offer having a lower
quality, potentially causing offer generation to fail if the quality
is lower than the account's offer quality.

To address this issue, the proposed fix adjusts the offer generation process
to start with the XRP side first and always round it down. This results
in a smaller offer size, improving the offer's quality. Regardless of whether the offer
has XRP or not, the rounding is done so that the offer size is minimized.
This change still ensures the product invariant, as the other generated
side is the exact result of the swap-in or swap-out equations.

If liquidity can be provided by both an AMM and a LOB offer during offer crossing,
then the AMM offer is generated so that it matches the LOB offer quality. If the LOB
offer quality is less than the limit quality, then the generated AMM offer quality
is also less than the limit quality and the offer doesn't cross. To address
this issue, if the LOB quality is better than the limit quality, then the LOB
quality is used to generate the AMM offer. Otherwise, the quality is not used to
generate the AMM offer. In this case, the limitOut() function in StrandFlow limits
the out amount to match the strand's quality to the limit quality and consume
the maximum AMM liquidity.
2024-05-14 15:28:38 -04:00
Denis Angell
244ac5e024 Update CONTRIBUTING.md (#4904) 2024-05-13 10:54:34 -04:00
Gregory Tsipenyuk
f4da2e31d9 Price Oracle: validate input parameters and extend test coverage: (#5013)
* Price Oracle: validate input parameters and extend test coverage:

Validate trim, time_threshold, document_id are valid
Int, UInt, or string convertible to UInt. Validate base_asset
and quote_asset are valid currency. Update error codes.
Extend Oracle and GetAggregatePrice unit-tests.
Denote unreachable coverage code.

* Set one-line LCOV_EXCL_LINE

* Move ledger_entry tests to LedgerRPC_test.cpp

* Add constants for "None"

* Fix LedgerRPC test

---------

Co-authored-by: Scott Determan <scott.determan@yahoo.com>
2024-05-09 15:17:16 -04:00
Michael Legleux
f650949573 Add external directory to Conan recipe's exports (#5006) 2024-05-02 15:44:42 -04:00
John Freeman
76128051c0 Add missing includes (#5011) 2024-05-02 11:14:59 -04:00
seelabs
5aa1106ba1 Remove flow assert: (#5009)
Rounding in the payment engine is causing an assert to sometimes fire
with "dust" amounts. This is causing issues when running debug builds of
rippled. This issue will be addressed, but the assert is no longer
serving its purpose.
2024-05-01 13:25:31 -04:00
Nik Bougalis
dccf3f49ef Update list of maintainers: (#4984)
I am resigning from my role as maintainer of the `rippled` codebase.

Please update repository permissions accordingly, prior to merging this pull request.

Thanks to everyone who has contributed, especially those whom I had the opportunity to closely collaborate with.
2024-04-29 18:44:20 -04:00
Ed Hennis
02ec8b7962 Set version to 2.2.0-rc1 2024-04-26 10:56:09 -04:00
seelabs
3f7ce939c8 fix amendment: AMM swap should honor invariants: (#5002)
The AMM has an invariant for swaps where:
new_balance_1*new_balance_2 >= old_balance_1*old_balance_2

Due to rounding, this invariant could sometimes be violated (although by
very small amounts).

This patch introduces an amendment `fixAMMRounding` that changes the
rounding to always favor the AMM. Doing this should maintain the
invariant.

Co-authored-by: Bronek Kozicki
Co-authored-by: thejohnfreeman
2024-04-25 21:15:19 -04:00
seelabs
b65cea1984 Add global access to the current ledger rules:
It can be difficult to make transaction-breaking changes to low-level
code because the low-level code does not have access to a ledger and the
currently activated amendments in that ledger (the "rules"). This patch
adds global access to the current ledger rules as a `std::optional`. If
the optional is empty (not seated), then there is no active transaction.
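
A rough sketch of the idea, with hypothetical names rather than rippled's actual accessor:

```cpp
#include <optional>

// Hypothetical names; not rippled's actual API.
struct Rules
{
    bool
    enabled(int /* amendment */) const
    {
        return false;  // stand-in
    }
};

std::optional<Rules>&
currentRules()
{
    // rippled scopes this to the transaction being applied; a function-local
    // static keeps the sketch self-contained.
    static std::optional<Rules> rules;
    return rules;
}

bool
useNewRounding()
{
    // If no transaction is active (optional empty), fall back to legacy behavior.
    auto const& rules = currentRules();
    return rules && rules->enabled(/* some amendment id */ 42);
}
```
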
2024-04-25 21:15:19 -04:00
Snoppy
b422e71eed chore: fix typos (#4958) 2024-04-25 12:05:12 -05:00
Ed Hennis
e9859ac1b1 test: Add RPC error checking support to unit tests (#4987) 2024-04-24 13:54:46 -04:00
John Freeman
b84f7e7c10 Ignore more commits 2024-04-19 11:45:51 -05:00
John Freeman
513842b23f Address compiler warnings 2024-04-19 11:32:16 -05:00
John Freeman
985c80fbc6 Add markers around source lists 2024-04-19 11:32:16 -05:00
John Freeman
35fe957020 Fix source lists 2024-04-19 11:32:16 -05:00
Pretty Printer
0eebe6a5f4 Rewrite includes
$ find src/ripple/ src/test/ -type f -exec sed -i 's:include\s*["<]ripple/\(.*\)\.h\(pp\)\?[">]:include <ripple/\1.h>:' {} +
2024-04-19 11:32:15 -05:00
Pretty Printer
760f16f568 Format formerly .hpp files 2024-04-19 11:32:15 -05:00
Pretty Printer
241b9ddde9 Rename .hpp to .h 2024-04-19 11:32:15 -05:00
John Freeman
3fcfb5cd49 Simplify protobuf generation 2024-04-19 11:32:14 -05:00
Pretty Printer
e2384885f5 Consolidate external libraries 2024-04-19 11:32:14 -05:00
John Freeman
dd312c3cc5 Remove packaging scripts 2024-04-19 11:32:14 -05:00
John Freeman
80379927e8 Remove unused files 2024-04-19 11:32:13 -05:00
Ed Hennis
676aae2755 Set version to 2.2.0-b3 2024-04-18 21:09:31 -04:00
Ed Hennis
f20e66e6f9 fix: Remove redundant STAmount conversion in test (#4996) 2024-04-18 21:09:25 -04:00
Scott Determan
cd737ad7d3 fix: resolve database deadlock: (#4989)
The `rotateWithLock` function holds a lock while it calls a callback
function that's passed in by the caller. This is a problematic design
that needs to be used very carefully. In this case, at least one caller
passed in a callback that eventually relocks the mutex on the same
thread, causing UB (a deadlock was observed). The caller was from
SHAMapStoreImpl, and it called `clearCaches`. This `clearCaches` can
potentially call `fetchNodeObject`, which tried to relock the mutex.

This patch resolves the issue by changing the mutex type to a
`recursive_mutex`. Ideally, the code should be rewritten so it doesn't
hold the mutex during the callback and the mutex should be changed back
to a regular mutex.

Co-authored-by: Ed Hennis <ed@ripple.com>
2024-04-18 16:44:59 -04:00
Chenna Keshava B S
df3aa84523 test: verify the rounding behavior of equal-asset AMM deposits (#4982)
* Specifically, test using tfLPToken flag
2024-04-18 16:15:31 -04:00
John Freeman
24a275ba25 test: Add tests to raise coverage of AMM (#4971)
---------

Co-authored-by: Howard Hinnant <howard.hinnant@gmail.com>
Co-authored-by: Mark Travis <mtravis@ripple.com>
Co-authored-by: Bronek Kozicki <brok@incorrekt.com>
Co-authored-by: Mayukha Vadari <mvadari@gmail.com>
Co-authored-by: Chenna Keshava <ckeshavabs@gmail.com>
2024-04-18 15:25:22 -04:00
Bronek Kozicki
aae438315f chore: Improve codecov coverage reporting (#4977)
* Amend `.codecov.yml` to disable coverage reporting of test sources
  and explicitly set most parameters
* Increase codecov upload retry time to 210s (from 35s)
* Upgrade gcovr adding support for more coverage formats (lcov, clover, jacoco)
* Upgrade github actions in coverage workflow
* Explicitly disable codecov plugins (also removing `gcov` coverage, which is not
  correctly handled by codecov https://github.com/codecov/feedback/issues/334)
2024-04-18 13:21:33 -04:00
Bronek Kozicki
8b0d049b9f test: Unit test for AMM offer overflow (#4986) 2024-04-18 12:30:18 -04:00
Mayukha Vadari
659bd99a67 fix amendment to add PreviousTxnID/PreviousTxnLgrSequence (#4751)
This amendment, `fixPreviousTxnID`, adds `PreviousTxnID` and
`PreviousTxnLgrSequence` as fields to all ledger objects that did
not already have them included (`DirectoryNode`, `Amendments`,
`FeeSettings`, `NegativeUNL`, and `AMM`). This makes it much easier
to go through the history of these ledger objects.
2024-04-18 10:41:25 -04:00
Ed Hennis
c88166e055 Set version to 2.2.0-b2 2024-04-04 15:42:39 -04:00
Michael Legleux
099c0bcd34 fix Conan component reference typo 2024-04-04 15:42:37 -04:00
Bronek Kozicki
d992e63075 Remove unused lambdas from MultiApiJson_test 2024-04-04 18:22:23 +01:00
Ed Hennis
c187f750fe chore: Default validator-keys-tool to master branch: (#4943)
* master is the default branch for that project. There's no point in
  using develop.
2024-04-04 10:40:35 -04:00
Ed Hennis
bcbf6c1973 Merge pull request #4968 from XRPLF/master
Merging changes for 2.1.1 from master into develop
2024-03-28 13:51:17 -04:00
Ed Hennis
4bcbf70cae chore: change Github Action triggers for build/test jobs (#4956)
Github Actions for the build/test jobs (nix.yml, mac.yml, windows.yml) will only run on branches that build packages (develop, release, master), and branches with names starting with "ci/". This is intended as a compromise between disabling CI jobs on personal forks entirely, and having the jobs run as a free-for-all. Note that it will not affect PR jobs at all.
2024-03-28 13:29:23 -04:00
seelabs
2d1854f354 Set version to 2.1.1 2024-03-27 13:46:59 +01:00
Gregory Tsipenyuk
a7c4a47723 fix: improper handling of large synthetic AMM offers:
A large synthetic offer was not handled correctly in the payment engine.
This patch fixes that issue and introduces a new invariant check while
processing synthetic offers.
2024-03-27 13:46:59 +01:00
Scott Determan
61672ad3ff fixXChainRewardRounding: round reward shares down: (#4933)
When calculating reward shares, the amount should always be rounded
down. If the `fixUniversalNumber` amendment is not active, this works
correctly. If it is active, then the amount is incorrectly rounded
up. This patch introduces an amendment so it will be rounded down.
2024-03-22 17:02:17 -04:00
Mark Travis
cea43099d2 Don't reach consensus as quickly if no other proposals seen: (#4763)
This fixes a case where a peer can desync under a certain timing
circumstance--if it reaches a certain point in consensus before it receives
proposals. 

This was noticed under high transaction volumes. Namely, when we arrive at the
point of deciding whether consensus is reached after minimum establish phase
duration but before having received any proposals. This could be caused by
finishing the previous round slightly faster and/or having some delay in
receiving proposals. Existing behavior arrives at consensus immediately after
the minimum establish duration with no proposals. This causes us to desync
because we then close a non-validated ledger. The change in this PR causes us to
wait for a configured threshold before making the decision to arrive at
consensus with no proposals. This allows validators to catch up and for brief
delays in receiving proposals to be absorbed. There should be no drawback since,
with no proposals coming in, we needn't be in a huge rush to jump ahead.
2024-03-22 16:22:29 -04:00
Bronek Kozicki
6edf03c152 Write improved forAllApiVersions used in NetworkOPs (#4833) 2024-03-22 15:28:16 -04:00
Alloy Networks
47c8cc24f4 Remove zaphod.alloy.ee hub from default server list: (#4903)
Remove the zaphod.alloy.ee hubs from the bootstrap and default configuration after 5 years. It has been an honor to run these servers, but it is now time for another entity to step into this role.

The zaphod servers will be taken offline in a phased manner keeping all those who have peering arrangements informed.

These would be the preferred attributes of a bootstrap set of hubs:

    1. Commitment to run the hubs for a minimum of 2 years
    2. Highly available
    3. Geographically dispersed
    4. Secure and up to date
    5. Committed to ensure that peering information is kept private
2024-03-22 14:52:45 -04:00
Bronek Kozicki
64e46878e0 Enforce no duplicate slots from incoming connections: (#4944)
We do not currently enforce that an incoming peer connection does not have a
remote_endpoint which is already used (either by an incoming or outgoing
connection), and hence already stored in slots_. If we happen to receive a
connection from such a duplicate remote_endpoint, it will eventually result in a
crash (when disconnecting) or weird behavior (when updating slot state), as a
result of an apparently matching remote_endpoint in slots_ being used by a
different connection.
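
Conceptually, the uniqueness check amounts to something like this (illustrative names; the real slots_ map stores much richer state):

```cpp
#include <set>
#include <string>

// Illustrative only: refuse an inbound connection whose remote endpoint is
// already tracked, so two live slots can never share a remote_endpoint.
std::set<std::string> slots_;  // keyed by "ip:port" for the sketch

bool
acceptIncoming(std::string const& remoteEndpoint)
{
    // insert() returns false if the endpoint is already present, i.e. a
    // duplicate; the caller should then refuse or drop the new connection.
    return slots_.insert(remoteEndpoint).second;
}
```
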
2024-03-22 14:08:16 -04:00
Mayukha Vadari
ea9b1e3503 fixEmptyDID: fix amendment to handle empty DID edge case: (#4950)
This amendment fixes an edge case where an empty DID object can be
created. It adds an additional check to ensure that DIDs are
non-empty when created, and returns a `tecEMPTY_DID` error if the DID
would be empty.
2024-03-22 11:09:54 -04:00
Olek
2e9261cb26 perf: improve account_tx SQL query: (#4955)
The witness server makes heavy use of the `account_tx` RPC command. Perf
testing showed that the SQL query used by `account_tx` became unacceptably slow
when the DB was large and there was a `marker` parameter. The plan for the query
showed only indexed reads. This appears to be an issue with the internal SQLite
optimizer. This patch rewrote the query to use `UNION` instead of `OR` and
significantly improves performance. See RXI-896 and RIPD-1847 for more details.
2024-03-21 18:42:29 -04:00
John Freeman
69143d71f8 Fix workflows (#4951)
- Update container for Doxygen workflow. Matches Linux workflow, with newer GLIBC version required by newer actions.
- Fixes macOS workflow to install and configure Conan correctly. Still fails on tests, but that does not seem attributable to the workflow.
2024-03-20 09:19:48 -05:00
Ed Hennis
0c32fc5f2a test: Env unit test RPC errors return a unique result: (#4877)
* telENV_RPC_FAILED is a new code, reserved exclusively
  for unit tests when RPC fails. This will
  make those types of errors distinct and easier to test
  for when expected and/or diagnose when not.
* Output RPC command result when result is not expected.
2024-03-19 12:13:52 -04:00
Michael Legleux
af9cabe100 Install more public headers (#4940)
Fixes some mistakes in #4885
2024-03-13 21:14:43 -05:00
John Freeman
2ecb851926 Update remaining actions (#4949)
Downgrade {upload,download}-artifact action to v3 because of unreliability with v4.
2024-03-13 17:42:41 -05:00
Bronek Kozicki
2ffead76c1 Upgrade to xxhash 0.8.2 as a Conan requirement, enable SIMD hashing (#4893)
We are currently using old version 0.6.2 of `xxhash`, as a verbatim copy and paste of its header file `xxhash.h`. Switch to the more recent version 0.8.2. Since this version is in Conan Center (and properly protects its ABI by keeping the state object incomplete), add it as a Conan requirement. Switch to the SIMD instructions (in the new `XXH3` family) supported by the new version.
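
A small usage sketch of the one-shot XXH3 API from xxhash 0.8.x, assuming the header is consumed via the Conan package:

```cpp
#include <xxhash.h>  // provided by the xxhash/0.8.2 Conan package

#include <cstdint>
#include <string_view>

// One-shot XXH3 hashing; the streaming state type stays opaque to the caller,
// which is what allows consuming the library as a normal dependency instead of
// a vendored header.
std::uint64_t
hash64(std::string_view data)
{
    return XXH3_64bits(data.data(), data.size());
}
```
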
2024-03-13 16:12:22 -05:00
John Freeman
5cc377751a Fix workflows (#4948)
The problem was `CONAN_USERNAME` environment variable, which Conan 1.x uses as the default user in package references.
2024-03-13 13:23:43 -05:00
Scott Determan
ad8e9764e6 fix: order book update variable swap: (#4890)
This is likely the result of a typo when the code was simplified.
2024-03-12 17:52:07 -04:00
John Freeman
4ce426d8f6 Embed patched recipe for RocksDB 6.29.5 (#4947) 2024-03-12 17:01:15 -04:00
Gregory Tsipenyuk
c28e005087 build: add STCurrency.h to xrpl_core to fix clio build (#4939) 2024-03-06 16:24:07 -05:00
Mayukha Vadari
22b751834f feat: add user version of feature RPC (#4781)
* uses same formatting as admin RPC
* hides potentially sensitive data
2024-03-05 16:43:31 -05:00
Scott Determan
cce09b717e Fast base58 codec: (#4327)
This algorithm is about an order of magnitude faster than the existing
algorithm (about 10x faster for encoding and about 15x faster for
decoding - including the double hash for the checksum). The algorithms
use gcc's int128 (a fast MSVC version will have to wait; in the meantime MSVC
falls back to the slow code).
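
For illustration, the kind of 128-bit intermediate arithmetic this enables on gcc/clang (a hypothetical helper, not the codec's actual code):

```cpp
#include <cstdint>

// Multiply-accumulate one 64-bit limb of a big number by a 64-bit value,
// using unsigned __int128 (gcc/clang) for the intermediate product.
using u128 = unsigned __int128;

std::uint64_t
muladd(std::uint64_t limb, std::uint64_t mul, std::uint64_t& carry)
{
    u128 const t = u128(limb) * mul + carry;
    carry = static_cast<std::uint64_t>(t >> 64);
    return static_cast<std::uint64_t>(t);
}
```
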
2024-03-05 15:23:27 -05:00
Chenna Keshava B S
62dae3c6c6 Remove default ctors from SecretKey and PublicKey: (#4607)
* It is now an invariant that all constructed Public Keys are valid,
  non-empty and contain 33 bytes of data.
* Additionally, the memory footprint of the PublicKey class is reduced.
  The size_ data member is declared as static.
* Distinguish and identify the PublisherList retrieved from the local
  config file, versus the ones obtained from other validators.
* Fixes #2942
2024-03-05 12:02:53 -05:00
seelabs
97863e0b62 Set version to 2.2.0-b1 2024-02-29 11:07:24 -05:00
Gregory Tsipenyuk
8a2f6bec33 fix compile error on gcc 13: (#4932)
The compilation fails due to an issue in the initializer list
of an optional argument, which holds a vector of pairs.
The code compiles correctly on earlier gcc versions, but fails on gcc 13.
2024-02-28 09:34:22 -05:00
Gregory Tsipenyuk
e718378bdb Price Oracle (XLS-47d): (#4789) (#4789)
Implement native support for Price Oracles.

 A Price Oracle is used to bring real-world data, such as market prices,
 onto the blockchain, enabling dApps to access and utilize information
 that resides outside the blockchain.

 Add Price Oracle functionality:
 - OracleSet: create or update the Oracle object
 - OracleDelete: delete the Oracle object

 To support this functionality add:
 - New RPC method, `get_aggregate_price`, to calculate aggregate price for a token pair of the specified oracles
 - `ltOracle` object

 The `ltOracle` object maintains:
 - Oracle Owner's account
 - Oracle's metadata
 - Up to ten token pairs with the scaled price
 - The last update time the token pairs were updated

 Add Oracle unit-tests
2024-02-26 06:28:26 -05:00
seelabs
d7d15a922a Set version to 2.1.0 2024-02-20 19:56:19 -05:00
Ed Hennis
e74cb35aa4 test: guarantee proper lifetime for temporary Rules object: (#4917)
* Commit 01c37fe introduced a change to the STTx unit test where a local
  "defaultRules" object was created with a temporary inline "presets"
  value provided to the ctor. Rules::Impl stores a const ref to the
  presets provided to the ctor.  This particular call provided an inline
  temp variable, which goes out of scope as soon as the object is
  created. On Windows, attempting to use the presets (e.g. via the
  enabled() function) causes an access violation, which crashes the test
  run.
* An audit of the code indicates that all other instances of Rules use
  the Application's config.features list, which will have a sufficient
  lifetime.
2024-02-16 13:31:03 -08:00
Elliot Lee
da68651f61 Set version to 2.1.0-rc1 2024-02-07 13:59:44 -08:00
Gregory Tsipenyuk
be12136b8a fixInnerObjTemplate: set inner object template (#4906)
Add `STObject` constructor to explicitly set the inner object template.
This allows certain AMM transactions to apply in the same ledger:

There is no issue if the trading fee is greater than or equal to 0.01%.
If the trading fee is less than 0.01%, then:
- After AMM create, AMM transactions must wait for one ledger to close
  (3-5 seconds).
- After one ledger is validated, all AMM transactions succeed, as
  appropriate, except for AMMVote.
- The first AMMVote which votes for a 0 trading fee in a ledger will
  succeed. Subsequent AMMVote transactions which vote for a 0 trading
  fee will wait for the next ledger (3-5 seconds). This behavior repeats
  for each ledger.

This has no effect on the ultimate correctness of AMM. This amendment
will allow the transactions described above to succeed as expected, even
if the trading fee is 0 and the transactions are applied within one
ledger (block).
2024-02-07 13:58:12 -08:00
Chenna Keshava B S
6d3c21e369 feat: allow port_grpc to be specified in [server] stanza (#4728)
Prior to this commit, `port_grpc` could not be added to the [server]
stanza. Instead of validating gRPC IP/Port/Protocol information in
ServerHandler, validate grpc port info in GRPCServer constructor. This
should not break backwards compatibility.

gRPC-related config info must be in a section (stanza) called
[port_grpc].

* Close #4015 - That was an alternate solution. It was decided that with
  relaxed validation, it is not necessary to rename port_grpc.
* Fix #4557
2024-02-06 20:14:40 -08:00
Michael Legleux
1e96a1d6eb build: add headers needed in Conan package for libxrpl (#4885)
These headers are required in the xrpl Conan package in order for
xbridge witness server (xbwd) to build. This change to libxrpl may help
any dependents of libxrpl. This addition does not change any C++ code.
2024-02-05 08:17:32 -08:00
Shawn Xie
828bb64ebc fixNFTokenReserve: ensure NFT tx fails when reserve is not met (#4767)
Without this amendment, an NFTokenAcceptOffer transaction can succeed
even when the NFToken recipient does not have sufficient reserves for
the new NFTokenPage. This allowed accounts to accept NFT sell offers
without having a sufficient reserve. (However, there was no issue in
brokered mode or when a buy offer is involved.)

Instead, the transaction should fail with `tecINSUFFICIENT_RESERVE` as
appropriate. The `fixNFTokenReserve` amendment adds checks in the
NFTokenAcceptOffer transactor to check if the OwnerCount changed. If it
did, then it checks the new reserve requirement.

Fix #4679
2024-02-02 14:27:21 -08:00
John Freeman
6f00d32f7e fix(libxrpl): change library names in Conan recipe (#4831)
Use consistent platform-agnostic library names on all platforms.

Fix an issue that prevents dependents like validator-keys-tool from
linking to libxrpl on Windows.

It is bad practice to change the binary base name depending on the
platform. CMake already manipulates the base name into a final name that
fits the conventions of the platform. Linkers accept base names on the
command line and then look for conventional names on disk.
2024-02-01 16:57:29 -08:00
Ed Hennis
f9e365828a test: check for success/failure of Windows CI unit tests (#4871)
* Disable the Windows CI unit tests "allowed to fail" workaround which
  was previously introduced in #4596.
* The runner hardware was upgraded, and the unit tests have been passing
  since then.
2024-01-31 22:29:29 -08:00
Bronek Kozicki
90d463b925 test: add unit test for redundant payment (#4860)
If the payee and payer are the same account, then the transaction fails
in preflight with temREDUNDANT.
2024-01-30 22:14:57 -08:00
Elliot Lee
22cdb5728b Set version to 2.0.1 2024-01-29 08:36:10 -08:00
Bronek Kozicki
901152bd93 chore: retry codecov upload (#4896)
Update to #4849, using a workaround for spurious codecov upload errors.

Spurious codecov upload errors are expected in public repos which rely
on PRs via forks. Retrying uploads is a decent and easy workaround.
2024-01-24 16:46:24 -08:00
John Freeman
d9a5bca625 docs: fix broken links in docs/build/conan.md (#4699) 2024-01-23 20:35:22 -08:00
Elliot Lee
1676e9fe21 Set version to 2.0.1-rc1 2024-01-22 14:48:05 -08:00
Bronek Kozicki
fad9d639bf test: improve code coverage reporting (#4849)
* Speed up the generation of coverage reports by using multiple cores.

* Add codecov step to coverage workflow.
2024-01-22 13:09:18 -08:00
Chenna Keshava B S
efe6722bf8 docs: update help message about unit test-suite pattern matching (#4846)
Update the "rippled --help" message for the "-u" parameter. This
documents the unit test name pattern matching rule implemented by #4634.

Fix #4800
2024-01-19 21:55:57 -08:00
Mark Travis
a41f38547b Revert "Asynchronously write batches to NuDB. (#4503)" (#4882)
This reverts commit 1d9db1bfdd.

This improves the stability of online deletion.
2024-01-19 13:26:26 -08:00
1858 changed files with 130065 additions and 91440 deletions

View File

@@ -44,18 +44,26 @@ DerivePointerAlignment: false
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
IncludeBlocks: Regroup
IncludeCategories:
- Regex: '^<(BeastConfig)'
- Regex: '^<(test)/'
Priority: 0
- Regex: '^<(ripple)/'
- Regex: '^<(xrpld)/'
Priority: 1
- Regex: '^<(xrpl)/'
Priority: 2
- Regex: '^<(boost)/'
Priority: 3
- Regex: '.*'
- Regex: '^.*/'
Priority: 4
- Regex: '^.*\.h'
Priority: 5
- Regex: '.*'
Priority: 6
IncludeIsMainRegex: '$'
IndentCaseLabels: true
IndentFunctionDeclarationAfterType: false
IndentRequiresClause: true
IndentWidth: 4
IndentWrappedFunctionNames: false
KeepEmptyLinesAtTheStartOfBlocks: false
@@ -71,6 +79,7 @@ PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 200
PointerAlignment: Left
ReflowComments: true
RequiresClausePosition: OwnLine
SortIncludes: true
SpaceAfterCStyleCast: false
SpaceBeforeAssignmentOperators: true
@@ -85,3 +94,4 @@ SpacesInSquareBrackets: false
Standard: Cpp11
TabWidth: 8
UseTab: Never
QualifierAlignment: Right

View File

@@ -1,5 +1,37 @@
codecov:
ci:
- !appveyor
- travis
require_ci_to_pass: true
comment:
behavior: default
layout: reach,diff,flags,tree,reach
show_carryforward_flags: false
coverage:
range: "70..85"
precision: 1
round: nearest
status:
project:
default:
target: 75%
threshold: 2%
patch:
default:
target: auto
threshold: 2%
changes: false
github_checks:
annotations: true
parsers:
cobertura:
partials_as_hits: true
handle_missing_conditions : true
slack_app: false
ignore:
- "src/test/"
- "include/xrpl/beast/test/"
- "include/xrpl/beast/unit_test/"

View File

@@ -2,3 +2,12 @@
# To use it by default in git blame:
# git config blame.ignoreRevsFile .git-blame-ignore-revs
50760c693510894ca368e90369b0cc2dabfd07f3
e2384885f5f630c8f0ffe4bf21a169b433a16858
241b9ddde9e11beb7480600fd5ed90e1ef109b21
760f16f56835663d9286bd29294d074de26a7ba6
0eebe6a5f4246fced516d52b83ec4e7f47373edd
2189cc950c0cebb89e4e2fa3b2d8817205bf7cef
b9d007813378ad0ff45660dc07285b823c7e9855
fe9a5365b8a52d4acc42eb27369247e6f238a4f9
9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
552377c76f55b403a1c876df873a23d780fcc81c

.github/CODEOWNERS vendored Normal file
View File

@@ -0,0 +1,8 @@
# Allow anyone to review any change by default.
*
# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers

.github/actions/build-test/action.yml vendored Normal file
View File

@@ -0,0 +1,76 @@
name: Build and Test (Linux, MacOS, and Windows)
inputs:
build_dir:
description: 'The directory where to build.'
required: true
type: string
build_type:
description: 'The build type to use.'
required: true
type: string
cmake_args:
description: 'Additional arguments to pass to CMake.'
required: false
type: string
cmake_generator:
description: 'The CMake generator to use for the build.'
required: true
type: string
cmake_target:
description: 'The CMake target to build.'
required: true
type: string
os:
description: 'A string representing which operating system is used.'
required: true
type: choice
options:
- Linux
- MacOS
- Windows
# Configure CMake, build the requested target, verify on Linux that the binary
# is statically linked against libstdc++/libgcc, and run the unit tests.
runs:
using: composite
steps:
- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
cmake \
${{ inputs.cmake_generator && format('-G "{0}"', inputs.cmake_generator) || '' }} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
-Dtests=TRUE \
-Dxrpld=TRUE \
${{ inputs.cmake_args }} \
..
- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
cmake --build . \
--config ${{ inputs.build_type }} \
--parallel $(nproc) \
--target ${{ inputs.cmake_target }}
- name: Check linking
if: inputs.os == 'Linux'
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Test the binary
shell: bash
working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'Windows' && inputs.build_type || '' }}
run: |
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure

View File

@@ -1,29 +0,0 @@
name: build
inputs:
generator:
default: null
configuration:
required: true
cmake-args:
default: null
# An implicit input is the environment variable `build_dir`.
runs:
using: composite
steps:
- name: configure
shell: bash
run: |
cd ${build_dir}
cmake \
${{ inputs.generator && format('-G "{0}"', inputs.generator) || '' }} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
${{ inputs.cmake-args }} \
..
- name: build
shell: bash
run: |
cmake \
--build ${build_dir} \
--config ${{ inputs.configuration }} \
--parallel ${NUM_PROCESSORS:-$(nproc)}

View File

@@ -0,0 +1,51 @@
name: Configure Conan
inputs:
conan_global_conf:
description: 'The contents of the global Conan configuration file.'
required: true
type: string
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
type: string
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
type: string
# Install the Conan profiles and log into the specified remote. We first remove
# the remote if it already exists, which can occur on self-hosted runners where
# the workspace is not cleaned up between runs.
runs:
using: composite
steps:
- name: Install Conan profile
shell: bash
run: |
echo "${{ inputs.conan_global_conf }}" >> $(conan config home)/global.conf
conan config install conan/profiles/default -tf $(conan config home)/profiles/
echo "Installed Conan profile:"
conan profile show
- name: Add Conan remote
shell: bash
run: |
if conan remote list | grep -q '${{ inputs.conan_remote_name }}'; then
conan remote remove ${{ inputs.conan_remote_name }}
echo "Removed Conan remote '${{ inputs.conan_remote_name }}'."
fi
conan remote add --index 0 ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
echo "Added new conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
- name: Log into Conan remote
shell: bash
run: |
conan remote login ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_username }} --password "${{ inputs.conan_remote_password }}"
conan remote list-users

View File

@@ -1,26 +0,0 @@
name: dependencies
inputs:
configuration:
required: true
# An implicit input is the environment variable `build_dir`.
runs:
using: composite
steps:
- name: unlock Conan
shell: bash
run: conan remove --locks
- name: export custom recipes
shell: bash
run: |
conan export external/snappy snappy/1.1.10@
conan export external/soci soci/4.0.3@
- name: install dependencies
shell: bash
run: |
mkdir ${build_dir}
cd ${build_dir}
conan install \
--output-folder . \
--build missing \
--settings build_type=${{ inputs.configuration }} \
..

View File

@@ -0,0 +1,37 @@
name: Install-dependencies
inputs:
build_dir:
description: 'The directory where to build.'
required: true
type: string
build_type:
description: 'The build type to use.'
required: true
type: string
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
# Install the Conan dependencies into the build directory and upload the
# resulting packages to the specified remote.
runs:
using: composite
steps:
- name: Install Conan dependencies
shell: bash
run: |
mkdir -p ${{ inputs.build_dir }}
cd ${{ inputs.build_dir }}
conan install \
--output-folder . \
--build "*" \
--options:host "&:tests=True" \
--options:host "&:xrpld=True" \
--settings:all build_type=${{ inputs.build_type }} \
..
- name: Upload Conan dependencies
shell: bash
run: conan upload '*' --confirm --remote ${{ inputs.conan_remote_name }}
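
Taken together, the three composite actions above (build-test, configure-conan, and install-dependencies) are meant to be chained from a single workflow job. The following is a rough orientation sketch only: the job name and the concrete build_dir/build_type values are illustrative, CONAN_GLOBAL_CONF is assumed to be defined as a workflow-level env var, and the vars.*/secrets.* names mirror the Main workflow further below, where the real wiring lives.

# Hypothetical job fragment chaining the composite actions shown above.
example-build:
  runs-on: ubuntu-latest
  steps:
    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
    - uses: ./.github/actions/configure-conan
      with:
        conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}   # assumed workflow-level env var
        conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
        conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
        conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
        conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
    - uses: ./.github/actions/install-dependencies
      with:
        build_dir: .build        # illustrative
        build_type: Release      # illustrative
        conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
    - uses: ./.github/actions/build-test
      with:
        build_dir: .build
        build_type: Release
        cmake_generator: Ninja
        cmake_target: all
        os: Linux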

View File

@@ -1,6 +1,12 @@
<!--
This PR template helps you to write a good pull request description.
Please feel free to include additional useful information even beyond what is requested below.
If your branch is on a personal fork and has a name that allows it to
run CI build/test jobs (e.g. "ci/foo"), remember to rename it BEFORE
opening the PR. This avoids redundant test runs. Renaming
the branch after opening the PR will close the PR.
https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/managing-branches-in-your-repository/renaming-a-branch
-->
## High Level Overview of Change

.github/workflows/build-debian.yml vendored Normal file
View File

@@ -0,0 +1,225 @@
# This workflow builds and tests the binary on various Debian configurations.
name: Debian
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-debian
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job. The matrix defined below should be kept in sync
# with https://github.com/XRPLF/ci/blob/main/.github/workflows/debian.yml.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"platform": "linux/amd64",
"runner": ["self-hosted", "Linux", "X64", "devbox"]
},
{
"platform": "linux/arm64",
"runner": ["self-hosted", "Linux", "ARM64", "devbox"]
}
]
STRATEGY_MATRIX_OS: >-
[
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "gcc",
"compiler_version": "12"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "clang",
"compiler_version": "16"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "clang",
"compiler_version": "17"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "clang",
"compiler_version": "18"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "clang",
"compiler_version": "19"
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "platform": "linux/amd64",
# "runner": ["self-hosted", "Linux", "X64"]
# }
# ]
# STRATEGY_MATRIX_OS: >-
# [
# {
# "distro": "debian",
# "release": "bookworm",
# "compiler_name": "gcc",
# "compiler_version": "12"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
${CC} --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'Linux'
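
The indirection above (a JSON document held in a workflow-level env var, echoed through jq into a job output, then parsed with fromJson in the downstream matrix) is easy to miss on first read. Below is a minimal self-contained sketch of the same pattern, with purely illustrative names; it is not part of this repository.

# Hypothetical stand-alone workflow demonstrating the env-var-to-matrix pattern.
name: Matrix from env
on: push
env:
  MY_MATRIX: >-
    [ "Debug", "Release" ]
jobs:
  expose:
    runs-on: ubuntu-latest
    steps:
      # Emit the JSON env var as a compact job output.
      - id: gen
        run: echo "matrix=$(jq -c <<< '${{ env.MY_MATRIX }}')" >> "$GITHUB_OUTPUT"
    outputs:
      matrix: ${{ steps.gen.outputs.matrix }}
  use:
    needs: expose
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # fromJson turns the string output back into a list for the matrix.
        build_type: ${{ fromJson(needs.expose.outputs.matrix) }}
    steps:
      - run: echo "Building ${{ matrix.build_type }}"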

.github/workflows/build-macos.yml vendored Normal file
View File

@@ -0,0 +1,160 @@
# This workflow builds and tests the binary on various MacOS configurations.
name: MacOS
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-macos
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"runner": ["self-hosted", "macOS", "ARM64", "devbox"]
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=OFF",
"-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"]
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
clang --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'MacOS'

.github/workflows/build-rhel.yml vendored Normal file
View File

@@ -0,0 +1,202 @@
# This workflow builds and tests the binary on various Red Hat Enterprise Linux
# configurations.
name: RHEL
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-rhel
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job. The matrix defined below should be kept in sync
# with https://github.com/XRPLF/ci/blob/main/.github/workflows/rhel.yml.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"platform": "linux/amd64",
"runner": ["self-hosted", "Linux", "X64", "devbox"]
},
{
"platform": "linux/arm64",
"runner": ["self-hosted", "Linux", "ARM64", "devbox"]
}
]
STRATEGY_MATRIX_OS: >-
[
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "clang",
"compiler_version": "any"
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "platform": "linux/amd64",
# "runner": ["self-hosted", "Linux", "X64"]
# }
# ]
# STRATEGY_MATRIX_OS: >-
# [
# {
# "distro": "rhel",
# "release": "9.6",
# "compiler_name": "gcc",
# "compiler_version": "13"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
${CC} --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'Linux'

.github/workflows/build-ubuntu.yml vendored Normal file
View File

@@ -0,0 +1,225 @@
# This workflow builds and tests the binary on various Ubuntu configurations.
name: Ubuntu
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-ubuntu
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job. The matrix defined below should be kept in sync
# with https://github.com/XRPLF/ci/blob/main/.github/workflows/ubuntu.yml.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"platform": "linux/amd64",
"runner": [self-hosted, Linux, X64, devbox]
},
{
"platform": "linux/arm64",
"runner": [self-hosted, Linux, ARM64, devbox]
}
]
STRATEGY_MATRIX_OS: >-
[
{
"distro": "ubuntu",
"release": "jammy",
"compiler_name": "gcc",
"compiler_version": "12"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "16"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "17"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "18"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "19"
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "platform": "linux/amd64",
# "runner": ["self-hosted", "Linux", "X64"]
# }
# ]
# STRATEGY_MATRIX_OS: >-
# [
# {
# "distro": "ubuntu",
# "release": "jammy",
# "compiler_name": "gcc",
# "compiler_version": "12"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
${CC} --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ inputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'Linux'

.github/workflows/build-windows.yml vendored Normal file
View File

@@ -0,0 +1,159 @@
# This workflow builds and tests the binary on various Windows configurations.
name: Windows
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-windows
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"runner": ["self-hosted", "Windows", "devbox"]
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "runner": "windows-latest"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Debug"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: pwsh
run: |
echo "Checking path"
$env:PATH -split ';' | Sort-Object
echo "Checking environment variables."
- name: choose Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: 3.13
- name: install Conan
run: pip install wheel conan
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking Conan version."
conan --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Visual Studio 17 2022'
cmake_target: 'install'
os: 'Windows'

View File

@@ -0,0 +1,57 @@
# This workflow checks if the code is formatted according to the .clang-format
# rules.
name: Clang Format
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-clang-format
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
clang-format:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-clang-format
steps:
# The $GITHUB_WORKSPACE and ${{ github.workspace }} might not point to the
# same directory for jobs running in containers. The actions/checkout step
# is *supposed* to checkout into $GITHUB_WORKSPACE and then add it to
# safe.directory (see instructions at https://github.com/actions/checkout)
# but that is apparently not happening for some container images. We
# therefore preemptively add both directories to safe.directory. See also
# https://github.com/actions/runner/issues/2058 for more details.
- name: Configure git safe.directory
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory ${{ github.workspace }}
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check version
run: clang-format --version
- name: Format code
run: find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format -i {} +
- name: Check for differences
env:
MESSAGE: |
One or more files did not conform to the formatting specified in
.clang-format. Maybe you did not run 'git-clang-format' or
'clang-format' before committing, or your version of clang-format
has an incompatibility with the one used here (see the "Check
version" step above).
Run 'git-clang-format --extensions cpp,h,hpp,ipp develop' in your
repo, and then commit and push the changes.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the files that changed to give the contributor a hint about
# what to expect when running git-clang-format on their own machine.
git status
echo "${MESSAGE}"
exit 1
fi

View File

@@ -0,0 +1,46 @@
# This workflow checks if the dependencies between the modules are correctly
# indexed.
name: Levelization
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-levelization
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
levelization:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Check for differences
env:
MESSAGE: |
The dependency relationships between the modules in rippled have
changed, which may be an improvement or a regression.
A rule of thumb is that if your changes caused something to be
removed from loops.txt, it's probably an improvement, while if
something was added, it's probably a regression.
Run './Builds/levelization/levelization.sh' in your repo, and then
commit and push the changes. See Builds/levelization/README.md for
more info.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the differences to give the contributor a hint about what to
# expect when running levelization on their own machine.
git diff
echo "${MESSAGE}"
exit 1
fi

.github/workflows/check-libxrpl.yml vendored Normal file
View File

@@ -0,0 +1,84 @@
name: Check libXRPL compatibility with Clio
env:
CONAN_URL: https://conan.ripplex.io
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
on:
pull_request:
paths:
- 'src/libxrpl/protocol/BuildInfo.cpp'
- 'check-libxrpl.yml'
types: [opened, reopened, synchronize, ready_for_review]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
publish:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
name: Publish libXRPL
outputs:
outcome: ${{ steps.upload.outputs.outcome }}
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.channel.outputs.channel }}
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
steps:
- name: Wait for essential checks to succeed
uses: lewagon/wait-on-check-action@v1.3.4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
running-workflow-name: wait-for-check-regexp
check-regexp: '(dependencies|test).*linux.*' # Ignore windows and mac tests but make sure linux passes
repo-token: ${{ secrets.GITHUB_TOKEN }}
wait-interval: 10
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Generate channel
id: channel
run: |
echo channel="clio/pr_${{ github.event.pull_request.number }}" | tee ${GITHUB_OUTPUT}
- name: Export new package
run: |
conan export . ${{ steps.channel.outputs.channel }}
- name: Add Ripple Conan remote
run: |
conan remote list
conan remote remove ripple || true
# Do not quote the URL. An empty string will be accepted (with a non-fatal warning), but a missing argument will not.
conan remote add ripple ${{ env.CONAN_URL }} --insert 0
- name: Parse new version
id: version
run: |
echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
| awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
- name: Try to authenticate to Ripple Conan remote
id: remote
run: |
# `conan user` implicitly uses the environment variables CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
# https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
# https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
# https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
echo outcome=$(conan user --remote ripple --password >&2 \
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
- name: Upload new package
id: upload
if: (steps.remote.outputs.outcome == 'success')
run: |
echo "conan upload version ${{ steps.version.outputs.version }} on channel ${{ steps.channel.outputs.channel }}"
echo outcome=$(conan upload xrpl/${{ steps.version.outputs.version }}@${{ steps.channel.outputs.channel }} --remote ripple --confirm >&2 \
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
notify_clio:
name: Notify Clio
runs-on: ubuntu-latest
needs: publish
env:
GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
steps:
- name: Notify Clio about new version
if: (needs.publish.outputs.outcome == 'success')
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[version]=${{ needs.publish.outputs.version }}@${{ needs.publish.outputs.channel }}" \
-F "client_payload[pr]=${{ github.event.pull_request.number }}"

View File

@@ -1,61 +0,0 @@
name: clang-format
on: [push, pull_request]
jobs:
check:
runs-on: ubuntu-20.04
env:
CLANG_VERSION: 10
steps:
- uses: actions/checkout@v3
- name: Install clang-format
run: |
codename=$( lsb_release --codename --short )
sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
EOF
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
sudo apt-get update
sudo apt-get install clang-format-${CLANG_VERSION}
- name: Format src/ripple
run: find src/ripple -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
- name: Format src/test
run: find src/test -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
- name: Check for differences
id: assert
run: |
set -o pipefail
git diff --exit-code | tee "clang-format.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v2
continue-on-error: true
with:
name: clang-format.patch
if-no-files-found: ignore
path: clang-format.patch
- name: What happened?
if: failure() && steps.assert.outcome == 'failure'
env:
PREAMBLE: |
If you are reading this, you are looking at a failed Github Actions
job. That means you pushed one or more files that did not conform
to the formatting specified in .clang-format. That may be because
you neglected to run 'git clang-format' or 'clang-format' before
committing, or that your version of clang-format has an
incompatibility with the one on this
machine, which is:
SUGGESTION: |
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run 'git-clang-format --extensions c,cpp,h,cxx,ipp develop'
in your repo, commit, and push.
run: |
echo "${PREAMBLE}"
clang-format-${CLANG_VERSION} --version
echo "${SUGGESTION}"
exit 1

.github/workflows/documentation.yml vendored Normal file
View File

@@ -0,0 +1,53 @@
name: Documentation
# TODO: Use `workflow_run` to trigger this workflow after checks have completed.
# This can only be done if the `checks` workflow already exists on the default
# branch (i.e. `develop`), so we cannot do this yet.
# See https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#workflow_run.
on:
push:
branches:
- develop
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
doxygen:
runs-on: ubuntu-latest
permissions:
contents: write
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
run: |
echo "Checking CMake version."
cmake --version
echo "Checking Doxygen version."
doxygen --version
- name: Build documentation
run: |
mkdir build
cd build
cmake -Donly_docs=TRUE ..
cmake --build . --target docs --parallel $(nproc)
- name: Publish documentation
uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: build/docs/html

View File

@@ -1,37 +0,0 @@
name: Build and publish Doxygen documentation
# To test this workflow, push your changes to your fork's `develop` branch.
on:
push:
branches:
- develop
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
job:
runs-on: ubuntu-latest
permissions:
contents: write
container:
image: docker://rippleci/rippled-ci-builder:2944b78d22db
steps:
- name: checkout
uses: actions/checkout@v3
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
cmake --version
doxygen --version
env | sort
- name: build
run: |
mkdir build
cd build
cmake -Donly_docs=TRUE ..
cmake --build . --target docs --parallel $(nproc)
- name: publish
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: build/docs/html

View File

@@ -1,49 +0,0 @@
name: levelization
on: [push, pull_request]
jobs:
check:
runs-on: ubuntu-latest
env:
CLANG_VERSION: 10
steps:
- uses: actions/checkout@v3
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Check for differences
id: assert
run: |
set -o pipefail
git diff --exit-code | tee "levelization.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v2
continue-on-error: true
with:
name: levelization.patch
if-no-files-found: ignore
path: levelization.patch
- name: What happened?
if: failure() && steps.assert.outcome == 'failure'
env:
MESSAGE: |
If you are reading this, you are looking at a failed Github
Actions job. That means you changed the dependency relationships
between the modules in rippled. That may be an improvement or a
regression. This check doesn't judge.
A rule of thumb, though, is that if your changes caused
something to be removed from loops.txt, that's probably an
improvement. If something was added, it's probably a regression.
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run './Builds/levelization/levelization.sh' in your repo,
commit, and push.
See Builds/levelization/README.md for more info.
run: |
echo "${MESSAGE}"
exit 1

View File

@@ -1,51 +0,0 @@
name: macos
on: [push, pull_request]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
strategy:
matrix:
platform:
- macos
generator:
- Ninja
configuration:
- Release
runs-on: [self-hosted, macOS]
env:
# The `build` action requires these variables.
build_dir: .build
NUM_PROCESSORS: 12
steps:
- name: checkout
uses: actions/checkout@v3
- name: install Ninja
if: matrix.generator == 'Ninja'
run: brew install ninja
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
python --version
conan --version
cmake --version
env | sort
- name: configure Conan
run : |
conan profile get env.CXXFLAGS default || true
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: ${{ matrix.generator }}
configuration: ${{ matrix.configuration }}
- name: test
run: |
${build_dir}/rippled --unittest

.github/workflows/main.yml vendored Normal file
View File

@@ -0,0 +1,90 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as MacOS and Windows.
name: Main
# This workflow is triggered on every push to the repository, including pull
# requests. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label. As GitHub Actions does not support such `if`
# conditions here, the individual jobs will check the pull request state and
# labels to determine whether to run or not. The workflows called by the jobs
# may also have their own conditions to partially or completely skip execution
# as needed.
on: push
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
# check-clang-format:
# if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
# uses: ./.github/workflows/check-clang-format.yml
#
# check-levelization:
# if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
# uses: ./.github/workflows/check-levelization.yml
debian:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-debian.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
# rhel:
## needs:
## - check-clang-format
## - check-levelization
# uses: ./.github/workflows/build-rhel.yml
# with:
# conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
# conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
# secrets:
# conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
# conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
ubuntu:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-ubuntu.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
macos:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-macos.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
windows:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-windows.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

.github/workflows/missing-commits.yml vendored Normal file
View File

@@ -0,0 +1,66 @@
name: Check for missing commits
# TODO: Use `workflow_run` to trigger this workflow after checks have completed.
# This can only be done if the `checks` workflow already exists on the default
# branch (i.e. `develop`), so we cannot do this yet.
# See https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#workflow_run.
on:
push:
branches:
- develop
- release
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
check:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- name: Check for missing commits
env:
MESSAGE: |
If you are reading this, then the commits indicated above are missing
from the "develop" and/or "release" branch. Do a reverse-merge as soon
as possible. See CONTRIBUTING.md for instructions.
run: |
set -o pipefail
# Branches are ordered by how "canonical" they are. Every commit in one
# branch should be in all the branches behind it.
order=(master release develop)
branches=()
for branch in "${order[@]}"; do
# Check that the branches exist so that this job will work on forked
# repos, which don't necessarily have master and release branches.
echo "Checking if ${branch} exists."
if git ls-remote --exit-code --heads origin \
refs/heads/${branch} > /dev/null; then
branches+=(origin/${branch})
fi
done
prior=()
for branch in "${branches[@]}"; do
if [[ ${#prior[@]} -ne 0 ]]; then
echo "Checking ${prior[@]} for commits missing from ${branch}."
git log --oneline --no-merges "${prior[@]}" \
^$branch | tee -a "missing-commits.txt"
echo
fi
prior+=("${branch}")
done
if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
echo "${MESSAGE}"
exit 1
fi
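
The same check can be run locally before pushing. A minimal equivalent, assuming the branch ordering described in the comment above (every commit on master should be on release, and every commit on release should be on develop):

# Hypothetical local-equivalent step: list commits on master/release that are
# missing from develop.
- name: Show commits missing from develop
  run: |
    git fetch origin master release develop
    git log --oneline --no-merges origin/master origin/release ^origin/develop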

View File

@@ -1,161 +0,0 @@
name: nix
on: [push, pull_request]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# This workflow has two job matrixes.
# They can be considered phases because the second matrix ("test")
# depends on the first ("dependencies").
#
# The first phase has a job in the matrix for each combination of
# variables that affects dependency ABI:
# platform, compiler, and configuration.
# It creates a GitHub artifact holding the Conan profile,
# and builds and caches binaries for all the dependencies.
# If an Artifactory remote is configured, they are cached there.
# If not, they are added to the GitHub artifact.
# GitHub's "cache" action has a size limit (10 GB) that is too small
# to hold the binaries if they are built locally.
# We must use the "{upload,download}-artifact" actions instead.
#
# The second phase has a job in the matrix for each test configuration.
# It installs dependency binaries from the cache, whichever was used,
# and builds and tests rippled.
jobs:
dependencies:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
- clang
configuration:
- Debug
- Release
include:
- compiler: gcc
profile:
version: 11
cc: /usr/bin/gcc
cxx: /usr/bin/g++
- compiler: clang
profile:
version: 14
cc: /usr/bin/clang-14
cxx: /usr/bin/clang++-14
runs-on: [self-hosted, heavy]
container: thejohnfreeman/rippled-build-ubuntu:12e19cd9034b
env:
build_dir: .build
steps:
- name: checkout
uses: actions/checkout@v3
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
env | sort
- name: configure Conan
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
run: |
conan profile new default --detect
conan profile update settings.compiler.cppstd=20 default
conan profile update settings.compiler=${{ matrix.compiler }} default
conan profile update settings.compiler.version=${{ matrix.profile.version }} default
conan profile update settings.compiler.libcxx=libstdc++11 default
conan profile update env.CC=${{ matrix.profile.cc }} default
conan profile update env.CXX=${{ matrix.profile.cxx }} default
conan profile update conf.tools.build:compiler_executables='{"c": "${{ matrix.profile.cc }}", "cpp": "${{ matrix.profile.cxx }}"}' default
# Do not quote the URL. An empty string will be accepted (with
# a non-fatal warning), but a missing argument will not.
conan remote add ripple ${{ env.CONAN_URL }} --insert 0
- name: try to authenticate to ripple Conan remote
id: remote
run: |
echo outcome=$(conan user --remote ripple ${{ secrets.CONAN_USERNAME }} --password ${{ secrets.CONAN_TOKEN }} >&2 && echo success || echo failure) | tee ${GITHUB_OUTPUT}
- name: archive profile
# Create this archive before dependencies are added to the local cache.
run: tar -czf conan.tar -C ~/.conan .
- name: list missing binaries
id: binaries
# Print the list of dependencies that would need to be built locally.
# A non-empty list means we have "failed" to cache binaries remotely.
run: |
echo missing=$(conan info . --build missing --settings build_type=${{ matrix.configuration }} --json 2>/dev/null | grep '^\[') | tee ${GITHUB_OUTPUT}
- name: build dependencies
if: (steps.binaries.outputs.missing != '[]')
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: upload dependencies to remote
if: (steps.binaries.outputs.missing != '[]') && (steps.remote.outputs.outcome == 'success')
run: conan upload --remote ripple '*' --all --parallel --confirm
- name: recreate archive with dependencies
if: (steps.binaries.outputs.missing != '[]') && (steps.remote.outputs.outcome == 'failure')
run: tar -czf conan.tar -C ~/.conan .
- name: upload archive
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
path: conan.tar
if-no-files-found: error
test:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
- clang
configuration:
- Debug
- Release
cmake-args:
-
- "-Dunity=ON"
needs: dependencies
runs-on: [self-hosted, heavy]
container: thejohnfreeman/rippled-build-ubuntu:12e19cd9034b
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@v3
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
env | sort
ls ~/.conan
- name: checkout
uses: actions/checkout@v3
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: ${{ matrix.cmake-args }}
- name: test
run: |
${build_dir}/rippled --unittest --unittest-jobs $(nproc)


@@ -1,100 +0,0 @@
name: windows
on: [push, pull_request]
# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
strategy:
fail-fast: false
matrix:
generator:
- Visual Studio 16 2019
configuration:
- Release
# Github hosted runners tend to hang when running Debug unit tests.
# Instead of trying to work around it, disable the Debug job until
# something beefier (i.e. a heavy self-hosted runner) becomes
# available.
# - Debug
runs-on: windows-2019
env:
build_dir: .build
steps:
- name: checkout
uses: actions/checkout@v3
- name: choose Python
uses: actions/setup-python@v3
with:
python-version: 3.9
- name: learn Python cache directory
id: pip-cache
shell: bash
run: |
python -m pip install --upgrade pip
echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
- name: restore Python cache directory
uses: actions/cache@v3
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
- name: install Conan
run: pip install wheel 'conan<2'
- name: check environment
run: |
$env:PATH -split ';'
python --version
conan --version
cmake --version
dir env:
- name: configure Conan
shell: bash
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
run: |
conan profile new default --detect
conan profile update settings.compiler.runtime=MT${{ matrix.configuration == 'Debug' && 'd' || '' }} default
# Do not quote the URL. An empty string will be accepted (with
# a non-fatal warning), but a missing argument will not.
conan remote add ripple ${{ env.CONAN_URL }} --insert 0
- name: try to authenticate to ripple Conan remote
shell: bash
id: remote
run: |
echo outcome=$(conan user --remote ripple ${{ secrets.CONAN_USERNAME }} \
--password ${{ secrets.CONAN_TOKEN }} >&2 && echo success || \
echo failure) | tee ${GITHUB_OUTPUT}
- name: list missing binaries
id: binaries
shell: bash
# Print the list of dependencies that would need to be built locally.
# A non-empty list means we have "failed" to cache binaries remotely.
run: |
echo missing=$(conan info . --build missing --settings build_type=${{ matrix.configuration }} --json 2>/dev/null | grep '^\[') | tee ${GITHUB_OUTPUT}
- name: build dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: upload dependencies to remote
if: (steps.binaries.outputs.missing != '[]') && (steps.remote.outputs.outcome == 'success')
run: conan upload --remote ripple '*' --all --parallel --confirm
- name: build
uses: ./.github/actions/build
with:
generator: '${{ matrix.generator }}'
configuration: ${{ matrix.configuration }}
# Hard code for now. Move to the matrix if varied options are needed
cmake-args: '-Dassert=ON -Dreporting=OFF -Dunity=ON'
- name: test (permitted to silently fail)
shell: bash
# Github runners are resource limited, which causes unit tests to fail
# (e.g. OOM). To allow forward progress until self-hosted runners are
# up and running reliably, allow the job to succeed even if tests fail.
continue-on-error: true
run: |
${build_dir}/${{ matrix.configuration }}/rippled --unittest --unittest-jobs $(nproc)


@@ -1,6 +1,6 @@
# .pre-commit-config.yaml
repos:
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v10.0.1
rev: v18.1.8
hooks:
- id: clang-format


@@ -8,42 +8,138 @@ The API version controls the API behavior you see. This includes what properties
For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release.
## API Version 2
API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.
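As a minimal illustration (the endpoint below is a placeholder for your own server, not something this changelog prescribes), a JSON-RPC request opting into API version 2 could look like:
```
# Sketch only: request API version 2 over JSON-RPC. Replace the URL with your
# own rippled endpoint; 5005 is merely a common local RPC port.
curl -s -X POST "http://localhost:5005" \
  -H 'Content-Type: application/json' \
  -d '{ "method": "server_info", "params": [{ "api_version": 2 }] }'
```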
#### Removed methods
In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)
- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.
#### Modifications to JSON transaction element in V2
In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)
- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`
Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:
- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`
This change affects the following methods:
- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
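To see the new layout in practice, here is a hedged sketch of a `tx` call under API version 2; the URL and transaction ID are placeholders, and the `jq` filter merely selects the sibling elements listed above:
```
# Sketch only: fetch one transaction with api_version 2 and print the elements
# that now sit next to tx_json. URL and transaction ID are placeholders.
curl -s -X POST "http://localhost:5005" -H 'Content-Type: application/json' \
  -d '{ "method": "tx", "params": [{ "api_version": 2, "transaction": "<transaction ID>", "binary": false }] }' \
  | jq '.result | { hash, ledger_index, ledger_hash, close_time_iso, validated, tx_json }'
```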
#### Modification to `Payment` transaction JSON schema
When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733)
- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733)
- On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
- On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
- On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
- On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
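As an input-side illustration (addresses, amount, and secret are placeholders, and `sign` is just one of the affected methods), a Payment can be prepared with `DeliverMax` where `Amount` used to go:
```
# Hedged sketch: a Payment using DeliverMax instead of Amount on input.
# Supplying both fields is only accepted when their values are identical.
curl -s -X POST "http://localhost:5005" -H 'Content-Type: application/json' -d '{
  "method": "sign",
  "params": [{
    "api_version": 2,
    "secret": "<seed>",
    "tx_json": {
      "TransactionType": "Payment",
      "Account": "<sender address>",
      "Destination": "<destination address>",
      "DeliverMax": "1000000"
    }
  }]
}'
```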
#### Modifications to account_info response
- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) (https://github.com/XRPLF/rippled/pull/3770)
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
- (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)
#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response
- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571)
- The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
- returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response
- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
## API Version 1
This version is supported by all `rippled` versions. At time of writing, it is the default API version, used when no `api_version` is specified. When a new API version is introduced, the command line interface will default to the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
This version is supported by all `rippled` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified.
### Idiosyncrasies
The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
#### V1 account_info response
### Inconsistency: server_info - network_id
In [the response to the `account_info` command](https://xrpl.org/account_info.html#response-format), there is `account_data` - which is supposed to be an `AccountRoot` object - and `signer_lists` is returned in this object. However, the docs say that `signer_lists` should be at the root level of the response.
The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead.
It makes sense for `signer_lists` to be at the root level because signer lists are not part of the AccountRoot object. (First reported in [xrpl-dev-portal#938](https://github.com/XRPLF/xrpl-dev-portal/issues/938).)
## XRP Ledger server version 2.5.0
In `api_version: 2`, the `signer_lists` field [will be moved](#modifications-to-account_info-response-in-v2) to the root level of the account_info response. (https://github.com/XRPLF/rippled/pull/3770)
As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu).
#### server_info - network_id
### Additions and bugfixes in 2.5.0
The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode).
- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled.
## XRP Ledger server version 2.4.0
[Version 2.4.0](https://github.com/XRPLF/rippled/releases/tag/2.4.0) was released on March 4, 2025.
### Additions and bugfixes in 2.4.0
- `ledger_entry`: `state` is added as an alias for `ripple_state`.
- `ledger_entry`: Enables case-insensitive filtering by canonical name in addition to case-sensitive filtering by RPC name.
- `validators`: Added new field `validator_list_threshold` in response.
- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate)
- Signing methods autofill fees better and properly handle transactions that don't have a base fee, and will also autofill the `NetworkID` field.
## XRP Ledger server version 2.3.0
[Version 2.3.0](https://github.com/XRPLF/rippled/releases/tag/2.3.0) was released on Nov 25, 2024.
### Breaking changes in 2.3.0
- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs).
Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC.
### Additions and bugfixes in 2.3.0
- `book_changes`: Returns a `validated` field in its response, which was missing in prior versions.
## XRP Ledger server version 2.2.0
[Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive):
- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
## XRP Ledger server version 2.0.0
### Additions in 2.0
Additions are intended to be non-breaking (because they are purely additive).
[Version 2.0.0](https://github.com/XRPLF/rippled/releases/tag/2.0.0) was released on Jan 9, 2024. The following additions are non-breaking (because they are purely additive):
- `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries.
- In `Payment` transactions, `DeliverMax` has been added. This is a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking.
- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting).
## XRP Ledger server version 2.2.0
The following is a non-breaking addition to the API.
- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
## XRP Ledger server version 1.12.0
[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023.
### Additions in 1.12
Additions are intended to be non-breaking (because they are purely additive).
[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive).
- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. (https://github.com/XRPLF/rippled/pull/4427)
- `ports` contains objects, each containing a `port` for the listening port (a number string), and a `protocol` array listing the supported protocols on that port.
@@ -120,71 +216,6 @@ was released on Mar 14, 2023.
removed from the [ledger subscription stream](https://xrpl.org/subscribe.html#ledger-stream), because it will no longer
have any meaning.
## API Version 2
API version 2 is introduced in `rippled` version 2.0. Users can request it explicitly by specifying `"api_version" : 2`.
#### Removed methods
In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)
- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.
#### Modifications to JSON transaction element in V2
In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)
- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`
Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:
- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`
This change affects the following methods:
- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
#### Modification to `Payment` transaction JSON schema
When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733)
- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733)
- On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
- On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
- On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
- On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
#### Modifications to account_info response
- `signer_lists` is returned in the root of the response. In API version 1, it was nested under `account_data`. (https://github.com/XRPLF/rippled/pull/3770)
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
- (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)
#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response
- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571)
- The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
- returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response
- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
# Unit tests for API changes
The following information is useful to developers contributing to this project:

BUILD.md

@@ -36,9 +36,15 @@ See [System Requirements](https://xrpl.org/system-requirements.html).
Building rippled generally requires git, Python, Conan, CMake, and a C++ compiler. Some guidance on setting up such a [C++ development environment can be found here](./docs/build/environment.md).
- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.55](https://conan.io/downloads.html)
- [Conan 1.60](https://conan.io/downloads.html)[^1]
- [CMake 3.16](https://cmake.org/download/)
[^1]: It is possible to build with Conan 2.x,
but the instructions are significantly different,
which is why we are not recommending it yet.
Notably, the `conan profile update` command is removed in 2.x.
Profiles must be edited by hand.
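For readers who do try Conan 2.x anyway, a rough sketch of the hand edit follows; the profile path and the keys shown are assumptions based on Conan 2 defaults, not instructions from this guide.
```
# Conan 2.x sketch (not covered by these instructions): edit the default
# profile directly instead of running `conan profile update`.
conan profile detect                        # creates ~/.conan2/profiles/default if missing
"${EDITOR:-vi}" ~/.conan2/profiles/default
# ...and set, for example:
#   [settings]
#   compiler.cppstd=20
#   compiler.libcxx=libstdc++11
```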
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:
@@ -67,9 +73,6 @@ Here are [sample instructions for setting up a C++ development environment on ma
Windows is not recommended for production use at this time.
- Additionally, 32-bit Windows development is not supported.
- Visual Studio 2022 is not yet supported.
- rippled generally requires [Boost][] 1.77, which Conan cannot build with VS 2022.
- Until rippled is updated for compatibility with later versions of Boost, Windows developers may need to use Visual Studio 2019.
[Boost]: https://www.boost.org/
@@ -95,6 +98,12 @@ Update the compiler settings:
conan profile update settings.compiler.cppstd=20 default
```
Configure Conan (1.x only) to use recipe revisions:
```
conan config set general.revisions_enabled=1
```
**Linux** developers will commonly have a default Conan [profile][] that compiles
with GCC and links with libstdc++.
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
@@ -104,6 +113,20 @@ then you will need to choose the `libstdc++11` ABI:
conan profile update settings.compiler.libcxx=libstdc++11 default
```
Ensure inter-operability between `boost::string_view` and `std::string_view` types:
```
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_BEAST_USE_STD_STRING_VIEW"]' default
conan profile update 'env.CXXFLAGS="-DBOOST_BEAST_USE_STD_STRING_VIEW"' default
```
If you have other flags in the `conf.tools.build` or `env.CXXFLAGS` sections, make sure to retain the existing flags and append the new ones. You can check them with:
```
conan profile show default
```
**Windows** developers may need to use the x64 native build tools.
An easy way to do that is to run the shortcut "x64 Native Tools Command
Prompt" for the version of Visual Studio that you have installed.
@@ -144,14 +167,16 @@ It does not explicitly link the C++ standard library,
which allows you to statically link it with GCC, if you want.
```
conan export external/snappy snappy/1.1.10@
# Conan 2.x
conan export --version 1.1.10 external/snappy
```
Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.
```
conan export external/soci soci/4.0.3@
# Conan 2.x
conan export --version 4.0.3 external/soci
```
### Build and Test
@@ -172,13 +197,15 @@ It patches their CMake to correctly import its dependencies.
the `install-folder` or `-if` option to every `conan install` command
in the next step.
2. Generate CMake files for every configuration you want to build.
2. Use conan to generate CMake files for every configuration you want to build:
```
conan install .. --output-folder . --build missing --settings build_type=Release
conan install .. --output-folder . --build missing --settings build_type=Debug
```
To build Debug, in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`
For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
you only need to run this command once.
For a multi-configuration generator, e.g. `Visual Studio`, you may want to
@@ -189,13 +216,13 @@ It patches their CMake to correctly import its dependencies.
generated by the first. You can pass the build type on the command line with
`--settings build_type=$BUILD_TYPE` or in the profile itself,
under the section `[settings]` with the key `build_type`.
If you are using a Microsoft Visual C++ compiler,
then you will need to ensure consistency between the `build_type` setting
and the `compiler.runtime` setting.
When `build_type` is `Release`, `compiler.runtime` should be `MT`.
When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.
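A short Conan 1.x sketch of keeping the two settings in sync (the same command the Windows workflow in this changeset uses):
```
# Pick the runtime matching the build_type you will pass to conan install.
conan profile update settings.compiler.runtime=MT default    # with --settings build_type=Release
# or
conan profile update settings.compiler.runtime=MTd default   # with --settings build_type=Debug
```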
```
@@ -208,23 +235,26 @@ It patches their CMake to correctly import its dependencies.
Single-config generators:
Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
and make sure it matches the one of the `build_type` settings
you chose in the previous step.
For example, to build Debug, in the next command, replace "Release" with "Debug"
```
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
```
Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
and make sure it matches the `build_type` setting you chose in the previous
step.
Multi-config generators:
```
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
```
**Note:** You can pass build options for `rippled` in this step.
4. Build `rippled`.
5. Build `rippled`.
For a single-configuration generator, it will build whatever configuration
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
@@ -233,7 +263,7 @@ It patches their CMake to correctly import its dependencies.
Single-config generators:
```
cmake --build .
cmake --build . -j $(nproc)
```
Multi-config generators:
@@ -243,7 +273,7 @@ It patches their CMake to correctly import its dependencies.
cmake --build . --config Debug
```
5. Test rippled.
6. Test rippled.
Single-config generators:
@@ -262,15 +292,75 @@ It patches their CMake to correctly import its dependencies.
generator. Pass `--help` to see the rest of the command line options.
## Coverage report
The coverage report is intended for developers using compilers GCC
or Clang (including Apple Clang). It is generated by the build target `coverage`,
which is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or `-Dcoverage=ON` variable in `cmake`
Prerequisites for the coverage report:
- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
- `gcov` for GCC (installed with the compiler by default) or
- `llvm-cov` for Clang (installed with the compiler by default)
- `Debug` build type
A coverage report is created when the following steps are completed, in order:
1. `rippled` binary built with instrumentation data, enabled by the `coverage`
option mentioned above
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
to assemble both instrumentation data and the coverage capture data into a coverage report
The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`
The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
by setting the `coverage_format` variable in `cmake`. It is also possible
to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.
Example use with some cmake variables set:
```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dxrpld=ON -Dtests=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```
After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:
- file named `coverage.`_extension_ , with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
## Options
| Option | Default Value | Description |
| --- | --- | --- |
| `assert` | OFF | Enable assertions. |
| `reporting` | OFF | Build the reporting mode feature. |
| `tests` | ON | Build tests. |
| `unity` | ON | Configure a unity build. |
| `coverage` | OFF | Prepare the coverage report. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
| `tests` | OFF | Build tests. |
| `unity` | ON | Configure a unity build. |
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
@@ -280,65 +370,29 @@ and can be helpful for detecting `#include` omissions.
## Troubleshooting
### Conan
After any updates or changes to dependencies, you may need to do the following:
1. Remove your build directory.
2. Remove the Conan cache:
```
rm -rf ~/.conan/data
```
4. Re-run [conan install](#build-and-test).
2. Remove the Conan cache: `conan remove "*" -c`
3. Re-run [conan install](#build-and-test).
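Put together as one hedged sequence (the `.build` directory name follows the convention used elsewhere in this guide):
```
# Sketch of the reset described above.
rm -rf .build          # 1. remove your build directory
conan remove "*" -c    # 2. clear the Conan cache, as listed above
# 3. re-run the `conan install` step from Build and Test
```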
### 'protobuf/port_def.inc' file not found
### no std::result_of
If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.
If `cmake --build .` results in an error due to a missing protobuf file, then you might have generated CMake files for a different `build_type` than the `CMAKE_BUILD_TYPE` you passed to conan.
```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
10 | #include <google/protobuf/port_def.inc>
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1 error generated.
```
For example, if you want to build Debug:
### call to 'async_teardown' is ambiguous
If you are compiling with an early version of Clang 16, then you might hit
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
header][7]. You can workaround it by adding this preprocessor definition:
```
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
```
### recompile with -fPIC
If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:
```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```
Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.
```
conan install --build boost ...
```
1. For conan install, pass `--settings build_type=Debug`
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
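Concretely, a sketch of a consistent Debug configuration, assembled from the commands shown earlier in this guide:
```
# Keep the Conan build_type and the CMake build type in agreement.
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Debug -Dxrpld=ON -Dtests=ON ..
cmake --build . -j $(nproc)
```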
## Add a Dependency
@@ -362,12 +416,7 @@ If you want to experiment with a new package, follow these steps:
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html
[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html
[pcf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-configuration-file
[pvf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-version-file
[find_package]: https://cmake.org/cmake/help/latest/command/find_package.html
[search]: https://cmake.org/cmake/help/latest/command/find_package.html#search-procedure
[prefix_path]: https://cmake.org/cmake/help/latest/variable/CMAKE_PREFIX_PATH.html
[profile]: https://docs.conan.io/en/latest/reference/profiles.html


@@ -1,207 +0,0 @@
macro(group_sources_in source_dir curdir)
file(GLOB children RELATIVE ${source_dir}/${curdir}
${source_dir}/${curdir}/*)
foreach (child ${children})
if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
group_sources_in(${source_dir} ${curdir}/${child})
else()
string(REPLACE "/" "\\" groupname ${curdir})
source_group(${groupname} FILES
${source_dir}/${curdir}/${child})
endif()
endforeach()
endmacro()
macro(group_sources curdir)
group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
endmacro()
macro (exclude_from_default target_)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
endmacro ()
macro (exclude_if_included target_)
get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
exclude_from_default (${target_})
endif ()
endmacro ()
function (print_ep_logs _target)
ExternalProject_Get_Property (${_target} STAMP_DIR)
add_custom_command(TARGET ${_target} POST_BUILD
COMMENT "${_target} BUILD OUTPUT"
COMMAND ${CMAKE_COMMAND}
-DIN_FILE=${STAMP_DIR}/${_target}-build-out.log
-P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/echo_file.cmake
COMMAND ${CMAKE_COMMAND}
-DIN_FILE=${STAMP_DIR}/${_target}-build-err.log
-P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/echo_file.cmake)
endfunction ()
#[=========================================================[
This is a function override for one function in the
standard ExternalProject module. We want to change
the generated build script slightly to include printing
the build logs in the case of failure. Those modifications
have been made here. This function override could break
in the future if the ExternalProject module changes internal
function names or changes the way it generates the build
scripts.
See:
https://gitlab.kitware.com/cmake/cmake/blob/df1ddeec128d68cc636f2dde6c2acd87af5658b6/Modules/ExternalProject.cmake#L1855-1952
#]=========================================================]
function(_ep_write_log_script name step cmd_var)
ExternalProject_Get_Property(${name} stamp_dir)
set(command "${${cmd_var}}")
set(make "")
set(code_cygpath_make "")
if(command MATCHES "^\\$\\(MAKE\\)")
# GNU make recognizes the string "$(MAKE)" as recursive make, so
# ensure that it appears directly in the makefile.
string(REGEX REPLACE "^\\$\\(MAKE\\)" "\${make}" command "${command}")
set(make "-Dmake=$(MAKE)")
if(WIN32 AND NOT CYGWIN)
set(code_cygpath_make "
if(\${make} MATCHES \"^/\")
execute_process(
COMMAND cygpath -w \${make}
OUTPUT_VARIABLE cygpath_make
ERROR_VARIABLE cygpath_make
RESULT_VARIABLE cygpath_error
OUTPUT_STRIP_TRAILING_WHITESPACE
)
if(NOT cygpath_error)
set(make \${cygpath_make})
endif()
endif()
")
endif()
endif()
set(config "")
if("${CMAKE_CFG_INTDIR}" MATCHES "^\\$")
string(REPLACE "${CMAKE_CFG_INTDIR}" "\${config}" command "${command}")
set(config "-Dconfig=${CMAKE_CFG_INTDIR}")
endif()
# Wrap multiple 'COMMAND' lines up into a second-level wrapper
# script so all output can be sent to one log file.
if(command MATCHES "(^|;)COMMAND;")
set(code_execute_process "
${code_cygpath_make}
execute_process(COMMAND \${command} RESULT_VARIABLE result)
if(result)
set(msg \"Command failed (\${result}):\\n\")
foreach(arg IN LISTS command)
set(msg \"\${msg} '\${arg}'\")
endforeach()
message(FATAL_ERROR \"\${msg}\")
endif()
")
set(code "")
set(cmd "")
set(sep "")
foreach(arg IN LISTS command)
if("x${arg}" STREQUAL "xCOMMAND")
if(NOT "x${cmd}" STREQUAL "x")
string(APPEND code "set(command \"${cmd}\")${code_execute_process}")
endif()
set(cmd "")
set(sep "")
else()
string(APPEND cmd "${sep}${arg}")
set(sep ";")
endif()
endforeach()
string(APPEND code "set(command \"${cmd}\")${code_execute_process}")
file(GENERATE OUTPUT "${stamp_dir}/${name}-${step}-$<CONFIG>-impl.cmake" CONTENT "${code}")
set(command ${CMAKE_COMMAND} "-Dmake=\${make}" "-Dconfig=\${config}" -P ${stamp_dir}/${name}-${step}-$<CONFIG>-impl.cmake)
endif()
# Wrap the command in a script to log output to files.
set(script ${stamp_dir}/${name}-${step}-$<CONFIG>.cmake)
set(logbase ${stamp_dir}/${name}-${step})
set(code "
${code_cygpath_make}
function (_echo_file _fil)
file (READ \${_fil} _cont)
execute_process (COMMAND \${CMAKE_COMMAND} -E echo \"\${_cont}\")
endfunction ()
set(command \"${command}\")
execute_process(
COMMAND \${command}
RESULT_VARIABLE result
OUTPUT_FILE \"${logbase}-out.log\"
ERROR_FILE \"${logbase}-err.log\"
)
if(result)
set(msg \"Command failed: \${result}\\n\")
foreach(arg IN LISTS command)
set(msg \"\${msg} '\${arg}'\")
endforeach()
execute_process (COMMAND \${CMAKE_COMMAND} -E echo \"Build output for ${logbase} : \")
_echo_file (\"${logbase}-out.log\")
_echo_file (\"${logbase}-err.log\")
set(msg \"\${msg}\\nSee above\\n\")
message(FATAL_ERROR \"\${msg}\")
else()
set(msg \"${name} ${step} command succeeded. See also ${logbase}-*.log\")
message(STATUS \"\${msg}\")
endif()
")
file(GENERATE OUTPUT "${script}" CONTENT "${code}")
set(command ${CMAKE_COMMAND} ${make} ${config} -P ${script})
set(${cmd_var} "${command}" PARENT_SCOPE)
endfunction()
find_package(Git)
# function that calls git log to get current hash
function (git_hash hash_val)
# note: optional second extra string argument not in signature
if (NOT GIT_FOUND)
return ()
endif ()
set (_hash "")
set (_format "%H")
if (ARGC GREATER_EQUAL 2)
string (TOLOWER ${ARGV1} _short)
if (_short STREQUAL "short")
set (_format "%h")
endif ()
endif ()
execute_process (COMMAND ${GIT_EXECUTABLE} "log" "--pretty=${_format}" "-n1"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE _git_exit_code
OUTPUT_VARIABLE _temp_hash
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET)
if (_git_exit_code EQUAL 0)
set (_hash ${_temp_hash})
endif ()
set (${hash_val} "${_hash}" PARENT_SCOPE)
endfunction ()
function (git_branch branch_val)
if (NOT GIT_FOUND)
return ()
endif ()
set (_branch "")
execute_process (COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE _git_exit_code
OUTPUT_VARIABLE _temp_branch
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET)
if (_git_exit_code EQUAL 0)
set (_branch ${_temp_branch})
endif ()
set (${branch_val} "${_branch}" PARENT_SCOPE)
endfunction ()


@@ -1,60 +0,0 @@
#[=========================================================[
SQLITE doesn't provide build files in the
standard source-only distribution. So we wrote
a simple cmake file and we copy it to the
external project folder so that we can use
this file to build the lib with ExternalProject
#]=========================================================]
add_library (sqlite3 STATIC sqlite3.c)
#[=========================================================[
When compiled with SQLITE_THREADSAFE=1, SQLite operates
in serialized mode. In this mode, SQLite can be safely
used by multiple threads with no restriction.
NOTE: This implies a global mutex!
When compiled with SQLITE_THREADSAFE=2, SQLite can be
used in a multithreaded program so long as no two
threads attempt to use the same database connection at
the same time.
NOTE: This is the preferred threading model, but not
currently enabled because we need to investigate our
use-model and concurrency requirements.
TODO: consider whether any other options should be
used: https://www.sqlite.org/compile.html
#]=========================================================]
target_compile_definitions (sqlite3
PRIVATE
SQLITE_THREADSAFE=1
HAVE_USLEEP=1)
target_compile_options (sqlite3
PRIVATE
$<$<BOOL:${MSVC}>:
-wd4100
-wd4127
-wd4232
-wd4244
-wd4701
-wd4706
-wd4996
>
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-array-bounds>)
install (
TARGETS
sqlite3
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES DESTINATION include)
install (
FILES
sqlite3.h
sqlite3ext.h
DESTINATION include)

File diff suppressed because it is too large.

@@ -1,98 +0,0 @@
#[===================================================================[
coverage report target
#]===================================================================]
if (coverage)
if (is_clang)
if (APPLE)
execute_process (COMMAND xcrun -f llvm-profdata
OUTPUT_VARIABLE LLVM_PROFDATA
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_PROFDATA llvm-profdata)
endif ()
if (NOT LLVM_PROFDATA)
message (WARNING "unable to find llvm-profdata - skipping coverage_report target")
endif ()
if (APPLE)
execute_process (COMMAND xcrun -f llvm-cov
OUTPUT_VARIABLE LLVM_COV
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_COV llvm-cov)
endif ()
if (NOT LLVM_COV)
message (WARNING "unable to find llvm-cov - skipping coverage_report target")
endif ()
set (extract_pattern "")
if (coverage_core_only)
set (extract_pattern "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/")
endif ()
if (LLVM_COV AND LLVM_PROFDATA)
add_custom_target (coverage_report
USES_TERMINAL
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage - results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
COMMAND ${LLVM_PROFDATA}
merge -sparse default.profraw -o rip.profdata
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LLVM_COV}
report -instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
# generate html report
COMMAND ${LLVM_COV}
show -format=html -output-dir=${CMAKE_BINARY_DIR}/coverage
-instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
BYPRODUCTS coverage/index.html)
endif ()
elseif (is_gcc)
find_program (LCOV lcov)
if (NOT LCOV)
message (WARNING "unable to find lcov - skipping coverage_report target")
endif ()
find_program (GENHTML genhtml)
if (NOT GENHTML)
message (WARNING "unable to find genhtml - skipping coverage_report target")
endif ()
set (extract_pattern "*")
if (coverage_core_only)
set (extract_pattern "*/src/ripple/*")
endif ()
if (LCOV AND GENHTML)
add_custom_target (coverage_report
USES_TERMINAL
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage- results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
# create baseline info file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -i -o baseline.info
| grep -v "ignoring data for external file"
# run tests
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests for coverage report."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
# Create test coverage data file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -o tests.info
| grep -v "ignoring data for external file"
# Combine baseline and test coverage data
COMMAND ${LCOV}
-a baseline.info -a tests.info -o lcov-all.info
# extract our files
COMMAND ${LCOV}
-e lcov-all.info "${extract_pattern}" -o lcov.info
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LCOV} --summary lcov.info
# generate HTML report
COMMAND ${GENHTML}
-o ${CMAKE_BINARY_DIR}/coverage lcov.info
BYPRODUCTS coverage/index.html)
endif ()
endif ()
endif ()


@@ -1,39 +0,0 @@
#[===================================================================[
multiconfig misc
#]===================================================================]
if (is_multiconfig)
# This code finds all source files in the src subdirectory for inclusion
# in the IDE file tree as non-compiled sources. Since this file list will
# have some overlap with files we have already added to our targets to
# be compiled, we explicitly remove any of these target source files from
# this list.
file (GLOB_RECURSE all_sources RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
CONFIGURE_DEPENDS
src/*.* Builds/*.md docs/*.md src/*.md Builds/*.cmake)
file(GLOB md_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} CONFIGURE_DEPENDS
*.md)
LIST(APPEND all_sources ${md_files})
foreach (_target secp256k1::secp256k1 ed25519::ed25519 xrpl_core rippled)
get_target_property (_type ${_target} TYPE)
if(_type STREQUAL "INTERFACE_LIBRARY")
continue()
endif()
get_target_property (_src ${_target} SOURCES)
list (REMOVE_ITEM all_sources ${_src})
endforeach ()
target_sources (rippled PRIVATE ${all_sources})
set_property (
SOURCE ${all_sources}
APPEND
PROPERTY HEADER_FILE_ONLY true)
if (MSVC)
set_property(
DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
PROPERTY VS_STARTUP_PROJECT rippled)
endif ()
group_sources(src)
group_sources(docs)
group_sources(Builds)
endif ()


@@ -1,180 +0,0 @@
#[===================================================================[
package/container targets - (optional)
#]===================================================================]
if (is_root_project)
if (NOT DOCKER)
find_program (DOCKER docker)
endif ()
if (DOCKER)
# if no container label is provided, use current git hash
git_hash (commit_hash)
if (NOT container_label)
set (container_label ${commit_hash})
endif ()
message (STATUS "using [${container_label}] as build container tag...")
file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/packages)
#[===================================================================[
rpm
#]===================================================================]
add_custom_target (rpm_container
docker build
--pull
--build-arg GIT_COMMIT=${commit_hash}
-t rippleci/rippled-rpm-builder:${container_label}
$<$<BOOL:${rpm_cache_from}>:--cache-from=${rpm_cache_from}>
-f centos-builder/Dockerfile .
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/Builds/containers
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/centos-builder/Dockerfile
Builds/containers/centos-builder/centos_setup.sh
Builds/containers/shared/update-rippled.sh
Builds/containers/shared/update_sources.sh
Builds/containers/shared/rippled.service
Builds/containers/shared/rippled-reporting.service
Builds/containers/packaging/rpm/rippled.spec
Builds/containers/packaging/rpm/build_rpm.sh
Builds/containers/packaging/rpm/50-rippled.preset
Builds/containers/packaging/rpm/50-rippled-reporting.preset
bin/getRippledInfo
)
exclude_from_default (rpm_container)
add_custom_target (rpm
docker run
-v ${CMAKE_CURRENT_SOURCE_DIR}:/opt/rippled_bld/pkg/rippled
-v ${CMAKE_CURRENT_BINARY_DIR}/packages:/opt/rippled_bld/pkg/out
-t rippled-rpm-builder:${container_label}
/bin/bash -c "cp -fpu rippled/Builds/containers/packaging/rpm/build_rpm.sh . && ./build_rpm.sh"
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/packaging/rpm/rippled.spec
)
exclude_from_default (rpm)
if (NOT have_package_container)
add_dependencies(rpm rpm_container)
endif ()
#[===================================================================[
dpkg
#]===================================================================]
# currently use ubuntu 18.04 as a base b/c it has one of
# the lower versions of libc among ubuntu and debian releases.
# we could change this in the future and build with some other deb
# based system.
add_custom_target (dpkg_container
docker build
--pull
--build-arg DIST_TAG=18.04
--build-arg GIT_COMMIT=${commit_hash}
-t rippled-dpkg-builder:${container_label}
$<$<BOOL:${dpkg_cache_from}>:--cache-from=${dpkg_cache_from}>
-f ubuntu-builder/Dockerfile .
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/Builds/containers
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/packaging/dpkg/debian/rippled-reporting.links
Builds/containers/packaging/dpkg/debian/copyright
Builds/containers/packaging/dpkg/debian/rules
Builds/containers/packaging/dpkg/debian/rippled-reporting.install
Builds/containers/packaging/dpkg/debian/rippled-reporting.postinst
Builds/containers/packaging/dpkg/debian/rippled.links
Builds/containers/packaging/dpkg/debian/rippled.prerm
Builds/containers/packaging/dpkg/debian/rippled.postinst
Builds/containers/packaging/dpkg/debian/rippled-dev.install
Builds/containers/packaging/dpkg/debian/dirs
Builds/containers/packaging/dpkg/debian/rippled.postrm
Builds/containers/packaging/dpkg/debian/rippled.conffiles
Builds/containers/packaging/dpkg/debian/compat
Builds/containers/packaging/dpkg/debian/source/format
Builds/containers/packaging/dpkg/debian/source/local-options
Builds/containers/packaging/dpkg/debian/README.Debian
Builds/containers/packaging/dpkg/debian/rippled.install
Builds/containers/packaging/dpkg/debian/rippled.preinst
Builds/containers/packaging/dpkg/debian/docs
Builds/containers/packaging/dpkg/debian/control
Builds/containers/packaging/dpkg/debian/rippled-reporting.dirs
Builds/containers/packaging/dpkg/build_dpkg.sh
Builds/containers/ubuntu-builder/Dockerfile
Builds/containers/ubuntu-builder/ubuntu_setup.sh
bin/getRippledInfo
Builds/containers/shared/install_cmake.sh
Builds/containers/shared/update-rippled.sh
Builds/containers/shared/update_sources.sh
Builds/containers/shared/rippled.service
Builds/containers/shared/rippled-reporting.service
Builds/containers/shared/rippled-logrotate
Builds/containers/shared/update-rippled-cron
)
exclude_from_default (dpkg_container)
add_custom_target (dpkg
docker run
-v ${CMAKE_CURRENT_SOURCE_DIR}:/opt/rippled_bld/pkg/rippled
-v ${CMAKE_CURRENT_BINARY_DIR}/packages:/opt/rippled_bld/pkg/out
-t rippled-dpkg-builder:${container_label}
/bin/bash -c "cp -fpu rippled/Builds/containers/packaging/dpkg/build_dpkg.sh . && ./build_dpkg.sh"
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/packaging/dpkg/debian/control
)
exclude_from_default (dpkg)
if (NOT have_package_container)
add_dependencies(dpkg dpkg_container)
endif ()
#[===================================================================[
ci container
#]===================================================================]
# now use the same ubuntu image for our travis-ci docker images,
# but we use a newer distro (18.04 vs 16.04).
#
# the following steps assume the github pkg repo, but it's possible to
# adapt these for other docker hub repositories.
#
# steps for publishing a new CI image when you make changes:
#
# mkdir bld.ci && cd bld.ci && cmake -Dpackages_only=ON -Dcontainer_label=CI_LATEST
# cmake --build . --target ci_container --verbose
# docker tag rippled-ci-builder:CI_LATEST <HUB REPO PATH>/rippled-ci-builder:YYYY-MM-DD
# (NOTE: change YYYY-MM-DD to match current date, or use a different
# tag/version scheme if you prefer)
# docker push <HUB REPO PATH>/rippled-ci-builder:YYYY-MM-DD
# (NOTE: <HUB REPO PATH> is probably your user or org name if using
# docker hub, or it might be something like
# docker.pkg.github.com/ripple/rippled if using the github pkg
# registry. for any registry, you will need to be logged-in via
# docker and have push access.)
#
# ...then change the DOCKER_IMAGE line in .travis.yml :
# - DOCKER_IMAGE="<HUB REPO PATH>/rippled-ci-builder:YYYY-MM-DD"
add_custom_target (ci_container
docker build
--pull
--build-arg DIST_TAG=18.04
--build-arg GIT_COMMIT=${commit_hash}
--build-arg CI_USE=true
-t rippled-ci-builder:${container_label}
$<$<BOOL:${ci_cache_from}>:--cache-from=${ci_cache_from}>
-f ubuntu-builder/Dockerfile .
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/Builds/containers
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/ubuntu-builder/Dockerfile
Builds/containers/ubuntu-builder/ubuntu_setup.sh
)
exclude_from_default (ci_container)
else ()
message (STATUS "docker NOT found -- won't be able to build containers for packaging")
endif ()
endif ()

@@ -1,124 +0,0 @@
#[===================================================================[
declare user options/settings
#]===================================================================]
option (assert "Enables asserts, even in release builds" OFF)
option (reporting "Build rippled with reporting mode enabled" OFF)
option (tests "Build tests" ON)
option (unity "Creates a build using UNITY support in cmake. This is the default" ON)
if (unity)
if (NOT is_ci)
set (CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
endif ()
endif ()
if (is_gcc OR is_clang)
option (coverage "Generates coverage info." OFF)
option (profile "Add profiling flags" OFF)
set (coverage_test "" CACHE STRING
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
if (coverage_test AND NOT coverage)
set (coverage ON CACHE BOOL "gcc/clang only" FORCE)
endif ()
option (coverage_core_only
"Include only src/ripple files when generating coverage report. \
Set to OFF to include all sources in coverage report."
ON)
option (wextra "compile with extra gcc/clang warnings enabled" ON)
else ()
set (profile OFF CACHE BOOL "gcc/clang only" FORCE)
set (coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set (wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif ()
if (is_linux)
option (BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
option (static "link protobuf, openssl, libc++, and boost statically" ON)
option (perf "Enables flags that assist with perf recording" OFF)
option (use_gold "enables detection of gold (binutils) linker" ON)
option (use_mold "enables detection of the mold linker" ON)
else ()
# we are not ready to allow shared-libs on windows because it would require
# export declarations. On macos it's more feasible, but static openssl
# produces odd linker errors, thus we disable shared lib builds for now.
set (BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set (static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set (perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set (use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set (use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif ()
if (is_clang)
option (use_lld "enables detection of lld linker" ON)
else ()
set (use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif ()
option (jemalloc "Enables jemalloc for heap profiling" OFF)
option (werr "treat warnings as errors" OFF)
option (local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF)
option (local_grpc
"Force a local build of gRPC instead of looking for an installed version." OFF)
# this one is a string and therefore can't be an option
set (san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property (CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
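# example (illustrative): a sanitizer build might be configured with
#   cmake -Dsan=address -DCMAKE_BUILD_TYPE=Debug ..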
if (san)
string (TOLOWER ${san} san)
set (SAN_FLAG "-fsanitize=${san}")
set (SAN_LIB "")
if (is_gcc)
if (san STREQUAL "address")
set (SAN_LIB "asan")
elseif (san STREQUAL "thread")
set (SAN_LIB "tsan")
elseif (san STREQUAL "memory")
set (SAN_LIB "msan")
elseif (san STREQUAL "undefined")
set (SAN_LIB "ubsan")
endif ()
endif ()
set (_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set (CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag (${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set (CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if (NOT COMPILER_SUPPORTS_SAN)
message (FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif ()
endif ()
set (container_label "" CACHE STRING "tag to use for package building containers")
option (packages_only
"ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option (have_package_container
"Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)
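# example (illustrative): reusing an already-built/tagged package container
# usually looks something like
#   cmake -Dpackages_only=ON -Dcontainer_label=<tag> -Dhave_package_container=ON ..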
# the remaining options are obscure and rarely used
option (beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table"
OFF)
option (single_io_service_thread
"Restricts the number of threads calling io_service::run to one. \
This can be useful when debugging."
OFF)
option (boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
OFF)
option (beast_hashers
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
OFF)
if (WIN32)
option (beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else ()
set (beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif ()
if (coverage)
message (STATUS "coverage build requested - forcing Debug build")
set (CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif ()

@@ -1,15 +0,0 @@
#[===================================================================[
read version from source
#]===================================================================]
file (STRINGS src/ripple/protocol/impl/BuildInfo.cpp BUILD_INFO)
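# the version is declared in BuildInfo.cpp on a line of (roughly) this form:
#   versionString = "1.2.3"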
foreach (line_ ${BUILD_INFO})
if (line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
set (rippled_version ${CMAKE_MATCH_1})
endif ()
endforeach ()
if (rippled_version)
message (STATUS "rippled version: ${rippled_version}")
else ()
message (FATAL_ERROR "unable to determine rippled version")
endif ()

@@ -1,106 +0,0 @@
################################################################################
# SociConfig.cmake - CMake build configuration of SOCI library
################################################################################
# Copyright (C) 2010 Mateusz Loskot <mateusz@loskot.net>
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
################################################################################
include(CheckCXXSymbolExists)
if(WIN32)
check_cxx_symbol_exists("_M_AMD64" "" SOCI_TARGET_ARCH_X64)
if(NOT RTC_ARCH_X64)
check_cxx_symbol_exists("_M_IX86" "" SOCI_TARGET_ARCH_X86)
endif(NOT RTC_ARCH_X64)
# add check for arm here
# see http://msdn.microsoft.com/en-us/library/b0084kay.aspx
else(WIN32)
check_cxx_symbol_exists("__i386__" "" SOCI_TARGET_ARCH_X86)
check_cxx_symbol_exists("__x86_64__" "" SOCI_TARGET_ARCH_X64)
check_cxx_symbol_exists("__arm__" "" SOCI_TARGET_ARCH_ARM)
endif(WIN32)
if(NOT DEFINED LIB_SUFFIX)
if(SOCI_TARGET_ARCH_X64)
set(_lib_suffix "64")
else()
set(_lib_suffix "")
endif()
set(LIB_SUFFIX ${_lib_suffix} CACHE STRING "Specifies suffix for the lib directory")
endif()
#
# C++11 Option
#
if(NOT SOCI_CXX_C11)
set (SOCI_CXX_C11 OFF CACHE BOOL "Build to the C++11 standard")
endif()
#
# Force compilation flags and set desired warnings level
#
if (MSVC)
add_definitions(-D_CRT_SECURE_NO_DEPRECATE)
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
add_definitions(-D_CRT_NONSTDC_NO_WARNING)
add_definitions(-D_SCL_SECURE_NO_WARNINGS)
if(CMAKE_CXX_FLAGS MATCHES "/W[0-4]")
string(REGEX REPLACE "/W[0-4]" "/W4" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /we4266")
endif()
else()
set(SOCI_GCC_CLANG_COMMON_FLAGS "")
# "-pedantic -Werror -Wno-error=parentheses -Wall -Wextra -Wpointer-arith -Wcast-align -Wcast-qual -Wfloat-equal -Woverloaded-virtual -Wredundant-decls -Wno-long-long")
if (SOCI_CXX_C11)
set(SOCI_CXX_VERSION_FLAGS "-std=c++11")
else()
set(SOCI_CXX_VERSION_FLAGS "-std=gnu++98")
endif()
if("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang" OR "${CMAKE_CXX_COMPILER}" MATCHES "clang")
if(NOT CMAKE_CXX_COMPILER_VERSION LESS 3.1 AND SOCI_ASAN)
set(SOCI_GCC_CLANG_COMMON_FLAGS "${SOCI_GCC_CLANG_COMMON_FLAGS} -fsanitize=address")
endif()
# enforce C++11 for Clang
set(SOCI_CXX_C11 ON)
set(SOCI_CXX_VERSION_FLAGS "-std=c++11")
add_definitions(-DCATCH_CONFIG_CPP11_NO_IS_ENUM)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SOCI_GCC_CLANG_COMMON_FLAGS} ${SOCI_CXX_VERSION_FLAGS}")
elseif(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
if(NOT CMAKE_CXX_COMPILER_VERSION LESS 4.8 AND SOCI_ASAN)
set(SOCI_GCC_CLANG_COMMON_FLAGS "${SOCI_GCC_CLANG_COMMON_FLAGS} -fsanitize=address")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SOCI_GCC_CLANG_COMMON_FLAGS} ${SOCI_CXX_VERSION_FLAGS} ")
if (CMAKE_COMPILER_IS_GNUCXX)
if (CMAKE_SYSTEM_NAME MATCHES "FreeBSD")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-variadic-macros")
endif()
endif()
else()
message(WARNING "Unknown toolset - using default flags to build SOCI")
endif()
endif()
# Set SOCI_HAVE_* variables for soci-config.h generator
set(SOCI_HAVE_CXX_C11 ${SOCI_CXX_C11} CACHE INTERNAL "Enables C++11 support")

@@ -1,27 +0,0 @@
find_package(Protobuf 3.8)
set(output_dir ${CMAKE_BINARY_DIR}/proto_gen)
file(MAKE_DIRECTORY ${output_dir})
set(ccbd ${CMAKE_CURRENT_BINARY_DIR})
set(CMAKE_CURRENT_BINARY_DIR ${output_dir})
protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS src/ripple/proto/ripple.proto)
set(CMAKE_CURRENT_BINARY_DIR ${ccbd})
target_include_directories(xrpl_core SYSTEM PUBLIC
# The generated implementation imports the header relative to the output
# directory.
$<BUILD_INTERFACE:${output_dir}>
$<BUILD_INTERFACE:${output_dir}/src>
)
target_sources(xrpl_core PRIVATE ${output_dir}/src/ripple/proto/ripple.pb.cc)
install(
FILES ${output_dir}/src/ripple/proto/ripple.pb.h
DESTINATION include/ripple/proto)
target_link_libraries(xrpl_core PUBLIC protobuf::libprotobuf)
target_compile_options(xrpl_core
PUBLIC
$<$<BOOL:${XCODE}>:
--system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec
>
)

@@ -1,82 +0,0 @@
find_package(gRPC 1.23)
#[=================================[
generate protobuf sources for
grpc defs and bundle into a
static lib
#]=================================]
set(output_dir "${CMAKE_BINARY_DIR}/proto_gen_grpc")
set(GRPC_GEN_DIR "${output_dir}/ripple/proto")
file(MAKE_DIRECTORY ${GRPC_GEN_DIR})
set(GRPC_PROTO_SRCS)
set(GRPC_PROTO_HDRS)
set(GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
file(GLOB_RECURSE GRPC_DEFINITION_FILES "${GRPC_PROTO_ROOT}/*.proto")
foreach(file ${GRPC_DEFINITION_FILES})
# /home/user/rippled/src/ripple/proto/org/.../v1/get_ledger.proto
get_filename_component(_abs_file ${file} ABSOLUTE)
# /home/user/rippled/src/ripple/proto/org/.../v1
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
# get_ledger
get_filename_component(_basename ${file} NAME_WE)
# /home/user/rippled/src/ripple/proto
get_filename_component(_proto_inc ${GRPC_PROTO_ROOT} DIRECTORY) # updir one level
# org/.../v1/get_ledger.proto
file(RELATIVE_PATH _rel_root_file ${_proto_inc} ${_abs_file})
# org/.../v1
get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
# src/ripple/proto/org/.../v1
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
# .cmake/proto_gen_grpc/ripple/proto/org/.../v1/get_ledger.grpc.pb.cc
set(src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
set(src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
set(hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
set(hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
add_custom_command(
OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
COMMAND protobuf::protoc
ARGS --grpc_out=${GRPC_GEN_DIR}
--cpp_out=${GRPC_GEN_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
-I ${_proto_inc} -I ${_rel_dir}
${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc gRPC::grpc_cpp_plugin
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Running gRPC C++ protocol buffer compiler on ${file}"
VERBATIM)
set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES
GENERATED TRUE
SKIP_UNITY_BUILD_INCLUSION ON
)
list(APPEND GRPC_PROTO_SRCS ${src_1} ${src_2})
list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
endforeach()
target_include_directories(xrpl_core SYSTEM PUBLIC
$<BUILD_INTERFACE:${output_dir}>
$<BUILD_INTERFACE:${output_dir}/ripple/proto>
# The generated sources include headers relative to this path. Fix it later.
$<INSTALL_INTERFACE:include/ripple/proto>
)
target_sources(xrpl_core PRIVATE ${GRPC_PROTO_SRCS})
install(
DIRECTORY ${output_dir}/ripple
DESTINATION include/
FILES_MATCHING PATTERN "*.h"
)
target_link_libraries(xrpl_core PUBLIC
"gRPC::grpc++"
# libgrpc is missing references.
absl::random_random
)
target_compile_options(xrpl_core
PRIVATE
$<$<BOOL:${MSVC}>:-wd4065>
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
PUBLIC
$<$<BOOL:${MSVC}>:-wd4996>
$<$<BOOL:${XCODE}>:
--system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec
>)

@@ -1,17 +0,0 @@
#[=========================================================[
This is a CMake script file that is used to write
the contents of a file to stdout (using the cmake
echo command). The input file is passed via the
IN_FILE variable.
#]=========================================================]
if (EXISTS ${IN_FILE})
file (READ ${IN_FILE} contents)
## only print files that actually have some text in them
if (contents MATCHES "[a-z0-9A-Z]+")
execute_process(
COMMAND
${CMAKE_COMMAND} -E echo "${contents}")
endif ()
endif ()

@@ -1 +0,0 @@
[Please see the BUILD instructions here](../BUILD.md)

@@ -1,405 +0,0 @@
#!/usr/bin/env python
# This file is part of rippled: https://github.com/ripple/rippled
# Copyright (c) 2012 - 2017 Ripple Labs Inc.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Invocation:
./Builds/Test.py - builds and tests all configurations
The build must succeed without shell aliases for this to work.
To pass flags to cmake, put them at the very end of the command line, after
the -- flag - like this:
./Builds/Test.py -- -j4 # Pass -j4 to cmake --build
Common problems:
1) Boost not found. Solution: export BOOST_ROOT=[path to boost folder]
2) OpenSSL not found. Solution: export OPENSSL_ROOT=[path to OpenSSL folder]
3) cmake is not found. Solution: Be sure cmake directory is on your $PATH
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
import itertools
import os
import platform
import re
import shutil
import sys
import subprocess
def powerset(iterable):
"""powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"""
s = list(iterable)
return itertools.chain.from_iterable(itertools.combinations(s, r) for r in range(len(s) + 1))
IS_WINDOWS = platform.system().lower() == 'windows'
IS_OS_X = platform.system().lower() == 'darwin'
# CMake
if IS_WINDOWS:
CMAKE_UNITY_CONFIGS = ['Debug', 'Release']
CMAKE_NONUNITY_CONFIGS = ['Debug', 'Release']
else:
CMAKE_UNITY_CONFIGS = []
CMAKE_NONUNITY_CONFIGS = []
CMAKE_UNITY_COMBOS = { '' : [['rippled'], CMAKE_UNITY_CONFIGS],
'.nounity' : [['rippled'], CMAKE_NONUNITY_CONFIGS] }
if IS_WINDOWS:
CMAKE_DIR_TARGETS = { ('msvc' + unity,) : targets for unity, targets in
CMAKE_UNITY_COMBOS.items() }
elif IS_OS_X:
CMAKE_DIR_TARGETS = { (build + unity,) : targets
for build in ['debug', 'release']
for unity, targets in CMAKE_UNITY_COMBOS.items() }
else:
CMAKE_DIR_TARGETS = { (cc + "." + build + unity,) : targets
for cc in ['gcc', 'clang']
for build in ['debug', 'release', 'coverage', 'profile']
for unity, targets in CMAKE_UNITY_COMBOS.items() }
# list of tuples of all possible options
if IS_WINDOWS or IS_OS_X:
CMAKE_ALL_GENERATE_OPTIONS = [tuple(x) for x in powerset(['-GNinja', '-Dassert=true'])]
else:
CMAKE_ALL_GENERATE_OPTIONS = list(set(
[tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=address'])] +
[tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=thread'])]))
parser = argparse.ArgumentParser(
description='Test.py - run ripple tests'
)
parser.add_argument(
'--all', '-a',
action='store_true',
help='Build all configurations.',
)
parser.add_argument(
'--keep_going', '-k',
action='store_true',
help='Keep going after one configuration has failed.',
)
parser.add_argument(
'--silent', '-s',
action='store_true',
help='Silence all messages except errors',
)
parser.add_argument(
'--verbose', '-v',
action='store_true',
help=('Report more information about which commands are executed and the '
'results.'),
)
parser.add_argument(
'--test', '-t',
default='',
help='Add a prefix for unit tests',
)
parser.add_argument(
'--testjobs',
default='0',
type=int,
help='Run tests in parallel'
)
parser.add_argument(
'--ipv6',
action='store_true',
help='Use IPv6 localhost when running unit tests.',
)
parser.add_argument(
'--clean', '-c',
action='store_true',
help='delete all build artifacts after testing',
)
parser.add_argument(
'--quiet', '-q',
action='store_true',
help='Reduce output where possible (unit tests)',
)
parser.add_argument(
'--dir', '-d',
default=(),
nargs='*',
help='Specify one or more CMake dir names. '
'Will also be used as -Dtarget=<dir> running cmake.'
)
parser.add_argument(
'--target',
default=(),
nargs='*',
help='Specify one or more CMake build targets. '
'Will be used as --target <target> running cmake --build.'
)
parser.add_argument(
'--config',
default=(),
nargs='*',
help='Specify one or more CMake build configs. '
'Will be used as --config <config> running cmake --build.'
)
parser.add_argument(
'--generator_option',
action='append',
help='Specify a CMake generator option. Repeat for multiple options. '
'Will be passed to the cmake generator. '
'Due to limits of the argument parser, arguments starting with \'-\' '
'must be attached to this option. e.g. --generator_option=-GNinja.')
parser.add_argument(
'--build_option',
action='append',
help='Specify a build option. Repeat for multiple options. '
'Will be passed to the build tool via cmake --build. '
'Due to limits of the argument parser, arguments starting with \'-\' '
'must be attached to this option. e.g. --build_option=-j8.')
parser.add_argument(
'extra_args',
default=(),
nargs='*',
help='Extra arguments are passed through to the tools'
)
ARGS = parser.parse_args()
def decodeString(line):
# Python 2 vs. Python 3
if isinstance(line, str):
return line
else:
return line.decode()
def shell(cmd, args=(), silent=False, cust_env=None):
""""Execute a shell command and return the output."""
silent = ARGS.silent or silent
verbose = not silent and ARGS.verbose
if verbose:
print('$' + cmd, *args)
command = (cmd,) + args
# shell is needed in Windows to find executable in the path
process = subprocess.Popen(
command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=cust_env,
shell=IS_WINDOWS)
lines = []
count = 0
# readline returns '' at EOF
for line in iter(process.stdout.readline, ''):
if process.poll() is None:
decoded = decodeString(line)
lines.append(decoded)
if verbose:
print(decoded, end='')
elif not silent:
count += 1
if count >= 80:
print()
count = 0
else:
print('.', end='')
else:
break
if not verbose and count:
print()
process.wait()
return process.returncode, lines
def get_cmake_dir(cmake_dir):
return os.path.join('build' , 'cmake' , cmake_dir)
def run_cmake(directory, cmake_dir, args):
print('Generating build in', directory, 'with', *args or ('default options',))
old_dir = os.getcwd()
if not os.path.exists(directory):
os.makedirs(directory)
os.chdir(directory)
if IS_WINDOWS and not any(arg.startswith("-G") for arg in args) and not os.path.exists("CMakeCache.txt"):
if '--ninja' in args:
args += ( '-GNinja', )
else:
args += ( '-GVisual Studio 14 2015 Win64', )
# hack to extract cmake options/args from the legacy target format
if re.search(r'\.unity', cmake_dir):
args += ( '-Dunity=ON', )
if re.search(r'\.nounity', cmake_dir):
args += ( '-Dunity=OFF', )
if re.search('coverage', cmake_dir):
args += ( '-Dcoverage=ON', )
if re.search('profile', cmake_dir):
args += ( '-Dprofile=ON', )
if re.search('debug', cmake_dir):
args += ( '-DCMAKE_BUILD_TYPE=Debug', )
if re.search('release', cmake_dir):
args += ( '-DCMAKE_BUILD_TYPE=Release', )
m = re.search('gcc(-[^.]*)', cmake_dir)
if m:
args += ( '-DCMAKE_C_COMPILER=' + m.group(0),
'-DCMAKE_CXX_COMPILER=g++' + m.group(1), )
elif re.search('gcc', cmake_dir):
args += ( '-DCMAKE_C_COMPILER=gcc', '-DCMAKE_CXX_COMPILER=g++', )
m = re.search('clang(-[^.]*)', cmake_dir)
if m:
args += ( '-DCMAKE_C_COMPILER=' + m.group(0),
'-DCMAKE_CXX_COMPILER=clang++' + m.group(1), )
elif re.search('clang', cmake_dir):
args += ( '-DCMAKE_C_COMPILER=clang', '-DCMAKE_CXX_COMPILER=clang++', )
args += ( os.path.join('..', '..', '..'), )
resultcode, lines = shell('cmake', args)
if resultcode:
print('Generating FAILED:')
if not ARGS.verbose:
print(*lines, sep='')
sys.exit(1)
os.chdir(old_dir)
def run_cmake_build(directory, target, config, args):
print('Building', target, config, 'in', directory, 'with', *args or ('default options',))
build_args=('--build', directory)
if target:
build_args += ('--target', target)
if config:
build_args += ('--config', config)
if args:
build_args += ('--',)
build_args += tuple(args)
resultcode, lines = shell('cmake', build_args)
if resultcode:
print('Build FAILED:')
if not ARGS.verbose:
print(*lines, sep='')
sys.exit(1)
def run_cmake_tests(directory, target, config):
failed = []
if IS_WINDOWS:
target += '.exe'
executable = os.path.join(directory, config if config else 'Debug', target)
if(not os.path.exists(executable)):
executable = os.path.join(directory, target)
print('Unit tests for', executable)
testflag = '--unittest'
quiet = ''
testjobs = ''
ipv6 = ''
if ARGS.test:
testflag += ('=' + ARGS.test)
if ARGS.quiet:
quiet = '-q'
if ARGS.ipv6:
ipv6 = '--unittest-ipv6'
if ARGS.testjobs:
testjobs = ('--unittest-jobs=' + str(ARGS.testjobs))
resultcode, lines = shell(executable, (testflag, quiet, testjobs, ipv6))
if resultcode:
if not ARGS.verbose:
print('ERROR:', *lines, sep='')
failed.append([target, 'unittest'])
return failed
def main():
all_failed = []
if ARGS.all:
build_dir_targets = CMAKE_DIR_TARGETS
generator_options = CMAKE_ALL_GENERATE_OPTIONS
else:
build_dir_targets = { tuple(ARGS.dir) : [ARGS.target, ARGS.config] }
if ARGS.generator_option:
generator_options = [tuple(ARGS.generator_option)]
else:
generator_options = [tuple()]
if not build_dir_targets:
# Let CMake choose the build tool.
build_dir_targets = { () : [] }
if ARGS.build_option:
ARGS.build_option = ARGS.build_option + list(ARGS.extra_args)
else:
ARGS.build_option = list(ARGS.extra_args)
for args in generator_options:
for build_dirs, (build_targets, build_configs) in build_dir_targets.items():
if not build_dirs:
build_dirs = ('default',)
if not build_targets:
build_targets = ('rippled',)
if not build_configs:
build_configs = ('',)
for cmake_dir in build_dirs:
cmake_full_dir = get_cmake_dir(cmake_dir)
run_cmake(cmake_full_dir, cmake_dir, args)
for target in build_targets:
for config in build_configs:
run_cmake_build(cmake_full_dir, target, config, ARGS.build_option)
failed = run_cmake_tests(cmake_full_dir, target, config)
if failed:
print('FAILED:', *(':'.join(f) for f in failed))
if not ARGS.keep_going:
sys.exit(1)
else:
all_failed.extend([decodeString(cmake_dir +
"." + target + "." + config), ':'.join(f)]
for f in failed)
else:
print('Success')
if ARGS.clean:
shutil.rmtree(cmake_full_dir)
if all_failed:
if len(all_failed) > 1:
print()
print('FAILED:', *(':'.join(f) for f in all_failed))
sys.exit(1)
if __name__ == '__main__':
main()
sys.exit(0)

@@ -1,7 +0,0 @@
#!/usr/bin/env bash
num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # number of physical cores
path=$(cd $(dirname $0) && pwd)
cd $(dirname $path)
${path}/Test.py -a -c --testjobs=${num_procs} -- -j${num_procs}

@@ -1,31 +0,0 @@
# rippled Packaging and Containers
This folder contains docker container definitions and configuration
files to support building rpm and deb packages of rippled. The container
definitions include some additional software/packages that are used
for general build/test CI workflows of rippled but are not explicitly
needed for the package building workflow.
## CMake Targets
If you have docker installed on your local system, then the main
CMake file will enable several targets related to building packages:
`rpm_container`, `rpm`, `dpkg_container`, and `dpkg`. The package targets
depend on the container targets and will trigger a build of those first.
The container builds can take several dozen minutes to complete (depending
on hardware specs), so quick build cycles are not currently possible. As
such, these targets are often best suited to CI/automated build systems.
The package build can be invoked like any other cmake target from the
rippled root folder:
```
mkdir -p build/pkg && cd build/pkg
cmake -Dpackages_only=ON ../..
cmake --build . --target rpm
```
Upon successful completion, the generated package files will be in
the `build/pkg/packages` directory. For deb packages, simply replace
`rpm` with `dpkg` in the build command above.
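For example, the equivalent deb package build only changes the target name:
```
mkdir -p build/pkg && cd build/pkg
cmake -Dpackages_only=ON ../..
cmake --build . --target dpkg
```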

@@ -1,26 +0,0 @@
FROM rippleci/centos:7
ARG GIT_COMMIT=unknown
ARG CI_USE=false
LABEL git-commit=$GIT_COMMIT
COPY centos-builder/centos_setup.sh /tmp/
COPY shared/install_cmake.sh /tmp/
RUN chmod +x /tmp/centos_setup.sh && \
chmod +x /tmp/install_cmake.sh
RUN /tmp/centos_setup.sh
RUN /tmp/install_cmake.sh 3.16.3 /opt/local/cmake-3.16
RUN ln -s /opt/local/cmake-3.16 /opt/local/cmake
ENV PATH="/opt/local/cmake/bin:$PATH"
# TODO: Install latest CMake for testing
RUN if [ "${CI_USE}" = true ] ; then /tmp/install_cmake.sh 3.16.3 /opt/local/cmake-3.16; fi
RUN mkdir -m 777 -p /opt/rippled_bld/pkg
WORKDIR /opt/rippled_bld/pkg
RUN mkdir -m 777 ./rpmbuild
RUN mkdir -m 777 ./rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
COPY packaging/rpm/build_rpm.sh ./
CMD ./build_rpm.sh

@@ -1,22 +0,0 @@
#!/usr/bin/env bash
set -ex
source /etc/os-release
yum -y upgrade
yum -y update
yum -y install epel-release centos-release-scl
yum -y install \
wget curl time gcc-c++ yum-utils autoconf automake pkgconfig libtool \
libstdc++-static rpm-build gnupg which make cmake \
devtoolset-11 devtoolset-11-gdb devtoolset-11-binutils devtoolset-11-libstdc++-devel \
devtoolset-11-libasan-devel devtoolset-11-libtsan-devel devtoolset-11-libubsan-devel devtoolset-11-liblsan-devel \
flex flex-devel bison bison-devel parallel \
ncurses ncurses-devel ncurses-libs graphviz graphviz-devel \
lzip p7zip bzip2 bzip2-devel lzma-sdk lzma-sdk-devel xz-devel \
zlib zlib-devel zlib-static texinfo openssl openssl-static \
jemalloc jemalloc-devel \
libicu-devel htop \
rh-python38 \
ninja-build git svn \
swig perl-Digest-MD5

@@ -1,28 +0,0 @@
#!/usr/bin/env sh
set -ex
pkgtype=$1
if [ "${pkgtype}" = "rpm" ] ; then
container_name="${RPM_CONTAINER_NAME}"
elif [ "${pkgtype}" = "dpkg" ] ; then
container_name="${DPKG_CONTAINER_NAME}"
else
echo "invalid package type"
exit 1
fi
if docker pull "${ARTIFACTORY_HUB}/${container_name}:latest_${CI_COMMIT_REF_SLUG}"; then
echo "found container for latest - using as cache."
docker tag \
"${ARTIFACTORY_HUB}/${container_name}:latest_${CI_COMMIT_REF_SLUG}" \
"${container_name}:latest_${CI_COMMIT_REF_SLUG}"
CMAKE_EXTRA="-D${pkgtype}_cache_from=${container_name}:latest_${CI_COMMIT_REF_SLUG}"
fi
cmake --version
test -d build && rm -rf build
mkdir -p build/container && cd build/container
eval time \
cmake -Dpackages_only=ON -DCMAKE_VERBOSE_MAKEFILE=ON ${CMAKE_EXTRA} \
-G Ninja ../..
time cmake --build . --target "${pkgtype}_container" -- -v

@@ -1,28 +0,0 @@
#!/usr/bin/env sh
set -ex
pkgtype=$1
if [ "${pkgtype}" = "rpm" ] ; then
container_name="${RPM_CONTAINER_FULLNAME}"
container_tag="${RPM_CONTAINER_TAG}"
elif [ "${pkgtype}" = "dpkg" ] ; then
container_name="${DPKG_CONTAINER_FULLNAME}"
container_tag="${DPKG_CONTAINER_TAG}"
else
echo "invalid package type"
exit 1
fi
time docker pull "${ARTIFACTORY_HUB}/${container_name}"
docker tag \
"${ARTIFACTORY_HUB}/${container_name}" \
"${container_name}"
docker images
test -d build && rm -rf build
mkdir -p build/${pkgtype} && cd build/${pkgtype}
time cmake \
-Dpackages_only=ON \
-Dcontainer_label="${container_tag}" \
-Dhave_package_container=ON \
-DCMAKE_VERBOSE_MAKEFILE=ON \
-Dunity=OFF \
-G Ninja ../..
time cmake --build . --target ${pkgtype} -- -v

@@ -1,15 +0,0 @@
#!/usr/bin/env sh
set -e
# used as a before/setup script for docker steps in gitlab-ci
# expects to be run in standard alpine/dind image
echo $(nproc)
docker login -u rippled \
-p ${ARTIFACTORY_DEPLOY_KEY_RIPPLED} ${ARTIFACTORY_HUB}
apk add --update py-pip
apk add \
bash util-linux coreutils binutils grep \
make ninja cmake build-base gcc g++ abuild git \
python3 python3-dev
pip3 install awscli --break-system-packages
# list curdir contents to build log:
ls -la

@@ -1,16 +0,0 @@
#!/usr/bin/env sh
case ${CI_COMMIT_REF_NAME} in
develop)
export COMPONENT="nightly"
;;
release)
export COMPONENT="unstable"
;;
master)
export COMPONENT="stable"
;;
*)
export COMPONENT="_unknown_"
;;
esac
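# usage (illustrative): other CI scripts source this file and then read COMPONENT, e.g.
#   . ./Builds/containers/gitlab-ci/get_component.sh && echo "${COMPONENT}"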

@@ -1,771 +0,0 @@
#########################################################################
## ##
## gitlab CI definition for rippled build containers and distro ##
## packages (rpm and dpkg). ##
## ##
#########################################################################
# NOTE: these are sensible defaults for Ripple pipelines. These
# can be overridden by project or group variables as needed.
variables:
# these containers are built manually using the rippled
# cmake build (container targets) and tagged/pushed so they
# can be used here
RPM_CONTAINER_TAG: "2023-02-13"
RPM_CONTAINER_NAME: "rippled-rpm-builder"
RPM_CONTAINER_FULLNAME: "${RPM_CONTAINER_NAME}:${RPM_CONTAINER_TAG}"
DPKG_CONTAINER_TAG: "2023-07-31"
DPKG_CONTAINER_NAME: "rippled-dpkg-builder"
DPKG_CONTAINER_FULLNAME: "${DPKG_CONTAINER_NAME}:${DPKG_CONTAINER_TAG}"
ARTIFACTORY_HOST: "artifactory.ops.ripple.com"
ARTIFACTORY_HUB: "${ARTIFACTORY_HOST}:6555"
GIT_SIGN_PUBKEYS_URL: "https://gitlab.ops.ripple.com/xrpledger/rippled-packages/snippets/49/raw"
PUBLIC_REPO_ROOT: "https://repos.ripple.com/repos"
# also need to define this variable ONLY for the primary
# build/publish pipeline on the mainline repo:
# IS_PRIMARY_REPO = "true"
stages:
- build_packages
- sign_packages
- smoketest
- verify_sig
- tag_images
- push_to_test
- verify_from_test
- wait_approval_prod
- push_to_prod
- verify_from_prod
- get_final_hashes
- build_containers
.dind_template: &dind_param
before_script:
- . ./Builds/containers/gitlab-ci/docker_alpine_setup.sh
variables:
docker_driver: overlay2
DOCKER_TLS_CERTDIR: ""
image:
name: artifactory.ops.ripple.com/docker:latest
services:
# workaround for TLS issues - consider going back
# to unversioned `dind` when issues are resolved
- name: artifactory.ops.ripple.com/docker:stable-dind
alias: docker
tags:
- 4xlarge
.only_primary_template: &only_primary
only:
refs:
- /^(master|release|develop)$/
variables:
- $IS_PRIMARY_REPO == "true"
.smoketest_local_template: &run_local_smoketest
tags:
- xlarge
script:
- . ./Builds/containers/gitlab-ci/smoketest.sh local
.smoketest_repo_template: &run_repo_smoketest
tags:
- xlarge
script:
- . ./Builds/containers/gitlab-ci/smoketest.sh repo
#########################################################################
## ##
## stage: build_packages ##
## ##
## build packages using containers from previous stage. ##
## ##
#########################################################################
rpm_build:
timeout: "1h 30m"
stage: build_packages
<<: *dind_param
artifacts:
paths:
- build/rpm/packages/
script:
- . ./Builds/containers/gitlab-ci/build_package.sh rpm
dpkg_build:
timeout: "1h 30m"
stage: build_packages
<<: *dind_param
artifacts:
paths:
- build/dpkg/packages/
script:
- . ./Builds/containers/gitlab-ci/build_package.sh dpkg
#########################################################################
## ##
## stage: sign_packages ##
## ##
## sign the packages produced by the previous stage. ##
## ##
#########################################################################
rpm_sign:
stage: sign_packages
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/centos:7
<<: *only_primary
before_script:
- |
# Make sure GnuPG is installed
yum -y install gnupg rpm-sign
# checking GPG signing support
if [ -n "$GPG_KEY_B64" ]; then
echo "$GPG_KEY_B64"| base64 -d | gpg --batch --no-tty --allow-secret-key-import --import -
unset GPG_KEY_B64
export GPG_PASSPHRASE=$(echo $GPG_KEY_PASS_B64 | base64 -di)
unset GPG_KEY_PASS_B64
export GPG_KEYID=$(gpg --with-colon --list-secret-keys | head -n1 | cut -d : -f 5)
else
echo -e "\033[0;31m****** GPG signing disabled ******\033[0m"
exit 1
fi
artifacts:
paths:
- build/rpm/packages/
script:
- ls -alh build/rpm/packages
- . ./Builds/containers/gitlab-ci/sign_package.sh rpm
dpkg_sign:
stage: sign_packages
dependencies:
- dpkg_build
image:
name: artifactory.ops.ripple.com/ubuntu:18.04
<<: *only_primary
before_script:
- |
# make sure we have GnuPG
apt update
apt install -y gpg dpkg-sig
# checking GPG signing support
if [ -n "$GPG_KEY_B64" ]; then
echo "$GPG_KEY_B64"| base64 -d | gpg --batch --no-tty --allow-secret-key-import --import -
unset GPG_KEY_B64
export GPG_PASSPHRASE=$(echo $GPG_KEY_PASS_B64 | base64 -di)
unset GPG_KEY_PASS_B64
export GPG_KEYID=$(gpg --with-colon --list-secret-keys | head -n1 | cut -d : -f 5)
else
echo -e "\033[0;31m****** GPG signing disabled ******\033[0m"
exit 1
fi
artifacts:
paths:
- build/dpkg/packages/
script:
- ls -alh build/dpkg/packages
- . ./Builds/containers/gitlab-ci/sign_package.sh dpkg
#########################################################################
## ##
## stage: smoketest ##
## ##
## install unsigned packages from previous step and run unit tests. ##
## ##
#########################################################################
centos_7_smoketest:
stage: smoketest
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/centos:7
<<: *run_local_smoketest
rocky_8_smoketest:
stage: smoketest
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/rockylinux/rockylinux:8
<<: *run_local_smoketest
rocky_9_smoketest:
stage: smoketest
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/rockylinux/rockylinux:9
<<: *run_local_smoketest
alma_8_smoketest:
stage: smoketest
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/almalinux:8
<<: *run_local_smoketest
alma_9_smoketest:
stage: smoketest
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/almalinux:9
<<: *run_local_smoketest
fedora_38_smoketest:
stage: smoketest
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/fedora:38
<<: *run_local_smoketest
fedora_39_smoketest:
stage: smoketest
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/fedora:39
<<: *run_local_smoketest
ubuntu_18_smoketest:
stage: smoketest
dependencies:
- dpkg_build
image:
name: artifactory.ops.ripple.com/ubuntu:18.04
<<: *run_local_smoketest
ubuntu_20_smoketest:
stage: smoketest
dependencies:
- dpkg_build
image:
name: artifactory.ops.ripple.com/ubuntu:20.04
<<: *run_local_smoketest
ubuntu_22_smoketest:
stage: smoketest
dependencies:
- dpkg_build
image:
name: artifactory.ops.ripple.com/ubuntu:22.04
<<: *run_local_smoketest
debian_10_smoketest:
stage: smoketest
dependencies:
- dpkg_build
image:
name: artifactory.ops.ripple.com/debian:10
<<: *run_local_smoketest
debian_11_smoketest:
stage: smoketest
dependencies:
- dpkg_build
image:
name: artifactory.ops.ripple.com/debian:11
<<: *run_local_smoketest
debian_12_smoketest:
stage: smoketest
dependencies:
- dpkg_build
image:
name: artifactory.ops.ripple.com/debian:12
<<: *run_local_smoketest
#########################################################################
## ##
## stage: verify_sig ##
## ##
## use git/gpg to verify that HEAD is signed by an approved ##
## committer. The whitelist of pubkeys is manually maintained ##
## and fetched from GIT_SIGN_PUBKEYS_URL (currently a snippet ##
## link). ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
verify_head_signed:
stage: verify_sig
image:
name: artifactory.ops.ripple.com/ubuntu:latest
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/verify_head_commit.sh
#########################################################################
## ##
## stage: tag_images ##
## ##
## apply rippled version tag to containers from previous stage. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
tag_bld_images:
stage: tag_images
variables:
docker_driver: overlay2
DOCKER_TLS_CERTDIR: ""
image:
name: artifactory.ops.ripple.com/docker:latest
services:
# workaround for TLS issues - consider going back
# to unversioned `dind` when issues are resolved
- name: artifactory.ops.ripple.com/docker:stable-dind
alias: docker
tags:
- large
dependencies:
- rpm_sign
- dpkg_sign
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/tag_docker_image.sh
#########################################################################
## ##
## stage: push_to_test ##
## ##
## push packages to artifactory repositories (test) ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
push_test:
stage: push_to_test
variables:
DEB_REPO: "rippled-deb-test-mirror"
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/alpine:latest
artifacts:
paths:
- files.info
dependencies:
- rpm_sign
- dpkg_sign
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "PUT" "."
#########################################################################
## ##
## stage: verify_from_test ##
## ##
## install/test packages from test repos. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
centos_7_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/centos:7
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
rocky_8_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/rockylinux/rockylinux:8
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
rocky_9_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/rockylinux/rockylinux:9
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
almalinux_8_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/almalinux:8
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
almalinux_9_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/almalinux:9
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
fedora_38_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/fedora:38
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
fedora_39_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/fedora:39
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
ubuntu_18_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "bionic"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/ubuntu:18.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
ubuntu_20_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "focal"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/ubuntu:20.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
ubuntu_22_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "jammy"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/ubuntu:22.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_10_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "buster"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/debian:10
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_11_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "bullseye"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/debian:11
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_12_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "bookworm"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/debian:12
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
#########################################################################
## ##
## stage: wait_approval_prod ##
## ##
## wait for manual approval before proceeding to next stage ##
## which pushes to prod repo. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
wait_before_push_prod:
stage: wait_approval_prod
image:
name: artifactory.ops.ripple.com/alpine:latest
<<: *only_primary
script:
- echo "proceeding to next stage"
when: manual
allow_failure: false
#########################################################################
## ##
## stage: push_to_prod ##
## ##
## push packages to artifactory repositories (prod) ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
push_prod:
variables:
DEB_REPO: "rippled-deb"
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/alpine:latest
stage: push_to_prod
artifacts:
paths:
- files.info
dependencies:
- rpm_sign
- dpkg_sign
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "PUT" "."
#########################################################################
## ##
## stage: verify_from_prod ##
## ##
## install/test packages from prod repos. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
centos_7_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/centos:7
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
rocky_8_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/rockylinux/rockylinux:8
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
rocky_9_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/rockylinux/rockylinux:9
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
alma_8_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/almalinux:8
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
alma_9_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/almalinux:9
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
fedora_37_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/fedora:37
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
fedora_38_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/fedora:38
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
ubuntu_18_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "bionic"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/ubuntu:18.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
ubuntu_20_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "focal"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/ubuntu:20.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
ubuntu_22_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "jammy"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/ubuntu:22.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_10_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "buster"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/debian:10
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_11_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "bullseye"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/debian:11
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_12_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "bookworm"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/debian:12
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
#########################################################################
## ##
## stage: get_final_hashes ##
## ##
## fetch final hashes from artifactory. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
get_prod_hashes:
variables:
DEB_REPO: "rippled-deb"
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/alpine:latest
stage: get_final_hashes
artifacts:
paths:
- files.info
dependencies:
- rpm_sign
- dpkg_sign
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "GET" ".checksums"
#########################################################################
## ##
## stage: build_containers ##
## ##
## build containers from docker definitions. These containers are NOT ##
## used for the package build. This step is only used to ensure that ##
## the package build targets and files are still working properly. ##
## ##
#########################################################################
build_centos_container:
stage: build_containers
<<: *dind_param
script:
- . ./Builds/containers/gitlab-ci/build_container.sh rpm
build_ubuntu_container:
stage: build_containers
<<: *dind_param
script:
- . ./Builds/containers/gitlab-ci/build_container.sh dpkg

@@ -1,92 +0,0 @@
#!/usr/bin/env sh
set -e
action=$1
filter=$2
. ./Builds/containers/gitlab-ci/get_component.sh
apk add curl jq coreutils util-linux
TOPDIR=$(pwd)
# DPKG
cd $TOPDIR
cd build/dpkg/packages
CURLARGS="-sk -X${action} -urippled:${ARTIFACTORY_DEPLOY_KEY_RIPPLED}"
RIPPLED_PKG=$(ls rippled_*.deb)
RIPPLED_REPORTING_PKG=$(ls rippled-reporting_*.deb)
RIPPLED_DBG_PKG=$(ls rippled-dbgsym_*.*deb)
RIPPLED_REPORTING_DBG_PKG=$(ls rippled-reporting-dbgsym_*.*deb)
# TODO - where to upload src tgz?
RIPPLED_SRC=$(ls rippled_*.orig.tar.gz)
DEB_MATRIX=";deb.component=${COMPONENT};deb.architecture=amd64"
for dist in bookworm buster bullseye bionic focal jammy; do
DEB_MATRIX="${DEB_MATRIX};deb.distribution=${dist}"
done
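# e.g. (illustrative, for a develop/nightly build) DEB_MATRIX ends up as
#   ;deb.component=nightly;deb.architecture=amd64;deb.distribution=bookworm;deb.distribution=buster;...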
echo "{ \"debs\": {" > "${TOPDIR}/files.info"
for deb in ${RIPPLED_PKG} ${RIPPLED_DBG_PKG} ${RIPPLED_REPORTING_PKG} ${RIPPLED_REPORTING_DBG_PKG}; do
# first item doesn't get a comma separator
if [ $deb != $RIPPLED_PKG ] ; then
echo "," >> "${TOPDIR}/files.info"
fi
echo "\"${deb}\"": | tee -a "${TOPDIR}/files.info"
ca="${CURLARGS}"
if [ "${action}" = "PUT" ] ; then
url="https://${ARTIFACTORY_HOST}/artifactory/${DEB_REPO}/pool/${COMPONENT}/${deb}${DEB_MATRIX}"
ca="${ca} -T${deb}"
elif [ "${action}" = "GET" ] ; then
url="https://${ARTIFACTORY_HOST}/artifactory/api/storage/${DEB_REPO}/pool/${COMPONENT}/${deb}"
fi
echo "file info request url --> ${url}"
eval "curl ${ca} \"${url}\"" | jq -M "${filter}" | tee -a "${TOPDIR}/files.info"
done
echo "}," >> "${TOPDIR}/files.info"
# RPM
cd $TOPDIR
cd build/rpm/packages
RIPPLED_PKG=$(ls rippled-[0-9]*.x86_64.rpm)
RIPPLED_DEV_PKG=$(ls rippled-devel*.rpm)
RIPPLED_DBG_PKG=$(ls rippled-debuginfo*.rpm)
RIPPLED_REPORTING_PKG=$(ls rippled-reporting*.rpm)
# TODO - where to upload src rpm ?
RIPPLED_SRC=$(ls rippled-[0-9]*.src.rpm)
echo "\"rpms\": {" >> "${TOPDIR}/files.info"
for rpm in ${RIPPLED_PKG} ${RIPPLED_DEV_PKG} ${RIPPLED_DBG_PKG} ${RIPPLED_REPORTING_PKG}; do
# first item doesn't get a comma separator
if [ $rpm != $RIPPLED_PKG ] ; then
echo "," >> "${TOPDIR}/files.info"
fi
echo "\"${rpm}\"": | tee -a "${TOPDIR}/files.info"
ca="${CURLARGS}"
if [ "${action}" = "PUT" ] ; then
url="https://${ARTIFACTORY_HOST}/artifactory/${RPM_REPO}/${COMPONENT}/"
ca="${ca} -T${rpm}"
elif [ "${action}" = "GET" ] ; then
url="https://${ARTIFACTORY_HOST}/artifactory/api/storage/${RPM_REPO}/${COMPONENT}/${rpm}"
fi
echo "file info request url --> ${url}"
eval "curl ${ca} \"${url}\"" | jq -M "${filter}" | tee -a "${TOPDIR}/files.info"
done
echo "}}" >> "${TOPDIR}/files.info"
jq '.' "${TOPDIR}/files.info" > "${TOPDIR}/files.info.tmp"
mv "${TOPDIR}/files.info.tmp" "${TOPDIR}/files.info"
if [ ! -z "${SLACK_NOTIFY_URL}" ] && [ "${action}" = "GET" ] ; then
# extract files.info content to variable and sanitize so it can
# be interpolated into a slack text field below
finfo=$(cat ${TOPDIR}/files.info | sed -e ':a' -e 'N' -e '$!ba' -e 's/\n/\\n/g' | sed -E 's/"/\\"/g')
# try posting file info to slack.
# can add channel field to payload if the
# default channel is incorrect. Get rid of
# newlines in payload json since slack doesn't accept them
CONTENT=$(tr -d '[\n]' <<JSON
payload={
"username": "GitlabCI",
"text": "The package build for branch \`${CI_COMMIT_REF_NAME}\` is complete. File hashes are: \`\`\`${finfo}\`\`\`",
"icon_emoji": ":package:"}
JSON
)
curl ${SLACK_NOTIFY_URL} --data-urlencode "${CONTENT}"
fi

@@ -1,38 +0,0 @@
#!/usr/bin/env bash
set -eo pipefail
sign_dpkg() {
if [ -n "${GPG_KEYID}" ]; then
dpkg-sig \
-g "--no-tty --digest-algo 'sha512' --passphrase '${GPG_PASSPHRASE}' --pinentry-mode=loopback" \
-k "${GPG_KEYID}" \
--sign builder \
"build/dpkg/packages/*.deb"
fi
}
sign_rpm() {
if [ -n "${GPG_KEYID}" ] ; then
find build/rpm/packages -name "*.rpm" -exec bash -c '
echo "yes" | setsid rpm \
--define "_gpg_name ${GPG_KEYID}" \
--define "_signature gpg" \
--define "__gpg_check_password_cmd /bin/true" \
--define "__gpg_sign_cmd %{__gpg} gpg --batch --no-armor --digest-algo 'sha512' --passphrase '${GPG_PASSPHRASE}' --no-secmem-warning -u '%{_gpg_name}' --sign --detach-sign --output %{__signature_filename} %{__plaintext_filename}" \
--addsign '{} \;
fi
}
case "${1}" in
dpkg)
sign_dpkg
;;
rpm)
sign_rpm
;;
*)
echo "Usage: ${0} (dpkg|rpm)"
;;
esac

@@ -1,101 +0,0 @@
#!/usr/bin/env sh
set -e
install_from=$1
use_private=${2:-0} # this option not currently needed by any CI scripts,
# reserved for possible future use
if [ "$use_private" -gt 0 ] ; then
REPO_ROOT="https://rippled:${ARTIFACTORY_DEPLOY_KEY_RIPPLED}@${ARTIFACTORY_HOST}/artifactory"
else
REPO_ROOT="${PUBLIC_REPO_ROOT}"
fi
. ./Builds/containers/gitlab-ci/get_component.sh
. /etc/os-release
case ${ID} in
ubuntu|debian)
pkgtype="dpkg"
;;
fedora|centos|rhel|scientific|rocky|almalinux)
pkgtype="rpm"
;;
*)
echo "unrecognized distro!"
exit 1
;;
esac
# this script provides info variables about pkg version
. build/${pkgtype}/packages/build_vars
if [ "${pkgtype}" = "dpkg" ] ; then
# sometimes update fails and requires a cleanup
updateWithRetry()
{
if ! apt-get -y update ; then
rm -rvf /var/lib/apt/lists/*
apt-get -y clean
apt-get -y update
fi
}
if [ "${install_from}" = "repo" ] ; then
apt-get -y upgrade
updateWithRetry
apt-get -y install apt apt-transport-https ca-certificates coreutils util-linux wget gnupg
wget -q -O - "${REPO_ROOT}/api/gpg/key/public" | apt-key add -
echo "deb ${REPO_ROOT}/${DEB_REPO} ${DISTRO} ${COMPONENT}" >> /etc/apt/sources.list
updateWithRetry
# uncomment this next line if you want to see the available package versions
# apt-cache policy rippled
apt-get -y install rippled=${dpkg_full_version}
elif [ "${install_from}" = "local" ] ; then
# cached pkg install
updateWithRetry
apt-get -y install libprotobuf-dev libprotoc-dev protobuf-compiler libssl-dev
rm -f build/dpkg/packages/rippled-dbgsym*.*
dpkg --no-debsig -i build/dpkg/packages/*.deb
else
echo "unrecognized pkg source!"
exit 1
fi
else
yum -y update
if [ "${install_from}" = "repo" ] ; then
pkgs=("yum-utils coreutils util-linux")
case "$ID" in
rocky|almalinux)
pkgs="${pkgs[@]/coreutils}"
esac
yum install -y $pkgs
REPOFILE="/etc/yum.repos.d/artifactory.repo"
echo "[Artifactory]" > ${REPOFILE}
echo "name=Artifactory" >> ${REPOFILE}
echo "baseurl=${REPO_ROOT}/${RPM_REPO}/${COMPONENT}/" >> ${REPOFILE}
echo "enabled=1" >> ${REPOFILE}
echo "gpgcheck=0" >> ${REPOFILE}
echo "gpgkey=${REPO_ROOT}/${RPM_REPO}/${COMPONENT}/repodata/repomd.xml.key" >> ${REPOFILE}
echo "repo_gpgcheck=1" >> ${REPOFILE}
yum -y update
# uncomment this next line if you want to see the available package versions
# yum --showduplicates list rippled
yum -y install ${rpm_version_release}
elif [ "${install_from}" = "local" ] ; then
rm -f build/rpm/packages/rippled-debug*.rpm
rm -f build/rpm/packages/rippled-devel*.rpm
rm -f build/rpm/packages/*.src.rpm
rpm -i build/rpm/packages/*.rpm
else
echo "unrecognized pkg source!"
exit 1
fi
fi
# verify installed version
INSTALLED=$(/opt/ripple/bin/rippled --version | awk '{print $NF}')
if [ "${rippled_version}" != "${INSTALLED}" ] ; then
echo "INSTALLED version ${INSTALLED} does not match ${rippled_version}"
exit 1
fi
# run unit tests
/opt/ripple/bin/rippled --unittest --unittest-jobs $(nproc)
/opt/ripple/bin/validator-keys --unittest

@@ -1,21 +0,0 @@
#!/usr/bin/env sh
set -e
docker login -u rippled \
-p ${ARTIFACTORY_DEPLOY_KEY_RIPPLED} "${ARTIFACTORY_HUB}"
# this gives us rippled_version :
source build/rpm/packages/build_vars
docker pull "${ARTIFACTORY_HUB}/${RPM_CONTAINER_FULLNAME}"
docker pull "${ARTIFACTORY_HUB}/${DPKG_CONTAINER_FULLNAME}"
# tag/push two labels...one using the current rippled version and one just using "latest"
for label in ${rippled_version} latest ; do
docker tag \
"${ARTIFACTORY_HUB}/${RPM_CONTAINER_FULLNAME}" \
"${ARTIFACTORY_HUB}/${RPM_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
docker push \
"${ARTIFACTORY_HUB}/${RPM_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
docker tag \
"${ARTIFACTORY_HUB}/${DPKG_CONTAINER_FULLNAME}" \
"${ARTIFACTORY_HUB}/${DPKG_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
docker push \
"${ARTIFACTORY_HUB}/${DPKG_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
done

@@ -1,17 +0,0 @@
#!/usr/bin/env sh
set -ex
apt -y update
DEBIAN_FRONTEND="noninteractive" apt-get -y install tzdata
apt -y install software-properties-common curl git gnupg
curl -sk -o rippled-pubkeys.txt "${GIT_SIGN_PUBKEYS_URL}"
gpg --import rippled-pubkeys.txt
if git verify-commit HEAD; then
echo "git commit signature check passed"
else
echo "git commit signature check failed"
git log -n 5 --color \
--pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an> [%G?]%Creset' \
--abbrev-commit
exit 1
fi

@@ -1,99 +0,0 @@
#!/usr/bin/env bash
set -ex
# make sure pkg source files are up to date with repo
cd /opt/rippled_bld/pkg
cp -fpru rippled/Builds/containers/packaging/dpkg/debian/. debian/
cp -fpu rippled/Builds/containers/shared/rippled*.service debian/
cp -fpu rippled/Builds/containers/shared/update_sources.sh .
source update_sources.sh
# Build the dpkg
#dpkg uses - as separator, so we need to change our -bN versions to tilde
RIPPLED_DPKG_VERSION=$(echo "${RIPPLED_VERSION}" | sed 's!-!~!g')
# TODO - decide how to handle the trailing/release
# version here (hardcoded to 1). Does it ever need to change?
RIPPLED_DPKG_FULL_VERSION="${RIPPLED_DPKG_VERSION}-1"
git config --global --add safe.directory /opt/rippled_bld/pkg/rippled
cd /opt/rippled_bld/pkg/rippled
if [[ -n $(git status --porcelain) ]]; then
git status
error "Unstaged changes in this repo - please commit first"
fi
git archive --format tar.gz --prefix rippled-${RIPPLED_DPKG_VERSION}/ -o ../rippled-${RIPPLED_DPKG_VERSION}.tar.gz HEAD
cd ..
# dpkg debmake would normally create this link, but we do it manually
ln -s ./rippled-${RIPPLED_DPKG_VERSION}.tar.gz rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz
tar xvf rippled-${RIPPLED_DPKG_VERSION}.tar.gz
cd rippled-${RIPPLED_DPKG_VERSION}
cp -pr ../debian .
# dpkg requires a changelog. We don't currently maintain
# a useable one, so let's just fake it with our current version
# TODO : not sure if the "unstable" will need to change for
# release packages (?)
NOWSTR=$(TZ=UTC date -R)
cat << CHANGELOG > ./debian/changelog
rippled (${RIPPLED_DPKG_FULL_VERSION}) unstable; urgency=low
* see RELEASENOTES
-- Ripple Labs Inc. <support@ripple.com> ${NOWSTR}
CHANGELOG
# PATH must be preserved for our more modern cmake in /opt/local
# TODO : consider allowing lintian to run in future ?
export DH_BUILD_DDEBS=1
debuild --no-lintian --preserve-envvar PATH --preserve-env -us -uc
rc=$?; if [[ $rc != 0 ]]; then
error "error building dpkg"
fi
cd ..
# copy artifacts
cp rippled-reporting_${RIPPLED_DPKG_FULL_VERSION}_amd64.deb ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.deb ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}.dsc ${PKG_OUTDIR}
# dbgsym suffix is ddeb under newer debuild, but just deb under earlier
cp rippled-dbgsym_${RIPPLED_DPKG_FULL_VERSION}_amd64.* ${PKG_OUTDIR}
cp rippled-reporting-dbgsym_${RIPPLED_DPKG_FULL_VERSION}_amd64.* ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.changes ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.build ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}.debian.tar.xz ${PKG_OUTDIR}
# buildinfo is only generated by later versions of debuild
if [ -e rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.buildinfo ] ; then
cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.buildinfo ${PKG_OUTDIR}
fi
pushd ${PKG_OUTDIR}
for f in *.ddeb; do mv -- "$f" "${f%.ddeb}.deb"; done
popd
cat rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.changes
# extract the text in the .changes file that appears between
# Checksums-Sha256: ...
# and
# Files: ...
awk '/Checksums-Sha256:/{hit=1;next}/Files:/{hit=0}hit' \
rippled_${RIPPLED_DPKG_VERSION}-1_amd64.changes | \
sed -E 's!^[[:space:]]+!!' > shasums
DEB_SHA256=$(cat shasums | \
grep "rippled_${RIPPLED_DPKG_VERSION}-1_amd64.deb" | cut -d " " -f 1)
DBG_SHA256=$(cat shasums | \
grep "rippled-dbgsym_${RIPPLED_DPKG_VERSION}-1_amd64.*" | cut -d " " -f 1)
REPORTING_DBG_SHA256=$(cat shasums | \
grep "rippled-reporting-dbgsym_${RIPPLED_DPKG_VERSION}-1_amd64.*" | cut -d " " -f 1)
REPORTING_SHA256=$(cat shasums | \
grep "rippled-reporting_${RIPPLED_DPKG_VERSION}-1_amd64.deb" | cut -d " " -f 1)
SRC_SHA256=$(cat shasums | \
grep "rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz" | cut -d " " -f 1)
echo "deb_sha256=${DEB_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "dbg_sha256=${DBG_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "reporting_sha256=${REPORTING_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "reporting_dbg_sha256=${REPORTING_DBG_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "src_sha256=${SRC_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "rippled_version=${RIPPLED_VERSION}" >> ${PKG_OUTDIR}/build_vars
echo "dpkg_version=${RIPPLED_DPKG_VERSION}" >> ${PKG_OUTDIR}/build_vars
echo "dpkg_full_version=${RIPPLED_DPKG_FULL_VERSION}" >> ${PKG_OUTDIR}/build_vars

View File

@@ -1,3 +0,0 @@
rippled daemon
-- Mike Ellery <mellery451@gmail.com> Tue, 04 Dec 2018 18:19:03 +0000

View File

@@ -1 +0,0 @@
10

View File

@@ -1,19 +0,0 @@
Source: rippled
Section: misc
Priority: extra
Maintainer: Ripple Labs Inc. <support@ripple.com>
Build-Depends: cmake, debhelper (>=9), zlib1g-dev, dh-systemd, ninja-build
Standards-Version: 3.9.7
Homepage: http://ripple.com/
Package: rippled
Architecture: any
Multi-Arch: foreign
Depends: ${misc:Depends}, ${shlibs:Depends}
Description: rippled daemon
Package: rippled-reporting
Architecture: any
Multi-Arch: foreign
Depends: ${misc:Depends}, ${shlibs:Depends}
Description: rippled reporting daemon

View File

@@ -1,86 +0,0 @@
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: rippled
Source: https://github.com/ripple/rippled
Files: *
Copyright: 2012-2019 Ripple Labs Inc.
License: __UNKNOWN__
The accompanying files under various copyrights.
Copyright (c) 2012, 2013, 2014 Ripple Labs Inc.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
The accompanying files incorporate work covered by the following copyright
and previous license notice:
Copyright (c) 2011 Arthur Britto, David Schwartz, Jed McCaleb,
Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant
Some code from Raw Material Software, Ltd., provided under the terms of the
ISC License. See the corresponding source files for more details.
Copyright (c) 2013 - Raw Material Software Ltd.
Please visit http://www.juce.com
Some code from ASIO examples:
// Copyright (c) 2003-2011 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
Some code from Bitcoin:
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2011 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file license.txt or http://www.opensource.org/licenses/mit-license.php.
Some code from Tom Wu:
This software is covered under the following copyright:
/*
* Copyright (c) 2003-2005 Tom Wu
* All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
* EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
* WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
*
* IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
* INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
* RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
* THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
* OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* In addition, the following condition applies:
*
* All redistributions must retain an intact copy of this copyright notice
* and disclaimer.
*/
Address all questions regarding this license to:
Tom Wu
tjw@cs.Stanford.EDU

View File

@@ -1,3 +0,0 @@
/var/log/rippled/
/var/lib/rippled/
/etc/systemd/system/rippled.service.d/

View File

@@ -1,3 +0,0 @@
README.md
LICENSE.md
RELEASENOTES.md

View File

@@ -1,3 +0,0 @@
opt/ripple/include
opt/ripple/lib/*.a
opt/ripple/lib/cmake/ripple

View File

@@ -1,3 +0,0 @@
/var/log/rippled-reporting/
/var/lib/rippled-reporting/
/etc/systemd/system/rippled-reporting.service.d/

View File

@@ -1,8 +0,0 @@
build.rippled-reporting/rippled-reporting opt/rippled-reporting/bin
cfg/rippled-reporting.cfg opt/rippled-reporting/etc
debian/tmp/opt/rippled-reporting/etc/validators.txt opt/rippled-reporting/etc
opt/rippled-reporting/bin/update-rippled-reporting.sh
opt/rippled-reporting/bin/getRippledReportingInfo
opt/rippled-reporting/etc/update-rippled-reporting-cron
etc/logrotate.d/rippled-reporting

View File

@@ -1,3 +0,0 @@
opt/rippled-reporting/etc/rippled-reporting.cfg etc/opt/rippled-reporting/rippled-reporting.cfg
opt/rippled-reporting/etc/validators.txt etc/opt/rippled-reporting/validators.txt
opt/rippled-reporting/bin/rippled-reporting usr/local/bin/rippled-reporting

View File

@@ -1,33 +0,0 @@
#!/bin/sh
set -e
USER_NAME=rippled-reporting
GROUP_NAME=rippled-reporting
case "$1" in
configure)
id -u $USER_NAME >/dev/null 2>&1 || \
adduser --system --quiet \
--home /nonexistent --no-create-home \
--disabled-password \
--group "$GROUP_NAME"
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled-reporting/
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled-reporting/
chmod 755 /var/log/rippled-reporting/
chmod 755 /var/lib/rippled-reporting/
chown -R $USER_NAME:$GROUP_NAME /opt/rippled-reporting
;;
abort-upgrade|abort-remove|abort-deconfigure)
;;
*)
echo "postinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -1,2 +0,0 @@
/opt/ripple/etc/rippled.cfg
/opt/ripple/etc/validators.txt

View File

@@ -1,8 +0,0 @@
opt/ripple/bin/rippled
opt/ripple/bin/validator-keys
opt/ripple/bin/update-rippled.sh
opt/ripple/bin/getRippledInfo
opt/ripple/etc/rippled.cfg
opt/ripple/etc/validators.txt
opt/ripple/etc/update-rippled-cron
etc/logrotate.d/rippled

View File

@@ -1,3 +0,0 @@
opt/ripple/etc/rippled.cfg etc/opt/ripple/rippled.cfg
opt/ripple/etc/validators.txt etc/opt/ripple/validators.txt
opt/ripple/bin/rippled usr/local/bin/rippled

View File

@@ -1,35 +0,0 @@
#!/bin/sh
set -e
USER_NAME=rippled
GROUP_NAME=rippled
case "$1" in
configure)
id -u $USER_NAME >/dev/null 2>&1 || \
adduser --system --quiet \
--home /nonexistent --no-create-home \
--disabled-password \
--group "$GROUP_NAME"
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled/
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled/
chown -R $USER_NAME:$GROUP_NAME /opt/ripple
chmod 755 /var/log/rippled/
chmod 755 /var/lib/rippled/
chmod 644 /opt/ripple/etc/update-rippled-cron
chmod 644 /etc/logrotate.d/rippled
chown -R root:$GROUP_NAME /opt/ripple/etc/update-rippled-cron
;;
abort-upgrade|abort-remove|abort-deconfigure)
;;
*)
echo "postinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -1,17 +0,0 @@
#!/bin/sh
set -e
case "$1" in
purge|remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
;;
*)
echo "postrm called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -1,20 +0,0 @@
#!/bin/sh
set -e
case "$1" in
install|upgrade)
;;
abort-upgrade)
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -1,20 +0,0 @@
#!/bin/sh
set -e
case "$1" in
remove|upgrade|deconfigure)
;;
failed-upgrade)
;;
*)
echo "prerm called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -1,81 +0,0 @@
#!/usr/bin/make -f
export DH_VERBOSE = 1
export DH_OPTIONS = -v
# debuild sets some warnings that don't work well
# for our current build, so try to remove those flags here:
export CFLAGS:=$(subst -Wformat,,$(CFLAGS))
export CFLAGS:=$(subst -Werror=format-security,,$(CFLAGS))
export CXXFLAGS:=$(subst -Wformat,,$(CXXFLAGS))
export CXXFLAGS:=$(subst -Werror=format-security,,$(CXXFLAGS))
%:
dh $@ --with systemd
override_dh_systemd_start:
dh_systemd_start --no-restart-on-upgrade
override_dh_auto_configure:
/root/.pyenv/shims/conan export external/snappy snappy/1.1.10@
/root/.pyenv/shims/conan export external/soci soci/4.0.3@
mkdir build.rippled
cd build.rippled && \
/root/.pyenv/shims/conan install .. \
--profile gcc \
--output-folder . \
--build missing \
--settings build_type=Release
cd build.rippled && \
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-Dvalidator_keys=ON \
-DCMAKE_VERBOSE_MAKEFILE=ON \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake
mkdir build.rippled-reporting
cd build.rippled-reporting && \
/root/.pyenv/shims/conan install .. \
--profile gcc \
--output-folder . \
--settings compiler.cppstd=17 \
--settings build_type=Release \
--build missing \
--build boost \
--build sqlite3 \
--build libuv \
--options reporting=True
cd build.rippled-reporting && \
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-Dvalidator_keys=ON \
-Dstatic=ON \
-Dunity=OFF \
-Dreporting=ON \
-DCMAKE_VERBOSE_MAKEFILE=ON \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake
override_dh_auto_build:
cmake --build build.rippled --target rippled --target validator-keys --parallel 8
cmake --build build.rippled-reporting --target rippled --parallel 8
override_dh_auto_install:
cmake --install build.rippled --prefix debian/tmp/opt/ripple
install -D build.rippled/validator-keys/validator-keys debian/tmp/opt/ripple/bin/validator-keys
install -D Builds/containers/shared/update-rippled.sh debian/tmp/opt/ripple/bin/update-rippled.sh
install -D bin/getRippledInfo debian/tmp/opt/ripple/bin/getRippledInfo
install -D Builds/containers/shared/update-rippled-cron debian/tmp/opt/ripple/etc/update-rippled-cron
install -D Builds/containers/shared/rippled-logrotate debian/tmp/etc/logrotate.d/rippled
rm -rf debian/tmp/opt/ripple/lib64/cmake/date
mkdir -p debian/tmp/opt/rippled-reporting/etc
mkdir -p debian/tmp/opt/rippled-reporting/bin
cp cfg/validators-example.txt debian/tmp/opt/rippled-reporting/etc/validators.txt
sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/update-rippled.sh > debian/tmp/opt/rippled-reporting/bin/update-rippled-reporting.sh
sed -E 's/rippled?/rippled-reporting/g' bin/getRippledInfo > debian/tmp/opt/rippled-reporting/bin/getRippledReportingInfo
sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/update-rippled-cron > debian/tmp/opt/rippled-reporting/etc/update-rippled-reporting-cron
sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/rippled-logrotate > debian/tmp/etc/logrotate.d/rippled-reporting

View File

@@ -1 +0,0 @@
3.0 (quilt)

View File

@@ -1,2 +0,0 @@
#abort-on-upstream-changes
#unapply-patches

View File

@@ -1 +0,0 @@
enable rippled-reporting.service

View File

@@ -1 +0,0 @@
enable rippled.service

View File

@@ -1,82 +0,0 @@
#!/usr/bin/env bash
set -ex
cd /opt/rippled_bld/pkg
cp -fpu rippled/Builds/containers/packaging/rpm/rippled.spec .
cp -fpu rippled/Builds/containers/shared/update_sources.sh .
source update_sources.sh
# Build the rpm
IFS='-' read -r RIPPLED_RPM_VERSION RELEASE <<< "$RIPPLED_VERSION"
export RIPPLED_RPM_VERSION
RPM_RELEASE=${RPM_RELEASE-1}
# post-release version
if [ "hf" = "$(echo "$RELEASE" | cut -c -2)" ]; then
RPM_RELEASE="${RPM_RELEASE}.${RELEASE}"
# pre-release version (-b or -rc)
elif [[ $RELEASE ]]; then
RPM_RELEASE="0.${RPM_RELEASE}.${RELEASE}"
fi
export RPM_RELEASE
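# Illustrative examples of the resulting split (hypothetical values, not taken
# from the build): RIPPLED_VERSION=2.2.0 keeps the default Release of 1;
# 2.2.0-rc1 is treated as a pre-release, giving Release 0.1.rc1; 2.2.0-hf1 is
# a post-release hotfix, giving Release 1.hf1.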
if [[ $RPM_PATCH ]]; then
RPM_PATCH=".${RPM_PATCH}"
export RPM_PATCH
fi
cd /opt/rippled_bld/pkg/rippled
if [[ -n $(git status --porcelain) ]]; then
git status
error "Unstaged changes in this repo - please commit first"
fi
git archive --format tar.gz --prefix rippled/ -o ../rpmbuild/SOURCES/rippled.tar.gz HEAD
cd ..
source /opt/rh/devtoolset-11/enable
rpmbuild --define "_topdir ${PWD}/rpmbuild" -ba rippled.spec
rc=$?; if [[ $rc != 0 ]]; then
error "error building rpm"
fi
# Make a tar of the rpm and source rpm
RPM_VERSION_RELEASE=$(rpm -qp --qf='%{NAME}-%{VERSION}-%{RELEASE}' ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm)
tar_file=$RPM_VERSION_RELEASE.tar.gz
cp ./rpmbuild/RPMS/x86_64/* ${PKG_OUTDIR}
cp ./rpmbuild/SRPMS/* ${PKG_OUTDIR}
RPM_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm 2>/dev/null)
DBG_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-debuginfo*.rpm 2>/dev/null)
DEV_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-devel*.rpm 2>/dev/null)
REP_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-reporting*.rpm 2>/dev/null)
SRC_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/SRPMS/*.rpm 2>/dev/null)
RPM_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm | awk '{ print $1}')"
DBG_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-debuginfo*.rpm | awk '{ print $1}')"
REP_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-reporting*.rpm | awk '{ print $1}')"
DEV_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-devel*.rpm | awk '{ print $1}')"
SRC_SHA256="$(sha256sum ./rpmbuild/SRPMS/*.rpm | awk '{ print $1}')"
echo "rpm_md5sum=$RPM_MD5SUM" > ${PKG_OUTDIR}/build_vars
echo "rep_md5sum=$REP_MD5SUM" >> ${PKG_OUTDIR}/build_vars
echo "dbg_md5sum=$DBG_MD5SUM" >> ${PKG_OUTDIR}/build_vars
echo "dev_md5sum=$DEV_MD5SUM" >> ${PKG_OUTDIR}/build_vars
echo "src_md5sum=$SRC_MD5SUM" >> ${PKG_OUTDIR}/build_vars
echo "rpm_sha256=$RPM_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "rep_sha256=$REP_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "dbg_sha256=$DBG_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "dev_sha256=$DEV_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "src_sha256=$SRC_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "rippled_version=$RIPPLED_VERSION" >> ${PKG_OUTDIR}/build_vars
echo "rpm_version=$RIPPLED_RPM_VERSION" >> ${PKG_OUTDIR}/build_vars
echo "rpm_file_name=$tar_file" >> ${PKG_OUTDIR}/build_vars
echo "rpm_version_release=$RPM_VERSION_RELEASE" >> ${PKG_OUTDIR}/build_vars

View File

@@ -1,239 +0,0 @@
%define rippled_version %(echo $RIPPLED_RPM_VERSION)
%define rpm_release %(echo $RPM_RELEASE)
%define rpm_patch %(echo $RPM_PATCH)
%define _prefix /opt/ripple
Name: rippled
# Dashes in Version extensions must be converted to underscores
Version: %{rippled_version}
Release: %{rpm_release}%{?dist}%{rpm_patch}
Summary: rippled daemon
License: MIT
URL: http://ripple.com/
Source0: rippled.tar.gz
BuildRequires: cmake zlib-static ninja-build
%description
rippled
%package devel
Summary: Files for development of applications using xrpl core library
Group: Development/Libraries
Requires: zlib-static
%description devel
core library for development of standalone applications that sign transactions.
%package reporting
Summary: Reporting Server for rippled
%description reporting
History server for XRP Ledger
%prep
%setup -c -n rippled
%build
source /opt/rh/devtoolset-11/enable
source /opt/rh/rh-python38/enable
pip install "conan<2"
conan profile new default --detect
conan profile update settings.compiler.cppstd=20 default
conan profile update settings.compiler.libcxx=libstdc++11 default
cd rippled
conan export external/snappy snappy/1.1.10@
conan export external/soci soci/4.0.3@
mkdir -p bld.rippled
pushd bld.rippled
cp /opt/libcstd/libstdc++.so.6.0.22 /usr/lib64
cp /opt/libcstd/libstdc++.so.6.0.22 /lib64
ln -sf /usr/lib64/libstdc++.so.6.0.22 /usr/lib64/libstdc++.so.6
ln -sf /lib64/libstdc++.so.6.0.22 /usr/lib64/libstdc++.so.6
conan install .. \
--profile default \
--output-folder . \
--build missing \
--settings build_type=Release
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-Dvalidator_keys=ON \
-DCMAKE_INSTALL_PREFIX=%{_prefix} \
-DCMAKE_VERBOSE_MAKEFILE=ON \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake
cmake --build . --parallel $(nproc) --target rippled --target validator-keys
popd
mkdir -p bld.rippled-reporting
pushd bld.rippled-reporting
conan install .. \
--settings build_type=Release \
--output-folder . \
--build missing \
--settings compiler.cppstd=17 \
--options reporting=True
cmake .. \
-G Ninja \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_INSTALL_PREFIX=%{_prefix} \
-DCMAKE_BUILD_TYPE=Release \
-Dunity=OFF \
-Dstatic=ON \
-Dvalidator_keys=ON \
-Dreporting=ON \
-DCMAKE_VERBOSE_MAKEFILE=ON
cmake --build . --parallel $(nproc) --target rippled
%pre
test -e /etc/pki/tls || { mkdir -p /etc/pki; ln -s /usr/lib/ssl /etc/pki/tls; }
%install
rm -rf $RPM_BUILD_ROOT
DESTDIR=$RPM_BUILD_ROOT cmake --build rippled/bld.rippled --target install #-- -v
mkdir -p $RPM_BUILD_ROOT
rm -rf ${RPM_BUILD_ROOT}%{_prefix}/lib64/
install -d ${RPM_BUILD_ROOT}/etc/opt/ripple
install -d ${RPM_BUILD_ROOT}/usr/local/bin
install -D ./rippled/cfg/rippled-example.cfg ${RPM_BUILD_ROOT}/%{_prefix}/etc/rippled.cfg
install -D ./rippled/cfg/validators-example.txt ${RPM_BUILD_ROOT}/%{_prefix}/etc/validators.txt
ln -sf %{_prefix}/etc/rippled.cfg ${RPM_BUILD_ROOT}/etc/opt/ripple/rippled.cfg
ln -sf %{_prefix}/etc/validators.txt ${RPM_BUILD_ROOT}/etc/opt/ripple/validators.txt
ln -sf %{_prefix}/bin/rippled ${RPM_BUILD_ROOT}/usr/local/bin/rippled
install -D rippled/bld.rippled/validator-keys/validator-keys ${RPM_BUILD_ROOT}%{_bindir}/validator-keys
install -D ./rippled/Builds/containers/shared/rippled.service ${RPM_BUILD_ROOT}/usr/lib/systemd/system/rippled.service
install -D ./rippled/Builds/containers/packaging/rpm/50-rippled.preset ${RPM_BUILD_ROOT}/usr/lib/systemd/system-preset/50-rippled.preset
install -D ./rippled/Builds/containers/shared/update-rippled.sh ${RPM_BUILD_ROOT}%{_bindir}/update-rippled.sh
install -D ./rippled/bin/getRippledInfo ${RPM_BUILD_ROOT}%{_bindir}/getRippledInfo
install -D ./rippled/Builds/containers/shared/update-rippled-cron ${RPM_BUILD_ROOT}%{_prefix}/etc/update-rippled-cron
install -D ./rippled/Builds/containers/shared/rippled-logrotate ${RPM_BUILD_ROOT}/etc/logrotate.d/rippled
install -d $RPM_BUILD_ROOT/var/log/rippled
install -d $RPM_BUILD_ROOT/var/lib/rippled
# reporting mode
%define _prefix /opt/rippled-reporting
mkdir -p ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/
install -D rippled/bld.rippled-reporting/rippled-reporting ${RPM_BUILD_ROOT}%{_bindir}/rippled-reporting
install -D ./rippled/cfg/rippled-reporting.cfg ${RPM_BUILD_ROOT}%{_prefix}/etc/rippled-reporting.cfg
install -D ./rippled/cfg/validators-example.txt ${RPM_BUILD_ROOT}%{_prefix}/etc/validators.txt
install -D ./rippled/Builds/containers/packaging/rpm/50-rippled-reporting.preset ${RPM_BUILD_ROOT}/usr/lib/systemd/system-preset/50-rippled-reporting.preset
ln -sf %{_prefix}/bin/rippled-reporting ${RPM_BUILD_ROOT}/usr/local/bin/rippled-reporting
ln -sf %{_prefix}/etc/rippled-reporting.cfg ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/rippled-reporting.cfg
ln -sf %{_prefix}/etc/validators.txt ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/validators.txt
install -d $RPM_BUILD_ROOT/var/log/rippled-reporting
install -d $RPM_BUILD_ROOT/var/lib/rippled-reporting
install -D ./rippled/Builds/containers/shared/rippled-reporting.service ${RPM_BUILD_ROOT}/usr/lib/systemd/system/rippled-reporting.service
sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/update-rippled.sh > ${RPM_BUILD_ROOT}%{_bindir}/update-rippled-reporting.sh
sed -E 's/rippled?/rippled-reporting/g' ./rippled/bin/getRippledInfo > ${RPM_BUILD_ROOT}%{_bindir}/getRippledReportingInfo
sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/update-rippled-cron > ${RPM_BUILD_ROOT}%{_prefix}/etc/update-rippled-reporting-cron
sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/rippled-logrotate > ${RPM_BUILD_ROOT}/etc/logrotate.d/rippled-reporting
%post
%define _prefix /opt/ripple
USER_NAME=rippled
GROUP_NAME=rippled
getent passwd $USER_NAME &>/dev/null || useradd $USER_NAME
getent group $GROUP_NAME &>/dev/null || groupadd $GROUP_NAME
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled/
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled/
chown -R $USER_NAME:$GROUP_NAME %{_prefix}/
chmod 755 /var/log/rippled/
chmod 755 /var/lib/rippled/
chmod 644 %{_prefix}/etc/update-rippled-cron
chmod 644 /etc/logrotate.d/rippled
chown -R root:$GROUP_NAME %{_prefix}/etc/update-rippled-cron
%post reporting
%define _prefix /opt/rippled-reporting
USER_NAME=rippled-reporting
GROUP_NAME=rippled-reporting
getent passwd $USER_NAME &>/dev/null || useradd -r $USER_NAME
getent group $GROUP_NAME &>/dev/null || groupadd $GROUP_NAME
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled-reporting/
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled-reporting/
chown -R $USER_NAME:$GROUP_NAME %{_prefix}/
chmod 755 /var/log/rippled-reporting/
chmod 755 /var/lib/rippled-reporting/
chmod -x /usr/lib/systemd/system/rippled-reporting.service
%files
%define _prefix /opt/ripple
%doc rippled/README.md rippled/LICENSE.md
%{_bindir}/rippled
/usr/local/bin/rippled
%{_bindir}/update-rippled.sh
%{_bindir}/getRippledInfo
%{_prefix}/etc/update-rippled-cron
%{_bindir}/validator-keys
%config(noreplace) %{_prefix}/etc/rippled.cfg
%config(noreplace) /etc/opt/ripple/rippled.cfg
%config(noreplace) %{_prefix}/etc/validators.txt
%config(noreplace) /etc/opt/ripple/validators.txt
%config(noreplace) /etc/logrotate.d/rippled
%config(noreplace) /usr/lib/systemd/system/rippled.service
%config(noreplace) /usr/lib/systemd/system-preset/50-rippled.preset
%dir /var/log/rippled/
%dir /var/lib/rippled/
%files devel
%{_prefix}/include
%{_prefix}/lib/*.a
%{_prefix}/lib/cmake/ripple
%files reporting
%define _prefix /opt/rippled-reporting
%doc rippled/README.md rippled/LICENSE.md
%{_bindir}/rippled-reporting
/usr/local/bin/rippled-reporting
%config(noreplace) /etc/opt/rippled-reporting/rippled-reporting.cfg
%config(noreplace) %{_prefix}/etc/rippled-reporting.cfg
%config(noreplace) %{_prefix}/etc/validators.txt
%config(noreplace) /etc/opt/rippled-reporting/validators.txt
%config(noreplace) /usr/lib/systemd/system/rippled-reporting.service
%config(noreplace) /usr/lib/systemd/system-preset/50-rippled-reporting.preset
%dir /var/log/rippled-reporting/
%dir /var/lib/rippled-reporting/
%{_bindir}/update-rippled-reporting.sh
%{_bindir}/getRippledReportingInfo
%{_prefix}/etc/update-rippled-reporting-cron
%config(noreplace) /etc/logrotate.d/rippled-reporting
%changelog
* Wed Aug 28 2019 Mike Ellery <mellery451@gmail.com>
- Switch to subproject build for validator-keys
* Wed May 15 2019 Mike Ellery <mellery451@gmail.com>
- Make validator-keys use local rippled build for core lib
* Wed Aug 01 2018 Mike Ellery <mellery451@gmail.com>
- add devel package for signing library
* Thu Jun 02 2016 Brandon Wilson <bwilson@ripple.com>
- Install validators.txt

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env bash
set -e
IFS=. read cm_maj cm_min cm_rel <<<"$1"
: ${cm_rel:=0}
CMAKE_ROOT=${2:-"${HOME}/cmake"}
function cmake_version ()
{
if [[ -d ${CMAKE_ROOT} ]] ; then
local perms=$(test $(uname) = "Linux" && echo "/111" || echo "+111")
local installed=$(find ${CMAKE_ROOT} -perm ${perms} -type f -name cmake)
if [[ "${installed}" != "" ]] ; then
echo "$(${installed} --version | head -1)"
fi
fi
}
installed=$(cmake_version)
if [[ "${installed}" != "" && ${installed} =~ ${cm_maj}.${cm_min}.${cm_rel} ]] ; then
echo "cmake already installed: ${installed}"
exit
fi
# From CMake 3.20+ the "Linux" in the release filename is lowercase, so using `uname` directly won't give the correct download path
if [ ${cm_min} -gt 19 ]; then
linux="linux"
else
linux=$(uname)
fi
pkgname="cmake-${cm_maj}.${cm_min}.${cm_rel}-${linux}-x86_64.tar.gz"
tmppkg="/tmp/cmake.tar.gz"
wget --quiet https://cmake.org/files/v${cm_maj}.${cm_min}/${pkgname} -O ${tmppkg}
mkdir -p ${CMAKE_ROOT}
cd ${CMAKE_ROOT}
tar --strip-components 1 -xf ${tmppkg}
rm -f ${tmppkg}
echo "installed: $(cmake_version)"

View File

@@ -1,15 +0,0 @@
/var/log/rippled/*.log {
daily
minsize 200M
rotate 7
nocreate
missingok
notifempty
compress
compresscmd /usr/bin/nice
compressoptions -n19 ionice -c3 gzip
compressext .gz
postrotate
/opt/ripple/bin/rippled --conf /opt/ripple/etc/rippled.cfg logrotate
endscript
}

View File

@@ -1,15 +0,0 @@
[Unit]
Description=Ripple Daemon
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
ExecStart=/opt/rippled-reporting/bin/rippled-reporting --silent --conf /etc/opt/rippled-reporting/rippled-reporting.cfg
Restart=on-failure
User=rippled-reporting
Group=rippled-reporting
LimitNOFILE=65536
[Install]
WantedBy=multi-user.target

View File

@@ -1,15 +0,0 @@
[Unit]
Description=Ripple Daemon
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
ExecStart=/opt/ripple/bin/rippled --net --silent --conf /etc/opt/ripple/rippled.cfg
Restart=on-failure
User=rippled
Group=rippled
LimitNOFILE=65536
[Install]
WantedBy=multi-user.target

View File

@@ -1,10 +0,0 @@
# For automatic updates, symlink this file to /etc/cron.d/
# Do not remove the newline at the end of this cron script
# bash required for use of RANDOM below.
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin
# invoke check/update script with random delay up to 59 mins
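# (RANDOM ranges 0-32767, so RANDOM*3540/32768 yields a delay of 0-3539 seconds, just under an hour.)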
0 * * * * root sleep $((RANDOM*3540/32768)) && /opt/ripple/bin/update-rippled.sh

View File

@@ -1,65 +0,0 @@
#!/usr/bin/env bash
# auto-update script for rippled daemon
# Check for sudo/root permissions
if [[ $(id -u) -ne 0 ]] ; then
echo "This update script must be run as root or sudo"
exit 1
fi
LOCKDIR=/tmp/rippleupdate.lock
UPDATELOG=/var/log/rippled/update.log
function cleanup {
# If this directory isn't removed, future updates will fail.
rmdir $LOCKDIR
}
# Use mkdir to check if the process is already running; mkdir is atomic, unlike file creation.
if ! mkdir $LOCKDIR 2>/dev/null; then
echo $(date -u) "lockdir exists - won't proceed." >> $UPDATELOG
exit 1
fi
trap cleanup EXIT
source /etc/os-release
can_update=false
if [[ "$ID" == "ubuntu" || "$ID" == "debian" ]] ; then
# Silent update
apt-get update -qq
# The next line is an "awk"ward way to check if the package needs to be updated.
RIPPLE=$(apt-get install -s --only-upgrade rippled | awk '/^Inst/ { print $2 }')
test "$RIPPLE" == "rippled" && can_update=true
function apply_update {
apt-get install rippled -qq
}
elif [[ "$ID" == "fedora" || "$ID" == "centos" || "$ID" == "rhel" || "$ID" == "scientific" ]] ; then
RIPPLE_REPO=${RIPPLE_REPO-stable}
yum --disablerepo=* --enablerepo=ripple-$RIPPLE_REPO clean expire-cache
yum check-update -q --enablerepo=ripple-$RIPPLE_REPO rippled || can_update=true
function apply_update {
yum update -y --enablerepo=ripple-$RIPPLE_REPO rippled
}
else
echo "unrecognized distro!"
exit 1
fi
# Do the actual update and restart the service after reloading systemctl daemon.
if [ "$can_update" = true ] ; then
exec 3>&1 1>>${UPDATELOG} 2>&1
set -e
apply_update
systemctl daemon-reload
systemctl restart rippled.service
echo $(date -u) "rippled daemon updated."
else
echo $(date -u) "no updates available" >> $UPDATELOG
fi

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
function error {
echo $1
exit 1
}
cd /opt/rippled_bld/pkg/rippled
export RIPPLED_VERSION=$(egrep -i -o "\b(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-[0-9a-z\-]+(\.[0-9a-z\-]+)*)?(\+[0-9a-z\-]+(\.[0-9a-z\-]+)*)?\b" src/ripple/protocol/impl/BuildInfo.cpp)
: ${PKG_OUTDIR:=/opt/rippled_bld/pkg/out}
export PKG_OUTDIR
if [ ! -d ${PKG_OUTDIR} ]; then
error "${PKG_OUTDIR} is not mounted"
fi
if [ -x ${OPENSSL_ROOT}/bin/openssl ]; then
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${OPENSSL_ROOT}/lib ${OPENSSL_ROOT}/bin/openssl version -a
fi

View File

@@ -1,22 +0,0 @@
ARG DIST_TAG=18.04
FROM ubuntu:$DIST_TAG
ARG GIT_COMMIT=unknown
ARG CI_USE=false
LABEL git-commit=$GIT_COMMIT
WORKDIR /root
COPY ubuntu-builder/ubuntu_setup.sh .
COPY ubuntu-builder/ubuntu_setup2.sh .
RUN ./ubuntu_setup.sh && rm ubuntu_setup.sh
RUN ./ubuntu_setup2.sh && rm ubuntu_setup2.sh
COPY ubuntu-builder/ubuntu_setup3.sh .
RUN ./ubuntu_setup3.sh && rm ubuntu_setup3.sh
RUN mkdir -m 777 -p /opt/rippled_bld/pkg/
WORKDIR /opt/rippled_bld/pkg
COPY packaging/dpkg/build_dpkg.sh ./
CMD ./build_dpkg.sh

View File

@@ -1,47 +0,0 @@
#!/usr/bin/env bash
set -o errexit
set -o nounset
set -o xtrace
# Parameters
gcc_version=${GCC_VERSION:-11}
export DEBIAN_FRONTEND=noninteractive
apt update
# Iteratively build the list of packages to install so that we can interleave
# the lines with comments explaining their inclusion.
dependencies=''
# - for add-apt-repository
dependencies+=' software-properties-common'
# - to download CMake
dependencies+=' curl'
# - to build CMake
dependencies+=' libssl-dev'
# - for Python
dependencies+=' libbz2-dev liblzma-dev libsqlite3-dev'
# - to download rippled
dependencies+=' git'
# - CMake generators (but not CMake itself)
dependencies+=' make ninja-build'
apt-get install --yes ${dependencies}
add-apt-repository --yes ppa:ubuntu-toolchain-r/test
apt-get install --yes gcc-${gcc_version} g++-${gcc_version}
apt-get install --yes build-essential libssl-dev zlib1g-dev \
libbz2-dev libreadline-dev libsqlite3-dev curl \
libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev
# Give us nice unversioned aliases for gcc and company.
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-${gcc_version} 100 \
--slave /usr/bin/g++ g++ /usr/bin/g++-${gcc_version} \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-${gcc_version} \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-${gcc_version} \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-${gcc_version} \
--slave /usr/bin/gcov gcov /usr/bin/gcov-${gcc_version} \
--slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-${gcc_version} \
--slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-${gcc_version}
update-alternatives --auto gcc

View File

@@ -1,49 +0,0 @@
#!/usr/bin/env bash
set -o errexit
set -o nounset
set -o xtrace
# Parameters
gcc_version=${GCC_VERSION:-11}
cmake_version=${CMAKE_VERSION:-3.25.1}
cmake_sha256=1c511d09516af493694ed9baf13c55947a36389674d657a2d5e0ccedc6b291d8
conan_version=${CONAN_VERSION:-1.60}
curl https://pyenv.run | bash
export PYENV_ROOT="$HOME/.pyenv"
command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"
eval "$(pyenv init -)"
pyenv install 3.11.2
pyenv global 3.11.2
# Download and unpack CMake.
cmake_slug="cmake-${cmake_version}"
cmake_archive="${cmake_slug}.tar.gz"
curl --location --remote-name \
"https://github.com/Kitware/CMake/releases/download/v${cmake_version}/${cmake_archive}"
echo "${cmake_sha256} ${cmake_archive}" | sha256sum --check
tar -xzf ${cmake_archive}
rm ${cmake_archive}
# Build and install CMake.
cd ${cmake_slug}
./bootstrap --parallel=$(nproc)
make --jobs $(nproc)
make install
cd ..
rm --recursive --force ${cmake_slug}
# Install Conan.
pip install --upgrade pip
pip install conan==${conan_version}
conan profile new --detect gcc
conan profile update settings.compiler=gcc gcc
conan profile update settings.compiler.version=${gcc_version} gcc
conan profile update settings.compiler.libcxx=libstdc++11 gcc
conan profile update settings.compiler.cppstd=20 gcc
conan profile update env.CC=/usr/bin/gcc gcc
conan profile update env.CXX=/usr/bin/g++ gcc

View File

@@ -1,7 +0,0 @@
#!/usr/bin/env bash
set -o errexit
set -o nounset
set -o xtrace
apt-get install --yes build-essential fakeroot devscripts cmake debhelper dh-systemd

View File

@@ -72,15 +72,15 @@ It generates many files of [results](results):
desired as described above. In a perfect repo, this file will be
empty.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../.github/workflows/levelization.yml) to validate
Github workflow](../../.github/workflows/check-levelization.yml) to validate
that nothing changed.
* [`ordering.txt`](results/ordering.txt): A list showing relationships
between modules where there are no loops as they actually exist, as
opposed to how they are desired as described above.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../.github/workflows/levelization.yml) to validate
Github workflow](../../.github/workflows/check-levelization.yml) to validate
that nothing changed.
* [`levelization.yml`](../../.github/workflows/levelization.yml)
* [`levelization.yml`](../../.github/workflows/check-levelization.yml)
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or

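A minimal sketch of what that validation can amount to, assuming the workflow simply regenerates the results and diffs them against the committed copies (the script and result paths below are illustrative, not quoted from the workflow):
#!/usr/bin/env bash
set -o errexit
# Rebuild the levelization results from the current sources.
Builds/levelization/levelization.sh
# Fail the job if the committed loop/ordering snapshots no longer match.
git diff --exit-code -- \
Builds/levelization/results/loops.txt \
Builds/levelization/results/ordering.txt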
View File

@@ -13,12 +13,15 @@ then
git clean -ix
fi
# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C
rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"
pushd ../..
echo Raw includes:
grep -r '#include.*/.*\.h' src/ripple/ src/test/ | \
grep -r '^[ ]*#include.*/.*\.h' include src | \
grep -v boost | tee ${includes}
popd
pushd results

View File

@@ -1,51 +1,42 @@
Loop: ripple.app ripple.core
ripple.app > ripple.core
Loop: ripple.app ripple.ledger
ripple.app > ripple.ledger
Loop: ripple.app ripple.net
ripple.app > ripple.net
Loop: ripple.app ripple.nodestore
ripple.app > ripple.nodestore
Loop: ripple.app ripple.overlay
ripple.overlay ~= ripple.app
Loop: ripple.app ripple.peerfinder
ripple.app > ripple.peerfinder
Loop: ripple.app ripple.rpc
ripple.rpc > ripple.app
Loop: ripple.app ripple.shamap
ripple.app > ripple.shamap
Loop: ripple.basics ripple.core
ripple.core > ripple.basics
Loop: ripple.basics ripple.json
ripple.json ~= ripple.basics
Loop: ripple.basics ripple.protocol
ripple.protocol > ripple.basics
Loop: ripple.core ripple.net
ripple.net > ripple.core
Loop: ripple.net ripple.rpc
ripple.rpc > ripple.net
Loop: ripple.nodestore ripple.overlay
ripple.overlay ~= ripple.nodestore
Loop: ripple.overlay ripple.rpc
ripple.rpc ~= ripple.overlay
Loop: test.jtx test.toplevel
test.toplevel > test.jtx
Loop: test.jtx test.unit_test
test.unit_test == test.jtx
Loop: xrpld.app xrpld.core
xrpld.app > xrpld.core
Loop: xrpld.app xrpld.ledger
xrpld.app > xrpld.ledger
Loop: xrpld.app xrpld.net
xrpld.app > xrpld.net
Loop: xrpld.app xrpld.overlay
xrpld.overlay > xrpld.app
Loop: xrpld.app xrpld.peerfinder
xrpld.peerfinder ~= xrpld.app
Loop: xrpld.app xrpld.rpc
xrpld.rpc > xrpld.app
Loop: xrpld.app xrpld.shamap
xrpld.app > xrpld.shamap
Loop: xrpld.core xrpld.net
xrpld.net > xrpld.core
Loop: xrpld.core xrpld.perflog
xrpld.perflog == xrpld.core
Loop: xrpld.net xrpld.rpc
xrpld.rpc ~= xrpld.net
Loop: xrpld.overlay xrpld.rpc
xrpld.rpc ~= xrpld.overlay
Loop: xrpld.perflog xrpld.rpc
xrpld.rpc ~= xrpld.perflog

View File

@@ -1,230 +1,197 @@
ripple.app > ripple.basics
ripple.app > ripple.beast
ripple.app > ripple.conditions
ripple.app > ripple.consensus
ripple.app > ripple.crypto
ripple.app > ripple.json
ripple.app > ripple.protocol
ripple.app > ripple.resource
ripple.app > test.unit_test
ripple.basics > ripple.beast
ripple.conditions > ripple.basics
ripple.conditions > ripple.protocol
ripple.consensus > ripple.basics
ripple.consensus > ripple.beast
ripple.consensus > ripple.json
ripple.consensus > ripple.protocol
ripple.core > ripple.beast
ripple.core > ripple.json
ripple.core > ripple.protocol
ripple.crypto > ripple.basics
ripple.json > ripple.beast
ripple.ledger > ripple.basics
ripple.ledger > ripple.beast
ripple.ledger > ripple.core
ripple.ledger > ripple.json
ripple.ledger > ripple.protocol
ripple.net > ripple.basics
ripple.net > ripple.beast
ripple.net > ripple.json
ripple.net > ripple.protocol
ripple.net > ripple.resource
ripple.nodestore > ripple.basics
ripple.nodestore > ripple.beast
ripple.nodestore > ripple.core
ripple.nodestore > ripple.json
ripple.nodestore > ripple.protocol
ripple.nodestore > ripple.unity
ripple.overlay > ripple.basics
ripple.overlay > ripple.beast
ripple.overlay > ripple.core
ripple.overlay > ripple.json
ripple.overlay > ripple.peerfinder
ripple.overlay > ripple.protocol
ripple.overlay > ripple.resource
ripple.overlay > ripple.server
ripple.peerfinder > ripple.basics
ripple.peerfinder > ripple.beast
ripple.peerfinder > ripple.core
ripple.peerfinder > ripple.protocol
ripple.perflog > ripple.basics
ripple.perflog > ripple.beast
ripple.perflog > ripple.core
ripple.perflog > ripple.json
ripple.perflog > ripple.nodestore
ripple.perflog > ripple.protocol
ripple.perflog > ripple.rpc
ripple.protocol > ripple.beast
ripple.protocol > ripple.crypto
ripple.protocol > ripple.json
ripple.resource > ripple.basics
ripple.resource > ripple.beast
ripple.resource > ripple.json
ripple.resource > ripple.protocol
ripple.rpc > ripple.basics
ripple.rpc > ripple.beast
ripple.rpc > ripple.core
ripple.rpc > ripple.crypto
ripple.rpc > ripple.json
ripple.rpc > ripple.ledger
ripple.rpc > ripple.nodestore
ripple.rpc > ripple.protocol
ripple.rpc > ripple.resource
ripple.rpc > ripple.server
ripple.rpc > ripple.shamap
ripple.server > ripple.basics
ripple.server > ripple.beast
ripple.server > ripple.crypto
ripple.server > ripple.json
ripple.server > ripple.protocol
ripple.shamap > ripple.basics
ripple.shamap > ripple.beast
ripple.shamap > ripple.crypto
ripple.shamap > ripple.nodestore
ripple.shamap > ripple.protocol
test.app > ripple.app
test.app > ripple.basics
test.app > ripple.beast
test.app > ripple.core
test.app > ripple.json
test.app > ripple.ledger
test.app > ripple.overlay
test.app > ripple.protocol
test.app > ripple.resource
test.app > ripple.rpc
libxrpl.basics > xrpl.basics
libxrpl.crypto > xrpl.basics
libxrpl.json > xrpl.basics
libxrpl.json > xrpl.json
libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol
libxrpl.resource > xrpl.basics
libxrpl.resource > xrpl.json
libxrpl.resource > xrpl.resource
libxrpl.server > xrpl.basics
libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.server
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test
test.basics > ripple.basics
test.basics > ripple.beast
test.basics > ripple.json
test.basics > ripple.protocol
test.basics > ripple.rpc
test.app > xrpl.basics
test.app > xrpld.app
test.app > xrpld.core
test.app > xrpld.ledger
test.app > xrpld.nodestore
test.app > xrpld.overlay
test.app > xrpld.rpc
test.app > xrpl.json
test.app > xrpl.protocol
test.app > xrpl.resource
test.basics > test.jtx
test.basics > test.unit_test
test.beast > ripple.basics
test.beast > ripple.beast
test.conditions > ripple.basics
test.conditions > ripple.beast
test.conditions > ripple.conditions
test.consensus > ripple.app
test.consensus > ripple.basics
test.consensus > ripple.beast
test.consensus > ripple.consensus
test.consensus > ripple.ledger
test.basics > xrpl.basics
test.basics > xrpld.perflog
test.basics > xrpld.rpc
test.basics > xrpl.json
test.basics > xrpl.protocol
test.beast > xrpl.basics
test.conditions > xrpl.basics
test.conditions > xrpld.conditions
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
test.core > ripple.basics
test.core > ripple.beast
test.core > ripple.core
test.core > ripple.crypto
test.core > ripple.json
test.core > ripple.server
test.consensus > xrpl.basics
test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpld.ledger
test.consensus > xrpl.json
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
test.csf > ripple.basics
test.csf > ripple.beast
test.csf > ripple.consensus
test.csf > ripple.json
test.csf > ripple.protocol
test.json > ripple.beast
test.json > ripple.json
test.json > ripple.rpc
test.core > xrpl.basics
test.core > xrpld.core
test.core > xrpld.perflog
test.core > xrpl.json
test.core > xrpl.server
test.csf > xrpl.basics
test.csf > xrpld.consensus
test.csf > xrpl.json
test.csf > xrpl.protocol
test.json > test.jtx
test.jtx > ripple.app
test.jtx > ripple.basics
test.jtx > ripple.beast
test.jtx > ripple.consensus
test.jtx > ripple.core
test.jtx > ripple.json
test.jtx > ripple.ledger
test.jtx > ripple.net
test.jtx > ripple.protocol
test.jtx > ripple.resource
test.jtx > ripple.rpc
test.jtx > ripple.server
test.ledger > ripple.app
test.ledger > ripple.basics
test.ledger > ripple.beast
test.ledger > ripple.core
test.ledger > ripple.ledger
test.ledger > ripple.protocol
test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpld.app
test.jtx > xrpld.core
test.jtx > xrpld.ledger
test.jtx > xrpld.net
test.jtx > xrpld.rpc
test.jtx > xrpl.json
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
test.jtx > xrpl.server
test.ledger > test.jtx
test.ledger > test.toplevel
test.net > ripple.net
test.net > test.jtx
test.net > test.toplevel
test.net > test.unit_test
test.nodestore > ripple.app
test.nodestore > ripple.basics
test.nodestore > ripple.beast
test.nodestore > ripple.core
test.nodestore > ripple.nodestore
test.nodestore > ripple.protocol
test.nodestore > ripple.unity
test.ledger > xrpl.basics
test.ledger > xrpld.app
test.ledger > xrpld.core
test.ledger > xrpld.ledger
test.ledger > xrpl.protocol
test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.overlay > ripple.app
test.overlay > ripple.basics
test.overlay > ripple.beast
test.overlay > ripple.overlay
test.overlay > ripple.peerfinder
test.overlay > ripple.protocol
test.overlay > ripple.shamap
test.nodestore > xrpl.basics
test.nodestore > xrpld.core
test.nodestore > xrpld.nodestore
test.nodestore > xrpld.unity
test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test
test.peerfinder > ripple.basics
test.peerfinder > ripple.beast
test.peerfinder > ripple.core
test.peerfinder > ripple.peerfinder
test.peerfinder > ripple.protocol
test.overlay > xrpl.basics
test.overlay > xrpld.app
test.overlay > xrpld.overlay
test.overlay > xrpld.peerfinder
test.overlay > xrpld.shamap
test.overlay > xrpl.protocol
test.peerfinder > test.beast
test.peerfinder > test.unit_test
test.protocol > ripple.basics
test.protocol > ripple.beast
test.protocol > ripple.crypto
test.protocol > ripple.json
test.protocol > ripple.protocol
test.peerfinder > xrpl.basics
test.peerfinder > xrpld.core
test.peerfinder > xrpld.peerfinder
test.peerfinder > xrpl.protocol
test.protocol > test.toplevel
test.resource > ripple.basics
test.resource > ripple.beast
test.resource > ripple.resource
test.protocol > xrpl.basics
test.protocol > xrpl.json
test.protocol > xrpl.protocol
test.resource > test.unit_test
test.rpc > ripple.app
test.rpc > ripple.basics
test.rpc > ripple.beast
test.rpc > ripple.core
test.rpc > ripple.json
test.rpc > ripple.net
test.rpc > ripple.nodestore
test.rpc > ripple.overlay
test.rpc > ripple.protocol
test.rpc > ripple.resource
test.rpc > ripple.rpc
test.resource > xrpl.basics
test.resource > xrpl.resource
test.rpc > test.jtx
test.rpc > test.nodestore
test.rpc > test.toplevel
test.server > ripple.app
test.server > ripple.basics
test.server > ripple.beast
test.server > ripple.core
test.server > ripple.json
test.server > ripple.rpc
test.server > ripple.server
test.rpc > xrpl.basics
test.rpc > xrpld.app
test.rpc > xrpld.core
test.rpc > xrpld.net
test.rpc > xrpld.overlay
test.rpc > xrpld.rpc
test.rpc > xrpl.json
test.rpc > xrpl.protocol
test.rpc > xrpl.resource
test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test
test.shamap > ripple.basics
test.shamap > ripple.beast
test.shamap > ripple.nodestore
test.shamap > ripple.protocol
test.shamap > ripple.shamap
test.server > xrpl.basics
test.server > xrpld.app
test.server > xrpld.core
test.server > xrpld.rpc
test.server > xrpl.json
test.server > xrpl.server
test.shamap > test.unit_test
test.toplevel > ripple.json
test.shamap > xrpl.basics
test.shamap > xrpld.nodestore
test.shamap > xrpld.shamap
test.shamap > xrpl.protocol
test.toplevel > test.csf
test.unit_test > ripple.basics
test.unit_test > ripple.beast
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
xrpl.json > xrpl.basics
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol
xrpl.server > xrpl.basics
xrpl.server > xrpl.json
xrpl.server > xrpl.protocol
xrpld.app > test.unit_test
xrpld.app > xrpl.basics
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus
xrpld.app > xrpld.nodestore
xrpld.app > xrpld.perflog
xrpld.app > xrpl.json
xrpld.app > xrpl.protocol
xrpld.app > xrpl.resource
xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.protocol
xrpld.core > xrpl.basics
xrpld.core > xrpl.json
xrpld.core > xrpl.protocol
xrpld.ledger > xrpl.basics
xrpld.ledger > xrpl.json
xrpld.ledger > xrpl.protocol
xrpld.net > xrpl.basics
xrpld.net > xrpl.json
xrpld.net > xrpl.protocol
xrpld.net > xrpl.resource
xrpld.nodestore > xrpl.basics
xrpld.nodestore > xrpld.core
xrpld.nodestore > xrpld.unity
xrpld.nodestore > xrpl.json
xrpld.nodestore > xrpl.protocol
xrpld.overlay > xrpl.basics
xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpld.perflog
xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.resource
xrpld.overlay > xrpl.server
xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol
xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.json
xrpld.rpc > xrpl.basics
xrpld.rpc > xrpld.core
xrpld.rpc > xrpld.ledger
xrpld.rpc > xrpld.nodestore
xrpld.rpc > xrpl.json
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server
xrpld.shamap > xrpl.basics
xrpld.shamap > xrpld.nodestore
xrpld.shamap > xrpl.protocol

View File

@@ -9,23 +9,43 @@ endif()
# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
project(rippled)
project(xrpl)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
# GCC-specific fixes
add_compile_options(-Wno-unknown-pragmas -Wno-subobject-linkage)
# -Wno-subobject-linkage can be removed when we upgrade GCC version to at least 13.3
elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# Clang-specific fixes
add_compile_options(-Wno-unknown-warning-option) # Ignore unknown warning options
elseif(MSVC)
# MSVC-specific fixes
add_compile_options(/wd4068) # Ignore unknown pragmas
endif()
# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if(Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git describe --always --abbrev=40
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if(gch)
set(GIT_COMMIT_HASH "${gch}")
message(STATUS gch: ${GIT_COMMIT_HASH})
add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif()
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if(gb)
set(GIT_BRANCH "${gb}")
message(STATUS gb: ${GIT_BRANCH})
add_definitions(-DGIT_BRANCH="${GIT_BRANCH}")
endif()
endif() #git
if(thread_safety_analysis)
@@ -38,7 +58,6 @@ include (CheckCXXCompilerFlag)
include (FetchContent)
include (ExternalProject)
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
include (ProcessorCount)
if (target)
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()
@@ -46,7 +65,6 @@ endif ()
include(RippledSanity)
include(RippledVersion)
include(RippledSettings)
include(RippledRelease)
# this check has to remain in the top-level cmake
# because of the early return statement
if (packages_only)
@@ -72,9 +90,16 @@ set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
set(SECP256K1_INSTALL TRUE)
add_subdirectory(src/secp256k1)
set(SECP256K1_BUILD_BENCHMARK FALSE)
set(SECP256K1_BUILD_TESTS FALSE)
set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
set(SECP256K1_BUILD_EXAMPLES FALSE)
add_subdirectory(external/secp256k1)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(src/ed25519-donna)
add_subdirectory(external/ed25519-donna)
add_subdirectory(external/antithesis-sdk)
find_package(gRPC REQUIRED)
find_package(lz4 REQUIRED)
# Target names with :: are not allowed in a generator expression.
# We need to pull the include directories and imported location properties
@@ -82,7 +107,6 @@ find_package(lz4 REQUIRED)
find_package(LibArchive REQUIRED)
find_package(SOCI REQUIRED)
find_package(SQLite3 REQUIRED)
find_package(Snappy REQUIRED)
option(rocksdb "Enable RocksDB" ON)
if(rocksdb)
@@ -95,10 +119,10 @@ endif()
find_package(nudb REQUIRED)
find_package(date REQUIRED)
find_package(xxHash REQUIRED)
target_link_libraries(ripple_libs INTERFACE
ed25519::ed25519
LibArchive::LibArchive
lz4::lz4
OpenSSL::Crypto
OpenSSL::SSL
@@ -117,21 +141,16 @@ else()
endif()
target_link_libraries(ripple_libs INTERFACE ${nudb})
if(reporting)
find_package(cassandra-cpp-driver REQUIRED)
find_package(PostgreSQL REQUIRED)
target_link_libraries(ripple_libs INTERFACE
cassandra-cpp-driver::cassandra-cpp-driver
PostgreSQL::PostgreSQL
)
if(coverage)
include(RippledCov)
endif()
###
set(PROJECT_EXPORT_SET RippleExports)
include(RippledCore)
include(deps/Protobuf)
include(deps/gRPC)
include(RippledInstall)
include(RippledCov)
include(RippledMultiConfig)
include(RippledValidatorKeys)
if(tests)
include(CTest)
add_subdirectory(src/tests/libxrpl)
endif()

Some files were not shown because too many files have changed in this diff.