Compare commits

..

83 Commits

Author SHA1 Message Date
Valentin Balaschenko
b1ab757c1e Merge branch 'develop' into vlntb/malloc-trim 2026-02-20 15:58:54 +00:00
Valentin Balaschenko
5d7e7d6458 fixing tests 2026-02-20 15:57:30 +00:00
Valentin Balaschenko
0955d371d4 std::chrono::microseconds + correcting report 2026-02-20 15:37:05 +00:00
Ayaz Salikhov
d03d72bfd5 ci: Add dependabot config (#6379) 2026-02-20 09:19:00 +00:00
Ed Hennis
6f35d94b2f Fix tautological assertion (#6393) 2026-02-20 01:58:47 +00:00
Ayaz Salikhov
2c1fad1023 chore: Apply clang-format width 100 (#6387) 2026-02-19 23:30:00 +00:00
Ayaz Salikhov
25cca46553 chore: Set clang-format width to 100 in config file (#6387) 2026-02-19 23:29:46 +00:00
Ayaz Salikhov
469ce9f291 chore: Set cmake-format width to 100 (#6386) 2026-02-19 19:42:51 +00:00
Sergey Kuznetsov
31302877ab ci: Add clang tidy workflow to ci (#6369) 2026-02-19 14:06:44 -05:00
Valentin Balaschenko
db1c8c228b clang-format 2026-02-18 11:25:20 +00:00
Valentin Balaschenko
4d7141437d Merge branch 'develop' into vlntb/malloc-trim 2026-02-18 11:22:50 +00:00
Jingchen
0976b2b68b refactor: Modularize app/tx (#6228) 2026-02-17 18:10:07 +00:00
Valentin Balaschenko
2e7485b14f Merge branch 'develop' into vlntb/malloc-trim 2026-02-17 17:25:26 +00:00
Valentin Balaschenko
b401645c0e not searchable fix 2026-02-17 17:15:47 +00:00
Jingchen
36240116a5 refactor: Decouple app/tx from Application and Config (#6227)
This change decouples app/tx from `Application` and `Config` to clear the way for moving the transactors to `libxrpl`.
2026-02-17 11:29:53 -05:00
Sergey Kuznetsov
958d8f3754 chore: Update clang-format to 21.1.8 (#6352) 2026-02-16 14:31:18 -05:00
Jingchen
ac0ad3627f refactor: Modularize HashRouter, Conditions, and OrderBookDB (#6226)
This change modularizes additional components by moving code to `libxrpl`.
2026-02-13 10:34:37 -05:00
nuxtreact
cd218346ff chore: Fix minor issues in comments (#6346) 2026-02-12 14:55:27 -05:00
Jingchen
5edd3566f7 refactor: Modularize the NetworkOPs interface (#6225)
This change moves the NetworkOPs interface into `libxrpl`, leaving its implementation in `xrpld`.
2026-02-12 13:15:03 -05:00
Pratik Mankawde
11e8d1f8a2 chore: Fix gcov lib coverage build failure on macOS (#6350)
For coverage builds, we try to link against the environment-specific `gcov` library. However, since macOS does not provide this library (nor the associated coverage tools to generate reports), coverage builds on that platform were failing at the linking stage.

We actually don't need to explicitly force this linking, as the `CodeCoverage` file already has correct detection logic (currently on lines 177-193), which is invoked when the `--coverage` flag is provided:
* AppleClang: Uses `xcrun -f llvm-cov` to set `GCOV_TOOL="llvm-cov gcov"`.
* Clang: Finds `llvm-cov` to set `GCOV_TOOL="llvm-cov gcov"`.
* GCC: Finds `gcov` to set `GCOV_TOOL="gcov"`.
The `GCOV_TOOL` is then passed to `gcovr` on line 416, so the correct tool is used for processing coverage data.

This change therefore removes the `gcov` suffix from lines 473 and 475 in the `CodeCoverage.cmake` file.
2026-02-12 06:11:26 -05:00
Valentin Balaschenko
45c1e93339 Merge branch 'develop' into vlntb/malloc-trim 2026-02-11 16:57:23 +00:00
Jingchen
9f17d10348 refactor: Modularize RelationalDB (#6224)
The rdb module was not properly designed; this change fixes that. The module had three classes:
1) The abstract class `RelationalDB`.
2) The abstract class `SQLiteDatabase`, which inherited from `RelationalDB` and added some pure virtual methods.
3) The concrete class `SQLiteDatabaseImp`, which inherited from `SQLiteDatabase` and implemented all methods.

The updated code simplifies this as follows:
* The `SQLiteDatabaseImp` has become `SQLiteDatabase`, and
* The former `SQLiteDatabase` has been merged into `RelationalDatabase`.
2026-02-11 16:22:01 +00:00
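
For illustration, a minimal C++ sketch of the simplified hierarchy described in the entry above; the method shown is a hypothetical placeholder, not the actual `RelationalDatabase` interface.

```cpp
#include <cstdint>

// Single abstract interface: the former SQLiteDatabase pure virtuals are
// folded directly into it (illustrative method only).
class RelationalDatabase
{
public:
    virtual ~RelationalDatabase() = default;
    virtual std::uint32_t getMinLedgerSeq() = 0;
};

// Single concrete implementation, formerly SQLiteDatabaseImp.
class SQLiteDatabase : public RelationalDatabase
{
public:
    std::uint32_t getMinLedgerSeq() override
    {
        return 0;  // stub for illustration
    }
};
```

Collapsing the three-level hierarchy into one interface and one implementation removes the redundant middle layer without changing callers that program against the abstract type.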
Valentin Balaschenko
97def26f07 lcov excl 2026-02-11 16:09:33 +00:00
Valentin Balaschenko
d548bfc0cb lcov excl 2026-02-11 16:06:31 +00:00
Valentin Balaschenko
2b3060b3bb cspell 2026-02-11 15:07:51 +00:00
Valentin Balaschenko
a0e98631e6 updated unit test 2026-02-11 15:03:57 +00:00
Valentin Balaschenko
4d1e979a48 pragma 2026-02-11 14:14:03 +00:00
Valentin Balaschenko
50ba98e4e2 cspell 2026-02-11 14:09:06 +00:00
Valentin Balaschenko
a82c1b17d9 Merge branch 'develop' into vlntb/malloc-trim 2026-02-11 13:52:45 +00:00
Jingchen
ef284692db refactor: Modularize WalletDB and Manifest (#6223)
This change modularizes `WalletDB` and `Manifest`. Note that the wallet DB has nothing to do with account wallets; it stores node configuration, which is why it depends on the manifest code.
2026-02-11 13:42:31 +00:00
Valentin Balaschenko
fcf3bd340e cleanup 2026-02-11 13:34:44 +00:00
Valentin Balaschenko
3fe807142d simplify RSS reporting 2026-02-11 11:55:41 +00:00
Valentin Balaschenko
3bceec0dbc file read optimisation 2026-02-11 11:48:02 +00:00
Valentin Balaschenko
94a14e9e7e Merge branch 'develop' into vlntb/malloc-trim 2026-02-11 10:58:29 +00:00
Olek
e11f6190b7 fix: Update invariant checks for Permissioned Domains (#6134) 2026-02-10 14:02:53 -05:00
Valentin Balaschenko
db2734cbc9 refactor: Change main thread name to xrpld-main (#6336)
This change builds on the thread-renaming PR (#6212) by renaming the main thread to reduce ambiguity in performance monitoring tools.
2026-02-06 16:33:42 -05:00
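
As a rough, hedged illustration of the technique (not the code from the PR): on Linux with glibc, a process can rename its main thread via `pthread_setname_np`, which is the name monitoring tools such as `top -H` or `perf` then display.

```cpp
#include <pthread.h>

int main()
{
    // glibc-specific; thread names are limited to 15 characters plus the
    // terminating NUL. "xrpld-main" is the name from the commit subject.
    pthread_setname_np(pthread_self(), "xrpld-main");
    return 0;
}
```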
Mayukha Vadari
bf4674f42b refactor: Fix spelling issues in tests (#6199)
This change removes the `src/tests` exception from the `cspell` config and fixes all the issues that arise as a result. No functionality/test change.
2026-02-06 20:30:22 +00:00
Mayukha Vadari
f5208fc850 test: Add file and line location to Env (#6276)
This change uses `std::source_location` to output the file and line location of the call that triggered a failed transaction.
2026-02-06 18:37:01 +00:00
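
A minimal sketch of the `std::source_location` pattern described above, assuming a hypothetical helper rather than the actual `Env` API: a defaulted `source_location` parameter captures the caller's file and line, which is printed when a check fails.

```cpp
#include <iostream>
#include <source_location>

// Hypothetical helper; the real Env member functions differ.
void
expectSuccess(
    bool ok,
    std::source_location loc = std::source_location::current())
{
    if (!ok)
        std::cerr << "failed transaction at " << loc.file_name() << ':'
                  << loc.line() << '\n';
}

int main()
{
    expectSuccess(false);  // reports the file and line of this call
}
```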
Ayaz Salikhov
2305bc98a4 chore: Remove CODEOWNERS (#6337) 2026-02-06 11:39:23 -05:00
Bart
677758b1cc perf: Remove unnecessary caches (#5439)
This change removes the cache in `DatabaseNodeImp` and simplifies the caching logic in `SHAMapStoreImp`. As NuDB and RocksDB already use caches internally, additional caches in the code add little value and may even be unnecessary, as preliminary performance analyses also confirmed.
2026-02-06 09:42:35 -05:00
Bart
25d7c2c4ec chore: Restore unity builds (#6328)
In certain cases, such as when modifying headers used by many compilation units, performing a unity build is slower than performing a regular build with `ccache` enabled. A unity build does have the benefit, however, that it can detect issues such as macro redefinitions within the group of files that are compiled together as a unit. This change therefore restores the ability to perform unity builds, but instead of running every configuration both with and without unity enabled, unity is now enabled for only a single configuration to keep computational use low.

As part of restoring the code, it became clear that two configurations currently have coverage enabled, because the check does not specifically target Debian Bookworm and therefore also applies to Debian Trixie. This has been fixed in this change as well.
2026-02-06 14:12:45 +00:00
Bart
0a626d95f4 refactor: Update secp256k1 to 0.7.1 (#6331)
The latest secp256k1 release, 0.7.1, contains bug fixes that we may benefit from; see https://github.com/bitcoin-core/secp256k1/blob/master/CHANGELOG.md.
2026-02-05 16:45:57 +00:00
Niq Dudfield
6006c281e2 fix: Increment sequence when accepting new manifests (#6059)
The `ManifestCache::applyManifest` function was returning early without incrementing `seq_`. `OverlayImpl` uses this sequence to identify/invalidate a cached `TMManifests` message, which is exchanged with peers on connection. Depending on network size, startup sequencing, and topology, this can cause syncing issues. This change therefore increments `seq_` when a new manifest is accepted.
2026-02-05 10:40:27 -05:00
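
A hedged sketch of the behaviour described above (the real `ManifestCache::applyManifest` takes a `Manifest` and has more branches): the point of the fix is that accepting a new manifest must bump `seq_`, so the cached `TMManifests` message is regenerated for peers.

```cpp
#include <atomic>
#include <cstdint>

enum class ManifestDisposition { accepted, stale, invalid };

class ManifestCache
{
    std::atomic<std::uint32_t> seq_{0};

public:
    ManifestDisposition
    applyManifest(/* Manifest m -- validation elided */)
    {
        // Previously an early-return path could accept the manifest without
        // incrementing seq_; the fix ensures the counter is always bumped on
        // acceptance so a fresh TMManifests message is produced.
        ++seq_;
        return ManifestDisposition::accepted;
    }

    std::uint32_t
    sequence() const
    {
        return seq_.load(std::memory_order_relaxed);
    }
};
```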
Vito Tumas
e79673cf40 fix typo in LendingHelpers unit-test (#6215) 2026-02-05 10:23:44 +00:00
Ayaz Salikhov
7f41012e59 chore: Update secp256k1 and openssl (#6327) 2026-02-04 18:27:10 +00:00
Bart
b449a6ee84 chore: Remove unnecessary script (#6326) 2026-02-04 11:30:16 -05:00
Bart
34ef577604 refactor: Replace include guards by '#pragma once' (#6322)
This change replaces all include guards in the `src/` and `include/` directories by `#pragma once`.
2026-02-04 09:50:21 -05:00
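
The transformation is mechanical; a hypothetical header before and after (the guard prefix follows the `XRPL_` convention assumed by the new CI check further below):

```cpp
// Before: a traditional include guard.
//
//   #ifndef XRPL_EXAMPLE_WIDGET_H_INCLUDED
//   #define XRPL_EXAMPLE_WIDGET_H_INCLUDED
//   struct Widget { int id; };
//   #endif
//
// After: the same header using the non-standard but universally supported
// pragma.
#pragma once

struct Widget
{
    int id;
};
```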
Bart
3a172301ce chore: Remove unity builds (#6300)
Unity builds were intended to speed up builds by bundling multiple files into compilation units. However, now that ccache is available on all platforms, there is no longer a need for unity builds, as ccache stores individual compiled build objects for reuse. This change therefore removes the ability to perform unity builds.
2026-02-03 22:55:22 +00:00
Jingchen
6c1a92fe93 refactor: Add ServiceRegistry to help modularization (#6222)
Currently we pass the `Application` object around, and the `Application` class acts more like a service registry that gives other classes access to other services. To allow modularization, we should replace `Application` with a service registry class so that modules that depend on `Application` for other services can be moved easily. This change adds the `ServiceRegistry` class.
2026-02-03 19:08:27 +00:00
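
A minimal sketch of the service-registry idea, assuming a type-indexed map; the actual `ServiceRegistry` class added by the PR is more elaborate, and the lookup style shown here is only one possible design.

```cpp
#include <memory>
#include <stdexcept>
#include <typeindex>
#include <typeinfo>
#include <unordered_map>

class ServiceRegistry
{
    std::unordered_map<std::type_index, std::shared_ptr<void>> services_;

public:
    template <class Service>
    void
    add(std::shared_ptr<Service> service)
    {
        services_[std::type_index(typeid(Service))] = std::move(service);
    }

    template <class Service>
    Service&
    get()
    {
        auto const it = services_.find(std::type_index(typeid(Service)));
        if (it == services_.end())
            throw std::runtime_error("service not registered");
        return *std::static_pointer_cast<Service>(it->second);
    }
};

// A module can then depend on ServiceRegistry instead of the full
// Application, e.g. registry.get<HashRouter>() rather than
// app.getHashRouter().
```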
Valentin Balaschenko
f5ccd7b476 experimenting with padding 2026-02-03 11:32:51 +00:00
Valentin Balaschenko
1e408cb401 remove inefficient triggers + fix test 2026-02-02 17:57:35 +00:00
Valentin Balaschenko
0e78c8cd5b Merge branch 'develop' into vlntb/malloc-trim 2026-02-02 16:31:19 +00:00
Valentin Balaschenko
5976206f46 sweep only 2 2026-01-19 15:12:49 +00:00
Valentin Balaschenko
26aa1536c1 trim on separate thread 2026-01-16 12:58:30 +00:00
Valentin Balaschenko
98e7d8e3bc leave only sweep 2026-01-16 12:40:20 +00:00
Valentin Balaschenko
3b47a677c3 16MB pad 2026-01-16 12:21:41 +00:00
Valentin Balaschenko
b8c56ba83e 1MB pad 2026-01-16 12:20:55 +00:00
Valentin Balaschenko
dfd832ba7d 256Kb pad 2026-01-16 12:17:08 +00:00
Valentin Balaschenko
421f61feef Merge branch 'develop' into vlntb/malloc-trim 2026-01-15 10:07:28 +00:00
Valentin Balaschenko
99648f7986 added measuring duration and page faults instrumentation 2026-01-14 18:49:12 +00:00
Valentin Balaschenko
e57ffcc2e8 Merge branch 'develop' into vlntb/malloc-trim 2026-01-13 16:56:51 +00:00
Valentin Balaschenko
98cc608889 update namespaces 2026-01-13 16:54:16 +00:00
Valentin Balaschenko
508a9c990d Merge branch 'develop' into vlntb/malloc-trim 2026-01-13 14:59:03 +00:00
Valentin Balaschenko
523eb17883 Merge branch 'develop' into vlntb/malloc-trim 2026-01-08 10:03:28 +00:00
Valentin Balaschenko
521abcc476 Merge branch 'develop' into vlntb/malloc-trim 2026-01-06 16:41:45 +00:00
Valentin Balaschenko
030e64938b Merge branch 'develop' into vlntb/malloc-trim 2025-12-02 10:33:41 -05:00
Valentin Balaschenko
8973ec16ad Merge branch 'develop' into vlntb/malloc-trim 2025-12-01 10:52:29 -05:00
Valentin Balaschenko
645fddaf82 remove unused 2025-11-19 11:48:16 +00:00
Valentin Balaschenko
265ea4b270 Merge branch 'develop' into vlntb/malloc-trim 2025-11-19 11:38:54 +00:00
Valentin Balaschenko
e77bd4e2d8 remove untested 2025-11-19 11:38:18 +00:00
Valentin Balaschenko
6a8a1b7e28 Merge branch 'develop' into vlntb/malloc-trim 2025-11-14 17:33:23 +02:00
Valentin Balaschenko
efe7177d1b load mode with relaxed ordering 2025-11-14 13:07:04 +00:00
Valentin Balaschenko
2b2b361c87 add malloc trim after sync complete 2025-11-14 13:01:38 +00:00
Valentin Balaschenko
ff8b4353bc malloc trim once orderbook update finished 2025-11-14 12:11:20 +00:00
Valentin Balaschenko
50d606539c fixing test 2025-11-13 17:16:53 +00:00
Valentin Balaschenko
d85f7073dd Merge branch 'develop' into vlntb/malloc-trim 2025-11-13 15:57:05 +02:00
Valentin Balaschenko
334382f031 cleanup and notes 2025-11-13 13:56:36 +00:00
Valentin Balaschenko
2d41bfec05 Merge branch 'develop' into vlntb/malloc-trim 2025-11-12 15:36:29 +02:00
Valentin Balaschenko
52c83684cd unit tests + refactore 2025-11-12 13:35:21 +00:00
Valentin Balaschenko
72b34e6615 efficient call from doSweep and online delete 2025-11-11 16:53:02 +00:00
Valentin Balaschenko
a1ed175b66 trim min internal 2025-11-11 16:08:04 +00:00
Valentin Balaschenko
3fdd42af63 encapsulate and instrument 2025-11-11 15:19:50 +00:00
Valentin Balaschenko
ac5554e9f5 testing malloc trim 2025-11-05 21:01:13 +00:00
1308 changed files with 32567 additions and 20056 deletions


@@ -37,7 +37,7 @@ BinPackParameters: false
BreakBeforeBinaryOperators: false
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true
ColumnLimit: 120
ColumnLimit: 100
CommentPragmas: "^ IWYU pragma:"
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4

191 .clang-tidy Normal file

@@ -0,0 +1,191 @@
---
Checks: "-*,
bugprone-argument-comment
"
# bugprone-assert-side-effect,
# bugprone-bad-signal-to-kill-thread,
# bugprone-bool-pointer-implicit-conversion,
# bugprone-casting-through-void,
# bugprone-chained-comparison,
# bugprone-compare-pointer-to-member-virtual-function,
# bugprone-copy-constructor-init,
# bugprone-crtp-constructor-accessibility,
# bugprone-dangling-handle,
# bugprone-dynamic-static-initializers,
# bugprone-empty-catch,
# bugprone-fold-init-type,
# bugprone-forward-declaration-namespace,
# bugprone-inaccurate-erase,
# bugprone-inc-dec-in-conditions,
# bugprone-incorrect-enable-if,
# bugprone-incorrect-roundings,
# bugprone-infinite-loop,
# bugprone-integer-division,
# bugprone-lambda-function-name,
# bugprone-macro-parentheses,
# bugprone-macro-repeated-side-effects,
# bugprone-misplaced-operator-in-strlen-in-alloc,
# bugprone-misplaced-pointer-arithmetic-in-alloc,
# bugprone-misplaced-widening-cast,
# bugprone-move-forwarding-reference,
# bugprone-multi-level-implicit-pointer-conversion,
# bugprone-multiple-new-in-one-expression,
# bugprone-multiple-statement-macro,
# bugprone-no-escape,
# bugprone-non-zero-enum-to-bool-conversion,
# bugprone-optional-value-conversion,
# bugprone-parent-virtual-call,
# bugprone-pointer-arithmetic-on-polymorphic-object,
# bugprone-posix-return,
# bugprone-redundant-branch-condition,
# bugprone-reserved-identifier,
# bugprone-return-const-ref-from-parameter,
# bugprone-shared-ptr-array-mismatch,
# bugprone-signal-handler,
# bugprone-signed-char-misuse,
# bugprone-sizeof-container,
# bugprone-sizeof-expression,
# bugprone-spuriously-wake-up-functions,
# bugprone-standalone-empty,
# bugprone-string-constructor,
# bugprone-string-integer-assignment,
# bugprone-string-literal-with-embedded-nul,
# bugprone-stringview-nullptr,
# bugprone-suspicious-enum-usage,
# bugprone-suspicious-include,
# bugprone-suspicious-memory-comparison,
# bugprone-suspicious-memset-usage,
# bugprone-suspicious-missing-comma,
# bugprone-suspicious-realloc-usage,
# bugprone-suspicious-semicolon,
# bugprone-suspicious-string-compare,
# bugprone-suspicious-stringview-data-usage,
# bugprone-swapped-arguments,
# bugprone-switch-missing-default-case,
# bugprone-terminating-continue,
# bugprone-throw-keyword-missing,
# bugprone-too-small-loop-variable,
# bugprone-undefined-memory-manipulation,
# bugprone-undelegated-constructor,
# bugprone-unhandled-exception-at-new,
# bugprone-unhandled-self-assignment,
# bugprone-unique-ptr-array-mismatch,
# bugprone-unsafe-functions,
# bugprone-unused-local-non-trivial-variable,
# bugprone-unused-raii,
# bugprone-unused-return-value,
# bugprone-use-after-move,
# bugprone-virtual-near-miss,
# cppcoreguidelines-init-variables,
# cppcoreguidelines-misleading-capture-default-by-value,
# cppcoreguidelines-no-suspend-with-lock,
# cppcoreguidelines-pro-type-member-init,
# cppcoreguidelines-pro-type-static-cast-downcast,
# cppcoreguidelines-rvalue-reference-param-not-moved,
# cppcoreguidelines-use-default-member-init,
# cppcoreguidelines-virtual-class-destructor,
# hicpp-ignored-remove-result,
# llvm-namespace-comment,
# misc-const-correctness,
# misc-definitions-in-headers,
# misc-header-include-cycle,
# misc-include-cleaner,
# misc-misplaced-const,
# misc-redundant-expression,
# misc-static-assert,
# misc-throw-by-value-catch-by-reference,
# misc-unused-alias-decls,
# misc-unused-using-decls,
# modernize-concat-nested-namespaces,
# modernize-deprecated-headers,
# modernize-make-shared,
# modernize-make-unique,
# modernize-pass-by-value,
# modernize-type-traits,
# modernize-use-designated-initializers,
# modernize-use-emplace,
# modernize-use-equals-default,
# modernize-use-equals-delete,
# modernize-use-override,
# modernize-use-ranges,
# modernize-use-starts-ends-with,
# modernize-use-std-numbers,
# modernize-use-using,
# performance-faster-string-find,
# performance-for-range-copy,
# performance-implicit-conversion-in-loop,
# performance-inefficient-vector-operation,
# performance-move-const-arg,
# performance-move-constructor-init,
# performance-no-automatic-move,
# performance-trivially-destructible,
# readability-avoid-nested-conditional-operator,
# readability-avoid-return-with-void-value,
# readability-braces-around-statements,
# readability-const-return-type,
# readability-container-contains,
# readability-container-size-empty,
# readability-convert-member-functions-to-static,
# readability-duplicate-include,
# readability-else-after-return,
# readability-enum-initial-value,
# readability-implicit-bool-conversion,
# readability-inconsistent-declaration-parameter-name,
# readability-identifier-naming,
# readability-make-member-function-const,
# readability-math-missing-parentheses,
# readability-misleading-indentation,
# readability-non-const-parameter,
# readability-redundant-casting,
# readability-redundant-declaration,
# readability-redundant-inline-specifier,
# readability-redundant-member-init,
# readability-redundant-string-init,
# readability-reference-to-constructed-temporary,
# readability-simplify-boolean-expr,
# readability-static-accessed-through-instance,
# readability-static-definition-in-anonymous-namespace,
# readability-suspicious-call-argument,
# readability-use-std-min-max
#
# CheckOptions:
# readability-braces-around-statements.ShortStatementLines: 2
# readability-identifier-naming.MacroDefinitionCase: UPPER_CASE
# readability-identifier-naming.ClassCase: CamelCase
# readability-identifier-naming.StructCase: CamelCase
# readability-identifier-naming.UnionCase: CamelCase
# readability-identifier-naming.EnumCase: CamelCase
# readability-identifier-naming.EnumConstantCase: CamelCase
# readability-identifier-naming.ScopedEnumConstantCase: CamelCase
# readability-identifier-naming.GlobalConstantCase: UPPER_CASE
# readability-identifier-naming.GlobalConstantPrefix: "k"
# readability-identifier-naming.GlobalVariableCase: CamelCase
# readability-identifier-naming.GlobalVariablePrefix: "g"
# readability-identifier-naming.ConstexprFunctionCase: camelBack
# readability-identifier-naming.ConstexprMethodCase: camelBack
# readability-identifier-naming.ClassMethodCase: camelBack
# readability-identifier-naming.ClassMemberCase: camelBack
# readability-identifier-naming.ClassConstantCase: UPPER_CASE
# readability-identifier-naming.ClassConstantPrefix: "k"
# readability-identifier-naming.StaticConstantCase: UPPER_CASE
# readability-identifier-naming.StaticConstantPrefix: "k"
# readability-identifier-naming.StaticVariableCase: UPPER_CASE
# readability-identifier-naming.StaticVariablePrefix: "k"
# readability-identifier-naming.ConstexprVariableCase: UPPER_CASE
# readability-identifier-naming.ConstexprVariablePrefix: "k"
# readability-identifier-naming.LocalConstantCase: camelBack
# readability-identifier-naming.LocalVariableCase: camelBack
# readability-identifier-naming.TemplateParameterCase: CamelCase
# readability-identifier-naming.ParameterCase: camelBack
# readability-identifier-naming.FunctionCase: camelBack
# readability-identifier-naming.MemberCase: camelBack
# readability-identifier-naming.PrivateMemberSuffix: _
# readability-identifier-naming.ProtectedMemberSuffix: _
# readability-identifier-naming.PublicMemberSuffix: ""
# readability-identifier-naming.FunctionIgnoredRegexp: ".*tag_invoke.*"
# bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
# bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
# misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h;__chrono/.*;fmt/chrono.h;boost/uuid/uuid_hash.hpp'
#
# HeaderFilterRegex: '^.*/(src|tests)/.*\.(h|hpp)$'
WarningsAsErrors: "*"


@@ -29,7 +29,7 @@ format:
disable: false
_help_line_width:
- How wide to allow formatted cmake files
line_width: 120
line_width: 100
_help_tab_size:
- How many spaces to tab for indent
tab_size: 4

8 .github/CODEOWNERS vendored

@@ -1,8 +0,0 @@
# Allow anyone to review any change by default.
*
# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers

56 .github/dependabot.yml vendored Normal file

@@ -0,0 +1,56 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/build-deps/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/generate-version/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/print-env/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/setup-conan/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop


@@ -4,14 +4,11 @@ Loop: test.jtx test.toplevel
Loop: test.jtx test.unit_test
test.unit_test == test.jtx
Loop: xrpld.app xrpld.core
xrpld.app > xrpld.core
Loop: xrpld.app xrpld.overlay
xrpld.overlay > xrpld.app
xrpld.overlay ~= xrpld.app
Loop: xrpld.app xrpld.peerfinder
xrpld.peerfinder ~= xrpld.app
xrpld.peerfinder == xrpld.app
Loop: xrpld.app xrpld.rpc
xrpld.rpc > xrpld.app


@@ -1,4 +1,6 @@
libxrpl.basics > xrpl.basics
libxrpl.conditions > xrpl.basics
libxrpl.conditions > xrpl.conditions
libxrpl.core > xrpl.basics
libxrpl.core > xrpl.core
libxrpl.crypto > xrpl.basics
@@ -17,16 +19,27 @@ libxrpl.nodestore > xrpl.protocol
libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol
libxrpl.rdb > xrpl.basics
libxrpl.rdb > xrpl.rdb
libxrpl.resource > xrpl.basics
libxrpl.resource > xrpl.json
libxrpl.resource > xrpl.resource
libxrpl.server > xrpl.basics
libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.rdb
libxrpl.server > xrpl.server
libxrpl.shamap > xrpl.basics
libxrpl.shamap > xrpl.protocol
libxrpl.shamap > xrpl.shamap
libxrpl.tx > xrpl.basics
libxrpl.tx > xrpl.conditions
libxrpl.tx > xrpl.core
libxrpl.tx > xrpl.json
libxrpl.tx > xrpl.ledger
libxrpl.tx > xrpl.protocol
libxrpl.tx > xrpl.server
libxrpl.tx > xrpl.tx
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
@@ -41,7 +54,10 @@ test.app > xrpl.json
test.app > xrpl.ledger
test.app > xrpl.nodestore
test.app > xrpl.protocol
test.app > xrpl.rdb
test.app > xrpl.resource
test.app > xrpl.server
test.app > xrpl.tx
test.basics > test.jtx
test.basics > test.unit_test
test.basics > xrpl.basics
@@ -51,7 +67,7 @@ test.basics > xrpl.json
test.basics > xrpl.protocol
test.beast > xrpl.basics
test.conditions > xrpl.basics
test.conditions > xrpld.conditions
test.conditions > xrpl.conditions
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
@@ -60,6 +76,7 @@ test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpl.json
test.consensus > xrpl.ledger
test.consensus > xrpl.tx
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
@@ -67,6 +84,7 @@ test.core > xrpl.basics
test.core > xrpl.core
test.core > xrpld.core
test.core > xrpl.json
test.core > xrpl.rdb
test.core > xrpl.server
test.csf > xrpl.basics
test.csf > xrpld.consensus
@@ -75,6 +93,7 @@ test.csf > xrpl.protocol
test.json > test.jtx
test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpl.core
test.jtx > xrpld.app
test.jtx > xrpld.core
test.jtx > xrpld.rpc
@@ -84,6 +103,7 @@ test.jtx > xrpl.net
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
test.jtx > xrpl.server
test.jtx > xrpl.tx
test.ledger > test.jtx
test.ledger > test.toplevel
test.ledger > xrpl.basics
@@ -95,8 +115,8 @@ test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.nodestore > xrpl.basics
test.nodestore > xrpld.core
test.nodestore > xrpl.nodestore
test.nodestore > xrpl.rdb
test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test
@@ -129,8 +149,11 @@ test.rpc > xrpld.core
test.rpc > xrpld.overlay
test.rpc > xrpld.rpc
test.rpc > xrpl.json
test.rpc > xrpl.ledger
test.rpc > xrpl.protocol
test.rpc > xrpl.resource
test.rpc > xrpl.server
test.rpc > xrpl.tx
test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test
@@ -151,39 +174,57 @@ test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.net
xrpl.conditions > xrpl.basics
xrpl.conditions > xrpl.protocol
xrpl.core > xrpl.basics
xrpl.core > xrpl.json
xrpl.core > xrpl.ledger
xrpl.core > xrpl.protocol
xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics
xrpl.ledger > xrpl.protocol
xrpl.ledger > xrpl.server
xrpl.ledger > xrpl.shamap
xrpl.net > xrpl.basics
xrpl.nodestore > xrpl.basics
xrpl.nodestore > xrpl.protocol
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
xrpl.rdb > xrpl.basics
xrpl.rdb > xrpl.core
xrpl.rdb > xrpl.protocol
xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol
xrpl.server > xrpl.basics
xrpl.server > xrpl.core
xrpl.server > xrpl.json
xrpl.server > xrpl.protocol
xrpl.server > xrpl.rdb
xrpl.server > xrpl.resource
xrpl.server > xrpl.shamap
xrpl.shamap > xrpl.basics
xrpl.shamap > xrpl.nodestore
xrpl.shamap > xrpl.protocol
xrpl.tx > xrpl.basics
xrpl.tx > xrpl.core
xrpl.tx > xrpl.ledger
xrpl.tx > xrpl.protocol
xrpld.app > test.unit_test
xrpld.app > xrpl.basics
xrpld.app > xrpl.core
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus
xrpld.app > xrpld.core
xrpld.app > xrpl.json
xrpld.app > xrpl.ledger
xrpld.app > xrpl.net
xrpld.app > xrpl.nodestore
xrpld.app > xrpl.protocol
xrpld.app > xrpl.rdb
xrpld.app > xrpl.resource
xrpld.app > xrpl.server
xrpld.app > xrpl.shamap
xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.app > xrpl.tx
xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.protocol
@@ -192,17 +233,21 @@ xrpld.core > xrpl.core
xrpld.core > xrpl.json
xrpld.core > xrpl.net
xrpld.core > xrpl.protocol
xrpld.core > xrpl.rdb
xrpld.overlay > xrpl.basics
xrpld.overlay > xrpl.core
xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.rdb
xrpld.overlay > xrpl.resource
xrpld.overlay > xrpl.server
xrpld.overlay > xrpl.tx
xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol
xrpld.peerfinder > xrpl.rdb
xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.core
xrpld.perflog > xrpld.rpc
@@ -215,6 +260,8 @@ xrpld.rpc > xrpl.ledger
xrpld.rpc > xrpl.net
xrpld.rpc > xrpl.nodestore
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.rdb
xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server
xrpld.rpc > xrpl.tx
xrpld.shamap > xrpl.shamap

30 .github/scripts/rename/include.sh vendored Executable file

@@ -0,0 +1,30 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# This script checks whether there are no new include guards introduced by a new
# PR, as header files should use "#pragma once" instead. The script assumes any
# include guards will use "XRPL_" as prefix.
# Usage: .github/scripts/rename/include.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" \) | while read -r FILE; do
echo "Processing file: ${FILE}"
if grep -q "#ifndef XRPL_" "${FILE}"; then
echo "Please replace all include guards by #pragma once."
exit 1
fi
done
echo "Checking complete."


@@ -51,22 +51,20 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# Only generate a subset of configurations in PRs.
if not all:
# Debian:
# - Bookworm using GCC 13: Release and Unity on linux/amd64, set
# the reference fee to 500.
# - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
# code coverage (which will be done below).
# - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
# enable voidstar.
# - Bookworm using Clang 17: Release and no Unity on linux/amd64,
# set the reference fee to 1000.
# - Bookworm using Clang 20: Debug and Unity on linux/amd64.
# - Bookworm using GCC 13: Release on linux/amd64, set the reference
# fee to 500.
# - Bookworm using GCC 15: Debug on linux/amd64, enable code
# coverage (which will be done below).
# - Bookworm using Clang 16: Debug on linux/arm64, enable voidstar.
# - Bookworm using Clang 17: Release on linux/amd64, set the
# reference fee to 1000.
# - Bookworm using Clang 20: Debug on linux/amd64.
if os["distro_name"] == "debian":
skip = True
if os["distro_version"] == "bookworm":
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13"
and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}"
@@ -74,14 +72,12 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/arm64"
):
cmake_args = f"-Dvoidstar=ON {cmake_args}"
@@ -89,7 +85,6 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17"
and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}"
@@ -97,7 +92,6 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
and build_type == "Debug"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
@@ -105,15 +99,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue
# RHEL:
# - 9 using GCC 12: Debug and Unity on linux/amd64.
# - 10 using Clang: Release and no Unity on linux/amd64.
# - 9 using GCC 12: Debug on linux/amd64.
# - 10 using Clang: Release on linux/amd64.
if os["distro_name"] == "rhel":
skip = True
if os["distro_version"] == "9":
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
@@ -121,7 +114,6 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any"
and build_type == "Release"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
@@ -129,17 +121,16 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue
# Ubuntu:
# - Jammy using GCC 12: Debug and no Unity on linux/arm64.
# - Noble using GCC 14: Release and Unity on linux/amd64.
# - Noble using Clang 18: Debug and no Unity on linux/amd64.
# - Noble using Clang 19: Release and Unity on linux/arm64.
# - Jammy using GCC 12: Debug on linux/arm64.
# - Noble using GCC 14: Release on linux/amd64.
# - Noble using Clang 18: Debug on linux/amd64.
# - Noble using Clang 19: Release on linux/arm64.
if os["distro_name"] == "ubuntu":
skip = True
if os["distro_version"] == "jammy":
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/arm64"
):
skip = False
@@ -147,21 +138,18 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14"
and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19"
and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/arm64"
):
skip = False
@@ -169,20 +157,16 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue
# MacOS:
# - Debug and no Unity on macos/arm64.
# - Debug on macos/arm64.
if os["distro_name"] == "macos" and not (
build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "macos/arm64"
build_type == "Debug" and architecture["platform"] == "macos/arm64"
):
continue
# Windows:
# - Release and Unity on windows/amd64.
# - Release on windows/amd64.
if os["distro_name"] == "windows" and not (
build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "windows/amd64"
build_type == "Release" and architecture["platform"] == "windows/amd64"
):
continue
@@ -209,18 +193,28 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
):
continue
# Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
# Unity on linux/amd64
# Enable code coverage for Debian Bookworm using GCC 15 in Debug on
# linux/amd64
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
f"{os['distro_name']}-{os['distro_version']}" == "debian-bookworm"
and f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}"
cmake_args = f"{cmake_args} -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0"
# Enable unity build for Ubuntu Jammy using GCC 12 in Debug on
# linux/amd64.
if (
f"{os['distro_name']}-{os['distro_version']}" == "ubuntu-jammy"
and f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug"
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"{cmake_args} -Dunity=ON"
# Generate a unique name for the configuration, e.g. macos-arm64-debug
# or debian-bookworm-gcc-12-amd64-release-unity.
# or debian-bookworm-gcc-12-amd64-release.
config_name = os["distro_name"]
if (n := os["distro_version"]) != "":
config_name += f"-{n}"


@@ -17,13 +17,196 @@
"compiler_version": "12",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "15",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "16",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "17",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "18",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "19",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "20",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "trixie",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "trixie",
"compiler_name": "gcc",
"compiler_version": "15",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "trixie",
"compiler_name": "clang",
"compiler_version": "20",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "trixie",
"compiler_name": "clang",
"compiler_version": "21",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "8",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "8",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "12"
"compiler_version": "12",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "10",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "10",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "jammy",
"compiler_name": "gcc",
"compiler_version": "12",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "16",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "17",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "18",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "19",
"image_sha": "ab4d1f0"
}
],
"build_type": ["Debug", "Release"],
"cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
"cmake_args": [""]
}


@@ -15,8 +15,5 @@
}
],
"build_type": ["Debug", "Release"],
"cmake_args": [
"-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
"-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
]
"cmake_args": ["-DCMAKE_POLICY_VERSION_MINIMUM=3.5"]
}


@@ -15,5 +15,5 @@
}
],
"build_type": ["Debug", "Release"],
"cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
"cmake_args": [""]
}


@@ -1,14 +1,11 @@
# This workflow runs all workflows to check, build, package and test the project on
# various Linux flavors, as well as on macOS and Windows, on every push to a
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as on MacOS and Windows, on every push to a
# user branch. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label. For commits to PRs that target a release branch,
# it also uploads the libxrpl recipe to the Conan remote.
name: PR
on:
push:
branches:
- legleux/build
merge_group:
types:
- checks_requested
@@ -68,18 +65,17 @@ jobs:
.github/workflows/reusable-build.yml
.github/workflows/reusable-build-test-config.yml
.github/workflows/reusable-build-test.yml
.github/workflows/reusable-build-pkg.yml
.github/workflows/reusable-pkg.yml
.github/workflows/reusable-package.yml
.github/workflows/reusable-clang-tidy.yml
.github/workflows/reusable-clang-tidy-files.yml
.github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml
.github/workflows/reusable-upload-recipe.yml
.clang-tidy
.codecov.yml
cmake/**
conan/**
external/**
include/**
pkgs/**
src/**
tests/**
CMakeLists.txt
@@ -104,57 +100,80 @@ jobs:
outputs:
go: ${{ steps.go.outputs.go == 'true' }}
# check-levelization:
# needs: should-run
# if: ${{ needs.should-run.outputs.go == 'true' }}
# uses: ./.github/workflows/reusable-check-levelization.yml
# build-test:
# needs: should-run
# if: ${{ needs.should-run.outputs.go == 'true' }}
# uses: ./.github/workflows/reusable-build-test.yml
# strategy:
# matrix:
# os: [linux, macos, windows]
# with:
# os: ${{ matrix.os }}
# secrets:
# CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
build-package:
name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
check-levelization:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-build-pkg.yml
secrets: inherit
uses: ./.github/workflows/reusable-check-levelization.yml
check-rename:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-rename.yml
clang-tidy:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-clang-tidy.yml
permissions:
issues: write
contents: read
with:
check_only_changed: true
create_issue_on_failure: false
build-test:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-build-test.yml
strategy:
fail-fast: false
matrix:
# pkg_type: [rpm]
pkg_type: [deb, rpm]
arch: [amd64]
# arch: [amd64, arm64]
os: [linux, macos, windows]
with:
pkg_type: ${{ matrix.pkg_type }}
arch: ${{ matrix.arch }}
# Enable ccache only for events targeting the XRPLF repository, since
# other accounts will not have access to our remote cache storage.
ccache_enabled: ${{ github.repository_owner == 'XRPLF' }}
os: ${{ matrix.os }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
# notify-clio:
# needs:
# - should-run
# - build-test
# if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
# uses: ./.github/workflows/reusable-notify-clio.yml
# secrets:
# clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
# conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
# conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
upload-recipe:
needs:
- should-run
- build-test
# Only run when committing to a PR that targets a release branch in the
# XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' && needs.should-run.outputs.go == 'true' && startsWith(github.ref, 'refs/heads/release') }}
uses: ./.github/workflows/reusable-upload-recipe.yml
secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
notify-clio:
needs: upload-recipe
runs-on: ubuntu-latest
steps:
# Notify the Clio repository about the newly proposed release version, so
# it can be checked for compatibility before the release is actually made.
- name: Notify Clio
env:
GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[ref]=${{ needs.upload-recipe.outputs.recipe_ref }}" \
-F "client_payload[pr_url]=${PR_URL}"
passed:
if: failure() || cancelled()
needs:
# - build-test
# - check-levelization
- build-package
- check-levelization
- check-rename
- clang-tidy
- build-test
- upload-recipe
- notify-clio
runs-on: ubuntu-latest
steps:
- name: Fail


@@ -1,97 +0,0 @@
# This workflow runs all workflows to build and test the code on various Linux
# flavors, as well as on MacOS and Windows, on a scheduled basis, on merge into
# the 'develop' or 'release*' branches, or when requested manually. Upon pushes
# to the develop branch it also uploads the libxrpl recipe to the Conan remote.
name: Trigger
on:
push:
branches:
- legleux/linux_packages
- develop
- release
- master
paths:
# These paths are unique to `on-trigger.yml`.
- ".github/workflows/on-trigger.yml"
# Keep the paths below in sync with those in `on-pr.yml`.
- ".github/actions/build-deps/**"
- ".github/actions/build-test/**"
- ".github/actions/generate-version/**"
- ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**"
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-build-pkg.yml"
- ".github/workflows/reusable-pkg.yml"
- ".github/workflows/reusable-package.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".github/workflows/reusable-upload-recipe.yml"
- ".codecov.yml"
- "cmake/**"
- "conan/**"
- "external/**"
- "include/**"
- "pkgs/**"
- "src/**"
- "tests/**"
- "CMakeLists.txt"
- "conanfile.py"
- "conan.lock"
# Run at 06:32 UTC on every day of the week from Monday through Friday. This
# will force all dependencies to be rebuilt, which is useful to verify that
# all dependencies can be built successfully. Only the dependencies that
# are actually missing from the remote will be uploaded.
schedule:
- cron: "32 6 * * 1-5"
# Run when manually triggered via the GitHub UI or API.
workflow_dispatch:
concurrency:
# When a PR is merged into the develop branch it will be assigned a unique
# group identifier, so execution will continue even if another PR is merged
# while it is still running. In all other cases the group identifier is shared
# per branch, so that any in-progress runs are cancelled when a new commit is
# pushed.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
# check-missing-commits:
# if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
# uses: ./.github/workflows/reusable-check-missing-commits.yml
# build-test:
# uses: ./.github/workflows/reusable-build-test.yml
# strategy:
# matrix:
# os: [linux, macos, windows]
# with:
# os: ${{ matrix.os }}
# strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
# secrets:
# CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
build-package:
name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
uses: ./.github/workflows/reusable-build-pkg.yml
secrets: inherit
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
# pkg_type: [rpm]
pkg_type: [deb, rpm]
arch: [amd64]
# arch: [amd64, arm64]
with:
pkg_type: ${{ matrix.pkg_type }}
arch: ${{ matrix.arch }}


@@ -9,7 +9,6 @@ on:
branches:
- "develop"
- "release*"
- "linux_packages_squashed"
paths:
# These paths are unique to `on-trigger.yml`.
- ".github/workflows/on-trigger.yml"
@@ -23,18 +22,17 @@ on:
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-build-pkg.yml"
- ".github/workflows/reusable-pkg.yml"
- ".github/workflows/reusable-package.yml"
- ".github/workflows/reusable-clang-tidy.yml"
- ".github/workflows/reusable-clang-tidy-files.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".github/workflows/reusable-upload-recipe.yml"
- ".clang-tidy"
- ".codecov.yml"
- "cmake/**"
- "conan/**"
- "external/**"
- "include/**"
- "pkgs/**"
- "src/**"
- "tests/**"
- "CMakeLists.txt"
@@ -65,6 +63,15 @@ defaults:
shell: bash
jobs:
clang-tidy:
uses: ./.github/workflows/reusable-clang-tidy.yml
permissions:
issues: write
contents: read
with:
check_only_changed: false
create_issue_on_failure: ${{ github.event_name == 'schedule' }}
build-test:
uses: ./.github/workflows/reusable-build-test.yml
strategy:
@@ -83,21 +90,6 @@ jobs:
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
build-package:
name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
uses: ./.github/workflows/reusable-build-pkg.yml
secrets: inherit
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
# pkg_type: [rpm]
pkg_type: [deb, rpm]
arch: [amd64]
# arch: [amd64, arm64]
with:
pkg_type: ${{ matrix.pkg_type }}
arch: ${{ matrix.arch }}
upload-recipe:
needs: build-test
# Only run when pushing to the develop branch in the XRPLF repository.


@@ -1,34 +0,0 @@
name: Test rippled
on:
workflow_call:
inputs:
pkg_type:
description: "Whether to run unit tests"
required: true
type: boolean
arch:
description: Runner to run the job on as a JSON string
required: true
type: string
jobs:
test:
name: Test ${{ inputs.pkg_type }}-${{ inputs.arch }}
strategy:
fail-fast: false
matrix:
include:
- { pkg: rpm, distro: "rocky:9" }
- { pkg: deb, distro: "ubuntu:jammy" }
- { pkg: deb, distro: "debian:trixie" }
runs-on: ubuntu-latest
container: ${{ matrix.distro }}
steps:
- name: run unittests
run: |
ls -lh
# - name: Download rippled artifact
# uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
# with:
# name: rippled-${{ inputs.config_name }}


@@ -1,148 +0,0 @@
on:
workflow_call:
inputs:
pkg_type:
required: false
type: string
arch:
required: false
type: string
# secrets:
# GPG_KEY_B64:
# description: "The gpg key to sign packages."
# required: true
# GPG_KEY_PASS_B64:
# description: "The gpg key passphrase."
# required: true
defaults:
run:
shell: bash
jobs:
build:
name: Build ${{ inputs.pkg_type }} ${{ inputs.arch }} package
runs-on: heavy${{ inputs.arch == 'arm64' && '-arm64' || '' }}
container: ghcr.io/xrplf/ci/${{ inputs.pkg_type == 'rpm' && 'rhel-9' || 'ubuntu-jammy' }}:gcc-12
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Build packages
run: |
./pkgs/build.sh
cat build_vars >> $GITHUB_STEP_SUMMARY
- uses: actions/upload-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
path: |
*.deb
*.ddeb
if-no-files-found: error
if: inputs.pkg_type == 'deb'
- uses: actions/upload-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
path: "*${{ inputs.arch }}.${{ inputs.pkg_type }}"
if-no-files-found: error
if: inputs.pkg_type == 'rpm'
test:
name: Test ${{ inputs.pkg_type }} ${{ inputs.arch }} package
needs: build
runs-on: heavy${{ inputs.arch == 'arm64' && '-arm64' || '' }}
container: ghcr.io/xrplf/ci/${{ inputs.pkg_type == 'rpm' && 'rhel-9' || 'ubuntu-jammy' }}:gcc-12
steps:
- uses: actions/download-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
- name: Running tests
run: echo "Running tests..."
sign:
name: Sign ${{ inputs.pkg_type }} ${{ inputs.arch }} package
needs: build
runs-on: ubuntu-latest
container: ghcr.io/astral-sh/uv:python3.13-bookworm-slim
steps:
- name: Install gpg & rpm
run: apt-get update && apt-get install -y gpg rpm
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: actions/download-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
- name: Sign
env:
PYTHONUNBUFFERED: 1
GPG_KEY_B64: ${{ secrets.GPG_KEY_B64 }}
GPG_KEY_PASS_B64: ${{ secrets.GPG_KEY_PASS_B64 }}
run: |
if [ "${{ inputs.pkg_type }}" = "rpm" ]; then
for i in $(find . -maxdepth 1 -type f -name "rippled-[0-9]*.rpm"); do
echo "found $i"
./pkgs/sign_packages.py "$i"
done
elif [ "${{ inputs.pkg_type }}" = "deb" ]; then
for i in $(find . -maxdepth 1 -type f -name "rippled_*.deb"); do
echo "found $i"
./pkgs/sign_packages.py "$i"
done
fi
- uses: actions/upload-artifact@v4
with:
name: signed-rippled-${{ inputs.pkg_type }}-${{ inputs.arch }}
path: |
*.deb
*.ddeb
if-no-files-found: error
if: inputs.pkg_type == 'deb'
- uses: actions/upload-artifact@v4
with:
name: signed-rippled-${{ inputs.pkg_type }}-${{ inputs.arch }}
path: "*${{ inputs.arch }}.${{ inputs.pkg_type }}"
if-no-files-found: error
if: inputs.pkg_type == 'rpm'
docker:
name: Build Docker image
if: inputs.pkg_type == 'deb'
needs: build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: actions/download-artifact@v4
with:
name: deb-${{ inputs.arch }}
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: docker/metadata-action@v5
id: meta
with:
images: ghcr.io/${{ github.repository_owner }}/rippled
tags: |
type=ref,event=branch
type=ref,event=tag
type=sha
- uses: docker/build-push-action@v6
with:
context: .
file: pkgs/docker/Dockerfile
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}


@@ -31,6 +31,8 @@ jobs:
run: .github/scripts/rename/namespace.sh .
- name: Check config name
run: .github/scripts/rename/config.sh .
- name: Check include guards
run: .github/scripts/rename/include.sh .
- name: Check for differences
env:
MESSAGE: |


@@ -0,0 +1,162 @@
name: Run clang-tidy on files
on:
workflow_call:
inputs:
files:
description: "List of files to check (empty means check all files)"
type: string
default: ""
create_issue_on_failure:
description: "Whether to create an issue if the check failed"
type: boolean
default: false
defaults:
run:
shell: bash
env:
# Conan installs the generators in the build/generators directory, see the
# layout() method in conanfile.py. We then run CMake from the build directory.
BUILD_DIR: build
BUILD_TYPE: Release
jobs:
run-clang-tidy:
name: Run clang tidy
runs-on: ["self-hosted", "Linux", "X64", "heavy"]
container: "ghcr.io/xrplf/ci/debian-trixie:clang-21-sha-53033a2"
permissions:
issues: write
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d
with:
enable_ccache: false
- name: Print build environment
uses: ./.github/actions/print-env
- name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf
id: nproc
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ env.BUILD_TYPE }}
log_verbosity: verbose
- name: Configure CMake
working-directory: ${{ env.BUILD_DIR }}
run: |
cmake \
-G 'Ninja' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
-Dtests=ON \
-Dwerr=ON \
-Dxrpld=ON \
..
# clang-tidy needs headers generated from proto files
- name: Build libxrpl.libpb
working-directory: ${{ env.BUILD_DIR }}
run: |
ninja -j ${{ steps.nproc.outputs.nproc }} xrpl.libpb
- name: Run clang tidy
id: run_clang_tidy
continue-on-error: true
env:
FILES: ${{ inputs.files }}
run: |
run-clang-tidy -j ${{ steps.nproc.outputs.nproc }} -p "$BUILD_DIR" $FILES 2>&1 | tee clang-tidy-output.txt
- name: Upload clang-tidy output
if: steps.run_clang_tidy.outcome != 'success'
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: clang-tidy-results
path: clang-tidy-output.txt
retention-days: 30
- name: Create an issue
if: steps.run_clang_tidy.outcome != 'success' && inputs.create_issue_on_failure
id: create_issue
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
# Prepare issue body with clang-tidy output
cat > issue.md <<EOF
## Clang-tidy Check Failed
**Workflow:** ${{ github.workflow }}
**Run ID:** ${{ github.run_id }}
**Commit:** ${{ github.sha }}
**Branch/Ref:** ${{ github.ref }}
**Triggered by:** ${{ github.actor }}
### Clang-tidy Output:
\`\`\`
EOF
# Append clang-tidy output (filter for errors and warnings)
if [ -f clang-tidy-output.txt ]; then
# Extract lines containing 'error:', 'warning:', or 'note:'
grep -E '(error:|warning:|note:)' clang-tidy-output.txt > filtered-output.txt || true
# If filtered output is empty, use original (might be a different error format)
if [ ! -s filtered-output.txt ]; then
cp clang-tidy-output.txt filtered-output.txt
fi
# Truncate if too large
head -c 60000 filtered-output.txt >> issue.md
if [ "$(wc -c < filtered-output.txt)" -gt 60000 ]; then
echo "" >> issue.md
echo "... (output truncated, see artifacts for full output)" >> issue.md
fi
rm filtered-output.txt
else
echo "No output file found" >> issue.md
fi
cat >> issue.md <<EOF
\`\`\`
**Workflow run:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
---
*This issue was automatically created by the clang-tidy workflow.*
EOF
# Create the issue
gh issue create \
--label "Bug,Clang-tidy" \
--title "Clang-tidy check failed" \
--body-file ./issue.md \
> create_issue.log
created_issue="$(sed 's|.*/||' create_issue.log)"
echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
echo "Created issue #$created_issue"
rm -f create_issue.log issue.md clang-tidy-output.txt
- name: Fail the workflow if clang-tidy failed
if: steps.run_clang_tidy.outcome != 'success'
run: |
echo "Clang-tidy check failed!"
exit 1

View File

@@ -0,0 +1,47 @@
name: Clang-tidy check
on:
workflow_call:
inputs:
check_only_changed:
description: "Check only changed files in PR. If false, checks all files in the repository."
type: boolean
default: false
create_issue_on_failure:
description: "Whether to create an issue if the check failed"
type: boolean
default: false
defaults:
run:
shell: bash
jobs:
determine-files:
name: Determine files to check
if: ${{ inputs.check_only_changed }}
runs-on: ubuntu-latest
outputs:
any_changed: ${{ steps.changed_files.outputs.any_changed }}
all_changed_files: ${{ steps.changed_files.outputs.all_changed_files }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Get changed C++ files
id: changed_files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
**/*.cpp
**/*.h
**/*.ipp
separator: " "
run-clang-tidy:
needs: [determine-files]
if: ${{ always() && !cancelled() && (!inputs.check_only_changed || needs.determine-files.outputs.any_changed == 'true') }}
uses: ./.github/workflows/reusable-clang-tidy-files.yml
with:
files: ${{ inputs.check_only_changed && needs.determine-files.outputs.all_changed_files || '' }}
create_issue_on_failure: ${{ inputs.create_issue_on_failure }}

View File

@@ -1,69 +0,0 @@
name: Package rippled
on:
workflow_call:
inputs:
build_type:
description: 'The build type to use ("Debug", "Release").'
required: false
type: string
default: 'Release'
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
type: string
cmake_target:
description: "The CMake target to build."
required: false
type: string
runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: false
type: string
default: ''
config_name:
description: "The name of the configuration."
required: false
type: string
defaults:
run:
shell: bash
jobs:
build:
name: Package ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Build packages
run: |
export BUILD_TYPE=${{ inputs.build_type }}
export CMAKE_ARGS=${{ inputs.cmake_args }}
export CMAKE_TARGETS=${{ inputs.cmake_target }}
./pkgs/build.sh
{
echo "<table>"
while IFS='=' read -r k v; do
printf '<tr><td>%s</td><td align="right"><code>%s</code></td></tr>\n' "$k" "$v"
done < build_vars
echo "</table>"
} >> "$GITHUB_STEP_SUMMARY"
- uses: actions/upload-artifact@v4
with:
name: ${{ inputs.config_name }}
path: '**/*.{deb,rpm}'
if-no-files-found: error

View File

@@ -1,41 +0,0 @@
name: Package
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: false
type: string
default: ".build"
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: false
type: string
default: linux
strategy_matrix_subset:
description: 'The strategy matrix to use for generating a subset of configurations.'
required: false
type: string
default: "package"
jobs:
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
os: ${{ inputs.os }}
strategy_matrix_subset: ${{ inputs.strategy_matrix_subset }}
package:
needs:
- generate-matrix
uses: ./.github/workflows/reusable-package.yml
strategy:
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
with:
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_target: ${{ matrix.cmake_target }}
runs_on: ${{ toJSON(matrix.architecture.runner) }}
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-5dd7158', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || '' }}
config_name: ${{ matrix.config_name }}

View File

@@ -13,10 +13,6 @@ on:
required: false
type: string
default: "minimal"
strategy_matrix_subset:
description: 'The strategy matrix to use for generating a subset of configs).'
required: false
type: string
outputs:
matrix:
description: "The generated strategy matrix."
@@ -46,5 +42,4 @@ jobs:
env:
GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
GENERATE_SUBSET: ${{ inputs.strategy_matrix_subset != '' && format('--{0}', inputs.strategy_matrix_subset) || '' }}
run: ./generate.py ${{ env.GENERATE_SUBSET }} ${{ env.GENERATE_OPTION }} ${{ env.GENERATE_CONFIG }} >> "${GITHUB_OUTPUT}"
run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"

.gitignore vendored
View File

@@ -71,3 +71,6 @@ DerivedData
/.augment
/.claude
/CLAUDE.md
# clangd cache
/.cache

View File

@@ -20,7 +20,7 @@ repos:
args: [--assume-in-merge]
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
rev: 75ca4ad908dc4a99f57921f29b7e6c1521e10b26 # frozen: v21.1.8
hooks:
- id: clang-format
args: [--style=file]

View File

@@ -368,6 +368,36 @@ The workaround for this error is to add two lines to your profile:
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
```
### Set Up Ccache
To speed up repeated compilations, we recommend that you install
[ccache](https://ccache.dev), a tool that wraps your compiler so that it can
cache build objects locally.
#### Linux
You can install it using the package manager, e.g. `sudo apt install ccache`
(Ubuntu) or `sudo dnf install ccache` (RHEL).
#### macOS
You can install it using Homebrew, i.e. `brew install ccache`.
#### Windows
You can install it using Chocolatey, i.e. `choco install ccache`. If you already
have Ccache installed, then `choco upgrade ccache` will update it to the latest
version. However, if you see an error such as:
```
terminate called after throwing an instance of 'std::bad_alloc'
what(): std::bad_alloc
C:\Program Files\Microsoft Visual Studio\2022\Community\MSBuild\Microsoft\VC\v170\Microsoft.CppCommon.targets(617,5): error MSB6006: "cl.exe" exited with code 3.
```
then please install a specific version of Ccache that we know works, via: `choco
install ccache --version 4.11.3 --allow-downgrade`.
### Build and Test
1. Create a build directory and move into it.
@@ -550,10 +580,10 @@ See [Sanitizers docs](./docs/build/sanitizers.md) for more details.
| `werr` | OFF | Treat compilation warnings as errors |
| `wextra` | OFF | Enable additional compilation warnings |
[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.
[Unity builds][5] may be faster for the first build (at the cost of much more
memory) since they concatenate sources into fewer translation units. Non-unity
builds may be faster for incremental builds, and can be helpful for detecting
`#include` omissions.
## Troubleshooting

View File

@@ -17,6 +17,7 @@ project(xrpl)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
include(CompilationEnv)
@@ -38,16 +39,16 @@ include(Ccache)
# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if (Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse
HEAD OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if (gch)
set(GIT_COMMIT_HASH "${gch}")
message(STATUS gch: ${GIT_COMMIT_HASH})
add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif ()
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse
--abbrev-ref HEAD OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if (gb)
set(GIT_BRANCH "${gb}")
message(STATUS gb: ${GIT_BRANCH})
@@ -67,7 +68,8 @@ include(FetchContent)
include(ExternalProject)
include(CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
if (target)
message(FATAL_ERROR "The target option has been removed - use native cmake options to control build")
message(FATAL_ERROR "The target option has been removed - use native cmake options to control build"
)
endif ()
include(XrplSanity)
@@ -76,7 +78,8 @@ include(XrplSettings)
# this check has to remain in the top-level cmake because of the early return statement
if (packages_only)
if (NOT TARGET rpm)
message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?")
message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?"
)
endif ()
return()
endif ()
@@ -118,7 +121,8 @@ target_link_libraries(
option(rocksdb "Enable RocksDB" ON)
if (rocksdb)
find_package(RocksDB REQUIRED)
set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1)
set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS
XRPL_ROCKSDB_AVAILABLE=1)
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
endif ()

View File

@@ -219,7 +219,7 @@ coherent rather than a set of _thou shalt not_ commandments.
## Formatting
All code must conform to `clang-format` version 18,
All code must conform to `clang-format` version 21,
according to the settings in [`.clang-format`](./.clang-format),
unless the result would be unreasonably difficult to read or maintain.
To demarcate lines that should be left as-is, surround them with comments like

View File

@@ -940,23 +940,7 @@
#
# path Location to store the database
#
# Optional keys
#
# cache_size Size of cache for database records. Default is 16384.
# Setting this value to 0 will use the default value.
#
# cache_age Length of time in minutes to keep database records
# cached. Default is 5 minutes. Setting this value to
# 0 will use the default value.
#
# Note: if neither cache_size nor cache_age is
# specified, the cache for database records will not
# be created. If only one of cache_size or cache_age
# is specified, the cache will be created using the
# default value for the unspecified parameter.
#
# Note: the cache will not be created if online_delete
# is specified.
# Optional keys for NuDB and RocksDB:
#
# fast_load Boolean. If set, load the last persisted ledger
# from disk upon process start before syncing to
@@ -964,8 +948,6 @@
# if sufficient IOPS capacity is available.
# Default 0.
#
# Optional keys for NuDB or RocksDB:
#
# earliest_seq The default is 32570 to match the XRP ledger
# network's earliest allowed sequence. Alternate
# networks may set this value. Minimum value of 1.
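
For orientation, a hedged example of how these optional keys could look in a `[node_db]` stanza; the backend, path, and values are illustrative only and are not taken from this diff:

```
[node_db]
type=NuDB
path=/var/lib/rippled/db/nudb
# Optional: load the last persisted ledger from disk before syncing (see above).
fast_load=1
# Optional: alternate networks may set a different earliest allowed sequence.
earliest_seq=32570
```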

View File

@@ -43,7 +43,8 @@ set(CMAKE_VS_GLOBALS "CLToolExe=cl.exe" "CLToolPath=${CMAKE_BINARY_DIR}" "TrackF
# By default Visual Studio generators will use /Zi to capture debug information, which is not compatible with ccache, so
# tell it to use /Z7 instead.
if (MSVC)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG
CMAKE_CXX_FLAGS_RELEASE)
string(REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
endforeach ()
endif ()

View File

@@ -180,7 +180,8 @@ elseif (DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if (APPLE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program(LLVMCOV_PATH llvm-cov)
endif ()
@@ -199,8 +200,8 @@ foreach (LANG ${LANGUAGES})
if ("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif ()
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES
"(LLVM)?[Ff]lang")
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}"
MATCHES "(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif ()
endforeach ()
@@ -321,14 +322,16 @@ function (setup_target_for_coverage_gcovr)
endif ()
if ("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting..."
)
else ()
if ((Coverage_FORMAT STREQUAL "html-details") OR (Coverage_FORMAT STREQUAL "html-nested"))
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif (Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL "json-details")
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL
"json-details")
OR (Coverage_FORMAT STREQUAL "coveralls"))
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif (Coverage_FORMAT STREQUAL "txt")
@@ -452,8 +455,10 @@ function (setup_target_for_coverage_gcovr)
COMMENT "Running gcovr to produce code coverage report.")
# Show info where to find the report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD COMMAND echo
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}")
add_custom_command(
TARGET ${Coverage_NAME} POST_BUILD COMMAND echo
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
endfunction () # setup_target_for_coverage_gcovr
function (add_code_coverage_to_target name scope)
@@ -463,14 +468,10 @@ function (add_code_coverage_to_target name scope)
separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")
# Add compiler options to the target
target_compile_options(${name} ${scope} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
target_compile_options(
${name} ${scope} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
target_link_libraries(
${name}
${scope}
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS}
gcov>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS}
gcov>)
target_link_libraries(${name} ${scope} $<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS}>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS}>)
endfunction () # add_code_coverage_to_target

View File

@@ -9,8 +9,5 @@ function (xrpl_add_test name)
isolate_headers(${target} "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests/${name}" PRIVATE)
# Make sure the test isn't optimized away in unity builds
set_target_properties(${target} PROPERTIES UNITY_BUILD_MODE GROUP UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
add_test(NAME ${target} COMMAND ${target})
endfunction ()

View File

@@ -17,7 +17,8 @@ link_libraries(Xrpl::common)
if (NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE)
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
endif ()
set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE})
set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE
${CMAKE_POSITION_INDEPENDENT_CODE})
set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions(
common
@@ -37,7 +38,8 @@ if (MSVC)
# remove existing exception flag since we set it to -EHa
string(REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG
CMAKE_CXX_FLAGS_RELEASE)
# also remove dynamic runtime
string(REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")
@@ -143,20 +145,23 @@ if (voidstar)
elseif (NOT is_linux)
message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting..."
)
endif ()
endif ()
if (use_mold)
# use mold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "mold")
target_link_libraries(common INTERFACE -fuse-ld=mold)
endif ()
unset(LD_VERSION)
elseif (use_gold AND is_gcc)
# use gold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
#[=========================================================[
NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
default instead of DT_RPATH, so you might have slightly
@@ -186,7 +191,8 @@ elseif (use_gold AND is_gcc)
unset(LD_VERSION)
elseif (use_lld)
# use lld linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "LLD")
target_link_libraries(common INTERFACE -fuse-ld=lld)
endif ()

View File

@@ -14,7 +14,8 @@ target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/x
PROTOS include/xrpl/proto/xrpl.proto)
file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto PROTOS "${protos}")
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto
PROTOS "${protos}")
target_protobuf_sources(
xrpl.libpb xrpl/proto
LANGUAGE grpc
@@ -24,8 +25,9 @@ target_protobuf_sources(
GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc)
target_compile_options(
xrpl.libpb PUBLIC $<$<BOOL:${is_msvc}>:-wd4996> $<$<BOOL:${is_xcode}>: --system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec >
xrpl.libpb
PUBLIC $<$<BOOL:${is_msvc}>:-wd4996> $<$<BOOL:${is_xcode}>:
--system-header-prefix="google/protobuf" -Wno-deprecated-dynamic-exception-spec >
PRIVATE $<$<BOOL:${is_msvc}>:-wd4065> $<$<NOT:$<BOOL:${is_msvc}>>:-Wno-deprecated-declarations>)
target_link_libraries(xrpl.libpb PUBLIC protobuf::libprotobuf gRPC::grpc++)
@@ -73,7 +75,8 @@ target_link_libraries(xrpl.libxrpl.protocol PUBLIC xrpl.libxrpl.crypto xrpl.libx
# Level 05
add_module(xrpl core)
target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol)
# Level 06
add_module(xrpl resource)
@@ -81,21 +84,40 @@ target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)
# Level 07
add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol
xrpl.libxrpl.resource)
add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol xrpl.libxrpl.resource)
add_module(xrpl nodestore)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol)
add_module(xrpl shamap)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto xrpl.libxrpl.protocol
xrpl.libxrpl.nodestore)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto
xrpl.libxrpl.protocol xrpl.libxrpl.nodestore)
add_module(xrpl rdb)
target_link_libraries(xrpl.libxrpl.rdb PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.core)
add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol xrpl.libxrpl.core
xrpl.libxrpl.rdb xrpl.libxrpl.resource)
add_module(xrpl conditions)
target_link_libraries(xrpl.libxrpl.conditions PUBLIC xrpl.libxrpl.server)
add_module(xrpl ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
target_link_libraries(
xrpl.libxrpl.ledger
PUBLIC xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
xrpl.libxrpl.rdb
xrpl.libxrpl.server
xrpl.libxrpl.shamap
xrpl.libxrpl.conditions)
add_module(xrpl tx)
target_link_libraries(xrpl.libxrpl.tx PUBLIC xrpl.libxrpl.ledger)
add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
@@ -110,16 +132,19 @@ target_link_modules(
PUBLIC
basics
beast
conditions
core
crypto
json
ledger
net
nodestore
protocol
rdb
resource
server
nodestore
shamap
net
ledger)
tx)
# All headers in libxrpl are in modules.
# Uncomment this stanza if you have not yet moved new headers into a module.
@@ -160,12 +185,4 @@ if (xrpld)
# antithesis_instrumentation.h, which is not exported as INTERFACE
target_include_directories(xrpld PRIVATE ${CMAKE_SOURCE_DIR}/external/antithesis-sdk)
endif ()
# any files that don't play well with unity should be added here
if (tests)
set_source_files_properties(
# these two seem to produce conflicts in beast teardown template methods
src/test/rpc/ValidatorRPC_test.cpp src/test/ledger/Invariants_test.cpp PROPERTIES SKIP_UNITY_BUILD_INCLUSION
TRUE)
endif ()
endif ()

View File

@@ -65,8 +65,8 @@ add_custom_command(
OUTPUT "${doxygen_index_file}"
COMMAND "${CMAKE_COMMAND}" -E env "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}"
"DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" "DOXYGEN_TAGFILES=${doxygen_tagfiles}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" "DOXYGEN_DOT_PATH=${doxygen_dot_path}"
"${DOXYGEN_EXECUTABLE}" "${doxyfile}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}"
"DOXYGEN_DOT_PATH=${doxygen_dot_path}" "${DOXYGEN_EXECUTABLE}" "${doxyfile}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
DEPENDS "${dependencies}" "${tagfile}")
add_custom_target(docs DEPENDS "${doxygen_index_file}" SOURCES "${dependencies}")

View File

@@ -20,9 +20,11 @@ install(TARGETS common
xrpl.libxrpl
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.conditions
xrpl.libxrpl.core
xrpl.libxrpl.crypto
xrpl.libxrpl.json
xrpl.libxrpl.rdb
xrpl.libxrpl.ledger
xrpl.libxrpl.net
xrpl.libxrpl.nodestore
@@ -30,6 +32,7 @@ install(TARGETS common
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl.shamap
xrpl.libxrpl.tx
antithesis-sdk-cpp
EXPORT XrplExports
LIBRARY DESTINATION lib
@@ -38,11 +41,13 @@ install(TARGETS common
INCLUDES
DESTINATION include)
install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
install(EXPORT XrplExports FILE XrplTargets.cmake NAMESPACE Xrpl:: DESTINATION lib/cmake/xrpl)
include(CMakePackageConfigHelpers)
write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version} COMPATIBILITY SameMajorVersion)
write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version}
COMPATIBILITY SameMajorVersion)
if (is_root_project AND TARGET xrpld)
install(TARGETS xrpld RUNTIME DESTINATION bin)
@@ -69,5 +74,5 @@ if (is_root_project AND TARGET xrpld)
")
endif ()
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake ${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
DESTINATION lib/cmake/xrpl)
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake DESTINATION lib/cmake/xrpl)

View File

@@ -33,10 +33,13 @@ target_compile_definitions(
target_compile_options(
opts
INTERFACE $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized> $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries(opts INTERFACE $<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries(opts INTERFACE $<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
if (jemalloc)
find_package(jemalloc REQUIRED)

View File

@@ -19,7 +19,8 @@ if (NOT is_multiconfig)
endif ()
if (is_clang) # both Clang and AppleClang
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS
16.0)
message(FATAL_ERROR "This project requires clang 16 or later")
endif ()
elseif (is_gcc)
@@ -32,7 +33,8 @@ endif ()
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message(FATAL_ERROR "Builds (in-source) are not allowed in "
"${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles "
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.")
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory."
)
endif ()
if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")

View File

@@ -70,7 +70,8 @@ if (is_linux AND NOT SANITIZER)
else ()
set(TRUNCATED_LOGS_DEFAULT OFF)
endif ()
option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux." ${TRUNCATED_LOGS_DEFAULT})
option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux."
${TRUNCATED_LOGS_DEFAULT})
if (TRUNCATED_THREAD_NAME_LOGS)
add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS)
endif ()
@@ -92,11 +93,13 @@ endif ()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf "Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc "Force a local build of gRPC instead of looking for an installed version." OFF)
# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline "Prevents unit test definitions from being inserted into global table" OFF)
option(beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table" OFF)
option(single_io_service_thread "Restricts the number of threads calling io_context::run to one. \
This can be useful when debugging." OFF)
option(boost_show_deprecated "Allow boost to fail on deprecated usage. Only useful if you're trying\

View File

@@ -1,4 +1,6 @@
option(validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
option(validator_keys
"Enables building of validator-keys tool as a separate target (imported via FetchContent)"
OFF)
if (validator_keys)
git_branch(current_branch)
@@ -8,8 +10,9 @@ if (validator_keys)
endif ()
message(STATUS "Tracking ValidatorKeys branch: ${current_branch}")
FetchContent_Declare(validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}")
FetchContent_Declare(
validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}")
FetchContent_MakeAvailable(validator_keys)
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})

View File

@@ -15,11 +15,12 @@ include(isolate_headers)
function (add_module parent name)
set(target ${PROJECT_NAME}.lib${parent}.${name})
add_library(${target} OBJECT)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp")
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp")
target_sources(${target} PRIVATE ${sources})
target_include_directories(${target} PUBLIC "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>")
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/include"
"${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}" PUBLIC)
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src" "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
PRIVATE)
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src"
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}" PRIVATE)
endfunction ()

View File

@@ -39,6 +39,7 @@ if (SANITIZERS_ENABLED AND is_clang)
endif ()
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
target_compile_options(opts INTERFACE # ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
target_compile_options(
opts INTERFACE # ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif ()

View File

@@ -6,11 +6,11 @@
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1765850149.926",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1765850149.46",
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1765850147.878",
"secp256k1/0.7.0#9c4ab67bdc3860c16ea5b36aed8f74ea%1765850147.928",
"secp256k1/0.7.1#3a61e95e220062ef32c48d019e9c81f7%1770306721.686",
"rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1765850186.86",
"re2/20230301#ca3b241baec15bd31ea9187150e0b333%1765850148.103",
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1765850161.038",
"openssl/3.5.4#1b986e61b38fdfda3b40bebc1b234393%1768312656.257",
"openssl/3.5.5#05a4ac5b7323f7a329b2db1391d9941f%1769599205.414",
"nudb/2.0.9#0432758a24204da08fee953ec9ea03cb%1769436073.32",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1765850143.914",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1765842973.492",
@@ -23,7 +23,7 @@
"date/3.0.4#862e11e80030356b53c2c38599ceb32b%1765850143.772",
"c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1765850144.336",
"bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1765850143.837",
"boost/1.90.0#d5e8defe7355494953be18524a7f135b%1765955095.179",
"boost/1.90.0#d5e8defe7355494953be18524a7f135b%1769454080.269",
"abseil/20250127.0#99262a368bd01c0ccca8790dfced9719%1766517936.993"
],
"build_requires": [
@@ -31,7 +31,7 @@
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1765850165.196",
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1765850161.038",
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1765850144.707",
"msys2/cci.latest#1996656c3c98e5765b25b60ff5cf77b4%1764840888.758",
"msys2/cci.latest#eea83308ad7e9023f7318c60d5a9e6cb%1770199879.083",
"m4/1.4.19#70dc8bbb33e981d119d2acc0175cf381%1763158052.846",
"cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1765850153.937",
"cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1765850153.479",

View File

@@ -32,8 +32,8 @@ class Xrpl(ConanFile):
"grpc/1.72.0",
"libarchive/3.8.1",
"nudb/2.0.9",
"openssl/3.5.4",
"secp256k1/0.7.0",
"openssl/3.5.5",
"secp256k1/0.7.1",
"soci/4.0.3",
"zlib/1.3.1",
]

View File

@@ -1,14 +1,14 @@
ignorePaths:
- build/**
- src/libxrpl/crypto
- src/test/** # Will be removed in the future
- CMakeUserPresets.json
- Doxyfile
- docs/**/*.puml
- cmake/**
- LICENSE.md
- .clang-tidy
language: en
allowCompoundWords: true
allowCompoundWords: true # TODO (#6334)
ignoreRandomStrings: true
minWordLength: 5
dictionaries:
@@ -16,20 +16,29 @@ dictionaries:
- en_US
- en_GB
ignoreRegExpList:
- /[rs][1-9A-HJ-NP-Za-km-z]{25,34}/g # addresses and seeds
- /(XRPL|BEAST)_[A-Z_0-9]+_H_INCLUDED+/g # include guards
- /(XRPL|BEAST)_[A-Z_0-9]+_H+/g # include guards
- /\b[rs][1-9A-HJ-NP-Za-km-z]{25,34}/g # addresses and seeds
- /\bC[A-Z0-9]{15}/g # CTIDs
- /\b(XRPL|BEAST)_[A-Z_0-9]+_H_INCLUDED+/g # include guards
- /\b(XRPL|BEAST)_[A-Z_0-9]+_H+/g # include guards
- /::[a-z:_]+/g # things from other namespaces
- /lib[a-z]+/g # libraries
- /[0-9]{4}-[0-9]{2}-[0-9]{2}[,:][A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright dates
- /[0-9]{4}[,:]?\s*[A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright years
- /\blib[a-z]+/g # libraries
- /\b[0-9]{4}-[0-9]{2}-[0-9]{2}[,:][A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright dates
- /\b[0-9]{4}[,:]?\s*[A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright years
- /\[[A-Za-z0-9-]+\]\(https:\/\/github.com\/[A-Za-z0-9-]+\)/g # Github usernames
- /-[DWw][a-zA-Z0-9_-]+=/g # compile flags
- /[\['"`]-[DWw][a-zA-Z0-9_-]+['"`\]]/g # compile flags
- ABCDEFGHIJKLMNOPQRSTUVWXYZ
- ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
overrides:
- filename: "**/*_test.cpp" # all test files
ignoreRegExpList:
- /"[^"]*"/g # double-quoted strings
- /'[^']*'/g # single-quoted strings
- /`[^`]*`/g # backtick strings
suggestWords:
- xprl->xrpl
- xprld->xrpld
- unsynched->unsynced
- xprld->xrpld # cspell: disable-line not sure what this problem is....
- unsynched->unsynced # cspell: disable-line not sure what this problem is....
- synched->synced
- synch->sync
words:
@@ -51,6 +60,7 @@ words:
- Britto
- Btrfs
- canonicality
- changespq
- checkme
- choco
- chrono
@@ -106,12 +116,14 @@ words:
- inequation
- insuf
- insuff
- invasively
- iou
- ious
- isrdc
- itype
- jemalloc
- jlog
- jtnofill
- keylet
- keylets
- keyvadb
@@ -138,6 +150,7 @@ words:
- Metafuncton
- misprediction
- mptbalance
- MPTDEX
- mptflags
- mptid
- mptissuance
@@ -147,6 +160,7 @@ words:
- mptokenissuance
- mptokens
- mpts
- mtgox
- multisig
- multisign
- multisigned
@@ -174,6 +188,7 @@ words:
- perminute
- permissioned
- pointee
- populator
- preauth
- preauthorization
- preauthorize
@@ -182,6 +197,7 @@ words:
- protobuf
- protos
- ptrs
- pushd
- pyenv
- qalloc
- queuable

View File

@@ -17,8 +17,8 @@ guideline is to maintain the standards that are used in those libraries.
## Guidelines
If you want to do something contrary to these guidelines, understand
why you're doing it. Think, use common sense, and consider that this
your changes will probably need to be maintained long after you've
why you're doing it. Think, use common sense, and consider that these
changes will probably need to be maintained long after you've
moved on to other projects.
- Use white space and blank lines to guide the eye and keep your intent clear.

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_ARCHIVE_H_INCLUDED
#define XRPL_BASICS_ARCHIVE_H_INCLUDED
#pragma once
#include <boost/filesystem.hpp>
@@ -16,5 +15,3 @@ void
extractTarLz4(boost::filesystem::path const& src, boost::filesystem::path const& dst);
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_BASICCONFIG_H_INCLUDED
#define XRPL_BASICS_BASICCONFIG_H_INCLUDED
#pragma once
#include <xrpl/basics/contract.h>
@@ -85,7 +84,8 @@ public:
if (lines_.empty())
return "";
if (lines_.size() > 1)
Throw<std::runtime_error>("A legacy value must have exactly one line. Section: " + name_);
Throw<std::runtime_error>(
"A legacy value must have exactly one line. Section: " + name_);
return lines_[0];
}
@@ -269,7 +269,8 @@ public:
bool
had_trailing_comments() const
{
return std::any_of(map_.cbegin(), map_.cend(), [](auto s) { return s.second.had_trailing_comments(); });
return std::any_of(
map_.cbegin(), map_.cend(), [](auto s) { return s.second.had_trailing_comments(); });
}
protected:
@@ -369,5 +370,3 @@ get_if_exists<bool>(Section const& section, std::string const& name, bool& v)
}
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_BLOB_H_INCLUDED
#define XRPL_BASICS_BLOB_H_INCLUDED
#pragma once
#include <vector>
@@ -11,5 +10,3 @@ namespace xrpl {
using Blob = std::vector<unsigned char>;
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_BUFFER_H_INCLUDED
#define XRPL_BASICS_BUFFER_H_INCLUDED
#pragma once
#include <xrpl/basics/Slice.h>
#include <xrpl/beast/utility/instrumentation.h>
@@ -213,5 +212,3 @@ operator!=(Buffer const& lhs, Buffer const& rhs) noexcept
}
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_BYTEUTILITIES_H_INCLUDED
#define XRPL_BASICS_BYTEUTILITIES_H_INCLUDED
#pragma once
namespace xrpl {
@@ -20,5 +19,3 @@ megabytes(T value) noexcept
static_assert(kilobytes(2) == 2048, "kilobytes(2) == 2048");
static_assert(megabytes(3) == 3145728, "megabytes(3) == 3145728");
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_COMPRESSIONALGORITHMS_H_INCLUDED
#define XRPL_COMPRESSIONALGORITHMS_H_INCLUDED
#pragma once
#include <xrpl/basics/contract.h>
@@ -36,7 +35,10 @@ lz4Compress(void const* in, std::size_t inSize, BufferFactory&& bf)
auto compressed = bf(outCapacity);
auto compressedSize = LZ4_compress_default(
reinterpret_cast<char const*>(in), reinterpret_cast<char*>(compressed), inSize, outCapacity);
reinterpret_cast<char const*>(in),
reinterpret_cast<char*>(compressed),
inSize,
outCapacity);
if (compressedSize == 0)
Throw<std::runtime_error>("lz4 compress: failed");
@@ -67,8 +69,10 @@ lz4Decompress(
Throw<std::runtime_error>("lz4Decompress: integer overflow (output)");
if (LZ4_decompress_safe(
reinterpret_cast<char const*>(in), reinterpret_cast<char*>(decompressed), inSize, decompressedSize) !=
decompressedSize)
reinterpret_cast<char const*>(in),
reinterpret_cast<char*>(decompressed),
inSize,
decompressedSize) != decompressedSize)
Throw<std::runtime_error>("lz4Decompress: failed");
return decompressedSize;
@@ -84,7 +88,11 @@ lz4Decompress(
*/
template <typename InputStream>
std::size_t
lz4Decompress(InputStream& in, std::size_t inSize, std::uint8_t* decompressed, std::size_t decompressedSize)
lz4Decompress(
InputStream& in,
std::size_t inSize,
std::uint8_t* decompressed,
std::size_t decompressedSize)
{
std::vector<std::uint8_t> compressed;
std::uint8_t const* chunk = nullptr;
@@ -133,5 +141,3 @@ lz4Decompress(InputStream& in, std::size_t inSize, std::uint8_t* decompressed, s
} // namespace compression_algorithms
} // namespace xrpl
#endif // XRPL_COMPRESSIONALGORITHMS_H_INCLUDED
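
The reflowed `lz4Compress` call above takes a caller-supplied buffer factory that receives the worst-case output capacity. A minimal usage sketch with a vector-backed factory; the include path and helper name are illustrative, not part of this diff:

```cpp
#include <xrpl/basics/CompressionAlgorithms.h>  // assumed header location

#include <cstdint>
#include <vector>

// Hypothetical helper: compress a blob into a freshly sized vector.
std::vector<std::uint8_t>
compressBlob(std::vector<std::uint8_t> const& input)
{
    std::vector<std::uint8_t> out;
    auto const written = xrpl::compression_algorithms::lz4Compress(
        input.data(), input.size(), [&out](std::size_t capacity) {
            // The factory must hand back a writable buffer of at least `capacity` bytes.
            out.resize(capacity);
            return out.data();
        });
    out.resize(written);  // shrink to the actual compressed size
    return out;
}
```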

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_COUNTEDOBJECT_H_INCLUDED
#define XRPL_BASICS_COUNTEDOBJECT_H_INCLUDED
#pragma once
#include <xrpl/beast/type_name.h>
@@ -134,5 +133,3 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_DECAYINGSAMPLE_H_INCLUDED
#define XRPL_BASICS_DECAYINGSAMPLE_H_INCLUDED
#pragma once
#include <chrono>
#include <cmath>
@@ -56,7 +55,8 @@ private:
if (m_value != value_type())
{
std::size_t elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - m_when).count();
std::size_t elapsed =
std::chrono::duration_cast<std::chrono::seconds>(now - m_when).count();
// A span larger than four times the window decays the
// value to an insignificant amount so just reset it.
@@ -130,5 +130,3 @@ private:
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_EXPECTED_H_INCLUDED
#define XRPL_BASICS_EXPECTED_H_INCLUDED
#pragma once
#include <xrpl/basics/contract.h>
@@ -121,7 +120,8 @@ public:
template <typename U>
requires std::convertible_to<U, E> && (!std::is_reference_v<U>)
constexpr Expected(Unexpected<U> e) : Base(boost::outcome_v2::in_place_type_t<E>{}, std::move(e.value()))
constexpr Expected(Unexpected<U> e)
: Base(boost::outcome_v2::in_place_type_t<E>{}, std::move(e.value()))
{
}
@@ -192,7 +192,8 @@ public:
// Specialization of Expected<void, E>. Allows returning either success
// (without a value) or the reason for the failure.
template <class E>
class [[nodiscard]] Expected<void, E> : private boost::outcome_v2::result<void, E, detail::throw_policy>
class [[nodiscard]]
Expected<void, E> : private boost::outcome_v2::result<void, E, detail::throw_policy>
{
using Base = boost::outcome_v2::result<void, E, detail::throw_policy>;
@@ -229,5 +230,3 @@ public:
};
} // namespace xrpl
#endif // XRPL_BASICS_EXPECTED_H_INCLUDED
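
As the comment in this hunk notes, `Expected<void, E>` carries either success (no value) or the reason for a failure. A minimal sketch of how it can be used, assuming the API follows the `std::expected` conventions visible here (construction from `Unexpected`, `operator bool`, `error()`) and that the void specialization is default-constructible to represent success; the function names are hypothetical:

```cpp
#include <xrpl/basics/Expected.h>

#include <string>

// Hypothetical validation helper returning success or a failure reason.
xrpl::Expected<void, std::string>
checkInput(int value)
{
    if (value < 0)
        return xrpl::Unexpected("value must be non-negative");
    return {};  // default construction signals success (assumed)
}

std::string
describe(int value)
{
    auto const result = checkInput(value);
    if (!result)
        return "rejected: " + result.error();
    return "accepted";
}
```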

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_FILEUTILITIES_H_INCLUDED
#define XRPL_BASICS_FILEUTILITIES_H_INCLUDED
#pragma once
#include <boost/filesystem.hpp>
#include <boost/system/error_code.hpp>
@@ -15,8 +14,9 @@ getFileContents(
std::optional<std::size_t> maxSize = std::nullopt);
void
writeFileContents(boost::system::error_code& ec, boost::filesystem::path const& destPath, std::string const& contents);
writeFileContents(
boost::system::error_code& ec,
boost::filesystem::path const& destPath,
std::string const& contents);
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_INTRUSIVEPOINTER_H_INCLUDED
#define XRPL_BASICS_INTRUSIVEPOINTER_H_INCLUDED
#pragma once
#include <concepts>
#include <cstdint>
@@ -45,8 +44,8 @@ struct SharedIntrusiveAdoptNoIncrementTag
//
template <class T>
concept CAdoptTag =
std::is_same_v<T, SharedIntrusiveAdoptIncrementStrongTag> || std::is_same_v<T, SharedIntrusiveAdoptNoIncrementTag>;
concept CAdoptTag = std::is_same_v<T, SharedIntrusiveAdoptIncrementStrongTag> ||
std::is_same_v<T, SharedIntrusiveAdoptNoIncrementTag>;
//------------------------------------------------------------------------------
@@ -444,7 +443,8 @@ make_SharedIntrusive(Args&&... args)
auto p = new TT(std::forward<Args>(args)...);
static_assert(
noexcept(SharedIntrusive<TT>(std::declval<TT*>(), std::declval<SharedIntrusiveAdoptNoIncrementTag>())),
noexcept(SharedIntrusive<TT>(
std::declval<TT*>(), std::declval<SharedIntrusiveAdoptNoIncrementTag>())),
"SharedIntrusive constructor should not throw or this can leak "
"memory");
@@ -485,4 +485,3 @@ dynamic_pointer_cast(TT const& v)
}
} // namespace intr_ptr
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_INTRUSIVEPOINTER_IPP_INCLUDED
#define XRPL_BASICS_INTRUSIVEPOINTER_IPP_INCLUDED
#pragma once
#include <xrpl/basics/IntrusivePointer.h>
#include <xrpl/basics/IntrusiveRefCounts.h>
@@ -209,7 +208,8 @@ SharedIntrusive<T>::operator->() const noexcept
}
template <class T>
SharedIntrusive<T>::operator bool() const noexcept
SharedIntrusive<T>::
operator bool() const noexcept
{
return bool(unsafeGetRawPtr());
}
@@ -504,7 +504,8 @@ SharedWeakUnion<T>::getStrong() const
}
template <class T>
SharedWeakUnion<T>::operator bool() const noexcept
SharedWeakUnion<T>::
operator bool() const noexcept
{
return bool(get());
}
@@ -703,4 +704,3 @@ SharedWeakUnion<T>::unsafeReleaseNoStore()
}
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_INTRUSIVEREFCOUNTS_H_INCLUDED
#define XRPL_BASICS_INTRUSIVEREFCOUNTS_H_INCLUDED
#pragma once
#include <xrpl/beast/utility/instrumentation.h>
@@ -185,7 +184,8 @@ private:
/** Mask that will zero out everything except the weak count.
*/
static constexpr FieldType weakMask = (((one << WeakCountNumBits) - 1) << StrongCountNumBits) & valueMask;
static constexpr FieldType weakMask =
(((one << WeakCountNumBits) - 1) << StrongCountNumBits) & valueMask;
/** Unpack the count and tag fields from the packed atomic integer form. */
struct RefCountPair
@@ -210,8 +210,10 @@ private:
FieldType
combinedValue() const noexcept;
static constexpr CountType maxStrongValue = static_cast<CountType>((one << StrongCountNumBits) - 1);
static constexpr CountType maxWeakValue = static_cast<CountType>((one << WeakCountNumBits) - 1);
static constexpr CountType maxStrongValue =
static_cast<CountType>((one << StrongCountNumBits) - 1);
static constexpr CountType maxWeakValue =
static_cast<CountType>((one << WeakCountNumBits) - 1);
/** Put an extra margin to detect when running up against limits.
This is only used in debug code, and is useful if we reduce the
number of bits in the strong and weak counts (to 16 and 14 bits).
@@ -396,7 +398,8 @@ inline IntrusiveRefCounts::~IntrusiveRefCounts() noexcept
{
#ifndef NDEBUG
auto v = refCounts.load(std::memory_order_acquire);
XRPL_ASSERT((!(v & valueMask)), "xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : count must be zero");
XRPL_ASSERT(
(!(v & valueMask)), "xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : count must be zero");
auto t = v & tagMask;
XRPL_ASSERT((!t || t == tagMask), "xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : valid tag");
#endif
@@ -434,8 +437,10 @@ IntrusiveRefCounts::RefCountPair::combinedValue() const noexcept
(strong < checkStrongMaxValue && weak < checkWeakMaxValue),
"xrpl::IntrusiveRefCounts::RefCountPair::combinedValue : inputs "
"inside range");
return (static_cast<IntrusiveRefCounts::FieldType>(weak) << IntrusiveRefCounts::StrongCountNumBits) |
static_cast<IntrusiveRefCounts::FieldType>(strong) | partialDestroyStartedBit | partialDestroyFinishedBit;
return (static_cast<IntrusiveRefCounts::FieldType>(weak)
<< IntrusiveRefCounts::StrongCountNumBits) |
static_cast<IntrusiveRefCounts::FieldType>(strong) | partialDestroyStartedBit |
partialDestroyFinishedBit;
}
template <class T>
@@ -443,7 +448,8 @@ inline void
partialDestructorFinished(T** o)
{
T& self = **o;
IntrusiveRefCounts::RefCountPair p = self.refCounts.fetch_or(IntrusiveRefCounts::partialDestroyFinishedMask);
IntrusiveRefCounts::RefCountPair p =
self.refCounts.fetch_or(IntrusiveRefCounts::partialDestroyFinishedMask);
XRPL_ASSERT(
(!p.partialDestroyFinishedBit && p.partialDestroyStartedBit && !p.strong),
"xrpl::partialDestructorFinished : not a weak ref");
@@ -461,4 +467,3 @@ partialDestructorFinished(T** o)
//------------------------------------------------------------------------------
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_KEYCACHE_H
#define XRPL_BASICS_KEYCACHE_H
#pragma once
#include <xrpl/basics/TaggedCache.h>
#include <xrpl/basics/base_uint.h>
@@ -9,5 +8,3 @@ namespace xrpl {
using KeyCache = TaggedCache<uint256, int, true>;
} // namespace xrpl
#endif // XRPL_BASICS_KEYCACHE_H

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_LOCALVALUE_H_INCLUDED
#define XRPL_BASICS_LOCALVALUE_H_INCLUDED
#pragma once
#include <boost/thread/tss.hpp>
@@ -104,8 +103,7 @@ LocalValue<T>::operator*()
}
return *reinterpret_cast<T*>(
lvs->values.emplace(this, std::make_unique<detail::LocalValues::Value<T>>(t_)).first->second->get());
lvs->values.emplace(this, std::make_unique<detail::LocalValues::Value<T>>(t_))
.first->second->get());
}
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_LOG_H_INCLUDED
#define XRPL_BASICS_LOG_H_INCLUDED
#pragma once
#include <xrpl/basics/UnorderedContainers.h>
#include <xrpl/beast/utility/Journal.h>
@@ -171,7 +170,11 @@ public:
partition_severities() const;
void
write(beast::severities::Severity level, std::string const& partition, std::string const& text, bool console);
write(
beast::severities::Severity level,
std::string const& partition,
std::string const& text,
bool console);
std::string
rotate();
@@ -258,5 +261,3 @@ beast::Journal
debugLog();
} // namespace xrpl
#endif

View File

@@ -0,0 +1,73 @@
#pragma once
#include <xrpl/beast/utility/Journal.h>
#include <chrono>
#include <optional>
#include <string>
namespace xrpl {
// cSpell:ignore ptmalloc
// -----------------------------------------------------------------------------
// Allocator interaction note:
// - This facility invokes glibc's malloc_trim(0) on Linux/glibc to request that
// ptmalloc return free heap pages to the OS.
// - If an alternative allocator (e.g. jemalloc or tcmalloc) is linked or
// preloaded (LD_PRELOAD), calling glibc's malloc_trim typically has no effect
// on the *active* heap. The call is harmless but may not reclaim memory
// because those allocators manage their own arenas.
// - Only glibc sbrk/arena space is eligible for trimming; large mmap-backed
// allocations are usually returned to the OS on free regardless of trimming.
// - Call at known reclamation points (e.g., after cache sweeps / online delete)
// and consider rate limiting to avoid churn.
// -----------------------------------------------------------------------------
struct MallocTrimReport
{
bool supported{false};
int trimResult{-1};
long rssBeforeKB{-1};
long rssAfterKB{-1};
std::chrono::microseconds durationUs{-1};
long minfltDelta{-1};
long majfltDelta{-1};
[[nodiscard]] long
deltaKB() const noexcept
{
if (rssBeforeKB < 0 || rssAfterKB < 0)
return 0;
return rssAfterKB - rssBeforeKB;
}
};
/**
* @brief Attempt to return freed memory to the operating system.
*
* On Linux with glibc malloc, this issues ::malloc_trim(0), which may release
* free space from ptmalloc arenas back to the kernel. On other platforms, or if
* a different allocator is in use, this function is a no-op and the report will
* indicate that trimming is unsupported or had no effect.
*
* @param tag Optional identifier for logging/debugging purposes.
* @param journal Journal for diagnostic logging.
* @return Report containing before/after metrics and the trim result.
*
* @note If an alternative allocator (jemalloc/tcmalloc) is linked or preloaded,
* calling glibc's malloc_trim may have no effect on the active heap. The
* call is harmless but typically does not reclaim memory under those
* allocators.
*
* @note Only memory served from glibc's sbrk/arena heaps is eligible for trim.
* Large allocations satisfied via mmap are usually returned on free
* independently of trimming.
*
* @note Intended for use after operations that free significant memory (e.g.,
* cache sweeps, ledger cleanup, online delete). Consider rate limiting.
*/
MallocTrimReport
mallocTrim(std::optional<std::string> const& tag, beast::Journal journal);
} // namespace xrpl
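
A short usage sketch for the facility declared above; the call site, tag, and include path are hypothetical, while the signature and `MallocTrimReport` fields come from this file:

```cpp
#include <xrpl/basics/MallocTrim.h>  // assumed header location

#include <xrpl/beast/utility/Journal.h>

// Hypothetical reclamation point, e.g. invoked after a cache sweep.
void
afterCacheSweep(beast::Journal journal)
{
    auto const report = xrpl::mallocTrim("cache-sweep", journal);
    if (report.supported)
    {
        JLOG(journal.debug()) << "malloc_trim result=" << report.trimResult
                              << " rssDeltaKB=" << report.deltaKB()
                              << " durationUs=" << report.durationUs.count();
    }
}
```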

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_MATHUTILITIES_H_INCLUDED
#define XRPL_BASICS_MATHUTILITIES_H_INCLUDED
#pragma once
#include <algorithm>
#include <cassert>
@@ -45,5 +44,3 @@ static_assert(calculatePercent(50'000'001, 100'000'000) == 51);
static_assert(calculatePercent(99'999'999, 100'000'000) == 100);
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_NUMBER_H_INCLUDED
#define XRPL_BASICS_NUMBER_H_INCLUDED
#pragma once
#include <xrpl/beast/utility/instrumentation.h>
@@ -241,7 +240,11 @@ public:
Number(rep mantissa);
explicit Number(rep mantissa, int exponent);
explicit constexpr Number(bool negative, internalrep mantissa, int exponent, unchecked) noexcept;
explicit constexpr Number(
bool negative,
internalrep mantissa,
int exponent,
unchecked) noexcept;
// Assume unsigned values are... unsigned. i.e. positive
explicit constexpr Number(internalrep mantissa, int exponent, unchecked) noexcept;
// Only unit tests are expected to use this ctor
@@ -295,7 +298,8 @@ public:
friend constexpr bool
operator==(Number const& x, Number const& y) noexcept
{
return x.negative_ == y.negative_ && x.mantissa_ == y.mantissa_ && x.exponent_ == y.exponent_;
return x.negative_ == y.negative_ && x.mantissa_ == y.mantissa_ &&
x.exponent_ == y.exponent_;
}
friend constexpr bool
@@ -503,7 +507,11 @@ private:
class Guard;
};
inline constexpr Number::Number(bool negative, internalrep mantissa, int exponent, unchecked) noexcept
inline constexpr Number::Number(
bool negative,
internalrep mantissa,
int exponent,
unchecked) noexcept
: negative_(negative), mantissa_{mantissa}, exponent_{exponent}
{
}
@@ -521,7 +529,8 @@ inline Number::Number(bool negative, internalrep mantissa, int exponent, normali
normalize();
}
inline Number::Number(internalrep mantissa, int exponent, normalized) : Number(false, mantissa, exponent, normalized{})
inline Number::Number(internalrep mantissa, int exponent, normalized)
: Number(false, mantissa, exponent, normalized{})
{
}
@@ -683,8 +692,8 @@ Number::isnormal() const noexcept
MantissaRange const& range = range_;
auto const abs_m = mantissa_;
return *this == Number{} ||
(range.min <= abs_m && abs_m <= range.max && (abs_m <= maxRep || abs_m % 10 == 0) && minExponent <= exponent_ &&
exponent_ <= maxExponent);
(range.min <= abs_m && abs_m <= range.max && (abs_m <= maxRep || abs_m % 10 == 0) &&
minExponent <= exponent_ && exponent_ <= maxExponent);
}
template <Integral64 T>
@@ -696,7 +705,10 @@ Number::normalizeToRange(T minMantissa, T maxMantissa) const
int exponent = exponent_;
if constexpr (std::is_unsigned_v<T>)
XRPL_ASSERT_PARTS(!negative, "xrpl::Number::normalizeToRange", "Number is non-negative for unsigned range.");
XRPL_ASSERT_PARTS(
!negative,
"xrpl::Number::normalizeToRange",
"Number is non-negative for unsigned range.");
Number::normalize(negative, mantissa, exponent, minMantissa, maxMantissa);
auto const sign = negative ? -1 : 1;
@@ -782,7 +794,8 @@ class NumberRoundModeGuard
saveNumberRoundMode saved_;
public:
explicit NumberRoundModeGuard(Number::rounding_mode mode) noexcept : saved_{Number::setround(mode)}
explicit NumberRoundModeGuard(Number::rounding_mode mode) noexcept
: saved_{Number::setround(mode)}
{
}
@@ -802,7 +815,8 @@ class NumberMantissaScaleGuard
MantissaRange::mantissa_scale const saved_;
public:
explicit NumberMantissaScaleGuard(MantissaRange::mantissa_scale scale) noexcept : saved_{Number::getMantissaScale()}
explicit NumberMantissaScaleGuard(MantissaRange::mantissa_scale scale) noexcept
: saved_{Number::getMantissaScale()}
{
Number::setMantissaScale(scale);
}
@@ -819,5 +833,3 @@ public:
};
} // namespace xrpl
#endif // XRPL_BASICS_NUMBER_H_INCLUDED

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_RANGESET_H_INCLUDED
#define XRPL_BASICS_RANGESET_H_INCLUDED
#pragma once
#include <xrpl/beast/core/LexicalCast.h>
@@ -173,5 +172,3 @@ prevMissing(RangeSet<T> const& rs, T t, T minVal = 0)
}
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_RESOLVER_H_INCLUDED
#define XRPL_BASICS_RESOLVER_H_INCLUDED
#pragma once
#include <xrpl/beast/net/IPEndpoint.h>
@@ -45,5 +44,3 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_RESOLVERASIO_H_INCLUDED
#define XRPL_BASICS_RESOLVERASIO_H_INCLUDED
#pragma once
#include <xrpl/basics/Resolver.h>
#include <xrpl/beast/utility/Journal.h>
@@ -18,5 +17,3 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_SHAMAP_HASH_H_INCLUDED
#define XRPL_BASICS_SHAMAP_HASH_H_INCLUDED
#pragma once
#include <xrpl/basics/base_uint.h>
#include <xrpl/basics/partitioned_unordered_map.h>
@@ -98,5 +97,3 @@ extract(SHAMapHash const& key)
}
} // namespace xrpl
#endif // XRPL_BASICS_SHAMAP_HASH_H_INCLUDED

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_SHAREDWEAKCACHEPOINTER_H_INCLUDED
#define XRPL_BASICS_SHAREDWEAKCACHEPOINTER_H_INCLUDED
#pragma once
#include <memory>
#include <variant>
@@ -113,4 +112,3 @@ private:
std::variant<std::shared_ptr<T>, std::weak_ptr<T>> combo_;
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_SHAREDWEAKCACHEPOINTER_IPP_INCLUDED
#define XRPL_BASICS_SHAREDWEAKCACHEPOINTER_IPP_INCLUDED
#pragma once
#include <xrpl/basics/SharedWeakCachePointer.h>
@@ -20,7 +19,8 @@ SharedWeakCachePointer<T>::SharedWeakCachePointer(SharedWeakCachePointer&& rhs)
template <class T>
template <class TT>
requires std::convertible_to<TT*, T*>
SharedWeakCachePointer<T>::SharedWeakCachePointer(std::shared_ptr<TT>&& rhs) : combo_{std::move(rhs)}
SharedWeakCachePointer<T>::SharedWeakCachePointer(std::shared_ptr<TT>&& rhs)
: combo_{std::move(rhs)}
{
}
@@ -64,7 +64,8 @@ SharedWeakCachePointer<T>::getStrong() const
}
template <class T>
SharedWeakCachePointer<T>::operator bool() const noexcept
SharedWeakCachePointer<T>::
operator bool() const noexcept
{
return !!std::get_if<std::shared_ptr<T>>(&combo_);
}
@@ -164,4 +165,3 @@ SharedWeakCachePointer<T>::convertToWeak()
return false;
}
} // namespace xrpl
#endif

View File

@@ -1,7 +1,6 @@
// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>
#ifndef XRPL_BASICS_SLABALLOCATOR_H_INCLUDED
#define XRPL_BASICS_SLABALLOCATOR_H_INCLUDED
#pragma once
#include <xrpl/basics/ByteUtilities.h>
#include <xrpl/beast/type_name.h>
@@ -156,13 +155,17 @@ public:
contexts (e.g. when minimal memory usage is needed) and
allows for graceful failure.
*/
constexpr explicit SlabAllocator(std::size_t extra, std::size_t alloc = 0, std::size_t align = 0)
constexpr explicit SlabAllocator(
std::size_t extra,
std::size_t alloc = 0,
std::size_t align = 0)
: itemAlignment_(align ? align : alignof(Type))
, itemSize_(boost::alignment::align_up(sizeof(Type) + extra, itemAlignment_))
, slabSize_(alloc)
{
XRPL_ASSERT(
(itemAlignment_ & (itemAlignment_ - 1)) == 0, "xrpl::SlabAllocator::SlabAllocator : valid alignment");
(itemAlignment_ & (itemAlignment_ - 1)) == 0,
"xrpl::SlabAllocator::SlabAllocator : valid alignment");
}
SlabAllocator(SlabAllocator const& other) = delete;
@@ -216,7 +219,7 @@ public:
// clang-format off
if (!buf) [[unlikely]]
return nullptr;
// clang-format on
// clang-format on
#if BOOST_OS_LINUX
// When allocating large blocks, attempt to leverage Linux's
@@ -229,7 +232,8 @@ public:
// We need to carve out a bit of memory for the slab header
// and then align the rest appropriately:
auto slabData = reinterpret_cast<void*>(reinterpret_cast<std::uint8_t*>(buf) + sizeof(SlabBlock));
auto slabData =
reinterpret_cast<void*>(reinterpret_cast<std::uint8_t*>(buf) + sizeof(SlabBlock));
auto slabSize = size - sizeof(SlabBlock);
// This operation is essentially guaranteed not to fail but
@@ -240,10 +244,12 @@ public:
return nullptr;
}
slab = new (buf) SlabBlock(slabs_.load(), reinterpret_cast<std::uint8_t*>(slabData), slabSize, itemSize_);
slab = new (buf) SlabBlock(
slabs_.load(), reinterpret_cast<std::uint8_t*>(slabData), slabSize, itemSize_);
// Link the new slab
while (!slabs_.compare_exchange_weak(slab->next_, slab, std::memory_order_release, std::memory_order_relaxed))
while (!slabs_.compare_exchange_weak(
slab->next_, slab, std::memory_order_release, std::memory_order_relaxed))
{
; // Nothing to do
}
@@ -300,7 +306,10 @@ public:
std::size_t align;
public:
constexpr SlabConfig(std::size_t extra_, std::size_t alloc_ = 0, std::size_t align_ = alignof(Type))
constexpr SlabConfig(
std::size_t extra_,
std::size_t alloc_ = 0,
std::size_t align_ = alignof(Type))
: extra(extra_), alloc(alloc_), align(align_)
{
}
@@ -310,15 +319,18 @@ public:
{
// Ensure that the specified allocators are sorted from smallest to
// largest by size:
std::sort(
std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) { return a.extra < b.extra; });
std::sort(std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) {
return a.extra < b.extra;
});
// We should never have two slabs of the same size
if (std::adjacent_find(std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) {
return a.extra == b.extra;
}) != cfg.end())
if (std::adjacent_find(
std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) {
return a.extra == b.extra;
}) != cfg.end())
{
throw std::runtime_error("SlabAllocatorSet<" + beast::type_name<Type>() + ">: duplicate slab size");
throw std::runtime_error(
"SlabAllocatorSet<" + beast::type_name<Type>() + ">: duplicate slab size");
}
for (auto const& c : cfg)
@@ -386,5 +398,3 @@ public:
};
} // namespace xrpl
#endif // XRPL_BASICS_SLABALLOCATOR_H_INCLUDED
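
The wrapped XRPL_ASSERT in the SlabAllocator constructor above is the usual power-of-two test on the alignment; the same check stated on its own:

    #include <cstddef>

    constexpr bool
    isPowerOfTwo(std::size_t x) noexcept
    {
        return x != 0 && (x & (x - 1)) == 0;  // same (x & (x - 1)) trick as the assert
    }
    static_assert(isPowerOfTwo(64) && !isPowerOfTwo(48));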

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_SLICE_H_INCLUDED
#define XRPL_BASICS_SLICE_H_INCLUDED
#pragma once
#include <xrpl/basics/contract.h>
#include <xrpl/basics/strHex.h>
@@ -41,7 +40,8 @@ public:
operator=(Slice const&) noexcept = default;
/** Create a slice pointing to existing memory. */
Slice(void const* data, std::size_t size) noexcept : data_(reinterpret_cast<std::uint8_t const*>(data)), size_(size)
Slice(void const* data, std::size_t size) noexcept
: data_(reinterpret_cast<std::uint8_t const*>(data)), size_(size)
{
}
@@ -198,7 +198,8 @@ operator!=(Slice const& lhs, Slice const& rhs) noexcept
inline bool
operator<(Slice const& lhs, Slice const& rhs) noexcept
{
return std::lexicographical_compare(lhs.data(), lhs.data() + lhs.size(), rhs.data(), rhs.data() + rhs.size());
return std::lexicographical_compare(
lhs.data(), lhs.data() + lhs.size(), rhs.data(), rhs.data() + rhs.size());
}
template <class Stream>
@@ -231,5 +232,3 @@ makeSlice(std::basic_string<char, Traits, Alloc> const& s)
}
} // namespace xrpl
#endif
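
operator< above simply forwards both byte ranges to std::lexicographical_compare; a self-contained illustration of that comparison:

    #include <algorithm>
    #include <cstdint>

    constexpr std::uint8_t a[] = {0x01, 0x02};
    constexpr std::uint8_t b[] = {0x01, 0x03};
    bool const less = std::lexicographical_compare(a, a + 2, b, b + 2);  // true: 0x02 < 0x03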

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_STRINGUTILITIES_H_INCLUDED
#define XRPL_BASICS_STRINGUTILITIES_H_INCLUDED
#pragma once
#include <xrpl/basics/Blob.h>
#include <xrpl/basics/strHex.h>
@@ -109,7 +108,8 @@ struct parsedURL
bool
operator==(parsedURL const& other) const
{
return scheme == other.scheme && domain == other.domain && port == other.port && path == other.path;
return scheme == other.scheme && domain == other.domain && port == other.port &&
path == other.path;
}
};
@@ -132,5 +132,3 @@ bool
isProperlyFormedTomlDomain(std::string_view domain);
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_TAGGEDCACHE_H_INCLUDED
#define XRPL_BASICS_TAGGEDCACHE_H_INCLUDED
#pragma once
#include <xrpl/basics/IntrusivePointer.h>
#include <xrpl/basics/Log.h>
@@ -176,7 +175,10 @@ private:
struct Stats
{
template <class Handler>
Stats(std::string const& prefix, Handler const& handler, beast::insight::Collector::ptr const& collector)
Stats(
std::string const& prefix,
Handler const& handler,
beast::insight::Collector::ptr const& collector)
: hook(collector->make_hook(handler))
, size(collector->make_gauge(prefix, "size"))
, hit_rate(collector->make_gauge(prefix, "hit_rate"))
@@ -198,7 +200,8 @@ private:
public:
clock_type::time_point last_access;
explicit KeyOnlyEntry(clock_type::time_point const& last_access_) : last_access(last_access_)
explicit KeyOnlyEntry(clock_type::time_point const& last_access_)
: last_access(last_access_)
{
}
@@ -298,5 +301,3 @@ private:
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_TAGGEDCACHE_IPP_INCLUDED
#define XRPL_BASICS_TAGGEDCACHE_IPP_INCLUDED
#pragma once
#include <xrpl/basics/IntrusivePointer.ipp>
#include <xrpl/basics/TaggedCache.h>
@@ -15,13 +14,22 @@ template <
class Hash,
class KeyEqual,
class Mutex>
inline TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::TaggedCache(
std::string const& name,
int size,
clock_type::duration expiration,
clock_type& clock,
beast::Journal journal,
beast::insight::Collector::ptr const& collector)
inline TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::
TaggedCache(
std::string const& name,
int size,
clock_type::duration expiration,
clock_type& clock,
beast::Journal journal,
beast::insight::Collector::ptr const& collector)
: m_journal(journal)
, m_clock(clock)
, m_stats(name, std::bind(&TaggedCache::collect_metrics, this), collector)
@@ -44,8 +52,8 @@ template <
class KeyEqual,
class Mutex>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::clock()
-> clock_type&
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
clock() -> clock_type&
{
return m_clock;
}
@@ -60,7 +68,8 @@ template <
class KeyEqual,
class Mutex>
inline std::size_t
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::size() const
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
size() const
{
std::lock_guard lock(m_mutex);
return m_cache.size();
@@ -76,7 +85,8 @@ template <
class KeyEqual,
class Mutex>
inline int
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getCacheSize() const
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
getCacheSize() const
{
std::lock_guard lock(m_mutex);
return m_cache_count;
@@ -92,7 +102,8 @@ template <
class KeyEqual,
class Mutex>
inline int
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getTrackSize() const
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
getTrackSize() const
{
std::lock_guard lock(m_mutex);
return m_cache.size();
@@ -108,7 +119,8 @@ template <
class KeyEqual,
class Mutex>
inline float
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getHitRate()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
getHitRate()
{
std::lock_guard lock(m_mutex);
auto const total = static_cast<float>(m_hits + m_misses);
@@ -125,7 +137,8 @@ template <
class KeyEqual,
class Mutex>
inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::clear()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
clear()
{
std::lock_guard lock(m_mutex);
m_cache.clear();
@@ -142,7 +155,8 @@ template <
class KeyEqual,
class Mutex>
inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::reset()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
reset()
{
std::lock_guard lock(m_mutex);
m_cache.clear();
@@ -162,8 +176,8 @@ template <
class Mutex>
template <class KeyComparable>
inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::touch_if_exists(
KeyComparable const& key)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
touch_if_exists(KeyComparable const& key)
{
std::lock_guard lock(m_mutex);
auto const iter(m_cache.find(key));
@@ -187,7 +201,8 @@ template <
class KeyEqual,
class Mutex>
inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweep()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
sweep()
{
// Keep references to all the stuff we sweep
// For performance, each worker thread should exit before the swept data
@@ -213,8 +228,9 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (when_expire > (now - minimumAge))
when_expire = now - minimumAge;
JLOG(m_journal.trace()) << m_name << " is growing fast " << m_cache.size() << " of " << m_target_size
<< " aging at " << (now - when_expire).count() << " of " << m_target_age.count();
JLOG(m_journal.trace())
<< m_name << " is growing fast " << m_cache.size() << " of " << m_target_size
<< " aging at " << (now - when_expire).count() << " of " << m_target_age.count();
}
std::vector<std::thread> workers;
@@ -223,7 +239,8 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
for (std::size_t p = 0; p < m_cache.partitions(); ++p)
{
workers.push_back(sweepHelper(when_expire, now, m_cache.map()[p], allStuffToSweep[p], allRemovals, lock));
workers.push_back(sweepHelper(
when_expire, now, m_cache.map()[p], allStuffToSweep[p], allRemovals, lock));
}
for (std::thread& worker : workers)
worker.join();
@@ -232,10 +249,11 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
}
// At this point allStuffToSweep will go out of scope outside the lock
// and decrement the reference count on each strong pointer.
JLOG(m_journal.debug())
<< m_name << " TaggedCache sweep lock duration "
<< std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count()
<< "ms";
JLOG(m_journal.debug()) << m_name << " TaggedCache sweep lock duration "
<< std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::steady_clock::now() - start)
.count()
<< "ms";
}
template <
@@ -248,9 +266,8 @@ template <
class KeyEqual,
class Mutex>
inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::del(
key_type const& key,
bool valid)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
del(key_type const& key, bool valid)
{
// Remove from cache, if !valid, remove from map too. Returns true if
// removed from cache
@@ -289,10 +306,8 @@ template <
class Mutex>
template <class R>
inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::canonicalize(
key_type const& key,
SharedPointerType& data,
R&& replaceCallback)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
canonicalize(key_type const& key, SharedPointerType& data, R&& replaceCallback)
{
// Return canonical value, store if needed, refresh in cache
// Return values: true=we had the data already
@@ -303,7 +318,9 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (cit == m_cache.end())
{
m_cache.emplace(
std::piecewise_construct, std::forward_as_tuple(key), std::forward_as_tuple(m_clock.now(), data));
std::piecewise_construct,
std::forward_as_tuple(key),
std::forward_as_tuple(m_clock.now(), data));
++m_cache_count;
return false;
}
@@ -405,8 +422,8 @@ template <
class KeyEqual,
class Mutex>
inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::fetch(
key_type const& key)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
fetch(key_type const& key)
{
std::lock_guard<mutex_type> l(m_mutex);
auto ret = initialFetch(key, l);
@@ -426,9 +443,8 @@ template <
class Mutex>
template <class ReturnType>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::insert(
key_type const& key,
T const& value) -> std::enable_if_t<!IsKeyCache, ReturnType>
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
insert(key_type const& key, T const& value) -> std::enable_if_t<!IsKeyCache, ReturnType>
{
static_assert(
std::is_same_v<std::shared_ptr<T>, SharedPointerType> ||
@@ -457,13 +473,13 @@ template <
class Mutex>
template <class ReturnType>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::insert(
key_type const& key) -> std::enable_if_t<IsKeyCache, ReturnType>
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
insert(key_type const& key) -> std::enable_if_t<IsKeyCache, ReturnType>
{
std::lock_guard lock(m_mutex);
clock_type::time_point const now(m_clock.now());
auto [it, inserted] =
m_cache.emplace(std::piecewise_construct, std::forward_as_tuple(key), std::forward_as_tuple(now));
auto [it, inserted] = m_cache.emplace(
std::piecewise_construct, std::forward_as_tuple(key), std::forward_as_tuple(now));
if (!inserted)
it->second.last_access = now;
return inserted;
@@ -479,9 +495,8 @@ template <
class KeyEqual,
class Mutex>
inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::retrieve(
key_type const& key,
T& data)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
retrieve(key_type const& key, T& data)
{
// retrieve the value of the stored data
auto entry = fetch(key);
@@ -503,8 +518,8 @@ template <
class KeyEqual,
class Mutex>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::peekMutex()
-> mutex_type&
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
peekMutex() -> mutex_type&
{
return m_mutex;
}
@@ -519,8 +534,8 @@ template <
class KeyEqual,
class Mutex>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getKeys() const
-> std::vector<key_type>
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
getKeys() const -> std::vector<key_type>
{
std::vector<key_type> v;
@@ -544,7 +559,8 @@ template <
class KeyEqual,
class Mutex>
inline double
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::rate() const
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
rate() const
{
std::lock_guard lock(m_mutex);
auto const tot = m_hits + m_misses;
@@ -564,9 +580,8 @@ template <
class Mutex>
template <class Handler>
inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::fetch(
key_type const& digest,
Handler const& h)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
fetch(key_type const& digest, Handler const& h)
{
{
std::lock_guard l(m_mutex);
@@ -597,9 +612,8 @@ template <
class KeyEqual,
class Mutex>
inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::initialFetch(
key_type const& key,
std::lock_guard<mutex_type> const& l)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
initialFetch(key_type const& key, std::lock_guard<mutex_type> const& l)
{
auto cit = m_cache.find(key);
if (cit == m_cache.end())
@@ -635,7 +649,8 @@ template <
class KeyEqual,
class Mutex>
inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::collect_metrics()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
collect_metrics()
{
m_stats.size.set(getCacheSize());
@@ -661,13 +676,14 @@ template <
class KeyEqual,
class Mutex>
inline std::thread
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweepHelper(
clock_type::time_point const& when_expire,
[[maybe_unused]] clock_type::time_point const& now,
typename KeyValueCacheType::map_type& partition,
SweptPointersVector& stuffToSweep,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
sweepHelper(
clock_type::time_point const& when_expire,
[[maybe_unused]] clock_type::time_point const& now,
typename KeyValueCacheType::map_type& partition,
SweptPointersVector& stuffToSweep,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
{
return std::thread([&, this]() {
int cacheRemovals = 0;
@@ -721,8 +737,9 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (mapRemovals || cacheRemovals)
{
JLOG(m_journal.debug()) << "TaggedCache partition sweep " << m_name << ": cache = " << partition.size()
<< "-" << cacheRemovals << ", map-=" << mapRemovals;
JLOG(m_journal.debug())
<< "TaggedCache partition sweep " << m_name << ": cache = " << partition.size()
<< "-" << cacheRemovals << ", map-=" << mapRemovals;
}
allRemovals += cacheRemovals;
@@ -739,13 +756,14 @@ template <
class KeyEqual,
class Mutex>
inline std::thread
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweepHelper(
clock_type::time_point const& when_expire,
clock_type::time_point const& now,
typename KeyOnlyCacheType::map_type& partition,
SweptPointersVector&,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
sweepHelper(
clock_type::time_point const& when_expire,
clock_type::time_point const& now,
typename KeyOnlyCacheType::map_type& partition,
SweptPointersVector&,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
{
return std::thread([&, this]() {
int cacheRemovals = 0;
@@ -775,8 +793,9 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (mapRemovals || cacheRemovals)
{
JLOG(m_journal.debug()) << "TaggedCache partition sweep " << m_name << ": cache = " << partition.size()
<< "-" << cacheRemovals << ", map-=" << mapRemovals;
JLOG(m_journal.debug())
<< "TaggedCache partition sweep " << m_name << ": cache = " << partition.size()
<< "-" << cacheRemovals << ", map-=" << mapRemovals;
}
allRemovals += cacheRemovals;
@@ -784,5 +803,3 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
}
} // namespace xrpl
#endif
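
sweep() above (and the reflowed JLOG at its end) launches one worker thread per cache partition, joins them all, and then reports how long the lock was held. The skeleton of that pattern, with a hypothetical std::vector partition standing in for the real map:

    #include <chrono>
    #include <thread>
    #include <vector>

    void
    sweepAll(std::vector<std::vector<int>>& partitions)
    {
        auto const start = std::chrono::steady_clock::now();
        std::vector<std::thread> workers;
        for (auto& p : partitions)
            workers.emplace_back([&p] { p.clear(); });  // each worker sweeps one partition
        for (auto& w : workers)
            w.join();
        auto const ms = std::chrono::duration_cast<std::chrono::milliseconds>(
            std::chrono::steady_clock::now() - start);
        (void)ms;  // the real code reports this duration via JLOG
    }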

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_TOSTRING_H_INCLUDED
#define XRPL_BASICS_TOSTRING_H_INCLUDED
#pragma once
#include <string>
#include <type_traits>
@@ -44,5 +43,3 @@ to_string(char const* s)
}
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_UNORDEREDCONTAINERS_H_INCLUDED
#define XRPL_BASICS_UNORDEREDCONTAINERS_H_INCLUDED
#pragma once
#include <xrpl/basics/hardened_hash.h>
#include <xrpl/basics/partitioned_unordered_map.h>
@@ -99,5 +98,3 @@ template <
using hardened_hash_multiset = std::unordered_multiset<Value, Hash, Pred, Allocator>;
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_UPTIMETIMER_H_INCLUDED
#define XRPL_BASICS_UPTIMETIMER_H_INCLUDED
#pragma once
#include <atomic>
#include <chrono>
@@ -46,5 +45,3 @@ private:
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_ALGORITHM_H_INCLUDED
#define XRPL_ALGORITHM_H_INCLUDED
#pragma once
#include <utility>
@@ -52,7 +51,13 @@ generalized_set_intersection(
// std::set_intersection.
template <class FwdIter1, class InputIter2, class Pred, class Comp>
FwdIter1
remove_if_intersect_or_match(FwdIter1 first1, FwdIter1 last1, InputIter2 first2, InputIter2 last2, Pred pred, Comp comp)
remove_if_intersect_or_match(
FwdIter1 first1,
FwdIter1 last1,
InputIter2 first2,
InputIter2 last2,
Pred pred,
Comp comp)
{
// [original-first1, current-first1) is the set of elements to be preserved.
// [current-first1, i) is the set of elements that have been removed.
@@ -90,5 +95,3 @@ remove_if_intersect_or_match(FwdIter1 first1, FwdIter1 last1, InputIter2 first2,
}
} // namespace xrpl
#endif
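
The bracketed invariants in the comments above ([preserved) followed by [removed)) are the same bookkeeping std::remove_if maintains; the familiar erase/remove idiom for comparison:

    #include <algorithm>
    #include <vector>

    void
    dropEven(std::vector<int>& v)
    {
        auto const newEnd = std::remove_if(v.begin(), v.end(), [](int x) { return x % 2 == 0; });
        v.erase(newEnd, v.end());  // [begin, newEnd) is preserved, [newEnd, end) is discarded
    }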

View File

@@ -32,8 +32,7 @@
*/
#ifndef XRPL_BASICS_BASE64_H_INCLUDED
#define XRPL_BASICS_BASE64_H_INCLUDED
#pragma once
#include <cstdint>
#include <string>
@@ -53,5 +52,3 @@ std::string
base64_decode(std::string_view data);
} // namespace xrpl
#endif
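
The two declarations above form a round-trip pair; a usage sketch, assuming an encode overload that accepts a std::string:

    std::string const encoded = xrpl::base64_encode("hello");  // assumed std::string overload
    std::string const decoded = xrpl::base64_decode(encoded);  // yields "hello" again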

View File

@@ -3,8 +3,7 @@
// Distributed under the MIT/X11 software license, see the accompanying
// file license.txt or http://www.opensource.org/licenses/mit-license.php.
#ifndef XRPL_BASICS_BASE_UINT_H_INCLUDED
#define XRPL_BASICS_BASE_UINT_H_INCLUDED
#pragma once
#include <xrpl/basics/Expected.h>
#include <xrpl/basics/Slice.h>
@@ -215,7 +214,8 @@ private:
std::uint32_t accum = {};
for (std::uint32_t shift : {4u, 0u, 12u, 8u, 20u, 16u, 28u, 24u})
{
if (auto const result = hexCharToUInt(*in++, shift, accum); result != ParseResult::okay)
if (auto const result = hexCharToUInt(*in++, shift, accum);
result != ParseResult::okay)
return Unexpected(result);
}
ret[i++] = accum;
@@ -254,7 +254,8 @@ public:
// This constructor is intended to be used at compile time since it might
// throw at runtime. Consider declaring this constructor consteval once
// we get to C++23.
explicit constexpr base_uint(std::string_view sv) noexcept(false) : data_(parseFromStringViewThrows(sv))
explicit constexpr base_uint(std::string_view sv) noexcept(false)
: data_(parseFromStringViewThrows(sv))
{
}
@@ -443,7 +444,8 @@ public:
for (int i = WIDTH; i--;)
{
std::uint64_t n = carry + boost::endian::big_to_native(data_[i]) + boost::endian::big_to_native(b.data_[i]);
std::uint64_t n = carry + boost::endian::big_to_native(data_[i]) +
boost::endian::big_to_native(b.data_[i]);
data_[i] = boost::endian::native_to_big(static_cast<std::uint32_t>(n));
carry = n >> 32;
@@ -644,5 +646,3 @@ struct is_uniquely_represented<xrpl::base_uint<Bits, Tag>> : public std::true_ty
};
} // namespace beast
#endif
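
The operator+= fragment above adds two numbers limb by limb and propagates the carry through a 64-bit temporary; the same scheme in isolation (the boost::endian conversions are omitted here):

    #include <cstdint>

    void
    addLimbs(std::uint32_t (&a)[2], std::uint32_t const (&b)[2])
    {
        std::uint64_t carry = 0;
        for (int i = 2; i--;)  // least-significant limb sits at the highest index
        {
            std::uint64_t const n = carry + a[i] + b[i];
            a[i] = static_cast<std::uint32_t>(n);  // low 32 bits stay in this limb
            carry = n >> 32;                       // high bits carry into the next limb
        }
    }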

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_CHRONO_H_INCLUDED
#define XRPL_BASICS_CHRONO_H_INCLUDED
#pragma once
#include <xrpl/beast/clock/abstract_clock.h>
#include <xrpl/beast/clock/basic_seconds_clock.h>
@@ -16,7 +15,8 @@ namespace xrpl {
// A few handy aliases
using days = std::chrono::duration<int, std::ratio_multiply<std::chrono::hours::period, std::ratio<24>>>;
using days =
std::chrono::duration<int, std::ratio_multiply<std::chrono::hours::period, std::ratio<24>>>;
using weeks = std::chrono::duration<int, std::ratio_multiply<days::period, std::ratio<7>>>;
@@ -99,5 +99,3 @@ stopwatch()
}
} // namespace xrpl
#endif
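
The reflowed days alias above multiplies the hours period by 24, and weeks multiplies that by 7; the arithmetic can be checked at compile time:

    #include <chrono>
    #include <ratio>

    using days =
        std::chrono::duration<int, std::ratio_multiply<std::chrono::hours::period, std::ratio<24>>>;
    using weeks = std::chrono::duration<int, std::ratio_multiply<days::period, std::ratio<7>>>;

    static_assert(std::chrono::duration_cast<std::chrono::hours>(weeks{1}).count() == 168);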

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_COMPARATORS_H_INCLUDED
#define XRPL_BASICS_COMPARATORS_H_INCLUDED
#pragma once
#include <functional>
@@ -53,5 +52,3 @@ using equal_to = std::equal_to<T>;
#endif
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_CONTRACT_H_INCLUDED
#define XRPL_BASICS_CONTRACT_H_INCLUDED
#pragma once
#include <xrpl/beast/type_name.h>
@@ -36,7 +35,9 @@ template <class E, class... Args>
[[noreturn]] inline void
Throw(Args&&... args)
{
static_assert(std::is_convertible<E*, std::exception*>::value, "Exception must derive from std::exception.");
static_assert(
std::is_convertible<E*, std::exception*>::value,
"Exception must derive from std::exception.");
E e(std::forward<Args>(args)...);
LogThrow(std::string("Throwing exception of type " + beast::type_name<E>() + ": ") + e.what());
@@ -48,5 +49,3 @@ Throw(Args&&... args)
LogicError(std::string const& how) noexcept;
} // namespace xrpl
#endif
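
Throw() above rejects non-std::exception types at compile time and logs before throwing; a sketch of a typical call (the message is illustrative only):

    #include <stdexcept>

    [[noreturn]] void
    failNotFound()
    {
        xrpl::Throw<std::runtime_error>("ledger not found");  // logs the type and message, then throws
    }
    // xrpl::Throw<int>(42) would not compile: int does not derive from std::exception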

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_HARDENED_HASH_H_INCLUDED
#define XRPL_BASICS_HARDENED_HASH_H_INCLUDED
#pragma once
#include <xrpl/beast/hash/hash_append.h>
#include <xrpl/beast/hash/xxhasher.h>
@@ -93,5 +92,3 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef JOIN_H_INCLUDED
#define JOIN_H_INCLUDED
#pragma once
#include <string>
@@ -24,7 +23,8 @@ public:
Collection const& collection;
std::string const delimiter;
explicit CollectionAndDelimiter(Collection const& c, std::string delim) : collection(c), delimiter(std::move(delim))
explicit CollectionAndDelimiter(Collection const& c, std::string delim)
: collection(c), delimiter(std::move(delim))
{
}
@@ -64,7 +64,8 @@ public:
char const* collection;
std::string const delimiter;
explicit CollectionAndDelimiter(char const c[N], std::string delim) : collection(c), delimiter(std::move(delim))
explicit CollectionAndDelimiter(char const c[N], std::string delim)
: collection(c), delimiter(std::move(delim))
{
}
@@ -80,5 +81,3 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_MAKE_SSLCONTEXT_H_INCLUDED
#define XRPL_BASICS_MAKE_SSLCONTEXT_H_INCLUDED
#pragma once
#include <boost/asio/ssl/context.hpp>
@@ -20,5 +19,3 @@ make_SSLContextAuthed(
std::string const& cipherList);
} // namespace xrpl
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_MULDIV_H_INCLUDED
#define XRPL_BASICS_MULDIV_H_INCLUDED
#pragma once
#include <cstdint>
#include <limits>
@@ -22,5 +21,3 @@ std::optional<std::uint64_t>
mulDiv(std::uint64_t value, std::uint64_t mul, std::uint64_t div);
} // namespace xrpl
#endif
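
mulDiv above computes (value * mul) / div and, per its declared return type, presumably yields an empty optional rather than a wrapped value when the result does not fit; a sketch of a call site with a caller-chosen fallback (scaleOrZero is hypothetical):

    #include <cstdint>

    std::uint64_t
    scaleOrZero(std::uint64_t value, std::uint64_t mul, std::uint64_t div)
    {
        if (auto const r = xrpl::mulDiv(value, mul, div))  // std::optional<std::uint64_t>
            return *r;
        return 0;  // fallback for the overflow/failure case, chosen by the caller
    }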

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_PARTITIONED_UNORDERED_MAP_H
#define XRPL_BASICS_PARTITIONED_UNORDERED_MAP_H
#pragma once
#include <xrpl/beast/hash/uhash.h>
#include <xrpl/beast/utility/instrumentation.h>
@@ -331,8 +330,8 @@ public:
auto const& key = std::get<0>(keyTuple);
iterator it(&map_);
it.ait_ = it.map_->begin() + partitioner(key);
auto [eit, inserted] =
it.ait_->emplace(std::piecewise_construct, std::forward<T>(keyTuple), std::forward<U>(valueTuple));
auto [eit, inserted] = it.ait_->emplace(
std::piecewise_construct, std::forward<T>(keyTuple), std::forward<U>(valueTuple));
it.mit_ = eit;
return {it, inserted};
}
@@ -393,5 +392,3 @@ private:
};
} // namespace xrpl
#endif // XRPL_BASICS_PARTITIONED_UNORDERED_MAP_H

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_RANDOM_H_INCLUDED
#define XRPL_BASICS_RANDOM_H_INCLUDED
#pragma once
#include <xrpl/beast/utility/instrumentation.h>
#include <xrpl/beast/xor_shift_engine.h>
@@ -20,7 +19,8 @@ static_assert(
"The Ripple default PRNG engine must return an unsigned integral type.");
static_assert(
std::numeric_limits<beast::xor_shift_engine::result_type>::max() >= std::numeric_limits<std::uint64_t>::max(),
std::numeric_limits<beast::xor_shift_engine::result_type>::max() >=
std::numeric_limits<std::uint64_t>::max(),
"The Ripple default PRNG engine return must be at least 64 bits wide.");
#endif
@@ -145,12 +145,14 @@ std::enable_if_t<
Byte>
rand_byte(Engine& engine)
{
return static_cast<Byte>(
rand_int<Engine, std::uint32_t>(engine, std::numeric_limits<Byte>::min(), std::numeric_limits<Byte>::max()));
return static_cast<Byte>(rand_int<Engine, std::uint32_t>(
engine, std::numeric_limits<Byte>::min(), std::numeric_limits<Byte>::max()));
}
template <class Byte = std::uint8_t>
std::enable_if_t<(std::is_same<Byte, unsigned char>::value || std::is_same<Byte, std::uint8_t>::value), Byte>
std::enable_if_t<
(std::is_same<Byte, unsigned char>::value || std::is_same<Byte, std::uint8_t>::value),
Byte>
rand_byte()
{
return rand_byte<Byte>(default_prng());
@@ -174,5 +176,3 @@ rand_bool()
/** @} */
} // namespace xrpl
#endif // XRPL_BASICS_RANDOM_H_INCLUDED
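
rand_byte above widens to std::uint32_t because std::uniform_int_distribution is not required to support byte-sized integer types; the same workaround in plain standard C++:

    #include <cstdint>
    #include <random>

    std::uint8_t
    randomByte(std::mt19937_64& engine)
    {
        // Draw over a 32-bit type, then narrow: distributions over
        // char/uint8_t are not guaranteed by the standard.
        std::uniform_int_distribution<std::uint32_t> dist(0, 255);
        return static_cast<std::uint8_t>(dist(engine));
    }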

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_UNITY_ROCKSDB_H_INCLUDED
#define XRPL_UNITY_ROCKSDB_H_INCLUDED
#pragma once
#if XRPL_ROCKSDB_AVAILABLE
// #include <rocksdb2/port/port_posix.h>
@@ -28,5 +27,3 @@
#include <rocksdb/write_batch.h>
#endif
#endif

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_SAFE_CAST_H_INCLUDED
#define XRPL_BASICS_SAFE_CAST_H_INCLUDED
#pragma once
#include <type_traits>
@@ -19,9 +18,12 @@ template <class Dest, class Src>
inline constexpr std::enable_if_t<std::is_integral_v<Dest> && std::is_integral_v<Src>, Dest>
safe_cast(Src s) noexcept
{
static_assert(std::is_signed_v<Dest> || std::is_unsigned_v<Src>, "Cannot cast signed to unsigned");
static_assert(
std::is_signed_v<Dest> || std::is_unsigned_v<Src>, "Cannot cast signed to unsigned");
constexpr unsigned not_same = std::is_signed_v<Dest> != std::is_signed_v<Src>;
static_assert(sizeof(Dest) >= sizeof(Src) + not_same, "Destination is too small to hold all values of source");
static_assert(
sizeof(Dest) >= sizeof(Src) + not_same,
"Destination is too small to hold all values of source");
return static_cast<Dest>(s);
}
@@ -69,5 +71,3 @@ unsafe_cast(Src s) noexcept
}
} // namespace xrpl
#endif
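
With the reformatted static_asserts, safe_cast only compiles when every source value fits in the destination; for example:

    #include <cstdint>

    constexpr std::uint32_t small = 7;
    constexpr auto wide = xrpl::safe_cast<std::uint64_t>(small);  // widening, unsigned to unsigned
    static_assert(wide == 7);
    // xrpl::safe_cast<std::int32_t>(small) would fail: int32_t cannot hold every uint32_t value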

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_SCOPE_H_INCLUDED
#define XRPL_BASICS_SCOPE_H_INCLUDED
#pragma once
#include <xrpl/beast/utility/instrumentation.h>
@@ -37,7 +36,8 @@ public:
scope_exit(scope_exit&& rhs) noexcept(
std::is_nothrow_move_constructible_v<EF> || std::is_nothrow_copy_constructible_v<EF>)
: exit_function_{std::forward<EF>(rhs.exit_function_)}, execute_on_destruction_{rhs.execute_on_destruction_}
: exit_function_{std::forward<EF>(rhs.exit_function_)}
, execute_on_destruction_{rhs.execute_on_destruction_}
{
rhs.release();
}
@@ -48,8 +48,9 @@ public:
template <class EFP>
explicit scope_exit(
EFP&& f,
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_exit> && std::is_constructible_v<EF, EFP>>* =
0) noexcept
std::enable_if_t<
!std::is_same_v<std::remove_cv_t<EFP>, scope_exit> &&
std::is_constructible_v<EF, EFP>>* = 0) noexcept
: exit_function_{std::forward<EFP>(f)}
{
static_assert(std::is_nothrow_constructible_v<EF, decltype(std::forward<EFP>(f))>);
@@ -94,8 +95,9 @@ public:
template <class EFP>
explicit scope_fail(
EFP&& f,
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_fail> && std::is_constructible_v<EF, EFP>>* =
0) noexcept
std::enable_if_t<
!std::is_same_v<std::remove_cv_t<EFP>, scope_fail> &&
std::is_constructible_v<EF, EFP>>* = 0) noexcept
: exit_function_{std::forward<EFP>(f)}
{
static_assert(std::is_nothrow_constructible_v<EF, decltype(std::forward<EFP>(f))>);
@@ -140,7 +142,9 @@ public:
template <class EFP>
explicit scope_success(
EFP&& f,
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_success> && std::is_constructible_v<EF, EFP>>* =
std::enable_if_t<
!std::is_same_v<std::remove_cv_t<EFP>, scope_success> &&
std::is_constructible_v<EF, EFP>>* =
0) noexcept(std::is_nothrow_constructible_v<EF, EFP> || std::is_nothrow_constructible_v<EF, EFP&>)
: exit_function_{std::forward<EFP>(f)}
{
@@ -220,5 +224,3 @@ template <class Mutex>
scope_unlock(std::unique_lock<Mutex>&) -> scope_unlock<Mutex>;
} // namespace xrpl
#endif
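
scope_exit above follows the usual scope-guard design (store a callable, run it on destruction unless released); a minimal usage sketch, assuming the customary deduction guide is present:

    #include <cstdio>

    void
    writeChecked(std::FILE* f)
    {
        xrpl::scope_exit cleanup{[&] { std::fclose(f); }};  // runs on every exit path
        // ... work that may return early or throw ...
        // cleanup.release();  // call instead if the file should stay open past this scope
    }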

View File

@@ -1,7 +1,6 @@
// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>
#ifndef XRPL_BASICS_SPINLOCK_H_INCLUDED
#define XRPL_BASICS_SPINLOCK_H_INCLUDED
#pragma once
#include <xrpl/beast/utility/instrumentation.h>
@@ -100,9 +99,12 @@ public:
@note For performance reasons, you should strive to have `lock` be
on a cacheline by itself.
*/
packed_spinlock(std::atomic<T>& lock, int index) : bits_(lock), mask_(static_cast<T>(1) << index)
packed_spinlock(std::atomic<T>& lock, int index)
: bits_(lock), mask_(static_cast<T>(1) << index)
{
XRPL_ASSERT(index >= 0 && (mask_ != 0), "xrpl::packed_spinlock::packed_spinlock : valid index and mask");
XRPL_ASSERT(
index >= 0 && (mask_ != 0),
"xrpl::packed_spinlock::packed_spinlock : valid index and mask");
}
[[nodiscard]] bool
@@ -175,7 +177,10 @@ public:
T expected = 0;
return lock_.compare_exchange_weak(
expected, std::numeric_limits<T>::max(), std::memory_order_acquire, std::memory_order_relaxed);
expected,
std::numeric_limits<T>::max(),
std::memory_order_acquire,
std::memory_order_relaxed);
}
void
@@ -201,5 +206,3 @@ public:
/** @} */
} // namespace xrpl
#endif
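
packed_spinlock above carves one lock bit per index out of a shared atomic word; a reduced sketch of the acquire step (not the class's actual member functions):

    #include <atomic>
    #include <cstdint>

    bool
    tryLockBit(std::atomic<std::uint64_t>& bits, int index)
    {
        std::uint64_t const mask = std::uint64_t{1} << index;
        // fetch_or returns the previous value: if our bit was clear, we now own that slot.
        return (bits.fetch_or(mask, std::memory_order_acquire) & mask) == 0;
    }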

View File

@@ -1,5 +1,4 @@
#ifndef XRPL_BASICS_STRHEX_H_INCLUDED
#define XRPL_BASICS_STRHEX_H_INCLUDED
#pragma once
#include <boost/algorithm/hex.hpp>
#include <boost/endian/conversion.hpp>
@@ -11,7 +10,9 @@ std::string
strHex(FwdIt begin, FwdIt end)
{
static_assert(
std::is_convertible<typename std::iterator_traits<FwdIt>::iterator_category, std::forward_iterator_tag>::value,
std::is_convertible<
typename std::iterator_traits<FwdIt>::iterator_category,
std::forward_iterator_tag>::value,
"FwdIt must be a forward iterator");
std::string result;
result.reserve(2 * std::distance(begin, end));
@@ -27,5 +28,3 @@ strHex(T const& from)
}
} // namespace xrpl
#endif
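
strHex above reserves two output characters per byte and, given the boost/algorithm/hex.hpp include, presumably delegates to boost::algorithm::hex; used directly that looks like:

    #include <boost/algorithm/hex.hpp>
    #include <cstdint>
    #include <iterator>
    #include <string>
    #include <vector>

    std::string
    toHex(std::vector<std::uint8_t> const& v)
    {
        std::string out;
        out.reserve(2 * v.size());
        boost::algorithm::hex(v.begin(), v.end(), std::back_inserter(out));  // uppercase digits
        return out;
    }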

View File

@@ -1,7 +1,6 @@
// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>
#ifndef BEAST_UTILITY_TAGGED_INTEGER_H_INCLUDED
#define BEAST_UTILITY_TAGGED_INTEGER_H_INCLUDED
#pragma once
#include <xrpl/beast/hash/hash_append.h>
@@ -24,14 +23,15 @@ namespace xrpl {
allowed arithmetic operations.
*/
template <class Int, class Tag>
class tagged_integer
: boost::totally_ordered<
tagged_integer<Int, Tag>,
boost::integer_arithmetic<
tagged_integer<Int, Tag>,
boost::bitwise<
tagged_integer<Int, Tag>,
boost::unit_steppable<tagged_integer<Int, Tag>, boost::shiftable<tagged_integer<Int, Tag>>>>>>
class tagged_integer : boost::totally_ordered<
tagged_integer<Int, Tag>,
boost::integer_arithmetic<
tagged_integer<Int, Tag>,
boost::bitwise<
tagged_integer<Int, Tag>,
boost::unit_steppable<
tagged_integer<Int, Tag>,
boost::shiftable<tagged_integer<Int, Tag>>>>>>
{
private:
Int m_value;
@@ -44,7 +44,8 @@ public:
template <
class OtherInt,
class = typename std::enable_if<std::is_integral<OtherInt>::value && sizeof(OtherInt) <= sizeof(Int)>::type>
class = typename std::enable_if<
std::is_integral<OtherInt>::value && sizeof(OtherInt) <= sizeof(Int)>::type>
explicit constexpr tagged_integer(OtherInt value) noexcept : m_value(value)
{
static_assert(sizeof(tagged_integer) == sizeof(Int), "tagged_integer is adding padding");
@@ -202,4 +203,3 @@ struct is_contiguously_hashable<xrpl::tagged_integer<Int, Tag>, HashAlgorithm>
};
} // namespace beast
#endif

View File

@@ -1,5 +1,4 @@
#ifndef BEAST_ASIO_IO_LATENCY_PROBE_H_INCLUDED
#define BEAST_ASIO_IO_LATENCY_PROBE_H_INCLUDED
#pragma once
#include <xrpl/beast/utility/instrumentation.h>
@@ -87,7 +86,8 @@ public:
std::lock_guard lock(m_mutex);
if (m_cancel)
throw std::logic_error("io_latency_probe is canceled");
boost::asio::post(m_ios, sample_op<Handler>(std::forward<Handler>(handler), Clock::now(), false, this));
boost::asio::post(
m_ios, sample_op<Handler>(std::forward<Handler>(handler), Clock::now(), false, this));
}
/** Initiate continuous i/o latency sampling.
@@ -101,7 +101,8 @@ public:
std::lock_guard lock(m_mutex);
if (m_cancel)
throw std::logic_error("io_latency_probe is canceled");
boost::asio::post(m_ios, sample_op<Handler>(std::forward<Handler>(handler), Clock::now(), true, this));
boost::asio::post(
m_ios, sample_op<Handler>(std::forward<Handler>(handler), Clock::now(), true, this));
}
private:
@@ -141,7 +142,11 @@ private:
bool m_repeat;
io_latency_probe* m_probe;
sample_op(Handler const& handler, time_point const& start, bool repeat, io_latency_probe* probe)
sample_op(
Handler const& handler,
time_point const& start,
bool repeat,
io_latency_probe* probe)
: m_handler(handler), m_start(start), m_repeat(repeat), m_probe(probe)
{
XRPL_ASSERT(
@@ -204,12 +209,14 @@ private:
// The latency is too high to maintain the desired
// period so don't bother with a timer.
//
boost::asio::post(m_probe->m_ios, sample_op<Handler>(m_handler, now, m_repeat, m_probe));
boost::asio::post(
m_probe->m_ios, sample_op<Handler>(m_handler, now, m_repeat, m_probe));
}
else
{
m_probe->m_timer.expires_after(when - now);
m_probe->m_timer.async_wait(sample_op<Handler>(m_handler, now, m_repeat, m_probe));
m_probe->m_timer.async_wait(
sample_op<Handler>(m_handler, now, m_repeat, m_probe));
}
}
}
@@ -220,11 +227,10 @@ private:
if (!m_probe)
return;
typename Clock::time_point const now(Clock::now());
boost::asio::post(m_probe->m_ios, sample_op<Handler>(m_handler, now, m_repeat, m_probe));
boost::asio::post(
m_probe->m_ios, sample_op<Handler>(m_handler, now, m_repeat, m_probe));
}
};
};
} // namespace beast
#endif
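
io_latency_probe above times how long a posted handler waits before the io_context runs it; the core measurement, stripped of the repeat/timer machinery:

    #include <boost/asio.hpp>
    #include <chrono>

    std::chrono::steady_clock::duration
    measurePostLatency(boost::asio::io_context& ioc)
    {
        auto latency = std::chrono::steady_clock::duration{};
        auto const start = std::chrono::steady_clock::now();
        boost::asio::post(ioc, [&] { latency = std::chrono::steady_clock::now() - start; });
        ioc.run();      // drive the handler here; a real server's run loop lives elsewhere
        ioc.restart();  // allow this context to be run again later
        return latency;
    }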

View File

@@ -1,5 +1,4 @@
#ifndef BEAST_CHRONO_ABSTRACT_CLOCK_H_INCLUDED
#define BEAST_CHRONO_ABSTRACT_CLOCK_H_INCLUDED
#pragma once
namespace beast {
@@ -89,5 +88,3 @@ get_abstract_clock()
}
} // namespace beast
#endif

View File

@@ -1,5 +1,4 @@
#ifndef BEAST_CHRONO_BASIC_SECONDS_CLOCK_H_INCLUDED
#define BEAST_CHRONO_BASIC_SECONDS_CLOCK_H_INCLUDED
#pragma once
#include <chrono>
@@ -33,5 +32,3 @@ public:
};
} // namespace beast
#endif

Some files were not shown because too many files have changed in this diff.