Compare commits

..

4 Commits

Author           SHA1        Message                         Date
Michael Legleux  ab3382b116  rm patch                        2026-02-11 00:51:41 -08:00
Michael Legleux  6153e2fa11  build image too                 2026-02-11 00:47:34 -08:00
Michael Legleux  aab3305abe  use current generate.py         2026-02-03 15:19:59 -08:00
Michael Legleux  66dbf460b4  feat: Build packages in GitHub  2026-02-03 15:14:52 -08:00
1310 changed files with 20064 additions and 32162 deletions

View File

@@ -37,7 +37,7 @@ BinPackParameters: false
BreakBeforeBinaryOperators: false
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true
ColumnLimit: 100
ColumnLimit: 120
CommentPragmas: "^ IWYU pragma:"
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4

View File

@@ -1,191 +0,0 @@
---
Checks: "-*,
bugprone-argument-comment
"
# bugprone-assert-side-effect,
# bugprone-bad-signal-to-kill-thread,
# bugprone-bool-pointer-implicit-conversion,
# bugprone-casting-through-void,
# bugprone-chained-comparison,
# bugprone-compare-pointer-to-member-virtual-function,
# bugprone-copy-constructor-init,
# bugprone-crtp-constructor-accessibility,
# bugprone-dangling-handle,
# bugprone-dynamic-static-initializers,
# bugprone-empty-catch,
# bugprone-fold-init-type,
# bugprone-forward-declaration-namespace,
# bugprone-inaccurate-erase,
# bugprone-inc-dec-in-conditions,
# bugprone-incorrect-enable-if,
# bugprone-incorrect-roundings,
# bugprone-infinite-loop,
# bugprone-integer-division,
# bugprone-lambda-function-name,
# bugprone-macro-parentheses,
# bugprone-macro-repeated-side-effects,
# bugprone-misplaced-operator-in-strlen-in-alloc,
# bugprone-misplaced-pointer-arithmetic-in-alloc,
# bugprone-misplaced-widening-cast,
# bugprone-move-forwarding-reference,
# bugprone-multi-level-implicit-pointer-conversion,
# bugprone-multiple-new-in-one-expression,
# bugprone-multiple-statement-macro,
# bugprone-no-escape,
# bugprone-non-zero-enum-to-bool-conversion,
# bugprone-optional-value-conversion,
# bugprone-parent-virtual-call,
# bugprone-pointer-arithmetic-on-polymorphic-object,
# bugprone-posix-return,
# bugprone-redundant-branch-condition,
# bugprone-reserved-identifier,
# bugprone-return-const-ref-from-parameter,
# bugprone-shared-ptr-array-mismatch,
# bugprone-signal-handler,
# bugprone-signed-char-misuse,
# bugprone-sizeof-container,
# bugprone-sizeof-expression,
# bugprone-spuriously-wake-up-functions,
# bugprone-standalone-empty,
# bugprone-string-constructor,
# bugprone-string-integer-assignment,
# bugprone-string-literal-with-embedded-nul,
# bugprone-stringview-nullptr,
# bugprone-suspicious-enum-usage,
# bugprone-suspicious-include,
# bugprone-suspicious-memory-comparison,
# bugprone-suspicious-memset-usage,
# bugprone-suspicious-missing-comma,
# bugprone-suspicious-realloc-usage,
# bugprone-suspicious-semicolon,
# bugprone-suspicious-string-compare,
# bugprone-suspicious-stringview-data-usage,
# bugprone-swapped-arguments,
# bugprone-switch-missing-default-case,
# bugprone-terminating-continue,
# bugprone-throw-keyword-missing,
# bugprone-too-small-loop-variable,
# bugprone-undefined-memory-manipulation,
# bugprone-undelegated-constructor,
# bugprone-unhandled-exception-at-new,
# bugprone-unhandled-self-assignment,
# bugprone-unique-ptr-array-mismatch,
# bugprone-unsafe-functions,
# bugprone-unused-local-non-trivial-variable,
# bugprone-unused-raii,
# bugprone-unused-return-value,
# bugprone-use-after-move,
# bugprone-virtual-near-miss,
# cppcoreguidelines-init-variables,
# cppcoreguidelines-misleading-capture-default-by-value,
# cppcoreguidelines-no-suspend-with-lock,
# cppcoreguidelines-pro-type-member-init,
# cppcoreguidelines-pro-type-static-cast-downcast,
# cppcoreguidelines-rvalue-reference-param-not-moved,
# cppcoreguidelines-use-default-member-init,
# cppcoreguidelines-virtual-class-destructor,
# hicpp-ignored-remove-result,
# llvm-namespace-comment,
# misc-const-correctness,
# misc-definitions-in-headers,
# misc-header-include-cycle,
# misc-include-cleaner,
# misc-misplaced-const,
# misc-redundant-expression,
# misc-static-assert,
# misc-throw-by-value-catch-by-reference,
# misc-unused-alias-decls,
# misc-unused-using-decls,
# modernize-concat-nested-namespaces,
# modernize-deprecated-headers,
# modernize-make-shared,
# modernize-make-unique,
# modernize-pass-by-value,
# modernize-type-traits,
# modernize-use-designated-initializers,
# modernize-use-emplace,
# modernize-use-equals-default,
# modernize-use-equals-delete,
# modernize-use-override,
# modernize-use-ranges,
# modernize-use-starts-ends-with,
# modernize-use-std-numbers,
# modernize-use-using,
# performance-faster-string-find,
# performance-for-range-copy,
# performance-implicit-conversion-in-loop,
# performance-inefficient-vector-operation,
# performance-move-const-arg,
# performance-move-constructor-init,
# performance-no-automatic-move,
# performance-trivially-destructible,
# readability-avoid-nested-conditional-operator,
# readability-avoid-return-with-void-value,
# readability-braces-around-statements,
# readability-const-return-type,
# readability-container-contains,
# readability-container-size-empty,
# readability-convert-member-functions-to-static,
# readability-duplicate-include,
# readability-else-after-return,
# readability-enum-initial-value,
# readability-implicit-bool-conversion,
# readability-inconsistent-declaration-parameter-name,
# readability-identifier-naming,
# readability-make-member-function-const,
# readability-math-missing-parentheses,
# readability-misleading-indentation,
# readability-non-const-parameter,
# readability-redundant-casting,
# readability-redundant-declaration,
# readability-redundant-inline-specifier,
# readability-redundant-member-init,
# readability-redundant-string-init,
# readability-reference-to-constructed-temporary,
# readability-simplify-boolean-expr,
# readability-static-accessed-through-instance,
# readability-static-definition-in-anonymous-namespace,
# readability-suspicious-call-argument,
# readability-use-std-min-max
#
# CheckOptions:
# readability-braces-around-statements.ShortStatementLines: 2
# readability-identifier-naming.MacroDefinitionCase: UPPER_CASE
# readability-identifier-naming.ClassCase: CamelCase
# readability-identifier-naming.StructCase: CamelCase
# readability-identifier-naming.UnionCase: CamelCase
# readability-identifier-naming.EnumCase: CamelCase
# readability-identifier-naming.EnumConstantCase: CamelCase
# readability-identifier-naming.ScopedEnumConstantCase: CamelCase
# readability-identifier-naming.GlobalConstantCase: UPPER_CASE
# readability-identifier-naming.GlobalConstantPrefix: "k"
# readability-identifier-naming.GlobalVariableCase: CamelCase
# readability-identifier-naming.GlobalVariablePrefix: "g"
# readability-identifier-naming.ConstexprFunctionCase: camelBack
# readability-identifier-naming.ConstexprMethodCase: camelBack
# readability-identifier-naming.ClassMethodCase: camelBack
# readability-identifier-naming.ClassMemberCase: camelBack
# readability-identifier-naming.ClassConstantCase: UPPER_CASE
# readability-identifier-naming.ClassConstantPrefix: "k"
# readability-identifier-naming.StaticConstantCase: UPPER_CASE
# readability-identifier-naming.StaticConstantPrefix: "k"
# readability-identifier-naming.StaticVariableCase: UPPER_CASE
# readability-identifier-naming.StaticVariablePrefix: "k"
# readability-identifier-naming.ConstexprVariableCase: UPPER_CASE
# readability-identifier-naming.ConstexprVariablePrefix: "k"
# readability-identifier-naming.LocalConstantCase: camelBack
# readability-identifier-naming.LocalVariableCase: camelBack
# readability-identifier-naming.TemplateParameterCase: CamelCase
# readability-identifier-naming.ParameterCase: camelBack
# readability-identifier-naming.FunctionCase: camelBack
# readability-identifier-naming.MemberCase: camelBack
# readability-identifier-naming.PrivateMemberSuffix: _
# readability-identifier-naming.ProtectedMemberSuffix: _
# readability-identifier-naming.PublicMemberSuffix: ""
# readability-identifier-naming.FunctionIgnoredRegexp: ".*tag_invoke.*"
# bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
# bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
# misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h;__chrono/.*;fmt/chrono.h;boost/uuid/uuid_hash.hpp'
#
# HeaderFilterRegex: '^.*/(src|tests)/.*\.(h|hpp)$'
WarningsAsErrors: "*"

View File

@@ -29,7 +29,7 @@ format:
disable: false
_help_line_width:
- How wide to allow formatted cmake files
line_width: 100
line_width: 120
_help_tab_size:
- How many spaces to tab for indent
tab_size: 4

View File

@@ -1,14 +1,14 @@
ignorePaths:
- build/**
- src/libxrpl/crypto
- src/test/** # Will be removed in the future
- CMakeUserPresets.json
- Doxyfile
- docs/**/*.puml
- cmake/**
- LICENSE.md
- .clang-tidy
language: en
allowCompoundWords: true # TODO (#6334)
allowCompoundWords: true
ignoreRandomStrings: true
minWordLength: 5
dictionaries:
@@ -16,29 +16,20 @@ dictionaries:
- en_US
- en_GB
ignoreRegExpList:
- /\b[rs][1-9A-HJ-NP-Za-km-z]{25,34}/g # addresses and seeds
- /\bC[A-Z0-9]{15}/g # CTIDs
- /\b(XRPL|BEAST)_[A-Z_0-9]+_H_INCLUDED+/g # include guards
- /\b(XRPL|BEAST)_[A-Z_0-9]+_H+/g # include guards
- /[rs][1-9A-HJ-NP-Za-km-z]{25,34}/g # addresses and seeds
- /(XRPL|BEAST)_[A-Z_0-9]+_H_INCLUDED+/g # include guards
- /(XRPL|BEAST)_[A-Z_0-9]+_H+/g # include guards
- /::[a-z:_]+/g # things from other namespaces
- /\blib[a-z]+/g # libraries
- /\b[0-9]{4}-[0-9]{2}-[0-9]{2}[,:][A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright dates
- /\b[0-9]{4}[,:]?\s*[A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright years
- /lib[a-z]+/g # libraries
- /[0-9]{4}-[0-9]{2}-[0-9]{2}[,:][A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright dates
- /[0-9]{4}[,:]?\s*[A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright years
- /\[[A-Za-z0-9-]+\]\(https:\/\/github.com\/[A-Za-z0-9-]+\)/g # Github usernames
- /-[DWw][a-zA-Z0-9_-]+=/g # compile flags
- /[\['"`]-[DWw][a-zA-Z0-9_-]+['"`\]]/g # compile flags
- ABCDEFGHIJKLMNOPQRSTUVWXYZ
- ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
overrides:
- filename: "**/*_test.cpp" # all test files
ignoreRegExpList:
- /"[^"]*"/g # double-quoted strings
- /'[^']*'/g # single-quoted strings
- /`[^`]*`/g # backtick strings
suggestWords:
- xprl->xrpl
- xprld->xrpld # cspell: disable-line not sure what this problem is....
- unsynched->unsynced # cspell: disable-line not sure what this problem is....
- xprld->xrpld
- unsynched->unsynced
- synched->synced
- synch->sync
words:
@@ -60,7 +51,6 @@ words:
- Britto
- Btrfs
- canonicality
- changespq
- checkme
- choco
- chrono
@@ -116,14 +106,12 @@ words:
- inequation
- insuf
- insuff
- invasively
- iou
- ious
- isrdc
- itype
- jemalloc
- jlog
- jtnofill
- keylet
- keylets
- keyvadb
@@ -150,7 +138,6 @@ words:
- Metafuncton
- misprediction
- mptbalance
- MPTDEX
- mptflags
- mptid
- mptissuance
@@ -160,7 +147,6 @@ words:
- mptokenissuance
- mptokens
- mpts
- mtgox
- multisig
- multisign
- multisigned
@@ -188,7 +174,6 @@ words:
- perminute
- permissioned
- pointee
- populator
- preauth
- preauthorization
- preauthorize
@@ -197,7 +182,6 @@ words:
- protobuf
- protos
- ptrs
- pushd
- pyenv
- qalloc
- queuable
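
The cspell change above drops the `\b` word-boundary anchors from several `ignoreRegExpList` patterns. A quick way to sanity-check such a pattern is to run it through Python's `re` module; the sample line below is purely illustrative.

```python
import re

# Pattern from the cspell ignoreRegExpList above (post-change form, without \b):
# it is meant to skip base58-looking XRPL addresses and seeds during spell checking.
address_or_seed = re.compile(r"[rs][1-9A-HJ-NP-Za-km-z]{25,34}")

# Illustrative sample line; the address-like token is an example, not a real account.
line = "funded rN7n7otQDd6FczFgLdSqtcsAUxDkw6fzRH with 1000 XRP"

match = address_or_seed.search(line)
print(match.group(0) if match else "no match")
```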

8
.github/CODEOWNERS vendored Normal file
View File

@@ -0,0 +1,8 @@
# Allow anyone to review any change by default.
*
# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers

View File

@@ -1,56 +0,0 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/build-deps/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/generate-version/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/print-env/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/setup-conan/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

View File

@@ -4,11 +4,14 @@ Loop: test.jtx test.toplevel
Loop: test.jtx test.unit_test
test.unit_test == test.jtx
Loop: xrpld.app xrpld.core
xrpld.app > xrpld.core
Loop: xrpld.app xrpld.overlay
xrpld.overlay ~= xrpld.app
xrpld.overlay > xrpld.app
Loop: xrpld.app xrpld.peerfinder
xrpld.peerfinder == xrpld.app
xrpld.peerfinder ~= xrpld.app
Loop: xrpld.app xrpld.rpc
xrpld.rpc > xrpld.app

View File

@@ -1,6 +1,4 @@
libxrpl.basics > xrpl.basics
libxrpl.conditions > xrpl.basics
libxrpl.conditions > xrpl.conditions
libxrpl.core > xrpl.basics
libxrpl.core > xrpl.core
libxrpl.crypto > xrpl.basics
@@ -19,27 +17,16 @@ libxrpl.nodestore > xrpl.protocol
libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol
libxrpl.rdb > xrpl.basics
libxrpl.rdb > xrpl.rdb
libxrpl.resource > xrpl.basics
libxrpl.resource > xrpl.json
libxrpl.resource > xrpl.resource
libxrpl.server > xrpl.basics
libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.rdb
libxrpl.server > xrpl.server
libxrpl.shamap > xrpl.basics
libxrpl.shamap > xrpl.protocol
libxrpl.shamap > xrpl.shamap
libxrpl.tx > xrpl.basics
libxrpl.tx > xrpl.conditions
libxrpl.tx > xrpl.core
libxrpl.tx > xrpl.json
libxrpl.tx > xrpl.ledger
libxrpl.tx > xrpl.protocol
libxrpl.tx > xrpl.server
libxrpl.tx > xrpl.tx
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
@@ -54,10 +41,7 @@ test.app > xrpl.json
test.app > xrpl.ledger
test.app > xrpl.nodestore
test.app > xrpl.protocol
test.app > xrpl.rdb
test.app > xrpl.resource
test.app > xrpl.server
test.app > xrpl.tx
test.basics > test.jtx
test.basics > test.unit_test
test.basics > xrpl.basics
@@ -67,7 +51,7 @@ test.basics > xrpl.json
test.basics > xrpl.protocol
test.beast > xrpl.basics
test.conditions > xrpl.basics
test.conditions > xrpl.conditions
test.conditions > xrpld.conditions
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
@@ -76,7 +60,6 @@ test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpl.json
test.consensus > xrpl.ledger
test.consensus > xrpl.tx
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
@@ -84,7 +67,6 @@ test.core > xrpl.basics
test.core > xrpl.core
test.core > xrpld.core
test.core > xrpl.json
test.core > xrpl.rdb
test.core > xrpl.server
test.csf > xrpl.basics
test.csf > xrpld.consensus
@@ -93,7 +75,6 @@ test.csf > xrpl.protocol
test.json > test.jtx
test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpl.core
test.jtx > xrpld.app
test.jtx > xrpld.core
test.jtx > xrpld.rpc
@@ -103,7 +84,6 @@ test.jtx > xrpl.net
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
test.jtx > xrpl.server
test.jtx > xrpl.tx
test.ledger > test.jtx
test.ledger > test.toplevel
test.ledger > xrpl.basics
@@ -115,8 +95,8 @@ test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.nodestore > xrpl.basics
test.nodestore > xrpld.core
test.nodestore > xrpl.nodestore
test.nodestore > xrpl.rdb
test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test
@@ -149,11 +129,8 @@ test.rpc > xrpld.core
test.rpc > xrpld.overlay
test.rpc > xrpld.rpc
test.rpc > xrpl.json
test.rpc > xrpl.ledger
test.rpc > xrpl.protocol
test.rpc > xrpl.resource
test.rpc > xrpl.server
test.rpc > xrpl.tx
test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test
@@ -174,57 +151,39 @@ test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.net
xrpl.conditions > xrpl.basics
xrpl.conditions > xrpl.protocol
xrpl.core > xrpl.basics
xrpl.core > xrpl.json
xrpl.core > xrpl.ledger
xrpl.core > xrpl.protocol
xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics
xrpl.ledger > xrpl.protocol
xrpl.ledger > xrpl.server
xrpl.ledger > xrpl.shamap
xrpl.net > xrpl.basics
xrpl.nodestore > xrpl.basics
xrpl.nodestore > xrpl.protocol
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
xrpl.rdb > xrpl.basics
xrpl.rdb > xrpl.core
xrpl.rdb > xrpl.protocol
xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol
xrpl.server > xrpl.basics
xrpl.server > xrpl.core
xrpl.server > xrpl.json
xrpl.server > xrpl.protocol
xrpl.server > xrpl.rdb
xrpl.server > xrpl.resource
xrpl.server > xrpl.shamap
xrpl.shamap > xrpl.basics
xrpl.shamap > xrpl.nodestore
xrpl.shamap > xrpl.protocol
xrpl.tx > xrpl.basics
xrpl.tx > xrpl.core
xrpl.tx > xrpl.ledger
xrpl.tx > xrpl.protocol
xrpld.app > test.unit_test
xrpld.app > xrpl.basics
xrpld.app > xrpl.core
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus
xrpld.app > xrpld.core
xrpld.app > xrpl.json
xrpld.app > xrpl.ledger
xrpld.app > xrpl.net
xrpld.app > xrpl.nodestore
xrpld.app > xrpl.protocol
xrpld.app > xrpl.rdb
xrpld.app > xrpl.resource
xrpld.app > xrpl.server
xrpld.app > xrpl.shamap
xrpld.app > xrpl.tx
xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.protocol
@@ -233,21 +192,17 @@ xrpld.core > xrpl.core
xrpld.core > xrpl.json
xrpld.core > xrpl.net
xrpld.core > xrpl.protocol
xrpld.core > xrpl.rdb
xrpld.overlay > xrpl.basics
xrpld.overlay > xrpl.core
xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.rdb
xrpld.overlay > xrpl.resource
xrpld.overlay > xrpl.server
xrpld.overlay > xrpl.tx
xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol
xrpld.peerfinder > xrpl.rdb
xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.core
xrpld.perflog > xrpld.rpc
@@ -260,8 +215,6 @@ xrpld.rpc > xrpl.ledger
xrpld.rpc > xrpl.net
xrpld.rpc > xrpl.nodestore
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.rdb
xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server
xrpld.rpc > xrpl.tx
xrpld.shamap > xrpl.shamap

View File

@@ -1,30 +0,0 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# This script checks whether there are no new include guards introduced by a new
# PR, as header files should use "#pragma once" instead. The script assumes any
# include guards will use "XRPL_" as prefix.
# Usage: .github/scripts/rename/include.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" \) | while read -r FILE; do
echo "Processing file: ${FILE}"
if grep -q "#ifndef XRPL_" "${FILE}"; then
echo "Please replace all include guards by #pragma once."
exit 1
fi
done
echo "Checking complete."

View File

@@ -51,20 +51,22 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# Only generate a subset of configurations in PRs.
if not all:
# Debian:
# - Bookworm using GCC 13: Release on linux/amd64, set the reference
# fee to 500.
# - Bookworm using GCC 15: Debug on linux/amd64, enable code
# coverage (which will be done below).
# - Bookworm using Clang 16: Debug on linux/arm64, enable voidstar.
# - Bookworm using Clang 17: Release on linux/amd64, set the
# reference fee to 1000.
# - Bookworm using Clang 20: Debug on linux/amd64.
# - Bookworm using GCC 13: Release and Unity on linux/amd64, set
# the reference fee to 500.
# - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
# code coverage (which will be done below).
# - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
# enable voidstar.
# - Bookworm using Clang 17: Release and no Unity on linux/amd64,
# set the reference fee to 1000.
# - Bookworm using Clang 20: Debug and Unity on linux/amd64.
if os["distro_name"] == "debian":
skip = True
if os["distro_version"] == "bookworm":
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13"
and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}"
@@ -72,12 +74,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/arm64"
):
cmake_args = f"-Dvoidstar=ON {cmake_args}"
@@ -85,6 +89,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17"
and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}"
@@ -92,6 +97,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
and build_type == "Debug"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
@@ -99,14 +105,15 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue
# RHEL:
# - 9 using GCC 12: Debug on linux/amd64.
# - 10 using Clang: Release on linux/amd64.
# - 9 using GCC 12: Debug and Unity on linux/amd64.
# - 10 using Clang: Release and no Unity on linux/amd64.
if os["distro_name"] == "rhel":
skip = True
if os["distro_version"] == "9":
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
@@ -114,6 +121,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any"
and build_type == "Release"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
@@ -121,16 +129,17 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue
# Ubuntu:
# - Jammy using GCC 12: Debug on linux/arm64.
# - Noble using GCC 14: Release on linux/amd64.
# - Noble using Clang 18: Debug on linux/amd64.
# - Noble using Clang 19: Release on linux/arm64.
# - Jammy using GCC 12: Debug and no Unity on linux/arm64.
# - Noble using GCC 14: Release and Unity on linux/amd64.
# - Noble using Clang 18: Debug and no Unity on linux/amd64.
# - Noble using Clang 19: Release and Unity on linux/arm64.
if os["distro_name"] == "ubuntu":
skip = True
if os["distro_version"] == "jammy":
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/arm64"
):
skip = False
@@ -138,18 +147,21 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14"
and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64"
):
skip = False
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19"
and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/arm64"
):
skip = False
@@ -157,16 +169,20 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue
# MacOS:
# - Debug on macos/arm64.
# - Debug and no Unity on macos/arm64.
if os["distro_name"] == "macos" and not (
build_type == "Debug" and architecture["platform"] == "macos/arm64"
build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "macos/arm64"
):
continue
# Windows:
# - Release on windows/amd64.
# - Release and Unity on windows/amd64.
if os["distro_name"] == "windows" and not (
build_type == "Release" and architecture["platform"] == "windows/amd64"
build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "windows/amd64"
):
continue
@@ -193,28 +209,18 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
):
continue
# Enable code coverage for Debian Bookworm using GCC 15 in Debug on
# linux/amd64
# Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
# Unity on linux/amd64
if (
f"{os['distro_name']}-{os['distro_version']}" == "debian-bookworm"
and f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"{cmake_args} -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0"
# Enable unity build for Ubuntu Jammy using GCC 12 in Debug on
# linux/amd64.
if (
f"{os['distro_name']}-{os['distro_version']}" == "ubuntu-jammy"
and f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug"
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"{cmake_args} -Dunity=ON"
cmake_args = f"-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}"
# Generate a unique name for the configuration, e.g. macos-arm64-debug
# or debian-bookworm-gcc-12-amd64-release.
# or debian-bookworm-gcc-12-amd64-release-unity.
config_name = os["distro_name"]
if (n := os["distro_version"]) != "":
config_name += f"-{n}"
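
The `generate.py` hunks above add a unity dimension to the PR-subset selection: every whitelisted combination now also pins `-Dunity=ON` or `-Dunity=OFF`. Stripped to its essentials, the pattern is a walk over the full matrix product that keeps only named combinations; the sketch below uses simplified, hypothetical field names and shows just one of the Debian rules from the diff.

```python
from itertools import product

# Simplified sketch of the PR-subset selection in generate.py: walk the full
# matrix product and keep only explicitly whitelisted combinations. The rule
# shown is the Debian Bookworm / gcc-13 / Release / unity / amd64 case above.
def pr_subset(oses, build_types, cmake_args_list, platforms):
    for os_, build_type, cmake_args, platform in product(
        oses, build_types, cmake_args_list, platforms
    ):
        keep = (
            os_ == "debian-bookworm-gcc-13"
            and build_type == "Release"
            and "-Dunity=ON" in cmake_args
            and platform == "linux/amd64"
        )
        if keep:
            # The real script also prepends extra flags for some combinations,
            # e.g. the reference-fee override shown in the diff.
            yield os_, build_type, f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}", platform

configs = list(
    pr_subset(
        ["debian-bookworm-gcc-13", "debian-bookworm-gcc-15"],
        ["Debug", "Release"],
        ["-Dunity=OFF", "-Dunity=ON"],
        ["linux/amd64", "linux/arm64"],
    )
)
print(configs)
```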

View File

@@ -17,196 +17,13 @@
"compiler_version": "12",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "15",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "16",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "17",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "18",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "19",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "20",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "trixie",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "trixie",
"compiler_name": "gcc",
"compiler_version": "15",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "trixie",
"compiler_name": "clang",
"compiler_version": "20",
"image_sha": "ab4d1f0"
},
{
"distro_name": "debian",
"distro_version": "trixie",
"compiler_name": "clang",
"compiler_version": "21",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "8",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "8",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "12",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "10",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "rhel",
"distro_version": "10",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "jammy",
"compiler_name": "gcc",
"compiler_version": "12",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "16",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "17",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "18",
"image_sha": "ab4d1f0"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "19",
"image_sha": "ab4d1f0"
"compiler_version": "12"
}
],
"build_type": ["Debug", "Release"],
"cmake_args": [""]
"cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}

View File

@@ -15,5 +15,8 @@
}
],
"build_type": ["Debug", "Release"],
"cmake_args": ["-DCMAKE_POLICY_VERSION_MINIMUM=3.5"]
"cmake_args": [
"-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
"-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
]
}

View File

@@ -15,5 +15,5 @@
}
],
"build_type": ["Debug", "Release"],
"cmake_args": [""]
"cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}

View File

@@ -1,11 +1,14 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as on MacOS and Windows, on every push to a
# This workflow runs all workflows to check, build, package and test the project on
# various Linux flavors, as well as on macOS and Windows, on every push to a
# user branch. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label. For commits to PRs that target a release branch,
# it also uploads the libxrpl recipe to the Conan remote.
name: PR
on:
push:
branches:
- legleux/build
merge_group:
types:
- checks_requested
@@ -33,7 +36,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Determine changed files
# This step checks whether any files have changed that should
# cause the next jobs to run. We do it this way rather than
@@ -46,7 +49,7 @@ jobs:
# that Github considers any skipped jobs to have passed, and in
# turn the required checks as well.
id: changes
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
# These paths are unique to `on-pr.yml`.
@@ -65,17 +68,18 @@ jobs:
.github/workflows/reusable-build.yml
.github/workflows/reusable-build-test-config.yml
.github/workflows/reusable-build-test.yml
.github/workflows/reusable-clang-tidy.yml
.github/workflows/reusable-clang-tidy-files.yml
.github/workflows/reusable-build-pkg.yml
.github/workflows/reusable-pkg.yml
.github/workflows/reusable-package.yml
.github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml
.github/workflows/reusable-upload-recipe.yml
.clang-tidy
.codecov.yml
cmake/**
conan/**
external/**
include/**
pkgs/**
src/**
tests/**
CMakeLists.txt
@@ -100,80 +104,57 @@ jobs:
outputs:
go: ${{ steps.go.outputs.go == 'true' }}
check-levelization:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-levelization.yml
# check-levelization:
# needs: should-run
# if: ${{ needs.should-run.outputs.go == 'true' }}
# uses: ./.github/workflows/reusable-check-levelization.yml
check-rename:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-rename.yml
# build-test:
# needs: should-run
# if: ${{ needs.should-run.outputs.go == 'true' }}
# uses: ./.github/workflows/reusable-build-test.yml
# strategy:
# matrix:
# os: [linux, macos, windows]
# with:
# os: ${{ matrix.os }}
# secrets:
# CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
clang-tidy:
build-package:
name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-clang-tidy.yml
permissions:
issues: write
contents: read
with:
check_only_changed: true
create_issue_on_failure: false
build-test:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-build-test.yml
uses: ./.github/workflows/reusable-build-pkg.yml
secrets: inherit
strategy:
fail-fast: false
matrix:
os: [linux, macos, windows]
# pkg_type: [rpm]
pkg_type: [deb, rpm]
arch: [amd64]
# arch: [amd64, arm64]
with:
# Enable ccache only for events targeting the XRPLF repository, since
# other accounts will not have access to our remote cache storage.
ccache_enabled: ${{ github.repository_owner == 'XRPLF' }}
os: ${{ matrix.os }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
pkg_type: ${{ matrix.pkg_type }}
arch: ${{ matrix.arch }}
upload-recipe:
needs:
- should-run
- build-test
# Only run when committing to a PR that targets a release branch in the
# XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' && needs.should-run.outputs.go == 'true' && startsWith(github.ref, 'refs/heads/release') }}
uses: ./.github/workflows/reusable-upload-recipe.yml
secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
notify-clio:
needs: upload-recipe
runs-on: ubuntu-latest
steps:
# Notify the Clio repository about the newly proposed release version, so
# it can be checked for compatibility before the release is actually made.
- name: Notify Clio
env:
GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[ref]=${{ needs.upload-recipe.outputs.recipe_ref }}" \
-F "client_payload[pr_url]=${PR_URL}"
# notify-clio:
# needs:
# - should-run
# - build-test
# if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
# uses: ./.github/workflows/reusable-notify-clio.yml
# secrets:
# clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
# conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
# conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
passed:
if: failure() || cancelled()
needs:
- check-levelization
- check-rename
- clang-tidy
- build-test
- upload-recipe
- notify-clio
# - build-test
# - check-levelization
- build-package
runs-on: ubuntu-latest
steps:
- name: Fail

97
.github/workflows/on-trigger.new.yml vendored Normal file
View File

@@ -0,0 +1,97 @@
# This workflow runs all workflows to build and test the code on various Linux
# flavors, as well as on MacOS and Windows, on a scheduled basis, on merge into
# the 'develop' or 'release*' branches, or when requested manually. Upon pushes
# to the develop branch it also uploads the libxrpl recipe to the Conan remote.
name: Trigger
on:
push:
branches:
- legleux/linux_packages
- develop
- release
- master
paths:
# These paths are unique to `on-trigger.yml`.
- ".github/workflows/on-trigger.yml"
# Keep the paths below in sync with those in `on-pr.yml`.
- ".github/actions/build-deps/**"
- ".github/actions/build-test/**"
- ".github/actions/generate-version/**"
- ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**"
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-build-pkg.yml"
- ".github/workflows/reusable-pkg.yml"
- ".github/workflows/reusable-package.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".github/workflows/reusable-upload-recipe.yml"
- ".codecov.yml"
- "cmake/**"
- "conan/**"
- "external/**"
- "include/**"
- "pkgs/**"
- "src/**"
- "tests/**"
- "CMakeLists.txt"
- "conanfile.py"
- "conan.lock"
# Run at 06:32 UTC on every day of the week from Monday through Friday. This
# will force all dependencies to be rebuilt, which is useful to verify that
# all dependencies can be built successfully. Only the dependencies that
# are actually missing from the remote will be uploaded.
schedule:
- cron: "32 6 * * 1-5"
# Run when manually triggered via the GitHub UI or API.
workflow_dispatch:
concurrency:
# When a PR is merged into the develop branch it will be assigned a unique
# group identifier, so execution will continue even if another PR is merged
# while it is still running. In all other cases the group identifier is shared
# per branch, so that any in-progress runs are cancelled when a new commit is
# pushed.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
# check-missing-commits:
# if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
# uses: ./.github/workflows/reusable-check-missing-commits.yml
# build-test:
# uses: ./.github/workflows/reusable-build-test.yml
# strategy:
# matrix:
# os: [linux, macos, windows]
# with:
# os: ${{ matrix.os }}
# strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
# secrets:
# CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
build-package:
name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
uses: ./.github/workflows/reusable-build-pkg.yml
secrets: inherit
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
# pkg_type: [rpm]
pkg_type: [deb, rpm]
arch: [amd64]
# arch: [amd64, arm64]
with:
pkg_type: ${{ matrix.pkg_type }}
arch: ${{ matrix.arch }}
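
The concurrency comment in the workflow above describes the intended grouping: pushes to `develop` key the group by commit SHA so merged runs continue, while all other refs share a per-branch group that cancels in-progress runs. The GitHub expression reduces to the selection below (sample values are hypothetical):

```python
# Equivalent of the concurrency group expression in on-trigger.new.yml:
# pushes to develop key the group by commit SHA, everything else by ref.
def concurrency_group(workflow: str, event_name: str, ref: str, sha: str) -> str:
    key = sha if (event_name == "push" and ref == "refs/heads/develop") else ref
    return f"{workflow}-{key}"

print(concurrency_group("Trigger", "push", "refs/heads/develop", "abc1234"))
print(concurrency_group("Trigger", "push", "refs/heads/release", "abc1234"))
```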

View File

@@ -9,6 +9,7 @@ on:
branches:
- "develop"
- "release*"
- "linux_packages_squashed"
paths:
# These paths are unique to `on-trigger.yml`.
- ".github/workflows/on-trigger.yml"
@@ -22,17 +23,18 @@ on:
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-clang-tidy.yml"
- ".github/workflows/reusable-clang-tidy-files.yml"
- ".github/workflows/reusable-build-pkg.yml"
- ".github/workflows/reusable-pkg.yml"
- ".github/workflows/reusable-package.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".github/workflows/reusable-upload-recipe.yml"
- ".clang-tidy"
- ".codecov.yml"
- "cmake/**"
- "conan/**"
- "external/**"
- "include/**"
- "pkgs/**"
- "src/**"
- "tests/**"
- "CMakeLists.txt"
@@ -63,15 +65,6 @@ defaults:
shell: bash
jobs:
clang-tidy:
uses: ./.github/workflows/reusable-clang-tidy.yml
permissions:
issues: write
contents: read
with:
check_only_changed: false
create_issue_on_failure: ${{ github.event_name == 'schedule' }}
build-test:
uses: ./.github/workflows/reusable-build-test.yml
strategy:
@@ -90,6 +83,21 @@ jobs:
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
build-package:
name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
uses: ./.github/workflows/reusable-build-pkg.yml
secrets: inherit
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
# pkg_type: [rpm]
pkg_type: [deb, rpm]
arch: [amd64]
# arch: [amd64, arm64]
with:
pkg_type: ${{ matrix.pkg_type }}
arch: ${{ matrix.arch }}
upload-recipe:
needs: build-test
# Only run when pushing to the develop branch in the XRPLF repository.

34
.github/workflows/package-test.yml vendored Normal file
View File

@@ -0,0 +1,34 @@
name: Test rippled
on:
workflow_call:
inputs:
pkg_type:
description: "Whether to run unit tests"
required: true
type: boolean
arch:
description: Runner to run the job on as a JSON string
required: true
type: string
jobs:
test:
name: Test ${{ inputs.pkg_type }}-${{ inputs.arch }}
strategy:
fail-fast: false
matrix:
include:
- { pkg: rpm, distro: "rocky:9" }
- { pkg: deb, distro: "ubuntu:jammy" }
- { pkg: deb, distro: "debian:trixie" }
runs-on: ubuntu-latest
container: ${{ matrix.distro }}
steps:
- name: run unittests
run: |
ls -lh
# - name: Download rippled artifact
# uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
# with:
# name: rippled-${{ inputs.config_name }}

View File

@@ -4,18 +4,6 @@ name: Build and publish documentation
on:
push:
branches:
- "develop"
- "release*"
paths:
- ".github/workflows/publish-docs.yml"
- "*.md"
- "**/*.md"
- "docs/**"
- "include/**"
- "src/libxrpl/**"
- "src/xrpld/**"
pull_request:
paths:
- ".github/workflows/publish-docs.yml"
- "*.md"
@@ -35,9 +23,7 @@ defaults:
env:
BUILD_DIR: build
# ubuntu-latest has only 2 CPUs for private repositories
# https://docs.github.com/en/actions/reference/runners/github-hosted-runners#standard-github-hosted-runners-for--private-repositories
NPROC_SUBTRACT: ${{ github.event.repository.private && '1' || '2' }}
NPROC_SUBTRACT: 2
jobs:
publish:
@@ -47,7 +33,7 @@ jobs:
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf
@@ -79,7 +65,7 @@ jobs:
cmake --build . --target docs --parallel ${BUILD_NPROC}
- name: Publish documentation
if: ${{ github.event_name == 'push' }}
if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}

148
.github/workflows/reusable-build-pkg.yml vendored Normal file
View File

@@ -0,0 +1,148 @@
on:
workflow_call:
inputs:
pkg_type:
required: false
type: string
arch:
required: false
type: string
# secrets:
# GPG_KEY_B64:
# description: "The gpg key to sign packages."
# required: true
# GPG_KEY_PASS_B64:
# description: "The gpg key passphrase."
# required: true
defaults:
run:
shell: bash
jobs:
build:
name: Build ${{ inputs.pkg_type }} ${{ inputs.arch }} package
runs-on: heavy${{ inputs.arch == 'arm64' && '-arm64' || '' }}
container: ghcr.io/xrplf/ci/${{ inputs.pkg_type == 'rpm' && 'rhel-9' || 'ubuntu-jammy' }}:gcc-12
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Build packages
run: |
./pkgs/build.sh
cat build_vars >> $GITHUB_STEP_SUMMARY
- uses: actions/upload-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
path: |
*.deb
*.ddeb
if-no-files-found: error
if: inputs.pkg_type == 'deb'
- uses: actions/upload-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
path: "*${{ inputs.arch }}.${{ inputs.pkg_type }}"
if-no-files-found: error
if: inputs.pkg_type == 'rpm'
test:
name: Test ${{ inputs.pkg_type }} ${{ inputs.arch }} package
needs: build
runs-on: heavy${{ inputs.arch == 'arm64' && '-arm64' || '' }}
container: ghcr.io/xrplf/ci/${{ inputs.pkg_type == 'rpm' && 'rhel-9' || 'ubuntu-jammy' }}:gcc-12
steps:
- uses: actions/download-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
- name: Running tests
run: echo "Running tests..."
sign:
name: Sign ${{ inputs.pkg_type }} ${{ inputs.arch }} package
needs: build
runs-on: ubuntu-latest
container: ghcr.io/astral-sh/uv:python3.13-bookworm-slim
steps:
- name: Install gpg & rpm
run: apt-get update && apt-get install -y gpg rpm
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: actions/download-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
- name: Sign
env:
PYTHONUNBUFFERED: 1
GPG_KEY_B64: ${{ secrets.GPG_KEY_B64 }}
GPG_KEY_PASS_B64: ${{ secrets.GPG_KEY_PASS_B64 }}
run: |
if [ "${{ inputs.pkg_type }}" = "rpm" ]; then
for i in $(find . -maxdepth 1 -type f -name "rippled-[0-9]*.rpm"); do
echo "found $i"
./pkgs/sign_packages.py "$i"
done
elif [ "${{ inputs.pkg_type }}" = "deb" ]; then
for i in $(find . -maxdepth 1 -type f -name "rippled_*.deb"); do
echo "found $i"
./pkgs/sign_packages.py "$i"
done
fi
- uses: actions/upload-artifact@v4
with:
name: signed-rippled-${{ inputs.pkg_type }}-${{ inputs.arch }}
path: |
*.deb
*.ddeb
if-no-files-found: error
if: inputs.pkg_type == 'deb'
- uses: actions/upload-artifact@v4
with:
name: signed-rippled-${{ inputs.pkg_type }}-${{ inputs.arch }}
path: "*${{ inputs.arch }}.${{ inputs.pkg_type }}"
if-no-files-found: error
if: inputs.pkg_type == 'rpm'
docker:
name: Build Docker image
if: inputs.pkg_type == 'deb'
needs: build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: actions/download-artifact@v4
with:
name: deb-${{ inputs.arch }}
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: docker/metadata-action@v5
id: meta
with:
images: ghcr.io/${{ github.repository_owner }}/rippled
tags: |
type=ref,event=branch
type=ref,event=tag
type=sha
- uses: docker/build-push-action@v6
with:
context: .
file: pkgs/docker/Dockerfile
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
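
The `sign` job above selects artifacts with shell globs before handing each file to `pkgs/sign_packages.py`, whose internals are not part of this diff. The same selection expressed in Python, with the signing call merely echoed:

```python
from pathlib import Path

# Mirror the artifact selection in the "Sign" step above: RPMs are named
# rippled-<version>*.rpm, Debian packages rippled_<version>*.deb.
def packages_to_sign(pkg_type: str) -> list[Path]:
    pattern = "rippled-[0-9]*.rpm" if pkg_type == "rpm" else "rippled_*.deb"
    return sorted(Path(".").glob(pattern))

for pkg in packages_to_sign("deb"):
    # The workflow invokes ./pkgs/sign_packages.py on each file; its interface
    # is not shown in this diff, so we only print the intended call here.
    print(f"./pkgs/sign_packages.py {pkg}")
```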

View File

@@ -104,7 +104,7 @@ jobs:
uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d
@@ -254,7 +254,7 @@ jobs:
- name: Upload coverage report
if: ${{ github.repository_owner == 'XRPLF' && !inputs.build_only && env.COVERAGE_ENABLED == 'true' }}
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Check levelization
run: .github/scripts/levelization/generate.sh
- name: Check for differences

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Check definitions
run: .github/scripts/rename/definitions.sh .
- name: Check copyright notices
@@ -31,8 +31,6 @@ jobs:
run: .github/scripts/rename/namespace.sh .
- name: Check config name
run: .github/scripts/rename/config.sh .
- name: Check include guards
run: .github/scripts/rename/include.sh .
- name: Check for differences
env:
MESSAGE: |

View File

@@ -1,162 +0,0 @@
name: Run clang-tidy on files
on:
workflow_call:
inputs:
files:
description: "List of files to check (empty means check all files)"
type: string
default: ""
create_issue_on_failure:
description: "Whether to create an issue if the check failed"
type: boolean
default: false
defaults:
run:
shell: bash
env:
# Conan installs the generators in the build/generators directory, see the
# layout() method in conanfile.py. We then run CMake from the build directory.
BUILD_DIR: build
BUILD_TYPE: Release
jobs:
run-clang-tidy:
name: Run clang tidy
runs-on: ["self-hosted", "Linux", "X64", "heavy"]
container: "ghcr.io/xrplf/ci/debian-trixie:clang-21-sha-53033a2"
permissions:
issues: write
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Prepare runner
uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d
with:
enable_ccache: false
- name: Print build environment
uses: ./.github/actions/print-env
- name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf
id: nproc
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ env.BUILD_TYPE }}
log_verbosity: verbose
- name: Configure CMake
working-directory: ${{ env.BUILD_DIR }}
run: |
cmake \
-G 'Ninja' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
-Dtests=ON \
-Dwerr=ON \
-Dxrpld=ON \
..
# clang-tidy needs headers generated from proto files
- name: Build libxrpl.libpb
working-directory: ${{ env.BUILD_DIR }}
run: |
ninja -j ${{ steps.nproc.outputs.nproc }} xrpl.libpb
- name: Run clang tidy
id: run_clang_tidy
continue-on-error: true
env:
FILES: ${{ inputs.files }}
run: |
run-clang-tidy -j ${{ steps.nproc.outputs.nproc }} -p "$BUILD_DIR" $FILES 2>&1 | tee clang-tidy-output.txt
- name: Upload clang-tidy output
if: steps.run_clang_tidy.outcome != 'success'
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: clang-tidy-results
path: clang-tidy-output.txt
retention-days: 30
- name: Create an issue
if: steps.run_clang_tidy.outcome != 'success' && inputs.create_issue_on_failure
id: create_issue
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
# Prepare issue body with clang-tidy output
cat > issue.md <<EOF
## Clang-tidy Check Failed
**Workflow:** ${{ github.workflow }}
**Run ID:** ${{ github.run_id }}
**Commit:** ${{ github.sha }}
**Branch/Ref:** ${{ github.ref }}
**Triggered by:** ${{ github.actor }}
### Clang-tidy Output:
\`\`\`
EOF
# Append clang-tidy output (filter for errors and warnings)
if [ -f clang-tidy-output.txt ]; then
# Extract lines containing 'error:', 'warning:', or 'note:'
grep -E '(error:|warning:|note:)' clang-tidy-output.txt > filtered-output.txt || true
# If filtered output is empty, use original (might be a different error format)
if [ ! -s filtered-output.txt ]; then
cp clang-tidy-output.txt filtered-output.txt
fi
# Truncate if too large
head -c 60000 filtered-output.txt >> issue.md
if [ "$(wc -c < filtered-output.txt)" -gt 60000 ]; then
echo "" >> issue.md
echo "... (output truncated, see artifacts for full output)" >> issue.md
fi
rm filtered-output.txt
else
echo "No output file found" >> issue.md
fi
cat >> issue.md <<EOF
\`\`\`
**Workflow run:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
---
*This issue was automatically created by the clang-tidy workflow.*
EOF
# Create the issue
gh issue create \
--label "Bug,Clang-tidy" \
--title "Clang-tidy check failed" \
--body-file ./issue.md \
> create_issue.log
created_issue="$(sed 's|.*/||' create_issue.log)"
echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
echo "Created issue #$created_issue"
rm -f create_issue.log issue.md clang-tidy-output.txt
- name: Fail the workflow if clang-tidy failed
if: steps.run_clang_tidy.outcome != 'success'
run: |
echo "Clang-tidy check failed!"
exit 1

View File

@@ -1,47 +0,0 @@
name: Clang-tidy check
on:
workflow_call:
inputs:
check_only_changed:
description: "Check only changed files in PR. If false, checks all files in the repository."
type: boolean
default: false
create_issue_on_failure:
description: "Whether to create an issue if the check failed"
type: boolean
default: false
defaults:
run:
shell: bash
jobs:
determine-files:
name: Determine files to check
if: ${{ inputs.check_only_changed }}
runs-on: ubuntu-latest
outputs:
any_changed: ${{ steps.changed_files.outputs.any_changed }}
all_changed_files: ${{ steps.changed_files.outputs.all_changed_files }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Get changed C++ files
id: changed_files
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
**/*.cpp
**/*.h
**/*.ipp
separator: " "
run-clang-tidy:
needs: [determine-files]
if: ${{ always() && !cancelled() && (!inputs.check_only_changed || needs.determine-files.outputs.any_changed == 'true') }}
uses: ./.github/workflows/reusable-clang-tidy-files.yml
with:
files: ${{ inputs.check_only_changed && needs.determine-files.outputs.all_changed_files || '' }}
create_issue_on_failure: ${{ inputs.create_issue_on_failure }}

69
.github/workflows/reusable-package.yml vendored Normal file
View File

@@ -0,0 +1,69 @@
name: Package rippled
on:
workflow_call:
inputs:
build_type:
description: 'The build type to use ("Debug", "Release").'
required: false
type: string
default: 'Release'
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
type: string
cmake_target:
description: "The CMake target to build."
required: false
type: string
runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: false
type: string
default: ''
config_name:
description: "The name of the configuration."
required: false
type: string
defaults:
run:
shell: bash
jobs:
build:
name: Package ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Build packages
run: |
export BUILD_TYPE=${{ inputs.build_type }}
export CMAKE_ARGS=${{ inputs.cmake_args }}
export CMAKE_TARGETS=${{ inputs.cmake_target }}
./pkgs/build.sh
{
echo "<table>"
while IFS='=' read -r k v; do
printf '<tr><td>%s</td><td align="right"><code>%s</code></td></tr>\n' "$k" "$v"
done < build_vars
echo "</table>"
} >> "$GITHUB_STEP_SUMMARY"
- uses: actions/upload-artifact@v4
with:
name: ${{ inputs.config_name }}
path: '**/*.{deb,rpm}'
if-no-files-found: error
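
The `Build packages` step above turns `key=value` lines from `build_vars` into an HTML table for the job summary with a shell loop. The equivalent transformation in Python, assuming the same simple `key=value` format (the sample variable names are illustrative):

```python
import html

# Render build_vars (one key=value per line) as the HTML table written to
# $GITHUB_STEP_SUMMARY in the workflow step above.
def build_vars_table(text: str) -> str:
    rows = []
    for line in text.splitlines():
        if "=" not in line:
            continue
        key, value = line.split("=", 1)
        rows.append(
            f'<tr><td>{html.escape(key)}</td>'
            f'<td align="right"><code>{html.escape(value)}</code></td></tr>'
        )
    return "<table>\n" + "\n".join(rows) + "\n</table>"

print(build_vars_table("PKG_VERSION=2.6.0\nPKG_ARCH=amd64"))
```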

41
.github/workflows/reusable-pkg.yml vendored Normal file
View File

@@ -0,0 +1,41 @@
name: Package
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: false
type: string
default: ".build"
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: false
type: string
default: linux
strategy_matrix_subset:
description: 'The strategy matrix to use for generating a subset of configurations.'
required: false
type: string
default: "package"
jobs:
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
os: ${{ inputs.os }}
strategy_matrix_subset: ${{ inputs.strategy_matrix_subset }}
package:
needs:
- generate-matrix
uses: ./.github/workflows/reusable-package.yml
strategy:
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
with:
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_target: ${{ matrix.cmake_target }}
runs_on: ${{ toJSON(matrix.architecture.runner) }}
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-5dd7158', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || '' }}
config_name: ${{ matrix.config_name }}
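
The `reusable-pkg.yml` job above assembles the CI container reference from matrix fields with a GitHub `format()` expression. The same string construction in Python, using one of the `linux.json` entries shown earlier in this diff:

```python
# Equivalent of the format() expression in reusable-pkg.yml:
# ghcr.io/xrplf/ci/{distro_name}-{distro_version}:{compiler_name}-{compiler_version}-sha-5dd7158
os_entry = {
    "distro_name": "debian",
    "distro_version": "bookworm",
    "compiler_name": "gcc",
    "compiler_version": "12",
}

image = (
    "ghcr.io/xrplf/ci/"
    f"{os_entry['distro_name']}-{os_entry['distro_version']}:"
    f"{os_entry['compiler_name']}-{os_entry['compiler_version']}-sha-5dd7158"
)
print(image)  # ghcr.io/xrplf/ci/debian-bookworm:gcc-12-sha-5dd7158
```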

View File

@@ -13,6 +13,10 @@ on:
required: false
type: string
default: "minimal"
strategy_matrix_subset:
description: 'The strategy matrix to use for generating a subset of configurations.'
required: false
type: string
outputs:
matrix:
description: "The generated strategy matrix."
@@ -29,10 +33,10 @@ jobs:
matrix: ${{ steps.generate.outputs.matrix }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: 3.13
@@ -42,4 +46,5 @@ jobs:
env:
GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
GENERATE_SUBSET: ${{ inputs.strategy_matrix_subset != '' && format('--{0}', inputs.strategy_matrix_subset) || '' }}
run: ./generate.py ${{ env.GENERATE_SUBSET }} ${{ env.GENERATE_OPTION }} ${{ env.GENERATE_CONFIG }} >> "${GITHUB_OUTPUT}"
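For reference, a hedged sketch of what this command expands to on the packaging path wired up in `reusable-pkg.yml` above, assuming the defaults there (`os: linux`, `strategy_matrix_subset: package`, and `strategy_matrix` not set to `all`):

```
# GENERATE_SUBSET=--package, GENERATE_CONFIG=--config=linux.json, GENERATE_OPTION is empty,
# so the step effectively runs:
./generate.py --package --config=linux.json >> "${GITHUB_OUTPUT}"
```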

View File

@@ -43,7 +43,7 @@ jobs:
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Generate build version number
id: version

View File

@@ -67,7 +67,7 @@ jobs:
uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d

3
.gitignore vendored
View File

@@ -71,6 +71,3 @@ DerivedData
/.augment
/.claude
/CLAUDE.md
# clangd cache
/.cache

View File

@@ -20,7 +20,7 @@ repos:
args: [--assume-in-merge]
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: 75ca4ad908dc4a99f57921f29b7e6c1521e10b26 # frozen: v21.1.8
rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
hooks:
- id: clang-format
args: [--style=file]

View File

@@ -368,36 +368,6 @@ The workaround for this error is to add two lines to your profile:
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
```
### Set Up Ccache
To speed up repeated compilations, we recommend that you install
[ccache](https://ccache.dev), a tool that wraps your compiler so that it can
cache build objects locally.
#### Linux
You can install it using the package manager, e.g. `sudo apt install ccache`
(Ubuntu) or `sudo dnf install ccache` (RHEL).
#### macOS
You can install it using Homebrew, i.e. `brew install ccache`.
#### Windows
You can install it using Chocolatey, i.e. `choco install ccache`. If you already
have Ccache installed, then `choco upgrade ccache` will update it to the latest
version. However, if you see an error such as:
```
terminate called after throwing an instance of 'std::bad_alloc'
what(): std::bad_alloc
C:\Program Files\Microsoft Visual Studio\2022\Community\MSBuild\Microsoft\VC\v170\Microsoft.CppCommon.targets(617,5): error MSB6006: "cl.exe" exited with code 3.
```
then please install a specific version of Ccache that we know works, via: `choco
install ccache --version 4.11.3 --allow-downgrade`.
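If the build does not pick up Ccache automatically, one common way to route compilations through it is via CMake's compiler-launcher variables. This is a generic sketch, not a documented project workflow; the repository's own `Ccache` CMake module may already handle this for you.

```
# Confirm ccache is on PATH and reset its statistics (optional).
ccache --version
ccache --zero-stats

# Configure with ccache wrapping both compilers; if the project's CMake already
# does the equivalent, these two flags are redundant but harmless.
cmake -B .build -S . \
  -DCMAKE_C_COMPILER_LAUNCHER=ccache \
  -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
  -DCMAKE_BUILD_TYPE=Release

# After building twice, the second pass should show a high cache hit rate.
ccache --show-stats
```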
### Build and Test
1. Create a build directory and move into it.
@@ -580,10 +550,10 @@ See [Sanitizers docs](./docs/build/sanitizers.md) for more details.
| `werr` | OFF | Treat compilation warnings as errors |
| `wextra` | OFF | Enable additional compilation warnings |
[Unity builds][5] may be faster for the first build (at the cost of much more
memory) since they concatenate sources into fewer translation units. Non-unity
builds may be faster for incremental builds, and can be helpful for detecting
`#include` omissions.
[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.
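As a rough illustration, switching between the two modes is a one-flag change at configure time. The option name `unity` is assumed here from the lowercase naming of the other options in the table above; check the options table in your checkout for the exact spelling.

```
# First full build: unity mode trades memory for compile time.
cmake -B .build -S . -DCMAKE_BUILD_TYPE=Release -Dunity=ON
cmake --build .build

# Day-to-day development: non-unity mode for faster incremental rebuilds
# and stricter detection of missing #include directives.
cmake -B .build -S . -DCMAKE_BUILD_TYPE=Release -Dunity=OFF
cmake --build .build
```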
## Troubleshooting

View File

@@ -17,7 +17,6 @@ project(xrpl)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
include(CompilationEnv)
@@ -39,16 +38,16 @@ include(Ccache)
# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if (Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse
HEAD OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if (gch)
set(GIT_COMMIT_HASH "${gch}")
message(STATUS gch: ${GIT_COMMIT_HASH})
add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif ()
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse
--abbrev-ref HEAD OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if (gb)
set(GIT_BRANCH "${gb}")
message(STATUS gb: ${GIT_BRANCH})
@@ -68,8 +67,7 @@ include(FetchContent)
include(ExternalProject)
include(CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
if (target)
message(FATAL_ERROR "The target option has been removed - use native cmake options to control build"
)
message(FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()
include(XrplSanity)
@@ -78,8 +76,7 @@ include(XrplSettings)
# this check has to remain in the top-level cmake because of the early return statement
if (packages_only)
if (NOT TARGET rpm)
message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?"
)
message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?")
endif ()
return()
endif ()
@@ -121,8 +118,7 @@ target_link_libraries(
option(rocksdb "Enable RocksDB" ON)
if (rocksdb)
find_package(RocksDB REQUIRED)
set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS
XRPL_ROCKSDB_AVAILABLE=1)
set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1)
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
endif ()

View File

@@ -219,7 +219,7 @@ coherent rather than a set of _thou shalt not_ commandments.
## Formatting
All code must conform to `clang-format` version 21,
All code must conform to `clang-format` version 18,
according to the settings in [`.clang-format`](./.clang-format),
unless the result would be unreasonably difficult to read or maintain.
To demarcate lines that should be left as-is, surround them with comments like

View File

@@ -940,7 +940,23 @@
#
# path Location to store the database
#
# Optional keys for NuDB and RocksDB:
# Optional keys
#
# cache_size Size of cache for database records. Default is 16384.
# Setting this value to 0 will use the default value.
#
# cache_age Length of time in minutes to keep database records
# cached. Default is 5 minutes. Setting this value to
# 0 will use the default value.
#
# Note: if neither cache_size nor cache_age is
# specified, the cache for database records will not
# be created. If only one of cache_size or cache_age
# is specified, the cache will be created using the
# default value for the unspecified parameter.
#
# Note: the cache will not be created if online_delete
# is specified.
#
# fast_load Boolean. If set, load the last persisted ledger
# from disk upon process start before syncing to
@@ -948,6 +964,8 @@
# if sufficient IOPS capacity is available.
# Default 0.
#
# Optional keys for NuDB or RocksDB:
#
# earliest_seq The default is 32570 to match the XRP ledger
# network's earliest allowed sequence. Alternate
# networks may set this value. Minimum value of 1.
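To make the cache keys above concrete, here is a small sketch that writes an illustrative `[node_db]` stanza to a scratch file. The type, path, and values are placeholders that mirror the documented defaults, not recommendations, and per the note above the record cache is skipped entirely when `online_delete` is configured.

```
# Illustrative stanza only -- merge by hand into your real rippled.cfg.
cat > node_db-example.cfg <<'EOF'
[node_db]
type=NuDB
path=/var/lib/rippled/db/nudb
cache_size=16384
cache_age=5
EOF
```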

View File

@@ -43,8 +43,7 @@ set(CMAKE_VS_GLOBALS "CLToolExe=cl.exe" "CLToolPath=${CMAKE_BINARY_DIR}" "TrackF
# By default Visual Studio generators will use /Zi to capture debug information, which is not compatible with ccache, so
# tell it to use /Z7 instead.
if (MSVC)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG
CMAKE_CXX_FLAGS_RELEASE)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
string(REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
endforeach ()
endif ()

View File

@@ -180,8 +180,7 @@ elseif (DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if (APPLE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH
OUTPUT_STRIP_TRAILING_WHITESPACE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program(LLVMCOV_PATH llvm-cov)
endif ()
@@ -200,8 +199,8 @@ foreach (LANG ${LANGUAGES})
if ("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif ()
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}"
MATCHES "(LLVM)?[Ff]lang")
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES
"(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif ()
endforeach ()
@@ -322,16 +321,14 @@ function (setup_target_for_coverage_gcovr)
endif ()
if ("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting..."
)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else ()
if ((Coverage_FORMAT STREQUAL "html-details") OR (Coverage_FORMAT STREQUAL "html-nested"))
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif (Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL
"json-details")
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls"))
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif (Coverage_FORMAT STREQUAL "txt")
@@ -455,10 +452,8 @@ function (setup_target_for_coverage_gcovr)
COMMENT "Running gcovr to produce code coverage report.")
# Show info where to find the report
add_custom_command(
TARGET ${Coverage_NAME} POST_BUILD COMMAND echo
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD COMMAND echo
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}")
endfunction () # setup_target_for_coverage_gcovr
function (add_code_coverage_to_target name scope)
@@ -468,10 +463,14 @@ function (add_code_coverage_to_target name scope)
separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")
# Add compiler options to the target
target_compile_options(
${name} ${scope} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
target_compile_options(${name} ${scope} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
target_link_libraries(${name} ${scope} $<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS}>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS}>)
target_link_libraries(
${name}
${scope}
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS}
gcov>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS}
gcov>)
endfunction () # add_code_coverage_to_target

View File

@@ -9,5 +9,8 @@ function (xrpl_add_test name)
isolate_headers(${target} "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests/${name}" PRIVATE)
# Make sure the test isn't optimized away in unity builds
set_target_properties(${target} PROPERTIES UNITY_BUILD_MODE GROUP UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
add_test(NAME ${target} COMMAND ${target})
endfunction ()

View File

@@ -17,8 +17,7 @@ link_libraries(Xrpl::common)
if (NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE)
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
endif ()
set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE
${CMAKE_POSITION_INDEPENDENT_CODE})
set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE})
set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions(
common
@@ -38,8 +37,7 @@ if (MSVC)
# remove existing exception flag since we set it to -EHa
string(REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG
CMAKE_CXX_FLAGS_RELEASE)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
# also remove dynamic runtime
string(REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")
@@ -145,23 +143,20 @@ if (voidstar)
elseif (NOT is_linux)
message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting..."
)
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
endif ()
endif ()
if (use_mold)
# use mold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "mold")
target_link_libraries(common INTERFACE -fuse-ld=mold)
endif ()
unset(LD_VERSION)
elseif (use_gold AND is_gcc)
# use gold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
#[=========================================================[
NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
default instead of DT_RPATH, so you might have slightly
@@ -191,8 +186,7 @@ elseif (use_gold AND is_gcc)
unset(LD_VERSION)
elseif (use_lld)
# use lld linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "LLD")
target_link_libraries(common INTERFACE -fuse-ld=lld)
endif ()

View File

@@ -14,8 +14,7 @@ target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/x
PROTOS include/xrpl/proto/xrpl.proto)
file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto
PROTOS "${protos}")
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto PROTOS "${protos}")
target_protobuf_sources(
xrpl.libpb xrpl/proto
LANGUAGE grpc
@@ -25,9 +24,8 @@ target_protobuf_sources(
GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc)
target_compile_options(
xrpl.libpb
PUBLIC $<$<BOOL:${is_msvc}>:-wd4996> $<$<BOOL:${is_xcode}>:
--system-header-prefix="google/protobuf" -Wno-deprecated-dynamic-exception-spec >
xrpl.libpb PUBLIC $<$<BOOL:${is_msvc}>:-wd4996> $<$<BOOL:${is_xcode}>: --system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec >
PRIVATE $<$<BOOL:${is_msvc}>:-wd4065> $<$<NOT:$<BOOL:${is_msvc}>>:-Wno-deprecated-declarations>)
target_link_libraries(xrpl.libpb PUBLIC protobuf::libprotobuf gRPC::grpc++)
@@ -75,8 +73,7 @@ target_link_libraries(xrpl.libxrpl.protocol PUBLIC xrpl.libxrpl.crypto xrpl.libx
# Level 05
add_module(xrpl core)
target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
# Level 06
add_module(xrpl resource)
@@ -84,40 +81,21 @@ target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)
# Level 07
add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol xrpl.libxrpl.resource)
add_module(xrpl nodestore)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol)
add_module(xrpl shamap)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto
xrpl.libxrpl.protocol xrpl.libxrpl.nodestore)
add_module(xrpl rdb)
target_link_libraries(xrpl.libxrpl.rdb PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.core)
target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol
xrpl.libxrpl.resource)
add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol xrpl.libxrpl.core
xrpl.libxrpl.rdb xrpl.libxrpl.resource)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
add_module(xrpl conditions)
target_link_libraries(xrpl.libxrpl.conditions PUBLIC xrpl.libxrpl.server)
add_module(xrpl nodestore)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
add_module(xrpl shamap)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto xrpl.libxrpl.protocol
xrpl.libxrpl.nodestore)
add_module(xrpl ledger)
target_link_libraries(
xrpl.libxrpl.ledger
PUBLIC xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
xrpl.libxrpl.rdb
xrpl.libxrpl.server
xrpl.libxrpl.shamap
xrpl.libxrpl.conditions)
add_module(xrpl tx)
target_link_libraries(xrpl.libxrpl.tx PUBLIC xrpl.libxrpl.ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
@@ -132,19 +110,16 @@ target_link_modules(
PUBLIC
basics
beast
conditions
core
crypto
json
ledger
net
nodestore
protocol
rdb
resource
server
nodestore
shamap
tx)
net
ledger)
# All headers in libxrpl are in modules.
# Uncomment this stanza if you have not yet moved new headers into a module.
@@ -185,4 +160,12 @@ if (xrpld)
# antithesis_instrumentation.h, which is not exported as INTERFACE
target_include_directories(xrpld PRIVATE ${CMAKE_SOURCE_DIR}/external/antithesis-sdk)
endif ()
# any files that don't play well with unity should be added here
if (tests)
set_source_files_properties(
# these two seem to produce conflicts in beast teardown template methods
src/test/rpc/ValidatorRPC_test.cpp src/test/ledger/Invariants_test.cpp PROPERTIES SKIP_UNITY_BUILD_INCLUSION
TRUE)
endif ()
endif ()

View File

@@ -65,8 +65,8 @@ add_custom_command(
OUTPUT "${doxygen_index_file}"
COMMAND "${CMAKE_COMMAND}" -E env "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}"
"DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" "DOXYGEN_TAGFILES=${doxygen_tagfiles}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}"
"DOXYGEN_DOT_PATH=${doxygen_dot_path}" "${DOXYGEN_EXECUTABLE}" "${doxyfile}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" "DOXYGEN_DOT_PATH=${doxygen_dot_path}"
"${DOXYGEN_EXECUTABLE}" "${doxyfile}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
DEPENDS "${dependencies}" "${tagfile}")
add_custom_target(docs DEPENDS "${doxygen_index_file}" SOURCES "${dependencies}")

View File

@@ -20,11 +20,9 @@ install(TARGETS common
xrpl.libxrpl
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.conditions
xrpl.libxrpl.core
xrpl.libxrpl.crypto
xrpl.libxrpl.json
xrpl.libxrpl.rdb
xrpl.libxrpl.ledger
xrpl.libxrpl.net
xrpl.libxrpl.nodestore
@@ -32,7 +30,6 @@ install(TARGETS common
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl.shamap
xrpl.libxrpl.tx
antithesis-sdk-cpp
EXPORT XrplExports
LIBRARY DESTINATION lib
@@ -41,13 +38,11 @@ install(TARGETS common
INCLUDES
DESTINATION include)
install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
install(EXPORT XrplExports FILE XrplTargets.cmake NAMESPACE Xrpl:: DESTINATION lib/cmake/xrpl)
include(CMakePackageConfigHelpers)
write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version}
COMPATIBILITY SameMajorVersion)
write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version} COMPATIBILITY SameMajorVersion)
if (is_root_project AND TARGET xrpld)
install(TARGETS xrpld RUNTIME DESTINATION bin)
@@ -74,5 +69,5 @@ if (is_root_project AND TARGET xrpld)
")
endif ()
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake DESTINATION lib/cmake/xrpl)
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake ${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
DESTINATION lib/cmake/xrpl)

View File

@@ -33,13 +33,10 @@ target_compile_definitions(
target_compile_options(
opts
INTERFACE $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized> $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries(opts INTERFACE $<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries(opts INTERFACE $<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
if (jemalloc)
find_package(jemalloc REQUIRED)

View File

@@ -19,8 +19,7 @@ if (NOT is_multiconfig)
endif ()
if (is_clang) # both Clang and AppleClang
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS
16.0)
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
message(FATAL_ERROR "This project requires clang 16 or later")
endif ()
elseif (is_gcc)
@@ -33,8 +32,7 @@ endif ()
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message(FATAL_ERROR "Builds (in-source) are not allowed in "
"${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles "
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory."
)
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.")
endif ()
if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")

View File

@@ -70,8 +70,7 @@ if (is_linux AND NOT SANITIZER)
else ()
set(TRUNCATED_LOGS_DEFAULT OFF)
endif ()
option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux."
${TRUNCATED_LOGS_DEFAULT})
option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux." ${TRUNCATED_LOGS_DEFAULT})
if (TRUNCATED_THREAD_NAME_LOGS)
add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS)
endif ()
@@ -93,13 +92,11 @@ endif ()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_protobuf "Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc "Force a local build of gRPC instead of looking for an installed version." OFF)
# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table" OFF)
option(beast_no_unit_test_inline "Prevents unit test definitions from being inserted into global table" OFF)
option(single_io_service_thread "Restricts the number of threads calling io_context::run to one. \
This can be useful when debugging." OFF)
option(boost_show_deprecated "Allow boost to fail on deprecated usage. Only useful if you're trying\

View File

@@ -1,6 +1,4 @@
option(validator_keys
"Enables building of validator-keys tool as a separate target (imported via FetchContent)"
OFF)
option(validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
if (validator_keys)
git_branch(current_branch)
@@ -10,9 +8,8 @@ if (validator_keys)
endif ()
message(STATUS "Tracking ValidatorKeys branch: ${current_branch}")
FetchContent_Declare(
validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}")
FetchContent_Declare(validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}")
FetchContent_MakeAvailable(validator_keys)
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})

View File

@@ -15,12 +15,11 @@ include(isolate_headers)
function (add_module parent name)
set(target ${PROJECT_NAME}.lib${parent}.${name})
add_library(${target} OBJECT)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp")
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp")
target_sources(${target} PRIVATE ${sources})
target_include_directories(${target} PUBLIC "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>")
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/include"
"${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}" PUBLIC)
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src"
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}" PRIVATE)
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src" "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
PRIVATE)
endfunction ()

View File

@@ -39,7 +39,6 @@ if (SANITIZERS_ENABLED AND is_clang)
endif ()
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
target_compile_options(
opts INTERFACE # ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
target_compile_options(opts INTERFACE # ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif ()

View File

@@ -6,11 +6,11 @@
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1765850149.926",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1765850149.46",
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1765850147.878",
"secp256k1/0.7.1#3a61e95e220062ef32c48d019e9c81f7%1770306721.686",
"secp256k1/0.7.0#9c4ab67bdc3860c16ea5b36aed8f74ea%1765850147.928",
"rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1765850186.86",
"re2/20230301#ca3b241baec15bd31ea9187150e0b333%1765850148.103",
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1765850161.038",
"openssl/3.5.5#05a4ac5b7323f7a329b2db1391d9941f%1769599205.414",
"openssl/3.5.4#1b986e61b38fdfda3b40bebc1b234393%1768312656.257",
"nudb/2.0.9#0432758a24204da08fee953ec9ea03cb%1769436073.32",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1765850143.914",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1765842973.492",
@@ -23,7 +23,7 @@
"date/3.0.4#862e11e80030356b53c2c38599ceb32b%1765850143.772",
"c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1765850144.336",
"bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1765850143.837",
"boost/1.90.0#d5e8defe7355494953be18524a7f135b%1769454080.269",
"boost/1.90.0#d5e8defe7355494953be18524a7f135b%1765955095.179",
"abseil/20250127.0#99262a368bd01c0ccca8790dfced9719%1766517936.993"
],
"build_requires": [
@@ -31,7 +31,7 @@
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1765850165.196",
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1765850161.038",
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1765850144.707",
"msys2/cci.latest#eea83308ad7e9023f7318c60d5a9e6cb%1770199879.083",
"msys2/cci.latest#1996656c3c98e5765b25b60ff5cf77b4%1764840888.758",
"m4/1.4.19#70dc8bbb33e981d119d2acc0175cf381%1763158052.846",
"cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1765850153.937",
"cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1765850153.479",

View File

@@ -32,8 +32,8 @@ class Xrpl(ConanFile):
"grpc/1.72.0",
"libarchive/3.8.1",
"nudb/2.0.9",
"openssl/3.5.5",
"secp256k1/0.7.1",
"openssl/3.5.4",
"secp256k1/0.7.0",
"soci/4.0.3",
"zlib/1.3.1",
]

View File

@@ -17,8 +17,8 @@ guideline is to maintain the standards that are used in those libraries.
## Guidelines
If you want to do something contrary to these guidelines, understand
why you're doing it. Think, use common sense, and consider that these
changes will probably need to be maintained long after you've
why you're doing it. Think, use common sense, and consider that
your changes will probably need to be maintained long after you've
moved on to other projects.
- Use white space and blank lines to guide the eye and keep your intent clear.

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_ARCHIVE_H_INCLUDED
#define XRPL_BASICS_ARCHIVE_H_INCLUDED
#include <boost/filesystem.hpp>
@@ -15,3 +16,5 @@ void
extractTarLz4(boost::filesystem::path const& src, boost::filesystem::path const& dst);
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_BASICCONFIG_H_INCLUDED
#define XRPL_BASICS_BASICCONFIG_H_INCLUDED
#include <xrpl/basics/contract.h>
@@ -84,8 +85,7 @@ public:
if (lines_.empty())
return "";
if (lines_.size() > 1)
Throw<std::runtime_error>(
"A legacy value must have exactly one line. Section: " + name_);
Throw<std::runtime_error>("A legacy value must have exactly one line. Section: " + name_);
return lines_[0];
}
@@ -269,8 +269,7 @@ public:
bool
had_trailing_comments() const
{
return std::any_of(
map_.cbegin(), map_.cend(), [](auto s) { return s.second.had_trailing_comments(); });
return std::any_of(map_.cbegin(), map_.cend(), [](auto s) { return s.second.had_trailing_comments(); });
}
protected:
@@ -370,3 +369,5 @@ get_if_exists<bool>(Section const& section, std::string const& name, bool& v)
}
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_BLOB_H_INCLUDED
#define XRPL_BASICS_BLOB_H_INCLUDED
#include <vector>
@@ -10,3 +11,5 @@ namespace xrpl {
using Blob = std::vector<unsigned char>;
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_BUFFER_H_INCLUDED
#define XRPL_BASICS_BUFFER_H_INCLUDED
#include <xrpl/basics/Slice.h>
#include <xrpl/beast/utility/instrumentation.h>
@@ -212,3 +213,5 @@ operator!=(Buffer const& lhs, Buffer const& rhs) noexcept
}
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_BYTEUTILITIES_H_INCLUDED
#define XRPL_BASICS_BYTEUTILITIES_H_INCLUDED
namespace xrpl {
@@ -19,3 +20,5 @@ megabytes(T value) noexcept
static_assert(kilobytes(2) == 2048, "kilobytes(2) == 2048");
static_assert(megabytes(3) == 3145728, "megabytes(3) == 3145728");
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_COMPRESSIONALGORITHMS_H_INCLUDED
#define XRPL_COMPRESSIONALGORITHMS_H_INCLUDED
#include <xrpl/basics/contract.h>
@@ -35,10 +36,7 @@ lz4Compress(void const* in, std::size_t inSize, BufferFactory&& bf)
auto compressed = bf(outCapacity);
auto compressedSize = LZ4_compress_default(
reinterpret_cast<char const*>(in),
reinterpret_cast<char*>(compressed),
inSize,
outCapacity);
reinterpret_cast<char const*>(in), reinterpret_cast<char*>(compressed), inSize, outCapacity);
if (compressedSize == 0)
Throw<std::runtime_error>("lz4 compress: failed");
@@ -69,10 +67,8 @@ lz4Decompress(
Throw<std::runtime_error>("lz4Decompress: integer overflow (output)");
if (LZ4_decompress_safe(
reinterpret_cast<char const*>(in),
reinterpret_cast<char*>(decompressed),
inSize,
decompressedSize) != decompressedSize)
reinterpret_cast<char const*>(in), reinterpret_cast<char*>(decompressed), inSize, decompressedSize) !=
decompressedSize)
Throw<std::runtime_error>("lz4Decompress: failed");
return decompressedSize;
@@ -88,11 +84,7 @@ lz4Decompress(
*/
template <typename InputStream>
std::size_t
lz4Decompress(
InputStream& in,
std::size_t inSize,
std::uint8_t* decompressed,
std::size_t decompressedSize)
lz4Decompress(InputStream& in, std::size_t inSize, std::uint8_t* decompressed, std::size_t decompressedSize)
{
std::vector<std::uint8_t> compressed;
std::uint8_t const* chunk = nullptr;
@@ -141,3 +133,5 @@ lz4Decompress(
} // namespace compression_algorithms
} // namespace xrpl
#endif // XRPL_COMPRESSIONALGORITHMS_H_INCLUDED

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_COUNTEDOBJECT_H_INCLUDED
#define XRPL_BASICS_COUNTEDOBJECT_H_INCLUDED
#include <xrpl/beast/type_name.h>
@@ -133,3 +134,5 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_DECAYINGSAMPLE_H_INCLUDED
#define XRPL_BASICS_DECAYINGSAMPLE_H_INCLUDED
#include <chrono>
#include <cmath>
@@ -55,8 +56,7 @@ private:
if (m_value != value_type())
{
std::size_t elapsed =
std::chrono::duration_cast<std::chrono::seconds>(now - m_when).count();
std::size_t elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - m_when).count();
// A span larger than four times the window decays the
// value to an insignificant amount so just reset it.
@@ -130,3 +130,5 @@ private:
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_EXPECTED_H_INCLUDED
#define XRPL_BASICS_EXPECTED_H_INCLUDED
#include <xrpl/basics/contract.h>
@@ -120,8 +121,7 @@ public:
template <typename U>
requires std::convertible_to<U, E> && (!std::is_reference_v<U>)
constexpr Expected(Unexpected<U> e)
: Base(boost::outcome_v2::in_place_type_t<E>{}, std::move(e.value()))
constexpr Expected(Unexpected<U> e) : Base(boost::outcome_v2::in_place_type_t<E>{}, std::move(e.value()))
{
}
@@ -192,8 +192,7 @@ public:
// Specialization of Expected<void, E>. Allows returning either success
// (without a value) or the reason for the failure.
template <class E>
class [[nodiscard]]
Expected<void, E> : private boost::outcome_v2::result<void, E, detail::throw_policy>
class [[nodiscard]] Expected<void, E> : private boost::outcome_v2::result<void, E, detail::throw_policy>
{
using Base = boost::outcome_v2::result<void, E, detail::throw_policy>;
@@ -230,3 +229,5 @@ public:
};
} // namespace xrpl
#endif // XRPL_BASICS_EXPECTED_H_INCLUDED

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_FILEUTILITIES_H_INCLUDED
#define XRPL_BASICS_FILEUTILITIES_H_INCLUDED
#include <boost/filesystem.hpp>
#include <boost/system/error_code.hpp>
@@ -14,9 +15,8 @@ getFileContents(
std::optional<std::size_t> maxSize = std::nullopt);
void
writeFileContents(
boost::system::error_code& ec,
boost::filesystem::path const& destPath,
std::string const& contents);
writeFileContents(boost::system::error_code& ec, boost::filesystem::path const& destPath, std::string const& contents);
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_INTRUSIVEPOINTER_H_INCLUDED
#define XRPL_BASICS_INTRUSIVEPOINTER_H_INCLUDED
#include <concepts>
#include <cstdint>
@@ -44,8 +45,8 @@ struct SharedIntrusiveAdoptNoIncrementTag
//
template <class T>
concept CAdoptTag = std::is_same_v<T, SharedIntrusiveAdoptIncrementStrongTag> ||
std::is_same_v<T, SharedIntrusiveAdoptNoIncrementTag>;
concept CAdoptTag =
std::is_same_v<T, SharedIntrusiveAdoptIncrementStrongTag> || std::is_same_v<T, SharedIntrusiveAdoptNoIncrementTag>;
//------------------------------------------------------------------------------
@@ -443,8 +444,7 @@ make_SharedIntrusive(Args&&... args)
auto p = new TT(std::forward<Args>(args)...);
static_assert(
noexcept(SharedIntrusive<TT>(
std::declval<TT*>(), std::declval<SharedIntrusiveAdoptNoIncrementTag>())),
noexcept(SharedIntrusive<TT>(std::declval<TT*>(), std::declval<SharedIntrusiveAdoptNoIncrementTag>())),
"SharedIntrusive constructor should not throw or this can leak "
"memory");
@@ -485,3 +485,4 @@ dynamic_pointer_cast(TT const& v)
}
} // namespace intr_ptr
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_INTRUSIVEPOINTER_IPP_INCLUDED
#define XRPL_BASICS_INTRUSIVEPOINTER_IPP_INCLUDED
#include <xrpl/basics/IntrusivePointer.h>
#include <xrpl/basics/IntrusiveRefCounts.h>
@@ -208,8 +209,7 @@ SharedIntrusive<T>::operator->() const noexcept
}
template <class T>
SharedIntrusive<T>::
operator bool() const noexcept
SharedIntrusive<T>::operator bool() const noexcept
{
return bool(unsafeGetRawPtr());
}
@@ -504,8 +504,7 @@ SharedWeakUnion<T>::getStrong() const
}
template <class T>
SharedWeakUnion<T>::
operator bool() const noexcept
SharedWeakUnion<T>::operator bool() const noexcept
{
return bool(get());
}
@@ -704,3 +703,4 @@ SharedWeakUnion<T>::unsafeReleaseNoStore()
}
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_INTRUSIVEREFCOUNTS_H_INCLUDED
#define XRPL_BASICS_INTRUSIVEREFCOUNTS_H_INCLUDED
#include <xrpl/beast/utility/instrumentation.h>
@@ -184,8 +185,7 @@ private:
/** Mask that will zero out everything except the weak count.
*/
static constexpr FieldType weakMask =
(((one << WeakCountNumBits) - 1) << StrongCountNumBits) & valueMask;
static constexpr FieldType weakMask = (((one << WeakCountNumBits) - 1) << StrongCountNumBits) & valueMask;
/** Unpack the count and tag fields from the packed atomic integer form. */
struct RefCountPair
@@ -210,10 +210,8 @@ private:
FieldType
combinedValue() const noexcept;
static constexpr CountType maxStrongValue =
static_cast<CountType>((one << StrongCountNumBits) - 1);
static constexpr CountType maxWeakValue =
static_cast<CountType>((one << WeakCountNumBits) - 1);
static constexpr CountType maxStrongValue = static_cast<CountType>((one << StrongCountNumBits) - 1);
static constexpr CountType maxWeakValue = static_cast<CountType>((one << WeakCountNumBits) - 1);
/** Put an extra margin to detect when running up against limits.
This is only used in debug code, and is useful if we reduce the
number of bits in the strong and weak counts (to 16 and 14 bits).
@@ -398,8 +396,7 @@ inline IntrusiveRefCounts::~IntrusiveRefCounts() noexcept
{
#ifndef NDEBUG
auto v = refCounts.load(std::memory_order_acquire);
XRPL_ASSERT(
(!(v & valueMask)), "xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : count must be zero");
XRPL_ASSERT((!(v & valueMask)), "xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : count must be zero");
auto t = v & tagMask;
XRPL_ASSERT((!t || t == tagMask), "xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : valid tag");
#endif
@@ -437,10 +434,8 @@ IntrusiveRefCounts::RefCountPair::combinedValue() const noexcept
(strong < checkStrongMaxValue && weak < checkWeakMaxValue),
"xrpl::IntrusiveRefCounts::RefCountPair::combinedValue : inputs "
"inside range");
return (static_cast<IntrusiveRefCounts::FieldType>(weak)
<< IntrusiveRefCounts::StrongCountNumBits) |
static_cast<IntrusiveRefCounts::FieldType>(strong) | partialDestroyStartedBit |
partialDestroyFinishedBit;
return (static_cast<IntrusiveRefCounts::FieldType>(weak) << IntrusiveRefCounts::StrongCountNumBits) |
static_cast<IntrusiveRefCounts::FieldType>(strong) | partialDestroyStartedBit | partialDestroyFinishedBit;
}
template <class T>
@@ -448,8 +443,7 @@ inline void
partialDestructorFinished(T** o)
{
T& self = **o;
IntrusiveRefCounts::RefCountPair p =
self.refCounts.fetch_or(IntrusiveRefCounts::partialDestroyFinishedMask);
IntrusiveRefCounts::RefCountPair p = self.refCounts.fetch_or(IntrusiveRefCounts::partialDestroyFinishedMask);
XRPL_ASSERT(
(!p.partialDestroyFinishedBit && p.partialDestroyStartedBit && !p.strong),
"xrpl::partialDestructorFinished : not a weak ref");
@@ -467,3 +461,4 @@ partialDestructorFinished(T** o)
//------------------------------------------------------------------------------
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_KEYCACHE_H
#define XRPL_BASICS_KEYCACHE_H
#include <xrpl/basics/TaggedCache.h>
#include <xrpl/basics/base_uint.h>
@@ -8,3 +9,5 @@ namespace xrpl {
using KeyCache = TaggedCache<uint256, int, true>;
} // namespace xrpl
#endif // XRPL_BASICS_KEYCACHE_H

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_LOCALVALUE_H_INCLUDED
#define XRPL_BASICS_LOCALVALUE_H_INCLUDED
#include <boost/thread/tss.hpp>
@@ -103,7 +104,8 @@ LocalValue<T>::operator*()
}
return *reinterpret_cast<T*>(
lvs->values.emplace(this, std::make_unique<detail::LocalValues::Value<T>>(t_))
.first->second->get());
lvs->values.emplace(this, std::make_unique<detail::LocalValues::Value<T>>(t_)).first->second->get());
}
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_LOG_H_INCLUDED
#define XRPL_BASICS_LOG_H_INCLUDED
#include <xrpl/basics/UnorderedContainers.h>
#include <xrpl/beast/utility/Journal.h>
@@ -170,11 +171,7 @@ public:
partition_severities() const;
void
write(
beast::severities::Severity level,
std::string const& partition,
std::string const& text,
bool console);
write(beast::severities::Severity level, std::string const& partition, std::string const& text, bool console);
std::string
rotate();
@@ -261,3 +258,5 @@ beast::Journal
debugLog();
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_MATHUTILITIES_H_INCLUDED
#define XRPL_BASICS_MATHUTILITIES_H_INCLUDED
#include <algorithm>
#include <cassert>
@@ -44,3 +45,5 @@ static_assert(calculatePercent(50'000'001, 100'000'000) == 51);
static_assert(calculatePercent(99'999'999, 100'000'000) == 100);
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_NUMBER_H_INCLUDED
#define XRPL_BASICS_NUMBER_H_INCLUDED
#include <xrpl/beast/utility/instrumentation.h>
@@ -240,11 +241,7 @@ public:
Number(rep mantissa);
explicit Number(rep mantissa, int exponent);
explicit constexpr Number(
bool negative,
internalrep mantissa,
int exponent,
unchecked) noexcept;
explicit constexpr Number(bool negative, internalrep mantissa, int exponent, unchecked) noexcept;
// Assume unsigned values are... unsigned. i.e. positive
explicit constexpr Number(internalrep mantissa, int exponent, unchecked) noexcept;
// Only unit tests are expected to use this ctor
@@ -298,8 +295,7 @@ public:
friend constexpr bool
operator==(Number const& x, Number const& y) noexcept
{
return x.negative_ == y.negative_ && x.mantissa_ == y.mantissa_ &&
x.exponent_ == y.exponent_;
return x.negative_ == y.negative_ && x.mantissa_ == y.mantissa_ && x.exponent_ == y.exponent_;
}
friend constexpr bool
@@ -507,11 +503,7 @@ private:
class Guard;
};
inline constexpr Number::Number(
bool negative,
internalrep mantissa,
int exponent,
unchecked) noexcept
inline constexpr Number::Number(bool negative, internalrep mantissa, int exponent, unchecked) noexcept
: negative_(negative), mantissa_{mantissa}, exponent_{exponent}
{
}
@@ -529,8 +521,7 @@ inline Number::Number(bool negative, internalrep mantissa, int exponent, normali
normalize();
}
inline Number::Number(internalrep mantissa, int exponent, normalized)
: Number(false, mantissa, exponent, normalized{})
inline Number::Number(internalrep mantissa, int exponent, normalized) : Number(false, mantissa, exponent, normalized{})
{
}
@@ -692,8 +683,8 @@ Number::isnormal() const noexcept
MantissaRange const& range = range_;
auto const abs_m = mantissa_;
return *this == Number{} ||
(range.min <= abs_m && abs_m <= range.max && (abs_m <= maxRep || abs_m % 10 == 0) &&
minExponent <= exponent_ && exponent_ <= maxExponent);
(range.min <= abs_m && abs_m <= range.max && (abs_m <= maxRep || abs_m % 10 == 0) && minExponent <= exponent_ &&
exponent_ <= maxExponent);
}
template <Integral64 T>
@@ -705,10 +696,7 @@ Number::normalizeToRange(T minMantissa, T maxMantissa) const
int exponent = exponent_;
if constexpr (std::is_unsigned_v<T>)
XRPL_ASSERT_PARTS(
!negative,
"xrpl::Number::normalizeToRange",
"Number is non-negative for unsigned range.");
XRPL_ASSERT_PARTS(!negative, "xrpl::Number::normalizeToRange", "Number is non-negative for unsigned range.");
Number::normalize(negative, mantissa, exponent, minMantissa, maxMantissa);
auto const sign = negative ? -1 : 1;
@@ -794,8 +782,7 @@ class NumberRoundModeGuard
saveNumberRoundMode saved_;
public:
explicit NumberRoundModeGuard(Number::rounding_mode mode) noexcept
: saved_{Number::setround(mode)}
explicit NumberRoundModeGuard(Number::rounding_mode mode) noexcept : saved_{Number::setround(mode)}
{
}
@@ -815,8 +802,7 @@ class NumberMantissaScaleGuard
MantissaRange::mantissa_scale const saved_;
public:
explicit NumberMantissaScaleGuard(MantissaRange::mantissa_scale scale) noexcept
: saved_{Number::getMantissaScale()}
explicit NumberMantissaScaleGuard(MantissaRange::mantissa_scale scale) noexcept : saved_{Number::getMantissaScale()}
{
Number::setMantissaScale(scale);
}
@@ -833,3 +819,5 @@ public:
};
} // namespace xrpl
#endif // XRPL_BASICS_NUMBER_H_INCLUDED

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_RANGESET_H_INCLUDED
#define XRPL_BASICS_RANGESET_H_INCLUDED
#include <xrpl/beast/core/LexicalCast.h>
@@ -172,3 +173,5 @@ prevMissing(RangeSet<T> const& rs, T t, T minVal = 0)
}
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_RESOLVER_H_INCLUDED
#define XRPL_BASICS_RESOLVER_H_INCLUDED
#include <xrpl/beast/net/IPEndpoint.h>
@@ -44,3 +45,5 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_RESOLVERASIO_H_INCLUDED
#define XRPL_BASICS_RESOLVERASIO_H_INCLUDED
#include <xrpl/basics/Resolver.h>
#include <xrpl/beast/utility/Journal.h>
@@ -17,3 +18,5 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_SHAMAP_HASH_H_INCLUDED
#define XRPL_BASICS_SHAMAP_HASH_H_INCLUDED
#include <xrpl/basics/base_uint.h>
#include <xrpl/basics/partitioned_unordered_map.h>
@@ -97,3 +98,5 @@ extract(SHAMapHash const& key)
}
} // namespace xrpl
#endif // XRPL_BASICS_SHAMAP_HASH_H_INCLUDED

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_SHAREDWEAKCACHEPOINTER_H_INCLUDED
#define XRPL_BASICS_SHAREDWEAKCACHEPOINTER_H_INCLUDED
#include <memory>
#include <variant>
@@ -112,3 +113,4 @@ private:
std::variant<std::shared_ptr<T>, std::weak_ptr<T>> combo_;
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_SHAREDWEAKCACHEPOINTER_IPP_INCLUDED
#define XRPL_BASICS_SHAREDWEAKCACHEPOINTER_IPP_INCLUDED
#include <xrpl/basics/SharedWeakCachePointer.h>
@@ -19,8 +20,7 @@ SharedWeakCachePointer<T>::SharedWeakCachePointer(SharedWeakCachePointer&& rhs)
template <class T>
template <class TT>
requires std::convertible_to<TT*, T*>
SharedWeakCachePointer<T>::SharedWeakCachePointer(std::shared_ptr<TT>&& rhs)
: combo_{std::move(rhs)}
SharedWeakCachePointer<T>::SharedWeakCachePointer(std::shared_ptr<TT>&& rhs) : combo_{std::move(rhs)}
{
}
@@ -64,8 +64,7 @@ SharedWeakCachePointer<T>::getStrong() const
}
template <class T>
SharedWeakCachePointer<T>::
operator bool() const noexcept
SharedWeakCachePointer<T>::operator bool() const noexcept
{
return !!std::get_if<std::shared_ptr<T>>(&combo_);
}
@@ -165,3 +164,4 @@ SharedWeakCachePointer<T>::convertToWeak()
return false;
}
} // namespace xrpl
#endif

View File

@@ -1,6 +1,7 @@
// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>
#pragma once
#ifndef XRPL_BASICS_SLABALLOCATOR_H_INCLUDED
#define XRPL_BASICS_SLABALLOCATOR_H_INCLUDED
#include <xrpl/basics/ByteUtilities.h>
#include <xrpl/beast/type_name.h>
@@ -155,17 +156,13 @@ public:
contexts (e.g. when minimal memory usage is needed) and
allows for graceful failure.
*/
constexpr explicit SlabAllocator(
std::size_t extra,
std::size_t alloc = 0,
std::size_t align = 0)
constexpr explicit SlabAllocator(std::size_t extra, std::size_t alloc = 0, std::size_t align = 0)
: itemAlignment_(align ? align : alignof(Type))
, itemSize_(boost::alignment::align_up(sizeof(Type) + extra, itemAlignment_))
, slabSize_(alloc)
{
XRPL_ASSERT(
(itemAlignment_ & (itemAlignment_ - 1)) == 0,
"xrpl::SlabAllocator::SlabAllocator : valid alignment");
(itemAlignment_ & (itemAlignment_ - 1)) == 0, "xrpl::SlabAllocator::SlabAllocator : valid alignment");
}
SlabAllocator(SlabAllocator const& other) = delete;
@@ -219,7 +216,7 @@ public:
// clang-format off
if (!buf) [[unlikely]]
return nullptr;
// clang-format on
// clang-format on
#if BOOST_OS_LINUX
// When allocating large blocks, attempt to leverage Linux's
@@ -232,8 +229,7 @@ public:
// We need to carve out a bit of memory for the slab header
// and then align the rest appropriately:
auto slabData =
reinterpret_cast<void*>(reinterpret_cast<std::uint8_t*>(buf) + sizeof(SlabBlock));
auto slabData = reinterpret_cast<void*>(reinterpret_cast<std::uint8_t*>(buf) + sizeof(SlabBlock));
auto slabSize = size - sizeof(SlabBlock);
// This operation is essentially guaranteed not to fail but
@@ -244,12 +240,10 @@ public:
return nullptr;
}
slab = new (buf) SlabBlock(
slabs_.load(), reinterpret_cast<std::uint8_t*>(slabData), slabSize, itemSize_);
slab = new (buf) SlabBlock(slabs_.load(), reinterpret_cast<std::uint8_t*>(slabData), slabSize, itemSize_);
// Link the new slab
while (!slabs_.compare_exchange_weak(
slab->next_, slab, std::memory_order_release, std::memory_order_relaxed))
while (!slabs_.compare_exchange_weak(slab->next_, slab, std::memory_order_release, std::memory_order_relaxed))
{
; // Nothing to do
}
@@ -306,10 +300,7 @@ public:
std::size_t align;
public:
constexpr SlabConfig(
std::size_t extra_,
std::size_t alloc_ = 0,
std::size_t align_ = alignof(Type))
constexpr SlabConfig(std::size_t extra_, std::size_t alloc_ = 0, std::size_t align_ = alignof(Type))
: extra(extra_), alloc(alloc_), align(align_)
{
}
@@ -319,18 +310,15 @@ public:
{
// Ensure that the specified allocators are sorted from smallest to
// largest by size:
std::sort(std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) {
return a.extra < b.extra;
});
std::sort(
std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) { return a.extra < b.extra; });
// We should never have two slabs of the same size
if (std::adjacent_find(
std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) {
return a.extra == b.extra;
}) != cfg.end())
if (std::adjacent_find(std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) {
return a.extra == b.extra;
}) != cfg.end())
{
throw std::runtime_error(
"SlabAllocatorSet<" + beast::type_name<Type>() + ">: duplicate slab size");
throw std::runtime_error("SlabAllocatorSet<" + beast::type_name<Type>() + ">: duplicate slab size");
}
for (auto const& c : cfg)
@@ -398,3 +386,5 @@ public:
};
} // namespace xrpl
#endif // XRPL_BASICS_SLABALLOCATOR_H_INCLUDED

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_SLICE_H_INCLUDED
#define XRPL_BASICS_SLICE_H_INCLUDED
#include <xrpl/basics/contract.h>
#include <xrpl/basics/strHex.h>
@@ -40,8 +41,7 @@ public:
operator=(Slice const&) noexcept = default;
/** Create a slice pointing to existing memory. */
Slice(void const* data, std::size_t size) noexcept
: data_(reinterpret_cast<std::uint8_t const*>(data)), size_(size)
Slice(void const* data, std::size_t size) noexcept : data_(reinterpret_cast<std::uint8_t const*>(data)), size_(size)
{
}
@@ -198,8 +198,7 @@ operator!=(Slice const& lhs, Slice const& rhs) noexcept
inline bool
operator<(Slice const& lhs, Slice const& rhs) noexcept
{
return std::lexicographical_compare(
lhs.data(), lhs.data() + lhs.size(), rhs.data(), rhs.data() + rhs.size());
return std::lexicographical_compare(lhs.data(), lhs.data() + lhs.size(), rhs.data(), rhs.data() + rhs.size());
}
template <class Stream>
@@ -232,3 +231,5 @@ makeSlice(std::basic_string<char, Traits, Alloc> const& s)
}
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_STRINGUTILITIES_H_INCLUDED
#define XRPL_BASICS_STRINGUTILITIES_H_INCLUDED
#include <xrpl/basics/Blob.h>
#include <xrpl/basics/strHex.h>
@@ -108,8 +109,7 @@ struct parsedURL
bool
operator==(parsedURL const& other) const
{
return scheme == other.scheme && domain == other.domain && port == other.port &&
path == other.path;
return scheme == other.scheme && domain == other.domain && port == other.port && path == other.path;
}
};
@@ -132,3 +132,5 @@ bool
isProperlyFormedTomlDomain(std::string_view domain);
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_TAGGEDCACHE_H_INCLUDED
#define XRPL_BASICS_TAGGEDCACHE_H_INCLUDED
#include <xrpl/basics/IntrusivePointer.h>
#include <xrpl/basics/Log.h>
@@ -175,10 +176,7 @@ private:
struct Stats
{
template <class Handler>
Stats(
std::string const& prefix,
Handler const& handler,
beast::insight::Collector::ptr const& collector)
Stats(std::string const& prefix, Handler const& handler, beast::insight::Collector::ptr const& collector)
: hook(collector->make_hook(handler))
, size(collector->make_gauge(prefix, "size"))
, hit_rate(collector->make_gauge(prefix, "hit_rate"))
@@ -200,8 +198,7 @@ private:
public:
clock_type::time_point last_access;
explicit KeyOnlyEntry(clock_type::time_point const& last_access_)
: last_access(last_access_)
explicit KeyOnlyEntry(clock_type::time_point const& last_access_) : last_access(last_access_)
{
}
@@ -301,3 +298,5 @@ private:
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_TAGGEDCACHE_IPP_INCLUDED
#define XRPL_BASICS_TAGGEDCACHE_IPP_INCLUDED
#include <xrpl/basics/IntrusivePointer.ipp>
#include <xrpl/basics/TaggedCache.h>
@@ -14,22 +15,13 @@ template <
class Hash,
class KeyEqual,
class Mutex>
inline TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::
TaggedCache(
std::string const& name,
int size,
clock_type::duration expiration,
clock_type& clock,
beast::Journal journal,
beast::insight::Collector::ptr const& collector)
inline TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::TaggedCache(
std::string const& name,
int size,
clock_type::duration expiration,
clock_type& clock,
beast::Journal journal,
beast::insight::Collector::ptr const& collector)
: m_journal(journal)
, m_clock(clock)
, m_stats(name, std::bind(&TaggedCache::collect_metrics, this), collector)
@@ -52,8 +44,8 @@ template <
class KeyEqual,
class Mutex>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
clock() -> clock_type&
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::clock()
-> clock_type&
{
return m_clock;
}
@@ -68,8 +60,7 @@ template <
class KeyEqual,
class Mutex>
inline std::size_t
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
size() const
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::size() const
{
std::lock_guard lock(m_mutex);
return m_cache.size();
@@ -85,8 +76,7 @@ template <
class KeyEqual,
class Mutex>
inline int
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
getCacheSize() const
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getCacheSize() const
{
std::lock_guard lock(m_mutex);
return m_cache_count;
@@ -102,8 +92,7 @@ template <
class KeyEqual,
class Mutex>
inline int
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
getTrackSize() const
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getTrackSize() const
{
std::lock_guard lock(m_mutex);
return m_cache.size();
@@ -119,8 +108,7 @@ template <
class KeyEqual,
class Mutex>
inline float
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
getHitRate()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getHitRate()
{
std::lock_guard lock(m_mutex);
auto const total = static_cast<float>(m_hits + m_misses);
@@ -137,8 +125,7 @@ template <
class KeyEqual,
class Mutex>
inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
clear()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::clear()
{
std::lock_guard lock(m_mutex);
m_cache.clear();
@@ -155,8 +142,7 @@ template <
class KeyEqual,
class Mutex>
inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
reset()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::reset()
{
std::lock_guard lock(m_mutex);
m_cache.clear();
@@ -176,8 +162,8 @@ template <
class Mutex>
template <class KeyComparable>
inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
touch_if_exists(KeyComparable const& key)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::touch_if_exists(
KeyComparable const& key)
{
std::lock_guard lock(m_mutex);
auto const iter(m_cache.find(key));
@@ -201,8 +187,7 @@ template <
class KeyEqual,
class Mutex>
inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
sweep()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweep()
{
// Keep references to all the stuff we sweep
// For performance, each worker thread should exit before the swept data
@@ -228,9 +213,8 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (when_expire > (now - minimumAge))
when_expire = now - minimumAge;
JLOG(m_journal.trace())
<< m_name << " is growing fast " << m_cache.size() << " of " << m_target_size
<< " aging at " << (now - when_expire).count() << " of " << m_target_age.count();
JLOG(m_journal.trace()) << m_name << " is growing fast " << m_cache.size() << " of " << m_target_size
<< " aging at " << (now - when_expire).count() << " of " << m_target_age.count();
}
std::vector<std::thread> workers;
@@ -239,8 +223,7 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
for (std::size_t p = 0; p < m_cache.partitions(); ++p)
{
workers.push_back(sweepHelper(
when_expire, now, m_cache.map()[p], allStuffToSweep[p], allRemovals, lock));
workers.push_back(sweepHelper(when_expire, now, m_cache.map()[p], allStuffToSweep[p], allRemovals, lock));
}
for (std::thread& worker : workers)
worker.join();
@@ -249,11 +232,10 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
}
// At this point allStuffToSweep will go out of scope outside the lock
// and decrement the reference count on each strong pointer.
JLOG(m_journal.debug()) << m_name << " TaggedCache sweep lock duration "
<< std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::steady_clock::now() - start)
.count()
<< "ms";
JLOG(m_journal.debug())
<< m_name << " TaggedCache sweep lock duration "
<< std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count()
<< "ms";
}
template <
@@ -266,8 +248,9 @@ template <
class KeyEqual,
class Mutex>
inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
del(key_type const& key, bool valid)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::del(
key_type const& key,
bool valid)
{
// Remove from cache, if !valid, remove from map too. Returns true if
// removed from cache
@@ -306,8 +289,10 @@ template <
class Mutex>
template <class R>
inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
canonicalize(key_type const& key, SharedPointerType& data, R&& replaceCallback)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::canonicalize(
key_type const& key,
SharedPointerType& data,
R&& replaceCallback)
{
// Return canonical value, store if needed, refresh in cache
// Return values: true=we had the data already
@@ -318,9 +303,7 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (cit == m_cache.end())
{
m_cache.emplace(
std::piecewise_construct,
std::forward_as_tuple(key),
std::forward_as_tuple(m_clock.now(), data));
std::piecewise_construct, std::forward_as_tuple(key), std::forward_as_tuple(m_clock.now(), data));
++m_cache_count;
return false;
}
@@ -422,8 +405,8 @@ template <
class KeyEqual,
class Mutex>
inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
fetch(key_type const& key)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::fetch(
key_type const& key)
{
std::lock_guard<mutex_type> l(m_mutex);
auto ret = initialFetch(key, l);
@@ -443,8 +426,9 @@ template <
class Mutex>
template <class ReturnType>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
insert(key_type const& key, T const& value) -> std::enable_if_t<!IsKeyCache, ReturnType>
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::insert(
key_type const& key,
T const& value) -> std::enable_if_t<!IsKeyCache, ReturnType>
{
static_assert(
std::is_same_v<std::shared_ptr<T>, SharedPointerType> ||
@@ -473,13 +457,13 @@ template <
class Mutex>
template <class ReturnType>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
insert(key_type const& key) -> std::enable_if_t<IsKeyCache, ReturnType>
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::insert(
key_type const& key) -> std::enable_if_t<IsKeyCache, ReturnType>
{
std::lock_guard lock(m_mutex);
clock_type::time_point const now(m_clock.now());
auto [it, inserted] = m_cache.emplace(
std::piecewise_construct, std::forward_as_tuple(key), std::forward_as_tuple(now));
auto [it, inserted] =
m_cache.emplace(std::piecewise_construct, std::forward_as_tuple(key), std::forward_as_tuple(now));
if (!inserted)
it->second.last_access = now;
return inserted;
@@ -495,8 +479,9 @@ template <
class KeyEqual,
class Mutex>
inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
retrieve(key_type const& key, T& data)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::retrieve(
key_type const& key,
T& data)
{
// retrieve the value of the stored data
auto entry = fetch(key);
@@ -518,8 +503,8 @@ template <
class KeyEqual,
class Mutex>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
peekMutex() -> mutex_type&
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::peekMutex()
-> mutex_type&
{
return m_mutex;
}
@@ -534,8 +519,8 @@ template <
class KeyEqual,
class Mutex>
inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
getKeys() const -> std::vector<key_type>
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getKeys() const
-> std::vector<key_type>
{
std::vector<key_type> v;
@@ -559,8 +544,7 @@ template <
class KeyEqual,
class Mutex>
inline double
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
rate() const
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::rate() const
{
std::lock_guard lock(m_mutex);
auto const tot = m_hits + m_misses;
@@ -580,8 +564,9 @@ template <
class Mutex>
template <class Handler>
inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
fetch(key_type const& digest, Handler const& h)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::fetch(
key_type const& digest,
Handler const& h)
{
{
std::lock_guard l(m_mutex);
@@ -612,8 +597,9 @@ template <
class KeyEqual,
class Mutex>
inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
initialFetch(key_type const& key, std::lock_guard<mutex_type> const& l)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::initialFetch(
key_type const& key,
std::lock_guard<mutex_type> const& l)
{
auto cit = m_cache.find(key);
if (cit == m_cache.end())
@@ -649,8 +635,7 @@ template <
class KeyEqual,
class Mutex>
inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
collect_metrics()
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::collect_metrics()
{
m_stats.size.set(getCacheSize());
@@ -676,14 +661,13 @@ template <
class KeyEqual,
class Mutex>
inline std::thread
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
sweepHelper(
clock_type::time_point const& when_expire,
[[maybe_unused]] clock_type::time_point const& now,
typename KeyValueCacheType::map_type& partition,
SweptPointersVector& stuffToSweep,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweepHelper(
clock_type::time_point const& when_expire,
[[maybe_unused]] clock_type::time_point const& now,
typename KeyValueCacheType::map_type& partition,
SweptPointersVector& stuffToSweep,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
{
return std::thread([&, this]() {
int cacheRemovals = 0;
@@ -737,9 +721,8 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (mapRemovals || cacheRemovals)
{
JLOG(m_journal.debug())
<< "TaggedCache partition sweep " << m_name << ": cache = " << partition.size()
<< "-" << cacheRemovals << ", map-=" << mapRemovals;
JLOG(m_journal.debug()) << "TaggedCache partition sweep " << m_name << ": cache = " << partition.size()
<< "-" << cacheRemovals << ", map-=" << mapRemovals;
}
allRemovals += cacheRemovals;
@@ -756,14 +739,13 @@ template <
class KeyEqual,
class Mutex>
inline std::thread
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::
sweepHelper(
clock_type::time_point const& when_expire,
clock_type::time_point const& now,
typename KeyOnlyCacheType::map_type& partition,
SweptPointersVector&,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweepHelper(
clock_type::time_point const& when_expire,
clock_type::time_point const& now,
typename KeyOnlyCacheType::map_type& partition,
SweptPointersVector&,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
{
return std::thread([&, this]() {
int cacheRemovals = 0;
@@ -793,9 +775,8 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (mapRemovals || cacheRemovals)
{
JLOG(m_journal.debug())
<< "TaggedCache partition sweep " << m_name << ": cache = " << partition.size()
<< "-" << cacheRemovals << ", map-=" << mapRemovals;
JLOG(m_journal.debug()) << "TaggedCache partition sweep " << m_name << ": cache = " << partition.size()
<< "-" << cacheRemovals << ", map-=" << mapRemovals;
}
allRemovals += cacheRemovals;
@@ -803,3 +784,5 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
}
} // namespace xrpl
#endif
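
A minimal sketch of the insert/retrieve contract spelled out by the member functions above. It is written as an uninstantiated helper template so that no particular set of TaggedCache template arguments is assumed; `cacheLookupOrStore` and its parameter names are hypothetical, and insert() is assumed to report, as a bool, whether the key was newly added (matching the enable_if overload shown above).

#include <optional>

// Hypothetical helper over any fully instantiated key/value TaggedCache.
template <class Cache, class Key, class Value>
std::optional<Value>
cacheLookupOrStore(Cache& cache, Key const& key, Value const& value)
{
    if (!cache.insert(key, value))    // assumed false when the key was already cached
        cache.touch_if_exists(key);   // refresh the entry's last-access time instead

    Value out;
    if (cache.retrieve(key, out))     // copies the cached data out, true on a hit
        return out;
    return std::nullopt;
}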

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_TOSTRING_H_INCLUDED
#define XRPL_BASICS_TOSTRING_H_INCLUDED
#include <string>
#include <type_traits>
@@ -43,3 +44,5 @@ to_string(char const* s)
}
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_UNORDEREDCONTAINERS_H_INCLUDED
#define XRPL_BASICS_UNORDEREDCONTAINERS_H_INCLUDED
#include <xrpl/basics/hardened_hash.h>
#include <xrpl/basics/partitioned_unordered_map.h>
@@ -98,3 +99,5 @@ template <
using hardened_hash_multiset = std::unordered_multiset<Value, Hash, Pred, Allocator>;
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_UPTIMETIMER_H_INCLUDED
#define XRPL_BASICS_UPTIMETIMER_H_INCLUDED
#include <atomic>
#include <chrono>
@@ -45,3 +46,5 @@ private:
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_ALGORITHM_H_INCLUDED
#define XRPL_ALGORITHM_H_INCLUDED
#include <utility>
@@ -51,13 +52,7 @@ generalized_set_intersection(
// std::set_intersection.
template <class FwdIter1, class InputIter2, class Pred, class Comp>
FwdIter1
remove_if_intersect_or_match(
FwdIter1 first1,
FwdIter1 last1,
InputIter2 first2,
InputIter2 last2,
Pred pred,
Comp comp)
remove_if_intersect_or_match(FwdIter1 first1, FwdIter1 last1, InputIter2 first2, InputIter2 last2, Pred pred, Comp comp)
{
// [original-first1, current-first1) is the set of elements to be preserved.
// [current-first1, i) is the set of elements that have been removed.
@@ -95,3 +90,5 @@ remove_if_intersect_or_match(
}
} // namespace xrpl
#endif
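
A small usage sketch of remove_if_intersect_or_match based on the comments above: both ranges are assumed to be sorted by the comparator, elements of the first range that appear in the second range or satisfy the predicate are removed, and the returned iterator is the new logical end. The include path and the exact preservation-order guarantee are assumptions, not verified here.

#include <vector>
#include <xrpl/algorithm.h>  // header path assumed from this diff

int main()
{
    std::vector<int> keep{1, 2, 3, 4, 5, 6};
    std::vector<int> const drop{2, 5};

    auto const newEnd = xrpl::remove_if_intersect_or_match(
        keep.begin(), keep.end(),
        drop.begin(), drop.end(),
        [](int v) { return v == 6; },          // also remove anything the predicate matches
        [](int a, int b) { return a < b; });   // both ranges sorted by this comparator

    keep.erase(newEnd, keep.end());            // keep is now expected to be {1, 3, 4}
}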

View File

@@ -32,7 +32,8 @@
*/
#pragma once
#ifndef XRPL_BASICS_BASE64_H_INCLUDED
#define XRPL_BASICS_BASE64_H_INCLUDED
#include <cstdint>
#include <string>
@@ -52,3 +53,5 @@ std::string
base64_decode(std::string_view data);
} // namespace xrpl
#endif
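
A round-trip sketch for the declarations above. Only base64_decode is visible in this hunk, so the matching base64_encode overload (and the include path) are assumed to come from the same header.

#include <cassert>
#include <string>
#include <xrpl/basics/base64.h>  // header path assumed from this diff

int main()
{
    std::string const plain = "hello world";
    std::string const encoded = xrpl::base64_encode(plain);   // assumed counterpart overload
    std::string const decoded = xrpl::base64_decode(encoded); // declared above
    assert(decoded == plain);
}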

View File

@@ -3,7 +3,8 @@
// Distributed under the MIT/X11 software license, see the accompanying
// file license.txt or http://www.opensource.org/licenses/mit-license.php.
#pragma once
#ifndef XRPL_BASICS_BASE_UINT_H_INCLUDED
#define XRPL_BASICS_BASE_UINT_H_INCLUDED
#include <xrpl/basics/Expected.h>
#include <xrpl/basics/Slice.h>
@@ -214,8 +215,7 @@ private:
std::uint32_t accum = {};
for (std::uint32_t shift : {4u, 0u, 12u, 8u, 20u, 16u, 28u, 24u})
{
if (auto const result = hexCharToUInt(*in++, shift, accum);
result != ParseResult::okay)
if (auto const result = hexCharToUInt(*in++, shift, accum); result != ParseResult::okay)
return Unexpected(result);
}
ret[i++] = accum;
@@ -254,8 +254,7 @@ public:
// This constructor is intended to be used at compile time since it might
// throw at runtime. Consider declaring this constructor consteval once
// we get to C++23.
explicit constexpr base_uint(std::string_view sv) noexcept(false)
: data_(parseFromStringViewThrows(sv))
explicit constexpr base_uint(std::string_view sv) noexcept(false) : data_(parseFromStringViewThrows(sv))
{
}
@@ -444,8 +443,7 @@ public:
for (int i = WIDTH; i--;)
{
std::uint64_t n = carry + boost::endian::big_to_native(data_[i]) +
boost::endian::big_to_native(b.data_[i]);
std::uint64_t n = carry + boost::endian::big_to_native(data_[i]) + boost::endian::big_to_native(b.data_[i]);
data_[i] = boost::endian::native_to_big(static_cast<std::uint32_t>(n));
carry = n >> 32;
@@ -646,3 +644,5 @@ struct is_uniquely_represented<xrpl::base_uint<Bits, Tag>> : public std::true_ty
};
} // namespace beast
#endif
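
A sketch of the constexpr string_view constructor shown above. It assumes the Tag template parameter has a default and that the string must spell out the full width of the value in hex, so a 160-bit base_uint takes exactly 40 hex digits.

#include <xrpl/basics/base_uint.h>  // header path assumed from this diff

// 160 bits == 20 bytes == 40 hex characters; a malformed or wrong-length string
// makes the noexcept(false) constructor throw at run time and simply fails the
// evaluation when used in a constant expression.
constexpr xrpl::base_uint<160> id("0123456789ABCDEF0123456789ABCDEF01234567");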

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_CHRONO_H_INCLUDED
#define XRPL_BASICS_CHRONO_H_INCLUDED
#include <xrpl/beast/clock/abstract_clock.h>
#include <xrpl/beast/clock/basic_seconds_clock.h>
@@ -15,8 +16,7 @@ namespace xrpl {
// A few handy aliases
using days =
std::chrono::duration<int, std::ratio_multiply<std::chrono::hours::period, std::ratio<24>>>;
using days = std::chrono::duration<int, std::ratio_multiply<std::chrono::hours::period, std::ratio<24>>>;
using weeks = std::chrono::duration<int, std::ratio_multiply<days::period, std::ratio<7>>>;
@@ -99,3 +99,5 @@ stopwatch()
}
} // namespace xrpl
#endif
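
The days and weeks aliases above are ordinary std::chrono durations, so they convert with duration_cast like any other unit; a small sketch (include path assumed):

#include <chrono>
#include <xrpl/basics/chrono.h>  // header path assumed from this diff

int main()
{
    xrpl::weeks const w{2};
    auto const d = std::chrono::duration_cast<xrpl::days>(w);          // 14 days
    auto const h = std::chrono::duration_cast<std::chrono::hours>(d);  // 336 hours

    static_assert(xrpl::days::period::num == 86400);  // seconds per day
    (void)h;
}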

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_COMPARATORS_H_INCLUDED
#define XRPL_BASICS_COMPARATORS_H_INCLUDED
#include <functional>
@@ -52,3 +53,5 @@ using equal_to = std::equal_to<T>;
#endif
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_CONTRACT_H_INCLUDED
#define XRPL_BASICS_CONTRACT_H_INCLUDED
#include <xrpl/beast/type_name.h>
@@ -35,9 +36,7 @@ template <class E, class... Args>
[[noreturn]] inline void
Throw(Args&&... args)
{
static_assert(
std::is_convertible<E*, std::exception*>::value,
"Exception must derive from std::exception.");
static_assert(std::is_convertible<E*, std::exception*>::value, "Exception must derive from std::exception.");
E e(std::forward<Args>(args)...);
LogThrow(std::string("Throwing exception of type " + beast::type_name<E>() + ": ") + e.what());
@@ -49,3 +48,5 @@ Throw(Args&&... args)
LogicError(std::string const& how) noexcept;
} // namespace xrpl
#endif
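
Throw<E> above log-and-throws any std::exception-derived type, forwarding the constructor arguments, while LogicError is the counterpart for invariant violations. A brief sketch (include path assumed; LogicError is assumed not to return):

#include <stdexcept>
#include <string>
#include <xrpl/basics/contract.h>  // header path assumed from this diff

void
requirePositive(int value)
{
    if (value < 0)
        xrpl::Throw<std::invalid_argument>(
            "value must be positive, got " + std::to_string(value));
    if (value == 0)
        xrpl::LogicError("callers are expected to filter out zero");  // assumed [[noreturn]]
}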

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_HARDENED_HASH_H_INCLUDED
#define XRPL_BASICS_HARDENED_HASH_H_INCLUDED
#include <xrpl/beast/hash/hash_append.h>
#include <xrpl/beast/hash/xxhasher.h>
@@ -92,3 +93,5 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef JOIN_H_INCLUDED
#define JOIN_H_INCLUDED
#include <string>
@@ -23,8 +24,7 @@ public:
Collection const& collection;
std::string const delimiter;
explicit CollectionAndDelimiter(Collection const& c, std::string delim)
: collection(c), delimiter(std::move(delim))
explicit CollectionAndDelimiter(Collection const& c, std::string delim) : collection(c), delimiter(std::move(delim))
{
}
@@ -64,8 +64,7 @@ public:
char const* collection;
std::string const delimiter;
explicit CollectionAndDelimiter(char const c[N], std::string delim)
: collection(c), delimiter(std::move(delim))
explicit CollectionAndDelimiter(char const c[N], std::string delim) : collection(c), delimiter(std::move(delim))
{
}
@@ -81,3 +80,5 @@ public:
};
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_MAKE_SSLCONTEXT_H_INCLUDED
#define XRPL_BASICS_MAKE_SSLCONTEXT_H_INCLUDED
#include <boost/asio/ssl/context.hpp>
@@ -19,3 +20,5 @@ make_SSLContextAuthed(
std::string const& cipherList);
} // namespace xrpl
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_MULDIV_H_INCLUDED
#define XRPL_BASICS_MULDIV_H_INCLUDED
#include <cstdint>
#include <limits>
@@ -21,3 +22,5 @@ std::optional<std::uint64_t>
mulDiv(std::uint64_t value, std::uint64_t mul, std::uint64_t div);
} // namespace xrpl
#endif
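
mulDiv above computes value * mul / div over 64-bit inputs and returns an optional; the empty result is assumed to signal that the final value did not fit. A sketch (include path assumed):

#include <cstdint>
#include <iostream>
#include <xrpl/basics/mulDiv.h>  // header path assumed from this diff

int main()
{
    // 90% of a large amount, computed as value * 9 / 10.
    if (auto const scaled = xrpl::mulDiv(1'000'000'000ull, 9, 10))
        std::cout << *scaled << '\n';
    else
        std::cout << "overflow\n";  // empty optional assumed to mean the result overflowed
}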

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_PARTITIONED_UNORDERED_MAP_H
#define XRPL_BASICS_PARTITIONED_UNORDERED_MAP_H
#include <xrpl/beast/hash/uhash.h>
#include <xrpl/beast/utility/instrumentation.h>
@@ -330,8 +331,8 @@ public:
auto const& key = std::get<0>(keyTuple);
iterator it(&map_);
it.ait_ = it.map_->begin() + partitioner(key);
auto [eit, inserted] = it.ait_->emplace(
std::piecewise_construct, std::forward<T>(keyTuple), std::forward<U>(valueTuple));
auto [eit, inserted] =
it.ait_->emplace(std::piecewise_construct, std::forward<T>(keyTuple), std::forward<U>(valueTuple));
it.mit_ = eit;
return {it, inserted};
}
@@ -392,3 +393,5 @@ private:
};
} // namespace xrpl
#endif // XRPL_BASICS_PARTITIONED_UNORDERED_MAP_H

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_RANDOM_H_INCLUDED
#define XRPL_BASICS_RANDOM_H_INCLUDED
#include <xrpl/beast/utility/instrumentation.h>
#include <xrpl/beast/xor_shift_engine.h>
@@ -19,8 +20,7 @@ static_assert(
"The Ripple default PRNG engine must return an unsigned integral type.");
static_assert(
std::numeric_limits<beast::xor_shift_engine::result_type>::max() >=
std::numeric_limits<std::uint64_t>::max(),
std::numeric_limits<beast::xor_shift_engine::result_type>::max() >= std::numeric_limits<std::uint64_t>::max(),
"The Ripple default PRNG engine return must be at least 64 bits wide.");
#endif
@@ -145,14 +145,12 @@ std::enable_if_t<
Byte>
rand_byte(Engine& engine)
{
return static_cast<Byte>(rand_int<Engine, std::uint32_t>(
engine, std::numeric_limits<Byte>::min(), std::numeric_limits<Byte>::max()));
return static_cast<Byte>(
rand_int<Engine, std::uint32_t>(engine, std::numeric_limits<Byte>::min(), std::numeric_limits<Byte>::max()));
}
template <class Byte = std::uint8_t>
std::enable_if_t<
(std::is_same<Byte, unsigned char>::value || std::is_same<Byte, std::uint8_t>::value),
Byte>
std::enable_if_t<(std::is_same<Byte, unsigned char>::value || std::is_same<Byte, std::uint8_t>::value), Byte>
rand_byte()
{
return rand_byte<Byte>(default_prng());
@@ -176,3 +174,5 @@ rand_bool()
/** @} */
} // namespace xrpl
#endif // XRPL_BASICS_RANDOM_H_INCLUDED
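
A sketch of the helpers visible above: default_prng() supplies the engine, rand_int draws from a closed range (as rand_byte does internally), and rand_byte/rand_bool are the convenience wrappers. The exact overload sets and the include path are assumed from the fragments shown.

#include <cstdint>
#include <xrpl/basics/random.h>  // header path assumed from this diff

int main()
{
    auto& engine = xrpl::default_prng();

    auto const roll = xrpl::rand_int(engine, 1, 6);  // closed-range draw, as used by rand_byte
    std::uint8_t const b = xrpl::rand_byte();        // defaults to std::uint8_t
    bool const coin = xrpl::rand_bool();

    (void)roll; (void)b; (void)coin;
}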

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_UNITY_ROCKSDB_H_INCLUDED
#define XRPL_UNITY_ROCKSDB_H_INCLUDED
#if XRPL_ROCKSDB_AVAILABLE
// #include <rocksdb2/port/port_posix.h>
@@ -27,3 +28,5 @@
#include <rocksdb/write_batch.h>
#endif
#endif

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_SAFE_CAST_H_INCLUDED
#define XRPL_BASICS_SAFE_CAST_H_INCLUDED
#include <type_traits>
@@ -18,12 +19,9 @@ template <class Dest, class Src>
inline constexpr std::enable_if_t<std::is_integral_v<Dest> && std::is_integral_v<Src>, Dest>
safe_cast(Src s) noexcept
{
static_assert(
std::is_signed_v<Dest> || std::is_unsigned_v<Src>, "Cannot cast signed to unsigned");
static_assert(std::is_signed_v<Dest> || std::is_unsigned_v<Src>, "Cannot cast signed to unsigned");
constexpr unsigned not_same = std::is_signed_v<Dest> != std::is_signed_v<Src>;
static_assert(
sizeof(Dest) >= sizeof(Src) + not_same,
"Destination is too small to hold all values of source");
static_assert(sizeof(Dest) >= sizeof(Src) + not_same, "Destination is too small to hold all values of source");
return static_cast<Dest>(s);
}
@@ -71,3 +69,5 @@ unsafe_cast(Src s) noexcept
}
} // namespace xrpl
#endif
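
safe_cast above only compiles when the conversion can never lose values (no signed-to-unsigned, destination at least as wide), so it documents widening conversions at zero run-time cost; a sketch (include path assumed):

#include <cstdint>
#include <xrpl/basics/safe_cast.h>  // header path assumed from this diff

int main()
{
    std::uint32_t const small = 42;
    auto const wide = xrpl::safe_cast<std::uint64_t>(small);  // widening, same signedness

    std::int16_t const s = -7;
    auto const wider = xrpl::safe_cast<std::int64_t>(s);      // signed to wider signed

    // xrpl::safe_cast<std::uint32_t>(s);  // would trip the "Cannot cast signed to unsigned" static_assert

    (void)wide; (void)wider;
}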

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_SCOPE_H_INCLUDED
#define XRPL_BASICS_SCOPE_H_INCLUDED
#include <xrpl/beast/utility/instrumentation.h>
@@ -36,8 +37,7 @@ public:
scope_exit(scope_exit&& rhs) noexcept(
std::is_nothrow_move_constructible_v<EF> || std::is_nothrow_copy_constructible_v<EF>)
: exit_function_{std::forward<EF>(rhs.exit_function_)}
, execute_on_destruction_{rhs.execute_on_destruction_}
: exit_function_{std::forward<EF>(rhs.exit_function_)}, execute_on_destruction_{rhs.execute_on_destruction_}
{
rhs.release();
}
@@ -48,9 +48,8 @@ public:
template <class EFP>
explicit scope_exit(
EFP&& f,
std::enable_if_t<
!std::is_same_v<std::remove_cv_t<EFP>, scope_exit> &&
std::is_constructible_v<EF, EFP>>* = 0) noexcept
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_exit> && std::is_constructible_v<EF, EFP>>* =
0) noexcept
: exit_function_{std::forward<EFP>(f)}
{
static_assert(std::is_nothrow_constructible_v<EF, decltype(std::forward<EFP>(f))>);
@@ -95,9 +94,8 @@ public:
template <class EFP>
explicit scope_fail(
EFP&& f,
std::enable_if_t<
!std::is_same_v<std::remove_cv_t<EFP>, scope_fail> &&
std::is_constructible_v<EF, EFP>>* = 0) noexcept
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_fail> && std::is_constructible_v<EF, EFP>>* =
0) noexcept
: exit_function_{std::forward<EFP>(f)}
{
static_assert(std::is_nothrow_constructible_v<EF, decltype(std::forward<EFP>(f))>);
@@ -142,9 +140,7 @@ public:
template <class EFP>
explicit scope_success(
EFP&& f,
std::enable_if_t<
!std::is_same_v<std::remove_cv_t<EFP>, scope_success> &&
std::is_constructible_v<EF, EFP>>* =
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_success> && std::is_constructible_v<EF, EFP>>* =
0) noexcept(std::is_nothrow_constructible_v<EF, EFP> || std::is_nothrow_constructible_v<EF, EFP&>)
: exit_function_{std::forward<EFP>(f)}
{
@@ -224,3 +220,5 @@ template <class Mutex>
scope_unlock(std::unique_lock<Mutex>&) -> scope_unlock<Mutex>;
} // namespace xrpl
#endif
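
A sketch of scope_exit from the declarations above: the exit function runs on every path out of the scope, and release() (seen in the move constructor) disarms the guard. The class-template deduction guide and the include path are assumed.

#include <cstdio>
#include <xrpl/basics/scope.h>  // header path assumed from this diff

void
writeFile(char const* path)
{
    std::FILE* f = std::fopen(path, "w");
    if (!f)
        return;

    // Runs on every exit path unless release() is called first.
    xrpl::scope_exit close{[f]() noexcept { std::fclose(f); }};

    std::fputs("hello\n", f);
    // ... more work; the file is closed automatically when `close` is destroyed ...
}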

View File

@@ -1,6 +1,7 @@
// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>
#pragma once
#ifndef XRPL_BASICS_SPINLOCK_H_INCLUDED
#define XRPL_BASICS_SPINLOCK_H_INCLUDED
#include <xrpl/beast/utility/instrumentation.h>
@@ -99,12 +100,9 @@ public:
@note For performance reasons, you should strive to have `lock` be
on a cacheline by itself.
*/
packed_spinlock(std::atomic<T>& lock, int index)
: bits_(lock), mask_(static_cast<T>(1) << index)
packed_spinlock(std::atomic<T>& lock, int index) : bits_(lock), mask_(static_cast<T>(1) << index)
{
XRPL_ASSERT(
index >= 0 && (mask_ != 0),
"xrpl::packed_spinlock::packed_spinlock : valid index and mask");
XRPL_ASSERT(index >= 0 && (mask_ != 0), "xrpl::packed_spinlock::packed_spinlock : valid index and mask");
}
[[nodiscard]] bool
@@ -177,10 +175,7 @@ public:
T expected = 0;
return lock_.compare_exchange_weak(
expected,
std::numeric_limits<T>::max(),
std::memory_order_acquire,
std::memory_order_relaxed);
expected, std::numeric_limits<T>::max(), std::memory_order_acquire, std::memory_order_relaxed);
}
void
@@ -206,3 +201,5 @@ public:
/** @} */
} // namespace xrpl
#endif
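
A sketch of packed_spinlock from the constructor above: several independent locks share one atomic word, each guarded by a single bit. The lock()/unlock() member names and the include path are assumptions; only the constructor and a try-lock style member are visible in this hunk.

#include <atomic>
#include <cstdint>
#include <mutex>
#include <xrpl/basics/spinlock.h>  // header path assumed from this diff

std::atomic<std::uint64_t> lockBits{0};  // up to 64 spinlocks packed into one word

void
touchSlot(int slot)
{
    // Bit `slot` of lockBits acts as this slot's dedicated spinlock.
    xrpl::packed_spinlock sl(lockBits, slot);
    std::lock_guard guard(sl);  // assumes the usual lock()/unlock() members
    // ... critical section for this slot ...
}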

View File

@@ -1,4 +1,5 @@
#pragma once
#ifndef XRPL_BASICS_STRHEX_H_INCLUDED
#define XRPL_BASICS_STRHEX_H_INCLUDED
#include <boost/algorithm/hex.hpp>
#include <boost/endian/conversion.hpp>
@@ -10,9 +11,7 @@ std::string
strHex(FwdIt begin, FwdIt end)
{
static_assert(
std::is_convertible<
typename std::iterator_traits<FwdIt>::iterator_category,
std::forward_iterator_tag>::value,
std::is_convertible<typename std::iterator_traits<FwdIt>::iterator_category, std::forward_iterator_tag>::value,
"FwdIt must be a forward iterator");
std::string result;
result.reserve(2 * std::distance(begin, end));
@@ -28,3 +27,5 @@ strHex(T const& from)
}
} // namespace xrpl
#endif
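
strHex above accepts either an iterator pair or (via the second overload shown) any container with begin/end, and returns the bytes rendered as a hex string through boost::algorithm::hex; a sketch (include path assumed):

#include <array>
#include <cstdint>
#include <iostream>
#include <xrpl/basics/strHex.h>  // header path assumed from this diff

int main()
{
    std::array<std::uint8_t, 4> const bytes{0xDE, 0xAD, 0xBE, 0xEF};

    // Iterator-pair overload.
    std::cout << xrpl::strHex(bytes.begin(), bytes.end()) << '\n';  // e.g. DEADBEEF

    // Container overload, strHex(T const& from), assumed to forward to begin()/end().
    std::cout << xrpl::strHex(bytes) << '\n';
}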

Some files were not shown because too many files have changed in this diff.