Compare commits


7 Commits

Author SHA1 Message Date
Pratik Mankawde
f2ec087fe2 build only
Signed-off-by: Pratik Mankawde <3397372+pratikmankawde@users.noreply.github.com>
2026-01-23 11:21:43 +00:00
Pratik Mankawde
a6b0671f21 cleanup one more include
Signed-off-by: Pratik Mankawde <3397372+pratikmankawde@users.noreply.github.com>
2026-01-22 14:07:06 +00:00
Pratik Mankawde
9d08722cd0 minor change
Signed-off-by: Pratik Mankawde <3397372+pratikmankawde@users.noreply.github.com>
2026-01-22 12:43:18 +00:00
Pratik Mankawde
6e17998acb ordering changes
Signed-off-by: Pratik Mankawde <3397372+pratikmankawde@users.noreply.github.com>
2026-01-22 12:15:18 +00:00
Pratik Mankawde
72ad34d214 more changes
Signed-off-by: Pratik Mankawde <3397372+pratikmankawde@users.noreply.github.com>
2026-01-22 11:46:20 +00:00
Pratik Mankawde
c9dd2d73e2 Merge remote-tracking branch 'origin/develop' into pratik/Move-includes-to-cpp-files
2026-01-22 11:05:03 +00:00
Pratik Mankawde
63d91b24b9 first itr
Signed-off-by: Pratik Mankawde <3397372+pratikmankawde@users.noreply.github.com>
2026-01-22 11:03:40 +00:00
1901 changed files with 91656 additions and 180596 deletions


@@ -37,7 +37,7 @@ BinPackParameters: false
BreakBeforeBinaryOperators: false BreakBeforeBinaryOperators: false
BreakBeforeTernaryOperators: true BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true BreakConstructorInitializersBeforeComma: true
ColumnLimit: 100 ColumnLimit: 80
CommentPragmas: "^ IWYU pragma:" CommentPragmas: "^ IWYU pragma:"
ConstructorInitializerAllOnOneLineOrOnePerLine: true ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4 ConstructorInitializerIndentWidth: 4
@@ -50,21 +50,20 @@ ForEachMacros: [Q_FOREACH, BOOST_FOREACH]
IncludeBlocks: Regroup IncludeBlocks: Regroup
IncludeCategories: IncludeCategories:
- Regex: "^<(test)/" - Regex: "^<(test)/"
Priority: 1 Priority: 0
- Regex: "^<(xrpld)/" - Regex: "^<(xrpld)/"
Priority: 2 Priority: 1
- Regex: "^<(xrpl)/" - Regex: "^<(xrpl)/"
Priority: 3 Priority: 2
- Regex: "^<(boost)/" - Regex: "^<(boost)/"
Priority: 4 Priority: 3
- Regex: "^.*/" - Regex: "^.*/"
Priority: 5 Priority: 4
- Regex: '^.*\.h' - Regex: '^.*\.h'
Priority: 6 Priority: 5
- Regex: ".*" - Regex: ".*"
Priority: 7 Priority: 6
IncludeIsMainRegex: "$" IncludeIsMainRegex: "$"
MainIncludeChar: AngleBracket
IndentCaseLabels: true IndentCaseLabels: true
IndentFunctionDeclarationAfterType: false IndentFunctionDeclarationAfterType: false
IndentRequiresClause: true IndentRequiresClause: true


@@ -1,200 +0,0 @@
---
# This entire group of checks was applied to all cpp files but not all header files.
# ---
Checks: "-*,
bugprone-argument-comment,
bugprone-assert-side-effect,
bugprone-bad-signal-to-kill-thread,
bugprone-bool-pointer-implicit-conversion,
bugprone-casting-through-void,
bugprone-chained-comparison,
bugprone-compare-pointer-to-member-virtual-function,
bugprone-copy-constructor-init,
bugprone-crtp-constructor-accessibility,
bugprone-dangling-handle,
bugprone-dynamic-static-initializers,
bugprone-empty-catch,
bugprone-fold-init-type,
bugprone-forward-declaration-namespace,
bugprone-inaccurate-erase,
bugprone-inc-dec-in-conditions,
bugprone-incorrect-enable-if,
bugprone-incorrect-roundings,
bugprone-infinite-loop,
bugprone-integer-division,
bugprone-lambda-function-name,
bugprone-macro-parentheses,
bugprone-macro-repeated-side-effects,
bugprone-misplaced-operator-in-strlen-in-alloc,
bugprone-misplaced-pointer-arithmetic-in-alloc,
bugprone-misplaced-widening-cast,
bugprone-move-forwarding-reference,
bugprone-multi-level-implicit-pointer-conversion,
bugprone-multiple-new-in-one-expression,
bugprone-multiple-statement-macro,
bugprone-no-escape,
bugprone-non-zero-enum-to-bool-conversion,
bugprone-optional-value-conversion,
bugprone-parent-virtual-call,
bugprone-pointer-arithmetic-on-polymorphic-object,
bugprone-posix-return,
bugprone-redundant-branch-condition,
bugprone-reserved-identifier,
bugprone-return-const-ref-from-parameter,
bugprone-shared-ptr-array-mismatch,
bugprone-signal-handler,
bugprone-signed-char-misuse,
bugprone-sizeof-container,
bugprone-sizeof-expression,
bugprone-spuriously-wake-up-functions,
bugprone-standalone-empty,
bugprone-string-constructor,
bugprone-string-integer-assignment,
bugprone-string-literal-with-embedded-nul,
bugprone-stringview-nullptr,
bugprone-suspicious-enum-usage,
bugprone-suspicious-include,
bugprone-suspicious-memory-comparison,
bugprone-suspicious-memset-usage,
bugprone-suspicious-missing-comma,
bugprone-suspicious-realloc-usage,
bugprone-suspicious-semicolon,
bugprone-suspicious-string-compare,
bugprone-suspicious-stringview-data-usage,
bugprone-swapped-arguments,
bugprone-switch-missing-default-case,
bugprone-terminating-continue,
bugprone-throw-keyword-missing,
bugprone-too-small-loop-variable,
bugprone-unchecked-optional-access,
bugprone-undefined-memory-manipulation,
bugprone-undelegated-constructor,
bugprone-unhandled-exception-at-new,
bugprone-unhandled-self-assignment,
bugprone-unique-ptr-array-mismatch,
bugprone-unsafe-functions,
bugprone-use-after-move,
bugprone-unused-raii,
bugprone-unused-return-value,
bugprone-unused-local-non-trivial-variable,
bugprone-virtual-near-miss,
cppcoreguidelines-init-variables,
cppcoreguidelines-misleading-capture-default-by-value,
cppcoreguidelines-no-suspend-with-lock,
cppcoreguidelines-pro-type-member-init,
cppcoreguidelines-pro-type-static-cast-downcast,
cppcoreguidelines-rvalue-reference-param-not-moved,
cppcoreguidelines-use-default-member-init,
cppcoreguidelines-virtual-class-destructor,
hicpp-ignored-remove-result,
misc-const-correctness,
misc-definitions-in-headers,
misc-header-include-cycle,
misc-include-cleaner,
misc-misplaced-const,
misc-redundant-expression,
misc-static-assert,
misc-throw-by-value-catch-by-reference,
misc-unused-alias-decls,
misc-unused-using-decls,
modernize-concat-nested-namespaces,
modernize-make-shared,
modernize-make-unique,
modernize-pass-by-value,
modernize-type-traits,
modernize-use-designated-initializers,
modernize-use-emplace,
modernize-use-equals-default,
modernize-use-equals-delete,
modernize-use-nodiscard,
modernize-use-override,
modernize-use-ranges,
modernize-use-starts-ends-with,
modernize-use-std-numbers,
modernize-use-using,
modernize-deprecated-headers,
llvm-namespace-comment,
performance-faster-string-find,
performance-for-range-copy,
performance-implicit-conversion-in-loop,
performance-inefficient-vector-operation,
performance-move-const-arg,
performance-move-constructor-init,
performance-no-automatic-move,
performance-trivially-destructible,
readability-avoid-nested-conditional-operator,
readability-avoid-return-with-void-value,
readability-braces-around-statements,
readability-const-return-type,
readability-container-contains,
readability-container-size-empty,
readability-convert-member-functions-to-static,
readability-duplicate-include,
readability-else-after-return,
readability-enum-initial-value,
readability-implicit-bool-conversion,
readability-make-member-function-const,
readability-math-missing-parentheses,
readability-misleading-indentation,
readability-non-const-parameter,
readability-redundant-casting,
readability-redundant-declaration,
readability-redundant-inline-specifier,
readability-redundant-member-init,
readability-redundant-string-init,
readability-reference-to-constructed-temporary,
readability-simplify-boolean-expr,
readability-static-definition-in-anonymous-namespace,
readability-suspicious-call-argument,
readability-use-std-min-max
"
# ---
# other checks that have issues that need to be resolved:
#
# readability-inconsistent-declaration-parameter-name, # in this codebase this check will break a lot of arg names
# readability-static-accessed-through-instance, # this check is probably unnecessary. it makes the code less readable
# readability-identifier-naming, # https://github.com/XRPLF/rippled/pull/6571
# ---
#
CheckOptions:
readability-braces-around-statements.ShortStatementLines: 2
# readability-identifier-naming.MacroDefinitionCase: UPPER_CASE
# readability-identifier-naming.ClassCase: CamelCase
# readability-identifier-naming.StructCase: CamelCase
# readability-identifier-naming.UnionCase: CamelCase
# readability-identifier-naming.EnumCase: CamelCase
# readability-identifier-naming.EnumConstantCase: CamelCase
# readability-identifier-naming.ScopedEnumConstantCase: CamelCase
# readability-identifier-naming.GlobalConstantCase: UPPER_CASE
# readability-identifier-naming.GlobalConstantPrefix: "k"
# readability-identifier-naming.GlobalVariableCase: CamelCase
# readability-identifier-naming.GlobalVariablePrefix: "g"
# readability-identifier-naming.ConstexprFunctionCase: camelBack
# readability-identifier-naming.ConstexprMethodCase: camelBack
# readability-identifier-naming.ClassMethodCase: camelBack
# readability-identifier-naming.ClassMemberCase: camelBack
# readability-identifier-naming.ClassConstantCase: UPPER_CASE
# readability-identifier-naming.ClassConstantPrefix: "k"
# readability-identifier-naming.StaticConstantCase: UPPER_CASE
# readability-identifier-naming.StaticConstantPrefix: "k"
# readability-identifier-naming.StaticVariableCase: UPPER_CASE
# readability-identifier-naming.StaticVariablePrefix: "k"
# readability-identifier-naming.ConstexprVariableCase: UPPER_CASE
# readability-identifier-naming.ConstexprVariablePrefix: "k"
# readability-identifier-naming.LocalConstantCase: camelBack
# readability-identifier-naming.LocalVariableCase: camelBack
# readability-identifier-naming.TemplateParameterCase: CamelCase
# readability-identifier-naming.ParameterCase: camelBack
# readability-identifier-naming.FunctionCase: camelBack
# readability-identifier-naming.MemberCase: camelBack
# readability-identifier-naming.PrivateMemberSuffix: _
# readability-identifier-naming.ProtectedMemberSuffix: _
# readability-identifier-naming.PublicMemberSuffix: ""
# readability-identifier-naming.FunctionIgnoredRegexp: ".*tag_invoke.*"
bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
misc-include-cleaner.IgnoreHeaders: ".*/(detail|impl)/.*;.*fwd\\.h(pp)?;time.h;stdlib.h;sqlite3.h;netinet/in\\.h;sys/resource\\.h;sys/sysinfo\\.h;linux/sysinfo\\.h;__chrono/.*;bits/.*;_abort\\.h;boost/uuid/uuid_hash.hpp;boost/beast/core/flat_buffer\\.hpp;boost/beast/http/field\\.hpp;boost/beast/http/dynamic_body\\.hpp;boost/beast/http/message\\.hpp;boost/beast/http/read\\.hpp;boost/beast/http/write\\.hpp;openssl/obj_mac\\.h"
#
HeaderFilterRegex: '^.*/(test|xrpl|xrpld)/.*\.(h|hpp)$'
ExcludeHeaderFilterRegex: '^.*/protocol_autogen/.*\.(h|hpp)$'
WarningsAsErrors: "*"
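To make one of these options concrete: `bugprone-unused-return-value.CheckedReturnTypes` adds `std::error_code` and related types to the set of return types whose values should not be silently discarded. A minimal sketch of the behaviour (the function names are hypothetical, not from this codebase):

```cpp
#include <system_error>

std::error_code saveConfig();  // hypothetical function used for illustration

void demo()
{
    saveConfig();  // would be flagged: a std::error_code result is discarded

    if (std::error_code ec = saveConfig())  // fine: the result is inspected
        (void)ec.message();
}
```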


@@ -1,14 +1,14 @@
ignorePaths: ignorePaths:
- build/** - build/**
- src/libxrpl/crypto - src/libxrpl/crypto
- src/test/** # Will be removed in the future
- CMakeUserPresets.json - CMakeUserPresets.json
- Doxyfile - Doxyfile
- docs/**/*.puml - docs/**/*.puml
- cmake/** - cmake/**
- LICENSE.md - LICENSE.md
- .clang-tidy
language: en language: en
allowCompoundWords: true # TODO (#6334) allowCompoundWords: true
ignoreRandomStrings: true ignoreRandomStrings: true
minWordLength: 5 minWordLength: 5
dictionaries: dictionaries:
@@ -16,38 +16,25 @@ dictionaries:
- en_US - en_US
- en_GB - en_GB
ignoreRegExpList: ignoreRegExpList:
- /\b[rs][1-9A-HJ-NP-Za-km-z]{25,34}/g # addresses and seeds - /[rs][1-9A-HJ-NP-Za-km-z]{25,34}/g # addresses and seeds
- /\bC[A-Z0-9]{15}/g # CTIDs - /(XRPL|BEAST)_[A-Z_0-9]+_H_INCLUDED+/g # include guards
- /\b(XRPL|BEAST)_[A-Z_0-9]+_H_INCLUDED+/g # include guards - /(XRPL|BEAST)_[A-Z_0-9]+_H+/g # include guards
- /\b(XRPL|BEAST)_[A-Z_0-9]+_H+/g # include guards
- /::[a-z:_]+/g # things from other namespaces - /::[a-z:_]+/g # things from other namespaces
- /\blib[a-z]+/g # libraries - /lib[a-z]+/g # libraries
- /\b[0-9]{4}-[0-9]{2}-[0-9]{2}[,:][A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright dates - /[0-9]{4}-[0-9]{2}-[0-9]{2}[,:][A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright dates
- /\b[0-9]{4}[,:]?\s*[A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright years - /[0-9]{4}[,:]?\s*[A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright years
- /\[[A-Za-z0-9-]+\]\(https:\/\/github.com\/[A-Za-z0-9-]+\)/g # Github usernames - /\[[A-Za-z0-9-]+\]\(https:\/\/github.com\/[A-Za-z0-9-]+\)/g # Github usernames
- /-[DWw][a-zA-Z0-9_-]+=/g # compile flags - /-[DWw][a-zA-Z0-9_-]+=/g # compile flags
- /[\['"`]-[DWw][a-zA-Z0-9_-]+['"`\]]/g # compile flags - /[\['"`]-[DWw][a-zA-Z0-9_-]+['"`\]]/g # compile flags
- ABCDEFGHIJKLMNOPQRSTUVWXYZ
- ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
overrides:
- filename: "**/*_test.cpp" # all test files
ignoreRegExpList:
- /"[^"]*"/g # double-quoted strings
- /'[^']*'/g # single-quoted strings
- /`[^`]*`/g # backtick strings
suggestWords: suggestWords:
- xprl->xrpl - xprl->xrpl
- xprld->xrpld # cspell: disable-line not sure what this problem is.... - xprld->xrpld
- unsynched->unsynced # cspell: disable-line not sure what this problem is.... - unsynched->unsynced
- synched->synced - synched->synced
- synch->sync - synch->sync
words: words:
- abempty - abempty
- AMMID - AMMID
- AMMMPT
- AMMMPToken
- AMMMPTokens
- AMMXRP
- amt - amt
- amts - amts
- asnode - asnode
@@ -64,7 +51,6 @@ words:
- Britto - Britto
- Btrfs - Btrfs
- canonicality - canonicality
- changespq
- checkme - checkme
- choco - choco
- chrono - chrono
@@ -100,11 +86,9 @@ words:
- distro - distro
- doxyfile - doxyfile
- dxrpl - dxrpl
- enabled
- endmacro - endmacro
- exceptioned - exceptioned
- Falco - Falco
- fcontext
- finalizers - finalizers
- firewalled - firewalled
- fmtdur - fmtdur
@@ -117,25 +101,19 @@ words:
- gpgcheck - gpgcheck
- gpgkey - gpgkey
- hotwallet - hotwallet
- hwaddress
- hwrap
- ifndef - ifndef
- inequation - inequation
- insuf - insuf
- insuff - insuff
- invasively
- iou - iou
- ious - ious
- isrdc - isrdc
- itype - itype
- jemalloc - jemalloc
- jlog - jlog
- jtnofill
- keylet - keylet
- keylets - keylets
- keyvadb - keyvadb
- kwarg
- kwargs
- ledgerentry - ledgerentry
- ledgerhash - ledgerhash
- ledgerindex - ledgerindex
@@ -153,24 +131,19 @@ words:
- ltype - ltype
- mcmodel - mcmodel
- MEMORYSTATUSEX - MEMORYSTATUSEX
- MPTAMM
- MPTDEX
- Merkle - Merkle
- Metafuncton - Metafuncton
- misprediction - misprediction
- mptbalance - mptbalance
- MPTDEX
- mptflags - mptflags
- mptid - mptid
- mptissuance - mptissuance
- mptissuanceid - mptissuanceid
- mptissue
- mptoken - mptoken
- mptokenid - mptokenid
- mptokenissuance - mptokenissuance
- mptokens - mptokens
- mpts - mpts
- mtgox
- multisig - multisig
- multisign - multisign
- multisigned - multisigned
@@ -183,11 +156,6 @@ words:
- nftokens - nftokens
- nftpage - nftpage
- nikb - nikb
- nixfmt
- nixos
- nixpkgs
- NOLINT
- NOLINTNEXTLINE
- nonxrp - nonxrp
- noripple - noripple
- nudb - nudb
@@ -195,7 +163,6 @@ words:
- nunl - nunl
- Nyffenegger - Nyffenegger
- ostr - ostr
- pargs
- partitioner - partitioner
- paychan - paychan
- paychans - paychans
@@ -203,7 +170,6 @@ words:
- perminute - perminute
- permissioned - permissioned
- pointee - pointee
- populator
- preauth - preauth
- preauthorization - preauthorization
- preauthorize - preauthorize
@@ -212,9 +178,7 @@ words:
- protobuf - protobuf
- protos - protos
- ptrs - ptrs
- pushd
- pyenv - pyenv
- pyparsing
- qalloc - qalloc
- queuable - queuable
- Raphson - Raphson
@@ -240,7 +204,6 @@ words:
- seqit - seqit
- sf - sf
- SFIELD - SFIELD
- sfields
- shamap - shamap
- shamapitem - shamapitem
- sidechain - sidechain
@@ -305,7 +268,6 @@ words:
- venv - venv
- vfalco - vfalco
- vinnie - vinnie
- wasmi
- wextra - wextra
- wptr - wptr
- writeme - writeme
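As a quick illustration of the address/seed pattern in the `ignoreRegExpList` entries above (the sample account is a well-known XRPL address, used purely for demonstration):

```python
import re

# Illustrative only: the cspell ignore pattern for XRPL classic addresses and seeds.
pattern = re.compile(r"\b[rs][1-9A-HJ-NP-Za-km-z]{25,34}")

text = "Payment from rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh was applied."
match = pattern.search(text)
print(match.group(0) if match else "no match")
# -> rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh
```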


@@ -1,101 +0,0 @@
# Custom CMake command definitions for gersemi formatting.
# These stubs teach gersemi the signatures of project-specific commands
# so it can format their invocations correctly.
function(git_branch branch_val)
endfunction()
function(isolate_headers target A B scope)
endfunction()
function(create_symbolic_link target link)
endfunction()
function(xrpl_add_test name)
endfunction()
macro(exclude_from_default target_)
endmacro()
macro(exclude_if_included target_)
endmacro()
function(target_protobuf_sources target prefix)
set(options APPEND_PATH DESCRIPTORS)
set(oneValueArgs
LANGUAGE
OUT_VAR
EXPORT_MACRO
TARGET
PROTOC_OUT_DIR
PLUGIN
PLUGIN_OPTIONS
PROTOC_EXE
)
set(multiValueArgs
PROTOS
IMPORT_DIRS
GENERATE_EXTENSIONS
PROTOC_OPTIONS
DEPENDENCIES
)
cmake_parse_arguments(
THIS_FUNCTION_PREFIX
"${options}"
"${oneValueArgs}"
"${multiValueArgs}"
${ARGN}
)
endfunction()
function(add_module parent name)
endfunction()
function(setup_protocol_autogen)
endfunction()
function(target_link_modules parent scope)
endfunction()
function(setup_target_for_coverage_gcovr)
set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments(
THIS_FUNCTION_PREFIX
"${options}"
"${oneValueArgs}"
"${multiValueArgs}"
${ARGN}
)
endfunction()
function(add_code_coverage_to_target name scope)
endfunction()
function(verbose_find_path variable name)
set(options
NO_CACHE
REQUIRED
OPTIONAL
NO_DEFAULT_PATH
NO_PACKAGE_ROOT_PATH
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH
NO_CMAKE_INSTALL_PREFIX
CMAKE_FIND_ROOT_PATH_BOTH
ONLY_CMAKE_FIND_ROOT_PATH
NO_CMAKE_FIND_ROOT_PATH
)
set(oneValueArgs REGISTRY_VIEW VALIDATOR DOC)
set(multiValueArgs NAMES HINTS PATHS PATH_SUFFIXES)
cmake_parse_arguments(
THIS_FUNCTION_PREFIX
"${options}"
"${oneValueArgs}"
"${multiValueArgs}"
${ARGN}
)
endfunction()


@@ -1 +0,0 @@
definitions: [.gersemi]
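For context, the `.gersemi` stubs above exist so that gersemi knows the signatures of project-specific CMake commands, and `.gersemirc` points the tool at them. A hedged sketch of an equivalent command-line invocation (the exact hook wiring in this repository is not shown in the diff):

```bash
# Illustrative only: check CMake formatting using the custom command stubs.
gersemi --definitions .gersemi --check CMakeLists.txt cmake/
```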


@@ -1,79 +1,16 @@
# This feature requires Git >= 2.24 # This feature requires Git >= 2.24
# To use it by default in git blame: # To use it by default in git blame:
# git config blame.ignoreRevsFile .git-blame-ignore-revs # git config blame.ignoreRevsFile .git-blame-ignore-revs
# This file is sorted in reverse chronological order, with the most recent commits at the top.
# The commits listed here are ignored by git blame, which is useful for formatting-only commits that would otherwise obscure the history of changes to a file.
# refactor: Enable remaining clang-tidy `cppcoreguidelines` checks (#6538)
72f4cb097f626b08b02fc3efcb4aa11cb2e7adb8
# refactor: Rename system name from 'ripple' to 'xrpld' (#6347)
ffea3977f0b771fe8e43a8f74e4d393d63a7afd8
# refactor: Update transaction folder structure (#6483)
5865bd017f777491b4a956f9210be0c4161f5442
# chore: Use gersemi instead of ancient cmake-format (#6486)
0c74270b055133a57a497b5c9fc5a75f7647b1f4
# chore: Apply clang-format width 100 (#6387)
2c1fad102353e11293e3edde1c043224e7d3e983
# chore: Set clang-format width to 100 in config file (#6387)
25cca465538a56cce501477f9e5e2c1c7ea2d84c
# chore: Set cmake-format width to 100 (#6386)
469ce9f291a4480c38d4ee3baca5136b2f053cd0
# refactor: Modularize app/tx (#6228)
0976b2b68b64972af8e6e7c497900b5bce9fe22f
# chore: Update clang-format to 21.1.8 (#6352)
958d8f375453d80bb1aa4c293b5102c045a3e4b4
# refactor: Replace include guards by '#pragma once' (#6322)
34ef577604782ca8d6e1c17df8bd7470990a52ff
# chore: Format all cmake files without comments (#6294)
fe9c8d568fcf6ac21483024e01f58962dd5c8260
# chore: Add cmake-format pre-commit hook (#6279)
a0e09187b9370805d027c611a7e9ff5a0125282a
# chore: Set ColumnLimit to 120 in clang-format (#6288)
5f638f55536def0d88b970d1018a465a238e55f4
# refactor: Fix typos in comments, configure cspell (#6164)
3c9f5b62525cb1d6ca1153eeb10433db7d7379fd
# refactor: Rename `rippled.cfg` to `xrpld.cfg` (#6098)
3d1b3a49b3601a0a7037fa0b19d5df7b5e0e2fc1
# refactor: Rename `ripple` namespace to `xrpl` (#5982)
1eb0fdac6543706b4b9ddca57fd4102928a1f871
# refactor: Rename `rippled` binary to `xrpld` (#5983)
9eb84a561ef8bb066d89f098bd9b4ac71baed67c
# refactor: Replaces secp256k1 source by Conan package (#6089)
813bc4d9491b078bb950f8255f93b02f71320478
# refactor: Remove unnecessary copyright notices already covered by LICENSE.md (#5929)
1d42c4f6de6bf01d1286fc7459b17a37a5189e88
# refactor: Rename `RIPPLE_` and `RIPPLED_` definitions to `XRPL_` (#5821)
ada83564d894829424b0f4d922b0e737e07abbf7
# refactor: Modularize shamap and nodestore (#5668)
8eb233c2ea8ad5a159be73b77f0f5e1496d547ac
# refactor: Modularise ledger (#5493)
dc8b37a52448b005153c13a7f046ad494128cf94
# chore: Update clang-format and prettier with pre-commit (#5709)
c14ce956adeabe476ad73c18d73103f347c9c613
# chore: Fix file formatting (#5718)
896b8c3b54a22b0497cb0d1ce95e1095f9a227ce
# chore: Reverts formatting changes to external files, adds formatting changes to proto files (#5711)
b13370ac0d207217354f1fc1c29aef87769fb8a1
# chore: Run prettier on all files (#5657)
97f0747e103f13e26e45b731731059b32f7679ac
# Reformat code with clang-format-18
552377c76f55b403a1c876df873a23d780fcc81c
# Recompute loops (#4997)
d028005aa6319338b0adae1aebf8abe113162960
# Rewrite includes (#4997)
1d23148e6dd53957fcb6205c07a5c6cd7b64d50c
# Rearrange sources (#4997)
e416ee72ca26fa0c09d2aee1b68bdfb2b7046eed
# Move CMake directory (#4997)
2e902dee53aab2a8f27f32971047bb81e022f94f
# Rewrite includes
0eebe6a5f4246fced516d52b83ec4e7f47373edd
# Format formerly .hpp files
760f16f56835663d9286bd29294d074de26a7ba6
# Rename .hpp to .h
241b9ddde9e11beb7480600fd5ed90e1ef109b21
# Consolidate external libraries
e2384885f5f630c8f0ffe4bf21a169b433a16858
# Format first-party source according to .clang-format
50760c693510894ca368e90369b0cc2dabfd07f3 50760c693510894ca368e90369b0cc2dabfd07f3
e2384885f5f630c8f0ffe4bf21a169b433a16858
241b9ddde9e11beb7480600fd5ed90e1ef109b21
760f16f56835663d9286bd29294d074de26a7ba6
0eebe6a5f4246fced516d52b83ec4e7f47373edd
2189cc950c0cebb89e4e2fa3b2d8817205bf7cef
b9d007813378ad0ff45660dc07285b823c7e9855
fe9a5365b8a52d4acc42eb27369247e6f238a4f9
9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
552377c76f55b403a1c876df873a23d780fcc81c
97f0747e103f13e26e45b731731059b32f7679ac
b13370ac0d207217354f1fc1c29aef87769fb8a1
896b8c3b54a22b0497cb0d1ce95e1095f9a227ce
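For reference, the standard ways to make use of this file (the source path in the second command is illustrative):

```bash
# Configure git blame to skip the formatting-only commits listed above:
git config blame.ignoreRevsFile .git-blame-ignore-revs

# Or pass the list explicitly for a single invocation:
git blame --ignore-revs-file .git-blame-ignore-revs src/libxrpl/protocol/STObject.cpp
```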

.github/CODEOWNERS (new file, 8 lines)

@@ -0,0 +1,8 @@
# Allow anyone to review any change by default.
*
# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers


@@ -1,7 +1,7 @@
--- ---
name: Feature Request name: Feature Request
about: Suggest a new feature for the xrpld project about: Suggest a new feature for the rippled project
title: "[Title with short description] (Version: [xrpld version])" title: "[Title with short description] (Version: [rippled version])"
labels: Feature Request labels: Feature Request
assignees: "" assignees: ""
--- ---


@@ -11,18 +11,16 @@ runs:
steps: steps:
# When a tag is pushed, the version is used as-is. # When a tag is pushed, the version is used as-is.
- name: Generate version for tag event - name: Generate version for tag event
if: ${{ startsWith(github.ref, 'refs/tags/') }} if: ${{ github.event_name == 'tag' }}
shell: bash shell: bash
env: env:
VERSION: ${{ github.ref_name }} VERSION: ${{ github.ref_name }}
run: echo "VERSION=${VERSION}" >> "${GITHUB_ENV}" run: echo "VERSION=${VERSION}" >> "${GITHUB_ENV}"
# When a tag is not pushed, then the version (e.g. 1.2.3-b0) is extracted # When a tag is not pushed, then the version is extracted from the
# from the BuildInfo.cpp file and the shortened commit hash appended to it. # BuildInfo.cpp file and the shortened commit hash appended to it.
# We use a plus sign instead of a hyphen because Conan recipe versions do
# not support two hyphens.
- name: Generate version for non-tag event - name: Generate version for non-tag event
if: ${{ !startsWith(github.ref, 'refs/tags/') }} if: ${{ github.event_name != 'tag' }}
shell: bash shell: bash
run: | run: |
echo 'Extracting version from BuildInfo.cpp.' echo 'Extracting version from BuildInfo.cpp.'
@@ -34,7 +32,7 @@ runs:
echo 'Appending shortened commit hash to version.' echo 'Appending shortened commit hash to version.'
SHA='${{ github.sha }}' SHA='${{ github.sha }}'
VERSION="${VERSION}+${SHA:0:7}" VERSION="${VERSION}-${SHA:0:7}"
echo "VERSION=${VERSION}" >> "${GITHUB_ENV}" echo "VERSION=${VERSION}" >> "${GITHUB_ENV}"


@@ -31,7 +31,7 @@ runs:
conan config install conan/profiles/ -tf $(conan config home)/profiles/ conan config install conan/profiles/ -tf $(conan config home)/profiles/
echo 'Conan profile:' echo 'Conan profile:'
conan profile show --profile ci conan profile show
- name: Set up Conan remote - name: Set up Conan remote
shell: bash shell: bash


@@ -1,56 +0,0 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/build-deps/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/generate-version/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/print-env/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/setup-conan/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop


@@ -29,6 +29,22 @@ If a refactor, how is this better than the previous implementation?
If there is a spec or design document for this feature, please link it here. If there is a spec or design document for this feature, please link it here.
--> -->
### Type of Change
<!--
Please check [x] relevant options, delete irrelevant ones.
-->
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Refactor (non-breaking change that only restructures code)
- [ ] Performance (increase or change in throughput and/or latency)
- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation update
- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
- [ ] Release
### API Impact ### API Impact
<!-- <!--


@@ -1,85 +0,0 @@
#!/usr/bin/env python3
"""
Checks that a pull request description has been customized from the
pull_request_template.md. Exits with code 1 if the description is empty
or identical to the template (ignoring HTML comments and whitespace).
Usage:
python check-pr-description.py --template-file TEMPLATE --pr-body-file BODY
"""
import argparse
import re
import sys
from pathlib import Path
def normalize(text: str) -> str:
"""Strip HTML comments, trim lines, and remove blank lines."""
# Remove HTML comments (possibly multi-line)
text = re.sub(r"<!--.*?-->", "", text, flags=re.DOTALL)
# Strip each line and drop empties
lines = [line.strip() for line in text.splitlines()]
lines = [line for line in lines if line]
return "\n".join(lines)
def main() -> int:
parser = argparse.ArgumentParser(
description="Check that a PR description differs from the template."
)
parser.add_argument(
"--template-file",
type=Path,
required=True,
help="Path to the pull request template file.",
)
parser.add_argument(
"--pr-body-file",
type=Path,
required=True,
help="Path to a file containing the PR body text.",
)
args = parser.parse_args()
template_path: Path = args.template_file
pr_body_path: Path = args.pr_body_file
if not template_path.is_file():
print(f"::error::Template file {template_path} not found")
return 1
if not pr_body_path.is_file():
print(f"::error::PR body file {pr_body_path} not found")
return 1
template = template_path.read_text(encoding="utf-8")
pr_body = pr_body_path.read_text(encoding="utf-8")
# Check if the PR body is empty or whitespace-only
if not pr_body.strip():
print(
"::error::PR description is empty. "
"Please fill in the pull request template."
)
return 1
norm_template = normalize(template)
norm_pr_body = normalize(pr_body)
if norm_pr_body == norm_template:
print(
"::error::PR description (ignoring HTML comments) is identical"
" to the template. Please fill in the details of your change."
f"\n\nVisible template content:\n---\n{norm_template}\n---"
f"\n\nVisible PR description content:\n---\n{norm_pr_body}\n---"
)
return 1
print("PR description has been customized from the template.")
return 0
if __name__ == "__main__":
sys.exit(main())
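A possible invocation of this script, matching its usage docstring; the file paths are assumptions for illustration, not taken from the diff:

```bash
# Illustrative only: compare a saved PR body against the repository's PR template.
python check-pr-description.py \
  --template-file .github/pull_request_template.md \
  --pr-body-file pr_body.txt
echo "exit code: $?"   # 1 when the body is empty or identical to the template
```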


@@ -1,14 +1,14 @@
# Levelization # Levelization
Levelization is the term used to describe efforts to prevent xrpld from Levelization is the term used to describe efforts to prevent rippled from
having or creating cyclic dependencies. having or creating cyclic dependencies.
xrpld code is organized into directories under `src/xrpld`, `src/libxrpl` (and rippled code is organized into directories under `src/xrpld`, `src/libxrpl` (and
`src/test`) representing modules. The modules are intended to be `src/test`) representing modules. The modules are intended to be
organized into "tiers" or "levels" such that a module from one level can organized into "tiers" or "levels" such that a module from one level can
only include code from lower levels. Additionally, a module only include code from lower levels. Additionally, a module
in one level should never include code in an `impl` or `detail` folder of any level in one level should never include code in an `impl` or `detail` folder of any level
other than its own. other than it's own.
The codebase is split into two main areas: The codebase is split into two main areas:
@@ -22,7 +22,7 @@ levelization violations they find (by moving files or individual
classes). At the very least, don't make things worse. classes). At the very least, don't make things worse.
The table below summarizes the _desired_ division of modules, based on the current The table below summarizes the _desired_ division of modules, based on the current
state of the xrpld code. The levels are numbered from state of the rippled code. The levels are numbered from
the bottom up with the lower level, lower numbered, more independent the bottom up with the lower level, lower numbered, more independent
modules listed first, and the higher level, higher numbered modules with modules listed first, and the higher level, higher numbered modules with
more dependencies listed later. more dependencies listed later.
@@ -70,12 +70,12 @@ that `test` code should _never_ be included in `xrpl` or `xrpld` code.)
## Validation ## Validation
The [levelization](generate.py) script takes no parameters, The [levelization](generate.sh) script takes no parameters,
reads no environment variables, and can be run from any directory, reads no environment variables, and can be run from any directory,
as long as it is in the expected location in the xrpld repo. as long as it is in the expected location in the rippled repo.
It can be run at any time from within a checked out repo, and will It can be run at any time from within a checked out repo, and will
do an analysis of all the `#include`s in do an analysis of all the `#include`s in
the xrpld source. The only caveat is that it runs much slower the rippled source. The only caveat is that it runs much slower
under Windows than in Linux. It hasn't yet been tested under MacOS. under Windows than in Linux. It hasn't yet been tested under MacOS.
It generates many files of [results](results): It generates many files of [results](results):
@@ -104,7 +104,7 @@ It generates many files of [results](results):
Github Actions workflow to test that levelization loops haven't Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or they are improvements or not, so if you have resolved any issues or
done anything else to improve levelization, run `generate.py`, done anything else to improve levelization, run `levelization.sh`,
and commit the updated results. and commit the updated results.
The `loops.txt` and `ordering.txt` files relate the modules The `loops.txt` and `ordering.txt` files relate the modules
@@ -128,7 +128,7 @@ The committed files hide the detailed values intentionally, to
prevent false alarms and merging issues, and because it's easy to prevent false alarms and merging issues, and because it's easy to
get those details locally. get those details locally.
1. Run `generate.py` 1. Run `levelization.sh`
2. Grep the modules in `paths.txt`. 2. Grep the modules in `paths.txt`.
- For example, if a cycle is found `A ~= B`, simply `grep -w - For example, if a cycle is found `A ~= B`, simply `grep -w
A .github/scripts/levelization/results/paths.txt | grep -w B` A .github/scripts/levelization/results/paths.txt | grep -w B`
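Putting the last two steps together, an illustrative session for investigating the `xrpld.app` / `xrpld.overlay` loop reported in the results files later in this diff might look like:

```bash
# Regenerate the levelization results, then inspect the offending module pair.
python .github/scripts/levelization/generate.py
grep -w xrpld.app .github/scripts/levelization/results/paths.txt | grep -w xrpld.overlay
```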


@@ -1,335 +0,0 @@
#!/usr/bin/env python3
"""
Usage: generate.py
This script takes no parameters, and can be called from any directory in the file system.
"""
import os
import re
import subprocess
import sys
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Tuple, Set, Optional
# Compile regex patterns once at module level
INCLUDE_PATTERN = re.compile(r"^\s*#include.*/.*\.h")
INCLUDE_PATH_PATTERN = re.compile(r'[<"]([^>"]+)[>"]')
def dictionary_sort_key(s: str) -> str:
"""
Create a sort key that mimics 'sort -d' (dictionary order).
Dictionary order only considers blanks and alphanumeric characters.
This means punctuation like '.' is ignored during sorting.
"""
# Keep only alphanumeric characters and spaces
return "".join(c for c in s if c.isalnum() or c.isspace())
def get_level(file_path: str) -> str:
"""
Extract the level from a file path (second and third directory components).
Equivalent to bash: cut -d/ -f 2,3
Examples:
src/xrpld/app/main.cpp -> xrpld.app
src/libxrpl/protocol/STObject.cpp -> libxrpl.protocol
include/xrpl/basics/base_uint.h -> xrpl.basics
"""
parts = file_path.split("/")
# Get fields 2 and 3 (indices 1 and 2 in 0-based indexing)
if len(parts) >= 3:
level = f"{parts[1]}/{parts[2]}"
elif len(parts) >= 2:
level = f"{parts[1]}/toplevel"
else:
level = file_path
# If the "level" indicates a file, cut off the filename
if "." in level.split("/")[-1]: # Avoid Path object creation
# Use the "toplevel" label as a workaround for `sort`
# inconsistencies between different utility versions
level = level.rsplit("/", 1)[0] + "/toplevel"
return level.replace("/", ".")
def extract_include_level(include_line: str) -> Optional[str]:
"""
Extract the include path from an #include directive.
Gets the first two directory components from the include path.
Equivalent to bash: cut -d/ -f 1,2
Examples:
#include <xrpl/basics/base_uint.h> -> xrpl.basics
#include "xrpld/app/main/Application.h" -> xrpld.app
"""
# Remove everything before the quote or angle bracket
match = INCLUDE_PATH_PATTERN.search(include_line)
if not match:
return None
include_path = match.group(1)
parts = include_path.split("/")
# Get first two fields (indices 0 and 1)
if len(parts) >= 2:
include_level = f"{parts[0]}/{parts[1]}"
else:
include_level = include_path
# If the "includelevel" indicates a file, cut off the filename
if "." in include_level.split("/")[-1]: # Avoid Path object creation
include_level = include_level.rsplit("/", 1)[0] + "/toplevel"
return include_level.replace("/", ".")
def find_repository_directories(
start_path: Path, depth_limit: int = 10
) -> Tuple[Path, List[Path]]:
"""
Find the repository root by looking for src or include folders.
Walks up the directory tree from the start path.
"""
current = start_path.resolve()
# Walk up the directory tree
for _ in range(depth_limit): # Limit search depth to prevent infinite loops
src_path = current / "src"
include_path = current / "include"
# Check if this directory has src or include folders
has_src = src_path.exists()
has_include = include_path.exists()
if has_src or has_include:
return current, [src_path, include_path]
# Move up one level
parent = current.parent
if parent == current: # Reached filesystem root
break
current = parent
# If we couldn't find it, raise an error
raise RuntimeError(
"Could not find repository root. "
"Expected to find a directory containing 'src' and/or 'include' folders."
)
def main():
# Change to the script's directory
script_dir = Path(__file__).parent.resolve()
os.chdir(script_dir)
# Clean up and create results directory.
results_dir = script_dir / "results"
if results_dir.exists():
import shutil
shutil.rmtree(results_dir)
results_dir.mkdir()
# Find the repository root by searching for src and include directories.
try:
repo_root, scan_dirs = find_repository_directories(script_dir)
print(f"Found repository root: {repo_root}")
print(f"Scanning directories:")
for scan_dir in scan_dirs:
print(f" - {scan_dir.relative_to(repo_root)}")
except RuntimeError as e:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
print("\nScanning for raw includes...")
# Find all #include directives
raw_includes: List[Tuple[str, str]] = []
rawincludes_file = results_dir / "rawincludes.txt"
# Write to file as we go to avoid storing everything in memory.
with open(rawincludes_file, "w", buffering=8192) as raw_f:
for dir_path in scan_dirs:
print(f" Scanning {dir_path.relative_to(repo_root)}...")
for file_path in dir_path.rglob("*"):
if not file_path.is_file():
continue
try:
rel_path_str = str(file_path.relative_to(repo_root))
# Read file with a large buffer for performance.
with open(
file_path,
"r",
encoding="utf-8",
errors="ignore",
buffering=8192,
) as f:
for line in f:
# Quick check before regex
if "#include" not in line or "boost" in line:
continue
if INCLUDE_PATTERN.match(line):
line_stripped = line.strip()
entry = f"{rel_path_str}:{line_stripped}\n"
print(entry, end="")
raw_f.write(entry)
raw_includes.append((rel_path_str, line_stripped))
except Exception as e:
print(f"Error reading {file_path}: {e}", file=sys.stderr)
# Build levelization paths and count directly (no need to sort first).
print("Build levelization paths")
path_counts: Dict[Tuple[str, str], int] = defaultdict(int)
for file_path, include_line in raw_includes:
include_level = extract_include_level(include_line)
if not include_level:
continue
level = get_level(file_path)
if level != include_level:
path_counts[(level, include_level)] += 1
# Sort and deduplicate paths (using dictionary order like bash 'sort -d').
print("Sort and deduplicate paths")
paths_file = results_dir / "paths.txt"
with open(paths_file, "w") as f:
# Sort using dictionary order: only alphanumeric and spaces matter
sorted_items = sorted(
path_counts.items(),
key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
)
for (level, include_level), count in sorted_items:
line = f"{count:7} {level} {include_level}\n"
print(line.rstrip())
f.write(line)
# Split into flat-file database
print("Split into flat-file database")
includes_dir = results_dir / "includes"
included_by_dir = results_dir / "included_by"
includes_dir.mkdir()
included_by_dir.mkdir()
# Batch writes by grouping data first to avoid repeated file opens.
includes_data: Dict[str, List[Tuple[str, int]]] = defaultdict(list)
included_by_data: Dict[str, List[Tuple[str, int]]] = defaultdict(list)
# Process in sorted order to match bash script behaviour (dictionary order).
sorted_items = sorted(
path_counts.items(),
key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
)
for (level, include_level), count in sorted_items:
includes_data[level].append((include_level, count))
included_by_data[include_level].append((level, count))
# Write all includes files in sorted order (dictionary order).
for level in sorted(includes_data.keys(), key=dictionary_sort_key):
entries = includes_data[level]
with open(includes_dir / level, "w") as f:
for include_level, count in entries:
line = f"{include_level} {count}\n"
print(line.rstrip())
f.write(line)
# Write all included_by files in sorted order (dictionary order).
for include_level in sorted(included_by_data.keys(), key=dictionary_sort_key):
entries = included_by_data[include_level]
with open(included_by_dir / include_level, "w") as f:
for level, count in entries:
line = f"{level} {count}\n"
print(line.rstrip())
f.write(line)
# Search for loops
print("Search for loops")
loops_file = results_dir / "loops.txt"
ordering_file = results_dir / "ordering.txt"
loops_found: Set[Tuple[str, str]] = set()
# Pre-load all include files into memory to avoid repeated I/O.
# This is the biggest optimisation - we were reading files repeatedly in nested loops.
# Use list of tuples to preserve file order.
includes_cache: Dict[str, List[Tuple[str, int]]] = {}
includes_lookup: Dict[str, Dict[str, int]] = {} # For fast lookup
# Note: bash script uses 'for source in *' which uses standard glob sorting,
# NOT dictionary order. So we use standard sorted() here, not dictionary_sort_key.
for include_file in sorted(includes_dir.iterdir(), key=lambda p: p.name):
if not include_file.is_file():
continue
includes_cache[include_file.name] = []
includes_lookup[include_file.name] = {}
with open(include_file, "r") as f:
for line in f:
parts = line.strip().split()
if len(parts) >= 2:
include_name = parts[0]
include_count = int(parts[1])
includes_cache[include_file.name].append(
(include_name, include_count)
)
includes_lookup[include_file.name][include_name] = include_count
with open(loops_file, "w", buffering=8192) as loops_f, open(
ordering_file, "w", buffering=8192
) as ordering_f:
# Use standard sorting to match bash glob expansion 'for source in *'.
for source in sorted(includes_cache.keys()):
source_includes = includes_cache[source]
for include, include_freq in source_includes:
# Check if include file exists and references source
if include not in includes_lookup:
continue
source_freq = includes_lookup[include].get(source)
if source_freq is not None:
# Found a loop
loop_key = tuple(sorted([source, include]))
if loop_key in loops_found:
continue
loops_found.add(loop_key)
loops_f.write(f"Loop: {source} {include}\n")
# If the counts are close, indicate that the two modules are
# on the same level, though they shouldn't be.
diff = include_freq - source_freq
if diff > 3:
loops_f.write(f" {source} > {include}\n\n")
elif diff < -3:
loops_f.write(f" {include} > {source}\n\n")
elif source_freq == include_freq:
loops_f.write(f" {include} == {source}\n\n")
else:
loops_f.write(f" {include} ~= {source}\n\n")
else:
ordering_f.write(f"{source} > {include}\n")
# Print results
print("\nOrdering:")
with open(ordering_file, "r") as f:
print(f.read(), end="")
print("\nLoops:")
with open(loops_file, "r") as f:
print(f.read(), end="")
if __name__ == "__main__":
main()
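A tiny usage sketch of the two path-mapping helpers defined above, assuming they are in scope; the expected outputs mirror the docstring examples:

```python
# Illustrative only: mapping source paths and include lines to module levels.
print(get_level("src/xrpld/app/main/Application.cpp"))                   # xrpld.app
print(get_level("include/xrpl/basics/base_uint.h"))                      # xrpl.basics
print(extract_include_level("#include <xrpld/app/main/Application.h>"))  # xrpld.app
```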

.github/scripts/levelization/generate.sh (new executable file, 130 lines)

@@ -0,0 +1,130 @@
#!/bin/bash
# Usage: generate.sh
# This script takes no parameters, reads no environment variables,
# and can be run from any directory, as long as it is in the expected
# location in the repo.
pushd $( dirname $0 )
if [ -v PS1 ]
then
# if the shell is interactive, clean up any flotsam before analyzing
git clean -ix
fi
# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C
rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"
pushd ../../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
grep -v boost | tee ${includes}
popd
pushd results
oldifs=${IFS}
IFS=:
mkdir includes
mkdir included_by
echo Build levelization paths
exec 3< ${includes} # open rawincludes.txt for input
while read -r -u 3 file include
do
level=$( echo ${file} | cut -d/ -f 2,3 )
# If the "level" indicates a file, cut off the filename
if [[ "${level##*.}" != "${level}" ]]
then
# Use the "toplevel" label as a workaround for `sort`
# inconsistencies between different utility versions
level="$( dirname ${level} )/toplevel"
fi
level=$( echo ${level} | tr '/' '.' )
includelevel=$( echo ${include} | sed 's/.*["<]//; s/[">].*//' | \
cut -d/ -f 1,2 )
if [[ "${includelevel##*.}" != "${includelevel}" ]]
then
# Use the "toplevel" label as a workaround for `sort`
# inconsistencies between different utility versions
includelevel="$( dirname ${includelevel} )/toplevel"
fi
includelevel=$( echo ${includelevel} | tr '/' '.' )
if [[ "$level" != "$includelevel" ]]
then
echo $level $includelevel | tee -a paths.txt
fi
done
echo Sort and deduplicate paths
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
mv sortedpaths.txt paths.txt
exec 3>&- #close fd 3
IFS=${oldifs}
unset oldifs
echo Split into flat-file database
exec 4<paths.txt # open paths.txt for input
while read -r -u 4 count level include
do
echo ${include} ${count} | tee -a includes/${level}
echo ${level} ${count} | tee -a included_by/${include}
done
exec 4>&- #close fd 4
loops="$( pwd )/loops.txt"
ordering="$( pwd )/ordering.txt"
pushd includes
echo Search for loops
# Redirect stdout to a file
exec 4>&1
exec 1>"${loops}"
for source in *
do
if [[ -f "$source" ]]
then
exec 5<"${source}" # open for input
while read -r -u 5 include includefreq
do
if [[ -f $include ]]
then
if grep -q -w $source $include
then
if grep -q -w "Loop: $include $source" "${loops}"
then
continue
fi
sourcefreq=$( grep -w $source $include | cut -d\ -f2 )
echo "Loop: $source $include"
# If the counts are close, indicate that the two modules are
# on the same level, though they shouldn't be
if [[ $(( $includefreq - $sourcefreq )) -gt 3 ]]
then
echo -e " $source > $include\n"
elif [[ $(( $sourcefreq - $includefreq )) -gt 3 ]]
then
echo -e " $include > $source\n"
elif [[ $sourcefreq -eq $includefreq ]]
then
echo -e " $include == $source\n"
else
echo -e " $include ~= $source\n"
fi
else
echo "$source > $include" >> "${ordering}"
fi
fi
done
exec 5>&- #close fd 5
fi
done
exec 1>&4 #close fd 1
exec 4>&- #close fd 4
cat "${ordering}"
cat "${loops}"
popd
popd
popd


@@ -2,10 +2,13 @@ Loop: test.jtx test.toplevel
test.toplevel > test.jtx test.toplevel > test.jtx
Loop: test.jtx test.unit_test Loop: test.jtx test.unit_test
test.unit_test ~= test.jtx test.unit_test == test.jtx
Loop: xrpld.app xrpld.core
xrpld.app > xrpld.core
Loop: xrpld.app xrpld.overlay Loop: xrpld.app xrpld.overlay
xrpld.app > xrpld.overlay xrpld.overlay > xrpld.app
Loop: xrpld.app xrpld.peerfinder Loop: xrpld.app xrpld.peerfinder
xrpld.peerfinder ~= xrpld.app xrpld.peerfinder ~= xrpld.app
@@ -14,7 +17,7 @@ Loop: xrpld.app xrpld.rpc
xrpld.rpc > xrpld.app xrpld.rpc > xrpld.app
Loop: xrpld.app xrpld.shamap Loop: xrpld.app xrpld.shamap
xrpld.shamap > xrpld.app xrpld.shamap ~= xrpld.app
Loop: xrpld.overlay xrpld.rpc Loop: xrpld.overlay xrpld.rpc
xrpld.rpc ~= xrpld.overlay xrpld.rpc ~= xrpld.overlay


@@ -1,19 +1,13 @@
libxrpl.basics > xrpl.basics libxrpl.basics > xrpl.basics
libxrpl.conditions > xrpl.basics
libxrpl.conditions > xrpl.conditions
libxrpl.core > xrpl.basics libxrpl.core > xrpl.basics
libxrpl.core > xrpl.core libxrpl.core > xrpl.core
libxrpl.core > xrpl.json
libxrpl.crypto > xrpl.basics libxrpl.crypto > xrpl.basics
libxrpl.json > xrpl.basics libxrpl.json > xrpl.basics
libxrpl.json > xrpl.json libxrpl.json > xrpl.json
libxrpl.ledger > xrpl.basics libxrpl.ledger > xrpl.basics
libxrpl.ledger > xrpl.json libxrpl.ledger > xrpl.json
libxrpl.ledger > xrpl.ledger libxrpl.ledger > xrpl.ledger
libxrpl.ledger > xrpl.nodestore
libxrpl.ledger > xrpl.protocol libxrpl.ledger > xrpl.protocol
libxrpl.ledger > xrpl.server
libxrpl.ledger > xrpl.shamap
libxrpl.net > xrpl.basics libxrpl.net > xrpl.basics
libxrpl.net > xrpl.net libxrpl.net > xrpl.net
libxrpl.nodestore > xrpl.basics libxrpl.nodestore > xrpl.basics
@@ -23,38 +17,24 @@ libxrpl.nodestore > xrpl.protocol
libxrpl.protocol > xrpl.basics libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.json libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol libxrpl.protocol > xrpl.protocol
libxrpl.rdb > xrpl.basics
libxrpl.rdb > xrpl.core
libxrpl.rdb > xrpl.rdb
libxrpl.resource > xrpl.basics libxrpl.resource > xrpl.basics
libxrpl.resource > xrpl.json libxrpl.resource > xrpl.json
libxrpl.resource > xrpl.protocol libxrpl.resource > xrpl.protocol
libxrpl.resource > xrpl.resource libxrpl.resource > xrpl.resource
libxrpl.server > xrpl.basics libxrpl.server > xrpl.basics
libxrpl.server > xrpl.core
libxrpl.server > xrpl.json libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.rdb
libxrpl.server > xrpl.resource
libxrpl.server > xrpl.server libxrpl.server > xrpl.server
libxrpl.shamap > xrpl.basics libxrpl.shamap > xrpl.basics
libxrpl.shamap > xrpl.nodestore
libxrpl.shamap > xrpl.protocol libxrpl.shamap > xrpl.protocol
libxrpl.shamap > xrpl.shamap libxrpl.shamap > xrpl.shamap
libxrpl.tx > xrpl.basics
libxrpl.tx > xrpl.conditions
libxrpl.tx > xrpl.core
libxrpl.tx > xrpl.json
libxrpl.tx > xrpl.ledger
libxrpl.tx > xrpl.protocol
libxrpl.tx > xrpl.server
libxrpl.tx > xrpl.tx
test.app > test.jtx test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test test.app > test.unit_test
test.app > xrpl.basics test.app > xrpl.basics
test.app > xrpl.core test.app > xrpl.core
test.app > xrpld.app test.app > xrpld.app
test.app > xrpld.consensus
test.app > xrpld.core test.app > xrpld.core
test.app > xrpld.overlay test.app > xrpld.overlay
test.app > xrpld.rpc test.app > xrpld.rpc
@@ -63,9 +43,6 @@ test.app > xrpl.ledger
test.app > xrpl.nodestore test.app > xrpl.nodestore
test.app > xrpl.protocol test.app > xrpl.protocol
test.app > xrpl.resource test.app > xrpl.resource
test.app > xrpl.server
test.app > xrpl.shamap
test.app > xrpl.tx
test.basics > test.jtx test.basics > test.jtx
test.basics > test.unit_test test.basics > test.unit_test
test.basics > xrpl.basics test.basics > xrpl.basics
@@ -75,36 +52,30 @@ test.basics > xrpl.json
test.basics > xrpl.protocol test.basics > xrpl.protocol
test.beast > xrpl.basics test.beast > xrpl.basics
test.conditions > xrpl.basics test.conditions > xrpl.basics
test.conditions > xrpl.conditions test.conditions > xrpld.conditions
test.consensus > test.csf test.consensus > test.csf
test.consensus > test.jtx
test.consensus > test.toplevel test.consensus > test.toplevel
test.consensus > test.unit_test test.consensus > test.unit_test
test.consensus > xrpl.basics test.consensus > xrpl.basics
test.consensus > xrpld.app test.consensus > xrpld.app
test.consensus > xrpld.consensus test.consensus > xrpld.consensus
test.consensus > xrpl.json
test.consensus > xrpl.ledger test.consensus > xrpl.ledger
test.consensus > xrpl.protocol
test.consensus > xrpl.shamap
test.consensus > xrpl.tx
test.core > test.jtx test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test test.core > test.unit_test
test.core > xrpl.basics test.core > xrpl.basics
test.core > xrpl.core test.core > xrpl.core
test.core > xrpld.core test.core > xrpld.core
test.core > xrpl.json test.core > xrpl.json
test.core > xrpl.protocol
test.core > xrpl.rdb
test.core > xrpl.server test.core > xrpl.server
test.csf > xrpl.basics test.csf > xrpl.basics
test.csf > xrpld.consensus test.csf > xrpld.consensus
test.csf > xrpl.json test.csf > xrpl.json
test.csf > xrpl.ledger
test.csf > xrpl.protocol test.csf > xrpl.protocol
test.json > test.jtx test.json > test.jtx
test.json > xrpl.json test.json > xrpl.json
test.jtx > xrpl.basics test.jtx > xrpl.basics
test.jtx > xrpl.core
test.jtx > xrpld.app test.jtx > xrpld.app
test.jtx > xrpld.core test.jtx > xrpld.core
test.jtx > xrpld.rpc test.jtx > xrpld.rpc
@@ -114,34 +85,28 @@ test.jtx > xrpl.net
test.jtx > xrpl.protocol test.jtx > xrpl.protocol
test.jtx > xrpl.resource test.jtx > xrpl.resource
test.jtx > xrpl.server test.jtx > xrpl.server
test.jtx > xrpl.tx
test.ledger > test.jtx test.ledger > test.jtx
test.ledger > test.toplevel
test.ledger > xrpl.basics test.ledger > xrpl.basics
test.ledger > xrpl.core
test.ledger > xrpld.app test.ledger > xrpld.app
test.ledger > xrpld.core test.ledger > xrpld.core
test.ledger > xrpl.json
test.ledger > xrpl.ledger test.ledger > xrpl.ledger
test.ledger > xrpl.protocol test.ledger > xrpl.protocol
test.nodestore > test.jtx test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test test.nodestore > test.unit_test
test.nodestore > xrpl.basics test.nodestore > xrpl.basics
test.nodestore > xrpld.core test.nodestore > xrpld.core
test.nodestore > xrpl.nodestore test.nodestore > xrpl.nodestore
test.nodestore > xrpl.protocol
test.nodestore > xrpl.rdb
test.overlay > test.jtx test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test test.overlay > test.unit_test
test.overlay > xrpl.basics test.overlay > xrpl.basics
test.overlay > xrpld.app test.overlay > xrpld.app
test.overlay > xrpld.core
test.overlay > xrpld.overlay test.overlay > xrpld.overlay
test.overlay > xrpld.peerfinder test.overlay > xrpld.peerfinder
test.overlay > xrpl.json
test.overlay > xrpl.nodestore test.overlay > xrpl.nodestore
test.overlay > xrpl.protocol test.overlay > xrpl.protocol
test.overlay > xrpl.resource
test.overlay > xrpl.server
test.overlay > xrpl.shamap test.overlay > xrpl.shamap
test.peerfinder > test.beast test.peerfinder > test.beast
test.peerfinder > test.unit_test test.peerfinder > test.unit_test
@@ -149,8 +114,7 @@ test.peerfinder > xrpl.basics
test.peerfinder > xrpld.core test.peerfinder > xrpld.core
test.peerfinder > xrpld.peerfinder test.peerfinder > xrpld.peerfinder
test.peerfinder > xrpl.protocol test.peerfinder > xrpl.protocol
test.protocol > test.jtx test.protocol > test.toplevel
test.protocol > test.unit_test
test.protocol > xrpl.basics test.protocol > xrpl.basics
test.protocol > xrpl.json test.protocol > xrpl.json
test.protocol > xrpl.protocol test.protocol > xrpl.protocol
@@ -158,6 +122,7 @@ test.resource > test.unit_test
test.resource > xrpl.basics test.resource > xrpl.basics
test.resource > xrpl.resource test.resource > xrpl.resource
test.rpc > test.jtx test.rpc > test.jtx
test.rpc > test.toplevel
test.rpc > xrpl.basics test.rpc > xrpl.basics
test.rpc > xrpl.core test.rpc > xrpl.core
test.rpc > xrpld.app test.rpc > xrpld.app
@@ -165,18 +130,16 @@ test.rpc > xrpld.core
test.rpc > xrpld.overlay test.rpc > xrpld.overlay
test.rpc > xrpld.rpc test.rpc > xrpld.rpc
test.rpc > xrpl.json test.rpc > xrpl.json
test.rpc > xrpl.ledger
test.rpc > xrpl.protocol test.rpc > xrpl.protocol
test.rpc > xrpl.resource test.rpc > xrpl.resource
test.rpc > xrpl.server
test.rpc > xrpl.tx
test.server > test.jtx test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test test.server > test.unit_test
test.server > xrpl.basics test.server > xrpl.basics
test.server > xrpld.app test.server > xrpld.app
test.server > xrpld.core test.server > xrpld.core
test.server > xrpld.rpc
test.server > xrpl.json test.server > xrpl.json
test.server > xrpl.protocol
test.server > xrpl.server test.server > xrpl.server
test.shamap > test.unit_test test.shamap > test.unit_test
test.shamap > xrpl.basics test.shamap > xrpl.basics
@@ -186,100 +149,65 @@ test.shamap > xrpl.shamap
test.toplevel > test.csf test.toplevel > test.csf
test.toplevel > xrpl.json test.toplevel > xrpl.json
test.unit_test > xrpl.basics test.unit_test > xrpl.basics
test.unit_test > xrpl.protocol
tests.libxrpl > xrpl.basics tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.core
tests.libxrpl > xrpl.json tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.ledger
tests.libxrpl > xrpl.net tests.libxrpl > xrpl.net
tests.libxrpl > xrpl.nodestore
tests.libxrpl > xrpl.protocol
tests.libxrpl > xrpl.protocol_autogen
tests.libxrpl > xrpl.server
tests.libxrpl > xrpl.shamap
tests.libxrpl > xrpl.tx
xrpl.conditions > xrpl.basics
xrpl.conditions > xrpl.protocol
xrpl.core > xrpl.basics xrpl.core > xrpl.basics
xrpl.core > xrpl.json xrpl.core > xrpl.json
xrpl.core > xrpl.protocol
xrpl.json > xrpl.basics xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics xrpl.ledger > xrpl.basics
xrpl.ledger > xrpl.protocol xrpl.ledger > xrpl.protocol
xrpl.ledger > xrpl.server
xrpl.ledger > xrpl.shamap
xrpl.net > xrpl.basics xrpl.net > xrpl.basics
xrpl.nodestore > xrpl.basics xrpl.nodestore > xrpl.basics
xrpl.nodestore > xrpl.protocol xrpl.nodestore > xrpl.protocol
xrpl.protocol > xrpl.basics xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json xrpl.protocol > xrpl.json
xrpl.protocol_autogen > xrpl.json
xrpl.protocol_autogen > xrpl.protocol
xrpl.rdb > xrpl.basics
xrpl.rdb > xrpl.core
xrpl.rdb > xrpl.protocol
xrpl.resource > xrpl.basics xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol xrpl.resource > xrpl.protocol
xrpl.server > xrpl.basics xrpl.server > xrpl.basics
xrpl.server > xrpl.core
xrpl.server > xrpl.json xrpl.server > xrpl.json
xrpl.server > xrpl.protocol xrpl.server > xrpl.protocol
xrpl.server > xrpl.rdb
xrpl.server > xrpl.resource
xrpl.server > xrpl.shamap
xrpl.shamap > xrpl.basics xrpl.shamap > xrpl.basics
xrpl.shamap > xrpl.nodestore xrpl.shamap > xrpl.nodestore
xrpl.shamap > xrpl.protocol xrpl.shamap > xrpl.protocol
xrpl.tx > xrpl.basics
xrpl.tx > xrpl.core
xrpl.tx > xrpl.ledger
xrpl.tx > xrpl.protocol
xrpld.app > test.unit_test xrpld.app > test.unit_test
xrpld.app > xrpl.basics xrpld.app > xrpl.basics
xrpld.app > xrpl.core xrpld.app > xrpl.core
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus xrpld.app > xrpld.consensus
xrpld.app > xrpld.core
xrpld.app > xrpl.json xrpld.app > xrpl.json
xrpld.app > xrpl.ledger xrpld.app > xrpl.ledger
xrpld.app > xrpl.net xrpld.app > xrpl.net
xrpld.app > xrpl.nodestore xrpld.app > xrpl.nodestore
xrpld.app > xrpl.protocol xrpld.app > xrpl.protocol
xrpld.app > xrpl.rdb
xrpld.app > xrpl.resource xrpld.app > xrpl.resource
xrpld.app > xrpl.server
xrpld.app > xrpl.shamap xrpld.app > xrpl.shamap
xrpld.app > xrpl.tx xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.consensus > xrpl.basics xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.ledger
xrpld.consensus > xrpl.protocol xrpld.consensus > xrpl.protocol
xrpld.core > xrpl.basics xrpld.core > xrpl.basics
xrpld.core > xrpl.core xrpld.core > xrpl.core
xrpld.core > xrpl.json
xrpld.core > xrpl.net xrpld.core > xrpl.net
xrpld.core > xrpl.protocol xrpld.core > xrpl.protocol
xrpld.core > xrpl.rdb
xrpld.overlay > xrpl.basics xrpld.overlay > xrpl.basics
xrpld.overlay > xrpl.core xrpld.overlay > xrpl.core
xrpld.overlay > xrpld.consensus
xrpld.overlay > xrpld.core xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpl.json xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.ledger
xrpld.overlay > xrpl.protocol xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.resource xrpld.overlay > xrpl.resource
xrpld.overlay > xrpl.server xrpld.overlay > xrpl.server
xrpld.overlay > xrpl.shamap
xrpld.overlay > xrpl.tx
xrpld.peerfinder > xrpl.basics xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol xrpld.peerfinder > xrpl.protocol
xrpld.peerfinder > xrpl.rdb
xrpld.perflog > xrpl.basics xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.core xrpld.perflog > xrpl.core
xrpld.perflog > xrpld.rpc xrpld.perflog > xrpld.rpc
xrpld.perflog > xrpl.json xrpld.perflog > xrpl.json
xrpld.perflog > xrpl.protocol
xrpld.rpc > xrpl.basics xrpld.rpc > xrpl.basics
xrpld.rpc > xrpl.core xrpld.rpc > xrpl.core
xrpld.rpc > xrpld.core xrpld.rpc > xrpld.core
@@ -288,12 +216,6 @@ xrpld.rpc > xrpl.ledger
xrpld.rpc > xrpl.net xrpld.rpc > xrpl.net
xrpld.rpc > xrpl.nodestore xrpld.rpc > xrpl.nodestore
xrpld.rpc > xrpl.protocol xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.rdb
xrpld.rpc > xrpl.resource xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server xrpld.rpc > xrpl.server
xrpld.rpc > xrpl.shamap
xrpld.rpc > xrpl.tx
xrpld.shamap > xrpl.basics
xrpld.shamap > xrpld.core
xrpld.shamap > xrpl.protocol
xrpld.shamap > xrpl.shamap xrpld.shamap > xrpl.shamap

View File

@@ -34,8 +34,6 @@ run from the repository root.
6. `.github/scripts/rename/config.sh`: This script will rename the config from 6. `.github/scripts/rename/config.sh`: This script will rename the config from
`rippled.cfg` to `xrpld.cfg`, and update the code accordingly. The old `rippled.cfg` to `xrpld.cfg`, and update the code accordingly. The old
filename will still be accepted. filename will still be accepted.
7. `.github/scripts/rename/docs.sh`: This script will rename any lingering
`ripple(d)` references to `xrpl(d)` in code, comments, and documentation.
You can run all these scripts from the repository root as follows: You can run all these scripts from the repository root as follows:
@@ -46,5 +44,4 @@ You can run all these scripts from the repository root as follows:
./.github/scripts/rename/binary.sh . ./.github/scripts/rename/binary.sh .
./.github/scripts/rename/namespace.sh . ./.github/scripts/rename/namespace.sh .
./.github/scripts/rename/config.sh . ./.github/scripts/rename/config.sh .
./.github/scripts/rename/docs.sh .
``` ```
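
For convenience, the scripts can also be chained from a small wrapper so a failure in any step aborts the whole rename. This is a minimal sketch, not part of the repository; the script list only includes the names visible in the excerpt above, so extend it to match the full README.

```bash
#!/bin/bash
# Sketch: run the rename scripts in order from the repository root and stop on
# the first failure. The list below is an assumed subset taken from the README.
set -e

SCRIPTS=(binary.sh namespace.sh config.sh)
for SCRIPT in "${SCRIPTS[@]}"; do
    echo "Running ${SCRIPT}..."
    ./.github/scripts/rename/"${SCRIPT}" .
done
echo "All rename scripts completed."
```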

View File

@@ -6,11 +6,11 @@ set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`. # On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'." echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1 exit 1
fi fi
SED_COMMAND=gsed SED_COMMAND=gsed
fi fi
# This script changes the binary name from `rippled` to `xrpld`, and reverses # This script changes the binary name from `rippled` to `xrpld`, and reverses
@@ -29,7 +29,7 @@ if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist." echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1 exit 1
fi fi
pushd "${DIRECTORY}" pushd ${DIRECTORY}
# Remove the binary name override added by the cmake.sh script. # Remove the binary name override added by the cmake.sh script.
${SED_COMMAND} -z -i -E 's@\s+# For the time being.+"rippled"\)@@' cmake/XrplCore.cmake ${SED_COMMAND} -z -i -E 's@\s+# For the time being.+"rippled"\)@@' cmake/XrplCore.cmake
@@ -49,7 +49,6 @@ ${SED_COMMAND} -i -E 's@ripple/xrpld@XRPLF/rippled@g' BUILD.md
${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' BUILD.md ${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' BUILD.md
${SED_COMMAND} -i -E 's@xrpld \(`xrpld`\)@xrpld@g' BUILD.md ${SED_COMMAND} -i -E 's@xrpld \(`xrpld`\)@xrpld@g' BUILD.md
${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' CONTRIBUTING.md ${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' CONTRIBUTING.md
${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' docs/build/install.md
popd popd
echo "Processing complete." echo "Processing complete."

View File

@@ -8,16 +8,16 @@ set -e
SED_COMMAND=sed SED_COMMAND=sed
HEAD_COMMAND=head HEAD_COMMAND=head
if [[ "${OSTYPE}" == 'darwin'* ]]; then if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'." echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1 exit 1
fi fi
SED_COMMAND=gsed SED_COMMAND=gsed
if ! command -v ghead &> /dev/null; then if ! command -v ghead &> /dev/null; then
echo "Error: ghead is not installed. Please install it using 'brew install coreutils'." echo "Error: ghead is not installed. Please install it using 'brew install coreutils'."
exit 1 exit 1
fi fi
HEAD_COMMAND=ghead HEAD_COMMAND=ghead
fi fi
# This script renames CMake files from `RippleXXX.cmake` or `RippledXXX.cmake` # This script renames CMake files from `RippleXXX.cmake` or `RippledXXX.cmake`
@@ -38,16 +38,16 @@ if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist." echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1 exit 1
fi fi
pushd "${DIRECTORY}" pushd ${DIRECTORY}
# Rename the files. # Rename the files.
find cmake -type f -name 'Rippled*.cmake' -exec bash -c 'mv "${1}" "${1/Rippled/Xrpl}"' - {} \; find cmake -type f -name 'Rippled*.cmake' -exec bash -c 'mv "${1}" "${1/Rippled/Xrpl}"' - {} \;
find cmake -type f -name 'Ripple*.cmake' -exec bash -c 'mv "${1}" "${1/Ripple/Xrpl}"' - {} \; find cmake -type f -name 'Ripple*.cmake' -exec bash -c 'mv "${1}" "${1/Ripple/Xrpl}"' - {} \;
if [ -e cmake/xrpl_add_test.cmake ]; then if [ -e cmake/xrpl_add_test.cmake ]; then
mv cmake/xrpl_add_test.cmake cmake/XrplAddTest.cmake mv cmake/xrpl_add_test.cmake cmake/XrplAddTest.cmake
fi fi
if [ -e include/xrpl/proto/ripple.proto ]; then if [ -e include/xrpl/proto/ripple.proto ]; then
mv include/xrpl/proto/ripple.proto include/xrpl/proto/xrpl.proto mv include/xrpl/proto/ripple.proto include/xrpl/proto/xrpl.proto
fi fi
# Rename inside the files. # Rename inside the files.
@@ -71,14 +71,14 @@ ${SED_COMMAND} -i 's@xrpl/validator-keys-tool@ripple/validator-keys-tool@' cmake
# Ensure the name of the binary and config remain 'rippled' for now. # Ensure the name of the binary and config remain 'rippled' for now.
${SED_COMMAND} -i -E 's/xrpld(-example)?\.cfg/rippled\1.cfg/g' cmake/XrplInstall.cmake ${SED_COMMAND} -i -E 's/xrpld(-example)?\.cfg/rippled\1.cfg/g' cmake/XrplInstall.cmake
if grep -q '"xrpld"' cmake/XrplCore.cmake; then if grep -q '"xrpld"' cmake/XrplCore.cmake; then
# The script has been rerun, so just restore the name of the binary. # The script has been rerun, so just restore the name of the binary.
${SED_COMMAND} -i 's/"xrpld"/"rippled"/' cmake/XrplCore.cmake ${SED_COMMAND} -i 's/"xrpld"/"rippled"/' cmake/XrplCore.cmake
elif ! grep -q '"rippled"' cmake/XrplCore.cmake; then elif ! grep -q '"rippled"' cmake/XrplCore.cmake; then
${HEAD_COMMAND} -n -1 cmake/XrplCore.cmake > cmake.tmp ${HEAD_COMMAND} -n -1 cmake/XrplCore.cmake > cmake.tmp
echo ' # For the time being, we will keep the name of the binary as it was.' >> cmake.tmp echo ' # For the time being, we will keep the name of the binary as it was.' >> cmake.tmp
echo ' set_target_properties(xrpld PROPERTIES OUTPUT_NAME "rippled")' >> cmake.tmp echo ' set_target_properties(xrpld PROPERTIES OUTPUT_NAME "rippled")' >> cmake.tmp
tail -1 cmake/XrplCore.cmake >> cmake.tmp tail -1 cmake/XrplCore.cmake >> cmake.tmp
mv cmake.tmp cmake/XrplCore.cmake mv cmake.tmp cmake/XrplCore.cmake
fi fi
# Restore the symlink from 'xrpld' to 'rippled'. # Restore the symlink from 'xrpld' to 'rippled'.
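
The bulk renames above rely on bash parameter expansion inside a `find -exec bash -c` call. A self-contained sketch of that pattern, using throwaway file names rather than the real CMake modules:

```bash
#!/bin/bash
# Sketch of the find/mv rename pattern used above: `${1/Old/New}` rewrites the
# first occurrence of "Old" in the matched path before moving the file.
set -e

mkdir -p demo
touch demo/RippledDocs.cmake demo/RippledSettings.cmake

# The lone `-` fills $0 of the inline script, so each matched path lands in $1.
find demo -type f -name 'Rippled*.cmake' \
    -exec bash -c 'mv "${1}" "${1/Rippled/Xrpl}"' - {} \;

ls demo   # XrplDocs.cmake  XrplSettings.cmake
```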

View File

@@ -6,11 +6,11 @@ set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`. # On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'." echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1 exit 1
fi fi
SED_COMMAND=gsed SED_COMMAND=gsed
fi fi
# This script renames the config from `rippled.cfg` to `xrpld.cfg`, and updates # This script renames the config from `rippled.cfg` to `xrpld.cfg`, and updates
@@ -28,39 +28,40 @@ if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist." echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1 exit 1
fi fi
pushd "${DIRECTORY}" pushd ${DIRECTORY}
# Add the xrpld.cfg to the .gitignore. # Add the xrpld.cfg to the .gitignore.
if ! grep -q 'xrpld.cfg' .gitignore; then if ! grep -q 'xrpld.cfg' .gitignore; then
${SED_COMMAND} -i '/rippled.cfg/a\ ${SED_COMMAND} -i '/rippled.cfg/a\
/xrpld.cfg' .gitignore /xrpld.cfg' .gitignore
fi fi
# Rename the files. # Rename the files.
if [ -e rippled.cfg ]; then if [ -e rippled.cfg ]; then
mv rippled.cfg xrpld.cfg mv rippled.cfg xrpld.cfg
fi fi
if [ -e cfg/rippled-example.cfg ]; then if [ -e cfg/rippled-example.cfg ]; then
mv cfg/rippled-example.cfg cfg/xrpld-example.cfg mv cfg/rippled-example.cfg cfg/xrpld-example.cfg
fi fi
# Rename inside the files. # Rename inside the files.
DIRECTORIES=("cfg" "cmake" "include" "src") DIRECTORIES=("cfg" "cmake" "include" "src")
for DIRECTORY in "${DIRECTORIES[@]}"; do for DIRECTORY in "${DIRECTORIES[@]}"; do
echo "Processing directory: ${DIRECTORY}" echo "Processing directory: ${DIRECTORY}"
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.cmake" -o -name "*.txt" -o -name "*.cfg" -o -name "*.md" \) | while read -r FILE; do find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.cmake" -o -name "*.txt" -o -name "*.cfg" -o -name "*.md" \) | while read -r FILE; do
echo "Processing file: ${FILE}" echo "Processing file: ${FILE}"
${SED_COMMAND} -i -E 's/rippled(-example)?[ .]cfg/xrpld\1.cfg/g' "${FILE}" ${SED_COMMAND} -i -E 's/rippled(-example)?[ .]cfg/xrpld\1.cfg/g' "${FILE}"
${SED_COMMAND} -i 's/rippleConfig/xrpldConfig/g' "${FILE}" done
done
done done
${SED_COMMAND} -i 's/rippled/xrpld/g' cfg/xrpld-example.cfg ${SED_COMMAND} -i 's/rippled/xrpld/g' cfg/xrpld-example.cfg
${SED_COMMAND} -i 's/rippled/xrpld/g' src/test/core/Config_test.cpp ${SED_COMMAND} -i 's/rippled/xrpld/g' src/test/core/Config_test.cpp
${SED_COMMAND} -i 's/ripplevalidators/xrplvalidators/g' src/test/core/Config_test.cpp # cspell: disable-line ${SED_COMMAND} -i 's/ripplevalidators/xrplvalidators/g' src/test/core/Config_test.cpp # cspell: disable-line
${SED_COMMAND} -i 's/rippleConfig/xrpldConfig/g' src/test/core/Config_test.cpp
${SED_COMMAND} -i 's@ripple/@xrpld/@g' src/test/core/Config_test.cpp ${SED_COMMAND} -i 's@ripple/@xrpld/@g' src/test/core/Config_test.cpp
${SED_COMMAND} -i 's/Rippled/File/g' src/test/core/Config_test.cpp ${SED_COMMAND} -i 's/Rippled/File/g' src/test/core/Config_test.cpp
# Restore the old config file name in the code that maintains support for now. # Restore the old config file name in the code that maintains support for now.
${SED_COMMAND} -i 's/configLegacyName = "xrpld.cfg"/configLegacyName = "rippled.cfg"/g' src/xrpld/core/detail/Config.cpp ${SED_COMMAND} -i 's/configLegacyName = "xrpld.cfg"/configLegacyName = "rippled.cfg"/g' src/xrpld/core/detail/Config.cpp
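
The config rename leans on a single extended regex with an optional capture group. A small sketch against a scratch file showing what `s/rippled(-example)?[ .]cfg/xrpld\1.cfg/g` matches (GNU sed assumed, per the guard at the top of the script):

```bash
#!/bin/bash
# Sketch: the optional capture group keeps the "-example" suffix when present.
set -e

cat > demo.md <<'EOF'
See rippled.cfg and cfg/rippled-example.cfg for details.
EOF

sed -i -E 's/rippled(-example)?[ .]cfg/xrpld\1.cfg/g' demo.md
cat demo.md
# See xrpld.cfg and cfg/xrpld-example.cfg for details.
```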

View File

@@ -6,11 +6,11 @@ set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`. # On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'." echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1 exit 1
fi fi
SED_COMMAND=gsed SED_COMMAND=gsed
fi fi
# This script removes superfluous copyright notices in source and header files # This script removes superfluous copyright notices in source and header files
@@ -31,7 +31,7 @@ if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist." echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1 exit 1
fi fi
pushd "${DIRECTORY}" pushd ${DIRECTORY}
# Prevent sed and echo from removing newlines and tabs in string literals by # Prevent sed and echo from removing newlines and tabs in string literals by
# temporarily replacing them with placeholders. This only affects one file. # temporarily replacing them with placeholders. This only affects one file.
@@ -43,56 +43,56 @@ ${SED_COMMAND} -i -E "s@\\\t@${PLACEHOLDER_TAB}@g" src/test/rpc/ValidatorInfo_te
# Process the include/ and src/ directories. # Process the include/ and src/ directories.
DIRECTORIES=("include" "src") DIRECTORIES=("include" "src")
for DIRECTORY in "${DIRECTORIES[@]}"; do for DIRECTORY in "${DIRECTORIES[@]}"; do
echo "Processing directory: ${DIRECTORY}" echo "Processing directory: ${DIRECTORY}"
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.macro" \) | while read -r FILE; do find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.macro" \) | while read -r FILE; do
echo "Processing file: ${FILE}" echo "Processing file: ${FILE}"
# Handle the cases where the copyright notice is enclosed in /* ... */ # Handle the cases where the copyright notice is enclosed in /* ... */
# and usually surrounded by //---- and //======. # and usually surrounded by //---- and //======.
${SED_COMMAND} -z -i -E 's@^//-------+\n+@@' "${FILE}" ${SED_COMMAND} -z -i -E 's@^//-------+\n+@@' "${FILE}"
${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}" # cspell: ignore Bougalis Falco Hinnant Ritchford ${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}" # cspell: ignore Bougalis Falco Hinnant Ritchford
${SED_COMMAND} -z -i -E 's@^//=======+\n+@@' "${FILE}" ${SED_COMMAND} -z -i -E 's@^//=======+\n+@@' "${FILE}"
# Handle the cases where the copyright notice is commented out with //. # Handle the cases where the copyright notice is commented out with //.
${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}" # cspell: ignore Vinnie Falco ${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}" # cspell: ignore Vinnie Falco
done done
done done
# Restore copyright notices that were removed from specific files, without # Restore copyright notices that were removed from specific files, without
# restoring the verbiage that is already present in LICENSE.md. Ensure that if # restoring the verbiage that is already present in LICENSE.md. Ensure that if
# the script is run multiple times, duplicate notices are not added. # the script is run multiple times, duplicate notices are not added.
if ! grep -q 'Raw Material Software' include/xrpl/beast/core/CurrentThreadName.h; then if ! grep -q 'Raw Material Software' include/xrpl/beast/core/CurrentThreadName.h; then
echo -e "// Portions of this file are from JUCE (http://www.juce.com).\n// Copyright (c) 2013 - Raw Material Software Ltd.\n// Please visit http://www.juce.com\n\n$(cat include/xrpl/beast/core/CurrentThreadName.h)" > include/xrpl/beast/core/CurrentThreadName.h echo -e "// Portions of this file are from JUCE (http://www.juce.com).\n// Copyright (c) 2013 - Raw Material Software Ltd.\n// Please visit http://www.juce.com\n\n$(cat include/xrpl/beast/core/CurrentThreadName.h)" > include/xrpl/beast/core/CurrentThreadName.h
fi fi
if ! grep -q 'Dev Null' src/test/app/NetworkID_test.cpp; then if ! grep -q 'Dev Null' src/test/app/NetworkID_test.cpp; then
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/NetworkID_test.cpp)" > src/test/app/NetworkID_test.cpp echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/NetworkID_test.cpp)" > src/test/app/NetworkID_test.cpp
fi fi
if ! grep -q 'Dev Null' src/test/app/tx/apply_test.cpp; then if ! grep -q 'Dev Null' src/test/app/tx/apply_test.cpp; then
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/tx/apply_test.cpp)" > src/test/app/tx/apply_test.cpp echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/tx/apply_test.cpp)" > src/test/app/tx/apply_test.cpp
fi fi
if ! grep -q 'Dev Null' src/test/rpc/ManifestRPC_test.cpp; then if ! grep -q 'Dev Null' src/test/rpc/ManifestRPC_test.cpp; then
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ManifestRPC_test.cpp)" > src/test/rpc/ManifestRPC_test.cpp echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ManifestRPC_test.cpp)" > src/test/rpc/ManifestRPC_test.cpp
fi fi
if ! grep -q 'Dev Null' src/test/rpc/ValidatorInfo_test.cpp; then if ! grep -q 'Dev Null' src/test/rpc/ValidatorInfo_test.cpp; then
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ValidatorInfo_test.cpp)" > src/test/rpc/ValidatorInfo_test.cpp echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ValidatorInfo_test.cpp)" > src/test/rpc/ValidatorInfo_test.cpp
fi fi
if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/server_info/Manifest.cpp; then if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/DoManifest.cpp; then
echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/server_info/Manifest.cpp)" > src/xrpld/rpc/handlers/server_info/Manifest.cpp echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/DoManifest.cpp)" > src/xrpld/rpc/handlers/DoManifest.cpp
fi fi
if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/admin/status/ValidatorInfo.cpp; then if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/ValidatorInfo.cpp; then
echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/admin/status/ValidatorInfo.cpp)" > src/xrpld/rpc/handlers/admin/status/ValidatorInfo.cpp echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/ValidatorInfo.cpp)" > src/xrpld/rpc/handlers/ValidatorInfo.cpp
fi fi
if ! grep -q 'Bougalis' include/xrpl/basics/SlabAllocator.h; then if ! grep -q 'Bougalis' include/xrpl/basics/SlabAllocator.h; then
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h # cspell: ignore Nikolaos Bougalis nikb echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h # cspell: ignore Nikolaos Bougalis nikb
fi fi
if ! grep -q 'Bougalis' include/xrpl/basics/spinlock.h; then if ! grep -q 'Bougalis' include/xrpl/basics/spinlock.h; then
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h # cspell: ignore Nikolaos Bougalis nikb echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h # cspell: ignore Nikolaos Bougalis nikb
fi fi
if ! grep -q 'Bougalis' include/xrpl/basics/tagged_integer.h; then if ! grep -q 'Bougalis' include/xrpl/basics/tagged_integer.h; then
echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h # cspell: ignore Nikolaos Bougalis nikb echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h # cspell: ignore Nikolaos Bougalis nikb
fi fi
if ! grep -q 'Ritchford' include/xrpl/beast/utility/Zero.h; then if ! grep -q 'Ritchford' include/xrpl/beast/utility/Zero.h; then
echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h # cspell: ignore Ritchford echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h # cspell: ignore Ritchford
fi fi
# Restore newlines and tabs in string literals in the affected file. # Restore newlines and tabs in string literals in the affected file.
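
The restorations above all follow the same idempotent prepend idiom: only add the notice if a marker string is not already present, so rerunning the script never duplicates it. A stripped-down sketch on a scratch file (the file name and author are placeholders):

```bash
#!/bin/bash
# Sketch: idempotently prepend a header line to a file.
# The grep guard makes the script safe to rerun without duplicating the notice.
set -e

echo "int main() { return 0; }" > demo.cpp

if ! grep -q 'Example Author' demo.cpp; then
    # The command substitution runs before the redirection truncates the file.
    echo -e "// Copyright (c) 2020 Example Author\n\n$(cat demo.cpp)" > demo.cpp
fi

cat demo.cpp
```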

View File

@@ -6,11 +6,11 @@ set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`. # On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'." echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1 exit 1
fi fi
SED_COMMAND=gsed SED_COMMAND=gsed
fi fi
# This script renames definitions, such as include guards, in this project. # This script renames definitions, such as include guards, in this project.

View File

@@ -1,96 +0,0 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1
fi
SED_COMMAND=gsed
fi
# This script renames all remaining references to `ripple` and `rippled` to
# `xrpl` and `xrpld`, respectively, in code, comments, and documentation.
# Usage: .github/scripts/rename/docs.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
pushd "${DIRECTORY}"
find . -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.txt" -o -name "*.cfg" -o -name "*.md" -o -name "*.proto" \) -not -path "./.github/scripts/*" | while read -r FILE; do
echo "Processing file: ${FILE}"
${SED_COMMAND} -i 's/rippleLockEscrowMPT/lockEscrowMPT/g' "${FILE}"
${SED_COMMAND} -i 's/rippleUnlockEscrowMPT/unlockEscrowMPT/g' "${FILE}"
${SED_COMMAND} -i 's/rippleCredit/directSendNoFee/g' "${FILE}"
${SED_COMMAND} -i 's/rippleSend/directSendNoLimit/g' "${FILE}"
${SED_COMMAND} -i -E 's@([^/+-])rippled@\1xrpld@g' "${FILE}"
${SED_COMMAND} -i -E 's@([^/+-])Rippled@\1Xrpld@g' "${FILE}"
${SED_COMMAND} -i -E 's/^rippled/xrpld/g' "${FILE}"
${SED_COMMAND} -i -E 's/^Rippled/Xrpld/g' "${FILE}"
# cspell: disable
${SED_COMMAND} -i -E 's/(r|R)ipple (a|A)ddress/XRPL address/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (a|A)ccount/XRPL account/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (a|A)lgorithm/XRPL algorithm/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (c|C)lient/XRPL client/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (c|C)luster/XRPL cluster/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (c|C)onsensus/XRPL consensus/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (d|D)efault/XRPL default/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (e|E)poch/XRPL epoch/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (f|F)eature/XRPL feature/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (n|N)etwork/XRPL network/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (p|P)ayment/XRPL payment/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (p|P)rotocol/XRPL protocol/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (r|R)epository/XRPL repository/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple RPC/XRPL RPC/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (s|S)erialization/XRPL serialization/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (s|S)erver/XRPL server/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (s|S)pecific/XRPL specific/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple Source/XRPL Source/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (t|T)imestamp/XRPL timestamp/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple uses the consensus/XRPL uses the consensus/g' "${FILE}"
${SED_COMMAND} -i -E 's/(r|R)ipple (v|V)alidator/XRPL validator/g' "${FILE}"
# cspell: enable
${SED_COMMAND} -i 's/RippleLib/XrplLib/g' "${FILE}"
${SED_COMMAND} -i 's/ripple-lib/XrplLib/g' "${FILE}"
${SED_COMMAND} -i 's@opt/ripple/@opt/xrpld/@g' "${FILE}"
${SED_COMMAND} -i 's@src/ripple/@src/xrpld/@g' "${FILE}"
${SED_COMMAND} -i 's@ripple/app/@xrpld/app/@g' "${FILE}"
${SED_COMMAND} -i 's@github.com/ripple/rippled@github.com/XRPLF/rippled@g' "${FILE}"
${SED_COMMAND} -i 's/\ba xrpl/an xrpl/g' "${FILE}"
${SED_COMMAND} -i 's/\ba XRPL/an XRPL/g' "${FILE}"
done
${SED_COMMAND} -i 's/ripple_libs/xrpl_libs/' BUILD.md
${SED_COMMAND} -i 's/Ripple integrators/XRPL developers/' README.md
${SED_COMMAND} -i 's/sanitizer-configuration-for-rippled/sanitizer-configuration-for-xrpld/' docs/build/sanitizers.md
${SED_COMMAND} -i 's/rippled/xrpld/g' .github/scripts/levelization/README.md
${SED_COMMAND} -i 's/rippled/xrpld/g' .github/scripts/strategy-matrix/generate.py
${SED_COMMAND} -i 's@/rippled@/xrpld@g' docs/build/install.md
${SED_COMMAND} -i 's@github.com/XRPLF/xrpld@github.com/XRPLF/rippled@g' docs/build/install.md
${SED_COMMAND} -i 's/rippled/xrpld/g' docs/Doxyfile
${SED_COMMAND} -i 's/ripple_basics/basics/' include/xrpl/basics/CountedObject.h
${SED_COMMAND} -i 's/<ripple/<xrpl/' include/xrpl/protocol/AccountID.h
${SED_COMMAND} -i 's/Ripple:/the XRPL:/g' include/xrpl/protocol/SecretKey.h
${SED_COMMAND} -i 's/Ripple:/the XRPL:/g' include/xrpl/protocol/Seed.h
${SED_COMMAND} -i 's/ripple/xrpl/g' src/test/README.md
${SED_COMMAND} -i 's/www.ripple.com/www.xrpl.org/g' src/test/protocol/Seed_test.cpp
# Restore specific changes.
${SED_COMMAND} -i 's@b5efcc/src/xrpld@b5efcc/src/ripple@' include/xrpl/protocol/README.md
${SED_COMMAND} -i 's/dbPrefix_ = "xrpldb"/dbPrefix_ = "rippledb"/' src/xrpld/app/misc/SHAMapStoreImp.h # cspell: disable-line
${SED_COMMAND} -i 's/configLegacyName = "xrpld.cfg"/configLegacyName = "rippled.cfg"/' src/xrpld/core/detail/Config.cpp
popd
echo "Renaming complete."

View File

@@ -1,30 +0,0 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# This script checks that a PR does not introduce new include guards, as header
# files should use "#pragma once" instead. The script assumes any include guards
# use "XRPL_" as a prefix.
# Usage: .github/scripts/rename/include.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" \) | while read -r FILE; do
echo "Processing file: ${FILE}"
if grep -q "#ifndef XRPL_" "${FILE}"; then
echo "Please replace all include guards by #pragma once."
exit 1
fi
done
echo "Checking complete."

View File

@@ -6,11 +6,11 @@ set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`. # On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'." echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1 exit 1
fi fi
SED_COMMAND=gsed SED_COMMAND=gsed
fi fi
# This script renames the `ripple` namespace to `xrpl` in this project. # This script renames the `ripple` namespace to `xrpl` in this project.
@@ -31,19 +31,18 @@ if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist." echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1 exit 1
fi fi
pushd "${DIRECTORY}" pushd ${DIRECTORY}
DIRECTORIES=("include" "src" "tests") DIRECTORIES=("include" "src" "tests")
for DIRECTORY in "${DIRECTORIES[@]}"; do for DIRECTORY in "${DIRECTORIES[@]}"; do
echo "Processing directory: ${DIRECTORY}" echo "Processing directory: ${DIRECTORY}"
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.macro" \) | while read -r FILE; do find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" \) | while read -r FILE; do
echo "Processing file: ${FILE}" echo "Processing file: ${FILE}"
${SED_COMMAND} -i 's/namespace ripple/namespace xrpl/g' "${FILE}" ${SED_COMMAND} -i 's/namespace ripple/namespace xrpl/g' "${FILE}"
${SED_COMMAND} -i 's/ripple::/xrpl::/g' "${FILE}" ${SED_COMMAND} -i 's/ripple::/xrpl::/g' "${FILE}"
${SED_COMMAND} -i 's/"ripple:/"xrpl::/g' "${FILE}" ${SED_COMMAND} -i -E 's/(BEAST_DEFINE_TESTSUITE.+)ripple(.+)/\1xrpl\2/g' "${FILE}"
${SED_COMMAND} -i -E 's/(BEAST_DEFINE_TESTSUITE.+)ripple(.+)/\1xrpl\2/g' "${FILE}" done
done
done done
# Special case for NuDBFactory that has ripple twice in the test suite name. # Special case for NuDBFactory that has ripple twice in the test suite name.

View File

@@ -44,28 +44,29 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# We build and test all configurations by default, except for Windows in # We build and test all configurations by default, except for Windows in
# Debug, because it is too slow, as well as when code coverage is # Debug, because it is too slow, as well as when code coverage is
# enabled as that mode already runs the tests. # enabled as that mode already runs the tests.
build_only = False build_only = True
if os["distro_name"] == "windows" and build_type == "Debug": if os["distro_name"] == "windows" and build_type == "Debug":
build_only = True build_only = True
# Only generate a subset of configurations in PRs. # Only generate a subset of configurations in PRs.
if not all: if not all:
# Debian: # Debian:
# - Bookworm using GCC 13: Debug on linux/amd64, set the reference # - Bookworm using GCC 13: Release and Unity on linux/amd64, set
# fee to 500 and enable code coverage (which will be done below). # the reference fee to 500.
# - Bookworm using GCC 15: Debug on linux/amd64, enable Address and # - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
# UB sanitizers (which will be done below). # code coverage (which will be done below).
# - Bookworm using Clang 16: Debug on linux/amd64, enable voidstar. # - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
# - Bookworm using Clang 17: Release on linux/amd64, set the # enable voidstar.
# reference fee to 1000. # - Bookworm using Clang 17: Release and no Unity on linux/amd64,
# - Bookworm using Clang 20: Debug on linux/amd64, enable Address # set the reference fee to 1000.
# and UB sanitizers (which will be done below). # - Bookworm using Clang 20: Debug and Unity on linux/amd64.
if os["distro_name"] == "debian": if os["distro_name"] == "debian":
skip = True skip = True
if os["distro_version"] == "bookworm": if os["distro_version"] == "bookworm":
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13"
and build_type == "Debug" and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}" cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}"
@@ -73,19 +74,22 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16"
and build_type == "Debug" and build_type == "Debug"
and architecture["platform"] == "linux/amd64" and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/arm64"
): ):
cmake_args = f"-Dvoidstar=ON {cmake_args}" cmake_args = f"-Dvoidstar=ON {cmake_args}"
skip = False skip = False
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17"
and build_type == "Release" and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}" cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}"
@@ -93,6 +97,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
@@ -100,14 +105,15 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue continue
# RHEL: # RHEL:
# - 9 using GCC 12: Debug on linux/amd64. # - 9 using GCC 12: Debug and Unity on linux/amd64.
# - 10 using Clang: Release on linux/amd64. # - 10 using Clang: Release and no Unity on linux/amd64.
if os["distro_name"] == "rhel": if os["distro_name"] == "rhel":
skip = True skip = True
if os["distro_version"] == "9": if os["distro_version"] == "9":
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
@@ -115,6 +121,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any"
and build_type == "Release" and build_type == "Release"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
@@ -122,16 +129,17 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue continue
# Ubuntu: # Ubuntu:
# - Jammy using GCC 12: Debug on linux/arm64. # - Jammy using GCC 12: Debug and no Unity on linux/arm64.
# - Noble using GCC 14: Release on linux/amd64. # - Noble using GCC 14: Release and Unity on linux/amd64.
# - Noble using Clang 18: Debug on linux/amd64. # - Noble using Clang 18: Debug and no Unity on linux/amd64.
# - Noble using Clang 19: Release on linux/arm64. # - Noble using Clang 19: Release and Unity on linux/arm64.
if os["distro_name"] == "ubuntu": if os["distro_name"] == "ubuntu":
skip = True skip = True
if os["distro_version"] == "jammy": if os["distro_version"] == "jammy":
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/arm64" and architecture["platform"] == "linux/arm64"
): ):
skip = False skip = False
@@ -139,18 +147,21 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14"
and build_type == "Release" and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19"
and build_type == "Release" and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/arm64" and architecture["platform"] == "linux/arm64"
): ):
skip = False skip = False
@@ -158,16 +169,20 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue continue
# MacOS: # MacOS:
# - Debug on macos/arm64. # - Debug and no Unity on macos/arm64.
if os["distro_name"] == "macos" and not ( if os["distro_name"] == "macos" and not (
build_type == "Debug" and architecture["platform"] == "macos/arm64" build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "macos/arm64"
): ):
continue continue
# Windows: # Windows:
# - Release on windows/amd64. # - Release and Unity on windows/amd64.
if os["distro_name"] == "windows" and not ( if os["distro_name"] == "windows" and not (
build_type == "Release" and architecture["platform"] == "windows/amd64" build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "windows/amd64"
): ):
continue continue
@@ -194,28 +209,18 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
): ):
continue continue
# Enable code coverage for Debian Bookworm using GCC 13 in Debug on # Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
# linux/amd64. # Unity on linux/amd64
if ( if (
f"{os['distro_name']}-{os['distro_version']}" == "debian-bookworm" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
and f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
cmake_args = f"{cmake_args} -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0" cmake_args = f"-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}"
# Enable unity build for Ubuntu Jammy using GCC 12 in Debug on
# linux/amd64.
if (
f"{os['distro_name']}-{os['distro_version']}" == "ubuntu-jammy"
and f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug"
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"{cmake_args} -Dunity=ON"
# Generate a unique name for the configuration, e.g. macos-arm64-debug # Generate a unique name for the configuration, e.g. macos-arm64-debug
# or debian-bookworm-gcc-12-amd64-release. # or debian-bookworm-gcc-12-amd64-release-unity.
config_name = os["distro_name"] config_name = os["distro_name"]
if (n := os["distro_version"]) != "": if (n := os["distro_version"]) != "":
config_name += f"-{n}" config_name += f"-{n}"
@@ -235,39 +240,23 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# Add the configuration to the list, with the most unique fields first, # Add the configuration to the list, with the most unique fields first,
# so that they are easier to identify in the GitHub Actions UI, as long # so that they are easier to identify in the GitHub Actions UI, as long
# names get truncated. # names get truncated.
# Add Address and UB sanitizers as separate configurations for specific # Add Address and Thread (both coupled with UB) sanitizers for specific bookworm distros.
# bookworm distros. Thread sanitizer is currently disabled (see below). # GCC-Asan rippled-embedded tests are failing because of https://github.com/google/sanitizers/issues/856
# GCC-Asan xrpld-embedded tests are failing because of https://github.com/google/sanitizers/issues/856 if (
if os[ os["distro_version"] == "bookworm"
"distro_version" and f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
] == "bookworm" and f"{os['compiler_name']}-{os['compiler_version']}" in [ ):
"gcc-15", # Add ASAN + UBSAN configuration.
"clang-20",
]:
# Add ASAN configuration.
configurations.append( configurations.append(
{ {
"config_name": config_name + "-asan", "config_name": config_name + "-asan-ubsan",
"cmake_args": cmake_args, "cmake_args": cmake_args,
"cmake_target": cmake_target, "cmake_target": cmake_target,
"build_only": build_only, "build_only": build_only,
"build_type": build_type, "build_type": build_type,
"os": os, "os": os,
"architecture": architecture, "architecture": architecture,
"sanitizers": "address", "sanitizers": "address,undefinedbehavior",
}
)
# Add UBSAN configuration.
configurations.append(
{
"config_name": config_name + "-ubsan",
"cmake_args": cmake_args,
"cmake_target": cmake_target,
"build_only": build_only,
"build_type": build_type,
"os": os,
"architecture": architecture,
"sanitizers": "undefinedbehavior",
} }
) )
# TSAN is deactivated due to seg faults with latest compilers. # TSAN is deactivated due to seg faults with latest compilers.
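
For local debugging it can help to configure a build with the same flags one of these matrix entries would inject. A rough sketch for a Debug entry with the 500-drop reference fee: the flags are taken from the matrix above, but the build directory is arbitrary and the real CI configure step also passes the Conan toolchain file and other arguments not shown here.

```bash
#!/bin/bash
# Sketch: configure and build locally with flags matching one matrix entry.
# CI additionally wires in the Conan toolchain; this only mirrors the matrix flags.
set -e

mkdir -p build && cd build
cmake -DCMAKE_BUILD_TYPE=Debug \
      -Dunity=OFF \
      -DUNIT_TEST_REFERENCE_FEE=500 \
      ..
cmake --build . --parallel "$(nproc)"
```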

View File

@@ -208,5 +208,5 @@
} }
], ],
"build_type": ["Debug", "Release"], "build_type": ["Debug", "Release"],
"cmake_args": [""] "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
} }

View File

@@ -15,5 +15,8 @@
} }
], ],
"build_type": ["Debug", "Release"], "build_type": ["Debug", "Release"],
"cmake_args": ["-DCMAKE_POLICY_VERSION_MINIMUM=3.5"] "cmake_args": [
"-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
"-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
]
} }

View File

@@ -15,5 +15,5 @@
} }
], ],
"build_type": ["Debug", "Release"], "build_type": ["Debug", "Release"],
"cmake_args": [""] "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
} }

View File

@@ -1,13 +0,0 @@
name: Check PR commits
on:
pull_request_target:
# The action needs to have write permissions to post comments on the PR.
permissions:
contents: read
pull-requests: write
jobs:
check_commits:
uses: XRPLF/actions/.github/workflows/check-pr-commits.yml@e2c7f400d1e85ae65dad552fd425169fbacca4a3

View File

@@ -1,30 +0,0 @@
name: Check PR description
on:
merge_group:
types:
- checks_requested
pull_request:
types: [opened, edited, reopened, synchronize, ready_for_review]
branches: [develop]
jobs:
check_description:
if: ${{ github.event.pull_request.draft != true }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Write PR body to file
env:
PR_BODY: ${{ github.event.pull_request.body }}
if: ${{ github.event_name == 'pull_request' }}
run: printenv PR_BODY > pr_body.md
- name: Check PR description differs from template
if: ${{ github.event_name == 'pull_request' }}
run: >
python .github/scripts/check-pr-description.py
--template-file .github/pull_request_template.md
--pr-body-file pr_body.md

View File

@@ -1,14 +0,0 @@
name: Check PR title
on:
merge_group:
types:
- checks_requested
pull_request:
types: [opened, edited, reopened, synchronize, ready_for_review]
branches: [develop]
jobs:
check_title:
if: ${{ github.event.pull_request.draft != true }}
uses: XRPLF/actions/.github/workflows/check-pr-title.yml@a5d8dd35be543365e90a11358447130c8763871d

View File

@@ -1,25 +0,0 @@
name: Label PRs with merge conflicts
on:
# So that PRs touching the same files as the push are updated.
push:
# So that the `dirtyLabel` is removed if conflicts are resolved.
# We recommend `pull_request_target` so that github secrets are available.
# In `pull_request` we wouldn't be able to change labels of fork PRs.
pull_request_target:
types: [synchronize]
permissions:
pull-requests: write
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Check if PRs are dirty
uses: eps1lon/actions-label-merge-conflict@1df065ebe6e3310545d4f4c4e862e43bdca146f0 # v3.0.3
with:
dirtyLabel: "PR: has conflicts"
repoToken: "${{ secrets.GITHUB_TOKEN }}"
commentOnDirty: "This PR has conflicts, please resolve them in order for the PR to be reviewed."
commentOnClean: "All conflicts have been resolved. Assigned reviewers can now start or resume their review."

View File

@@ -33,7 +33,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Determine changed files - name: Determine changed files
# This step checks whether any files have changed that should # This step checks whether any files have changed that should
# cause the next jobs to run. We do it this way rather than # cause the next jobs to run. We do it this way rather than
@@ -46,7 +46,7 @@ jobs:
# that Github considers any skipped jobs to have passed, and in # that Github considers any skipped jobs to have passed, and in
# turn the required checks as well. # turn the required checks as well.
id: changes id: changes
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6 uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with: with:
files: | files: |
# These paths are unique to `on-pr.yml`. # These paths are unique to `on-pr.yml`.
@@ -59,18 +59,14 @@ jobs:
# Keep the paths below in sync with those in `on-trigger.yml`. # Keep the paths below in sync with those in `on-trigger.yml`.
.github/actions/build-deps/** .github/actions/build-deps/**
.github/actions/build-test/** .github/actions/build-test/**
.github/actions/generate-version/**
.github/actions/setup-conan/** .github/actions/setup-conan/**
.github/scripts/strategy-matrix/** .github/scripts/strategy-matrix/**
.github/workflows/reusable-build.yml .github/workflows/reusable-build.yml
.github/workflows/reusable-build-test-config.yml .github/workflows/reusable-build-test-config.yml
.github/workflows/reusable-build-test.yml .github/workflows/reusable-build-test.yml
.github/workflows/reusable-clang-tidy.yml
.github/workflows/reusable-clang-tidy-files.yml
.github/workflows/reusable-strategy-matrix.yml .github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml .github/workflows/reusable-test.yml
.github/workflows/reusable-upload-recipe.yml .github/workflows/reusable-upload-recipe.yml
.clang-tidy
.codecov.yml .codecov.yml
cmake/** cmake/**
conan/** conan/**
@@ -110,17 +106,6 @@ jobs:
if: ${{ needs.should-run.outputs.go == 'true' }} if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-rename.yml uses: ./.github/workflows/reusable-check-rename.yml
clang-tidy:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-clang-tidy.yml
permissions:
issues: write
contents: read
with:
check_only_changed: true
create_issue_on_failure: false
build-test: build-test:
needs: should-run needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }} if: ${{ needs.should-run.outputs.go == 'true' }}
@@ -141,8 +126,9 @@ jobs:
needs: needs:
- should-run - should-run
- build-test - build-test
# Only run when committing to a PR that targets a release branch. # Only run when committing to a PR that targets a release branch in the
if: ${{ github.repository == 'XRPLF/rippled' && needs.should-run.outputs.go == 'true' && github.event_name == 'pull_request' && startsWith(github.event.pull_request.base.ref, 'release') }} # XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' && needs.should-run.outputs.go == 'true' && startsWith(github.ref, 'refs/heads/release') }}
uses: ./.github/workflows/reusable-upload-recipe.yml uses: ./.github/workflows/reusable-upload-recipe.yml
secrets: secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }} remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
@@ -169,7 +155,6 @@ jobs:
needs: needs:
- check-levelization - check-levelization
- check-rename - check-rename
- clang-tidy
- build-test - build-test
- upload-recipe - upload-recipe
- notify-clio - notify-clio

View File

@@ -5,7 +5,7 @@ name: Tag
on: on:
push: push:
tags: tags:
- "[0-9]+.[0-9]+.[0-9]*" - "v*"
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
@@ -17,7 +17,8 @@ defaults:
jobs: jobs:
upload-recipe: upload-recipe:
if: ${{ github.repository == 'XRPLF/rippled' }} # Only run when a tag is pushed to the XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' }}
uses: ./.github/workflows/reusable-upload-recipe.yml uses: ./.github/workflows/reusable-upload-recipe.yml
secrets: secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }} remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}

View File

@@ -16,18 +16,14 @@ on:
# Keep the paths below in sync with those in `on-pr.yml`. # Keep the paths below in sync with those in `on-pr.yml`.
- ".github/actions/build-deps/**" - ".github/actions/build-deps/**"
- ".github/actions/build-test/**" - ".github/actions/build-test/**"
- ".github/actions/generate-version/**"
- ".github/actions/setup-conan/**" - ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**" - ".github/scripts/strategy-matrix/**"
- ".github/workflows/reusable-build.yml" - ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml" - ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml" - ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-clang-tidy.yml"
- ".github/workflows/reusable-clang-tidy-files.yml"
- ".github/workflows/reusable-strategy-matrix.yml" - ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml" - ".github/workflows/reusable-test.yml"
- ".github/workflows/reusable-upload-recipe.yml" - ".github/workflows/reusable-upload-recipe.yml"
- ".clang-tidy"
- ".codecov.yml" - ".codecov.yml"
- "cmake/**" - "cmake/**"
- "conan/**" - "conan/**"
@@ -63,15 +59,6 @@ defaults:
shell: bash shell: bash
jobs: jobs:
clang-tidy:
uses: ./.github/workflows/reusable-clang-tidy.yml
permissions:
issues: write
contents: read
with:
check_only_changed: false
create_issue_on_failure: ${{ github.event_name == 'schedule' }}
build-test: build-test:
uses: ./.github/workflows/reusable-build-test.yml uses: ./.github/workflows/reusable-build-test.yml
strategy: strategy:
@@ -92,8 +79,8 @@ jobs:
upload-recipe: upload-recipe:
needs: build-test needs: build-test
# Only run when pushing to the develop branch. # Only run when pushing to the develop branch in the XRPLF repository.
if: ${{ github.repository == 'XRPLF/rippled' && github.event_name == 'push' && github.ref == 'refs/heads/develop' }} if: ${{ github.repository_owner == 'XRPLF' && github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
uses: ./.github/workflows/reusable-upload-recipe.yml uses: ./.github/workflows/reusable-upload-recipe.yml
secrets: secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }} remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}

View File

@@ -1,9 +1,6 @@
name: Run pre-commit hooks name: Run pre-commit hooks
on: on:
merge_group:
types:
- checks_requested
pull_request: pull_request:
push: push:
branches: branches:
@@ -14,7 +11,7 @@ on:
jobs: jobs:
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks. # Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
run-hooks: run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@9307df762265e15c745ddcdb38a581c989f7f349 uses: XRPLF/actions/.github/workflows/pre-commit.yml@282890f46d6921249d5659dd38babcb0bd8aef48
with: with:
runs_on: ubuntu-latest runs_on: ubuntu-latest
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-41ec7c1" }' container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-ab4d1f0" }'
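
The same hooks can be exercised locally before pushing. A sketch assuming the `pre-commit` tool is installed and the repository carries a `.pre-commit-config.yaml`:

```bash
# Run every configured hook against the whole tree, not just staged files.
pre-commit run --all-files

# Or install the hooks so they run automatically on each commit.
pre-commit install
```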

View File

@@ -4,17 +4,6 @@ name: Build and publish documentation
on: on:
push: push:
branches:
- "develop"
paths:
- ".github/workflows/publish-docs.yml"
- "*.md"
- "**/*.md"
- "docs/**"
- "include/**"
- "src/libxrpl/**"
- "src/xrpld/**"
pull_request:
paths: paths:
- ".github/workflows/publish-docs.yml" - ".github/workflows/publish-docs.yml"
- "*.md" - "*.md"
@@ -34,25 +23,20 @@ defaults:
env: env:
BUILD_DIR: build BUILD_DIR: build
# ubuntu-latest has only 2 CPUs for private repositories NPROC_SUBTRACT: 2
# https://docs.github.com/en/actions/reference/runners/github-hosted-runners#standard-github-hosted-runners-for--private-repositories
NPROC_SUBTRACT: ${{ github.event.repository.visibility == 'public' && '2' || '1' }}
jobs: jobs:
build: publish:
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1 container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
permissions:
contents: write
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/prepare-runner@90f11ee655d1687824fb8793db770477d52afbab
with:
enable_ccache: false
- name: Get number of processors - name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc id: nproc
with: with:
subtract: ${{ env.NPROC_SUBTRACT }} subtract: ${{ env.NPROC_SUBTRACT }}
@@ -80,23 +64,9 @@ jobs:
cmake -Donly_docs=ON .. cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel ${BUILD_NPROC} cmake --build . --target docs --parallel ${BUILD_NPROC}
- name: Create documentation artifact - name: Publish documentation
if: ${{ github.event.repository.visibility == 'public' && github.event_name == 'push' }} if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
uses: actions/upload-pages-artifact@fc324d3547104276b827a68afc52ff2a11cc49c9 # v5.0.0 uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
with: with:
path: ${{ env.BUILD_DIR }}/docs/html github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ${{ env.BUILD_DIR }}/docs/html
deploy:
if: ${{ github.repository == 'XRPLF/rippled' && github.event_name == 'push' }}
needs: build
runs-on: ubuntu-latest
permissions:
pages: write
id-token: write
environment:
name: github-pages
url: ${{ steps.deploy.outputs.page_url }}
steps:
- name: Deploy to GitHub Pages
id: deploy
uses: actions/deploy-pages@cd2ce8fcbc39b97be8ca5fce6e763baed58fa128 # v5.0.0


@@ -76,7 +76,7 @@ jobs:
name: ${{ inputs.config_name }} name: ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }} runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }} container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: ${{ inputs.sanitizers != '' && 360 || 60 }} timeout-minutes: 60
env: env:
# Use a namespace to keep the objects separate for each configuration. # Use a namespace to keep the objects separate for each configuration.
CCACHE_NAMESPACE: ${{ inputs.config_name }} CCACHE_NAMESPACE: ${{ inputs.config_name }}
@@ -101,13 +101,13 @@ jobs:
steps: steps:
- name: Cleanup workspace (macOS and Windows) - name: Cleanup workspace (macOS and Windows)
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }} if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
uses: XRPLF/actions/cleanup-workspace@c7d9ce5ebb03c752a354889ecd870cadfc2b1cd4 uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner - name: Prepare runner
uses: XRPLF/actions/prepare-runner@90f11ee655d1687824fb8793db770477d52afbab uses: XRPLF/actions/prepare-runner@f05cab7b8541eee6473aa42beb9d2fe35608a190
with: with:
enable_ccache: ${{ inputs.ccache_enabled }} enable_ccache: ${{ inputs.ccache_enabled }}
@@ -119,14 +119,12 @@ jobs:
uses: ./.github/actions/print-env uses: ./.github/actions/print-env
- name: Get number of processors - name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc id: nproc
with: with:
subtract: ${{ inputs.nproc_subtract }} subtract: ${{ inputs.nproc_subtract }}
- name: Setup Conan - name: Setup Conan
env:
SANITIZERS: ${{ inputs.sanitizers }}
uses: ./.github/actions/setup-conan uses: ./.github/actions/setup-conan
- name: Build dependencies - name: Build dependencies
@@ -153,32 +151,6 @@ jobs:
${CMAKE_ARGS} \ ${CMAKE_ARGS} \
.. ..
- name: Check protocol autogen files are up-to-date
working-directory: ${{ env.BUILD_DIR }}
env:
MESSAGE: |
The generated protocol wrapper classes are out of date.
This typically happens when the macro files or generator scripts
have changed but the generated files were not regenerated.
To fix this:
1. Run: cmake --build . --target setup_code_gen
2. Run: cmake --build . --target code_gen
3. Commit and push the regenerated files
run: |
set -e
cmake --build . --target setup_code_gen
cmake --build . --target code_gen
DIFF=$(git -C .. status --porcelain -- include/xrpl/protocol_autogen src/tests/libxrpl/protocol_autogen)
if [ -n "${DIFF}" ]; then
echo "::error::Generated protocol files are out of date"
git -C .. diff -- include/xrpl/protocol_autogen src/tests/libxrpl/protocol_autogen
echo "${MESSAGE}"
exit 1
fi
- name: Build the binary - name: Build the binary
working-directory: ${{ env.BUILD_DIR }} working-directory: ${{ env.BUILD_DIR }}
env: env:
@@ -202,30 +174,14 @@ jobs:
fi fi
- name: Upload the binary (Linux) - name: Upload the binary (Linux)
if: ${{ github.event.repository.visibility == 'public' && runner.os == 'Linux' }} if: ${{ github.repository_owner == 'XRPLF' && runner.os == 'Linux' }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with: with:
name: xrpld-${{ inputs.config_name }} name: xrpld-${{ inputs.config_name }}
path: ${{ env.BUILD_DIR }}/xrpld path: ${{ env.BUILD_DIR }}/xrpld
retention-days: 3 retention-days: 3
if-no-files-found: error if-no-files-found: error
- name: Export server definitions
if: ${{ runner.os != 'Windows' && !inputs.build_only && env.VOIDSTAR_ENABLED != 'true' }}
working-directory: ${{ env.BUILD_DIR }}
run: |
set -o pipefail
./xrpld --definitions | python3 -m json.tool > server_definitions.json
- name: Upload server definitions
if: ${{ github.event.repository.visibility == 'public' && inputs.config_name == 'debian-bookworm-gcc-13-amd64-release' }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: server-definitions
path: ${{ env.BUILD_DIR }}/server_definitions.json
retention-days: 3
if-no-files-found: error
- name: Check linking (Linux) - name: Check linking (Linux)
if: ${{ runner.os == 'Linux' && env.SANITIZERS_ENABLED == 'false' }} if: ${{ runner.os == 'Linux' && env.SANITIZERS_ENABLED == 'false' }}
working-directory: ${{ env.BUILD_DIR }} working-directory: ${{ env.BUILD_DIR }}
@@ -246,17 +202,11 @@ jobs:
- name: Set sanitizer options - name: Set sanitizer options
if: ${{ !inputs.build_only && env.SANITIZERS_ENABLED == 'true' }} if: ${{ !inputs.build_only && env.SANITIZERS_ENABLED == 'true' }}
env:
CONFIG_NAME: ${{ inputs.config_name }}
run: | run: |
ASAN_OPTS="include=${GITHUB_WORKSPACE}/sanitizers/suppressions/runtime-asan-options.txt:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/asan.supp" echo "ASAN_OPTIONS=print_stacktrace=1:detect_container_overflow=0:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/asan.supp" >> ${GITHUB_ENV}
if [[ "${CONFIG_NAME}" == *gcc* ]]; then echo "TSAN_OPTIONS=second_deadlock_stack=1:halt_on_error=0:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/tsan.supp" >> ${GITHUB_ENV}
ASAN_OPTS="${ASAN_OPTS}:alloc_dealloc_mismatch=0" echo "UBSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/ubsan.supp" >> ${GITHUB_ENV}
fi echo "LSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/lsan.supp" >> ${GITHUB_ENV}
echo "ASAN_OPTIONS=${ASAN_OPTS}" >> ${GITHUB_ENV}
echo "TSAN_OPTIONS=include=${GITHUB_WORKSPACE}/sanitizers/suppressions/runtime-tsan-options.txt:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/tsan.supp" >> ${GITHUB_ENV}
echo "UBSAN_OPTIONS=include=${GITHUB_WORKSPACE}/sanitizers/suppressions/runtime-ubsan-options.txt:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/ubsan.supp" >> ${GITHUB_ENV}
echo "LSAN_OPTIONS=include=${GITHUB_WORKSPACE}/sanitizers/suppressions/runtime-lsan-options.txt:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/lsan.supp" >> ${GITHUB_ENV}
- name: Run the separate tests - name: Run the separate tests
if: ${{ !inputs.build_only }} if: ${{ !inputs.build_only }}
@@ -277,23 +227,8 @@ jobs:
env: env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }} BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
run: | run: |
set -o pipefail ./xrpld --unittest --unittest-jobs "${BUILD_NPROC}"
# Coverage builds are slower due to instrumentation; use fewer parallel jobs to avoid flakiness
[ "$COVERAGE_ENABLED" = "true" ] && BUILD_NPROC=$(( BUILD_NPROC - 2 ))
./xrpld --unittest --unittest-jobs "${BUILD_NPROC}" 2>&1 | tee unittest.log
- name: Show test failure summary
if: ${{ failure() && !inputs.build_only }}
working-directory: ${{ runner.os == 'Windows' && format('{0}/{1}', env.BUILD_DIR, inputs.build_type) || env.BUILD_DIR }}
run: |
if [ ! -f unittest.log ]; then
echo "unittest.log not found; embedded tests may not have run."
exit 0
fi
if ! grep -E "failed" unittest.log; then
echo "Log present but no failure lines found in unittest.log."
fi
- name: Debug failure (Linux) - name: Debug failure (Linux)
if: ${{ failure() && runner.os == 'Linux' && !inputs.build_only }} if: ${{ failure() && runner.os == 'Linux' && !inputs.build_only }}
run: | run: |
@@ -316,8 +251,8 @@ jobs:
--target coverage --target coverage
- name: Upload coverage report - name: Upload coverage report
if: ${{ github.repository == 'XRPLF/rippled' && !inputs.build_only && env.COVERAGE_ENABLED == 'true' }} if: ${{ github.repository_owner == 'XRPLF' && !inputs.build_only && env.COVERAGE_ENABLED == 'true' }}
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0 uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with: with:
disable_search: true disable_search: true
disable_telem: true disable_telem: true


@@ -18,9 +18,9 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Check levelization - name: Check levelization
run: python .github/scripts/levelization/generate.py run: .github/scripts/levelization/generate.sh
- name: Check for differences - name: Check for differences
env: env:
MESSAGE: | MESSAGE: |
@@ -32,7 +32,7 @@ jobs:
removed from loops.txt, it's probably an improvement, while if removed from loops.txt, it's probably an improvement, while if
something was added, it's probably a regression. something was added, it's probably a regression.
Run '.github/scripts/levelization/generate.py' in your repo, commit Run '.github/scripts/levelization/generate.sh' in your repo, commit
and push the changes. See .github/scripts/levelization/README.md for and push the changes. See .github/scripts/levelization/README.md for
more info. more info.
run: | run: |


@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Check definitions - name: Check definitions
run: .github/scripts/rename/definitions.sh . run: .github/scripts/rename/definitions.sh .
- name: Check copyright notices - name: Check copyright notices
@@ -31,10 +31,6 @@ jobs:
run: .github/scripts/rename/namespace.sh . run: .github/scripts/rename/namespace.sh .
- name: Check config name - name: Check config name
run: .github/scripts/rename/config.sh . run: .github/scripts/rename/config.sh .
- name: Check include guards
run: .github/scripts/rename/include.sh .
- name: Check documentation
run: .github/scripts/rename/docs.sh .
- name: Check for differences - name: Check for differences
env: env:
MESSAGE: | MESSAGE: |


@@ -1,175 +0,0 @@
name: Run clang-tidy on files
on:
workflow_call:
inputs:
files:
description: "List of files to check (empty means check all files)"
type: string
default: ""
create_issue_on_failure:
description: "Whether to create an issue if the check failed"
type: boolean
default: false
defaults:
run:
shell: bash
env:
# Conan installs the generators in the build/generators directory, see the
# layout() method in conanfile.py. We then run CMake from the build directory.
BUILD_DIR: build
BUILD_TYPE: Release
jobs:
run-clang-tidy:
name: Run clang tidy
runs-on: ["self-hosted", "Linux", "X64", "heavy"]
container: "ghcr.io/xrplf/ci/debian-trixie:clang-21-sha-53033a2"
permissions:
issues: write
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Prepare runner
uses: XRPLF/actions/prepare-runner@90f11ee655d1687824fb8793db770477d52afbab
with:
enable_ccache: false
- name: Print build environment
uses: ./.github/actions/print-env
- name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf
id: nproc
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ env.BUILD_TYPE }}
log_verbosity: verbose
- name: Configure CMake
working-directory: ${{ env.BUILD_DIR }}
run: |
cmake \
-G 'Ninja' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
-Dtests=ON \
-Dwerr=ON \
-Dxrpld=ON \
..
# clang-tidy needs headers generated from proto files
- name: Build libxrpl.libpb
working-directory: ${{ env.BUILD_DIR }}
run: |
ninja -j ${{ steps.nproc.outputs.nproc }} xrpl.libpb
- name: Run clang tidy
id: run_clang_tidy
continue-on-error: true
env:
TARGETS: ${{ inputs.files != '' && inputs.files || 'src tests' }}
run: |
run-clang-tidy -j ${{ steps.nproc.outputs.nproc }} -p "${BUILD_DIR}" -quiet -fix -allow-no-checks ${TARGETS} 2>&1 | tee clang-tidy-output.txt
- name: Upload clang-tidy output
if: ${{ github.event.repository.visibility == 'public' && steps.run_clang_tidy.outcome != 'success' }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: clang-tidy-results
path: clang-tidy-output.txt
retention-days: 30
- name: Generate git diff
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
run: |
git diff | tee clang-tidy-git-diff.txt
- name: Upload clang-tidy diff output
if: ${{ github.event.repository.visibility == 'public' && steps.run_clang_tidy.outcome != 'success' }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: clang-tidy-git-diff
path: clang-tidy-git-diff.txt
retention-days: 30
- name: Create an issue
if: ${{ steps.run_clang_tidy.outcome != 'success' && inputs.create_issue_on_failure }}
id: create_issue
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
# Prepare issue body with clang-tidy output
cat > issue.md <<EOF
## Clang-tidy Check Failed
**Workflow:** ${{ github.workflow }}
**Run ID:** ${{ github.run_id }}
**Commit:** ${{ github.sha }}
**Branch/Ref:** ${{ github.ref }}
**Triggered by:** ${{ github.actor }}
### Clang-tidy Output:
\`\`\`
EOF
# Append clang-tidy output (filter for errors and warnings)
if [ -f clang-tidy-output.txt ]; then
# Extract lines containing 'error:', 'warning:', or 'note:'
grep -E '(error:|warning:|note:)' clang-tidy-output.txt > filtered-output.txt || true
# If filtered output is empty, use original (might be a different error format)
if [ ! -s filtered-output.txt ]; then
cp clang-tidy-output.txt filtered-output.txt
fi
# Truncate if too large
head -c 60000 filtered-output.txt >> issue.md
if [ "$(wc -c < filtered-output.txt)" -gt 60000 ]; then
echo "" >> issue.md
echo "... (output truncated, see artifacts for full output)" >> issue.md
fi
rm filtered-output.txt
else
echo "No output file found" >> issue.md
fi
cat >> issue.md <<EOF
\`\`\`
**Workflow run:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
---
*This issue was automatically created by the clang-tidy workflow.*
EOF
# Create the issue
gh issue create \
--label "Bug,Clang-tidy" \
--title "Clang-tidy check failed" \
--body-file ./issue.md \
> create_issue.log
created_issue="$(sed 's|.*/||' create_issue.log)"
echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
echo "Created issue #$created_issue"
rm -f create_issue.log issue.md clang-tidy-output.txt
- name: Fail the workflow if clang-tidy failed
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
run: |
echo "Clang-tidy check failed!"
exit 1


@@ -1,55 +0,0 @@
name: Clang-tidy check
on:
workflow_call:
inputs:
check_only_changed:
description: "Check only changed files in PR. If false, checks all files in the repository."
type: boolean
default: false
create_issue_on_failure:
description: "Whether to create an issue if the check failed"
type: boolean
default: false
defaults:
run:
shell: bash
jobs:
determine-files:
name: Determine files to check
if: ${{ inputs.check_only_changed }}
runs-on: ubuntu-latest
outputs:
clang_tidy_config_changed: ${{ steps.changed_clang_tidy.outputs.any_changed }}
any_cpp_changed: ${{ steps.changed_files.outputs.any_changed }}
all_changed_files: ${{ steps.changed_files.outputs.all_changed_files }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Get changed C++ files
id: changed_files
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6
with:
files: |
**/*.cpp
**/*.h
**/*.ipp
separator: " "
- name: Get changed clang-tidy configuration
id: changed_clang_tidy
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6
with:
files: |
.clang-tidy
run-clang-tidy:
needs: [determine-files]
if: ${{ always() && !cancelled() && (!inputs.check_only_changed || needs.determine-files.outputs.any_cpp_changed == 'true' || needs.determine-files.outputs.clang_tidy_config_changed == 'true') }}
uses: ./.github/workflows/reusable-clang-tidy-files.yml
with:
files: ${{ (needs.determine-files.outputs.clang_tidy_config_changed != 'true' && inputs.check_only_changed) && needs.determine-files.outputs.all_changed_files || '' }}
create_issue_on_failure: ${{ inputs.create_issue_on_failure }}


@@ -29,10 +29,10 @@ jobs:
matrix: ${{ steps.generate.outputs.matrix }} matrix: ${{ steps.generate.outputs.matrix }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Set up Python - name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: 3.13 python-version: 3.13


@@ -43,12 +43,16 @@ jobs:
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158 container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Generate build version number - name: Generate build version number
id: version id: version
uses: ./.github/actions/generate-version uses: ./.github/actions/generate-version
- name: Determine recipe reference
id: ref
run: echo "ref=xrpl/${{ steps.version.outputs.version }}" >> "${GITHUB_OUTPUT}"
- name: Set up Conan - name: Set up Conan
uses: ./.github/actions/setup-conan uses: ./.github/actions/setup-conan
with: with:
@@ -58,46 +62,17 @@ jobs:
- name: Log into Conan remote - name: Log into Conan remote
env: env:
REMOTE_NAME: ${{ inputs.remote_name }} REMOTE_NAME: ${{ inputs.remote_name }}
REMOTE_USERNAME: ${{ secrets.remote_username }} REMOTE_USERNAME: ${{ inputs.remote_username }}
REMOTE_PASSWORD: ${{ secrets.remote_password }} REMOTE_PASSWORD: ${{ inputs.remote_password }}
run: conan remote login "${REMOTE_NAME}" "${REMOTE_USERNAME}" --password "${REMOTE_PASSWORD}" run: conan remote login "${REMOTE_NAME}" "${REMOTE_USERNAME}" --password "${REMOTE_PASSWORD}"
- name: Upload Conan recipe (version) - name: Upload Conan recipe
env: env:
RECIPE_REF: ${{ steps.ref.outputs.ref }}
REMOTE_NAME: ${{ inputs.remote_name }} REMOTE_NAME: ${{ inputs.remote_name }}
run: | run: |
conan export . --version=${{ steps.version.outputs.version }} conan export .
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/${{ steps.version.outputs.version }} conan upload --confirm --check --remote="${REMOTE_NAME}" ${RECIPE_REF}
# When this workflow is triggered by a push event, it will always be when merging into the
# 'develop' branch, see on-trigger.yml.
- name: Upload Conan recipe (develop)
if: ${{ github.event_name == 'push' }}
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=develop
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/develop
# When this workflow is triggered by a pull request event, it will always be when merging into
# one of the 'release' branches, see on-pr.yml.
- name: Upload Conan recipe (rc)
if: ${{ github.event_name == 'pull_request' }}
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=rc
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/rc
# When this workflow is triggered by a push event, it will always be when tagging a final
# release, see on-tag.yml.
- name: Upload Conan recipe (release)
if: ${{ startsWith(github.ref, 'refs/tags/') }}
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=release
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/release
outputs: outputs:
ref: xrpl/${{ steps.version.outputs.version }} ref: ${{ steps.ref.outputs.ref }}


@@ -64,13 +64,13 @@ jobs:
steps: steps:
- name: Cleanup workspace (macOS and Windows) - name: Cleanup workspace (macOS and Windows)
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }} if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
uses: XRPLF/actions/cleanup-workspace@c7d9ce5ebb03c752a354889ecd870cadfc2b1cd4 uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner - name: Prepare runner
uses: XRPLF/actions/prepare-runner@90f11ee655d1687824fb8793db770477d52afbab uses: XRPLF/actions/prepare-runner@f05cab7b8541eee6473aa42beb9d2fe35608a190
with: with:
enable_ccache: false enable_ccache: false
@@ -78,14 +78,12 @@ jobs:
uses: ./.github/actions/print-env uses: ./.github/actions/print-env
- name: Get number of processors - name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc id: nproc
with: with:
subtract: ${{ env.NPROC_SUBTRACT }} subtract: ${{ env.NPROC_SUBTRACT }}
- name: Setup Conan - name: Setup Conan
env:
SANITIZERS: ${{ matrix.sanitizers }}
uses: ./.github/actions/setup-conan uses: ./.github/actions/setup-conan
with: with:
remote_name: ${{ env.CONAN_REMOTE_NAME }} remote_name: ${{ env.CONAN_REMOTE_NAME }}
@@ -100,14 +98,13 @@ jobs:
# Set the verbosity to "quiet" for Windows to avoid an excessive # Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful. # amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }} log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
sanitizers: ${{ matrix.sanitizers }}
- name: Log into Conan remote - name: Log into Conan remote
if: ${{ github.repository == 'XRPLF/rippled' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }} if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}" run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
- name: Upload Conan packages - name: Upload Conan packages
if: ${{ github.repository == 'XRPLF/rippled' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }} if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
env: env:
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }} FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION} run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}

.gitignore

@@ -13,7 +13,6 @@
Debug/ Debug/
Release/ Release/
/.build/ /.build/
/.venv/
/build/ /build/
/db/ /db/
/out.txt /out.txt
@@ -43,9 +42,6 @@ gmon.out
# Locally patched Conan recipes # Locally patched Conan recipes
external/conan-center-index/ external/conan-center-index/
# Local conan directory
.conan
# XCode IDE. # XCode IDE.
*.pbxuser *.pbxuser
!default.pbxuser !default.pbxuser
@@ -68,21 +64,7 @@ DerivedData
/.vs/ /.vs/
/.vscode/ /.vscode/
# zed IDE.
/.zed/
# AI tools. # AI tools.
/.agent
/.agents
/.augment /.augment
/.claude /.claude
/CLAUDE.md /CLAUDE.md
# Python
__pycache__
# Direnv's directory
/.direnv
# clangd cache
/.cache


@@ -13,64 +13,34 @@ repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0 rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
hooks: hooks:
- id: check-added-large-files
args: [--maxkb=400, --enforce-all]
- id: trailing-whitespace - id: trailing-whitespace
- id: end-of-file-fixer - id: end-of-file-fixer
- id: mixed-line-ending
- id: check-merge-conflict - id: check-merge-conflict
args: [--assume-in-merge] args: [--assume-in-merge]
- repo: local
hooks:
- id: clang-tidy
name: "clang-tidy (enable with: TIDY=1)"
entry: ./bin/pre-commit/clang_tidy_check.py
language: python
types_or: [c++, c]
exclude: ^include/xrpl/protocol_autogen
pass_filenames: false # script determines the staged files itself
- id: fix-include-style
name: fix include style
entry: ./bin/pre-commit/fix_include_style.py
language: python
types_or: [c++, c]
exclude: ^include/xrpl/protocol_autogen/(transactions|ledger_entries)/
- repo: https://github.com/pre-commit/mirrors-clang-format - repo: https://github.com/pre-commit/mirrors-clang-format
rev: cd481d7b0bfb5c7b3090c21846317f9a8262e891 # frozen: v22.1.0 rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
hooks: hooks:
- id: clang-format - id: clang-format
args: [--style=file] args: [--style=file]
"types_or": [c++, c, proto] "types_or": [c++, c, proto]
exclude: ^include/xrpl/protocol_autogen/(transactions|ledger_entries)/
- repo: https://github.com/BlankSpruce/gersemi
rev: 0.26.0
hooks:
- id: gersemi
- repo: https://github.com/rbubley/mirrors-prettier - repo: https://github.com/rbubley/mirrors-prettier
rev: c2bc67fe8f8f549cc489e00ba8b45aa18ee713b1 # frozen: v3.8.1 rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
hooks: hooks:
- id: prettier - id: prettier
args: [--end-of-line=auto]
- repo: https://github.com/psf/black-pre-commit-mirror - repo: https://github.com/psf/black-pre-commit-mirror
rev: ea488cebbfd88a5f50b8bd95d5c829d0bb76feb8 # frozen: 26.1.0 rev: 831207fd435b47aeffdf6af853097e64322b4d44 # frozen: v25.12.0
hooks: hooks:
- id: black - id: black
- repo: https://github.com/openstack/bashate
rev: 5798d24d571676fc407e81df574c1ef57b520f23 # frozen: 2.1.1
hooks:
- id: bashate
args: ["--ignore=E006"]
- repo: https://github.com/streetsidesoftware/cspell-cli - repo: https://github.com/streetsidesoftware/cspell-cli
rev: a42085ade523f591dca134379a595e7859986445 # frozen: v9.7.0 rev: 1cfa010f078c354f3ffb8413616280cc28f5ba21 # frozen: v9.4.0
hooks: hooks:
- id: cspell # Spell check changed files - id: cspell # Spell check changed files
exclude: (.config/cspell.config.yaml|^include/xrpl/protocol_autogen/(transactions|ledger_entries)/) exclude: .config/cspell.config.yaml
- id: cspell # Spell check the commit message - id: cspell # Spell check the commit message
name: check commit message spelling name: check commit message spelling
args: args:
@@ -81,27 +51,8 @@ repos:
- .git/COMMIT_EDITMSG - .git/COMMIT_EDITMSG
stages: [commit-msg] stages: [commit-msg]
- repo: local
hooks:
- id: nix-fmt
name: Format Nix files
entry: |
bash -c '
if command -v nix &> /dev/null || [ "$GITHUB_ACTIONS" = "true" ]; then
nix --extra-experimental-features "nix-command flakes" fmt "$@"
else
echo "Skipping nix-fmt: nix not installed and not in GitHub Actions"
exit 0
fi
' --
language: system
types:
- nix
pass_filenames: true
exclude: | exclude: |
(?x)^( (?x)^(
external/.*| external/.*|
.github/scripts/levelization/results/.*\.txt| .github/scripts/levelization/results/.*\.txt
src/tests/libxrpl/protocol_autogen/(transactions|ledger_entries)/.*
)$ )$


@@ -4,106 +4,92 @@ This changelog is intended to list all updates to the [public API methods](https
For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior. For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior.
The API version controls the API behavior you see. This includes what properties you see in responses, what parameters you're permitted to send in requests, and so on. You specify the API version in each of your requests. When a breaking change is introduced to the `xrpld` API, a new version is released. To avoid breaking your code, you should set (or increase) your version when you're ready to upgrade. The API version controls the API behavior you see. This includes what properties you see in responses, what parameters you're permitted to send in requests, and so on. You specify the API version in each of your requests. When a breaking change is introduced to the `rippled` API, a new version is released. To avoid breaking your code, you should set (or increase) your version when you're ready to upgrade.
The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code. For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release.
For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`xrpld`) release.
## API Version 3 (Beta)
API version 3 is currently a beta API. To use it, enable `[beta_rpc_api]` in the `xrpld` configuration. See [API-VERSION-3.md](API-VERSION-3.md) for the full list of changes in API version 3.
## API Version 2 ## API Version 2
API version 2 is available in `xrpld` version 2.0.0 and later. See [API-VERSION-2.md](API-VERSION-2.md) for the full list of changes in API version 2. API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.
#### Removed methods
In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)
- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.
#### Modifications to JSON transaction element in V2
In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)
- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`
Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:
- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`
This change affects the following methods:
- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
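For illustration, an API version 2 `tx` response with these sibling elements might be shaped as in the sketch below. This is not taken from the changelog itself; the account, hash, ledger values, and amounts are placeholders, and unrelated fields are omitted.

```json
{
  "result": {
    "tx_json": {
      "Account": "rEXAMPLESenderAddressPlaceholder",
      "TransactionType": "Payment",
      "Destination": "rEXAMPLEDestinationAddressPlaceholder",
      "DeliverMax": "1000000"
    },
    "meta": {
      "TransactionResult": "tesSUCCESS",
      "delivered_amount": "1000000"
    },
    "hash": "E08D6E9754025BA2534A78707605E0601F03ACE063687A0CA1BDDACFCD1698C7",
    "ledger_index": 95000000,
    "close_time_iso": "2024-01-01T00:00:00Z",
    "validated": true
  }
}
```

Requesting binary output would instead return `tx_blob` and `meta_blob` in place of `tx_json` and `meta`, as described above.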
#### Modification to `Payment` transaction JSON schema
When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733)
- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733)
- On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
- On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
- On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
- On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
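As a sketch of the rename (addresses and values are placeholders), a `Payment` submitted by an API version 2 client uses `DeliverMax` where version 1 clients historically used `Amount`:

```json
{
  "TransactionType": "Payment",
  "Account": "rEXAMPLESenderAddressPlaceholder",
  "Destination": "rEXAMPLEDestinationAddressPlaceholder",
  "DeliverMax": "2000000",
  "Fee": "10",
  "Sequence": 123
}
```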
#### Modifications to account_info response
- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) (https://github.com/XRPLF/rippled/pull/3770)
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
- (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)
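For example, an API version 2 `account_info` request that sets `signer_lists` to a proper boolean could look like the sketch below (the account is a placeholder); in the response, `signer_lists` then appears at the root of `result` rather than nested under `account_data`.

```json
{
  "method": "account_info",
  "params": [
    {
      "api_version": 2,
      "account": "rEXAMPLEAccountAddressPlaceholder",
      "signer_lists": true
    }
  ]
}
```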
#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response
- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571)
- The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
- returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response
- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
## API Version 1 ## API Version 1
This version is supported by all `xrpld` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified. This version is supported by all `rippled` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified.
## Unreleased The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
This section contains changes targeting a future version. ### Inconsistency: server_info - network_id
### Additions The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead.
- `server_definitions`: Added the following new sections to the response ([#6321](https://github.com/XRPLF/rippled/pull/6321)):
- `TRANSACTION_FORMATS`: Describes the fields and their optionality for each transaction type, including common fields shared across all transactions.
- `LEDGER_ENTRY_FORMATS`: Describes the fields and their optionality for each ledger entry type, including common fields shared across all ledger entries.
- `TRANSACTION_FLAGS`: Maps transaction type names to their supported flags and flag values.
- `LEDGER_ENTRY_FLAGS`: Maps ledger entry type names to their flags and flag values.
- `ACCOUNT_SET_FLAGS`: Maps AccountSet flag names (asf flags) to their numeric values.
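The sketch below only loosely illustrates how these new sections might appear in a `server_definitions` response; the nesting and entry contents are assumptions for illustration, not authoritative output, although the flag values shown (`tfPartialPayment`, `lsfDisableMaster`, `asfRequireDest`) are real protocol constants.

```json
{
  "result": {
    "TRANSACTION_FORMATS": { "Payment": { "Destination": "required", "DeliverMax": "required" } },
    "LEDGER_ENTRY_FORMATS": { "AccountRoot": { "Balance": "required", "Domain": "optional" } },
    "TRANSACTION_FLAGS": { "Payment": { "tfPartialPayment": 131072 } },
    "LEDGER_ENTRY_FLAGS": { "AccountRoot": { "lsfDisableMaster": 1048576 } },
    "ACCOUNT_SET_FLAGS": { "asfRequireDest": 1 }
  }
}
```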
### Bugfixes
- Peer Crawler: The `port` field in `overlay.active[]` now consistently returns an integer instead of a string for outbound peers. [#6318](https://github.com/XRPLF/rippled/pull/6318)
- `ping`: The `ip` field is no longer returned as an empty string for proxied connections without a forwarded-for header. It is now omitted, consistent with the behavior for identified connections. [#6730](https://github.com/XRPLF/rippled/pull/6730)
- gRPC `GetLedgerDiff`: Fixed error message that incorrectly said "base ledger not validated" when the desired ledger was not validated. [#6730](https://github.com/XRPLF/rippled/pull/6730)
## XRP Ledger server version 3.1.0
[Version 3.1.0](https://github.com/XRPLF/rippled/releases/tag/3.1.0) was released on Jan 27, 2026.
### Additions in 3.1.0
- `vault_info`: New RPC method to retrieve information about a specific vault (part of XLS-66 Lending Protocol). ([#6156](https://github.com/XRPLF/rippled/pull/6156))
## XRP Ledger server version 3.0.0
[Version 3.0.0](https://github.com/XRPLF/rippled/releases/tag/3.0.0) was released on Dec 9, 2025.
### Additions in 3.0.0
- `ledger_entry`: Supports all ledger entry types with dedicated parsers. ([#5237](https://github.com/XRPLF/rippled/pull/5237))
- `ledger_entry`: New error codes `entryNotFound` and `unexpectedLedgerType` for more specific error handling. ([#5237](https://github.com/XRPLF/rippled/pull/5237))
- `ledger_entry`: Improved error messages with more context (e.g., specifying which field is invalid or missing). ([#5237](https://github.com/XRPLF/rippled/pull/5237))
- `ledger_entry`: Assorted bug fixes in RPC processing. ([#5237](https://github.com/XRPLF/rippled/pull/5237))
- `simulate`: Supports additional metadata in the response. ([#5754](https://github.com/XRPLF/rippled/pull/5754))
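As a reminder of how `simulate` is invoked (a sketch; the transaction fields are placeholders and no signature is required for a dry run), a request might look like:

```json
{
  "method": "simulate",
  "params": [
    {
      "tx_json": {
        "TransactionType": "Payment",
        "Account": "rEXAMPLESenderAddressPlaceholder",
        "Destination": "rEXAMPLEDestinationAddressPlaceholder",
        "DeliverMax": "1000000"
      }
    }
  ]
}
```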
## XRP Ledger server version 2.6.2
[Version 2.6.2](https://github.com/XRPLF/rippled/releases/tag/2.6.2) was released on Nov 19, 2025.
This release contains bug fixes only and no API changes.
## XRP Ledger server version 2.6.1
[Version 2.6.1](https://github.com/XRPLF/rippled/releases/tag/2.6.1) was released on Sep 30, 2025.
This release contains bug fixes only and no API changes.
## XRP Ledger server version 2.6.0
[Version 2.6.0](https://github.com/XRPLF/rippled/releases/tag/2.6.0) was released on Aug 27, 2025.
### Additions in 2.6.0
- `account_info`: Added `allowTrustLineLocking` flag in response. ([#5525](https://github.com/XRPLF/rippled/pull/5525))
- `ledger`: Removed the type filter from the RPC command. ([#4934](https://github.com/XRPLF/rippled/pull/4934))
- `subscribe` (`validations` stream): `network_id` is now included. ([#5579](https://github.com/XRPLF/rippled/pull/5579))
- `subscribe` (`transactions` stream): `nftoken_id`, `nftoken_ids`, and `offer_id` are now included in transaction metadata. ([#5230](https://github.com/XRPLF/rippled/pull/5230))
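For example, subscribing to these streams uses an ordinary WebSocket request such as the sketch below; validation messages then carry `network_id`, and transaction messages include the NFT metadata fields where applicable. The `id` value is arbitrary.

```json
{
  "id": 1,
  "command": "subscribe",
  "streams": ["validations", "transactions"]
}
```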
## XRP Ledger server version 2.5.1
[Version 2.5.1](https://github.com/XRPLF/rippled/releases/tag/2.5.1) was released on Sep 17, 2025.
This release contains bug fixes only and no API changes.
## XRP Ledger server version 2.5.0 ## XRP Ledger server version 2.5.0
[Version 2.5.0](https://github.com/XRPLF/rippled/releases/tag/2.5.0) was released on Jun 24, 2025. As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu).
### Additions and bugfixes in 2.5.0 ### Additions and bugfixes in 2.5.0
- `tx`: Added `ctid` field to the response and improved error handling. ([#4738](https://github.com/XRPLF/rippled/pull/4738)) - `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled.
- `ledger_entry`: Improved error messages in `permissioned_domain`. ([#5344](https://github.com/XRPLF/rippled/pull/5344))
- `simulate`: Improved multi-sign usage. ([#5479](https://github.com/XRPLF/rippled/pull/5479))
- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled. ([#5385](https://github.com/XRPLF/rippled/pull/5385))
- `subscribe` (admin): Removed webhook queue limit to prevent dropping notifications; reduced HTTP timeout from 10 minutes to 30 seconds. ([#5163](https://github.com/XRPLF/rippled/pull/5163))
- `ledger_data` (gRPC): Fixed crashing issue with some invalid markers. ([#5137](https://github.com/XRPLF/rippled/pull/5137))
- `account_lines`: Fixed error with `no_ripple` and `no_ripple_peer` sometimes showing up incorrectly. ([#5345](https://github.com/XRPLF/rippled/pull/5345))
- `account_tx`: Fixed issue with incorrect CTIDs. ([#5408](https://github.com/XRPLF/rippled/pull/5408))
## XRP Ledger server version 2.4.0 ## XRP Ledger server version 2.4.0
@@ -111,19 +97,11 @@ This release contains bug fixes only and no API changes.
### Additions and bugfixes in 2.4.0 ### Additions and bugfixes in 2.4.0
- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate). ([#5069](https://github.com/XRPLF/rippled/pull/5069)) - `ledger_entry`: `state` is added an alias for `ripple_state`.
- Signing methods (`sign`, `sign_for`, `submit`): Autofill fees better, properly handle transactions without a base fee, and autofill the `NetworkID` field. ([#5069](https://github.com/XRPLF/rippled/pull/5069)) - `ledger_entry`: Enables case-insensitive filtering by canonical name in addition to case-sensitive filtering by RPC name.
- `ledger_entry`: `state` is added as an alias for `ripple_state`. ([#5199](https://github.com/XRPLF/rippled/pull/5199)) - `validators`: Added new field `validator_list_threshold` in response.
- `ledger`, `ledger_data`, `account_objects`: Support filtering ledger entry types by their canonical names (case-insensitive). ([#5271](https://github.com/XRPLF/rippled/pull/5271)) - `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate)
- `validators`: Added new field `validator_list_threshold` in response. ([#5112](https://github.com/XRPLF/rippled/pull/5112)) - Signing methods autofill fees better and properly handle transactions that don't have a base fee, and will also autofill the `NetworkID` field.
- `server_info`: Added git commit hash info on admin connection. ([#5225](https://github.com/XRPLF/rippled/pull/5225))
- `server_definitions`: Changed larger `UInt` serialized types to `Hash`. ([#5231](https://github.com/XRPLF/rippled/pull/5231))
## XRP Ledger server version 2.3.1
[Version 2.3.1](https://github.com/XRPLF/rippled/releases/tag/2.3.1) was released on Jan 29, 2025.
This release contains bug fixes only and no API changes.
## XRP Ledger server version 2.3.0 ## XRP Ledger server version 2.3.0
@@ -131,30 +109,19 @@ This release contains bug fixes only and no API changes.
### Breaking changes in 2.3.0 ### Breaking changes in 2.3.0
- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs). Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC. - `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs).
Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC.
### Additions and bugfixes in 2.3.0 ### Additions and bugfixes in 2.3.0
- `book_changes`: Returns a `validated` field in its response. ([#5096](https://github.com/XRPLF/rippled/pull/5096)) - `book_changes`: Returns a `validated` field in its response, which was missing in prior versions.
- `book_changes`: Accepts shortcut strings (`current`, `closed`, `validated`) for the `ledger_index` parameter. ([#5096](https://github.com/XRPLF/rippled/pull/5096))
- `server_definitions`: Include `index` in response. ([#5190](https://github.com/XRPLF/rippled/pull/5190))
- `account_nfts`: Fix issue where unassociated marker would return incorrect results. ([#5045](https://github.com/XRPLF/rippled/pull/5045))
- `account_objects`: Fix issue where invalid marker would not return an error. ([#5046](https://github.com/XRPLF/rippled/pull/5046))
- `account_objects`: Disallow filtering by ledger entry types that an account cannot hold. ([#5056](https://github.com/XRPLF/rippled/pull/5056))
- `tx`: Allow lowercase CTID. ([#5049](https://github.com/XRPLF/rippled/pull/5049))
- `feature`: Better error handling for invalid values of `feature`. ([#5063](https://github.com/XRPLF/rippled/pull/5063))
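With the shortcut strings now accepted, a `book_changes` request can be as simple as the following sketch, and the response includes the `validated` field noted above:

```json
{
  "method": "book_changes",
  "params": [
    {
      "ledger_index": "validated"
    }
  ]
}
```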
## XRP Ledger server version 2.2.0 ## XRP Ledger server version 2.2.0
[Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive): [Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive):
- `feature`: Add a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781)) - The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
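For instance, a non-admin client can now list amendments with a plain `feature` request such as this sketch (the response, abbreviated here, includes amendment names and related information):

```json
{
  "method": "feature",
  "params": [{}]
}
```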
## XRP Ledger server version 2.0.1
[Version 2.0.1](https://github.com/XRPLF/rippled/releases/tag/2.0.1) was released on Jan 29, 2024. The following additions are non-breaking:
- `path_find`: Fixes unbounded memory growth. ([#4822](https://github.com/XRPLF/rippled/pull/4822))
## XRP Ledger server version 2.0.0 ## XRP Ledger server version 2.0.0
@@ -162,18 +129,24 @@ This release contains bug fixes only and no API changes.
- `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries. - `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries.
- In `Payment` transactions, `DeliverMax` has been added. This is a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking. - In `Payment` transactions, `DeliverMax` has been added. This is a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking.
- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting). The full list of changes is in [API-VERSION-2.md](API-VERSION-2.md). - API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting).
## XRP Ledger server version 2.2.0
The following is a non-breaking addition to the API.
- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
## XRP Ledger server version 1.12.0 ## XRP Ledger server version 1.12.0
[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive): [Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive).
- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. ([#4427](https://github.com/XRPLF/rippled/pull/4427)) - `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. (https://github.com/XRPLF/rippled/pull/4427)
- `ports` contains objects, each containing a `port` for the listening port (a number string), and a `protocol` array listing the supported protocols on that port. - `ports` contains objects, each containing a `port` for the listening port (a number string), and a `protocol` array listing the supported protocols on that port.
- This allows crawlers to build a more detailed topology without needing to port-scan nodes. - This allows crawlers to build a more detailed topology without needing to port-scan nodes.
- (For peers and other non-admin clients, the info about admin ports is excluded.) - (For peers and other non-admin clients, the info about admin ports is excluded.)
- Clawback: The following additions are gated by the Clawback amendment (`featureClawback`). ([#4553](https://github.com/XRPLF/rippled/pull/4553)) - Clawback: The following additions are gated by the Clawback amendment (`featureClawback`). (https://github.com/XRPLF/rippled/pull/4553)
- Adds an [AccountRoot flag](https://xrpl.org/accountroot.html#accountroot-flags) called `lsfAllowTrustLineClawback`. ([#4617](https://github.com/XRPLF/rippled/pull/4617)) - Adds an [AccountRoot flag](https://xrpl.org/accountroot.html#accountroot-flags) called `lsfAllowTrustLineClawback` (https://github.com/XRPLF/rippled/pull/4617)
- Adds the corresponding `asfAllowTrustLineClawback` [AccountSet Flag](https://xrpl.org/accountset.html#accountset-flags) as well. - Adds the corresponding `asfAllowTrustLineClawback` [AccountSet Flag](https://xrpl.org/accountset.html#accountset-flags) as well.
- Clawback is disabled by default, so if an issuer desires the ability to claw back funds, they must use an `AccountSet` transaction to set the AllowTrustLineClawback flag. They must do this before creating any trust lines, offers, escrows, payment channels, or checks. - Clawback is disabled by default, so if an issuer desires the ability to claw back funds, they must use an `AccountSet` transaction to set the AllowTrustLineClawback flag. They must do this before creating any trust lines, offers, escrows, payment channels, or checks.
- Adds the [Clawback transaction type](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-39d-clawback/README.md#331-clawback-transaction), containing these fields: - Adds the [Clawback transaction type](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-39d-clawback/README.md#331-clawback-transaction), containing these fields:
@@ -208,16 +181,16 @@ This release contains bug fixes only and no API changes.
### Breaking changes in 1.11 ### Breaking changes in 1.11
- Added the ability to mark amendments as obsolete. For the `feature` admin API, there is a new possible value for the `vetoed` field. ([#4291](https://github.com/XRPLF/rippled/pull/4291)) - Added the ability to mark amendments as obsolete. For the `feature` admin API, there is a new possible value for the `vetoed` field. (https://github.com/XRPLF/rippled/pull/4291)
- The value of `vetoed` can now be `true`, `false`, or `"Obsolete"`. - The value of `vetoed` can now be `true`, `false`, or `"Obsolete"`.
- Removed the acceptance of seeds or public keys in place of account addresses. ([#4404](https://github.com/XRPLF/rippled/pull/4404)) - Removed the acceptance of seeds or public keys in place of account addresses. (https://github.com/XRPLF/rippled/pull/4404)
- This simplifies the API and encourages better security practices (i.e. seeds should never be sent over the network). - This simplifies the API and encourages better security practices (i.e. seeds should never be sent over the network).
- For the `ledger_data` method, when all entries are filtered out, the `state` field of the response is now an empty list (in other words, an empty array, `[]`). (Previously, it would return `null`.) While this is technically a breaking change, the new behavior is consistent with the documentation, so this is considered only a bug fix. ([#4398](https://github.com/XRPLF/rippled/pull/4398)) - For the `ledger_data` method, when all entries are filtered out, the `state` field of the response is now an empty list (in other words, an empty array, `[]`). (Previously, it would return `null`.) While this is technically a breaking change, the new behavior is consistent with the documentation, so this is considered only a bug fix. (https://github.com/XRPLF/rippled/pull/4398)
- If and when the `fixNFTokenRemint` amendment activates, there will be a new AccountRoot field, `FirstNFTSequence`. This field is set to the current account sequence when the account issues their first NFT. If an account has not issued any NFTs, then the field is not set. ([#4406](https://github.com/XRPLF/rippled/pull/4406)) - If and when the `fixNFTokenRemint` amendment activates, there will be a new AccountRoot field, `FirstNFTSequence`. This field is set to the current account sequence when the account issues their first NFT. If an account has not issued any NFTs, then the field is not set. ([#4406](https://github.com/XRPLF/rippled/pull/4406))
- There is a new account deletion restriction: an account can only be deleted if `FirstNFTSequence` + `MintedNFTokens` + `256` is less than the current ledger sequence. - There is a new account deletion restriction: an account can only be deleted if `FirstNFTSequence` + `MintedNFTokens` + `256` is less than the current ledger sequence.
- This is potentially a breaking change if clients have logic for determining whether an account can be deleted. - This is potentially a breaking change if clients have logic for determining whether an account can be deleted.
- NetworkID - NetworkID
- For sidechains and networks with a network ID greater than 1024, there is a new [transaction common field](https://xrpl.org/transaction-common-fields.html), `NetworkID`. ([#4370](https://github.com/XRPLF/rippled/pull/4370)) - For sidechains and networks with a network ID greater than 1024, there is a new [transaction common field](https://xrpl.org/transaction-common-fields.html), `NetworkID`. (https://github.com/XRPLF/rippled/pull/4370)
- This field helps to prevent replay attacks and is now required for chains whose network ID is 1025 or higher. - This field helps to prevent replay attacks and is now required for chains whose network ID is 1025 or higher.
- The field must be omitted for Mainnet, so there is no change for Mainnet users. - The field must be omitted for Mainnet, so there is no change for Mainnet users.
- There are three new local error codes: - There are three new local error codes:
@@ -227,10 +200,10 @@ This release contains bug fixes only and no API changes.
### Additions and bug fixes in 1.11 ### Additions and bug fixes in 1.11
- Added `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `tx` and `account_tx` responses. ([#4447](https://github.com/XRPLF/rippled/pull/4447)) - Added `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `tx` and `account_tx` responses. (https://github.com/XRPLF/rippled/pull/4447)
- Added an `account_flags` object to the `account_info` method response. ([#4459](https://github.com/XRPLF/rippled/pull/4459)) - Added an `account_flags` object to the `account_info` method response. (https://github.com/XRPLF/rippled/pull/4459)
- Added `NFTokenPages` to the `account_objects` RPC. ([#4352](https://github.com/XRPLF/rippled/pull/4352)) - Added `NFTokenPages` to the `account_objects` RPC. (https://github.com/XRPLF/rippled/pull/4352)
- Fixed: `marker` returned from the `account_lines` command would not work on subsequent commands. ([#4361](https://github.com/XRPLF/rippled/pull/4361)) - Fixed: `marker` returned from the `account_lines` command would not work on subsequent commands. (https://github.com/XRPLF/rippled/pull/4361)
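The `account_flags` object noted above can be inspected with a plain JSON-RPC call. A minimal sketch, assuming a local server on the default admin RPC port; the account address is a placeholder, and the flag names in the trailing comment are recalled from the public docs rather than taken from this changelog.

```bash
# Placeholder address and endpoint; substitute real values.
curl -s -X POST http://localhost:5005/ -d '{
  "method": "account_info",
  "params": [{
    "account": "rEXAMPLExxxxxxxxxxxxxxxxxxxxxxxxxx",
    "ledger_index": "validated"
  }]
}' | python3 -m json.tool
# result.account_flags summarizes account settings (e.g. defaultRipple,
# requireAuthorization, disableMasterKey) as booleans.
```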
## XRP Ledger server version 1.10.0 ## XRP Ledger server version 1.10.0

View File

@@ -1,66 +0,0 @@
# API Version 2
API version 2 is available in `xrpld` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.
For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior.
## Removed methods
In API version 2, the following deprecated methods are no longer available: ([#4759](https://github.com/XRPLF/rippled/pull/4759))
- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.
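For example, the ledger-wide transaction listing previously provided by `tx_history` can be approximated with the `ledger` method. A minimal sketch, assuming a local server on the default admin RPC port:

```bash
curl -s -X POST http://localhost:5005/ -d '{
  "method": "ledger",
  "params": [{
    "api_version": 2,
    "ledger_index": "validated",
    "transactions": true,
    "expand": false
  }]
}' | python3 -m json.tool
# With "transactions": true the response lists the transaction hashes in the
# ledger; set "expand": true to receive full transaction objects instead.
```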
## Modifications to JSON transaction element in API version 2
In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. ([#4775](https://github.com/XRPLF/rippled/pull/4775))
This helps to unify the JSON serialization format of transactions. ([clio#722](https://github.com/XRPLF/clio/issues/722), [#4727](https://github.com/XRPLF/rippled/issues/4727))
- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`
Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:
- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`
This change affects the following methods:
- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
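As an illustration of the new element names, here is a sketch of a `tx` call under API version 2; the endpoint and transaction hash are placeholders.

```bash
curl -s -X POST http://localhost:5005/ -d '{
  "method": "tx",
  "params": [{
    "api_version": 2,
    "transaction": "PLACEHOLDER_TX_HASH"
  }]
}' | python3 -m json.tool
# In the v2 response, the transaction JSON is under result.tx_json, metadata
# is under result.meta, and hash, ledger_index, close_time_iso and validated
# appear as siblings of tx_json rather than inside it.
```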
## Modifications to `Payment` transaction JSON schema
When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. ([#4733](https://github.com/XRPLF/rippled/pull/4733))
- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: ([#4733](https://github.com/XRPLF/rippled/pull/4733))
- On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
- On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
- On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
- On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
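A sketch of what a Payment body looks like when prepared for API version 2; all values are placeholders, and this is not a signed or submittable transaction.

```bash
# DeliverMax replaces Amount on input; Amount is still accepted as an alias,
# but sending both with different values is rejected with rpcINVALID_PARAMS.
cat <<'EOF'
{
  "TransactionType": "Payment",
  "Account": "rSENDERxxxxxxxxxxxxxxxxxxxxxxxxxxx",
  "Destination": "rRECEIVERxxxxxxxxxxxxxxxxxxxxxxxx",
  "DeliverMax": "1000000"
}
EOF
```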
## Modifications to account_info response
- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) ([#3770](https://github.com/XRPLF/rippled/pull/3770))
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. ([#4585](https://github.com/XRPLF/rippled/pull/4585))
- (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)
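A sketch of the stricter validation (placeholder account and endpoint): a string value for `signer_lists`, which API version 1 tolerated, is rejected under version 2.

```bash
curl -s -X POST http://localhost:5005/ -d '{
  "method": "account_info",
  "params": [{
    "api_version": 2,
    "account": "rEXAMPLExxxxxxxxxxxxxxxxxxxxxxxxxx",
    "signer_lists": "yes"
  }]
}'
# Expected under API v2: an "invalidParams" error; pass true or false instead.
```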
## Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response
- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. ([#4571](https://github.com/XRPLF/rippled/pull/4571))
- The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. ([#4545](https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579))
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
- returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. ([#4288](https://github.com/XRPLF/rippled/issues/4288))
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. ([#4620](https://github.com/XRPLF/rippled/pull/4620))
## Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response
- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. ([#4620](https://github.com/XRPLF/rippled/pull/4620))

View File

@@ -1,27 +0,0 @@
# API Version 3
API version 3 is currently a **beta API**; it requires `[beta_rpc_api]` to be enabled in the xrpld configuration. To use this API, clients specify `"api_version" : 3` in each request.
For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior.
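A minimal sketch of enabling the beta API, assuming the usual stanza format of a section header followed by a value of `1`; the config path is a placeholder for whichever file your server actually loads.

```bash
cat >> /path/to/xrpld.cfg <<'EOF'

[beta_rpc_api]
1
EOF
# Restart the server so the setting takes effect.
```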
## Breaking Changes
### Modifications to `amm_info`
The order of error checks has been changed to provide more specific error messages. ([#4924](https://github.com/XRPLF/rippled/pull/4924))
- **Before (API v2)**: When sending an invalid account or asset to `amm_info` while other parameters are not set as expected, the method returns a generic `rpcINVALID_PARAMS` error.
- **After (API v3)**: The same scenario returns a more specific error: `rpcISSUE_MALFORMED` for malformed assets or `rpcACT_MALFORMED` for malformed accounts.
### Modifications to `ledger_entry`
Added support for string shortcuts to look up fixed-location ledger entries using the `"index"` parameter. ([#5644](https://github.com/XRPLF/rippled/pull/5644))
In API version 3, the following string values can be used with the `"index"` parameter:
- `"index": "amendments"` - Returns the `Amendments` ledger entry
- `"index": "fee"` - Returns the `FeeSettings` ledger entry
- `"index": "nunl"` - Returns the `NegativeUNL` ledger entry
- `"index": "hashes"` - Returns the "short" `LedgerHashes` ledger entry (recent ledger hashes)
These shortcuts are only available in API version 3 and later. In API versions 1 and 2, these string values would result in an error.
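For example, the `Amendments` entry can now be fetched by name. A sketch assuming a local server with `[beta_rpc_api]` enabled; the endpoint is a placeholder.

```bash
curl -s -X POST http://localhost:5005/ -d '{
  "method": "ledger_entry",
  "params": [{
    "api_version": 3,
    "index": "amendments",
    "ledger_index": "validated"
  }]
}' | python3 -m json.tool
```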

134
BUILD.md
View File

@@ -125,9 +125,9 @@ default profile.
### Patched recipes ### Patched recipes
Occasionally, we need patched recipes or recipes not present in Conan Center. The recipes in Conan Center occasionally need to be patched for compatibility
We maintain a fork of the Conan Center Index with the latest version of `xrpld`. We maintain a fork of the Conan Center
[here](https://github.com/XRPLF/conan-center-index/) containing the modified and newly added recipes. [here](https://github.com/XRPLF/conan-center-index/) containing the patches.
To ensure our patched recipes are used, you must add our Conan remote at a To ensure our patched recipes are used, you must add our Conan remote at a
higher index than the default Conan Center remote, so it is consulted first. You higher index than the default Conan Center remote, so it is consulted first. You
@@ -137,11 +137,19 @@ can do this by running:
conan remote add --index 0 xrplf https://conan.ripplex.io conan remote add --index 0 xrplf https://conan.ripplex.io
``` ```
Alternatively, you can pull our recipes from the repository and export them locally: Alternatively, you can pull the patched recipes into the repository and use them
locally:
```bash ```bash
# Extract the version number from the lockfile.
function extract_version {
version=$(cat conan.lock | sed -nE "s@.+${1}/(.+)#.+@\1@p" | head -n1)
echo ${version}
}
# Define which recipes to export. # Define which recipes to export.
recipes=('abseil' 'ed25519' 'grpc' 'm4' 'mpt-crypto' 'openssl' 'secp256k1' 'snappy' 'soci' 'wasm-xrplf' 'wasmi') recipes=('ed25519' 'grpc' 'openssl' 'secp256k1' 'snappy' 'soci')
folders=('all' 'all' '3.x.x' 'all' 'all' 'all')
# Selectively check out the recipes from our CCI fork. # Selectively check out the recipes from our CCI fork.
cd external cd external
@@ -150,19 +158,29 @@ cd conan-center-index
git init git init
git remote add origin git@github.com:XRPLF/conan-center-index.git git remote add origin git@github.com:XRPLF/conan-center-index.git
git sparse-checkout init git sparse-checkout init
for recipe in "${recipes[@]}"; do for ((index = 1; index <= ${#recipes[@]}; index++)); do
echo "Checking out recipe '${recipe}'..." recipe=${recipes[index]}
git sparse-checkout add recipes/${recipe} folder=${folders[index]}
echo "Checking out recipe '${recipe}' from folder '${folder}'..."
git sparse-checkout add recipes/${recipe}/${folder}
done done
git fetch origin master git fetch origin master
git checkout master git checkout master
cd ../..
./export_all.sh # Export the recipes into the local cache.
cd ../../ for ((index = 1; index <= ${#recipes[@]}; index++)); do
recipe=${recipes[index]}
folder=${folders[index]}
version=$(extract_version ${recipe})
echo "Exporting '${recipe}/${version}' from '${recipe}/${folder}'..."
conan export --version $(extract_version ${recipe}) \
external/conan-center-index/recipes/${recipe}/${folder}
done
``` ```
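As an optional sanity check (a sketch, assuming Conan 2), you can confirm that the exported recipes landed in your local cache; the recipe names below are just examples from the list above.

```bash
conan list "grpc/*"
conan list "soci/*"
```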
In the case we switch to a newer version of a dependency that still requires a In the case we switch to a newer version of a dependency that still requires a
patch or add a new dependency, it will be necessary for you to pull in the changes and re-export the patch, it will be necessary for you to pull in the changes and re-export the
updated dependencies with the newer version. However, if we switch to a newer updated dependencies with the newer version. However, if we switch to a newer
version that no longer requires a patch, no action is required on your part, as version that no longer requires a patch, no action is required on your part, as
the new recipe will be automatically pulled from the official Conan Center. the new recipe will be automatically pulled from the official Conan Center.
@@ -171,8 +189,6 @@ the new recipe will be automatically pulled from the official Conan Center.
> You might need to add `--lockfile=""` to your `conan install` command > You might need to add `--lockfile=""` to your `conan install` command
> to avoid automatic use of the existing `conan.lock` file when you run > to avoid automatic use of the existing `conan.lock` file when you run
> `conan export` manually on your machine > `conan export` manually on your machine
>
> This is not recommended though, as you might end up using different revisions of recipes.
### Conan profile tweaks ### Conan profile tweaks
@@ -188,14 +204,39 @@ Possible values are ['5.0', '5.1', '6.0', '6.1', '7.0', '7.3', '8.0', '8.1',
Read "http://docs.conan.io/2/knowledge/faq.html#error-invalid-setting" Read "http://docs.conan.io/2/knowledge/faq.html#error-invalid-setting"
``` ```
you need to add your compiler to the list of compiler versions in you need to amend the list of compiler versions in
`$(conan config home)/settings_user.yml`, by adding the required version number(s) `$(conan config home)/settings.yml`, by appending the required version number(s)
to the `version` array specific for your compiler. For example: to the `version` array specific for your compiler. For example:
```yaml ```yaml
compiler: apple-clang:
apple-clang: version:
version: ["17.0"] [
"5.0",
"5.1",
"6.0",
"6.1",
"7.0",
"7.3",
"8.0",
"8.1",
"9.0",
"9.1",
"10.0",
"11.0",
"12.0",
"13",
"13.0",
"13.1",
"14",
"14.0",
"15",
"15.0",
"16",
"16.0",
"17",
"17.0",
]
``` ```
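A sketch of finding the right place and version string for that override; `settings_user.yml` augments the stock `settings.yml` rather than replacing it.

```bash
# Locate the Conan home directory that holds settings_user.yml.
echo "Conan home: $(conan config home)"
# Report the compiler version to add to the YAML above.
clang --version
```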
#### Multiple compilers #### Multiple compilers
@@ -327,36 +368,6 @@ The workaround for this error is to add two lines to your profile:
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS'] tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
``` ```
### Set Up Ccache
To speed up repeated compilations, we recommend that you install
[ccache](https://ccache.dev), a tool that wraps your compiler so that it can
cache build objects locally.
#### Linux
You can install it using the package manager, e.g. `sudo apt install ccache`
(Ubuntu) or `sudo dnf install ccache` (RHEL).
#### macOS
You can install it using Homebrew, i.e. `brew install ccache`.
#### Windows
You can install it using Chocolatey, i.e. `choco install ccache`. If you already
have Ccache installed, then `choco upgrade ccache` will update it to the latest
version. However, if you see an error such as:
```
terminate called after throwing an instance of 'std::bad_alloc'
what(): std::bad_alloc
C:\Program Files\Microsoft Visual Studio\2022\Community\MSBuild\Microsoft\VC\v170\Microsoft.CppCommon.targets(617,5): error MSB6006: "cl.exe" exited with code 3.
```
then please install a specific version of Ccache that we know works, via: `choco
install ccache --version 4.11.3 --allow-downgrade`.
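Regardless of platform, a quick way to confirm that Ccache is actually being used (a sketch) is to zero its statistics, run a build from your build directory, and then check for cache hits:

```bash
ccache --zero-stats
cmake --build . --parallel
ccache --show-stats
```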
### Build and Test ### Build and Test
1. Create a build directory and move into it. 1. Create a build directory and move into it.
@@ -459,21 +470,6 @@ install ccache --version 4.11.3 --allow-downgrade`.
The location of `xrpld` binary in your build directory depends on your The location of `xrpld` binary in your build directory depends on your
CMake generator. Pass `--help` to see the rest of the command line options. CMake generator. Pass `--help` to see the rest of the command line options.
## Code generation
The protocol wrapper classes in `include/xrpl/protocol_autogen/` are generated
from macro definition files in `include/xrpl/protocol/detail/`. If you modify
the macro files (e.g. `transactions.macro`, `ledger_entries.macro`) or the
generation scripts/templates in `cmake/scripts/codegen/`, you need to regenerate the
files:
```
cmake --build . --target setup_code_gen # create venv and install dependencies (once)
cmake --build . --target code_gen # regenerate code
```
The regenerated files should be committed alongside your changes.
## Coverage report ## Coverage report
The coverage report is intended for developers using compilers GCC The coverage report is intended for developers using compilers GCC
@@ -554,10 +550,10 @@ See [Sanitizers docs](./docs/build/sanitizers.md) for more details.
| `werr` | OFF | Treat compilation warnings as errors | | `werr` | OFF | Treat compilation warnings as errors |
| `wextra` | OFF | Enable additional compilation warnings | | `wextra` | OFF | Enable additional compilation warnings |
[Unity builds][5] may be faster for the first build (at the cost of much more [Unity builds][5] may be faster for the first build
memory) since they concatenate sources into fewer translation units. Non-unity (at the cost of much more memory) since they concatenate sources into fewer
builds may be faster for incremental builds, and can be helpful for detecting translation units. Non-unity builds may be faster for incremental builds,
`#include` omissions. and can be helpful for detecting `#include` omissions.
## Troubleshooting ## Troubleshooting
@@ -618,8 +614,8 @@ If you want to experiment with a new package, follow these steps:
`default_options` property (with syntax `'$package:$option': $value`). `default_options` property (with syntax `'$package:$option': $value`).
3. Modify [`CMakeLists.txt`](./CMakeLists.txt): 3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
- Add a call to `find_package($package REQUIRED)`. - Add a call to `find_package($package REQUIRED)`.
- Link a library from the package to the target `xrpl_libs` - Link a library from the package to the target `ripple_libs`
(search for the existing call to `target_link_libraries(xrpl_libs INTERFACE ...)`). (search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
4. Start coding! Don't forget to include whatever headers you need from the package. 4. Start coding! Don't forget to include whatever headers you need from the package.
[1]: https://github.com/conan-io/conan-center-index/issues/13168 [1]: https://github.com/conan-io/conan-center-index/issues/13168

View File

@@ -1,23 +1,20 @@
cmake_minimum_required(VERSION 3.16) cmake_minimum_required(VERSION 3.16)
if(POLICY CMP0074) if(POLICY CMP0074)
cmake_policy(SET CMP0074 NEW) cmake_policy(SET CMP0074 NEW)
endif() endif()
if(POLICY CMP0077) if(POLICY CMP0077)
cmake_policy(SET CMP0077 NEW) cmake_policy(SET CMP0077 NEW)
endif() endif()
# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows. # Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
if(DEFINED CMAKE_MODULE_PATH) file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
endif()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake") list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
project(xrpl) project(xrpl)
set(CMAKE_CXX_EXTENSIONS OFF) set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20) set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
include(CompilationEnv) include(CompilationEnv)
@@ -36,40 +33,51 @@ endif()
# Enable ccache to speed up builds. # Enable ccache to speed up builds.
include(Ccache) include(Ccache)
# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if(Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if(gch)
set(GIT_COMMIT_HASH "${gch}")
message(STATUS gch: ${GIT_COMMIT_HASH})
add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif()
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if(gb)
set(GIT_BRANCH "${gb}")
message(STATUS gb: ${GIT_BRANCH})
add_definitions(-DGIT_BRANCH="${GIT_BRANCH}")
endif()
endif() #git
if(thread_safety_analysis) if(thread_safety_analysis)
add_compile_options( add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS)
-Wthread-safety add_compile_options("-stdlib=libc++")
-D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS add_link_options("-stdlib=libc++")
-DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS
)
add_compile_options("-stdlib=libc++")
add_link_options("-stdlib=libc++")
endif() endif()
include(CheckCXXCompilerFlag) include (CheckCXXCompilerFlag)
include(FetchContent) include (FetchContent)
include(ExternalProject) include (ExternalProject)
include(CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
if(target) if (target)
message( message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
FATAL_ERROR endif ()
"The target option has been removed - use native cmake options to control build"
)
endif()
include(XrplSanity) include(XrplSanity)
include(XrplVersion) include(XrplVersion)
include(XrplSettings) include(XrplSettings)
# this check has to remain in the top-level cmake because of the early return statement # this check has to remain in the top-level cmake
if(packages_only) # because of the early return statement
if(NOT TARGET rpm) if (packages_only)
message( if (NOT TARGET rpm)
FATAL_ERROR message (FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?")
"packages_only requested, but targets were not created - is docker installed?" endif()
) return ()
endif() endif ()
return()
endif()
include(XrplCompiler) include(XrplCompiler)
include(XrplSanitizers) include(XrplSanitizers)
include(XrplInterface) include(XrplInterface)
@@ -77,9 +85,11 @@ include(XrplInterface)
option(only_docs "Include only the docs target?" FALSE) option(only_docs "Include only the docs target?" FALSE)
include(XrplDocs) include(XrplDocs)
if(only_docs) if(only_docs)
return() return()
endif() endif()
###
include(deps/Boost) include(deps/Boost)
add_subdirectory(external/antithesis-sdk) add_subdirectory(external/antithesis-sdk)
@@ -95,48 +105,45 @@ find_package(SOCI REQUIRED)
find_package(SQLite3 REQUIRED) find_package(SQLite3 REQUIRED)
find_package(xxHash REQUIRED) find_package(xxHash REQUIRED)
target_link_libraries( target_link_libraries(xrpl_libs INTERFACE
xrpl_libs ed25519::ed25519
INTERFACE lz4::lz4
ed25519::ed25519 OpenSSL::Crypto
lz4::lz4 OpenSSL::SSL
OpenSSL::Crypto secp256k1::secp256k1
OpenSSL::SSL soci::soci
secp256k1::secp256k1 SQLite::SQLite3
soci::soci
SQLite::SQLite3
) )
option(rocksdb "Enable RocksDB" ON) option(rocksdb "Enable RocksDB" ON)
if(rocksdb) if(rocksdb)
find_package(RocksDB REQUIRED) find_package(RocksDB REQUIRED)
set_target_properties( set_target_properties(RocksDB::rocksdb PROPERTIES
RocksDB::rocksdb INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
PROPERTIES INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
) )
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb) target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
endif() endif()
# Work around changes to Conan recipe for now. # Work around changes to Conan recipe for now.
if(TARGET nudb::core) if(TARGET nudb::core)
set(nudb nudb::core) set(nudb nudb::core)
elseif(TARGET NuDB::nudb) elseif(TARGET NuDB::nudb)
set(nudb NuDB::nudb) set(nudb NuDB::nudb)
else() else()
message(FATAL_ERROR "unknown nudb target") message(FATAL_ERROR "unknown nudb target")
endif() endif()
target_link_libraries(xrpl_libs INTERFACE ${nudb}) target_link_libraries(xrpl_libs INTERFACE ${nudb})
if(coverage) if(coverage)
include(XrplCov) include(XrplCov)
endif() endif()
set(PROJECT_EXPORT_SET XrplExports)
include(XrplCore) include(XrplCore)
include(XrplProtocolAutogen)
include(XrplInstall) include(XrplInstall)
include(XrplValidatorKeys) include(XrplValidatorKeys)
if(tests) if(tests)
include(CTest) include(CTest)
add_subdirectory(src/tests/libxrpl) add_subdirectory(src/tests/libxrpl)
endif() endif()

View File

@@ -127,6 +127,26 @@ tl;dr
> 6. Wrap the body at 72 characters. > 6. Wrap the body at 72 characters.
> 7. Use the body to explain what and why vs. how. > 7. Use the body to explain what and why vs. how.
In addition to those guidelines, please add one of the following
prefixes to the subject line if appropriate.
- `fix:` - The primary purpose is to fix an existing bug.
- `perf:` - The primary purpose is performance improvements.
- `refactor:` - The changes refactor code without affecting
functionality.
- `test:` - The changes _only_ affect unit tests.
- `docs:` - The changes _only_ affect documentation. This can
include code comments in addition to `.md` files like this one.
- `build:` - The changes _only_ affect the build process,
including CMake and/or Conan settings.
- `chore:` - Other tasks that don't affect the binary, but don't fit
any of the other cases. e.g. formatting, git settings, updating
Github Actions jobs.
Whenever possible, when updating commits after the PR is open, please
add the PR number to the end of the subject line. e.g. `test: Add
unit tests for Feature X (#1234)`.
## Pull requests ## Pull requests
In general, pull requests use `develop` as the base branch. In general, pull requests use `develop` as the base branch.
@@ -160,23 +180,6 @@ credibility of the existing approvals is insufficient.
Pull requests must be merged by [squash-and-merge][squash] Pull requests must be merged by [squash-and-merge][squash]
to preserve a linear history for the `develop` branch. to preserve a linear history for the `develop` branch.
### Type of Change
In addition to those guidelines, please start your PR title with one of the following:
- `build:` - The changes _only_ affect the build process, including CMake and/or Conan settings.
- `feat`: New feature (change which adds functionality).
- `fix:` - The primary purpose is to fix an existing bug.
- `docs:` - The changes _only_ affect documentation.
- `test:` - The changes _only_ affect unit tests.
- `ci`: Continuous Integration (changes to our CI configuration files and scripts).
- `style`: Code style (formatting).
- `refactor:` - The changes refactor code without affecting functionality.
- `perf:` - The primary purpose is performance improvements.
- `chore:` - Other tasks that don't affect the binary, but don't fit any of the other cases. e.g. `git` settings, `clang-tidy`, removing dead code, dropping support for older tooling.
The first letter after the type prefix should be capitalized, and the prefix should be followed by a colon and a space, e.g. `feat: Add support for Borrowing Protocol`.
### "Ready to merge" ### "Ready to merge"
A pull request should only have the "Ready to merge" label added when it A pull request should only have the "Ready to merge" label added when it
@@ -216,7 +219,7 @@ coherent rather than a set of _thou shalt not_ commandments.
## Formatting ## Formatting
All code must conform to `clang-format` version 21, All code must conform to `clang-format` version 18,
according to the settings in [`.clang-format`](./.clang-format), according to the settings in [`.clang-format`](./.clang-format),
unless the result would be unreasonably difficult to read or maintain. unless the result would be unreasonably difficult to read or maintain.
To demarcate lines that should be left as-is, surround them with comments like To demarcate lines that should be left as-is, surround them with comments like
@@ -248,58 +251,6 @@ pip3 install pre-commit
pre-commit install pre-commit install
``` ```
## Clang-tidy
All code must pass `clang-tidy` checks according to the settings in [`.clang-tidy`](./.clang-tidy).
There is a Continuous Integration job that runs clang-tidy on pull requests. The CI will check:
- All changed C++ files (`.cpp`, `.h`, `.ipp`) when only code files are modified
- **All files in the repository** when the `.clang-tidy` configuration file is changed
This ensures that configuration changes don't introduce new warnings across the codebase.
### Installing clang-tidy
See the [environment setup guide](./docs/build/environment.md#clang-tidy) for platform-specific installation instructions.
### Running clang-tidy locally
Before running clang-tidy, you must build the project to generate required files (particularly protobuf headers). Refer to [`BUILD.md`](./BUILD.md) for build instructions.
#### Via pre-commit (recommended)
If you have already installed the pre-commit hooks (see above), you can run clang-tidy on your staged files using:
```
TIDY=1 pre-commit run clang-tidy
```
This runs clang-tidy locally with the same configuration/flags as CI, scoped to your staged C++ files. The `TIDY=1` environment variable is required to opt in — without it the hook is skipped.
You can also have clang-tidy run automatically on every `git commit` by setting `TIDY=1` in your shell environment:
```
export TIDY=1
```
With this set, the hook will run as part of `git commit` alongside the other pre-commit checks.
#### Manually
Then run clang-tidy on your local changes:
```
run-clang-tidy -p build -allow-no-checks src tests
```
This will check all source files in the `src`, `include` and `tests` directories using the compile commands from your `build` directory.
If you wish to automatically fix whatever clang-tidy finds _and_ is capable of fixing, add `-fix` to the above command:
```
run-clang-tidy -p build -quiet -fix -allow-no-checks src tests
```
## Contracts and instrumentation ## Contracts and instrumentation
We are using [Antithesis](https://antithesis.com/) for continuous fuzzing, We are using [Antithesis](https://antithesis.com/) for continuous fuzzing,
@@ -553,7 +504,7 @@ All releases, including release candidates and betas, are handled
differently from typical PRs. Most importantly, never use differently from typical PRs. Most importantly, never use
the Github UI to merge a release. the Github UI to merge a release.
Xrpld uses a linear workflow model that can be summarized as: Rippled uses a linear workflow model that can be summarized as:
1. In between releases, developers work against the `develop` branch. 1. In between releases, developers work against the `develop` branch.
2. Periodically, a maintainer will build and tag a beta version from 2. Periodically, a maintainer will build and tag a beta version from
@@ -921,8 +872,7 @@ git push --delete upstream-push master-next
11. [Create a new release on 11. [Create a new release on
Github](https://github.com/XRPLF/rippled/releases). Be sure that Github](https://github.com/XRPLF/rippled/releases). Be sure that
"Set as the latest release" is checked. "Set as the latest release" is checked.
12. Open a PR to update the [API-CHANGELOG](API-CHANGELOG.md) and `API-VERSION-[n].md` with the changes for this release (if any are missing). 12. Finally [reverse merge the release into `develop`](#follow-up-reverse-merge).
13. Finally, [reverse merge the release into `develop`](#follow-up-reverse-merge).
#### Special cases: point releases, hotfixes, etc. #### Special cases: point releases, hotfixes, etc.

View File

@@ -1,7 +1,7 @@
ISC License ISC License
Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant. Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
Copyright (c) 2012-present, the XRP Ledger developers. Copyright (c) 2012-2025, the XRP Ledger developers.
Permission to use, copy, modify, and distribute this software for any Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above purpose with or without fee is hereby granted, provided that the above

View File

@@ -8,11 +8,11 @@ The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powe
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free crypto-asset native to the XRP Ledger, and is designed as a gas token for network services and to bridge different currencies. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP. [XRP](https://xrpl.org/xrp.html) is a public, counterparty-free crypto-asset native to the XRP Ledger, and is designed as a gas token for network services and to bridge different currencies. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
## xrpld ## rippled
The server software that powers the XRP Ledger is called `xrpld` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `xrpld` server software is written primarily in C++ and runs on a variety of platforms. The `xrpld` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html). The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `rippled` server software is written primarily in C++ and runs on a variety of platforms. The `rippled` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html).
If you are interested in running an **API Server** (including a **Full History Server**), take a look at [Clio](https://github.com/XRPLF/clio). (xrpld Reporting Mode has been replaced by Clio.) If you are interested in running an **API Server** (including a **Full History Server**), take a look at [Clio](https://github.com/XRPLF/clio). (rippled Reporting Mode has been replaced by Clio.)
### Build from Source ### Build from Source
@@ -41,19 +41,19 @@ If you are interested in running an **API Server** (including a **Full History S
Here are some good places to start learning the source code: Here are some good places to start learning the source code:
- Read the markdown files in the source tree: `src/xrpld/**/*.md`. - Read the markdown files in the source tree: `src/ripple/**/*.md`.
- Read [the levelization document](.github/scripts/levelization) to get an idea of the internal dependency graph. - Read [the levelization document](.github/scripts/levelization) to get an idea of the internal dependency graph.
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component. - In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.
### Repository Contents ### Repository Contents
| Folder | Contents | | Folder | Contents |
| :--------- | :--------------------------------------------- | | :--------- | :----------------------------------------------- |
| `./bin` | Scripts and data files for XRPL developers. | | `./bin` | Scripts and data files for Ripple integrators. |
| `./Builds` | Platform-specific guides for building `xrpld`. | | `./Builds` | Platform-specific guides for building `rippled`. |
| `./docs` | Source documentation files and doxygen config. | | `./docs` | Source documentation files and doxygen config. |
| `./cfg` | Example configuration files. | | `./cfg` | Example configuration files. |
| `./src` | Source code. | | `./src` | Source code. |
Some of the directories under `src` are external repositories included using Some of the directories under `src` are external repositories included using
git-subtree. See those directories' README files for more details. git-subtree. See those directories' README files for more details.

View File

@@ -6,7 +6,7 @@ For more details on operating an XRP Ledger server securely, please visit https:
## Supported Versions ## Supported Versions
Software constantly evolves. In order to focus resources, we generally only accept vulnerability reports that affect recent and current versions of the software. We always accept reports for issues present in the **master**, **release** or **develop** branches, and with proposed, [open pull requests](https://github.com/XRPLF/rippled/pulls). Software constantly evolves. In order to focus resources, we only generally only accept vulnerability reports that affect recent and current versions of the software. We always accept reports for issues present in the **master**, **release** or **develop** branches, and with proposed, [open pull requests](https://github.com/ripple/rippled/pulls).
## Identifying and Reporting Vulnerabilities ## Identifying and Reporting Vulnerabilities
@@ -22,10 +22,117 @@ Responsible investigation includes, but isn't limited to, the following:
- Not targeting physical security measures, or attempting to use social engineering, spam, distributed denial of service (DDOS) attacks, etc. - Not targeting physical security measures, or attempting to use social engineering, spam, distributed denial of service (DDOS) attacks, etc.
- Investigating bugs in a way that makes a reasonable, good faith effort not to be disruptive or harmful to the XRP Ledger and the broader ecosystem. - Investigating bugs in a way that makes a reasonable, good faith effort not to be disruptive or harmful to the XRP Ledger and the broader ecosystem.
### Responsible Disclosure
If you discover a vulnerability or potential threat, or if you _think_
you have, please reach out by dropping an email using the contact
information below.
Your report should include the following:
- Your contact information (typically, an email address);
- The description of the vulnerability;
- The attack scenario (if any);
- The steps to reproduce the vulnerability;
- Any other relevant details or artifacts, including code, scripts or patches.
In your email, please describe the issue or potential threat. If possible, include a "repro" (code that can reproduce the issue) or describe the best way to reproduce and replicate the issue. Please make your report as detailed and comprehensive as possible.
For more information on responsible disclosure, please read this [Wikipedia article](https://en.wikipedia.org/wiki/Responsible_disclosure).
## Report Handling Process
Please report the bug directly to us and limit further disclosure. If you want to prove that you knew the bug as of a given time, consider using a cryptographic pre-commitment: hash the content of your report and publish the hash on a medium of your choice (e.g. on Twitter or as a memo in a transaction) as "proof" that you had written the text at a given point in time.
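For example, a sketch of such a pre-commitment using standard tools (the filename is a placeholder):

```bash
shasum -a 256 report.txt    # or: openssl dgst -sha256 report.txt
# Publish only the resulting hex digest; keep report.txt private.
```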
Once we receive a report, we:
1. Assign two people to independently evaluate the report;
2. Consider their recommendations;
3. If action is necessary, formulate a plan to address the issue;
4. Communicate privately with the reporter to explain our plan.
5. Prepare, test and release a version which fixes the issue; and
6. Announce the vulnerability publicly.
We will triage and respond to your disclosure within 24 hours. Beyond that, we will work to analyze the issue in more detail, formulate, develop and test a fix.
While we commit to responding within 24 hours of your initial report with our triage assessment, we cannot guarantee a response time for the remaining steps. We will communicate with you throughout this process, letting you know where we are and keeping you updated on the timeframe.
## Bug Bounty Program ## Bug Bounty Program
[Ripple](https://ripple.com) is generously sponsoring a bug bounty program for vulnerabilities in [`xrpld`](https://github.com/XRPLF/rippled) (and other related projects, like [`Clio`](https://github.com/XRPLF/clio), [`xrpl.js`](https://github.com/XRPLF/xrpl.js), [`xrpl-py`](https://github.com/XRPLF/xrpl-py), [`xrpl4j`](https://github.com/XRPLF/xrpl4j)). [Ripple](https://ripple.com) is generously sponsoring a bug bounty program for vulnerabilities in [`rippled`](https://github.com/XRPLF/rippled) (and other related projects, like [`xrpl.js`](https://github.com/XRPLF/xrpl.js), [`xrpl-py`](https://github.com/XRPLF/xrpl-py), [`xrpl4j`](https://github.com/XRPLF/xrpl4j)).
This program allows us to recognize and reward individuals or groups that identify and report bugs. This program allows us to recognize and reward individuals or groups that identify and report bugs. In summary, in order to qualify for a bounty, the bug must be:
We have partnered with Bugcrowd to manage this program. It is a private program, and security researchers can participate based on invitation. If you need access to the program, please email bugs@ripple.com with your Bugcrowd handle or Bugcrowd registered email, and we will get you added to the program. Once you have been added, please submit vulnerability reports through Bugcrowd, not by email. The detailed bug bounty policy is available on the Bugcrowd website. 1. **In scope**. Only bugs in software under the scope of the program qualify. Currently, that means `rippled`, `xrpl.js`, `xrpl-py`, `xrpl4j`.
2. **Relevant**. A security issue, posing a danger to user funds, privacy, or the operation of the XRP Ledger.
3. **Original and previously unknown**. Bugs that are already known and discussed in public do not qualify. Previously reported bugs, even if publicly unknown, are not eligible.
4. **Specific**. We welcome general security advice or recommendations, but we cannot pay bounties for that.
5. **Fixable**. There has to be something we can do to permanently fix the problem. Note that bugs in other peoples software may still qualify in some cases. For example, if you find a bug in a library that we use which can compromise the security of software that is in scope and we can get it fixed, you may qualify for a bounty.
6. **Unused**. If you use the exploit to attack the XRP Ledger, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker we reserve the right not to pay a bounty.
The amount paid varies dramatically. Vulnerabilities that are harmless on their own, but could form part of a critical exploit, will usually receive a bounty. Full-blown exploits can receive much higher bounties. Please don't hold back partial vulnerabilities while trying to construct a full-blown exploit. We will pay a bounty to anyone who reports a complete chain of vulnerabilities even if they have reported each component of the exploit separately and those vulnerabilities have been fixed in the meantime. However, to qualify for the full bounty, you must have been the first to report each of the partial exploits.
### Contacting Us
To report a qualifying bug, please send a detailed report to:
| Email Address | bugs@ripple.com |
| :-----------: | :-------------------------------------------------- |
| Short Key ID | `0xA9F514E0` |
| Long Key ID | `0xD900855AA9F514E0` |
| Fingerprint | `B72C 0654 2F2A E250 2763 A268 D900 855A A9F5 14E0` |
The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
```
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBGkSZAQBEACprU199OhgdsOsygNjiQV4msuN3vDOUooehL+NwfsGfW79Tbqq
Q2u7uQ3NZjW+M2T4nsDwuhkr7pe7xSReR5W8ssaczvtUyxkvbMClilcgZ2OSCAuC
N9tzJsqOqkwBvXoNXkn//T2jnPz0ZU2wSF+NrEibq5FeuyGdoX3yXXBxq9pW9HzK
HkQll63QSl6BzVSGRQq+B6lGgaZGLwf3mzmIND9Z5VGLNK2jKynyz9z091whNG/M
kV+E7/r/bujHk7WIVId07G5/COTXmSr7kFnNEkd2Umw42dkgfiNKvlmJ9M7c1wLK
KbL9Eb4ADuW6rRc5k4s1e6GT8R4/VPliWbCl9SE32hXH8uTkqVIFZP2eyM5WRRHs
aKzitkQG9UK9gcb0kdgUkxOvvgPHAe5IuZlcHFzU4y0dBbU1VEFWVpiLU0q+IuNw
5BRemeHc59YNsngkmAZ+/9zouoShRusZmC8Wzotv75C2qVBcjijPvmjWAUz0Zunm
Lsr+O71vqHE73pERjD07wuD/ISjiYRYYE/bVrXtXLZijC7qAH4RE3nID+2ojcZyO
/2jMQvt7un56RsGH4UBHi3aBHi9bUoDGCXKiQY981cEuNaOxpou7Mh3x/ONzzSvk
sTV6nl1LOZHykN1JyKwaNbTSAiuyoN+7lOBqbV04DNYAHL88PrT21P83aQARAQAB
tB1SaXBwbGUgTGFicyA8YnVnc0ByaXBwbGUuY29tPokCTgQTAQgAOBYhBLcsBlQv
KuJQJ2OiaNkAhVqp9RTgBQJpEmQEAhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheA
AAoJENkAhVqp9RTgBzgP/i7y+aDWl1maig1XMdyb+o0UGusumFSW4Hmj278wlKVv
usgLPihYgHE0PKrv6WRyKOMC1tQEcYYN93M+OeQ1vFhS2YyURq6RCMmh4zq/awXG
uZbG36OURB5NH8lGBOHiN/7O+nY0CgenBT2JWm+GW3nEOAVOVm4+r5GlpPlv+Dp1
NPBThcKXFMnH73++NpSQoDzTfRYHPxhDAX3jkLi/moXfSanOLlR6l94XNNN0jBHW
Quao0rzf4WSXq9g6AS224xhAA5JyIcFl8TX7hzj5HaFn3VWo3COoDu4U7H+BM0fl
85yqiMQypp7EhN2gxpMMWaHY5TFM85U/bFXFYfEgihZ4/gt4uoIzsNI9jlX7mYvG
KFdDij+oTlRsuOxdIy60B3dKcwOH9nZZCz0SPsN/zlRWgKzK4gDKdGhFkU9OlvPu
94ZqscanoiWKDoZkF96+sjgfjkuHsDK7Lwc1Xi+T4drHG/3aVpkYabXox+lrKB/S
yxZjeqOIQzWPhnLgCaLyvsKo5hxKzL0w3eURu8F3IS7RgOOlljv4M+Me9sEVcdNV
aN3/tQwbaomSX1X5D5YXqhBwC3rU3wXwamsscRTGEpkV+JCX6KUqGP7nWmxCpAly
FL05XuOd5SVHJjXLeuje0JqLUpN514uL+bThWwDbDTdAdwW3oK/2WbXz7IfJRLBj
uQINBGkSZAQBEADdI3SL2F72qkrgFqXWE6HSRBu9bsAvTE5QrRPWk7ux6at537r4
S4sIw2dOwLvbyIrDgKNq3LQ5wCK88NO/NeCOFm4AiCJSl3pJHXYnTDoUxTrrxx+o
vSRI4I3fHEql/MqzgiAb0YUezjgFdh3vYheMPp/309PFbOLhiFqEcx80Mx5h06UH
gDzu1qNj3Ec+31NLic5zwkrAkvFvD54d6bqYR3SEgMau6aYEewpGHbWBi2pLqSi2
lQcAeOFixqGpTwDmAnYR8YtjBYepy0MojEAdTHcQQlOYSDk4q4elG+io2N8vECfU
rD6ORecN48GXdZINYWTAdslrUeanmBdgQrYkSpce8TSghgT9P01SNaXxmyaehVUO
lqI4pcg5G2oojAE8ncNS3TwDtt7daTaTC3bAdr4PXDVAzNAiewjMNZPB7xidkDGQ
Y4W1LxTMXyJVWxehYOH7tsbBRKninlfRnLgYzmtIbNRAAvNcsxU6ihv3AV0WFknN
YbSzotEv1Xq/5wk309x8zCDe+sP0cQicvbXafXmUzPAZzeqFg+VLFn7F9MP1WGlW
B1u7VIvBF1Mp9Nd3EAGBAoLRdRu+0dVWIjPTQuPIuD9cCatJA0wVaKUrjYbBMl88
a12LixNVGeSFS9N7ADHx0/o7GNT6l88YbaLP6zggUHpUD/bR+cDN7vllIQARAQAB
iQI2BBgBCAAgFiEEtywGVC8q4lAnY6Jo2QCFWqn1FOAFAmkSZAQCGwwACgkQ2QCF
Wqn1FOAfAA/8CYq4p0p4bobY20CKEMsZrkBTFJyPDqzFwMeTjgpzqbD7Y3Qq5QCK
OBbvY02GWdiIsNOzKdBxiuam2xYP9WHZj4y7/uWEvT0qlPVmDFu+HXjoJ43oxwFd
CUp2gMuQ4cSL3X94VRJ3BkVL+tgBm8CNY0vnTLLOO3kum/R69VsGJS1JSGUWjNM+
4qwS3mz+73xJu1HmERyN2RZF/DGIZI2PyONQQ6aH85G1Dd2ohu2/DBAkQAMBrPbj
FrbDaBLyFhODxU3kTWqnfLlaElSm2EGdIU2yx7n4BggEa//NZRMm5kyeo4vzhtlQ
YIVUMLAOLZvnEqDnsLKp+22FzNR/O+htBQC4lPywl53oYSALdhz1IQlcAC1ru5KR
XPzhIXV6IIzkcx9xNkEclZxmsuy5ERXyKEmLbIHAlzFmnrldlt2ZgXDtzaorLmxj
klKibxd5tF50qOpOivz+oPtFo7n+HmFa1nlVAMxlDCUdM0pEVeYDKI5zfVwalyhZ
NnjpakdZSXMwgc7NP/hH9buF35hKDp7EckT2y3JNYwHsDdy1icXN2q40XZw5tSIn
zkPWdu3OUY8PISohN6Pw4h0RH4ZmoX97E8sEfmdKaT58U4Hf2aAv5r9IWCSrAVqY
u5jvac29CzQR9Kal0A+8phHAXHNFD83SwzIC0syaT9ficAguwGH8X6Q=
=nGuD
-----END PGP PUBLIC KEY BLOCK-----
```

View File

@@ -1,13 +1,14 @@
#!/bin/bash #!/bin/bash
if [[ $# -ne 1 || "$1" == "--help" || "$1" == "-h" ]]; then if [[ $# -ne 1 || "$1" == "--help" || "$1" == "-h" ]]
name=$( basename $0 ) then
cat <<- USAGE name=$( basename $0 )
Usage: $name <username> cat <<- USAGE
Usage: $name <username>
Where <username> is the Github username of the upstream repo. e.g. XRPLF Where <username> is the Github username of the upstream repo. e.g. XRPLF
USAGE USAGE
exit 0 exit 0
fi fi
# Create upstream remotes based on origin # Create upstream remotes based on origin
@@ -15,9 +16,10 @@ shift
user="$1" user="$1"
# Get the origin URL. Expect it be an SSH-style URL # Get the origin URL. Expect it be an SSH-style URL
origin=$( git remote get-url origin ) origin=$( git remote get-url origin )
if [[ "${origin}" == "" ]]; then if [[ "${origin}" == "" ]]
echo Invalid origin remote >&2 then
exit 1 echo Invalid origin remote >&2
exit 1
fi fi
# echo "Origin: ${origin}" # echo "Origin: ${origin}"
# Parse the origin # Parse the origin
@@ -28,9 +30,11 @@ IFS='@' read sshuser server <<< "${remote}"
# echo "SSHUser: ${sshuser}, Server: ${server}" # echo "SSHUser: ${sshuser}, Server: ${server}"
IFS='/' read originuser repo <<< "${originpath}" IFS='/' read originuser repo <<< "${originpath}"
# echo "Originuser: ${originuser}, Repo: ${repo}" # echo "Originuser: ${originuser}, Repo: ${repo}"
if [[ "${sshuser}" == "" || "${server}" == "" || "${originuser}" == "" || "${repo}" == "" ]]; then if [[ "${sshuser}" == "" || "${server}" == "" || "${originuser}" == ""
echo "Can't parse origin URL: ${origin}" >&2 || "${repo}" == "" ]]
exit 1 then
echo "Can't parse origin URL: ${origin}" >&2
exit 1
fi fi
upstream="https://${server}/${user}/${repo}" upstream="https://${server}/${user}/${repo}"
upstreampush="${remote}:${user}/${repo}" upstreampush="${remote}:${user}/${repo}"
@@ -38,34 +42,42 @@ upstreamgroup="upstream upstream-push"
current=$( git remote get-url upstream 2>/dev/null ) current=$( git remote get-url upstream 2>/dev/null )
currentpush=$( git remote get-url upstream-push 2>/dev/null ) currentpush=$( git remote get-url upstream-push 2>/dev/null )
currentgroup=$( git config remotes.upstreams ) currentgroup=$( git config remotes.upstreams )
if [[ "${current}" == "${upstream}" ]]; then if [[ "${current}" == "${upstream}" ]]
echo "Upstream already set up correctly. Skip" then
elif [[ -n "${current}" && "${current}" != "${upstream}" && "${current}" != "${upstreampush}" ]]; then echo "Upstream already set up correctly. Skip"
echo "Upstream already set up as: ${current}. Skip" elif [[ -n "${current}" && "${current}" != "${upstream}" &&
"${current}" != "${upstreampush}" ]]
then
echo "Upstream already set up as: ${current}. Skip"
else else
if [[ "${current}" == "${upstreampush}" ]]; then if [[ "${current}" == "${upstreampush}" ]]
echo "Upstream set to dangerous push URL. Update." then
_run git remote rename upstream upstream-push || \ echo "Upstream set to dangerous push URL. Update."
_run git remote remove upstream _run git remote rename upstream upstream-push || \
currentpush=$( git remote get-url upstream-push 2>/dev/null ) _run git remote remove upstream
fi currentpush=$( git remote get-url upstream-push 2>/dev/null )
_run git remote add upstream "${upstream}" fi
_run git remote add upstream "${upstream}"
fi fi
if [[ "${currentpush}" == "${upstreampush}" ]]; then if [[ "${currentpush}" == "${upstreampush}" ]]
echo "upstream-push already set up correctly. Skip" then
elif [[ -n "${currentpush}" && "${currentpush}" != "${upstreampush}" ]]; then echo "upstream-push already set up correctly. Skip"
echo "upstream-push already set up as: ${currentpush}. Skip" elif [[ -n "${currentpush}" && "${currentpush}" != "${upstreampush}" ]]
then
echo "upstream-push already set up as: ${currentpush}. Skip"
else else
_run git remote add upstream-push "${upstreampush}" _run git remote add upstream-push "${upstreampush}"
fi fi
if [[ "${currentgroup}" == "${upstreamgroup}" ]]; then if [[ "${currentgroup}" == "${upstreamgroup}" ]]
echo "Upstreams group already set up correctly. Skip" then
elif [[ -n "${currentgroup}" && "${currentgroup}" != "${upstreamgroup}" ]]; then echo "Upstreams group already set up correctly. Skip"
echo "Upstreams group already set up as: ${currentgroup}. Skip" elif [[ -n "${currentgroup}" && "${currentgroup}" != "${upstreamgroup}" ]]
then
echo "Upstreams group already set up as: ${currentgroup}. Skip"
else else
_run git config --add remotes.upstreams "${upstreamgroup}" _run git config --add remotes.upstreams "${upstreamgroup}"
fi fi
_run git fetch --jobs=$(nproc) upstreams _run git fetch --jobs=$(nproc) upstreams

View File

@@ -1,16 +1,17 @@
#!/bin/bash #!/bin/bash
if [[ $# -lt 3 || "$1" == "--help" || "$1" = "-h" ]]; then if [[ $# -lt 3 || "$1" == "--help" || "$1" = "-h" ]]
name=$( basename $0 ) then
cat <<- USAGE name=$( basename $0 )
Usage: $name workbranch base/branch user/branch [user/branch [...]] cat <<- USAGE
Usage: $name workbranch base/branch user/branch [user/branch [...]]
* workbranch will be created locally from base/branch * workbranch will be created locally from base/branch
* base/branch and user/branch may be specified as user:branch to allow * base/branch and user/branch may be specified as user:branch to allow
easy copying from Github PRs easy copying from Github PRs
* Remotes for each user must already be set up * Remotes for each user must already be set up
USAGE USAGE
exit 0 exit 0
fi fi
work="$1" work="$1"
@@ -23,8 +24,9 @@ unset branches[0]
set -e set -e
users=() users=()
for b in "${branches[@]}"; do for b in "${branches[@]}"
users+=( $( echo $b | cut -d/ -f1 ) ) do
users+=( $( echo $b | cut -d/ -f1 ) )
done done
users=( $( printf '%s\n' "${users[@]}" | sort -u ) ) users=( $( printf '%s\n' "${users[@]}" | sort -u ) )
@@ -32,9 +34,10 @@ users=( $( printf '%s\n' "${users[@]}" | sort -u ) )
git fetch --multiple upstreams "${users[@]}" git fetch --multiple upstreams "${users[@]}"
git checkout -B "$work" --no-track "$base" git checkout -B "$work" --no-track "$base"
for b in "${branches[@]}"; do for b in "${branches[@]}"
git merge --squash "${b}" do
git commit -S # Use the commit message provided on the PR git merge --squash "${b}"
git commit -S # Use the commit message provided on the PR
done done
# Make sure the commits look right # Make sure the commits look right
@@ -44,11 +47,13 @@ parts=( $( echo $base | sed "s/\// /" ) )
repo="${parts[0]}" repo="${parts[0]}"
b="${parts[1]}" b="${parts[1]}"
push=$repo push=$repo
if [[ "$push" == "upstream" ]]; then if [[ "$push" == "upstream" ]]
push="upstream-push" then
push="upstream-push"
fi fi
if [[ "$repo" == "upstream" ]]; then if [[ "$repo" == "upstream" ]]
repo="upstreams" then
repo="upstreams"
fi fi
cat << PUSH cat << PUSH

View File

@@ -1,16 +1,17 @@
#!/bin/bash #!/bin/bash
if [[ $# -ne 3 || "$1" == "--help" || "$1" = "-h" ]]; then if [[ $# -ne 3 || "$1" == "--help" || "$1" = "-h" ]]
name=$( basename $0 ) then
cat <<- USAGE name=$( basename $0 )
Usage: $name workbranch base/branch version cat <<- USAGE
Usage: $name workbranch base/branch version
* workbranch will be created locally from base/branch. If it exists, * workbranch will be created locally from base/branch. If it exists,
it will be reused, so make sure you don't overwrite any work. it will be reused, so make sure you don't overwrite any work.
* base/branch may be specified as user:branch to allow easy copying * base/branch may be specified as user:branch to allow easy copying
from Github PRs. from Github PRs.
USAGE USAGE
exit 0 exit 0
fi fi
work="$1" work="$1"
@@ -29,9 +30,10 @@ git fetch upstreams
git checkout -B "${work}" --no-track "${base}" git checkout -B "${work}" --no-track "${base}"
push=$( git rev-parse --abbrev-ref --symbolic-full-name '@{push}' \ push=$( git rev-parse --abbrev-ref --symbolic-full-name '@{push}' \
2>/dev/null ) || true 2>/dev/null ) || true
if [[ "${push}" != "" ]]; then if [[ "${push}" != "" ]]
echo "Warning: ${push} may already exist." then
echo "Warning: ${push} may already exist."
fi fi
build=$( find -name BuildInfo.cpp ) build=$( find -name BuildInfo.cpp )

View File

@@ -1,206 +0,0 @@
#!/usr/bin/env python3
"""Pre-commit hook that runs clang-tidy on changed files using run-clang-tidy."""
from __future__ import annotations
import json
import os
import re
import shutil
import subprocess
import sys
from collections import defaultdict
from pathlib import Path
HEADER_EXTENSIONS = {".h", ".hpp", ".ipp"}
SOURCE_EXTENSIONS = {".cpp"}
INCLUDE_RE = re.compile(r"^\s*#\s*include\s*[<\"]([^>\"]+)[>\"]")
def find_run_clang_tidy() -> str | None:
for candidate in ("run-clang-tidy-21", "run-clang-tidy"):
if path := shutil.which(candidate):
return path
return None
def find_build_dir(repo_root: Path) -> Path | None:
for name in (".build", "build"):
candidate = repo_root / name
if (candidate / "compile_commands.json").exists():
return candidate
return None
def build_include_graph(build_dir: Path, repo_root: Path) -> tuple[dict, set]:
"""
Scan all files reachable from compile_commands.json and build an inverted include graph.
Returns:
inverted: header_path -> set of files that include it
source_files: set of all TU paths from compile_commands.json
"""
with open(build_dir / "compile_commands.json") as f:
db = json.load(f)
source_files = {Path(e["file"]).resolve() for e in db}
include_roots = [repo_root / "include", repo_root / "src"]
inverted: dict[Path, set[Path]] = defaultdict(set)
to_scan: set[Path] = set(source_files)
scanned: set[Path] = set()
while to_scan:
file = to_scan.pop()
if file in scanned or not file.exists():
continue
scanned.add(file)
content = file.read_text()
for line in content.splitlines():
m = INCLUDE_RE.match(line)
if not m:
continue
for root in include_roots:
candidate = (root / m.group(1)).resolve()
if candidate.exists():
inverted[candidate].add(file)
if candidate not in scanned:
to_scan.add(candidate)
break
return inverted, source_files
def find_tus_for_headers(
headers: list[Path],
inverted: dict[Path, set[Path]],
source_files: set[Path],
) -> set[Path]:
"""
For each header, pick one TU that transitively includes it.
Prefers a TU whose stem matches the header's stem, otherwise picks the first found.
"""
result: set[Path] = set()
for header in headers:
preferred: Path | None = None
visited: set[Path] = {header}
stack: list[Path] = [header]
while stack:
h = stack.pop()
for inc in inverted.get(h, ()):
if inc in source_files:
if inc.stem == header.stem:
preferred = inc
break
if preferred is None:
preferred = inc
if inc not in visited:
visited.add(inc)
stack.append(inc)
if preferred is not None and preferred.stem == header.stem:
break
if preferred is not None:
result.add(preferred)
return result
def resolve_files(
input_files: list[str], build_dir: Path, repo_root: Path
) -> list[str]:
"""
Split input into source files and headers. Source files are passed through;
headers are resolved to the TUs that transitively include them.
"""
sources: list[Path] = []
headers: list[Path] = []
for f in input_files:
p = Path(f).resolve()
if p.suffix in SOURCE_EXTENSIONS:
sources.append(p)
elif p.suffix in HEADER_EXTENSIONS:
headers.append(p)
if not headers:
return [str(p) for p in sources]
print(
f"Resolving {len(headers)} header(s) to compilation units...", file=sys.stderr
)
inverted, source_files = build_include_graph(build_dir, repo_root)
tus = find_tus_for_headers(headers, inverted, source_files)
if not tus:
print(
"Warning: no compilation units found that include the modified headers; "
"skipping clang-tidy for headers.",
file=sys.stderr,
)
return sorted({str(p) for p in (*sources, *tus)})
def staged_files(repo_root: Path) -> list[str]:
result = subprocess.run(
["git", "diff", "--staged", "--name-only", "--diff-filter=d"],
capture_output=True,
text=True,
cwd=repo_root,
)
if result.returncode != 0:
print(
"clang-tidy check failed: 'git diff --staged' command failed.",
file=sys.stderr,
)
if result.stderr:
print(result.stderr, file=sys.stderr)
sys.exit(result.returncode or 1)
return [str(repo_root / p) for p in result.stdout.splitlines() if p]
def main():
if not os.environ.get("TIDY"):
return 0
repo_root = Path(__file__).parent.parent
files = staged_files(repo_root)
if not files:
return 0
run_clang_tidy = find_run_clang_tidy()
if not run_clang_tidy:
print(
"clang-tidy check failed: TIDY is enabled but neither "
"'run-clang-tidy-21' nor 'run-clang-tidy' was found in PATH.",
file=sys.stderr,
)
return 1
build_dir = find_build_dir(repo_root)
if not build_dir:
print(
"clang-tidy check failed: no build directory with compile_commands.json found "
"(looked for .build/ and build/)",
file=sys.stderr,
)
return 1
tidy_files = resolve_files(files, build_dir, repo_root)
if not tidy_files:
return 0
result = subprocess.run(
[run_clang_tidy, "-quiet", "-p", str(build_dir), "-fix", "-allow-no-checks"]
+ tidy_files
)
return result.returncode
if __name__ == "__main__":
sys.exit(main())

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env python3
"""
Converts quoted includes (#include "...") to angle-bracket includes
(#include <...>), which is the required style in this project.
Usage: ./bin/pre-commit/fix_include_style.py <file1> <file2> ...
"""
import re
import sys
from pathlib import Path
PATTERN = re.compile(r'^(\s*#include\s*)"([^"]+)"', re.MULTILINE)
def fix_includes(path: Path) -> bool:
original = path.read_text(encoding="utf-8")
fixed = PATTERN.sub(r"\1<\2>", original)
if fixed != original:
path.write_text(fixed, encoding="utf-8")
return False
return True
def main() -> int:
files = [Path(f) for f in sys.argv[1:]]
success = True
for path in files:
success &= fix_includes(path)
return 0 if success else 1
if __name__ == "__main__":
sys.exit(main())

View File

@@ -28,7 +28,7 @@
# https://vl.ripple.com # https://vl.ripple.com
# https://unl.xrplf.org # https://unl.xrplf.org
# http://127.0.0.1:8000 # http://127.0.0.1:8000
# file:///etc/opt/xrpld/vl.txt # file:///etc/opt/ripple/vl.txt
# #
# [validator_list_keys] # [validator_list_keys]
# #
@@ -43,11 +43,11 @@
# ED307A760EE34F2D0CAA103377B1969117C38B8AA0AA1E2A24DAC1F32FC97087ED # ED307A760EE34F2D0CAA103377B1969117C38B8AA0AA1E2A24DAC1F32FC97087ED
# #
# The default validator list publishers that the xrpld instance # The default validator list publishers that the rippled instance
# trusts. # trusts.
# #
# WARNING: Changing these values can cause your xrpld instance to see a # WARNING: Changing these values can cause your rippled instance to see a
# validated ledger that contradicts other xrpld instances' # validated ledger that contradicts other rippled instances'
# validated ledgers (aka a ledger fork) if your validator list(s) # validated ledgers (aka a ledger fork) if your validator list(s)
# do not sufficiently overlap with the list(s) used by others. # do not sufficiently overlap with the list(s) used by others.
# See: https://arxiv.org/pdf/1802.07242.pdf # See: https://arxiv.org/pdf/1802.07242.pdf

View File

@@ -9,7 +9,7 @@
# #
# 2. Peer Protocol # 2. Peer Protocol
# #
# 3. XRPL protocol # 3. Ripple Protocol
# #
# 4. HTTPS Client # 4. HTTPS Client
# #
@@ -383,7 +383,7 @@
# #
# These settings control security and access attributes of the Peer to Peer # These settings control security and access attributes of the Peer to Peer
# server section of the xrpld process. Peer Protocol implements the # server section of the xrpld process. Peer Protocol implements the
# XRPL payment protocol. It is over peer connections that transactions # Ripple Payment protocol. It is over peer connections that transactions
# and validations are passed from machine to machine, to determine the # and validations are passed from machine to machine, to determine the
# contents of validated ledgers. # contents of validated ledgers.
# #
@@ -406,7 +406,7 @@
# #
# [ips] # [ips]
# #
# List of hostnames or ips where the XRPL protocol is served. A default # List of hostnames or ips where the Ripple protocol is served. A default
# starter list is included in the code and used if no other hostnames are # starter list is included in the code and used if no other hostnames are
# available. # available.
# #
@@ -435,7 +435,7 @@
# List of IP addresses or hostnames to which xrpld should always attempt to # List of IP addresses or hostnames to which xrpld should always attempt to
# maintain peer connections. This is useful for manually forming private # maintain peer connections. This is useful for manually forming private
# networks, for example to configure a validation server that connects to the # networks, for example to configure a validation server that connects to the
# XRPL network through a public-facing server, or for building a set # Ripple network through a public-facing server, or for building a set
# of cluster peers. # of cluster peers.
# #
# One address or domain name per line is allowed. A port must be specified # One address or domain name per line is allowed. A port must be specified
@@ -748,8 +748,8 @@
# the folder in which the xrpld.cfg file is located. # the folder in which the xrpld.cfg file is located.
# #
# Examples: # Examples:
# /home/username/validators.txt # /home/ripple/validators.txt
# C:/home/username/validators.txt # C:/home/ripple/validators.txt
# #
# Example content: # Example content:
# [validators] # [validators]
@@ -840,7 +840,7 @@
# #
# 0: Disable the ledger replay feature [default] # 0: Disable the ledger replay feature [default]
# 1: Enable the ledger replay feature. With this feature enabled, when # 1: Enable the ledger replay feature. With this feature enabled, when
# acquiring a ledger from the network, an xrpld node only downloads # acquiring a ledger from the network, a xrpld node only downloads
# the ledger header and the transactions instead of the whole ledger. # the ledger header and the transactions instead of the whole ledger.
# And the ledger is built by applying the transactions to the parent # And the ledger is built by applying the transactions to the parent
# ledger. # ledger.
@@ -853,7 +853,7 @@
# #
# The xrpld server instance uses HTTPS GET requests in a variety of # The xrpld server instance uses HTTPS GET requests in a variety of
# circumstances, including but not limited to contacting trusted domains to # circumstances, including but not limited to contacting trusted domains to
# fetch information such as mapping an email address to an XRPL payment # fetch information such as mapping an email address to a Ripple Payment
# Network address. # Network address.
# #
# [ssl_verify] # [ssl_verify]
@@ -940,7 +940,23 @@
# #
# path Location to store the database # path Location to store the database
# #
# Optional keys for NuDB and RocksDB: # Optional keys
#
# cache_size Size of cache for database records. Default is 16384.
# Setting this value to 0 will use the default value.
#
# cache_age Length of time in minutes to keep database records
# cached. Default is 5 minutes. Setting this value to
# 0 will use the default value.
#
# Note: if neither cache_size nor cache_age is
# specified, the cache for database records will not
# be created. If only one of cache_size or cache_age
# is specified, the cache will be created using the
# default value for the unspecified parameter.
#
# Note: the cache will not be created if online_delete
# is specified.
# #
# fast_load Boolean. If set, load the last persisted ledger # fast_load Boolean. If set, load the last persisted ledger
# from disk upon process start before syncing to # from disk upon process start before syncing to
@@ -948,6 +964,8 @@
# if sufficient IOPS capacity is available. # if sufficient IOPS capacity is available.
# Default 0. # Default 0.
# #
# Optional keys for NuDB or RocksDB:
#
# earliest_seq The default is 32570 to match the XRP ledger # earliest_seq The default is 32570 to match the XRP ledger
# network's earliest allowed sequence. Alternate # network's earliest allowed sequence. Alternate
# networks may set this value. Minimum value of 1. # networks may set this value. Minimum value of 1.
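Where the cache_size and cache_age keys are documented above, a minimal [node_db] stanza combining them might look like the sketch below; the type and path values are illustrative placeholders rather than recommendations, and per the note above the cache only takes effect when online_delete is not configured.

    [node_db]
    type=NuDB
    path=/var/lib/xrpld/db/nudb
    cache_size=16384
    cache_age=5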
@@ -1227,7 +1245,7 @@
# #
#---------- #----------
# #
# The vote settings configure settings for the entire XRPL network. # The vote settings configure settings for the entire Ripple network.
# While a single instance of xrpld cannot unilaterally enforce network-wide # While a single instance of xrpld cannot unilaterally enforce network-wide
# settings, these choices become part of the instance's vote during the # settings, these choices become part of the instance's vote during the
# consensus process for each voting ledger. # consensus process for each voting ledger.
@@ -1416,12 +1434,6 @@
# in this section to a comma-separated list of the addresses # in this section to a comma-separated list of the addresses
# of your Clio servers, in order to bypass xrpld's rate limiting. # of your Clio servers, in order to bypass xrpld's rate limiting.
# #
# TLS/SSL can be enabled for gRPC by specifying ssl_cert and ssl_key.
# Both parameters must be provided together. The ssl_cert_chain parameter
# is optional and provides intermediate CA certificates for the certificate
# chain. The ssl_client_ca parameter is optional and enables mutual TLS
# (client certificate verification).
#
# This port is commented out but can be enabled by removing # This port is commented out but can be enabled by removing
# the '#' from each corresponding line including the entry under [server] # the '#' from each corresponding line including the entry under [server]
# #
@@ -1471,74 +1483,11 @@ admin = 127.0.0.1
protocol = ws protocol = ws
send_queue_limit = 500 send_queue_limit = 500
# gRPC TLS/SSL Configuration
#
# The gRPC port supports optional TLS/SSL encryption. When TLS is not
# configured, the gRPC server will accept unencrypted connections.
#
# ssl_cert = <filename>
# ssl_key = <filename>
#
# To enable TLS for gRPC, both ssl_cert and ssl_key must be specified.
# If only one is provided, xrpld will fail to start.
#
# ssl_cert: Path to the server's SSL certificate file in PEM format.
# ssl_key: Path to the server's SSL private key file in PEM format.
#
# When configured, the gRPC server will only accept TLS-encrypted
# connections. Clients must use TLS (secure) channel credentials rather
# than plaintext / insecure connections.
#
# ssl_cert_chain = <filename>
#
# Optional. Path to intermediate CA certificate(s) in PEM format that
# complete the server's certificate chain.
#
# This file should contain the intermediate CA certificate(s) needed
# to build a trust chain from the server certificate (ssl_cert) to a
# root CA that clients trust. Multiple certificates should be
# concatenated in PEM format.
#
# This is needed when your server certificate was signed by an
# intermediate CA rather than directly by a root CA. Without this,
# clients may fail to verify your server certificate.
#
# If not specified, only the server certificate from ssl_cert will be
# presented to clients.
#
# ssl_client_ca = <filename>
#
# Optional. Path to a CA certificate file in PEM format for verifying
# client certificates (mutual TLS / mTLS).
#
# When specified, the gRPC server will verify client certificates
# against this CA. This enables mutual authentication where both the
# server and client verify each other's identity.
#
# This is typically NOT needed for public-facing gRPC servers. Only
# use this if you want to restrict access to clients with valid
# certificates signed by the specified CA.
#
# If not specified, the server will use one-way TLS (server
# authentication only) and will accept connections from any client.
#
[port_grpc] [port_grpc]
port = 50051 port = 50051
ip = 127.0.0.1 ip = 127.0.0.1
secure_gateway = 127.0.0.1 secure_gateway = 127.0.0.1
# Optional TLS/SSL configuration for gRPC
# To enable TLS, uncomment and configure both ssl_cert and ssl_key:
#ssl_cert = /etc/ssl/certs/grpc-server.crt
#ssl_key = /etc/ssl/private/grpc-server.key
# Optional: Include intermediate CA certificates for complete certificate chain
#ssl_cert_chain = /etc/ssl/certs/grpc-intermediate-ca.crt
# Optional: Enable mutual TLS (client certificate verification)
# Uncomment to require and verify client certificates:
#ssl_client_ca = /etc/ssl/certs/grpc-client-ca.crt
#[port_ws_public] #[port_ws_public]
#port = 6005 #port = 6005
#ip = 127.0.0.1 #ip = 127.0.0.1

View File

@@ -1,32 +1,30 @@
macro(exclude_from_default target_) macro (exclude_from_default target_)
set_target_properties(${target_} PROPERTIES EXCLUDE_FROM_ALL ON) set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties(${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON) set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
endmacro() endmacro ()
macro(exclude_if_included target_) macro (exclude_if_included target_)
get_directory_property(has_parent PARENT_DIRECTORY) get_directory_property(has_parent PARENT_DIRECTORY)
if(has_parent) if (has_parent)
exclude_from_default(${target_}) exclude_from_default (${target_})
endif() endif ()
endmacro() endmacro ()
find_package(Git) find_package(Git)
function(git_branch branch_val) function (git_branch branch_val)
if(NOT GIT_FOUND) if (NOT GIT_FOUND)
return() return ()
endif() endif ()
set(_branch "") set (_branch "")
execute_process( execute_process (COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD"
COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD" WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} RESULT_VARIABLE _git_exit_code
RESULT_VARIABLE _git_exit_code OUTPUT_VARIABLE _temp_branch
OUTPUT_VARIABLE _temp_branch OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET)
ERROR_QUIET if (_git_exit_code EQUAL 0)
) set (_branch ${_temp_branch})
if(_git_exit_code EQUAL 0) endif ()
set(_branch ${_temp_branch}) set (${branch_val} "${_branch}" PARENT_SCOPE)
endif() endfunction ()
set(${branch_val} "${_branch}" PARENT_SCOPE)
endfunction()
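A minimal usage sketch for the git_branch helper above (the output variable name is arbitrary):

    git_branch(current_branch)
    if(NOT current_branch STREQUAL "")
      message(STATUS "Configuring on branch: ${current_branch}")
    endif()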

View File

@@ -1,62 +1,57 @@
find_program(CCACHE_PATH "ccache") find_program(CCACHE_PATH "ccache")
if(NOT CCACHE_PATH) if (NOT CCACHE_PATH)
return() return()
endif() endif ()
# For Linux and macOS we can use the ccache binary directly. # For Linux and macOS we can use the ccache binary directly.
if(NOT MSVC) if (NOT MSVC)
set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PATH}") set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PATH}")
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PATH}") set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PATH}")
message(STATUS "Found ccache: ${CCACHE_PATH}") message(STATUS "Found ccache: ${CCACHE_PATH}")
return() return()
endif() endif ()
# For Windows more effort is required. The code below is a modified version of # For Windows more effort is required. The code below is a modified version of
# https://github.com/ccache/ccache/wiki/MS-Visual-Studio#usage-with-cmake. # https://github.com/ccache/ccache/wiki/MS-Visual-Studio#usage-with-cmake.
if("${CCACHE_PATH}" MATCHES "chocolatey") if ("${CCACHE_PATH}" MATCHES "chocolatey")
message(DEBUG "Ccache path: ${CCACHE_PATH}") message(DEBUG "Ccache path: ${CCACHE_PATH}")
# Chocolatey uses a shim executable that we cannot use directly, in which case we have to find the executable it # Chocolatey uses a shim executable that we cannot use directly, in which
# points to. If we cannot find the target executable then we cannot use ccache. # case we have to find the executable it points to. If we cannot find the
# target executable then we cannot use ccache.
find_program(BASH_PATH "bash") find_program(BASH_PATH "bash")
if(NOT BASH_PATH) if (NOT BASH_PATH)
message(WARNING "Could not find bash.") message(WARNING "Could not find bash.")
return() return()
endif() endif ()
execute_process( execute_process(
COMMAND COMMAND bash -c "export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1"
bash -c OUTPUT_VARIABLE CCACHE_PATH)
"export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1"
OUTPUT_VARIABLE CCACHE_PATH
)
if(NOT CCACHE_PATH) if (NOT CCACHE_PATH)
message(WARNING "Could not find ccache target.") message(WARNING "Could not find ccache target.")
return() return()
endif() endif ()
file(TO_CMAKE_PATH "${CCACHE_PATH}" CCACHE_PATH) file(TO_CMAKE_PATH "${CCACHE_PATH}" CCACHE_PATH)
endif() endif ()
message(STATUS "Found ccache: ${CCACHE_PATH}") message(STATUS "Found ccache: ${CCACHE_PATH}")
# Tell cmake to use ccache for compiling with Visual Studio. # Tell cmake to use ccache for compiling with Visual Studio.
file(COPY_FILE ${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe ONLY_IF_DIFFERENT) file(COPY_FILE
${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe
ONLY_IF_DIFFERENT)
set(CMAKE_VS_GLOBALS set(CMAKE_VS_GLOBALS
"CLToolExe=cl.exe" "CLToolExe=cl.exe"
"CLToolPath=${CMAKE_BINARY_DIR}" "CLToolPath=${CMAKE_BINARY_DIR}"
"TrackFileAccess=false" "TrackFileAccess=false"
"UseMultiToolTask=true" "UseMultiToolTask=true")
)
# By default Visual Studio generators will use /Zi to capture debug information, which is not compatible with ccache, so # By default Visual Studio generators will use /Zi to capture debug information,
# tell it to use /Z7 instead. # which is not compatible with ccache, so tell it to use /Z7 instead.
if(MSVC) if (MSVC)
foreach( foreach (var_
var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
CMAKE_C_FLAGS_DEBUG CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
CMAKE_C_FLAGS_RELEASE string (REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
CMAKE_CXX_FLAGS_DEBUG endforeach ()
CMAKE_CXX_FLAGS_RELEASE endif ()
)
string(REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
endforeach()
endif()

View File

@@ -172,49 +172,44 @@ include(CMakeParseArguments)
option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE) option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)
# Check prereqs # Check prereqs
find_program(GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test) find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)
if(DEFINED CODE_COVERAGE_GCOV_TOOL) if(DEFINED CODE_COVERAGE_GCOV_TOOL)
set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}") set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL}) elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}") set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if(APPLE) if(APPLE)
execute_process( execute_process( COMMAND xcrun -f llvm-cov
COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH
OUTPUT_VARIABLE LLVMCOV_PATH OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_STRIP_TRAILING_WHITESPACE )
) else()
else() find_program( LLVMCOV_PATH llvm-cov )
find_program(LLVMCOV_PATH llvm-cov) endif()
endif() if(LLVMCOV_PATH)
if(LLVMCOV_PATH) set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
set(GCOV_TOOL "${LLVMCOV_PATH} gcov") endif()
endif()
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU") elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
find_program(GCOV_PATH gcov) find_program( GCOV_PATH gcov )
set(GCOV_TOOL "${GCOV_PATH}") set(GCOV_TOOL "${GCOV_PATH}")
endif() endif()
# Check supported compiler (Clang, GNU and Flang) # Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach(LANG ${LANGUAGES}) foreach(LANG ${LANGUAGES})
if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3) if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message( message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
FATAL_ERROR
"Clang version must be 3.0.0 or greater! Aborting..."
)
endif()
elseif(
NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang"
)
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif() endif()
elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif()
endforeach() endforeach()
set(COVERAGE_COMPILER_FLAGS "-g --coverage" CACHE INTERNAL "") set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")
set(COVERAGE_CXX_COMPILER_FLAGS "") set(COVERAGE_CXX_COMPILER_FLAGS "")
set(COVERAGE_C_COMPILER_FLAGS "") set(COVERAGE_C_COMPILER_FLAGS "")
@@ -233,75 +228,49 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path) check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if(HAVE_cxx_fprofile_abs_path) if(HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path")
"${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path"
)
endif() endif()
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path) check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if(HAVE_c_fprofile_abs_path) if(HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path")
"${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path"
)
endif() endif()
check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path) check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path)
if(HAVE_cxx_linker_fprofile_abs_path) if(HAVE_cxx_linker_fprofile_abs_path)
set(COVERAGE_CXX_LINKER_FLAGS set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path")
"${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path"
)
endif() endif()
check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path) check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path)
if(HAVE_c_linker_fprofile_abs_path) if(HAVE_c_linker_fprofile_abs_path)
set(COVERAGE_C_LINKER_FLAGS set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path")
"${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path"
)
endif() endif()
check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update) check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
if(HAVE_cxx_fprofile_update) if(HAVE_cxx_fprofile_update)
set(COVERAGE_CXX_COMPILER_FLAGS set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic")
"${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic"
)
endif() endif()
check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update) check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
if(HAVE_c_fprofile_update) if(HAVE_c_fprofile_update)
set(COVERAGE_C_COMPILER_FLAGS set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic")
"${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic"
)
endif() endif()
check_linker_flag( check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update)
CXX
-fprofile-update=atomic
HAVE_cxx_linker_fprofile_update
)
if(HAVE_cxx_linker_fprofile_update) if(HAVE_cxx_linker_fprofile_update)
set(COVERAGE_CXX_LINKER_FLAGS set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic")
"${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic"
)
endif() endif()
check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update) check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update)
if(HAVE_c_linker_fprofile_update) if(HAVE_c_linker_fprofile_update)
set(COVERAGE_C_LINKER_FLAGS set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic")
"${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic"
)
endif() endif()
endif() endif()
get_property( get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
GENERATOR_IS_MULTI_CONFIG
GLOBAL
PROPERTY GENERATOR_IS_MULTI_CONFIG
)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)) if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
message( message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
WARNING
"Code coverage results with an optimised (non-Debug) build may be misleading"
)
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG) endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
# Defines a target for running and collecting code coverage information # Defines a target for running and collecting code coverage information
@@ -330,13 +299,7 @@ function(setup_target_for_coverage_gcovr)
set(options NONE) set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME FORMAT) set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES) set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments( cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
Coverage
"${options}"
"${oneValueArgs}"
"${multiValueArgs}"
${ARGN}
)
if(NOT GCOV_TOOL) if(NOT GCOV_TOOL)
message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...") message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
@@ -358,33 +321,21 @@ function(setup_target_for_coverage_gcovr)
endif() endif()
if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS) if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS)
message( message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set")
FATAL_ERROR
"EXECUTABLE_ARGS must not be set if EXECUTABLE is not set"
)
endif() endif()
if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS) if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message( message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
FATAL_ERROR
"Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting..."
)
else() else()
if( if((Coverage_FORMAT STREQUAL "html-details")
(Coverage_FORMAT STREQUAL "html-details") OR (Coverage_FORMAT STREQUAL "html-nested"))
OR (Coverage_FORMAT STREQUAL "html-nested") set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
)
set(GCOVR_OUTPUT_FILE
${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html
)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME}) set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif(Coverage_FORMAT STREQUAL "html-single") elseif(Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif( elseif((Coverage_FORMAT STREQUAL "json-summary")
(Coverage_FORMAT STREQUAL "json-summary")
OR (Coverage_FORMAT STREQUAL "json-details") OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls") OR (Coverage_FORMAT STREQUAL "coveralls"))
)
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif(Coverage_FORMAT STREQUAL "txt") elseif(Coverage_FORMAT STREQUAL "txt")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
@@ -397,65 +348,50 @@ function(setup_target_for_coverage_gcovr)
endif() endif()
endif() endif()
if( if((Coverage_FORMAT STREQUAL "cobertura")
(Coverage_FORMAT STREQUAL "cobertura") OR (Coverage_FORMAT STREQUAL "xml"))
OR (Coverage_FORMAT STREQUAL "xml") list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
) list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty)
set(Coverage_FORMAT cobertura) # overwrite xml set(Coverage_FORMAT cobertura) # overwrite xml
elseif(Coverage_FORMAT STREQUAL "sonarqube") elseif(Coverage_FORMAT STREQUAL "sonarqube")
list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "jacoco") elseif(Coverage_FORMAT STREQUAL "jacoco")
list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty )
elseif(Coverage_FORMAT STREQUAL "clover") elseif(Coverage_FORMAT STREQUAL "clover")
list(APPEND GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty )
elseif(Coverage_FORMAT STREQUAL "lcov") elseif(Coverage_FORMAT STREQUAL "lcov")
list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "json-summary") elseif(Coverage_FORMAT STREQUAL "json-summary")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
elseif(Coverage_FORMAT STREQUAL "json-details") elseif(Coverage_FORMAT STREQUAL "json-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
elseif(Coverage_FORMAT STREQUAL "coveralls") elseif(Coverage_FORMAT STREQUAL "coveralls")
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
elseif(Coverage_FORMAT STREQUAL "csv") elseif(Coverage_FORMAT STREQUAL "csv")
list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "txt") elseif(Coverage_FORMAT STREQUAL "txt")
list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "html-single") elseif(Coverage_FORMAT STREQUAL "html-single")
list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained) list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
elseif(Coverage_FORMAT STREQUAL "html-nested") elseif(Coverage_FORMAT STREQUAL "html-nested")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "html-details") elseif(Coverage_FORMAT STREQUAL "html-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
else() else()
message( message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
FATAL_ERROR
"Unsupported output style ${Coverage_FORMAT}! Aborting..."
)
endif() endif()
# Collect excludes (CMake 3.4+: Also compute absolute paths) # Collect excludes (CMake 3.4+: Also compute absolute paths)
set(GCOVR_EXCLUDES "") set(GCOVR_EXCLUDES "")
foreach( foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
EXCLUDE
${Coverage_EXCLUDE}
${COVERAGE_EXCLUDES}
${COVERAGE_GCOVR_EXCLUDES}
)
if(CMAKE_VERSION VERSION_GREATER 3.4) if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component( get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
EXCLUDE
${EXCLUDE}
ABSOLUTE
BASE_DIR ${BASEDIR}
)
endif() endif()
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}") list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
endforeach() endforeach()
@@ -473,29 +409,22 @@ function(setup_target_for_coverage_gcovr)
# before running the coverage target NAME # before running the coverage target NAME
if(DEFINED Coverage_EXECUTABLE) if(DEFINED Coverage_EXECUTABLE)
set(GCOVR_EXEC_TESTS_CMD set(GCOVR_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
${Coverage_EXECUTABLE_ARGS}
) )
endif() endif()
# Create folder # Create folder
if(DEFINED GCOVR_CREATE_FOLDER) if(DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD set(GCOVR_FOLDER_CMD
${CMAKE_COMMAND} ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
-E
make_directory
${GCOVR_CREATE_FOLDER}
)
endif() endif()
# Running gcovr # Running gcovr
set(GCOVR_CMD set(GCOVR_CMD
${GCOVR_PATH} ${GCOVR_PATH}
--gcov-executable --gcov-executable ${GCOV_TOOL}
${GCOV_TOOL}
--gcov-ignore-parse-errors=negative_hits.warn_once_per_file --gcov-ignore-parse-errors=negative_hits.warn_once_per_file
-r -r ${BASEDIR}
${BASEDIR}
${GCOVR_ADDITIONAL_ARGS} ${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS} ${GCOVR_EXCLUDE_ARGS}
--object-directory=${PROJECT_BINARY_DIR} --object-directory=${PROJECT_BINARY_DIR}
@@ -506,23 +435,13 @@ function(setup_target_for_coverage_gcovr)
if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "") if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "")
message(STATUS "Command to run tests: ") message(STATUS "Command to run tests: ")
string( string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
REPLACE ";"
" "
GCOVR_EXEC_TESTS_CMD_SPACED
"${GCOVR_EXEC_TESTS_CMD}"
)
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}") message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
endif() endif()
if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "") if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "")
message(STATUS "Command to create a folder: ") message(STATUS "Command to create a folder: ")
string( string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
REPLACE ";"
" "
GCOVR_FOLDER_CMD_SPACED
"${GCOVR_FOLDER_CMD}"
)
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}") message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
endif() endif()
@@ -531,11 +450,11 @@ function(setup_target_for_coverage_gcovr)
message(STATUS "${GCOVR_CMD_SPACED}") message(STATUS "${GCOVR_CMD_SPACED}")
endif() endif()
add_custom_target( add_custom_target(${Coverage_NAME}
${Coverage_NAME}
COMMAND ${GCOVR_EXEC_TESTS_CMD} COMMAND ${GCOVR_EXEC_TESTS_CMD}
COMMAND ${GCOVR_FOLDER_CMD} COMMAND ${GCOVR_FOLDER_CMD}
COMMAND ${GCOVR_CMD} COMMAND ${GCOVR_CMD}
BYPRODUCTS ${GCOVR_OUTPUT_FILE} BYPRODUCTS ${GCOVR_OUTPUT_FILE}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR} WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES} DEPENDS ${Coverage_DEPENDENCIES}
@@ -544,49 +463,25 @@ function(setup_target_for_coverage_gcovr)
) )
# Show info where to find the report # Show info where to find the report
add_custom_command( add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
TARGET ${Coverage_NAME}
POST_BUILD
COMMAND echo COMMAND echo
COMMENT COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
"Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
) )
endfunction() # setup_target_for_coverage_gcovr endfunction() # setup_target_for_coverage_gcovr
function(add_code_coverage_to_target name scope) function(add_code_coverage_to_target name scope)
separate_arguments( separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}")
COVERAGE_CXX_COMPILER_FLAGS separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}")
NATIVE_COMMAND separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}")
"${COVERAGE_CXX_COMPILER_FLAGS}" separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")
)
separate_arguments(
COVERAGE_C_COMPILER_FLAGS
NATIVE_COMMAND
"${COVERAGE_C_COMPILER_FLAGS}"
)
separate_arguments(
COVERAGE_CXX_LINKER_FLAGS
NATIVE_COMMAND
"${COVERAGE_CXX_LINKER_FLAGS}"
)
separate_arguments(
COVERAGE_C_LINKER_FLAGS
NATIVE_COMMAND
"${COVERAGE_C_LINKER_FLAGS}"
)
# Add compiler options to the target # Add compiler options to the target
target_compile_options( target_compile_options(${name} ${scope}
${name} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
${scope} $<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
$<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>
)
target_link_libraries( target_link_libraries (${name} ${scope}
${name} $<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} gcov>
${scope} $<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS} gcov>
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS}>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS}>
) )
endfunction() # add_code_coverage_to_target endfunction() # add_code_coverage_to_target
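A hypothetical invocation of setup_target_for_coverage_gcovr, assuming a test driver target named xrpl.test and placeholder arguments (neither is taken from this diff):

    setup_target_for_coverage_gcovr(
      NAME coverage_report
      FORMAT html-details
      EXECUTABLE xrpl.test
      EXECUTABLE_ARGS --unittest
      EXCLUDE "external/*" "tests/*"
      DEPENDENCIES xrpl.test
    )

With the html-details format handled above, the report would be written to coverage_report/index.html inside the build directory.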

View File

@@ -1,7 +1,8 @@
# Shared detection of compiler, operating system, and architecture. # Shared detection of compiler, operating system, and architecture.
# #
# This module centralizes environment detection so that other CMake modules can use the same variables instead of # This module centralizes environment detection so that other
# repeating checks on CMAKE_* and built-in platform variables. # CMake modules can use the same variables instead of repeating
# checks on CMAKE_* and built-in platform variables.
# Only run once per configure step. # Only run once per configure step.
include_guard(GLOBAL) include_guard(GLOBAL)
@@ -12,22 +13,17 @@ include_guard(GLOBAL)
set(is_clang FALSE) set(is_clang FALSE)
set(is_gcc FALSE) set(is_gcc FALSE)
set(is_msvc FALSE) set(is_msvc FALSE)
set(is_xcode FALSE)
if(CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") # Clang or AppleClang if(CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") # Clang or AppleClang
set(is_clang TRUE) set(is_clang TRUE)
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set(is_gcc TRUE) set(is_gcc TRUE)
elseif(MSVC) elseif(MSVC)
set(is_msvc TRUE) set(is_msvc TRUE)
else() else()
message(FATAL_ERROR "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}") message(FATAL_ERROR "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}")
endif() endif()
# Xcode generator detection
if(CMAKE_GENERATOR STREQUAL "Xcode")
set(is_xcode TRUE)
endif()
# -------------------------------------------------------------------- # --------------------------------------------------------------------
# Operating system detection # Operating system detection
@@ -37,11 +33,11 @@ set(is_windows FALSE)
set(is_macos FALSE) set(is_macos FALSE)
if(CMAKE_SYSTEM_NAME STREQUAL "Linux") if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
set(is_linux TRUE) set(is_linux TRUE)
elseif(CMAKE_SYSTEM_NAME STREQUAL "Windows") elseif(CMAKE_SYSTEM_NAME STREQUAL "Windows")
set(is_windows TRUE) set(is_windows TRUE)
elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin") elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
set(is_macos TRUE) set(is_macos TRUE)
endif() endif()
# -------------------------------------------------------------------- # --------------------------------------------------------------------
@@ -50,9 +46,9 @@ endif()
set(is_amd64 FALSE) set(is_amd64 FALSE)
set(is_arm64 FALSE) set(is_arm64 FALSE)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64") if(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64")
set(is_amd64 TRUE) set(is_amd64 TRUE)
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64") elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64")
set(is_arm64 TRUE) set(is_arm64 TRUE)
else() else()
message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}") message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif() endif()
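Other CMake modules can branch on the cached flags this module sets instead of re-testing CMAKE_* variables; a small sketch:

    include(CompilationEnv)
    if(is_macos AND is_arm64)
      message(STATUS "Configuring for Apple silicon")
    elseif(is_linux AND is_gcc)
      message(STATUS "Configuring with GCC on Linux")
    endif()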

View File

@@ -1,28 +0,0 @@
include_guard()
set(GIT_BUILD_BRANCH "")
set(GIT_COMMIT_HASH "")
find_package(Git)
if(NOT Git_FOUND)
message(WARNING "Git not found. Git branch and commit hash will be empty.")
return()
endif()
set(GIT_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/.git)
execute_process(
COMMAND
${GIT_EXECUTABLE} --git-dir=${GIT_DIRECTORY} rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_VARIABLE GIT_BUILD_BRANCH
)
execute_process(
COMMAND ${GIT_EXECUTABLE} --git-dir=${GIT_DIRECTORY} rev-parse HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_VARIABLE GIT_COMMIT_HASH
)
message(STATUS "Git branch: ${GIT_BUILD_BRANCH}")
message(STATUS "Git commit hash: ${GIT_COMMIT_HASH}")

View File

@@ -1,22 +1,25 @@
include(isolate_headers) include(isolate_headers)
function(xrpl_add_test name) function(xrpl_add_test name)
set(target ${PROJECT_NAME}.test.${name}) set(target ${PROJECT_NAME}.test.${name})
file( file(GLOB_RECURSE sources CONFIGURE_DEPENDS
GLOB_RECURSE sources "${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp" )
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp" add_executable(${target} ${ARGN} ${sources})
)
add_executable(${target} ${ARGN} ${sources})
isolate_headers( isolate_headers(
${target} ${target}
"${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}"
"${CMAKE_SOURCE_DIR}/tests/${name}" "${CMAKE_SOURCE_DIR}/tests/${name}"
PRIVATE PRIVATE
) )
add_test(NAME ${target} COMMAND ${target}) # Make sure the test isn't optimized away in unity builds
set_target_properties(${target} PROPERTIES
UNITY_BUILD_MODE GROUP
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
add_test(NAME ${target} COMMAND ${target})
endfunction() endfunction()
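A hypothetical call site for the function above, assuming the test sources live in tests/basics/:

    # tests/CMakeLists.txt
    xrpl_add_test(basics)
    # Creates target ${PROJECT_NAME}.test.basics and registers it with CTest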

View File

@@ -8,192 +8,153 @@ include(CompilationEnv)
TODO some/most of these common settings belong in a TODO some/most of these common settings belong in a
toolchain file, especially the ABI-impacting ones toolchain file, especially the ABI-impacting ones
#]=========================================================] #]=========================================================]
add_library(common INTERFACE) add_library (common INTERFACE)
add_library(Xrpl::common ALIAS common) add_library (Xrpl::common ALIAS common)
include(XrplSanitizers) include(XrplSanitizers)
# add a single global dependency on this interface lib # add a single global dependency on this interface lib
link_libraries(Xrpl::common) link_libraries (Xrpl::common)
# Respect CMAKE_POSITION_INDEPENDENT_CODE setting (may be set by Conan toolchain) # Respect CMAKE_POSITION_INDEPENDENT_CODE setting (may be set by Conan toolchain)
if(NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE) if(NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE)
set(CMAKE_POSITION_INDEPENDENT_CODE ON) set(CMAKE_POSITION_INDEPENDENT_CODE ON)
endif() endif()
set_target_properties( set_target_properties (common
common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE})
PROPERTIES
INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE}
)
set(CMAKE_CXX_EXTENSIONS OFF) set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions( target_compile_definitions (common
common INTERFACE
INTERFACE $<$<CONFIG:Debug>:DEBUG _DEBUG>
$<$<CONFIG:Debug>:DEBUG #[===[
_DEBUG>
#[===[
NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
explicitly except for the special case of (profile ON) and (assert OFF). explicitly except for the special case of (profile ON) and (assert OFF).
Presumably this is because we don't want profile builds asserting unless Presumably this is because we don't want profile builds asserting unless
asserts were specifically requested. asserts were specifically requested.
]===] ]===]
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG> $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x. # TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
OPENSSL_SUPPRESS_DEPRECATED OPENSSL_SUPPRESS_DEPRECATED
) )
if(MSVC) if (MSVC)
# remove existing exception flag since we set it to -EHa # remove existing exception flag since we set it to -EHa
string(REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") string (REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
foreach( foreach (var_
var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
CMAKE_C_FLAGS_DEBUG CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
CMAKE_C_FLAGS_RELEASE
CMAKE_CXX_FLAGS_DEBUG
CMAKE_CXX_FLAGS_RELEASE
)
# also remove dynamic runtime
string(REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")
# /ZI (Edit & Continue debugging information) is incompatible with Gy- # also remove dynamic runtime
string(REPLACE "/ZI" "/Zi" ${var_} "${${var_}}") string (REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")
# omit debug info completely under CI (not needed) # /ZI (Edit & Continue debugging information) is incompatible with Gy-
if(is_ci) string (REPLACE "/ZI" "/Zi" ${var_} "${${var_}}")
string(REPLACE "/Zi" " " ${var_} "${${var_}}")
string(REPLACE "/Z7" " " ${var_} "${${var_}}")
endif()
endforeach()
target_compile_options( # omit debug info completely under CI (not needed)
common if (is_ci)
INTERFACE # Increase object file max size string (REPLACE "/Zi" " " ${var_} "${${var_}}")
-bigobj string (REPLACE "/Z7" " " ${var_} "${${var_}}")
# Floating point behavior endif ()
-fp:precise endforeach ()
# __cdecl calling convention
-Gd target_compile_options (common
# Minimal rebuild: disabled INTERFACE
-Gm- -bigobj # Increase object file max size
# Function level linking: disabled -fp:precise # Floating point behavior
-Gy- -Gd # __cdecl calling convention
# Multiprocessor compilation -Gm- # Minimal rebuild: disabled
-MP -Gy- # Function level linking: disabled
# pragma omp: disabled -MP # Multiprocessor compilation
-openmp- -openmp- # pragma omp: disabled
# No error reporting to Internet -errorReport:none # No error reporting to Internet
-errorReport:none -nologo # Suppress login banner
# Suppress login banner -wd4018 # Disable signed/unsigned comparison warnings
-nologo -wd4244 # Disable float to int possible loss of data warnings
# Disable signed/unsigned comparison warnings -wd4267 # Disable size_t to T possible loss of data warnings
-wd4018 -wd4800 # Disable C4800(int to bool performance)
# Disable float to int possible loss of data warnings -wd4503 # Decorated name length exceeded, name was truncated
-wd4244 $<$<COMPILE_LANGUAGE:CXX>:
# Disable size_t to T possible loss of data warnings -EHa
-wd4267 -GR
# Disable C4800(int to bool performance) >
-wd4800 $<$<CONFIG:Release>:-Ox>
# Decorated name length exceeded, name was truncated $<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:
-wd4503 -GS
$<$<COMPILE_LANGUAGE:CXX>: -Zc:forScope
-EHa >
-GR # static runtime
> $<$<CONFIG:Debug>:-MTd>
$<$<CONFIG:Release>:-Ox> $<$<NOT:$<CONFIG:Debug>>:-MT>
$<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>: $<$<BOOL:${werr}>:-WX>
-GS )
-Zc:forScope target_compile_definitions (common
> INTERFACE
# static runtime _WIN32_WINNT=0x6000
$<$<CONFIG:Debug>:-MTd> _SCL_SECURE_NO_WARNINGS
$<$<NOT:$<CONFIG:Debug>>:-MT> _CRT_SECURE_NO_WARNINGS
$<$<BOOL:${werr}>:-WX> WIN32_CONSOLE
) WIN32_LEAN_AND_MEAN
target_compile_definitions( NOMINMAX
common # TODO: Resolve these warnings, don't just silence them
INTERFACE _SILENCE_ALL_CXX17_DEPRECATION_WARNINGS
_WIN32_WINNT=0x6000 $<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:_CRTDBG_MAP_ALLOC>)
_SCL_SECURE_NO_WARNINGS target_link_libraries (common
_CRT_SECURE_NO_WARNINGS INTERFACE
WIN32_CONSOLE -errorreport:none
WIN32_LEAN_AND_MEAN -machine:X64)
NOMINMAX else ()
# TODO: Resolve these warnings, don't just silence them target_compile_options (common
_SILENCE_ALL_CXX17_DEPRECATION_WARNINGS INTERFACE
$<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:_CRTDBG_MAP_ALLOC> -Wall
) -Wdeprecated
target_link_libraries(common INTERFACE -errorreport:none -machine:X64) $<$<BOOL:${is_clang}>:-Wno-deprecated-declarations>
else() $<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
target_compile_options( $<$<BOOL:${werr}>:-Werror>
common -fstack-protector
INTERFACE -Wno-sign-compare
-Wall -Wno-unused-but-set-variable
-Wdeprecated $<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
$<$<BOOL:${is_clang}>:-Wno-deprecated-declarations> # tweak gcc optimization for debug
$<$<BOOL:${wextra}>:-Wextra $<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>
-Wno-unused-parameter> # Add debug symbols to release config
$<$<BOOL:${werr}>:-Werror> $<$<CONFIG:Release>:-g>)
-fstack-protector target_link_libraries (common
-Wno-sign-compare INTERFACE
-Wno-unused-but-set-variable -rdynamic
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing> $<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id>
# tweak gcc optimization for debug # link to static libc/c++ iff:
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0> # * static option set and
# Add debug symbols to release config # * NOT APPLE (AppleClang does not support static libc/c++) and
$<$<CONFIG:Release>:-g> # * NOT SANITIZERS (sanitizers typically don't work with static libc/c++)
) $<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${SANITIZERS_ENABLED}>>>:
target_link_libraries( -static-libstdc++
common -static-libgcc
INTERFACE >)
-rdynamic endif ()
$<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id>
# link to static libc/c++ iff: * static option set and * NOT APPLE (AppleClang does not support static
# libc/c++) and * NOT SANITIZERS (sanitizers typically don't work with static libc/c++)
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${SANITIZERS_ENABLED}>>>:
-static-libstdc++
-static-libgcc
>
)
endif()
# Antithesis instrumentation will only be built and deployed using machines running Linux.
if (voidstar)
  if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
    message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...")
  elseif (NOT is_linux)
    message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
  elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
    message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
  endif ()
endif ()

if (use_mold)
  # use mold linker if available
  execute_process (
    COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version
    ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
  if ("${LD_VERSION}" MATCHES "mold")
    target_link_libraries (common INTERFACE -fuse-ld=mold)
  endif ()
  unset (LD_VERSION)
elseif (use_gold AND is_gcc)
  # use gold linker if available
  execute_process (
    COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version
    ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)

@@ -207,37 +168,34 @@ elseif(use_gold AND is_gcc)

  #[=========================================================[
     NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
     default instead of DT_RPATH, so you might have slightly
     disabling would be to figure out all the settings
     required to make gold play nicely with jemalloc.
  #]=========================================================]
  if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc))
    target_link_libraries (common
      INTERFACE
        -fuse-ld=gold
        -Wl,--no-as-needed
        #[=========================================================[
           see https://bugs.launchpad.net/ubuntu/+source/eglibc/+bug/1253638/comments/5
           DT_RUNPATH does not work great for transitive
           dependencies (of which boost has a few) - so just
           switch to DT_RPATH if doing dynamic linking with gold
        #]=========================================================]
        $<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>)
  endif ()
  unset (LD_VERSION)
elseif (use_lld)
  # use lld linker if available
  execute_process (
    COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
    ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
  if ("${LD_VERSION}" MATCHES "LLD")
    target_link_libraries (common INTERFACE -fuse-ld=lld)
  endif ()
  unset (LD_VERSION)
endif ()

if (assert)
  foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE)
    STRING (REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}")
  endforeach ()
endif ()

cmake/XrplConfig.cmake (new file, 54 lines)
@@ -0,0 +1,54 @@
include (CMakeFindDependencyMacro)
# need to represent system dependencies of the lib here
#[=========================================================[
Boost
#]=========================================================]
if (static OR APPLE OR MSVC)
set (Boost_USE_STATIC_LIBS ON)
endif ()
set (Boost_USE_MULTITHREADED ON)
if (static OR MSVC)
set (Boost_USE_STATIC_RUNTIME ON)
else ()
set (Boost_USE_STATIC_RUNTIME OFF)
endif ()
find_dependency (Boost
COMPONENTS
chrono
container
context
coroutine
date_time
filesystem
program_options
regex
system
thread)
#[=========================================================[
OpenSSL
#]=========================================================]
if (NOT DEFINED OPENSSL_ROOT_DIR)
if (DEFINED ENV{OPENSSL_ROOT})
set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
elseif (APPLE)
find_program (homebrew brew)
if (homebrew)
execute_process (COMMAND ${homebrew} --prefix openssl
OUTPUT_VARIABLE OPENSSL_ROOT_DIR
OUTPUT_STRIP_TRAILING_WHITESPACE)
endif ()
endif ()
file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
endif ()
if (static OR APPLE OR MSVC)
set (OPENSSL_USE_STATIC_LIBS ON)
endif ()
set (OPENSSL_MSVC_STATIC_RT ON)
find_dependency (OpenSSL REQUIRED)
find_dependency (ZLIB)
find_dependency (date)
if (TARGET ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif ()
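For context, a downstream project would normally consume this package config through find_package. The sketch below is not part of the diff: it assumes the exported XrplTargets.cmake (installed under the Xrpl:: namespace by the install rules later in this compare) ends up loaded alongside XrplConfig.cmake, and the project and executable names are hypothetical.

  # Hypothetical consumer CMakeLists.txt (sketch, assumes an installed Xrpl package)
  cmake_minimum_required(VERSION 3.16)
  project(xrpl_consumer CXX)
  # Pulls in cmake/XrplConfig.cmake shown above (Boost, OpenSSL, etc. dependencies)
  find_package(Xrpl REQUIRED)
  add_executable(my_tool main.cpp)
  # Xrpl::xrpl.libxrpl is the library target name exported via the XrplExports set
  target_link_libraries(my_tool PRIVATE Xrpl::xrpl.libxrpl)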

@@ -10,66 +10,62 @@ include(target_protobuf_sources)

# so we just build them as a separate library.
add_library(xrpl.libpb)
set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
target_protobuf_sources(xrpl.libpb xrpl/proto
  LANGUAGE cpp
  IMPORT_DIRS include/xrpl/proto
  PROTOS include/xrpl/proto/xrpl.proto
)
file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
target_protobuf_sources(xrpl.libpb xrpl/proto
  LANGUAGE cpp
  IMPORT_DIRS include/xrpl/proto
  PROTOS "${protos}"
)
target_protobuf_sources(xrpl.libpb xrpl/proto
  LANGUAGE grpc
  IMPORT_DIRS include/xrpl/proto
  PROTOS "${protos}"
  PLUGIN protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
  GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc
)
target_compile_options(xrpl.libpb
  PUBLIC
    $<$<BOOL:${MSVC}>:-wd4996>
    $<$<BOOL:${XCODE}>:
      --system-header-prefix="google/protobuf"
      -Wno-deprecated-dynamic-exception-spec
    >
  PRIVATE
    $<$<BOOL:${MSVC}>:-wd4065>
    $<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
)
target_link_libraries(xrpl.libpb
  PUBLIC
    protobuf::libprotobuf
    gRPC::grpc++
)

# TODO: Clean up the number of library targets later.
add_library(xrpl.imports.main INTERFACE)
target_link_libraries(xrpl.imports.main
  INTERFACE
    absl::random_random
    date::date
    ed25519::ed25519
    LibArchive::LibArchive
    OpenSSL::Crypto
    Xrpl::boost
    Xrpl::libs
    Xrpl::opts
    Xrpl::syslibs
    secp256k1::secp256k1
    xrpl.libpb
    xxHash::xxhash
    $<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
)
include(add_module)

@@ -79,16 +75,6 @@ include(target_link_modules)

add_module(xrpl beast)
target_link_libraries(xrpl.libxrpl.beast PUBLIC xrpl.imports.main)
# Level 02
add_module(xrpl basics)
target_link_libraries(xrpl.libxrpl.basics PUBLIC xrpl.libxrpl.beast)

@@ -102,128 +88,80 @@ target_link_libraries(xrpl.libxrpl.crypto PUBLIC xrpl.libxrpl.basics)

# Level 04
add_module(xrpl protocol)
target_link_libraries(xrpl.libxrpl.protocol PUBLIC
  xrpl.libxrpl.crypto
  xrpl.libxrpl.json
)
# Level 05
add_module(xrpl core)
target_link_libraries(xrpl.libxrpl.core PUBLIC
  xrpl.libxrpl.basics
  xrpl.libxrpl.json
)
# Level 06
add_module(xrpl resource)
target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)
# Level 07
add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC
  xrpl.libxrpl.basics
  xrpl.libxrpl.json
  xrpl.libxrpl.protocol
  xrpl.libxrpl.resource
)
add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
add_module(xrpl nodestore)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC
  xrpl.libxrpl.basics
  xrpl.libxrpl.json
  xrpl.libxrpl.protocol
)
add_module(xrpl shamap)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC
  xrpl.libxrpl.basics
  xrpl.libxrpl.crypto
  xrpl.libxrpl.protocol
  xrpl.libxrpl.nodestore
)
add_module(xrpl ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
  xrpl.libxrpl.basics
  xrpl.libxrpl.json
  xrpl.libxrpl.protocol
)
add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
add_library(xrpl::libxrpl ALIAS xrpl.libxrpl)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
  "${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp"
)
target_sources(xrpl.libxrpl PRIVATE ${sources})
target_link_modules(xrpl PUBLIC
  basics
  beast
  core
  crypto
  json
  protocol
  resource
  server
  nodestore
  shamap
  net
  ledger
)
# All headers in libxrpl are in modules.

@@ -236,50 +174,62 @@ target_link_modules(

# $<INSTALL_INTERFACE:include>)
if(xrpld)
  add_executable(xrpld)
  if(tests)
    target_compile_definitions(xrpld PUBLIC ENABLE_TESTS)
    target_compile_definitions(xrpld PRIVATE
      UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
    )
  endif()
  target_include_directories(xrpld
    PRIVATE
      $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
  )
  file(GLOB_RECURSE sources CONFIGURE_DEPENDS
    "${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
  )
  target_sources(xrpld PRIVATE ${sources})
  if(tests)
    file(GLOB_RECURSE sources CONFIGURE_DEPENDS
      "${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
    )
    target_sources(xrpld PRIVATE ${sources})
  endif()
  target_link_libraries(xrpld
    Xrpl::boost
    Xrpl::opts
    Xrpl::libs
    xrpl.libxrpl
  )
  exclude_if_included(xrpld)
  # define a macro for tests that might need to
  # be excluded or run differently in CI environment
  if(is_ci)
    target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI)
  endif ()
  if(voidstar)
    target_compile_options(xrpld
      PRIVATE
        -fsanitize-coverage=trace-pc-guard
    )
    # xrpld requires access to antithesis-sdk-cpp implementation file
    # antithesis_instrumentation.h, which is not exported as INTERFACE
    target_include_directories(xrpld
      PRIVATE
        ${CMAKE_SOURCE_DIR}/external/antithesis-sdk
    )
  endif()
  # any files that don't play well with unity should be added here
  if(tests)
    set_source_files_properties(
      # these two seem to produce conflicts in beast teardown template methods
      src/test/rpc/ValidatorRPC_test.cpp
      src/test/ledger/Invariants_test.cpp
      PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
  endif()
endif()

@@ -3,15 +3,12 @@

#]===================================================================]
if(NOT coverage)
  message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
endif()

if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
  message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
  return()
endif()

include(ProcessorCount)

@@ -19,35 +16,26 @@ ProcessorCount(PROCESSOR_COUNT)

include(CodeCoverage)

# The instructions for these commands come from the `CodeCoverage` module,
# which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
# then locally changed (see CHANGES: section in `CodeCoverage.cmake`)
set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
  separate_arguments(GCOVR_ADDITIONAL_ARGS)
endif()

list(APPEND GCOVR_ADDITIONAL_ARGS
  --exclude-throw-branches
  --exclude-noncode-lines
  --exclude-unreachable-branches -s
  -j ${PROCESSOR_COUNT})

setup_target_for_coverage_gcovr(
  NAME coverage
  FORMAT ${coverage_format}
  EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
  DEPENDENCIES xrpld xrpl.tests
)

add_code_coverage_to_target(opts INTERFACE)

@@ -3,13 +3,13 @@

#]===================================================================]
if(NOT only_docs)
  return()
endif()

find_package(Doxygen)
if(NOT TARGET Doxygen::doxygen)
  message(STATUS "doxygen executable not found -- skipping docs target")
  return()
endif()

set(doxygen_output_directory "${CMAKE_BINARY_DIR}/docs")

@@ -17,44 +17,40 @@ set(doxygen_include_path "${CMAKE_CURRENT_SOURCE_DIR}/src")

set(doxygen_index_file "${doxygen_output_directory}/html/index.html")
set(doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile")

file(GLOB_RECURSE doxygen_input
  docs/*.md
  include/*.h
  include/*.cpp
  include/*.md
  src/*.h
  src/*.cpp
  src/*.md
  Builds/*.md
  *.md)
list(APPEND doxygen_input
  external/README.md
)
set(dependencies "${doxygen_input}" "${doxyfile}")

function(verbose_find_path variable name)
  # find_path sets a CACHE variable, so don't try using a "local" variable.
  find_path(${variable} "${name}" ${ARGN})
  if(NOT ${variable})
    message(NOTICE "could not find ${name}")
  else()
    message(STATUS "found ${name}: ${${variable}}/${name}")
  endif()
endfunction()

verbose_find_path(doxygen_plantuml_jar_path plantuml.jar PATH_SUFFIXES share/plantuml)
verbose_find_path(doxygen_dot_path dot)

# https://en.cppreference.com/w/Cppreference:Archives
# https://stackoverflow.com/questions/60822559/how-to-move-a-file-download-from-configure-step-to-build-step
set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake")
file(WRITE
  "${download_script}"
  "file(DOWNLOAD \
  https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \
  ${CMAKE_BINARY_DIR}/docs/cppreference.zip \
  EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \

@@ -65,27 +61,23 @@ file(

)
set(tagfile "${CMAKE_BINARY_DIR}/docs/cppreference-doxygen-web.tag.xml")
add_custom_command(
  OUTPUT "${tagfile}"
  COMMAND "${CMAKE_COMMAND}" -P "${download_script}"
  WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs"
)
set(doxygen_tagfiles "${tagfile}=http://en.cppreference.com/w/")
add_custom_command(
  OUTPUT "${doxygen_index_file}"
  COMMAND "${CMAKE_COMMAND}" -E env
    "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}"
    "DOXYGEN_INCLUDE_PATH=${doxygen_include_path}"
    "DOXYGEN_TAGFILES=${doxygen_tagfiles}"
    "DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}"
    "DOXYGEN_DOT_PATH=${doxygen_dot_path}"
    "${DOXYGEN_EXECUTABLE}" "${doxyfile}"
  WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
  DEPENDS "${dependencies}" "${tagfile}")
add_custom_target(docs
  DEPENDS "${doxygen_index_file}"
  SOURCES "${dependencies}")

@@ -2,38 +2,79 @@

install stuff
#]===================================================================]
include(create_symbolic_link)
install (
  TARGETS
    common
    opts
    xrpl_boost
    xrpl_libs
    xrpl_syslibs
    xrpl.imports.main
    xrpl.libpb
    xrpl.libxrpl
    xrpl.libxrpl.basics
    xrpl.libxrpl.beast
    xrpl.libxrpl.core
    xrpl.libxrpl.crypto
    xrpl.libxrpl.json
    xrpl.libxrpl.ledger
    xrpl.libxrpl.net
    xrpl.libxrpl.nodestore
    xrpl.libxrpl.protocol
    xrpl.libxrpl.resource
    xrpl.libxrpl.server
    xrpl.libxrpl.shamap
    antithesis-sdk-cpp
  EXPORT XrplExports
  LIBRARY DESTINATION lib
  ARCHIVE DESTINATION lib
  RUNTIME DESTINATION bin
  INCLUDES DESTINATION include)
install(
  DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
  DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)
install (EXPORT XrplExports
  FILE XrplTargets.cmake
  NAMESPACE Xrpl::
  DESTINATION lib/cmake/xrpl)
include (CMakePackageConfigHelpers)
write_basic_package_version_file (
  XrplConfigVersion.cmake
  VERSION ${xrpld_version}
  COMPATIBILITY SameMajorVersion)
if (is_root_project AND TARGET xrpld)
  install (TARGETS xrpld RUNTIME DESTINATION bin)
  set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
  # sample configs should not overwrite existing files
  # install if-not-exists workaround as suggested by
  # https://cmake.org/Bug/view.php?id=12646
  install(CODE "
    macro (copy_if_not_exists SRC DEST NEWNAME)
      if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
        file (INSTALL FILE_PERMISSIONS OWNER_READ OWNER_WRITE DESTINATION \"\${CMAKE_INSTALL_PREFIX}/\${DEST}\" FILES \"\${SRC}\" RENAME \"\${NEWNAME}\")
      else ()
        message (\"-- Skipping : \$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
      endif ()
    endmacro()
    copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg\" etc xrpld.cfg)
    copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
  ")
  install(CODE "
    set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
    include(create_symbolic_link)
    create_symbolic_link(xrpld${suffix} \
      \$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/rippled${suffix})
  ")
endif ()
install (
  FILES
    ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
    ${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
  DESTINATION lib/cmake/xrpl)

@@ -4,93 +4,80 @@
include(CompilationEnv)

add_library (opts INTERFACE)
add_library (Xrpl::opts ALIAS opts)
target_compile_definitions (opts
  INTERFACE
    BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
    BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT
    BOOST_CONTAINER_FWD_BAD_DEQUE
    HAS_UNCAUGHT_EXCEPTIONS=1
    $<$<BOOL:${boost_show_deprecated}>:
      BOOST_ASIO_NO_DEPRECATED
      BOOST_FILESYSTEM_NO_DEPRECATED
    >
    $<$<NOT:$<BOOL:${boost_show_deprecated}>>:
      BOOST_COROUTINES_NO_DEPRECATION_WARNING
      BOOST_BEAST_ALLOW_DEPRECATED
      BOOST_FILESYSTEM_DEPRECATED
    >
    $<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
    $<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
    $<$<BOOL:${single_io_service_thread}>:XRPL_SINGLE_IO_SERVICE_THREAD=1>
    $<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
target_compile_options (opts
  INTERFACE
    $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
    $<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
    $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
    $<$<BOOL:${profile}>:-pg>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

target_link_libraries (opts
  INTERFACE
    $<$<BOOL:${profile}>:-pg>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

if(jemalloc)
  find_package(jemalloc REQUIRED)
  target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC)
  target_link_libraries(opts INTERFACE jemalloc::jemalloc)
endif ()

#[===================================================================[
   xrpld transitive library deps via an interface library
#]===================================================================]

add_library (xrpl_syslibs INTERFACE)
add_library (Xrpl::syslibs ALIAS xrpl_syslibs)
target_link_libraries (xrpl_syslibs
  INTERFACE
    $<$<BOOL:${MSVC}>:
      legacy_stdio_definitions.lib
      Shlwapi
      kernel32
      user32
      gdi32
      winspool
      comdlg32
      advapi32
      shell32
      ole32
      oleaut32
      uuid
      odbc32
      odbccp32
      crypt32
    >
    $<$<NOT:$<BOOL:${MSVC}>>:dl>
    $<$<NOT:$<OR:$<BOOL:${MSVC}>,$<BOOL:${APPLE}>>>:rt>)

if (NOT MSVC)
  set (THREADS_PREFER_PTHREAD_FLAG ON)
  find_package (Threads)
  target_link_libraries (xrpl_syslibs INTERFACE Threads::Threads)
endif ()

add_library (xrpl_libs INTERFACE)
add_library (Xrpl::libs ALIAS xrpl_libs)
target_link_libraries (xrpl_libs INTERFACE Xrpl::syslibs)

@@ -1,146 +0,0 @@
#[===================================================================[
Protocol Autogen - Code generation for protocol wrapper classes
#]===================================================================]
set(CODEGEN_VENV_DIR
"${CMAKE_CURRENT_SOURCE_DIR}/.venv"
CACHE PATH
"Path to a Python virtual environment for code generation. A venv will be created here by setup_code_gen and used to run generation scripts."
)
# Directory paths
set(MACRO_DIR "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl/protocol/detail")
set(AUTOGEN_HEADER_DIR
"${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl/protocol_autogen"
)
set(AUTOGEN_TEST_DIR
"${CMAKE_CURRENT_SOURCE_DIR}/src/tests/libxrpl/protocol_autogen"
)
set(SCRIPTS_DIR "${CMAKE_CURRENT_SOURCE_DIR}/cmake/scripts/codegen")
# Input macro files
set(TRANSACTIONS_MACRO "${MACRO_DIR}/transactions.macro")
set(LEDGER_ENTRIES_MACRO "${MACRO_DIR}/ledger_entries.macro")
set(SFIELDS_MACRO "${MACRO_DIR}/sfields.macro")
# Python scripts and templates
set(GENERATE_TX_SCRIPT "${SCRIPTS_DIR}/generate_tx_classes.py")
set(GENERATE_LEDGER_SCRIPT "${SCRIPTS_DIR}/generate_ledger_classes.py")
set(REQUIREMENTS_FILE "${SCRIPTS_DIR}/requirements.txt")
set(MACRO_PARSER_COMMON "${SCRIPTS_DIR}/macro_parser_common.py")
set(TX_TEMPLATE "${SCRIPTS_DIR}/templates/Transaction.h.mako")
set(TX_TEST_TEMPLATE "${SCRIPTS_DIR}/templates/TransactionTests.cpp.mako")
set(LEDGER_TEMPLATE "${SCRIPTS_DIR}/templates/LedgerEntry.h.mako")
set(LEDGER_TEST_TEMPLATE "${SCRIPTS_DIR}/templates/LedgerEntryTests.cpp.mako")
set(ALL_INPUT_FILES
"${TRANSACTIONS_MACRO}"
"${LEDGER_ENTRIES_MACRO}"
"${SFIELDS_MACRO}"
"${GENERATE_TX_SCRIPT}"
"${GENERATE_LEDGER_SCRIPT}"
"${REQUIREMENTS_FILE}"
"${MACRO_PARSER_COMMON}"
"${TX_TEMPLATE}"
"${TX_TEST_TEMPLATE}"
"${LEDGER_TEMPLATE}"
"${LEDGER_TEST_TEMPLATE}"
)
# Create output directories
file(MAKE_DIRECTORY "${AUTOGEN_HEADER_DIR}/transactions")
file(MAKE_DIRECTORY "${AUTOGEN_HEADER_DIR}/ledger_entries")
file(MAKE_DIRECTORY "${AUTOGEN_TEST_DIR}/ledger_entries")
file(MAKE_DIRECTORY "${AUTOGEN_TEST_DIR}/transactions")
# Find Python3
if(NOT Python3_EXECUTABLE)
find_package(Python3 COMPONENTS Interpreter QUIET)
endif()
if(NOT Python3_EXECUTABLE)
find_program(Python3_EXECUTABLE NAMES python3 python)
endif()
if(NOT Python3_EXECUTABLE)
message(
WARNING
"Python3 not found. The 'code_gen' and 'setup_code_gen' targets will not be available."
)
return()
endif()
# Warn if pip is configured with a non-default index (may need VPN).
execute_process(
COMMAND ${Python3_EXECUTABLE} -m pip config get global.index-url
OUTPUT_VARIABLE PIP_INDEX_URL
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET
RESULT_VARIABLE PIP_CONFIG_RESULT
)
if(PIP_CONFIG_RESULT EQUAL 0 AND PIP_INDEX_URL)
if(
NOT PIP_INDEX_URL STREQUAL "https://pypi.org/simple"
AND NOT PIP_INDEX_URL STREQUAL "https://pypi.python.org/simple"
)
message(
WARNING
"Private pip index URL detected: ${PIP_INDEX_URL}\n"
"You may need to connect to VPN to access this URL."
)
endif()
endif()
# Determine which Python interpreter to use for code generation.
if(CODEGEN_VENV_DIR)
if(WIN32)
set(CODEGEN_PYTHON "${CODEGEN_VENV_DIR}/Scripts/python.exe")
else()
set(CODEGEN_PYTHON "${CODEGEN_VENV_DIR}/bin/python")
endif()
else()
set(CODEGEN_PYTHON "${Python3_EXECUTABLE}")
message(
WARNING
"CODEGEN_VENV_DIR is not set. Dependencies will be installed globally.\n"
"If this is not intended, reconfigure with:\n"
" cmake . -UCODEGEN_VENV_DIR"
)
endif()
# Custom target to create a venv and install Python dependencies.
# Run manually with: cmake --build . --target setup_code_gen
if(CODEGEN_VENV_DIR)
add_custom_target(
setup_code_gen
COMMAND ${Python3_EXECUTABLE} -m venv "${CODEGEN_VENV_DIR}"
COMMAND ${CODEGEN_PYTHON} -m pip install -r "${REQUIREMENTS_FILE}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
COMMENT "Creating venv and installing code generation dependencies..."
)
else()
add_custom_target(
setup_code_gen
COMMAND ${Python3_EXECUTABLE} -m pip install -r "${REQUIREMENTS_FILE}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
COMMENT "Installing code generation dependencies..."
)
endif()
# Custom target for code generation, excluded from ALL.
# Run manually with: cmake --build . --target code_gen
add_custom_target(
code_gen
COMMAND
${CMAKE_COMMAND} -DCODEGEN_PYTHON=${CODEGEN_PYTHON}
-DGENERATE_TX_SCRIPT=${GENERATE_TX_SCRIPT}
-DGENERATE_LEDGER_SCRIPT=${GENERATE_LEDGER_SCRIPT}
-DTRANSACTIONS_MACRO=${TRANSACTIONS_MACRO}
-DLEDGER_ENTRIES_MACRO=${LEDGER_ENTRIES_MACRO}
-DSFIELDS_MACRO=${SFIELDS_MACRO}
-DAUTOGEN_HEADER_DIR=${AUTOGEN_HEADER_DIR}
-DAUTOGEN_TEST_DIR=${AUTOGEN_TEST_DIR} -P
"${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplProtocolAutogenRun.cmake"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
COMMENT "Running protocol code generation..."
SOURCES ${ALL_INPUT_FILES}
)

@@ -1,39 +0,0 @@
#[===================================================================[
Protocol Autogen - Run script invoked by the 'code_gen' target
#]===================================================================]
# Generate transaction classes.
execute_process(
COMMAND
${CODEGEN_PYTHON} "${GENERATE_TX_SCRIPT}" "${TRANSACTIONS_MACRO}"
--header-dir "${AUTOGEN_HEADER_DIR}/transactions" --test-dir
"${AUTOGEN_TEST_DIR}/transactions" --sfields-macro "${SFIELDS_MACRO}"
RESULT_VARIABLE TX_RESULT
OUTPUT_VARIABLE TX_OUTPUT
ERROR_VARIABLE TX_ERROR
)
if(NOT TX_RESULT EQUAL 0)
message(
FATAL_ERROR
"Transaction code generation failed:\n${TX_OUTPUT}\n${TX_ERROR}\n${TX_RESULT}"
)
endif()
# Generate ledger entry classes.
execute_process(
COMMAND
${CODEGEN_PYTHON} "${GENERATE_LEDGER_SCRIPT}" "${LEDGER_ENTRIES_MACRO}"
--header-dir "${AUTOGEN_HEADER_DIR}/ledger_entries" --test-dir
"${AUTOGEN_TEST_DIR}/ledger_entries" --sfields-macro "${SFIELDS_MACRO}"
RESULT_VARIABLE LEDGER_RESULT
OUTPUT_VARIABLE LEDGER_OUTPUT
ERROR_VARIABLE LEDGER_ERROR
)
if(NOT LEDGER_RESULT EQUAL 0)
message(
FATAL_ERROR
"Ledger entry code generation failed:\n${LEDGER_OUTPUT}\n${LEDGER_ERROR}\n${TX_RESULT}"
)
endif()
message(STATUS "Protocol autogen: code generation complete")

@@ -43,10 +43,7 @@

include(CompilationEnv)

# Read environment variable
set(SANITIZERS $ENV{SANITIZERS})

# Set SANITIZERS_ENABLED flag for use in other modules
if(SANITIZERS MATCHES "address|thread|undefinedbehavior")

@@ -58,11 +55,8 @@ endif()

# Sanitizers are not supported on Windows/MSVC
if(is_msvc)
  message(FATAL_ERROR "Sanitizers are not supported on Windows/MSVC. "
    "Please unset the SANITIZERS environment variable.")
endif()

message(STATUS "Configuring sanitizers: ${SANITIZERS}")

@@ -85,21 +79,15 @@ foreach(san IN LISTS san_list)

  elseif(san STREQUAL "undefinedbehavior")
    set(enable_ubsan TRUE)
  else()
    message(FATAL_ERROR "Unsupported sanitizer type: ${san}"
      "Supported: address, thread, undefinedbehavior and their combinations.")
  endif()
endforeach()

# Validate sanitizer compatibility
if(enable_asan AND enable_tsan)
  message(FATAL_ERROR "AddressSanitizer and ThreadSanitizer are incompatible and cannot be enabled simultaneously. "
    "Use 'address' or 'thread', optionally with 'undefinedbehavior'.")
endif()

# Frame pointer is required for meaningful stack traces. Sanitizers recommend minimum of -O1 for reasonable performance

@@ -118,28 +106,25 @@ if(enable_ubsan)

  # UB sanitizer flags
  list(APPEND SANITIZER_TYPES "undefined" "float-divide-by-zero")
  if(is_clang)
    # Clang supports additional UB checks. More info here https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html
    list(APPEND SANITIZER_TYPES "unsigned-integer-overflow")
  endif()
endif()

# Configure code model for GCC on amd64
# Use large code model for ASAN to avoid relocation errors
# Use medium code model for TSAN (large is not compatible with TSAN)
set(SANITIZERS_RELOCATION_FLAGS)

# Compiler-specific configuration
if(is_gcc)
  # Disable mold, gold and lld linkers for GCC with sanitizers
  # Use default linker (bfd/ld) which is more lenient with mixed code models
  # This is needed since the size of instrumented binary exceeds the limits set by mold, lld and gold linkers
  set(use_mold OFF CACHE BOOL "Use mold linker" FORCE)
  set(use_gold OFF CACHE BOOL "Use gold linker" FORCE)
  set(use_lld OFF CACHE BOOL "Use lld linker" FORCE)
  message(STATUS " Disabled mold, gold, and lld linkers for GCC with sanitizers")

  # Suppress false positive warnings in GCC with stringop-overflow
  list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-stringop-overflow")

@@ -161,26 +146,17 @@ if(is_gcc)

  # Add sanitizer to compile and link flags
  list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
  set(SANITIZERS_LINK_FLAGS "${SANITIZERS_RELOCATION_FLAGS}" "-fsanitize=${SANITIZER_TYPES_STR}")
elseif(is_clang)
  # Add ignorelist for Clang (GCC doesn't support this)
  # Use CMAKE_SOURCE_DIR to get the path to the ignorelist
  set(IGNORELIST_PATH "${CMAKE_SOURCE_DIR}/sanitizers/suppressions/sanitizer-ignorelist.txt")
  if(NOT EXISTS "${IGNORELIST_PATH}")
    message(FATAL_ERROR "Sanitizer ignorelist not found: ${IGNORELIST_PATH}")
  endif()
  list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize-ignorelist=${IGNORELIST_PATH}")
  message(STATUS " Using sanitizer ignorelist: ${IGNORELIST_PATH}")

  # Join sanitizer flags with commas for -fsanitize option

@@ -194,12 +170,11 @@ endif()

message(STATUS " Compile flags: ${SANITIZERS_COMPILE_FLAGS}")
message(STATUS " Link flags: ${SANITIZERS_LINK_FLAGS}")

# Apply the sanitizer flags to the 'common' interface library
# This is the same library used by XrplCompiler.cmake
target_compile_options(common INTERFACE
  $<$<COMPILE_LANGUAGE:CXX>:${SANITIZERS_COMPILE_FLAGS}>
  $<$<COMPILE_LANGUAGE:C>:${SANITIZERS_COMPILE_FLAGS}>
)

# Apply linker flags

@@ -6,57 +6,40 @@ include(CompilationEnv)

get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)

set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)

if (NOT is_multiconfig)
  if (NOT CMAKE_BUILD_TYPE)
    message (STATUS "Build type not specified - defaulting to Release")
    set (CMAKE_BUILD_TYPE Release CACHE STRING "build type" FORCE)
  elseif (NOT (CMAKE_BUILD_TYPE STREQUAL Debug OR CMAKE_BUILD_TYPE STREQUAL Release))
    # for simplicity, these are the only two config types we care about. Limiting
    # the build types simplifies dealing with external project builds especially
    message (FATAL_ERROR " *** Only Debug or Release build types are currently supported ***")
  endif ()
endif ()

if (is_clang) # both Clang and AppleClang
  if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
      CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
    message (FATAL_ERROR "This project requires clang 16 or later")
  endif ()
elseif (is_gcc)
  if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
    message (FATAL_ERROR "This project requires GCC 12 or later")
  endif ()
endif ()

# check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
  message (FATAL_ERROR "Builds (in-source) are not allowed in "
    "${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles "
    "directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.")
endif ()

if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
  message (FATAL_ERROR "Visual Studio 32-bit build is not supported.")
endif ()

if (APPLE AND NOT HOMEBREW)
  find_program (HOMEBREW brew)
endif ()

@@ -4,18 +4,17 @@
include(CompilationEnv)

if("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
  set(is_ci TRUE)
else()
  set(is_ci FALSE)
endif()

get_directory_property(has_parent PARENT_DIRECTORY)
if(has_parent)
  set(is_root_project OFF)
else()
  set(is_root_project ON)
endif()

option(assert "Enables asserts, even in release builds" OFF)

@@ -24,132 +23,106 @@ option(xrpld "Build xrpld" ON)

option(tests "Build tests" ON)
if(tests)
  # This setting allows making a separate workflow to test fees other than default 10
  if(NOT UNIT_TEST_REFERENCE_FEE)
    set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "")
  endif()
endif()

option(unity "Creates a build using UNITY support in cmake." OFF)
if(unity)
  if(NOT is_ci)
    set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
  endif()
  set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif()
if(is_clang AND is_linux)
  option(voidstar "Enable Antithesis instrumentation." OFF)
endif()
if(is_gcc OR is_clang)
  include(ProcessorCount)
  ProcessorCount(PROCESSOR_COUNT)
  option(coverage "Generates coverage info." OFF)
  option(profile "Add profiling flags" OFF)
  set(coverage_format "html-details" CACHE STRING
    "Output format of the coverage report.")
  set(coverage_extra_args "" CACHE STRING
    "Additional arguments to pass to gcovr.")
  option(wextra "compile with extra gcc/clang warnings enabled" ON)
else()
  set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
  set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
  set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif()
if(is_linux AND NOT SANITIZER)
  option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
  option(static "link protobuf, openssl, libc++, and boost statically" ON)
  option(perf "Enables flags that assist with perf recording" OFF)
  option(use_gold "enables detection of gold (binutils) linker" ON)
  option(use_mold "enables detection of mold (binutils) linker" ON)
  # Set a default value for the log flag based on the build type.
  # This provides a sensible default (on for debug, off for release)
  # while still allowing the user to override it for any build.
  if(CMAKE_BUILD_TYPE STREQUAL "Debug")
    set(TRUNCATED_LOGS_DEFAULT ON)
  else()
    set(TRUNCATED_LOGS_DEFAULT OFF)
  endif()
  option(TRUNCATED_THREAD_NAME_LOGS
    "Show warnings about truncated thread names on Linux."
    ${TRUNCATED_LOGS_DEFAULT}
  )
  if(TRUNCATED_THREAD_NAME_LOGS)
    add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS)
  endif()
else()
  # we are not ready to allow shared-libs on windows because it would require
  # export declarations. On macos it's more feasible, but static openssl
  # produces odd linker errors, thus we disable shared lib builds for now.
  set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE)
  set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
  set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
  set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
  set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()
if(is_clang)
  option(use_lld "enables detection of lld linker" ON)
else()
  set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
  "Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc
  "Force a local build of gRPC instead of looking for an installed version." OFF)

# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
  "Prevents unit test definitions from being inserted into global table"
  OFF)
option(single_io_service_thread
  "Restricts the number of threads calling io_context::run to one. \
This can be useful when debugging."
  OFF)
option(boost_show_deprecated
  "Allow boost to fail on deprecated usage. Only useful if you're trying\
 to find deprecated calls."
  OFF)
if(WIN32)
  option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
  set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()
if(coverage)
  message(STATUS "coverage build requested - forcing Debug build")
  set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif()

@@ -1,26 +1,20 @@
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
if (validator_keys)
  git_branch (current_branch)
  # default to tracking VK master branch unless we are on release
  if (NOT (current_branch STREQUAL "release"))
    set (current_branch "master")
  endif ()
  message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")
  FetchContent_Declare (
    validator_keys
    GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
    GIT_TAG "${current_branch}"
  )
  FetchContent_MakeAvailable(validator_keys)
  set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
  install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
endif ()

@@ -4,12 +4,12 @@
file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
foreach(line_ ${BUILD_INFO})
  if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
    set(xrpld_version ${CMAKE_MATCH_1})
  endif()
endforeach()
if(xrpld_version)
  message(STATUS "xrpld version: ${xrpld_version}")
else()
  message(FATAL_ERROR "unable to determine xrpld version")
endif()

@@ -13,28 +13,25 @@ include(isolate_headers)
# add_module(parent b)
# target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
function(add_module parent name)
  set(target ${PROJECT_NAME}.lib${parent}.${name})
  add_library(${target} OBJECT)
  file(GLOB_RECURSE sources CONFIGURE_DEPENDS
    "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp"
  )
  target_sources(${target} PRIVATE ${sources})
  target_include_directories(${target} PUBLIC
    "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>"
  )
  isolate_headers(
    ${target}
    "${CMAKE_CURRENT_SOURCE_DIR}/include"
    "${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}"
    PUBLIC
  )
  isolate_headers(
    ${target}
    "${CMAKE_CURRENT_SOURCE_DIR}/src"
    "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
    PRIVATE
  )
endfunction()

@@ -1,21 +1,20 @@
# file(CREATE_SYMLINK) only works on Windows with administrator privileges.
# https://stackoverflow.com/a/61244115/618906
function(create_symbolic_link target link)
  if(WIN32)
    if(NOT IS_SYMLINK "${link}")
      if(NOT IS_ABSOLUTE "${target}")
        # Relative links work do not work on Windows.
        set(target "${link}/../${target}")
      endif()
      file(TO_NATIVE_PATH "${target}" target)
      file(TO_NATIVE_PATH "${link}" link)
      execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}")
    endif()
  else()
    file(CREATE_LINK "${target}" "${link}" SYMBOLIC)
  endif()
  if(NOT IS_SYMLINK "${link}")
    message(ERROR "failed to create symlink: <${link}>")
  endif()
endfunction()

View File

@@ -1,63 +1,50 @@
 include(CompilationEnv)
 include(XrplSanitizers)
-find_package(
-  Boost
-  REQUIRED
-  COMPONENTS
-    chrono
-    container
-    context
-    date_time
-    filesystem
-    json
-    program_options
-    regex
-    system
-    thread
-)
+find_package(Boost REQUIRED
+  COMPONENTS
+    chrono
+    container
+    coroutine
+    date_time
+    filesystem
+    json
+    program_options
+    regex
+    system
+    thread
+)
 add_library(xrpl_boost INTERFACE)
 add_library(Xrpl::boost ALIAS xrpl_boost)
-target_link_libraries(
-  xrpl_boost
-  INTERFACE
-    Boost::headers
-    Boost::chrono
-    Boost::container
-    Boost::context
-    Boost::date_time
-    Boost::filesystem
-    Boost::json
-    Boost::process
-    Boost::program_options
-    Boost::regex
-    Boost::thread
-)
+target_link_libraries(xrpl_boost
+  INTERFACE
+    Boost::headers
+    Boost::chrono
+    Boost::container
+    Boost::coroutine
+    Boost::date_time
+    Boost::filesystem
+    Boost::json
+    Boost::process
+    Boost::program_options
+    Boost::regex
+    Boost::system
+    Boost::thread)
 if(Boost_COMPILER)
   target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
 endif()
-# GCC 14+ has a false positive -Wuninitialized warning in Boost.Coroutine2's
-# state.hpp when compiled with -O3. This is due to GCC's intentional behavior
-# change (Bug #98871, #119388) where warnings from inlined system header code
-# are no longer suppressed by -isystem. The warning occurs in operator|= in
-# boost/coroutine2/detail/state.hpp when inlined from push_control_block::destroy().
-# See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=119388
-if(is_gcc AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 14)
-  target_compile_options(xrpl_boost INTERFACE -Wno-uninitialized)
-endif()
-# Boost.Context's ucontext backend has ASAN fiber-switching annotations
-# (start/finish_switch_fiber) that are compiled in when BOOST_USE_ASAN is defined.
-# This tells ASAN about coroutine stack switches, preventing false positive
-# stack-use-after-scope errors. BOOST_USE_UCONTEXT ensures the ucontext backend
-# is selected (fcontext does not support ASAN annotations).
-# These defines must match what Boost was compiled with (see conan/profiles/sanitizers).
-if(enable_asan)
-  target_compile_definitions(
-    xrpl_boost
-    INTERFACE BOOST_USE_ASAN BOOST_USE_UCONTEXT
-  )
-endif()
+if(SANITIZERS_ENABLED AND is_clang)
+  # TODO: gcc does not support -fsanitize-blacklist...can we do something else
+  # for gcc ?
+  if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
+    get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
+  endif()
+  message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
+  file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
+  target_compile_options(opts
+    INTERFACE
+    # ignore boost headers for sanitizing
+    -fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
+endif()

View File

@@ -38,11 +38,11 @@ include(create_symbolic_link)
#
# isolate_headers(target A B scope)
function(isolate_headers target A B scope)
  file(RELATIVE_PATH C "${A}" "${B}")
  set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}")
  set(Y "${X}/${C}")
  cmake_path(GET Y PARENT_PATH parent)
  file(MAKE_DIRECTORY "${parent}")
  create_symbolic_link("${B}" "${Y}")
  target_include_directories(${target} ${scope} "$<BUILD_INTERFACE:${X}>")
endfunction()

View File

@@ -1,211 +0,0 @@
#!/usr/bin/env python3
"""
Generate C++ wrapper classes for XRP Ledger entry types from ledger_entries.macro.
This script parses the ledger_entries.macro file and generates type-safe wrapper
classes for each ledger entry type, similar to the transaction wrapper classes.
Uses pcpp to preprocess the macro file and pyparsing to parse the DSL.
"""
import io
import argparse
from pathlib import Path
import pyparsing as pp
# Import common utilities
from macro_parser_common import (
CppCleaner,
parse_sfields_macro,
parse_field_list,
generate_cpp_class,
generate_from_template,
clear_output_directory,
)
def create_ledger_entry_parser():
"""Create a pyparsing parser for LEDGER_ENTRY macros.
This parser extracts the full LEDGER_ENTRY macro call and parses its arguments
using pyparsing's nesting-aware delimited list parsing.
"""
# Match the exact words
ledger_entry = pp.Keyword("LEDGER_ENTRY") | pp.Keyword("LEDGER_ENTRY_DUPLICATE")
# Define nested structures so pyparsing protects them
nested_braces = pp.original_text_for(pp.nested_expr("{", "}"))
nested_parens = pp.original_text_for(pp.nested_expr("(", ")"))
# Define standard text (anything that isn't a comma, parens, or braces)
plain_text = pp.Word(pp.printables + " \t\n", exclude_chars=",{}()")
# A single argument is any combination of the above
single_arg = pp.Combine(pp.OneOrMore(nested_braces | nested_parens | plain_text))
single_arg.set_parse_action(lambda t: t[0].strip())
# The arguments are a delimited list
args_list = pp.DelimitedList(single_arg)
# The full macro: LEDGER_ENTRY(args) or LEDGER_ENTRY_DUPLICATE(args)
macro_parser = (
ledger_entry + pp.Suppress("(") + pp.Group(args_list)("args") + pp.Suppress(")")
)
return macro_parser
def parse_ledger_entry_args(args_list):
"""Parse the arguments of a LEDGER_ENTRY macro call.
Args:
args_list: A list of parsed arguments from pyparsing, e.g.,
['ltACCOUNT_ROOT', '0x0061', 'AccountRoot', 'account', '({...})']
Returns:
A dict with parsed ledger entry information.
"""
if len(args_list) < 5:
raise ValueError(
f"Expected at least 5 parts in LEDGER_ENTRY, got {len(args_list)}: {args_list}"
)
tag = args_list[0]
value = args_list[1]
name = args_list[2]
rpc_name = args_list[3]
fields_str = args_list[-1]
# Parse fields: ({field1, field2, ...})
fields = parse_field_list(fields_str)
return {
"tag": tag,
"value": value,
"name": name,
"rpc_name": rpc_name,
"fields": fields,
}
def parse_macro_file(file_path):
"""Parse the ledger_entries.macro file and return a list of ledger entry definitions.
Uses pcpp to preprocess the file and pyparsing to parse the LEDGER_ENTRY macros.
"""
with open(file_path, "r") as f:
c_code = f.read()
# Step 1: Clean the C++ code using pcpp
cleaner = CppCleaner("LEDGER_ENTRY_INCLUDE", "LEDGER_ENTRY")
cleaner.parse(c_code)
out = io.StringIO()
cleaner.write(out)
clean_text = out.getvalue()
# Step 2: Parse the clean text using pyparsing
parser = create_ledger_entry_parser()
entries = []
for match, _, _ in parser.scan_string(clean_text):
# Extract the macro name and arguments
raw_args = match.args
# Parse the arguments
entry_data = parse_ledger_entry_args(raw_args)
entries.append(entry_data)
return entries
def main():
parser = argparse.ArgumentParser(
description="Generate C++ ledger entry classes from ledger_entries.macro"
)
parser.add_argument("macro_path", help="Path to ledger_entries.macro")
parser.add_argument(
"--header-dir",
help="Output directory for header files",
default="include/xrpl/protocol_autogen/ledger_entries",
)
parser.add_argument(
"--test-dir",
help="Output directory for test files (optional)",
default=None,
)
parser.add_argument(
"--sfields-macro",
help="Path to sfields.macro (default: auto-detect from macro_path)",
)
parser.add_argument("--venv-dir", help=argparse.SUPPRESS)
args = parser.parse_args()
# Parse the macro file to get ledger entry names
entries = parse_macro_file(args.macro_path)
# Auto-detect sfields.macro path if not provided
if args.sfields_macro:
sfields_path = Path(args.sfields_macro)
else:
# Assume sfields.macro is in the same directory as ledger_entries.macro
macro_path = Path(args.macro_path)
sfields_path = macro_path.parent / "sfields.macro"
# Parse sfields.macro to get field type information
print(f"Parsing {sfields_path}...")
field_types = parse_sfields_macro(sfields_path)
print(
f"Found {len(field_types)} field definitions ({sum(1 for f in field_types.values() if f['typed'])} typed, {sum(1 for f in field_types.values() if not f['typed'])} untyped)\n"
)
print(f"Found {len(entries)} ledger entries\n")
for entry in entries:
print(f"Ledger Entry: {entry['name']}")
print(f" Tag: {entry['tag']}")
print(f" Value: {entry['value']}")
print(f" RPC Name: {entry['rpc_name']}")
print(f" Fields: {len(entry['fields'])}")
for field in entry["fields"]:
mpt_info = f" ({field['mpt_support']})" if "mpt_support" in field else ""
print(f" - {field['name']}: {field['requirement']}{mpt_info}")
print()
# Set up template directory
script_dir = Path(__file__).parent
template_dir = script_dir / "templates"
# Generate C++ classes
header_dir = Path(args.header_dir)
header_dir.mkdir(parents=True, exist_ok=True)
# Clear existing generated files before regenerating
clear_output_directory(header_dir)
for entry in entries:
generate_cpp_class(
entry, header_dir, template_dir, field_types, "LedgerEntry.h.mako"
)
print(f"\nGenerated {len(entries)} ledger entry classes")
# Generate unit tests if --test-dir is provided
if args.test_dir:
test_dir = Path(args.test_dir)
test_dir.mkdir(parents=True, exist_ok=True)
# Clear existing generated test files before regenerating
clear_output_directory(test_dir)
for entry in entries:
# Fields are already enriched from generate_cpp_class above
generate_from_template(
entry, test_dir, template_dir, "LedgerEntryTests.cpp.mako", "Tests.cpp"
)
print(f"\nGenerated {len(entries)} ledger entry test files")
if __name__ == "__main__":
main()
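As a usage sketch for the parser above (assuming create_ledger_entry_parser and parse_ledger_entry_args are importable from this script), the macro calls it consumes are shaped like the hypothetical sample below; the entry name, code, and fields are illustrative, not taken from ledger_entries.macro:

sample = """
LEDGER_ENTRY(ltEXAMPLE, 0x00ff, Example, example, ({
    {sfAccount, soeREQUIRED},
    {sfBalance, soeOPTIONAL},
}))
"""

parser = create_ledger_entry_parser()
for match, _, _ in parser.scan_string(sample):
    entry = parse_ledger_entry_args(match.args)
    print(entry["name"], entry["rpc_name"])        # -> Example example
    print([f["name"] for f in entry["fields"]])    # -> ['sfAccount', 'sfBalance']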

View File

@@ -1,231 +0,0 @@
#!/usr/bin/env python3
"""
Parse transactions.macro file to extract transaction information
and generate C++ classes for each transaction type.
Uses pcpp to preprocess the macro file and pyparsing to parse the DSL.
"""
import io
import argparse
from pathlib import Path
import pyparsing as pp
# Import common utilities
from macro_parser_common import (
CppCleaner,
parse_sfields_macro,
parse_field_list,
generate_cpp_class,
generate_from_template,
clear_output_directory,
)
def create_transaction_parser():
"""Create a pyparsing parser for TRANSACTION macros.
This parser extracts the full TRANSACTION macro call and parses its arguments
using pyparsing's nesting-aware delimited list parsing.
"""
# Define nested structures so pyparsing protects them
nested_braces = pp.original_text_for(pp.nested_expr("{", "}"))
nested_parens = pp.original_text_for(pp.nested_expr("(", ")"))
# Define standard text (anything that isn't a comma, parens, or braces)
plain_text = pp.Word(pp.printables + " \t\n", exclude_chars=",{}()")
# A single argument is any combination of the above
single_arg = pp.Combine(pp.OneOrMore(nested_braces | nested_parens | plain_text))
single_arg.set_parse_action(lambda t: t[0].strip())
# The arguments are a delimited list
args_list = pp.DelimitedList(single_arg)
# The full macro: TRANSACTION(args)
macro_parser = (
pp.Keyword("TRANSACTION")
+ pp.Suppress("(")
+ pp.Group(args_list)("args")
+ pp.Suppress(")")
)
return macro_parser
def parse_transaction_args(args_list):
"""Parse the arguments of a TRANSACTION macro call.
Args:
args_list: A list of parsed arguments from pyparsing, e.g.,
['ttPAYMENT', '0', 'Payment', 'Delegation::delegable',
'uint256{}', 'createAcct', '({...})']
Returns:
A dict with parsed transaction information.
"""
if len(args_list) < 7:
raise ValueError(
f"Expected at least 7 parts in TRANSACTION, got {len(args_list)}: {args_list}"
)
tag = args_list[0]
value = args_list[1]
name = args_list[2]
delegable = args_list[3]
amendments = args_list[4]
privileges = args_list[5]
fields_str = args_list[-1]
# Parse fields: ({field1, field2, ...})
fields = parse_field_list(fields_str)
return {
"tag": tag,
"value": value,
"name": name,
"delegable": delegable,
"amendments": amendments,
"privileges": privileges,
"fields": fields,
}
def parse_macro_file(filepath):
"""Parse the transactions.macro file.
Uses pcpp to preprocess the file and pyparsing to parse the TRANSACTION macros.
"""
with open(filepath, "r") as f:
c_code = f.read()
# Step 1: Clean the C++ code using pcpp
cleaner = CppCleaner("TRANSACTION_INCLUDE", "TRANSACTION")
cleaner.parse(c_code)
out = io.StringIO()
cleaner.write(out)
clean_text = out.getvalue()
# Step 2: Parse the clean text using pyparsing
parser = create_transaction_parser()
transactions = []
for match, _, _ in parser.scan_string(clean_text):
# Extract the macro name and arguments
raw_args = match.args
# Parse the arguments
tx_data = parse_transaction_args(raw_args)
transactions.append(tx_data)
return transactions
# TransactionBase is a static file in the repository at:
# - include/xrpl/protocol/TransactionBase.h
# - src/libxrpl/protocol/TransactionBase.cpp
# It is NOT generated by this script.
def main():
parser = argparse.ArgumentParser(
description="Generate C++ transaction classes from transactions.macro"
)
parser.add_argument("macro_path", help="Path to transactions.macro")
parser.add_argument(
"--header-dir",
help="Output directory for header files",
default="include/xrpl/protocol_autogen/transactions",
)
parser.add_argument(
"--test-dir",
help="Output directory for test files (optional)",
default=None,
)
parser.add_argument(
"--sfields-macro",
help="Path to sfields.macro (default: auto-detect from macro_path)",
)
parser.add_argument("--venv-dir", help=argparse.SUPPRESS)
args = parser.parse_args()
# Parse the macro file to get transaction names
transactions = parse_macro_file(args.macro_path)
# Auto-detect sfields.macro path if not provided
if args.sfields_macro:
sfields_path = Path(args.sfields_macro)
else:
# Assume sfields.macro is in the same directory as transactions.macro
macro_path = Path(args.macro_path)
sfields_path = macro_path.parent / "sfields.macro"
# Parse sfields.macro to get field type information
print(f"Parsing {sfields_path}...")
field_types = parse_sfields_macro(sfields_path)
print(
f"Found {len(field_types)} field definitions ({sum(1 for f in field_types.values() if f['typed'])} typed, {sum(1 for f in field_types.values() if not f['typed'])} untyped)\n"
)
print(f"Found {len(transactions)} transactions\n")
for tx in transactions:
print(f"Transaction: {tx['name']}")
print(f" Tag: {tx['tag']}")
print(f" Value: {tx['value']}")
print(f" Fields: {len(tx['fields'])}")
for field in tx["fields"]:
print(f" - {field['name']}: {field['requirement']}")
print()
# Set up output directory
header_dir = Path(args.header_dir)
header_dir.mkdir(parents=True, exist_ok=True)
# Clear existing generated files before regenerating
clear_output_directory(header_dir)
print(f"\nGenerating header-only template classes...")
print(f" Headers: {header_dir}\n")
# Set up template directory
script_dir = Path(__file__).parent
template_dir = script_dir / "templates"
generated_files = []
for tx_info in transactions:
header_path = generate_cpp_class(
tx_info, header_dir, template_dir, field_types, "Transaction.h.mako"
)
generated_files.append(header_path)
print(f" Generated: {tx_info['name']}.h")
print(
f"\nGenerated {len(transactions)} transaction classes ({len(generated_files)} header files)"
)
print(f" Headers: {header_dir.absolute()}")
# Generate unit tests if --test-dir is provided
if args.test_dir:
test_dir = Path(args.test_dir)
test_dir.mkdir(parents=True, exist_ok=True)
# Clear existing generated test files before regenerating
clear_output_directory(test_dir)
for tx_info in transactions:
# Fields are already enriched from generate_cpp_class above
generate_from_template(
tx_info,
test_dir,
template_dir,
"TransactionTests.cpp.mako",
"Tests.cpp",
)
print(f"\nGenerated {len(transactions)} transaction test files")
if __name__ == "__main__":
main()
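A similar sketch for the transaction parser (again assuming the functions above are importable), using the argument shape quoted in the parse_transaction_args docstring; the field list is illustrative:

sample = """
TRANSACTION(ttPAYMENT, 0, Payment, Delegation::delegable, uint256{}, createAcct, ({
    {sfDestination, soeREQUIRED},
    {sfAmount, soeREQUIRED, soeMPTSupported},
}))
"""

parser = create_transaction_parser()
for match, _, _ in parser.scan_string(sample):
    tx = parse_transaction_args(match.args)
    print(tx["name"], tx["privileges"])          # -> Payment createAcct
    print([f["name"] for f in tx["fields"]])     # -> ['sfDestination', 'sfAmount']
    print(tx["fields"][1]["supports_mpt"])       # -> True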

View File

@@ -1,297 +0,0 @@
#!/usr/bin/env python3
"""
Common utilities for parsing XRP Ledger macro files.
This module provides shared functionality for parsing transactions.macro
and ledger_entries.macro files using pcpp and pyparsing.
"""
import re
import shutil
from pathlib import Path
import pyparsing as pp
from pcpp import Preprocessor
def clear_output_directory(directory):
"""Clear all generated files from an output directory.
Removes all .h and .cpp files from the directory, but preserves
the directory itself and any subdirectories.
Args:
directory: Path to the directory to clear
"""
dir_path = Path(directory)
if not dir_path.exists():
return
# Remove generated files (headers and source files)
for pattern in ["*.h", "*.cpp"]:
for file_path in dir_path.glob(pattern):
file_path.unlink()
print(f"Cleared output directory: {dir_path}")
class CppCleaner(Preprocessor):
"""C preprocessor that removes C++ noise while preserving macro calls."""
def __init__(self, macro_include_name, macro_name):
"""
Initialize the preprocessor.
Args:
macro_include_name: The name of the include flag to set to 0
(e.g., "TRANSACTION_INCLUDE" or "LEDGER_ENTRY_INCLUDE")
macro_name: The name of the macro to define so #if !defined() checks pass
(e.g., "TRANSACTION" or "LEDGER_ENTRY")
"""
super(CppCleaner, self).__init__()
# Define flags so #if blocks evaluate correctly
# We set the include flag to 0 so includes are skipped
self.define(f"{macro_include_name} 0")
# Define the macro so #if !defined(MACRO) / #error checks pass
# We define it to expand to itself so the macro calls remain in the output
# for pyparsing to find and parse
self.define(f"{macro_name}(...) {macro_name}(__VA_ARGS__)")
# Suppress line directives
self.line_directive = None
def on_error(self, file, line, msg):
# Ignore #error directives
pass
def on_include_not_found(
self, is_malformed, is_system_include, curdir, includepath
):
# Ignore missing headers
pass
def parse_sfields_macro(sfields_path):
"""
Parse sfields.macro to determine which fields are typed vs untyped.
Returns a dict mapping field names to their type information:
{
'sfMemos': {'typed': False, 'stiSuffix': 'ARRAY', 'typeData': {...}},
'sfAmount': {'typed': True, 'stiSuffix': 'AMOUNT', 'typeData': {...}},
...
}
"""
# Mapping from STI suffix to C++ type for untyped fields
UNTYPED_TYPE_MAP = {
"ARRAY": {
"getter_method": "getFieldArray",
"setter_method": "setFieldArray",
"setter_use_brackets": False,
"setter_type": "STArray const&",
"return_type": "STArray const&",
"return_type_optional": "std::optional<std::reference_wrapper<STArray const>>",
},
"OBJECT": {
"getter_method": "getFieldObject",
"setter_method": "setFieldObject",
"setter_use_brackets": False,
"setter_type": "STObject const&",
"return_type": "STObject",
"return_type_optional": "std::optional<STObject>",
},
"PATHSET": {
"getter_method": "getFieldPathSet",
"setter_method": "setFieldPathSet",
"setter_use_brackets": False,
"setter_type": "STPathSet const&",
"return_type": "STPathSet const&",
"return_type_optional": "std::optional<std::reference_wrapper<STPathSet const>>",
},
}
field_info = {}
with open(sfields_path, "r") as f:
content = f.read()
# Parse TYPED_SFIELD entries
# Format: TYPED_SFIELD(sfName, stiSuffix, fieldValue, ...)
typed_pattern = r"TYPED_SFIELD\s*\(\s*(\w+)\s*,\s*(\w+)\s*,"
for match in re.finditer(typed_pattern, content):
field_name = match.group(1)
sti_suffix = match.group(2)
field_info[field_name] = {
"typed": True,
"stiSuffix": sti_suffix,
"typeData": {
"getter_method": "at",
"setter_method": "",
"setter_use_brackets": True,
"setter_type": f"std::decay_t<typename SF_{sti_suffix}::type::value_type> const&",
"return_type": f"SF_{sti_suffix}::type::value_type",
"return_type_optional": f"protocol_autogen::Optional<SF_{sti_suffix}::type::value_type>",
},
}
# Parse UNTYPED_SFIELD entries
# Format: UNTYPED_SFIELD(sfName, stiSuffix, fieldValue, ...)
untyped_pattern = r"UNTYPED_SFIELD\s*\(\s*(\w+)\s*,\s*(\w+)\s*,"
for match in re.finditer(untyped_pattern, content):
field_name = match.group(1)
sti_suffix = match.group(2)
type_data = UNTYPED_TYPE_MAP.get(
sti_suffix, UNTYPED_TYPE_MAP.get("OBJECT")
) # Default to OBJECT
field_info[field_name] = {
"typed": False,
"stiSuffix": sti_suffix,
"typeData": type_data,
}
return field_info
def create_field_list_parser():
"""Create a pyparsing parser for field lists like '({...})'."""
# A field identifier (e.g., sfDestination, soeREQUIRED, soeMPTSupported)
field_identifier = pp.Word(pp.alphas + "_", pp.alphanums + "_")
# A single field definition: {sfName, soeREQUIRED, ...}
# Allow optional trailing comma inside the braces
field_def = (
pp.Suppress("{")
+ pp.Group(pp.DelimitedList(field_identifier) + pp.Optional(pp.Suppress(",")))(
"parts"
)
+ pp.Suppress("}")
)
# The field list: ({field1, field2, ...}) or ({}) for empty lists
# Allow optional trailing comma after the last field definition
field_list = (
pp.Suppress("(")
+ pp.Suppress("{")
+ pp.Group(
pp.Optional(pp.DelimitedList(field_def) + pp.Optional(pp.Suppress(",")))
)("fields")
+ pp.Suppress("}")
+ pp.Suppress(")")
)
return field_list
def parse_field_list(fields_str):
"""Parse a field list string like '({...})' using pyparsing.
Args:
fields_str: A string like '({
{sfDestination, soeREQUIRED},
{sfAmount, soeREQUIRED, soeMPTSupported}
})'
Returns:
A list of field dicts with 'name', 'requirement', 'flags', and 'supports_mpt'.
"""
parser = create_field_list_parser()
try:
result = parser.parse_string(fields_str, parse_all=True)
fields = []
for field_parts in result.fields:
if len(field_parts) < 2:
continue
field_name = field_parts[0]
requirement = field_parts[1]
flags = list(field_parts[2:]) if len(field_parts) > 2 else []
supports_mpt = "soeMPTSupported" in flags
fields.append(
{
"name": field_name,
"requirement": requirement,
"flags": flags,
"supports_mpt": supports_mpt,
}
)
return fields
except pp.ParseException as e:
raise ValueError(f"Failed to parse field list: {e}")
def enrich_fields_with_type_data(entry_info, field_types):
"""Enrich field information with type data from sfields.macro.
Args:
entry_info: Dict containing entry information (name, fields, etc.)
field_types: Dict mapping field names to type information
Modifies entry_info["fields"] in place.
"""
for field in entry_info["fields"]:
field_name = field["name"]
if field_name in field_types:
field["typed"] = field_types[field_name]["typed"]
field["paramName"] = field_name[2].lower() + field_name[3:]
field["stiSuffix"] = field_types[field_name]["stiSuffix"]
field["typeData"] = field_types[field_name]["typeData"]
else:
# Unknown field - assume typed for safety
field["typed"] = True
field["paramName"] = ""
field["stiSuffix"] = None
field["typeData"] = None
def generate_from_template(
entry_info, output_dir, template_dir, template_name, output_suffix
):
"""Generate a file from a Mako template.
Args:
entry_info: Dict containing entry information (name, fields, etc.)
Fields should already be enriched with type data.
output_dir: Output directory for generated files
template_dir: Directory containing Mako templates
template_name: Name of the Mako template file to use
output_suffix: Suffix for the output file (e.g., ".h" or "Tests.cpp")
Returns:
Path to the generated file
"""
from mako.template import Template
template_path = Path(template_dir) / template_name
template = Template(filename=str(template_path))
# Render the template - pass entry_info directly so templates can access any field
content = template.render(**entry_info)
# Write output file in binary mode to avoid any line ending conversion
output_path = Path(output_dir) / f"{entry_info['name']}{output_suffix}"
with open(output_path, "wb") as f:
f.write(content.encode("utf-8"))
print(f"Generated {output_path}")
return output_path
def generate_cpp_class(
entry_info, header_dir, template_dir, field_types, template_name
):
"""Generate C++ header file from a Mako template.
Args:
entry_info: Dict containing entry information (name, fields, etc.)
header_dir: Output directory for generated header files
template_dir: Directory containing Mako templates
field_types: Dict mapping field names to type information
template_name: Name of the Mako template file to use
"""
# Enrich field information with type data
enrich_fields_with_type_data(entry_info, field_types)
# Generate the header file
generate_from_template(entry_info, header_dir, template_dir, template_name, ".h")
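For reference, the regexes in parse_sfields_macro above only capture the first two macro arguments; a small sketch (sample lines are illustrative, not copied from sfields.macro) shows the shape they expect and one subtlety of the two-pass approach:

import re

content = """
TYPED_SFIELD(sfAmount, AMOUNT, 1)
UNTYPED_SFIELD(sfMemos, ARRAY, 9)
"""

typed_pattern = r"TYPED_SFIELD\s*\(\s*(\w+)\s*,\s*(\w+)\s*,"
untyped_pattern = r"UNTYPED_SFIELD\s*\(\s*(\w+)\s*,\s*(\w+)\s*,"

# The typed pattern has no word boundary, so it also matches the tail of
# "UNTYPED_SFIELD(..."; because the untyped pass runs second and overwrites
# those entries, the resulting field_info dict still ends up correct.
print(re.findall(typed_pattern, content))    # [('sfAmount', 'AMOUNT'), ('sfMemos', 'ARRAY')]
print(re.findall(untyped_pattern, content))  # [('sfMemos', 'ARRAY')]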

View File

@@ -1,13 +0,0 @@
# Python dependencies for XRP Ledger code generation scripts
#
# These packages are required to run the code generation scripts that
# parse macro files and generate C++ wrapper classes.
# C preprocessor for Python - used to preprocess macro files
pcpp>=1.30
# Parser combinator library - used to parse the macro DSL
pyparsing>=3.0.0
# Template engine - used to generate C++ code from templates
Mako>=1.2.0

View File

@@ -1,216 +0,0 @@
// This file is auto-generated. Do not edit.
#pragma once
#include <xrpl/protocol/STLedgerEntry.h>
#include <xrpl/protocol/STParsedJSON.h>
#include <xrpl/protocol/jss.h>
#include <xrpl/protocol_autogen/LedgerEntryBase.h>
#include <xrpl/protocol_autogen/LedgerEntryBuilderBase.h>
#include <xrpl/json/json_value.h>
#include <stdexcept>
#include <optional>
namespace xrpl::ledger_entries {
class ${name}Builder;
/**
* @brief Ledger Entry: ${name}
*
* Type: ${tag} (${value})
* RPC Name: ${rpc_name}
*
* Immutable wrapper around SLE providing type-safe field access.
* Use ${name}Builder to construct new ledger entries.
*/
class ${name} : public LedgerEntryBase
{
public:
static constexpr LedgerEntryType entryType = ${tag};
/**
* @brief Construct a ${name} ledger entry wrapper from an existing SLE object.
* @throws std::runtime_error if the ledger entry type doesn't match.
*/
explicit ${name}(std::shared_ptr<SLE const> sle)
: LedgerEntryBase(std::move(sle))
{
// Verify ledger entry type
if (sle_->getType() != entryType)
{
throw std::runtime_error("Invalid ledger entry type for ${name}");
}
}
// Ledger entry-specific field getters
% for field in fields:
% if field['typed']:
/**
* @brief Get ${field['name']} (${field['requirement']})
% if field.get('mpt_support'):
* MPT Support: ${field['mpt_support']}
% endif
% if field['requirement'] == 'soeREQUIRED':
* @return The field value.
% else:
* @return The field value, or std::nullopt if not present.
% endif
*/
% if field['requirement'] == 'soeREQUIRED':
[[nodiscard]]
${field['typeData']['return_type']}
get${field['name'][2:]}() const
{
return this->sle_->${field['typeData']['getter_method']}(${field['name']});
}
% else:
[[nodiscard]]
${field['typeData']['return_type_optional']}
get${field['name'][2:]}() const
{
if (has${field['name'][2:]}())
return this->sle_->${field['typeData']['getter_method']}(${field['name']});
return std::nullopt;
}
/**
* @brief Check if ${field['name']} is present.
* @return True if the field is present, false otherwise.
*/
[[nodiscard]]
bool
has${field['name'][2:]}() const
{
return this->sle_->isFieldPresent(${field['name']});
}
% endif
% else:
/**
* @brief Get ${field['name']} (${field['requirement']})
% if field.get('mpt_support'):
* MPT Support: ${field['mpt_support']}
% endif
* @note This is an untyped field (${field.get('cppType', 'unknown')}).
% if field['requirement'] == 'soeREQUIRED':
* @return The field value.
% else:
* @return The field value, or std::nullopt if not present.
% endif
*/
% if field['requirement'] == 'soeREQUIRED':
[[nodiscard]]
${field['typeData']['return_type']}
get${field['name'][2:]}() const
{
return this->sle_->${field['typeData']['getter_method']}(${field['name']});
}
% else:
[[nodiscard]]
${field['typeData']['return_type_optional']}
get${field['name'][2:]}() const
{
if (this->sle_->isFieldPresent(${field['name']}))
return this->sle_->${field['typeData']['getter_method']}(${field['name']});
return std::nullopt;
}
/**
* @brief Check if ${field['name']} is present.
* @return True if the field is present, false otherwise.
*/
[[nodiscard]]
bool
has${field['name'][2:]}() const
{
return this->sle_->isFieldPresent(${field['name']});
}
% endif
% endif
% endfor
};
<%
required_fields = [f for f in fields if f['requirement'] == 'soeREQUIRED']
%>\
/**
* @brief Builder for ${name} ledger entries.
*
* Provides a fluent interface for constructing ledger entries with method chaining.
* Uses Json::Value internally for flexible ledger entry construction.
* Inherits common field setters from LedgerEntryBuilderBase.
*/
class ${name}Builder : public LedgerEntryBuilderBase<${name}Builder>
{
public:
/**
* @brief Construct a new ${name}Builder with required fields.
% for field in required_fields:
* @param ${field['paramName']} The ${field['name']} field value.
% endfor
*/
${name}Builder(\
% for i, field in enumerate(required_fields):
${field['typeData']['setter_type']} ${field['paramName']}${',' if i < len(required_fields) - 1 else ''}\
% endfor
)
: LedgerEntryBuilderBase<${name}Builder>(${tag})
{
% for field in required_fields:
set${field['name'][2:]}(${field['paramName']});
% endfor
}
/**
* @brief Construct a ${name}Builder from an existing SLE object.
* @param sle The existing ledger entry to copy from.
* @throws std::runtime_error if the ledger entry type doesn't match.
*/
${name}Builder(std::shared_ptr<SLE const> sle)
{
if (sle->at(sfLedgerEntryType) != ${tag})
{
throw std::runtime_error("Invalid ledger entry type for ${name}");
}
object_ = *sle;
}
/** @brief Ledger entry-specific field setters */
% for field in fields:
/**
* @brief Set ${field['name']} (${field['requirement']})
% if field.get('mpt_support'):
* MPT Support: ${field['mpt_support']}
% endif
* @return Reference to this builder for method chaining.
*/
${name}Builder&
set${field['name'][2:]}(${field['typeData']['setter_type']} value)
{
% if field.get('stiSuffix') == 'ISSUE':
object_[${field['name']}] = STIssue(${field['name']}, value);
% elif field['typeData'].get('setter_use_brackets'):
object_[${field['name']}] = value;
% else:
object_.${field['typeData']['setter_method']}(${field['name']}, value);
% endif
return *this;
}
% endfor
/**
* @brief Build and return the completed ${name} wrapper.
* @param index The ledger entry index.
* @return The constructed ledger entry wrapper.
*/
${name}
build(uint256 const& index)
{
return ${name}{std::make_shared<SLE>(std::move(object_), index)};
}
};
} // namespace xrpl::ledger_entries
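To see how the substitutions above expand, a minimal sketch (inline template text and entry data are illustrative; the real pipeline goes through generate_from_template with this .mako file) renders a stripped-down fragment the same way Mako handles ${name} and the % for loop:

from mako.template import Template

snippet = Template("""\
class ${name} : public LedgerEntryBase
{
public:
    static constexpr LedgerEntryType entryType = ${tag};
% for field in fields:
    // ${field['name']} (${field['requirement']})
% endfor
};
""")

print(snippet.render(
    name="Example",
    tag="ltEXAMPLE",
    fields=[{"name": "sfAccount", "requirement": "soeREQUIRED"}],
))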

View File

@@ -1,231 +0,0 @@
// Auto-generated unit tests for ledger entry ${name}
<%
required_fields = [f for f in fields if f["requirement"] == "soeREQUIRED"]
optional_fields = [f for f in fields if f["requirement"] != "soeREQUIRED"]
def canonical_expr(field):
return f"canonical_{field['stiSuffix']}()"
# Pick a wrong ledger entry to test type mismatch
# Use Ticket as it has minimal required fields (just Account)
if name != "Ticket":
wrong_le_include = "Ticket"
else:
wrong_le_include = "Check"
%>
#include <gtest/gtest.h>
#include <protocol_autogen/TestHelpers.h>
#include <xrpl/protocol/STLedgerEntry.h>
#include <xrpl/protocol_autogen/ledger_entries/${name}.h>
#include <xrpl/protocol_autogen/ledger_entries/${wrong_le_include}.h>
#include <string>
namespace xrpl::ledger_entries {
// 1 & 4) Set fields via builder setters, build, then read them back via
// wrapper getters. After build(), validate() should succeed for both the
// builder's STObject and the wrapper's SLE.
TEST(${name}Tests, BuilderSettersRoundTrip)
{
uint256 const index{1u};
% for field in fields:
auto const ${field["paramName"]}Value = ${canonical_expr(field)};
% endfor
${name}Builder builder{
% for i, field in enumerate(required_fields):
${field["paramName"]}Value${"," if i < len(required_fields) - 1 else ""}
% endfor
};
% for field in optional_fields:
builder.set${field["name"][2:]}(${field["paramName"]}Value);
% endfor
builder.setLedgerIndex(index);
builder.setFlags(0x1u);
EXPECT_TRUE(builder.validate());
auto const entry = builder.build(index);
EXPECT_TRUE(entry.validate());
% for field in required_fields:
{
auto const& expected = ${field["paramName"]}Value;
auto const actual = entry.get${field["name"][2:]}();
expectEqualField(expected, actual, "${field["name"]}");
}
% endfor
% for field in optional_fields:
{
auto const& expected = ${field["paramName"]}Value;
auto const actualOpt = entry.get${field["name"][2:]}();
ASSERT_TRUE(actualOpt.has_value());
expectEqualField(expected, *actualOpt, "${field["name"]}");
EXPECT_TRUE(entry.has${field["name"][2:]}());
}
% endfor
EXPECT_TRUE(entry.hasLedgerIndex());
auto const ledgerIndex = entry.getLedgerIndex();
ASSERT_TRUE(ledgerIndex.has_value());
EXPECT_EQ(*ledgerIndex, index);
EXPECT_EQ(entry.getKey(), index);
}
// 2 & 4) Start from an SLE, set fields directly on it, construct a builder
// from that SLE, build a new wrapper, and verify all fields (and validate()).
TEST(${name}Tests, BuilderFromSleRoundTrip)
{
uint256 const index{2u};
% for field in fields:
auto const ${field["paramName"]}Value = ${canonical_expr(field)};
% endfor
auto sle = std::make_shared<SLE>(${name}::entryType, index);
% for field in fields:
% if field.get("stiSuffix") == "ISSUE":
sle->at(${field["name"]}) = STIssue(${field["name"]}, ${field["paramName"]}Value);
% elif field["typeData"].get("setter_use_brackets"):
sle->at(${field["name"]}) = ${field["paramName"]}Value;
% else:
sle->${field["typeData"]["setter_method"]}(${field["name"]}, ${field["paramName"]}Value);
% endif
% endfor
${name}Builder builderFromSle{sle};
EXPECT_TRUE(builderFromSle.validate());
auto const entryFromBuilder = builderFromSle.build(index);
${name} entryFromSle{sle};
EXPECT_TRUE(entryFromBuilder.validate());
EXPECT_TRUE(entryFromSle.validate());
% for field in required_fields:
{
auto const& expected = ${field["paramName"]}Value;
auto const fromSle = entryFromSle.get${field["name"][2:]}();
auto const fromBuilder = entryFromBuilder.get${field["name"][2:]}();
expectEqualField(expected, fromSle, "${field["name"]}");
expectEqualField(expected, fromBuilder, "${field["name"]}");
}
% endfor
% for field in optional_fields:
{
auto const& expected = ${field["paramName"]}Value;
auto const fromSleOpt = entryFromSle.get${field["name"][2:]}();
auto const fromBuilderOpt = entryFromBuilder.get${field["name"][2:]}();
ASSERT_TRUE(fromSleOpt.has_value());
ASSERT_TRUE(fromBuilderOpt.has_value());
expectEqualField(expected, *fromSleOpt, "${field["name"]}");
expectEqualField(expected, *fromBuilderOpt, "${field["name"]}");
}
% endfor
EXPECT_EQ(entryFromSle.getKey(), index);
EXPECT_EQ(entryFromBuilder.getKey(), index);
}
// 3) Verify wrapper throws when constructed from wrong ledger entry type.
TEST(${name}Tests, WrapperThrowsOnWrongEntryType)
{
uint256 const index{3u};
// Build a valid ledger entry of a different type
// Ticket requires: Account, OwnerNode, TicketSequence, PreviousTxnID, PreviousTxnLgrSeq
// Check requires: Account, Destination, SendMax, Sequence, OwnerNode, DestinationNode, PreviousTxnID, PreviousTxnLgrSeq
% if wrong_le_include == "Ticket":
${wrong_le_include}Builder wrongBuilder{
canonical_ACCOUNT(),
canonical_UINT64(),
canonical_UINT32(),
canonical_UINT256(),
canonical_UINT32()};
% else:
${wrong_le_include}Builder wrongBuilder{
canonical_ACCOUNT(),
canonical_ACCOUNT(),
canonical_AMOUNT(),
canonical_UINT32(),
canonical_UINT64(),
canonical_UINT64(),
canonical_UINT256(),
canonical_UINT32()};
% endif
auto wrongEntry = wrongBuilder.build(index);
EXPECT_THROW(${name}{wrongEntry.getSle()}, std::runtime_error);
}
// 4) Verify builder throws when constructed from wrong ledger entry type.
TEST(${name}Tests, BuilderThrowsOnWrongEntryType)
{
uint256 const index{4u};
// Build a valid ledger entry of a different type
% if wrong_le_include == "Ticket":
${wrong_le_include}Builder wrongBuilder{
canonical_ACCOUNT(),
canonical_UINT64(),
canonical_UINT32(),
canonical_UINT256(),
canonical_UINT32()};
% else:
${wrong_le_include}Builder wrongBuilder{
canonical_ACCOUNT(),
canonical_ACCOUNT(),
canonical_AMOUNT(),
canonical_UINT32(),
canonical_UINT64(),
canonical_UINT64(),
canonical_UINT256(),
canonical_UINT32()};
% endif
auto wrongEntry = wrongBuilder.build(index);
EXPECT_THROW(${name}Builder{wrongEntry.getSle()}, std::runtime_error);
}
% if optional_fields:
// 5) Build with only required fields and verify optional fields return nullopt.
TEST(${name}Tests, OptionalFieldsReturnNullopt)
{
uint256 const index{3u};
% for field in required_fields:
auto const ${field["paramName"]}Value = ${canonical_expr(field)};
% endfor
${name}Builder builder{
% for i, field in enumerate(required_fields):
${field["paramName"]}Value${"," if i < len(required_fields) - 1 else ""}
% endfor
};
auto const entry = builder.build(index);
// Verify optional fields are not present
% for field in optional_fields:
EXPECT_FALSE(entry.has${field["name"][2:]}());
EXPECT_FALSE(entry.get${field["name"][2:]}().has_value());
% endfor
}
% endif
}

View File

@@ -1,226 +0,0 @@
// This file is auto-generated. Do not edit.
#pragma once
#include <xrpl/protocol/STTx.h>
#include <xrpl/protocol/STParsedJSON.h>
#include <xrpl/protocol/jss.h>
#include <xrpl/protocol_autogen/TransactionBase.h>
#include <xrpl/protocol_autogen/TransactionBuilderBase.h>
#include <xrpl/json/json_value.h>
#include <stdexcept>
#include <optional>
namespace xrpl::transactions {
class ${name}Builder;
/**
* @brief Transaction: ${name}
*
* Type: ${tag} (${value})
* Delegable: ${delegable}
* Amendment: ${amendments}
* Privileges: ${privileges}
*
* Immutable wrapper around STTx providing type-safe field access.
* Use ${name}Builder to construct new transactions.
*/
class ${name} : public TransactionBase
{
public:
static constexpr xrpl::TxType txType = ${tag};
/**
* @brief Construct a ${name} transaction wrapper from an existing STTx object.
* @throws std::runtime_error if the transaction type doesn't match.
*/
explicit ${name}(std::shared_ptr<STTx const> tx)
: TransactionBase(std::move(tx))
{
// Verify transaction type
if (tx_->getTxnType() != txType)
{
throw std::runtime_error("Invalid transaction type for ${name}");
}
}
// Transaction-specific field getters
% for field in fields:
% if field['typed']:
/**
* @brief Get ${field['name']} (${field['requirement']})
% if field.get('supports_mpt'):
* @note This field supports MPT (Multi-Purpose Token) amounts.
% endif
% if field['requirement'] == 'soeREQUIRED':
* @return The field value.
% else:
* @return The field value, or std::nullopt if not present.
% endif
*/
% if field['requirement'] == 'soeREQUIRED':
[[nodiscard]]
${field['typeData']['return_type']}
get${field['name'][2:]}() const
{
return this->tx_->${field['typeData']['getter_method']}(${field['name']});
}
% else:
[[nodiscard]]
${field['typeData']['return_type_optional']}
get${field['name'][2:]}() const
{
if (has${field['name'][2:]}())
{
return this->tx_->${field['typeData']['getter_method']}(${field['name']});
}
return std::nullopt;
}
/**
* @brief Check if ${field['name']} is present.
* @return True if the field is present, false otherwise.
*/
[[nodiscard]]
bool
has${field['name'][2:]}() const
{
return this->tx_->isFieldPresent(${field['name']});
}
% endif
% else:
/**
* @brief Get ${field['name']} (${field['requirement']})
% if field.get('supports_mpt'):
* @note This field supports MPT (Multi-Purpose Token) amounts.
% endif
* @note This is an untyped field.
% if field['requirement'] == 'soeREQUIRED':
* @return The field value.
% else:
* @return The field value, or std::nullopt if not present.
% endif
*/
% if field['requirement'] == 'soeREQUIRED':
[[nodiscard]]
${field['typeData']['return_type']}
get${field['name'][2:]}() const
{
return this->tx_->${field['typeData']['getter_method']}(${field['name']});
}
% else:
[[nodiscard]]
${field['typeData']['return_type_optional']}
get${field['name'][2:]}() const
{
if (this->tx_->isFieldPresent(${field['name']}))
return this->tx_->${field['typeData']['getter_method']}(${field['name']});
return std::nullopt;
}
/**
* @brief Check if ${field['name']} is present.
* @return True if the field is present, false otherwise.
*/
[[nodiscard]]
bool
has${field['name'][2:]}() const
{
return this->tx_->isFieldPresent(${field['name']});
}
% endif
% endif
% endfor
};
<%
required_fields = [f for f in fields if f['requirement'] == 'soeREQUIRED']
%>\
/**
* @brief Builder for ${name} transactions.
*
* Provides a fluent interface for constructing transactions with method chaining.
* Uses Json::Value internally for flexible transaction construction.
* Inherits common field setters from TransactionBuilderBase.
*/
class ${name}Builder : public TransactionBuilderBase<${name}Builder>
{
public:
/**
* @brief Construct a new ${name}Builder with required fields.
* @param account The account initiating the transaction.
% for field in required_fields:
* @param ${field['paramName']} The ${field['name']} field value.
% endfor
* @param sequence Optional sequence number for the transaction.
* @param fee Optional fee for the transaction.
*/
${name}Builder(SF_ACCOUNT::type::value_type account,
% for i, field in enumerate(required_fields):
${field['typeData']['setter_type']} ${field['paramName']},\
% endfor
std::optional<SF_UINT32::type::value_type> sequence = std::nullopt,
std::optional<SF_AMOUNT::type::value_type> fee = std::nullopt
)
: TransactionBuilderBase<${name}Builder>(${tag}, account, sequence, fee)
{
% for field in required_fields:
set${field['name'][2:]}(${field['paramName']});
% endfor
}
/**
* @brief Construct a ${name}Builder from an existing STTx object.
* @param tx The existing transaction to copy from.
* @throws std::runtime_error if the transaction type doesn't match.
*/
${name}Builder(std::shared_ptr<STTx const> tx)
{
if (tx->getTxnType() != ${tag})
{
throw std::runtime_error("Invalid transaction type for ${name}Builder");
}
object_ = *tx;
}
/** @brief Transaction-specific field setters */
% for field in fields:
/**
* @brief Set ${field['name']} (${field['requirement']})
% if field.get('supports_mpt'):
* @note This field supports MPT (Multi-Purpose Token) amounts.
% endif
* @return Reference to this builder for method chaining.
*/
${name}Builder&
set${field['name'][2:]}(${field['typeData']['setter_type']} value)
{
% if field.get('stiSuffix') == 'ISSUE':
object_[${field['name']}] = STIssue(${field['name']}, value);
% elif field['typeData'].get('setter_use_brackets'):
object_[${field['name']}] = value;
% else:
object_.${field['typeData']['setter_method']}(${field['name']}, value);
% endif
return *this;
}
% endfor
/**
* @brief Build and return the ${name} wrapper.
* @param publicKey The public key for signing.
* @param secretKey The secret key for signing.
* @return The constructed transaction wrapper.
*/
${name}
build(PublicKey const& publicKey, SecretKey const& secretKey)
{
sign(publicKey, secretKey);
return ${name}{std::make_shared<STTx>(std::move(object_))};
}
};
} // namespace xrpl::transactions

View File

@@ -1,241 +0,0 @@
// Auto-generated unit tests for transaction ${name}
<%
required_fields = [f for f in fields if f["requirement"] == "soeREQUIRED"]
optional_fields = [f for f in fields if f["requirement"] != "soeREQUIRED"]
def canonical_expr(field):
return f"canonical_{field['stiSuffix']}()"
# Pick a wrong transaction to test type mismatch
if name != "AccountSet":
wrong_tx_include = "AccountSet"
else:
wrong_tx_include = "OfferCancel"
%>
#include <gtest/gtest.h>
#include <protocol_autogen/TestHelpers.h>
#include <xrpl/protocol/SecretKey.h>
#include <xrpl/protocol/Seed.h>
#include <xrpl/protocol/STTx.h>
#include <xrpl/protocol_autogen/transactions/${name}.h>
#include <xrpl/protocol_autogen/transactions/${wrong_tx_include}.h>
#include <string>
namespace xrpl::transactions {
// 1 & 4) Set fields via builder setters, build, then read them back via
// wrapper getters. After build(), validate() should succeed.
TEST(Transactions${name}Tests, BuilderSettersRoundTrip)
{
// Generate a deterministic keypair for signing
auto const [publicKey, secretKey] =
generateKeyPair(KeyType::secp256k1, generateSeed("test${name}"));
// Common transaction fields
auto const accountValue = calcAccountID(publicKey);
std::uint32_t const sequenceValue = 1;
auto const feeValue = canonical_AMOUNT();
// Transaction-specific field values
% for field in fields:
auto const ${field["paramName"]}Value = ${canonical_expr(field)};
% endfor
${name}Builder builder{
accountValue,
% for field in required_fields:
${field["paramName"]}Value,
% endfor
sequenceValue,
feeValue
};
// Set optional fields
% for field in optional_fields:
builder.set${field["name"][2:]}(${field["paramName"]}Value);
% endfor
auto tx = builder.build(publicKey, secretKey);
std::string reason;
EXPECT_TRUE(tx.validate(reason)) << reason;
// Verify signing was applied
EXPECT_FALSE(tx.getSigningPubKey().empty());
EXPECT_TRUE(tx.hasTxnSignature());
// Verify common fields
EXPECT_EQ(tx.getAccount(), accountValue);
EXPECT_EQ(tx.getSequence(), sequenceValue);
EXPECT_EQ(tx.getFee(), feeValue);
// Verify required fields
% for field in required_fields:
{
auto const& expected = ${field["paramName"]}Value;
auto const actual = tx.get${field["name"][2:]}();
expectEqualField(expected, actual, "${field["name"]}");
}
% endfor
// Verify optional fields
% for field in optional_fields:
{
auto const& expected = ${field["paramName"]}Value;
auto const actualOpt = tx.get${field["name"][2:]}();
ASSERT_TRUE(actualOpt.has_value()) << "Optional field ${field["name"]} should be present";
expectEqualField(expected, *actualOpt, "${field["name"]}");
EXPECT_TRUE(tx.has${field["name"][2:]}());
}
% endfor
}
// 2 & 4) Start from an STTx, construct a builder from it, build a new wrapper,
// and verify all fields match.
TEST(Transactions${name}Tests, BuilderFromStTxRoundTrip)
{
// Generate a deterministic keypair for signing
auto const [publicKey, secretKey] =
generateKeyPair(KeyType::secp256k1, generateSeed("test${name}FromTx"));
// Common transaction fields
auto const accountValue = calcAccountID(publicKey);
std::uint32_t const sequenceValue = 2;
auto const feeValue = canonical_AMOUNT();
// Transaction-specific field values
% for field in fields:
auto const ${field["paramName"]}Value = ${canonical_expr(field)};
% endfor
// Build an initial transaction
${name}Builder initialBuilder{
accountValue,
% for field in required_fields:
${field["paramName"]}Value,
% endfor
sequenceValue,
feeValue
};
% for field in optional_fields:
initialBuilder.set${field["name"][2:]}(${field["paramName"]}Value);
% endfor
auto initialTx = initialBuilder.build(publicKey, secretKey);
// Create builder from existing STTx
${name}Builder builderFromTx{initialTx.getSTTx()};
auto rebuiltTx = builderFromTx.build(publicKey, secretKey);
std::string reason;
EXPECT_TRUE(rebuiltTx.validate(reason)) << reason;
// Verify common fields
EXPECT_EQ(rebuiltTx.getAccount(), accountValue);
EXPECT_EQ(rebuiltTx.getSequence(), sequenceValue);
EXPECT_EQ(rebuiltTx.getFee(), feeValue);
// Verify required fields
% for field in required_fields:
{
auto const& expected = ${field["paramName"]}Value;
auto const actual = rebuiltTx.get${field["name"][2:]}();
expectEqualField(expected, actual, "${field["name"]}");
}
% endfor
// Verify optional fields
% for field in optional_fields:
{
auto const& expected = ${field["paramName"]}Value;
auto const actualOpt = rebuiltTx.get${field["name"][2:]}();
ASSERT_TRUE(actualOpt.has_value()) << "Optional field ${field["name"]} should be present";
expectEqualField(expected, *actualOpt, "${field["name"]}");
}
% endfor
}
// 3) Verify wrapper throws when constructed from wrong transaction type.
TEST(Transactions${name}Tests, WrapperThrowsOnWrongTxType)
{
// Build a valid transaction of a different type
auto const [pk, sk] =
generateKeyPair(KeyType::secp256k1, generateSeed("testWrongType"));
auto const account = calcAccountID(pk);
% if wrong_tx_include == "AccountSet":
${wrong_tx_include}Builder wrongBuilder{account, 1, canonical_AMOUNT()};
% else:
${wrong_tx_include}Builder wrongBuilder{account, canonical_UINT32(), 1, canonical_AMOUNT()};
% endif
auto wrongTx = wrongBuilder.build(pk, sk);
EXPECT_THROW(${name}{wrongTx.getSTTx()}, std::runtime_error);
}
// 4) Verify builder throws when constructed from wrong transaction type.
TEST(Transactions${name}Tests, BuilderThrowsOnWrongTxType)
{
// Build a valid transaction of a different type
auto const [pk, sk] =
generateKeyPair(KeyType::secp256k1, generateSeed("testWrongTypeBuilder"));
auto const account = calcAccountID(pk);
% if wrong_tx_include == "AccountSet":
${wrong_tx_include}Builder wrongBuilder{account, 1, canonical_AMOUNT()};
% else:
${wrong_tx_include}Builder wrongBuilder{account, canonical_UINT32(), 1, canonical_AMOUNT()};
% endif
auto wrongTx = wrongBuilder.build(pk, sk);
EXPECT_THROW(${name}Builder{wrongTx.getSTTx()}, std::runtime_error);
}
% if optional_fields:
// 5) Build with only required fields and verify optional fields return nullopt.
TEST(Transactions${name}Tests, OptionalFieldsReturnNullopt)
{
// Generate a deterministic keypair for signing
auto const [publicKey, secretKey] =
generateKeyPair(KeyType::secp256k1, generateSeed("test${name}Nullopt"));
// Common transaction fields
auto const accountValue = calcAccountID(publicKey);
std::uint32_t const sequenceValue = 3;
auto const feeValue = canonical_AMOUNT();
// Transaction-specific required field values
% for field in required_fields:
auto const ${field["paramName"]}Value = ${canonical_expr(field)};
% endfor
${name}Builder builder{
accountValue,
% for field in required_fields:
${field["paramName"]}Value,
% endfor
sequenceValue,
feeValue
};
// Do NOT set optional fields
auto tx = builder.build(publicKey, secretKey);
// Verify optional fields are not present
% for field in optional_fields:
EXPECT_FALSE(tx.has${field["name"][2:]}());
EXPECT_FALSE(tx.get${field["name"][2:]}().has_value());
% endfor
}
% endif
}

View File

@@ -7,21 +7,18 @@
 # add_library(project.libparent)
 # target_link_modules(parent PUBLIC a b)
 function(target_link_modules parent scope)
   set(library ${PROJECT_NAME}.lib${parent})
   foreach(name ${ARGN})
     set(module ${library}.${name})
     get_target_property(sources ${library} SOURCES)
     list(LENGTH sources before)
     get_target_property(dupes ${module} SOURCES)
     list(LENGTH dupes expected)
     list(REMOVE_ITEM sources ${dupes})
     list(LENGTH sources after)
     math(EXPR actual "${before} - ${after}")
-    message(
-      STATUS
-        "${module} with ${expected} sources took ${actual} sources from ${library}"
-    )
+    message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}")
     set_target_properties(${library} PROPERTIES SOURCES "${sources}")
     target_link_libraries(${library} ${scope} ${module})
   endforeach()
 endfunction()

View File

@@ -36,30 +36,27 @@ find_package(Protobuf REQUIRED)
 # ARGN:
 #   A list of .proto files.
 function(target_protobuf_sources target prefix)
   set(dir "${CMAKE_CURRENT_BINARY_DIR}/pb-${target}")
   file(MAKE_DIRECTORY "${dir}/${prefix}")
   protobuf_generate(
     TARGET ${target}
     PROTOC_OUT_DIR "${dir}/${prefix}"
     "${ARGN}"
   )
-  target_include_directories(
-    ${target}
-    SYSTEM
-    PUBLIC # Allows #include <package/path/to/file.proto> used by consumer files.
-      $<BUILD_INTERFACE:${dir}>
-      # Allows #include "path/to/file.proto" used by generated files.
-      $<BUILD_INTERFACE:${dir}/${prefix}>
-      # Allows #include <package/path/to/file.proto> used by consumer files.
-      $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
-      # Allows #include "path/to/file.proto" used by generated files.
-      $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/${prefix}>
-  )
-  install(
-    DIRECTORY ${dir}/
-    DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
-    FILES_MATCHING
-    PATTERN "*.h"
-  )
+  target_include_directories(${target} SYSTEM PUBLIC
+    # Allows #include <package/path/to/file.proto> used by consumer files.
+    $<BUILD_INTERFACE:${dir}>
+    # Allows #include "path/to/file.proto" used by generated files.
+    $<BUILD_INTERFACE:${dir}/${prefix}>
+    # Allows #include <package/path/to/file.proto> used by consumer files.
+    $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
+    # Allows #include "path/to/file.proto" used by generated files.
+    $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/${prefix}>
+  )
+  install(
+    DIRECTORY ${dir}/
+    DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
+    FILES_MATCHING PATTERN "*.h"
+  )
 endfunction()

Some files were not shown because too many files have changed in this diff.