From a0e09187b9370805d027c611a7e9ff5a0125282a Mon Sep 17 00:00:00 2001 From: Ayaz Salikhov Date: Thu, 29 Jan 2026 13:33:24 +0000 Subject: [PATCH 1/9] chore: Add cmake-format pre-commit hook (#6279) This change adds `cmake-format` as a pre-commit hook. The style file closely matches that in Clio, and the two will be made equivalent over time. For now, some files have been excluded, as they need some manual adjustments, which will be done in future changes. --- .cmake-format.yaml | 236 +++++++++++++++++++++++++ .config/cspell.config.yaml | 4 + .pre-commit-config.yaml | 18 ++ CMakeLists.txt | 136 +++++++-------- cmake/CMakeFuncs.cmake | 39 ++--- cmake/Ccache.cmake | 32 ++-- cmake/CompilationEnv.cmake | 62 ++++--- cmake/XrplAddTest.cmake | 29 ++-- cmake/XrplCompiler.cmake | 287 +++++++++++++++---------------- cmake/XrplConfig.cmake | 66 ++++--- cmake/XrplCov.cmake | 57 +++--- cmake/XrplInterface.cmake | 131 +++++++------- cmake/XrplSanitizers.cmake | 112 ++++++------ cmake/XrplSanity.cmake | 41 +++-- cmake/XrplSettings.cmake | 185 +++++++++----------- cmake/XrplValidatorKeys.cmake | 27 ++- cmake/XrplVersion.cmake | 20 +-- cmake/create_symbolic_link.cmake | 39 ++--- cmake/deps/Boost.cmake | 77 ++++----- src/tests/libxrpl/CMakeLists.txt | 18 +- 20 files changed, 910 insertions(+), 706 deletions(-) create mode 100644 .cmake-format.yaml diff --git a/.cmake-format.yaml b/.cmake-format.yaml new file mode 100644 index 0000000000..1c4d6684e8 --- /dev/null +++ b/.cmake-format.yaml @@ -0,0 +1,236 @@ +_help_parse: Options affecting listfile parsing +parse: + _help_additional_commands: + - Specify structure for custom cmake functions + _help_override_spec: + - Override configurations per-command where available + override_spec: {} + _help_vartags: + - Specify variable tags. + vartags: [] + _help_proptags: + - Specify property tags. + proptags: [] +_help_format: Options affecting formatting. +format: + _help_disable: + - Disable formatting entirely, making cmake-format a no-op + disable: false + _help_line_width: + - How wide to allow formatted cmake files + line_width: 120 + _help_tab_size: + - How many spaces to tab for indent + tab_size: 4 + _help_use_tabchars: + - If true, lines are indented using tab characters (utf-8 + - 0x09) instead of space characters (utf-8 0x20). + - In cases where the layout would require a fractional tab + - character, the behavior of the fractional indentation is + - governed by + use_tabchars: false + _help_fractional_tab_policy: + - If is True, then the value of this variable + - indicates how fractional indentions are handled during + - whitespace replacement. If set to 'use-space', fractional + - indentation is left as spaces (utf-8 0x20). If set to + - "`round-up` fractional indentation is replaced with a single" + - tab character (utf-8 0x09) effectively shifting the column + - to the next tabstop + fractional_tab_policy: use-space + _help_max_subgroups_hwrap: + - If an argument group contains more than this many sub-groups + - (parg or kwarg groups) then force it to a vertical layout. + max_subgroups_hwrap: 4 + _help_max_pargs_hwrap: + - If a positional argument group contains more than this many + - arguments, then force it to a vertical layout.
+ max_pargs_hwrap: 5 + _help_max_rows_cmdline: + - If a cmdline positional group consumes more than this many + - lines without nesting, then invalidate the layout (and nest) + max_rows_cmdline: 2 + _help_separate_ctrl_name_with_space: + - If true, separate flow control names from their parentheses + - with a space + separate_ctrl_name_with_space: true + _help_separate_fn_name_with_space: + - If true, separate function names from parentheses with a + - space + separate_fn_name_with_space: false + _help_dangle_parens: + - If a statement is wrapped to more than one line, than dangle + - the closing parenthesis on its own line. + dangle_parens: false + _help_dangle_align: + - If the trailing parenthesis must be 'dangled' on its on + - "line, then align it to this reference: `prefix`: the start" + - "of the statement, `prefix-indent`: the start of the" + - "statement, plus one indentation level, `child`: align to" + - the column of the arguments + dangle_align: prefix + _help_min_prefix_chars: + - If the statement spelling length (including space and + - parenthesis) is smaller than this amount, then force reject + - nested layouts. + min_prefix_chars: 18 + _help_max_prefix_chars: + - If the statement spelling length (including space and + - parenthesis) is larger than the tab width by more than this + - amount, then force reject un-nested layouts. + max_prefix_chars: 10 + _help_max_lines_hwrap: + - If a candidate layout is wrapped horizontally but it exceeds + - this many lines, then reject the layout. + max_lines_hwrap: 2 + _help_line_ending: + - What style line endings to use in the output. + line_ending: unix + _help_command_case: + - Format command names consistently as 'lower' or 'upper' case + command_case: canonical + _help_keyword_case: + - Format keywords consistently as 'lower' or 'upper' case + keyword_case: unchanged + _help_always_wrap: + - A list of command names which should always be wrapped + always_wrap: [] + _help_enable_sort: + - If true, the argument lists which are known to be sortable + - will be sorted lexicographicall + enable_sort: true + _help_autosort: + - If true, the parsers may infer whether or not an argument + - list is sortable (without annotation). + autosort: true + _help_require_valid_layout: + - By default, if cmake-format cannot successfully fit + - everything into the desired linewidth it will apply the + - last, most aggressive attempt that it made. If this flag is + - True, however, cmake-format will print error, exit with non- + - zero status code, and write-out nothing + require_valid_layout: false + _help_layout_passes: + - A dictionary mapping layout nodes to a list of wrap + - decisions. See the documentation for more information. + layout_passes: {} +_help_markup: Options affecting comment reflow and formatting. +markup: + _help_bullet_char: + - What character to use for bulleted lists + bullet_char: "-" + _help_enum_char: + - What character to use as punctuation after numerals in an + - enumerated list + enum_char: . + _help_first_comment_is_literal: + - If comment markup is enabled, don't reflow the first comment + - block in each listfile. Use this to preserve formatting of + - your copyright/license statements. + first_comment_is_literal: false + _help_literal_comment_pattern: + - If comment markup is enabled, don't reflow any comment block + - which matches this (regex) pattern. Default is `None` + - (disabled). 
+ literal_comment_pattern: null + _help_fence_pattern: + - Regular expression to match preformat fences in comments + - default= ``r'^\s*([`~]{3}[`~]*)(.*)$'`` + fence_pattern: ^\s*([`~]{3}[`~]*)(.*)$ + _help_ruler_pattern: + - Regular expression to match rulers in comments default= + - '``r''^\s*[^\w\s]{3}.*[^\w\s]{3}$''``' + ruler_pattern: ^\s*[^\w\s]{3}.*[^\w\s]{3}$ + _help_explicit_trailing_pattern: + - If a comment line matches starts with this pattern then it + - is explicitly a trailing comment for the preceding + - argument. Default is '#<' + explicit_trailing_pattern: "#<" + _help_hashruler_min_length: + - If a comment line starts with at least this many consecutive + - hash characters, then don't lstrip() them off. This allows + - for lazy hash rulers where the first hash char is not + - separated by space + hashruler_min_length: 10 + _help_canonicalize_hashrulers: + - If true, then insert a space between the first hash char and + - remaining hash chars in a hash ruler, and normalize its + - length to fill the column + canonicalize_hashrulers: true + _help_enable_markup: + - enable comment markup parsing and reflow + enable_markup: true +_help_lint: Options affecting the linter +lint: + _help_disabled_codes: + - a list of lint codes to disable + disabled_codes: [] + _help_function_pattern: + - regular expression pattern describing valid function names + function_pattern: "[0-9a-z_]+" + _help_macro_pattern: + - regular expression pattern describing valid macro names + macro_pattern: "[0-9A-Z_]+" + _help_global_var_pattern: + - regular expression pattern describing valid names for + - variables with global (cache) scope + global_var_pattern: "[A-Z][0-9A-Z_]+" + _help_internal_var_pattern: + - regular expression pattern describing valid names for + - variables with global scope (but internal semantic) + internal_var_pattern: _[A-Z][0-9A-Z_]+ + _help_local_var_pattern: + - regular expression pattern describing valid names for + - variables with local scope + local_var_pattern: "[a-z][a-z0-9_]+" + _help_private_var_pattern: + - regular expression pattern describing valid names for + - privatedirectory variables + private_var_pattern: _[0-9a-z_]+ + _help_public_var_pattern: + - regular expression pattern describing valid names for public + - directory variables + public_var_pattern: "[A-Z][0-9A-Z_]+" + _help_argument_var_pattern: + - regular expression pattern describing valid names for + - function/macro arguments and loop variables. + argument_var_pattern: "[a-z][a-z0-9_]+" + _help_keyword_pattern: + - regular expression pattern describing valid names for + - keywords used in functions or macros + keyword_pattern: "[A-Z][0-9A-Z_]+" + _help_max_conditionals_custom_parser: + - In the heuristic for C0201, how many conditionals to match + - within a loop in before considering the loop a parser. + max_conditionals_custom_parser: 2 + _help_min_statement_spacing: + - Require at least this many newlines between statements + min_statement_spacing: 1 + _help_max_statement_spacing: + - Require no more than this many newlines between statements + max_statement_spacing: 2 + max_returns: 6 + max_branches: 12 + max_arguments: 5 + max_localvars: 15 + max_statements: 50 +_help_encode: Options affecting file encoding +encode: + _help_emit_byteorder_mark: + - If true, emit the unicode byte-order mark (BOM) at the start + - of the file + emit_byteorder_mark: false + _help_input_encoding: + - Specify the encoding of the input file. 
Defaults to utf-8 + input_encoding: utf-8 + _help_output_encoding: + - Specify the encoding of the output file. Defaults to utf-8. + - Note that cmake only claims to support utf-8 so be careful + - when using anything else + output_encoding: utf-8 +_help_misc: Miscellaneous configurations options. +misc: + _help_per_command: + - A dictionary containing any per-command configuration + - overrides. Currently only `command_case` is supported. + per_command: {} diff --git a/.config/cspell.config.yaml b/.config/cspell.config.yaml index 73b0417d79..a9c621f567 100644 --- a/.config/cspell.config.yaml +++ b/.config/cspell.config.yaml @@ -101,6 +101,7 @@ words: - gpgcheck - gpgkey - hotwallet + - hwrap - ifndef - inequation - insuf @@ -114,6 +115,8 @@ words: - keylet - keylets - keyvadb + - kwarg + - kwargs - ledgerentry - ledgerhash - ledgerindex @@ -163,6 +166,7 @@ words: - nunl - Nyffenegger - ostr + - pargs - partitioner - paychan - paychans diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 603cf39375..d38c11af38 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,24 @@ repos: args: [--style=file] "types_or": [c++, c, proto] + - repo: https://github.com/cheshirekow/cmake-format-precommit + rev: e2c2116d86a80e72e7146a06e68b7c228afc6319 # frozen: v0.6.13 + hooks: + - id: cmake-format + additional_dependencies: [PyYAML] + exclude: | + (?x)^( + cmake/CodeCoverage.cmake| + cmake/XrplCore.cmake| + cmake/XrplDocs.cmake| + cmake/XrplInstall.cmake| + cmake/add_module.cmake| + cmake/isolate_headers.cmake| + cmake/target_link_modules.cmake| + cmake/target_protobuf_sources.cmake| + tests/conan/CMakeLists.txt + )$ + - repo: https://github.com/rbubley/mirrors-prettier rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2 hooks: diff --git a/CMakeLists.txt b/CMakeLists.txt index c24b27adb2..3c434506f7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,16 +1,16 @@ cmake_minimum_required(VERSION 3.16) -if(POLICY CMP0074) - cmake_policy(SET CMP0074 NEW) -endif() -if(POLICY CMP0077) - cmake_policy(SET CMP0077 NEW) -endif() +if (POLICY CMP0074) + cmake_policy(SET CMP0074 NEW) +endif () +if (POLICY CMP0077) + cmake_policy(SET CMP0077 NEW) +endif () # Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows. -if(DEFINED CMAKE_MODULE_PATH) - file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH) -endif() +if (DEFINED CMAKE_MODULE_PATH) + file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH) +endif () list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake") project(xrpl) @@ -20,65 +20,65 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON) include(CompilationEnv) -if(is_gcc) +if (is_gcc) # GCC-specific fixes add_compile_options(-Wno-unknown-pragmas -Wno-subobject-linkage) # -Wno-subobject-linkage can be removed when we upgrade GCC version to at least 13.3 -elseif(is_clang) +elseif (is_clang) # Clang-specific fixes add_compile_options(-Wno-unknown-warning-option) # Ignore unknown warning options -elseif(is_msvc) +elseif (is_msvc) # MSVC-specific fixes add_compile_options(/wd4068) # Ignore unknown pragmas -endif() +endif () # Enable ccache to speed up builds. 
include(Ccache) # make GIT_COMMIT_HASH define available to all sources find_package(Git) -if(Git_FOUND) +if (Git_FOUND) execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD - OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch) - if(gch) + OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch) + if (gch) set(GIT_COMMIT_HASH "${gch}") message(STATUS gch: ${GIT_COMMIT_HASH}) add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}") - endif() + endif () execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD - OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb) - if(gb) + OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb) + if (gb) set(GIT_BRANCH "${gb}") message(STATUS gb: ${GIT_BRANCH}) add_definitions(-DGIT_BRANCH="${GIT_BRANCH}") - endif() -endif() #git + endif () +endif () # git -if(thread_safety_analysis) - add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS) - add_compile_options("-stdlib=libc++") - add_link_options("-stdlib=libc++") -endif() +if (thread_safety_analysis) + add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS + -DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS) + add_compile_options("-stdlib=libc++") + add_link_options("-stdlib=libc++") +endif () -include (CheckCXXCompilerFlag) -include (FetchContent) -include (ExternalProject) -include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP +include(CheckCXXCompilerFlag) +include(FetchContent) +include(ExternalProject) +include(CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP if (target) - message (FATAL_ERROR "The target option has been removed - use native cmake options to control build") + message(FATAL_ERROR "The target option has been removed - use native cmake options to control build") endif () include(XrplSanity) include(XrplVersion) include(XrplSettings) -# this check has to remain in the top-level cmake -# because of the early return statement +# this check has to remain in the top-level cmake because of the early return statement if (packages_only) - if (NOT TARGET rpm) - message (FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?") - endif() - return () + if (NOT TARGET rpm) + message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?") + endif () + return() endif () include(XrplCompiler) include(XrplSanitizers) @@ -86,11 +86,9 @@ include(XrplInterface) option(only_docs "Include only the docs target?" 
FALSE) include(XrplDocs) -if(only_docs) - return() -endif() - -### +if (only_docs) + return() +endif () include(deps/Boost) @@ -107,45 +105,43 @@ find_package(SOCI REQUIRED) find_package(SQLite3 REQUIRED) find_package(xxHash REQUIRED) -target_link_libraries(xrpl_libs INTERFACE - ed25519::ed25519 - lz4::lz4 - OpenSSL::Crypto - OpenSSL::SSL - secp256k1::secp256k1 - soci::soci - SQLite::SQLite3 -) +target_link_libraries( + xrpl_libs + INTERFACE ed25519::ed25519 + lz4::lz4 + OpenSSL::Crypto + OpenSSL::SSL + secp256k1::secp256k1 + soci::soci + SQLite::SQLite3) option(rocksdb "Enable RocksDB" ON) -if(rocksdb) +if (rocksdb) find_package(RocksDB REQUIRED) - set_target_properties(RocksDB::rocksdb PROPERTIES - INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1 - ) + set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1) target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb) -endif() +endif () # Work around changes to Conan recipe for now. -if(TARGET nudb::core) - set(nudb nudb::core) -elseif(TARGET NuDB::nudb) - set(nudb NuDB::nudb) -else() - message(FATAL_ERROR "unknown nudb target") -endif() +if (TARGET nudb::core) + set(nudb nudb::core) +elseif (TARGET NuDB::nudb) + set(nudb NuDB::nudb) +else () + message(FATAL_ERROR "unknown nudb target") +endif () target_link_libraries(xrpl_libs INTERFACE ${nudb}) -if(coverage) - include(XrplCov) -endif() +if (coverage) + include(XrplCov) +endif () set(PROJECT_EXPORT_SET XrplExports) include(XrplCore) include(XrplInstall) include(XrplValidatorKeys) -if(tests) - include(CTest) - add_subdirectory(src/tests/libxrpl) -endif() +if (tests) + include(CTest) + add_subdirectory(src/tests/libxrpl) +endif () diff --git a/cmake/CMakeFuncs.cmake b/cmake/CMakeFuncs.cmake index e5b2a451f4..ec2d9db330 100644 --- a/cmake/CMakeFuncs.cmake +++ b/cmake/CMakeFuncs.cmake @@ -1,30 +1,29 @@ macro (exclude_from_default target_) - set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON) - set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON) + set_target_properties(${target_} PROPERTIES EXCLUDE_FROM_ALL ON) + set_target_properties(${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON) endmacro () macro (exclude_if_included target_) - get_directory_property(has_parent PARENT_DIRECTORY) - if (has_parent) - exclude_from_default (${target_}) - endif () + get_directory_property(has_parent PARENT_DIRECTORY) + if (has_parent) + exclude_from_default(${target_}) + endif () endmacro () find_package(Git) function (git_branch branch_val) - if (NOT GIT_FOUND) - return () - endif () - set (_branch "") - execute_process (COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD" - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - RESULT_VARIABLE _git_exit_code - OUTPUT_VARIABLE _temp_branch - OUTPUT_STRIP_TRAILING_WHITESPACE - ERROR_QUIET) - if (_git_exit_code EQUAL 0) - set (_branch ${_temp_branch}) - endif () - set (${branch_val} "${_branch}" PARENT_SCOPE) + if (NOT GIT_FOUND) + return() + endif () + set(_branch "") + execute_process(COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD" + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + RESULT_VARIABLE _git_exit_code + OUTPUT_VARIABLE _temp_branch + OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET) + if (_git_exit_code EQUAL 0) + set(_branch ${_temp_branch}) + endif () + set(${branch_val} "${_branch}" PARENT_SCOPE) endfunction () diff --git a/cmake/Ccache.cmake b/cmake/Ccache.cmake index aa8d3ac59d..000883cf6a 100644 --- a/cmake/Ccache.cmake +++ 
b/cmake/Ccache.cmake @@ -15,18 +15,17 @@ endif () # https://github.com/ccache/ccache/wiki/MS-Visual-Studio#usage-with-cmake. if ("${CCACHE_PATH}" MATCHES "chocolatey") message(DEBUG "Ccache path: ${CCACHE_PATH}") - # Chocolatey uses a shim executable that we cannot use directly, in which - # case we have to find the executable it points to. If we cannot find the - # target executable then we cannot use ccache. + # Chocolatey uses a shim executable that we cannot use directly, in which case we have to find the executable it + # points to. If we cannot find the target executable then we cannot use ccache. find_program(BASH_PATH "bash") if (NOT BASH_PATH) message(WARNING "Could not find bash.") return() endif () - execute_process( - COMMAND bash -c "export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1" - OUTPUT_VARIABLE CCACHE_PATH) + execute_process(COMMAND bash -c + "export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1" + OUTPUT_VARIABLE CCACHE_PATH) if (NOT CCACHE_PATH) message(WARNING "Could not find ccache target.") @@ -37,21 +36,14 @@ endif () message(STATUS "Found ccache: ${CCACHE_PATH}") # Tell cmake to use ccache for compiling with Visual Studio. -file(COPY_FILE - ${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe - ONLY_IF_DIFFERENT) -set(CMAKE_VS_GLOBALS - "CLToolExe=cl.exe" - "CLToolPath=${CMAKE_BINARY_DIR}" - "TrackFileAccess=false" - "UseMultiToolTask=true") +file(COPY_FILE ${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe ONLY_IF_DIFFERENT) +set(CMAKE_VS_GLOBALS "CLToolExe=cl.exe" "CLToolPath=${CMAKE_BINARY_DIR}" "TrackFileAccess=false" + "UseMultiToolTask=true") -# By default Visual Studio generators will use /Zi to capture debug information, -# which is not compatible with ccache, so tell it to use /Z7 instead. +# By default Visual Studio generators will use /Zi to capture debug information, which is not compatible with ccache, so +# tell it to use /Z7 instead. if (MSVC) - foreach (var_ - CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE) - string (REPLACE "/Zi" "/Z7" ${var_} "${${var_}}") + foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE) + string(REPLACE "/Zi" "/Z7" ${var_} "${${var_}}") endforeach () endif () diff --git a/cmake/CompilationEnv.cmake b/cmake/CompilationEnv.cmake index 2d97f94615..39ec383398 100644 --- a/cmake/CompilationEnv.cmake +++ b/cmake/CompilationEnv.cmake @@ -1,8 +1,7 @@ - # Shared detection of compiler, operating system, and architecture. - # - # This module centralizes environment detection so that other - # CMake modules can use the same variables instead of repeating - # checks on CMAKE_* and built-in platform variables. +# Shared detection of compiler, operating system, and architecture. +# +# This module centralizes environment detection so that other CMake modules can use the same variables instead of +# repeating checks on CMAKE_* and built-in platform variables. # Only run once per configure step. 
include_guard(GLOBAL) @@ -15,21 +14,20 @@ set(is_gcc FALSE) set(is_msvc FALSE) set(is_xcode FALSE) -if(CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") # Clang or AppleClang - set(is_clang TRUE) -elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") - set(is_gcc TRUE) -elseif(MSVC) - set(is_msvc TRUE) -else() - message(FATAL_ERROR "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}") -endif() +if (CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") # Clang or AppleClang + set(is_clang TRUE) +elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + set(is_gcc TRUE) +elseif (MSVC) + set(is_msvc TRUE) +else () + message(FATAL_ERROR "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}") +endif () # Xcode generator detection -if(CMAKE_GENERATOR STREQUAL "Xcode") - set(is_xcode TRUE) -endif() - +if (CMAKE_GENERATOR STREQUAL "Xcode") + set(is_xcode TRUE) +endif () # -------------------------------------------------------------------- # Operating system detection @@ -38,23 +36,23 @@ set(is_linux FALSE) set(is_windows FALSE) set(is_macos FALSE) -if(CMAKE_SYSTEM_NAME STREQUAL "Linux") - set(is_linux TRUE) -elseif(CMAKE_SYSTEM_NAME STREQUAL "Windows") - set(is_windows TRUE) -elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin") - set(is_macos TRUE) -endif() +if (CMAKE_SYSTEM_NAME STREQUAL "Linux") + set(is_linux TRUE) +elseif (CMAKE_SYSTEM_NAME STREQUAL "Windows") + set(is_windows TRUE) +elseif (CMAKE_SYSTEM_NAME STREQUAL "Darwin") + set(is_macos TRUE) +endif () # -------------------------------------------------------------------- # Architecture # -------------------------------------------------------------------- set(is_amd64 FALSE) set(is_arm64 FALSE) -if(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64") - set(is_amd64 TRUE) -elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64") - set(is_arm64 TRUE) -else() - message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}") -endif() +if (CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64") + set(is_amd64 TRUE) +elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64") + set(is_arm64 TRUE) +else () + message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}") +endif () diff --git a/cmake/XrplAddTest.cmake b/cmake/XrplAddTest.cmake index 191a25c467..135b975a02 100644 --- a/cmake/XrplAddTest.cmake +++ b/cmake/XrplAddTest.cmake @@ -1,25 +1,16 @@ include(isolate_headers) -function(xrpl_add_test name) - set(target ${PROJECT_NAME}.test.${name}) +function (xrpl_add_test name) + set(target ${PROJECT_NAME}.test.${name}) - file(GLOB_RECURSE sources CONFIGURE_DEPENDS - "${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp" - "${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp" - ) - add_executable(${target} ${ARGN} ${sources}) + file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp") + add_executable(${target} ${ARGN} ${sources}) - isolate_headers( - ${target} - "${CMAKE_SOURCE_DIR}" - "${CMAKE_SOURCE_DIR}/tests/${name}" - PRIVATE - ) + isolate_headers(${target} "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests/${name}" PRIVATE) - # Make sure the test isn't optimized away in unity builds - set_target_properties(${target} PROPERTIES - UNITY_BUILD_MODE GROUP - UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed + # Make sure the test isn't optimized away in unity builds + set_target_properties(${target} PROPERTIES UNITY_BUILD_MODE GROUP UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed - add_test(NAME ${target} COMMAND ${target}) -endfunction() + add_test(NAME ${target} COMMAND ${target}) +endfunction () diff --git a/cmake/XrplCompiler.cmake 
b/cmake/XrplCompiler.cmake index a7c5ff0423..f7ac8c2b71 100644 --- a/cmake/XrplCompiler.cmake +++ b/cmake/XrplCompiler.cmake @@ -8,153 +8,155 @@ include(CompilationEnv) TODO some/most of these common settings belong in a toolchain file, especially the ABI-impacting ones #]=========================================================] -add_library (common INTERFACE) -add_library (Xrpl::common ALIAS common) +add_library(common INTERFACE) +add_library(Xrpl::common ALIAS common) include(XrplSanitizers) # add a single global dependency on this interface lib -link_libraries (Xrpl::common) +link_libraries(Xrpl::common) # Respect CMAKE_POSITION_INDEPENDENT_CODE setting (may be set by Conan toolchain) -if(NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE) - set(CMAKE_POSITION_INDEPENDENT_CODE ON) -endif() -set_target_properties (common - PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE}) +if (NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE) + set(CMAKE_POSITION_INDEPENDENT_CODE ON) +endif () +set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE}) set(CMAKE_CXX_EXTENSIONS OFF) -target_compile_definitions (common - INTERFACE - $<$:DEBUG _DEBUG> - #[===[ +target_compile_definitions( + common + INTERFACE $<$:DEBUG + _DEBUG> + #[===[ NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it explicitly except for the special case of (profile ON) and (assert OFF). Presumably this is because we don't want profile builds asserting unless asserts were specifically requested. ]===] - $<$,$>>:NDEBUG> - # TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x. - OPENSSL_SUPPRESS_DEPRECATED -) + $<$,$>>:NDEBUG> + # TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x. 
+ OPENSSL_SUPPRESS_DEPRECATED) if (MSVC) - # remove existing exception flag since we set it to -EHa - string (REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") + # remove existing exception flag since we set it to -EHa + string(REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") - foreach (var_ - CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE) + foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE) - # also remove dynamic runtime - string (REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}") + # also remove dynamic runtime + string(REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}") - # /ZI (Edit & Continue debugging information) is incompatible with Gy- - string (REPLACE "/ZI" "/Zi" ${var_} "${${var_}}") + # /ZI (Edit & Continue debugging information) is incompatible with Gy- + string(REPLACE "/ZI" "/Zi" ${var_} "${${var_}}") - # omit debug info completely under CI (not needed) - if (is_ci) - string (REPLACE "/Zi" " " ${var_} "${${var_}}") - string (REPLACE "/Z7" " " ${var_} "${${var_}}") - endif () - endforeach () + # omit debug info completely under CI (not needed) + if (is_ci) + string(REPLACE "/Zi" " " ${var_} "${${var_}}") + string(REPLACE "/Z7" " " ${var_} "${${var_}}") + endif () + endforeach () - target_compile_options (common - INTERFACE - -bigobj # Increase object file max size - -fp:precise # Floating point behavior - -Gd # __cdecl calling convention - -Gm- # Minimal rebuild: disabled - -Gy- # Function level linking: disabled - -MP # Multiprocessor compilation - -openmp- # pragma omp: disabled - -errorReport:none # No error reporting to Internet - -nologo # Suppress login banner - -wd4018 # Disable signed/unsigned comparison warnings - -wd4244 # Disable float to int possible loss of data warnings - -wd4267 # Disable size_t to T possible loss of data warnings - -wd4800 # Disable C4800(int to bool performance) - -wd4503 # Decorated name length exceeded, name was truncated - $<$: - -EHa - -GR - > - $<$:-Ox> - $<$,$>: - -GS - -Zc:forScope - > - # static runtime - $<$:-MTd> - $<$>:-MT> - $<$:-WX> - ) - target_compile_definitions (common - INTERFACE - _WIN32_WINNT=0x6000 - _SCL_SECURE_NO_WARNINGS - _CRT_SECURE_NO_WARNINGS - WIN32_CONSOLE - WIN32_LEAN_AND_MEAN - NOMINMAX - # TODO: Resolve these warnings, don't just silence them - _SILENCE_ALL_CXX17_DEPRECATION_WARNINGS - $<$,$>:_CRTDBG_MAP_ALLOC>) - target_link_libraries (common - INTERFACE - -errorreport:none - -machine:X64) + target_compile_options( + common + INTERFACE # Increase object file max size + -bigobj + # Floating point behavior + -fp:precise + # __cdecl calling convention + -Gd + # Minimal rebuild: disabled + -Gm- + # Function level linking: disabled + -Gy- + # Multiprocessor compilation + -MP + # pragma omp: disabled + -openmp- + # No error reporting to Internet + -errorReport:none + # Suppress login banner + -nologo + # Disable signed/unsigned comparison warnings + -wd4018 + # Disable float to int possible loss of data warnings + -wd4244 + # Disable size_t to T possible loss of data warnings + -wd4267 + # Disable C4800(int to bool performance) + -wd4800 + # Decorated name length exceeded, name was truncated + -wd4503 + $<$: + -EHa + -GR + > + $<$:-Ox> + $<$,$>: + -GS + -Zc:forScope + > + # static runtime + $<$:-MTd> + $<$>:-MT> + $<$:-WX>) + target_compile_definitions( + common + INTERFACE _WIN32_WINNT=0x6000 + _SCL_SECURE_NO_WARNINGS + _CRT_SECURE_NO_WARNINGS + WIN32_CONSOLE + 
WIN32_LEAN_AND_MEAN + NOMINMAX + # TODO: Resolve these warnings, don't just silence them + _SILENCE_ALL_CXX17_DEPRECATION_WARNINGS + $<$,$>:_CRTDBG_MAP_ALLOC>) + target_link_libraries(common INTERFACE -errorreport:none -machine:X64) else () - target_compile_options (common - INTERFACE - -Wall - -Wdeprecated - $<$:-Wno-deprecated-declarations> - $<$:-Wextra -Wno-unused-parameter> - $<$:-Werror> - -fstack-protector - -Wno-sign-compare - -Wno-unused-but-set-variable - $<$>:-fno-strict-aliasing> - # tweak gcc optimization for debug - $<$,$>:-O0> - # Add debug symbols to release config - $<$:-g>) - target_link_libraries (common - INTERFACE - -rdynamic - $<$:-Wl,-z,relro,-z,now,--build-id> - # link to static libc/c++ iff: - # * static option set and - # * NOT APPLE (AppleClang does not support static libc/c++) and - # * NOT SANITIZERS (sanitizers typically don't work with static libc/c++) - $<$,$>,$>>: - -static-libstdc++ - -static-libgcc - >) + target_compile_options( + common + INTERFACE -Wall + -Wdeprecated + $<$:-Wno-deprecated-declarations> + $<$:-Wextra + -Wno-unused-parameter> + $<$:-Werror> + -fstack-protector + -Wno-sign-compare + -Wno-unused-but-set-variable + $<$>:-fno-strict-aliasing> + # tweak gcc optimization for debug + $<$,$>:-O0> + # Add debug symbols to release config + $<$:-g>) + target_link_libraries( + common + INTERFACE -rdynamic + $<$:-Wl,-z,relro,-z,now,--build-id> + # link to static libc/c++ iff: * static option set and * NOT APPLE (AppleClang does not support static + # libc/c++) and * NOT SANITIZERS (sanitizers typically don't work with static libc/c++) + $<$,$>,$>>: + -static-libstdc++ + -static-libgcc + >) endif () # Antithesis instrumentation will only be built and deployed using machines running Linux. if (voidstar) - if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug") - message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...") - elseif (NOT is_linux) - message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...") - elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0)) - message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...") - endif () + if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug") + message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...") + elseif (NOT is_linux) + message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...") + elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0)) + message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...") + endif () endif () if (use_mold) - # use mold linker if available - execute_process ( - COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version - ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) - if ("${LD_VERSION}" MATCHES "mold") - target_link_libraries (common INTERFACE -fuse-ld=mold) - endif () - unset (LD_VERSION) + # use mold linker if available + execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) + if ("${LD_VERSION}" MATCHES "mold") + target_link_libraries(common INTERFACE -fuse-ld=mold) + endif () + unset(LD_VERSION) elseif (use_gold AND is_gcc) - # use gold linker if available - execute_process ( - COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version - ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) + # use gold linker if available + execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) 
#[=========================================================[ NOTE: THE gold linker inserts -rpath as DT_RUNPATH by default instead of DT_RPATH, so you might have slightly @@ -168,34 +170,31 @@ elseif (use_gold AND is_gcc) disabling would be to figure out all the settings required to make gold play nicely with jemalloc. #]=========================================================] - if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc)) - target_link_libraries (common - INTERFACE - -fuse-ld=gold - -Wl,--no-as-needed - #[=========================================================[ + if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc)) + target_link_libraries( + common + INTERFACE -fuse-ld=gold + -Wl,--no-as-needed + #[=========================================================[ see https://bugs.launchpad.net/ubuntu/+source/eglibc/+bug/1253638/comments/5 DT_RUNPATH does not work great for transitive dependencies (of which boost has a few) - so just switch to DT_RPATH if doing dynamic linking with gold #]=========================================================] - $<$>:-Wl,--disable-new-dtags>) - endif () - unset (LD_VERSION) + $<$>:-Wl,--disable-new-dtags>) + endif () + unset(LD_VERSION) elseif (use_lld) - # use lld linker if available - execute_process ( - COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version - ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) - if ("${LD_VERSION}" MATCHES "LLD") - target_link_libraries (common INTERFACE -fuse-ld=lld) - endif () - unset (LD_VERSION) -endif() - + # use lld linker if available + execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) + if ("${LD_VERSION}" MATCHES "LLD") + target_link_libraries(common INTERFACE -fuse-ld=lld) + endif () + unset(LD_VERSION) +endif () if (assert) - foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE) - STRING (REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}") - endforeach () + foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE) + string(REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}") + endforeach () endif () diff --git a/cmake/XrplConfig.cmake b/cmake/XrplConfig.cmake index 8a739d48a3..bf6aa475ba 100644 --- a/cmake/XrplConfig.cmake +++ b/cmake/XrplConfig.cmake @@ -1,54 +1,52 @@ -include (CMakeFindDependencyMacro) +include(CMakeFindDependencyMacro) # need to represent system dependencies of the lib here #[=========================================================[ Boost #]=========================================================] if (static OR APPLE OR MSVC) - set (Boost_USE_STATIC_LIBS ON) + set(Boost_USE_STATIC_LIBS ON) endif () -set (Boost_USE_MULTITHREADED ON) +set(Boost_USE_MULTITHREADED ON) if (static OR MSVC) - set (Boost_USE_STATIC_RUNTIME ON) + set(Boost_USE_STATIC_RUNTIME ON) else () - set (Boost_USE_STATIC_RUNTIME OFF) + set(Boost_USE_STATIC_RUNTIME OFF) endif () -find_dependency (Boost - COMPONENTS - chrono - container - context - coroutine - date_time - filesystem - program_options - regex - system - thread) +find_dependency(Boost + COMPONENTS + chrono + container + context + coroutine + date_time + filesystem + program_options + regex + system + thread) #[=========================================================[ OpenSSL #]=========================================================] if (NOT DEFINED OPENSSL_ROOT_DIR) - if (DEFINED ENV{OPENSSL_ROOT}) - set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT}) - elseif (APPLE) - find_program (homebrew brew) - if (homebrew) - execute_process (COMMAND ${homebrew} --prefix openssl - 
OUTPUT_VARIABLE OPENSSL_ROOT_DIR - OUTPUT_STRIP_TRAILING_WHITESPACE) + if (DEFINED ENV{OPENSSL_ROOT}) + set(OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT}) + elseif (APPLE) + find_program(homebrew brew) + if (homebrew) + execute_process(COMMAND ${homebrew} --prefix openssl OUTPUT_VARIABLE OPENSSL_ROOT_DIR + OUTPUT_STRIP_TRAILING_WHITESPACE) + endif () endif () - endif () - file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR) + file(TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR) endif () if (static OR APPLE OR MSVC) - set (OPENSSL_USE_STATIC_LIBS ON) + set(OPENSSL_USE_STATIC_LIBS ON) endif () -set (OPENSSL_MSVC_STATIC_RT ON) -find_dependency (OpenSSL REQUIRED) -find_dependency (ZLIB) -find_dependency (date) +set(OPENSSL_MSVC_STATIC_RT ON) +find_dependency(OpenSSL REQUIRED) +find_dependency(ZLIB) +find_dependency(date) if (TARGET ZLIB::ZLIB) - set_target_properties(OpenSSL::Crypto PROPERTIES - INTERFACE_LINK_LIBRARIES ZLIB::ZLIB) + set_target_properties(OpenSSL::Crypto PROPERTIES INTERFACE_LINK_LIBRARIES ZLIB::ZLIB) endif () diff --git a/cmake/XrplCov.cmake b/cmake/XrplCov.cmake index b212d60b64..62c40407f2 100644 --- a/cmake/XrplCov.cmake +++ b/cmake/XrplCov.cmake @@ -2,40 +2,51 @@ coverage report target #]===================================================================] -if(NOT coverage) - message(FATAL_ERROR "Code coverage not enabled! Aborting ...") -endif() +if (NOT coverage) + message(FATAL_ERROR "Code coverage not enabled! Aborting ...") +endif () -if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC") - message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag") - return() -endif() +if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC") + message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag") + return() +endif () include(ProcessorCount) ProcessorCount(PROCESSOR_COUNT) include(CodeCoverage) -# The instructions for these commands come from the `CodeCoverage` module, -# which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3, -# then locally changed (see CHANGES: section in `CodeCoverage.cmake`) +# The instructions for these commands come from the `CodeCoverage` module, which was copied from +# https://github.com/bilke/cmake-modules, commit fb7d2a3, then locally changed (see CHANGES: section in +# `CodeCoverage.cmake`) set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args}) -if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "") - separate_arguments(GCOVR_ADDITIONAL_ARGS) -endif() +if (NOT GCOVR_ADDITIONAL_ARGS STREQUAL "") + separate_arguments(GCOVR_ADDITIONAL_ARGS) +endif () -list(APPEND GCOVR_ADDITIONAL_ARGS - --exclude-throw-branches - --exclude-noncode-lines - --exclude-unreachable-branches -s - -j ${PROCESSOR_COUNT}) +list(APPEND + GCOVR_ADDITIONAL_ARGS + --exclude-throw-branches + --exclude-noncode-lines + --exclude-unreachable-branches + -s + -j + ${PROCESSOR_COUNT}) setup_target_for_coverage_gcovr( - NAME coverage - FORMAT ${coverage_format} - EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb" - DEPENDENCIES xrpld xrpl.tests -) + NAME + coverage + FORMAT + ${coverage_format} + EXCLUDE + "src/test" + "src/tests" + "include/xrpl/beast/test" + "include/xrpl/beast/unit_test" + "${CMAKE_BINARY_DIR}/pb-xrpl.libpb" + DEPENDENCIES + xrpld + xrpl.tests) add_code_coverage_to_target(opts INTERFACE) diff --git a/cmake/XrplInterface.cmake b/cmake/XrplInterface.cmake index f53b2dac26..f471b37dd7 100644 --- a/cmake/XrplInterface.cmake +++ b/cmake/XrplInterface.cmake @@ -5,84 +5,79 
@@ include(CompilationEnv) # Set defaults for optional variables to avoid uninitialized variable warnings -if(NOT DEFINED voidstar) - set(voidstar OFF) -endif() +if (NOT DEFINED voidstar) + set(voidstar OFF) +endif () -add_library (opts INTERFACE) -add_library (Xrpl::opts ALIAS opts) -target_compile_definitions (opts - INTERFACE - BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS - BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT - BOOST_CONTAINER_FWD_BAD_DEQUE - HAS_UNCAUGHT_EXCEPTIONS=1 - $<$: - BOOST_ASIO_NO_DEPRECATED - BOOST_FILESYSTEM_NO_DEPRECATED - > - $<$>: - BOOST_COROUTINES_NO_DEPRECATION_WARNING - BOOST_BEAST_ALLOW_DEPRECATED - BOOST_FILESYSTEM_DEPRECATED - > - $<$:BEAST_NO_UNIT_TEST_INLINE=1> - $<$:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1> - $<$:XRPL_SINGLE_IO_SERVICE_THREAD=1> - $<$:ENABLE_VOIDSTAR>) -target_compile_options (opts - INTERFACE - $<$,$>:-Wsuggest-override> - $<$:-Wno-maybe-uninitialized> - $<$:-fno-omit-frame-pointer> - $<$:-pg> - $<$,$>:-p>) +add_library(opts INTERFACE) +add_library(Xrpl::opts ALIAS opts) +target_compile_definitions( + opts + INTERFACE BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS + BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT + BOOST_CONTAINER_FWD_BAD_DEQUE + HAS_UNCAUGHT_EXCEPTIONS=1 + $<$: + BOOST_ASIO_NO_DEPRECATED + BOOST_FILESYSTEM_NO_DEPRECATED + > + $<$>: + BOOST_COROUTINES_NO_DEPRECATION_WARNING + BOOST_BEAST_ALLOW_DEPRECATED + BOOST_FILESYSTEM_DEPRECATED + > + $<$:BEAST_NO_UNIT_TEST_INLINE=1> + $<$:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1> + $<$:XRPL_SINGLE_IO_SERVICE_THREAD=1> + $<$:ENABLE_VOIDSTAR>) +target_compile_options( + opts + INTERFACE $<$,$>:-Wsuggest-override> + $<$:-Wno-maybe-uninitialized> $<$:-fno-omit-frame-pointer> + $<$:-pg> $<$,$>:-p>) -target_link_libraries (opts - INTERFACE - $<$:-pg> - $<$,$>:-p>) +target_link_libraries(opts INTERFACE $<$:-pg> $<$,$>:-p>) -if(jemalloc) - find_package(jemalloc REQUIRED) - target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC) - target_link_libraries(opts INTERFACE jemalloc::jemalloc) +if (jemalloc) + find_package(jemalloc REQUIRED) + target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC) + target_link_libraries(opts INTERFACE jemalloc::jemalloc) endif () #[===================================================================[ xrpld transitive library deps via an interface library #]===================================================================] -add_library (xrpl_syslibs INTERFACE) -add_library (Xrpl::syslibs ALIAS xrpl_syslibs) -target_link_libraries (xrpl_syslibs - INTERFACE - $<$: - legacy_stdio_definitions.lib - Shlwapi - kernel32 - user32 - gdi32 - winspool - comdlg32 - advapi32 - shell32 - ole32 - oleaut32 - uuid - odbc32 - odbccp32 - crypt32 - > - $<$>:dl> - $<$,$>>:rt>) +add_library(xrpl_syslibs INTERFACE) +add_library(Xrpl::syslibs ALIAS xrpl_syslibs) +target_link_libraries( + xrpl_syslibs + INTERFACE $<$: + legacy_stdio_definitions.lib + Shlwapi + kernel32 + user32 + gdi32 + winspool + comdlg32 + advapi32 + shell32 + ole32 + oleaut32 + uuid + odbc32 + odbccp32 + crypt32 + > + $<$>:dl> + $<$,$>>:rt>) if (NOT is_msvc) - set (THREADS_PREFER_PTHREAD_FLAG ON) - find_package (Threads) - target_link_libraries (xrpl_syslibs INTERFACE Threads::Threads) + set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads) + target_link_libraries(xrpl_syslibs INTERFACE Threads::Threads) endif () -add_library (xrpl_libs INTERFACE) -add_library (Xrpl::libs ALIAS xrpl_libs) -target_link_libraries (xrpl_libs INTERFACE Xrpl::syslibs) +add_library(xrpl_libs INTERFACE) +add_library(Xrpl::libs ALIAS 
xrpl_libs) +target_link_libraries(xrpl_libs INTERFACE Xrpl::syslibs) diff --git a/cmake/XrplSanitizers.cmake b/cmake/XrplSanitizers.cmake index fc31e4a3ec..6ddc0b7e7d 100644 --- a/cmake/XrplSanitizers.cmake +++ b/cmake/XrplSanitizers.cmake @@ -44,23 +44,23 @@ include(CompilationEnv) # Read environment variable set(SANITIZERS "") -if(DEFINED ENV{SANITIZERS}) - set(SANITIZERS "$ENV{SANITIZERS}") -endif() +if (DEFINED ENV{SANITIZERS}) + set(SANITIZERS "$ENV{SANITIZERS}") +endif () # Set SANITIZERS_ENABLED flag for use in other modules -if(SANITIZERS MATCHES "address|thread|undefinedbehavior") +if (SANITIZERS MATCHES "address|thread|undefinedbehavior") set(SANITIZERS_ENABLED TRUE) -else() +else () set(SANITIZERS_ENABLED FALSE) return() -endif() +endif () # Sanitizers are not supported on Windows/MSVC -if(is_msvc) +if (is_msvc) message(FATAL_ERROR "Sanitizers are not supported on Windows/MSVC. " - "Please unset the SANITIZERS environment variable.") -endif() + "Please unset the SANITIZERS environment variable.") +endif () message(STATUS "Configuring sanitizers: ${SANITIZERS}") @@ -74,24 +74,24 @@ set(san_list "${SANITIZERS}") string(REPLACE "," ";" san_list "${san_list}") separate_arguments(san_list) -foreach(san IN LISTS san_list) - if(san STREQUAL "address") +foreach (san IN LISTS san_list) + if (san STREQUAL "address") set(enable_asan TRUE) - elseif(san STREQUAL "thread") + elseif (san STREQUAL "thread") set(enable_tsan TRUE) - elseif(san STREQUAL "undefinedbehavior") + elseif (san STREQUAL "undefinedbehavior") set(enable_ubsan TRUE) - else() + else () message(FATAL_ERROR "Unsupported sanitizer type: ${san}" - "Supported: address, thread, undefinedbehavior and their combinations.") - endif() -endforeach() + "Supported: address, thread, undefinedbehavior and their combinations.") + endif () +endforeach () # Validate sanitizer compatibility -if(enable_asan AND enable_tsan) +if (enable_asan AND enable_tsan) message(FATAL_ERROR "AddressSanitizer and ThreadSanitizer are incompatible and cannot be enabled simultaneously. " - "Use 'address' or 'thread', optionally with 'undefinedbehavior'.") -endif() + "Use 'address' or 'thread', optionally with 'undefinedbehavior'.") +endif () # Frame pointer is required for meaningful stack traces. Sanitizers recommend minimum of -O1 for reasonable performance set(SANITIZERS_COMPILE_FLAGS "-fno-omit-frame-pointer" "-O1") @@ -99,31 +99,31 @@ set(SANITIZERS_COMPILE_FLAGS "-fno-omit-frame-pointer" "-O1") # Build the sanitizer flags list set(SANITIZER_TYPES) -if(enable_asan) +if (enable_asan) list(APPEND SANITIZER_TYPES "address") -elseif(enable_tsan) +elseif (enable_tsan) list(APPEND SANITIZER_TYPES "thread") -endif() +endif () -if(enable_ubsan) +if (enable_ubsan) # UB sanitizer flags list(APPEND SANITIZER_TYPES "undefined" "float-divide-by-zero") - if(is_clang) - # Clang supports additional UB checks. More info here https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html + if (is_clang) + # Clang supports additional UB checks. 
More info here + # https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html list(APPEND SANITIZER_TYPES "unsigned-integer-overflow") - endif() -endif() + endif () +endif () -# Configure code model for GCC on amd64 -# Use large code model for ASAN to avoid relocation errors -# Use medium code model for TSAN (large is not compatible with TSAN) +# Configure code model for GCC on amd64 Use large code model for ASAN to avoid relocation errors Use medium code model +# for TSAN (large is not compatible with TSAN) set(SANITIZERS_RELOCATION_FLAGS) # Compiler-specific configuration -if(is_gcc) - # Disable mold, gold and lld linkers for GCC with sanitizers - # Use default linker (bfd/ld) which is more lenient with mixed code models - # This is needed since the size of instrumented binary exceeds the limits set by mold, lld and gold linkers +if (is_gcc) + # Disable mold, gold and lld linkers for GCC with sanitizers Use default linker (bfd/ld) which is more lenient with + # mixed code models This is needed since the size of instrumented binary exceeds the limits set by mold, lld and + # gold linkers set(use_mold OFF CACHE BOOL "Use mold linker" FORCE) set(use_gold OFF CACHE BOOL "Use gold linker" FORCE) set(use_lld OFF CACHE BOOL "Use lld linker" FORCE) @@ -132,17 +132,17 @@ if(is_gcc) # Suppress false positive warnings in GCC with stringop-overflow list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-stringop-overflow") - if(is_amd64 AND enable_asan) + if (is_amd64 AND enable_asan) message(STATUS " Using large code model (-mcmodel=large)") list(APPEND SANITIZERS_COMPILE_FLAGS "-mcmodel=large") list(APPEND SANITIZERS_RELOCATION_FLAGS "-mcmodel=large") - elseif(enable_tsan) + elseif (enable_tsan) # GCC doesn't support atomic_thread_fence with tsan. Suppress warnings. list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-tsan") message(STATUS " Using medium code model (-mcmodel=medium)") list(APPEND SANITIZERS_COMPILE_FLAGS "-mcmodel=medium") list(APPEND SANITIZERS_RELOCATION_FLAGS "-mcmodel=medium") - endif() + endif () # Join sanitizer flags with commas for -fsanitize option list(JOIN SANITIZER_TYPES "," SANITIZER_TYPES_STR) @@ -151,13 +151,12 @@ if(is_gcc) list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}") set(SANITIZERS_LINK_FLAGS "${SANITIZERS_RELOCATION_FLAGS}" "-fsanitize=${SANITIZER_TYPES_STR}") -elseif(is_clang) - # Add ignorelist for Clang (GCC doesn't support this) - # Use CMAKE_SOURCE_DIR to get the path to the ignorelist +elseif (is_clang) + # Add ignorelist for Clang (GCC doesn't support this) Use CMAKE_SOURCE_DIR to get the path to the ignorelist set(IGNORELIST_PATH "${CMAKE_SOURCE_DIR}/sanitizers/suppressions/sanitizer-ignorelist.txt") - if(NOT EXISTS "${IGNORELIST_PATH}") + if (NOT EXISTS "${IGNORELIST_PATH}") message(FATAL_ERROR "Sanitizer ignorelist not found: ${IGNORELIST_PATH}") - endif() + endif () list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize-ignorelist=${IGNORELIST_PATH}") message(STATUS " Using sanitizer ignorelist: ${IGNORELIST_PATH}") @@ -168,34 +167,31 @@ elseif(is_clang) # Add sanitizer to compile and link flags list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}") set(SANITIZERS_LINK_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}") -endif() +endif () message(STATUS " Compile flags: ${SANITIZERS_COMPILE_FLAGS}") message(STATUS " Link flags: ${SANITIZERS_LINK_FLAGS}") -# Apply the sanitizer flags to the 'common' interface library -# This is the same library used by XrplCompiler.cmake -target_compile_options(common INTERFACE - 
$<$:${SANITIZERS_COMPILE_FLAGS}> - $<$:${SANITIZERS_COMPILE_FLAGS}> -) +# Apply the sanitizer flags to the 'common' interface library This is the same library used by XrplCompiler.cmake +target_compile_options(common INTERFACE $<$:${SANITIZERS_COMPILE_FLAGS}> + $<$:${SANITIZERS_COMPILE_FLAGS}>) # Apply linker flags target_link_options(common INTERFACE ${SANITIZERS_LINK_FLAGS}) # Define SANITIZERS macro for BuildInfo.cpp set(sanitizers_list) -if(enable_asan) +if (enable_asan) list(APPEND sanitizers_list "ASAN") -endif() -if(enable_tsan) +endif () +if (enable_tsan) list(APPEND sanitizers_list "TSAN") -endif() -if(enable_ubsan) +endif () +if (enable_ubsan) list(APPEND sanitizers_list "UBSAN") -endif() +endif () -if(sanitizers_list) +if (sanitizers_list) list(JOIN sanitizers_list "." sanitizers_str) target_compile_definitions(common INTERFACE SANITIZERS=${sanitizers_str}) -endif() +endif () diff --git a/cmake/XrplSanity.cmake b/cmake/XrplSanity.cmake index 7464ca396c..5055414abd 100644 --- a/cmake/XrplSanity.cmake +++ b/cmake/XrplSanity.cmake @@ -6,40 +6,39 @@ include(CompilationEnv) get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) -set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE) +set(CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE) if (NOT is_multiconfig) - if (NOT CMAKE_BUILD_TYPE) - message (STATUS "Build type not specified - defaulting to Release") - set (CMAKE_BUILD_TYPE Release CACHE STRING "build type" FORCE) - elseif (NOT (CMAKE_BUILD_TYPE STREQUAL Debug OR CMAKE_BUILD_TYPE STREQUAL Release)) - # for simplicity, these are the only two config types we care about. Limiting - # the build types simplifies dealing with external project builds especially - message (FATAL_ERROR " *** Only Debug or Release build types are currently supported ***") - endif () + if (NOT CMAKE_BUILD_TYPE) + message(STATUS "Build type not specified - defaulting to Release") + set(CMAKE_BUILD_TYPE Release CACHE STRING "build type" FORCE) + elseif (NOT (CMAKE_BUILD_TYPE STREQUAL Debug OR CMAKE_BUILD_TYPE STREQUAL Release)) + # for simplicity, these are the only two config types we care about. Limiting the build types simplifies dealing + # with external project builds especially + message(FATAL_ERROR " *** Only Debug or Release build types are currently supported ***") + endif () endif () if (is_clang) # both Clang and AppleClang - if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND - CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0) - message (FATAL_ERROR "This project requires clang 16 or later") - endif () + if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0) + message(FATAL_ERROR "This project requires clang 16 or later") + endif () elseif (is_gcc) - if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0) - message (FATAL_ERROR "This project requires GCC 12 or later") - endif () + if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0) + message(FATAL_ERROR "This project requires GCC 12 or later") + endif () endif () # check for in-source build and fail if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}") - message (FATAL_ERROR "Builds (in-source) are not allowed in " - "${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles " - "directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.") + message(FATAL_ERROR "Builds (in-source) are not allowed in " + "${CMAKE_CURRENT_SOURCE_DIR}. 
Please remove CMakeCache.txt and the CMakeFiles " + "directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.") endif () if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32") - message (FATAL_ERROR "Visual Studio 32-bit build is not supported.") + message(FATAL_ERROR "Visual Studio 32-bit build is not supported.") endif () if (APPLE AND NOT HOMEBREW) - find_program (HOMEBREW brew) + find_program(HOMEBREW brew) endif () diff --git a/cmake/XrplSettings.cmake b/cmake/XrplSettings.cmake index 3724ea2b4f..6d332be19d 100644 --- a/cmake/XrplSettings.cmake +++ b/cmake/XrplSettings.cmake @@ -5,125 +5,110 @@ include(CompilationEnv) set(is_ci FALSE) -if(DEFINED ENV{CI}) - if("$ENV{CI}" STREQUAL "true") - set(is_ci TRUE) - endif() -endif() +if (DEFINED ENV{CI}) + if ("$ENV{CI}" STREQUAL "true") + set(is_ci TRUE) + endif () +endif () get_directory_property(has_parent PARENT_DIRECTORY) -if(has_parent) - set(is_root_project OFF) -else() - set(is_root_project ON) -endif() +if (has_parent) + set(is_root_project OFF) +else () + set(is_root_project ON) +endif () option(assert "Enables asserts, even in release builds" OFF) option(xrpld "Build xrpld" ON) option(tests "Build tests" ON) -if(tests) - # This setting allows making a separate workflow to test fees other than default 10 - if(NOT UNIT_TEST_REFERENCE_FEE) - set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "") - endif() -endif() +if (tests) + # This setting allows making a separate workflow to test fees other than default 10 + if (NOT UNIT_TEST_REFERENCE_FEE) + set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "") + endif () +endif () option(unity "Creates a build using UNITY support in cmake." OFF) -if(unity) - if(NOT is_ci) - set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "") - endif() - set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build") -endif() +if (unity) + if (NOT is_ci) + set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "") + endif () + set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build") +endif () -if(is_clang AND is_linux) - option(voidstar "Enable Antithesis instrumentation." OFF) -endif() +if (is_clang AND is_linux) + option(voidstar "Enable Antithesis instrumentation." OFF) +endif () -if(is_gcc OR is_clang) - include(ProcessorCount) - ProcessorCount(PROCESSOR_COUNT) +if (is_gcc OR is_clang) + include(ProcessorCount) + ProcessorCount(PROCESSOR_COUNT) - option(coverage "Generates coverage info." OFF) - option(profile "Add profiling flags" OFF) - set(coverage_format "html-details" CACHE STRING - "Output format of the coverage report.") - set(coverage_extra_args "" CACHE STRING - "Additional arguments to pass to gcovr.") - option(wextra "compile with extra gcc/clang warnings enabled" ON) -else() - set(profile OFF CACHE BOOL "gcc/clang only" FORCE) - set(coverage OFF CACHE BOOL "gcc/clang only" FORCE) - set(wextra OFF CACHE BOOL "gcc/clang only" FORCE) -endif() + option(coverage "Generates coverage info." 
OFF) + option(profile "Add profiling flags" OFF) + set(coverage_format "html-details" CACHE STRING "Output format of the coverage report.") + set(coverage_extra_args "" CACHE STRING "Additional arguments to pass to gcovr.") + option(wextra "compile with extra gcc/clang warnings enabled" ON) +else () + set(profile OFF CACHE BOOL "gcc/clang only" FORCE) + set(coverage OFF CACHE BOOL "gcc/clang only" FORCE) + set(wextra OFF CACHE BOOL "gcc/clang only" FORCE) +endif () -if(is_linux AND NOT SANITIZER) - option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF) - option(static "link protobuf, openssl, libc++, and boost statically" ON) - option(perf "Enables flags that assist with perf recording" OFF) - option(use_gold "enables detection of gold (binutils) linker" ON) - option(use_mold "enables detection of mold (binutils) linker" ON) - # Set a default value for the log flag based on the build type. - # This provides a sensible default (on for debug, off for release) - # while still allowing the user to override it for any build. - if(CMAKE_BUILD_TYPE STREQUAL "Debug") - set(TRUNCATED_LOGS_DEFAULT ON) - else() - set(TRUNCATED_LOGS_DEFAULT OFF) - endif() - option(TRUNCATED_THREAD_NAME_LOGS - "Show warnings about truncated thread names on Linux." - ${TRUNCATED_LOGS_DEFAULT} - ) - if(TRUNCATED_THREAD_NAME_LOGS) - add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS) - endif() -else() - # we are not ready to allow shared-libs on windows because it would require - # export declarations. On macos it's more feasible, but static openssl - # produces odd linker errors, thus we disable shared lib builds for now. - set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE) - set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE) - set(perf OFF CACHE BOOL "perf flags, linux only" FORCE) - set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE) - set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE) -endif() +if (is_linux AND NOT SANITIZER) + option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF) + option(static "link protobuf, openssl, libc++, and boost statically" ON) + option(perf "Enables flags that assist with perf recording" OFF) + option(use_gold "enables detection of gold (binutils) linker" ON) + option(use_mold "enables detection of mold (binutils) linker" ON) + # Set a default value for the log flag based on the build type. This provides a sensible default (on for debug, off + # for release) while still allowing the user to override it for any build. + if (CMAKE_BUILD_TYPE STREQUAL "Debug") + set(TRUNCATED_LOGS_DEFAULT ON) + else () + set(TRUNCATED_LOGS_DEFAULT OFF) + endif () + option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux." ${TRUNCATED_LOGS_DEFAULT}) + if (TRUNCATED_THREAD_NAME_LOGS) + add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS) + endif () +else () + # we are not ready to allow shared-libs on windows because it would require export declarations. On macos it's more + # feasible, but static openssl produces odd linker errors, thus we disable shared lib builds for now. + set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE) + set(static ON CACHE BOOL "static link, linux only. 
ON for WIN/macos" FORCE) + set(perf OFF CACHE BOOL "perf flags, linux only" FORCE) + set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE) + set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE) +endif () -if(is_clang) - option(use_lld "enables detection of lld linker" ON) -else() - set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE) -endif() +if (is_clang) + option(use_lld "enables detection of lld linker" ON) +else () + set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE) +endif () option(jemalloc "Enables jemalloc for heap profiling" OFF) option(werr "treat warnings as errors" OFF) -option(local_protobuf - "Force a local build of protobuf instead of looking for an installed version." OFF) -option(local_grpc - "Force a local build of gRPC instead of looking for an installed version." OFF) +option(local_protobuf "Force a local build of protobuf instead of looking for an installed version." OFF) +option(local_grpc "Force a local build of gRPC instead of looking for an installed version." OFF) # the remaining options are obscure and rarely used -option(beast_no_unit_test_inline - "Prevents unit test definitions from being inserted into global table" - OFF) -option(single_io_service_thread - "Restricts the number of threads calling io_context::run to one. \ - This can be useful when debugging." - OFF) -option(boost_show_deprecated - "Allow boost to fail on deprecated usage. Only useful if you're trying\ - to find deprecated calls." - OFF) +option(beast_no_unit_test_inline "Prevents unit test definitions from being inserted into global table" OFF) +option(single_io_service_thread "Restricts the number of threads calling io_context::run to one. \ + This can be useful when debugging." OFF) +option(boost_show_deprecated "Allow boost to fail on deprecated usage. Only useful if you're trying\ + to find deprecated calls." 
OFF) -if(WIN32) - option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF) -else() - set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE) -endif() +if (WIN32) + option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF) +else () + set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE) +endif () -if(coverage) - message(STATUS "coverage build requested - forcing Debug build") - set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE) -endif() +if (coverage) + message(STATUS "coverage build requested - forcing Debug build") + set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE) +endif () diff --git a/cmake/XrplValidatorKeys.cmake b/cmake/XrplValidatorKeys.cmake index fa520ce9c1..5fa7e14886 100644 --- a/cmake/XrplValidatorKeys.cmake +++ b/cmake/XrplValidatorKeys.cmake @@ -1,20 +1,17 @@ -option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF) +option(validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF) if (validator_keys) - git_branch (current_branch) - # default to tracking VK master branch unless we are on release - if (NOT (current_branch STREQUAL "release")) - set (current_branch "master") - endif () - message (STATUS "Tracking ValidatorKeys branch: ${current_branch}") + git_branch(current_branch) + # default to tracking VK master branch unless we are on release + if (NOT (current_branch STREQUAL "release")) + set(current_branch "master") + endif () + message(STATUS "Tracking ValidatorKeys branch: ${current_branch}") - FetchContent_Declare ( - validator_keys - GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git - GIT_TAG "${current_branch}" - ) - FetchContent_MakeAvailable(validator_keys) - set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}") - install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) + FetchContent_Declare(validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git + GIT_TAG "${current_branch}") + FetchContent_MakeAvailable(validator_keys) + set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}") + install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) endif () diff --git a/cmake/XrplVersion.cmake b/cmake/XrplVersion.cmake index e42f942ecb..6eeda7b1eb 100644 --- a/cmake/XrplVersion.cmake +++ b/cmake/XrplVersion.cmake @@ -3,13 +3,13 @@ #]===================================================================] file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO) -foreach(line_ ${BUILD_INFO}) - if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"") - set(xrpld_version ${CMAKE_MATCH_1}) - endif() -endforeach() -if(xrpld_version) - message(STATUS "xrpld version: ${xrpld_version}") -else() - message(FATAL_ERROR "unable to determine xrpld version") -endif() +foreach (line_ ${BUILD_INFO}) + if (line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"") + set(xrpld_version ${CMAKE_MATCH_1}) + endif () +endforeach () +if (xrpld_version) + message(STATUS "xrpld version: ${xrpld_version}") +else () + message(FATAL_ERROR "unable to determine xrpld version") +endif () diff --git a/cmake/create_symbolic_link.cmake b/cmake/create_symbolic_link.cmake index 60fcf2e0b5..82ca915f5c 100644 --- a/cmake/create_symbolic_link.cmake +++ b/cmake/create_symbolic_link.cmake @@ -1,20 +1,19 @@ -# file(CREATE_SYMLINK) only works on Windows with administrator 
privileges. -# https://stackoverflow.com/a/61244115/618906 -function(create_symbolic_link target link) - if(WIN32) - if(NOT IS_SYMLINK "${link}") - if(NOT IS_ABSOLUTE "${target}") - # Relative links work do not work on Windows. - set(target "${link}/../${target}") - endif() - file(TO_NATIVE_PATH "${target}" target) - file(TO_NATIVE_PATH "${link}" link) - execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}") - endif() - else() - file(CREATE_LINK "${target}" "${link}" SYMBOLIC) - endif() - if(NOT IS_SYMLINK "${link}") - message(ERROR "failed to create symlink: <${link}>") - endif() -endfunction() +# file(CREATE_SYMLINK) only works on Windows with administrator privileges. https://stackoverflow.com/a/61244115/618906 +function (create_symbolic_link target link) + if (WIN32) + if (NOT IS_SYMLINK "${link}") + if (NOT IS_ABSOLUTE "${target}") + # Relative links work do not work on Windows. + set(target "${link}/../${target}") + endif () + file(TO_NATIVE_PATH "${target}" target) + file(TO_NATIVE_PATH "${link}" link) + execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}") + endif () + else () + file(CREATE_LINK "${target}" "${link}" SYMBOLIC) + endif () + if (NOT IS_SYMLINK "${link}") + message(ERROR "failed to create symlink: <${link}>") + endif () +endfunction () diff --git a/cmake/deps/Boost.cmake b/cmake/deps/Boost.cmake index 49025ae342..fe62fcb79c 100644 --- a/cmake/deps/Boost.cmake +++ b/cmake/deps/Boost.cmake @@ -2,48 +2,43 @@ include(CompilationEnv) include(XrplSanitizers) find_package(Boost REQUIRED - COMPONENTS - chrono - container - coroutine - date_time - filesystem - json - program_options - regex - system - thread -) + COMPONENTS chrono + container + coroutine + date_time + filesystem + json + program_options + regex + system + thread) add_library(xrpl_boost INTERFACE) add_library(Xrpl::boost ALIAS xrpl_boost) -target_link_libraries(xrpl_boost - INTERFACE - Boost::headers - Boost::chrono - Boost::container - Boost::coroutine - Boost::date_time - Boost::filesystem - Boost::json - Boost::process - Boost::program_options - Boost::regex - Boost::thread) -if(Boost_COMPILER) - target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking) -endif() -if(SANITIZERS_ENABLED AND is_clang) - # TODO: gcc does not support -fsanitize-blacklist...can we do something else - # for gcc ? - if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers) - get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES) - endif() - message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist") - file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*") - target_compile_options(opts - INTERFACE - # ignore boost headers for sanitizing - -fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt) -endif() +target_link_libraries( + xrpl_boost + INTERFACE Boost::headers + Boost::chrono + Boost::container + Boost::coroutine + Boost::date_time + Boost::filesystem + Boost::json + Boost::process + Boost::program_options + Boost::regex + Boost::thread) +if (Boost_COMPILER) + target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking) +endif () +if (SANITIZERS_ENABLED AND is_clang) + # TODO: gcc does not support -fsanitize-blacklist...can we do something else for gcc ? 
+ if (NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers) + get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES) + endif () + message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist") + file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*") + target_compile_options(opts INTERFACE # ignore boost headers for sanitizing + -fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt) +endif () diff --git a/src/tests/libxrpl/CMakeLists.txt b/src/tests/libxrpl/CMakeLists.txt index 72d7d0fa92..cfa056b0aa 100644 --- a/src/tests/libxrpl/CMakeLists.txt +++ b/src/tests/libxrpl/CMakeLists.txt @@ -8,12 +8,8 @@ add_custom_target(xrpl.tests) # Test helpers add_library(xrpl.helpers.test STATIC) -target_sources(xrpl.helpers.test PRIVATE - helpers/TestSink.cpp -) -target_include_directories(xrpl.helpers.test PUBLIC - ${CMAKE_CURRENT_SOURCE_DIR} -) +target_sources(xrpl.helpers.test PRIVATE helpers/TestSink.cpp) +target_include_directories(xrpl.helpers.test PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}) target_link_libraries(xrpl.helpers.test PRIVATE xrpl.libxrpl) # Common library dependencies for the rest of the tests. @@ -34,8 +30,8 @@ target_link_libraries(xrpl.test.json PRIVATE xrpl.imports.test) add_dependencies(xrpl.tests xrpl.test.json) # Network unit tests are currently not supported on Windows -if(NOT WIN32) - xrpl_add_test(net) - target_link_libraries(xrpl.test.net PRIVATE xrpl.imports.test) - add_dependencies(xrpl.tests xrpl.test.net) -endif() +if (NOT WIN32) + xrpl_add_test(net) + target_link_libraries(xrpl.test.net PRIVATE xrpl.imports.test) + add_dependencies(xrpl.tests xrpl.test.net) +endif () From fe9c8d568fcf6ac21483024e01f58962dd5c8260 Mon Sep 17 00:00:00 2001 From: Ayaz Salikhov Date: Thu, 29 Jan 2026 18:19:32 +0000 Subject: [PATCH 2/9] chore: Format all cmake files without comments (#6294) --- .cmake-format.yaml | 13 +- .pre-commit-config.yaml | 12 -- cmake/CodeCoverage.cmake | 309 ++++++++++++++-------------- cmake/XrplCore.cmake | 240 ++++++++------------- cmake/XrplDocs.cmake | 93 ++++----- cmake/XrplInstall.cmake | 99 ++++----- cmake/add_module.cmake | 34 +-- cmake/isolate_headers.cmake | 18 +- cmake/target_link_modules.cmake | 32 +-- cmake/target_protobuf_sources.cmake | 40 ++-- tests/conan/CMakeLists.txt | 6 +- 11 files changed, 386 insertions(+), 510 deletions(-) diff --git a/.cmake-format.yaml b/.cmake-format.yaml index 1c4d6684e8..1a3337fe8c 100644 --- a/.cmake-format.yaml +++ b/.cmake-format.yaml @@ -2,6 +2,17 @@ _help_parse: Options affecting listfile parsing parse: _help_additional_commands: - Specify structure for custom cmake functions + additional_commands: + target_protobuf_sources: + pargs: + - target + - prefix + kwargs: + PROTOS: "*" + LANGUAGE: cpp + IMPORT_DIRS: "*" + GENERATE_EXTENSIONS: "*" + PLUGIN: "*" _help_override_spec: - Override configurations per-command where available override_spec: {} @@ -159,7 +170,7 @@ markup: canonicalize_hashrulers: true _help_enable_markup: - enable comment markup parsing and reflow - enable_markup: true + enable_markup: false _help_lint: Options affecting the linter lint: _help_disabled_codes: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d38c11af38..79a3e4e7ec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,18 +31,6 @@ repos: hooks: - id: cmake-format additional_dependencies: [PyYAML] - exclude: | - (?x)^( - cmake/CodeCoverage.cmake| - cmake/XrplCore.cmake| - cmake/XrplDocs.cmake| - cmake/XrplInstall.cmake| - 
cmake/add_module.cmake| - cmake/isolate_headers.cmake| - cmake/target_link_modules.cmake| - cmake/target_protobuf_sources.cmake| - tests/conan/CMakeLists.txt - )$ - repo: https://github.com/rbubley/mirrors-prettier rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2 diff --git a/cmake/CodeCoverage.cmake b/cmake/CodeCoverage.cmake index e1b44e656d..0178d68cc0 100644 --- a/cmake/CodeCoverage.cmake +++ b/cmake/CodeCoverage.cmake @@ -172,51 +172,47 @@ include(CMakeParseArguments) option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE) # Check prereqs -find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test) +find_program(GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test) -if(DEFINED CODE_COVERAGE_GCOV_TOOL) - set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}") -elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL}) - set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}") -elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") - if(APPLE) - execute_process( COMMAND xcrun -f llvm-cov - OUTPUT_VARIABLE LLVMCOV_PATH - OUTPUT_STRIP_TRAILING_WHITESPACE - ) - else() - find_program( LLVMCOV_PATH llvm-cov ) - endif() - if(LLVMCOV_PATH) - set(GCOV_TOOL "${LLVMCOV_PATH} gcov") - endif() -elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU") - find_program( GCOV_PATH gcov ) - set(GCOV_TOOL "${GCOV_PATH}") -endif() +if (DEFINED CODE_COVERAGE_GCOV_TOOL) + set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}") +elseif (DEFINED ENV{CODE_COVERAGE_GCOV_TOOL}) + set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}") +elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") + if (APPLE) + execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH OUTPUT_STRIP_TRAILING_WHITESPACE) + else () + find_program(LLVMCOV_PATH llvm-cov) + endif () + if (LLVMCOV_PATH) + set(GCOV_TOOL "${LLVMCOV_PATH} gcov") + endif () +elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU") + find_program(GCOV_PATH gcov) + set(GCOV_TOOL "${GCOV_PATH}") +endif () # Check supported compiler (Clang, GNU and Flang) get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) -foreach(LANG ${LANGUAGES}) - if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") - if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3) - message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...") - endif() - elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" - AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang") - message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...") - endif() -endforeach() +foreach (LANG ${LANGUAGES}) + if ("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") + if ("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3) + message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...") + endif () + elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES + "(LLVM)?[Ff]lang") + message(FATAL_ERROR "Compiler is not GNU or Flang! 
Aborting...") + endif () +endforeach () -set(COVERAGE_COMPILER_FLAGS "-g --coverage" - CACHE INTERNAL "") +set(COVERAGE_COMPILER_FLAGS "-g --coverage" CACHE INTERNAL "") set(COVERAGE_CXX_COMPILER_FLAGS "") set(COVERAGE_C_COMPILER_FLAGS "") set(COVERAGE_CXX_LINKER_FLAGS "") set(COVERAGE_C_LINKER_FLAGS "") -if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)") +if (CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)") include(CheckCXXCompilerFlag) include(CheckCCompilerFlag) include(CheckLinkerFlag) @@ -227,51 +223,51 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)") set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS}) check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path) - if(HAVE_cxx_fprofile_abs_path) + if (HAVE_cxx_fprofile_abs_path) set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path") - endif() + endif () check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path) - if(HAVE_c_fprofile_abs_path) + if (HAVE_c_fprofile_abs_path) set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path") - endif() + endif () check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path) - if(HAVE_cxx_linker_fprofile_abs_path) + if (HAVE_cxx_linker_fprofile_abs_path) set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path") - endif() + endif () check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path) - if(HAVE_c_linker_fprofile_abs_path) + if (HAVE_c_linker_fprofile_abs_path) set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path") - endif() + endif () check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update) - if(HAVE_cxx_fprofile_update) + if (HAVE_cxx_fprofile_update) set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic") - endif() + endif () check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update) - if(HAVE_c_fprofile_update) + if (HAVE_c_fprofile_update) set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic") - endif() + endif () check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update) - if(HAVE_cxx_linker_fprofile_update) + if (HAVE_cxx_linker_fprofile_update) set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic") - endif() + endif () check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update) - if(HAVE_c_linker_fprofile_update) + if (HAVE_c_linker_fprofile_update) set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic") - endif() + endif () -endif() +endif () get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) -if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)) +if (NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)) message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading") -endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG) +endif () # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG) # Defines a target for running and collection code coverage information # Builds dependencies, runs the given executable and outputs reports. @@ -295,193 +291,186 @@ endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG) # ) # The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the # GCVOR command. 
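# A minimal hypothetical invocation, using only the keyword arguments parsed below
# (NAME, FORMAT, BASE_DIRECTORY, EXECUTABLE, EXECUTABLE_ARGS, EXCLUDE, DEPENDENCIES);
# the target name, format, executable, and paths shown here are placeholders rather
# than this project's actual coverage settings:
#
# setup_target_for_coverage_gcovr(
#     NAME coverage
#     FORMAT html-details
#     EXECUTABLE ctest
#     EXECUTABLE_ARGS --output-on-failure
#     EXCLUDE "${PROJECT_SOURCE_DIR}/external/*"
#     DEPENDENCIES xrpld
# )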
-function(setup_target_for_coverage_gcovr) +function (setup_target_for_coverage_gcovr) set(options NONE) set(oneValueArgs BASE_DIRECTORY NAME FORMAT) set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES) cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - if(NOT GCOV_TOOL) + if (NOT GCOV_TOOL) message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...") - endif() + endif () - if(NOT GCOVR_PATH) + if (NOT GCOVR_PATH) message(FATAL_ERROR "Could not find gcovr tool! Aborting...") - endif() + endif () # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR - if(DEFINED Coverage_BASE_DIRECTORY) + if (DEFINED Coverage_BASE_DIRECTORY) get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) - else() + else () set(BASEDIR ${PROJECT_SOURCE_DIR}) - endif() + endif () - if(NOT DEFINED Coverage_FORMAT) + if (NOT DEFINED Coverage_FORMAT) set(Coverage_FORMAT xml) - endif() + endif () - if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS) + if (NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS) message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set") - endif() + endif () - if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS) + if ("--output" IN_LIST GCOVR_ADDITIONAL_ARGS) message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...") - else() - if((Coverage_FORMAT STREQUAL "html-details") - OR (Coverage_FORMAT STREQUAL "html-nested")) + else () + if ((Coverage_FORMAT STREQUAL "html-details") OR (Coverage_FORMAT STREQUAL "html-nested")) set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html) set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME}) - elseif(Coverage_FORMAT STREQUAL "html-single") + elseif (Coverage_FORMAT STREQUAL "html-single") set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html) - elseif((Coverage_FORMAT STREQUAL "json-summary") - OR (Coverage_FORMAT STREQUAL "json-details") - OR (Coverage_FORMAT STREQUAL "coveralls")) + elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL "json-details") + OR (Coverage_FORMAT STREQUAL "coveralls")) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json) - elseif(Coverage_FORMAT STREQUAL "txt") + elseif (Coverage_FORMAT STREQUAL "txt") set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt) - elseif(Coverage_FORMAT STREQUAL "csv") + elseif (Coverage_FORMAT STREQUAL "csv") set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv) - elseif(Coverage_FORMAT STREQUAL "lcov") + elseif (Coverage_FORMAT STREQUAL "lcov") set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.lcov) - else() + else () set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml) - endif() - endif() + endif () + endif () - if((Coverage_FORMAT STREQUAL "cobertura") - OR (Coverage_FORMAT STREQUAL "xml")) - list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" ) - list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty ) + if ((Coverage_FORMAT STREQUAL "cobertura") OR (Coverage_FORMAT STREQUAL "xml")) + list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}") + list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty) set(Coverage_FORMAT cobertura) # overwrite xml - elseif(Coverage_FORMAT STREQUAL "sonarqube") - list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" ) - elseif(Coverage_FORMAT STREQUAL "jacoco") - list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}" ) - list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty ) - elseif(Coverage_FORMAT STREQUAL "clover") - list(APPEND 
GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}" ) - list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty ) - elseif(Coverage_FORMAT STREQUAL "lcov") - list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}" ) - elseif(Coverage_FORMAT STREQUAL "json-summary") - list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" ) + elseif (Coverage_FORMAT STREQUAL "sonarqube") + list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}") + elseif (Coverage_FORMAT STREQUAL "jacoco") + list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}") + list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty) + elseif (Coverage_FORMAT STREQUAL "clover") + list(APPEND GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}") + list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty) + elseif (Coverage_FORMAT STREQUAL "lcov") + list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}") + elseif (Coverage_FORMAT STREQUAL "json-summary") + list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty) - elseif(Coverage_FORMAT STREQUAL "json-details") - list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" ) + elseif (Coverage_FORMAT STREQUAL "json-details") + list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty) - elseif(Coverage_FORMAT STREQUAL "coveralls") - list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" ) + elseif (Coverage_FORMAT STREQUAL "coveralls") + list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty) - elseif(Coverage_FORMAT STREQUAL "csv") - list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" ) - elseif(Coverage_FORMAT STREQUAL "txt") - list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" ) - elseif(Coverage_FORMAT STREQUAL "html-single") - list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" ) + elseif (Coverage_FORMAT STREQUAL "csv") + list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}") + elseif (Coverage_FORMAT STREQUAL "txt") + list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}") + elseif (Coverage_FORMAT STREQUAL "html-single") + list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained) - elseif(Coverage_FORMAT STREQUAL "html-nested") - list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" ) - elseif(Coverage_FORMAT STREQUAL "html-details") - list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" ) - else() + elseif (Coverage_FORMAT STREQUAL "html-nested") + list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}") + elseif (Coverage_FORMAT STREQUAL "html-details") + list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}") + else () message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! 
Aborting...") - endif() + endif () # Collect excludes (CMake 3.4+: Also compute absolute paths) set(GCOVR_EXCLUDES "") - foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES}) - if(CMAKE_VERSION VERSION_GREATER 3.4) + foreach (EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES}) + if (CMAKE_VERSION VERSION_GREATER 3.4) get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) - endif() + endif () list(APPEND GCOVR_EXCLUDES "${EXCLUDE}") - endforeach() + endforeach () list(REMOVE_DUPLICATES GCOVR_EXCLUDES) # Combine excludes to several -e arguments set(GCOVR_EXCLUDE_ARGS "") - foreach(EXCLUDE ${GCOVR_EXCLUDES}) + foreach (EXCLUDE ${GCOVR_EXCLUDES}) list(APPEND GCOVR_EXCLUDE_ARGS "-e") list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}") - endforeach() + endforeach () # Set up commands which will be run to generate coverage data # If EXECUTABLE is not set, the user is expected to run the tests manually # before running the coverage target NAME - if(DEFINED Coverage_EXECUTABLE) - set(GCOVR_EXEC_TESTS_CMD - ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS} - ) - endif() + if (DEFINED Coverage_EXECUTABLE) + set(GCOVR_EXEC_TESTS_CMD ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}) + endif () # Create folder - if(DEFINED GCOVR_CREATE_FOLDER) - set(GCOVR_FOLDER_CMD - ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER}) - endif() + if (DEFINED GCOVR_CREATE_FOLDER) + set(GCOVR_FOLDER_CMD ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER}) + endif () # Running gcovr set(GCOVR_CMD ${GCOVR_PATH} - --gcov-executable ${GCOV_TOOL} + --gcov-executable + ${GCOV_TOOL} --gcov-ignore-parse-errors=negative_hits.warn_once_per_file - -r ${BASEDIR} + -r + ${BASEDIR} ${GCOVR_ADDITIONAL_ARGS} ${GCOVR_EXCLUDE_ARGS} - --object-directory=${PROJECT_BINARY_DIR} - ) + --object-directory=${PROJECT_BINARY_DIR}) - if(CODE_COVERAGE_VERBOSE) + if (CODE_COVERAGE_VERBOSE) message(STATUS "Executed command report") - if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "") + if (NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "") message(STATUS "Command to run tests: ") string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}") message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}") - endif() + endif () - if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "") + if (NOT "${GCOVR_FOLDER_CMD}" STREQUAL "") message(STATUS "Command to create a folder: ") string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}") message(STATUS "${GCOVR_FOLDER_CMD_SPACED}") - endif() + endif () message(STATUS "Command to generate gcovr coverage data: ") string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}") message(STATUS "${GCOVR_CMD_SPACED}") - endif() + endif () add_custom_target(${Coverage_NAME} - COMMAND ${GCOVR_EXEC_TESTS_CMD} - COMMAND ${GCOVR_FOLDER_CMD} - COMMAND ${GCOVR_CMD} - - BYPRODUCTS ${GCOVR_OUTPUT_FILE} - WORKING_DIRECTORY ${PROJECT_BINARY_DIR} - DEPENDS ${Coverage_DEPENDENCIES} - VERBATIM # Protect arguments to commands - COMMENT "Running gcovr to produce code coverage report." 
- ) + COMMAND ${GCOVR_EXEC_TESTS_CMD} + COMMAND ${GCOVR_FOLDER_CMD} + COMMAND ${GCOVR_CMD} + BYPRODUCTS ${GCOVR_OUTPUT_FILE} + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Running gcovr to produce code coverage report.") # Show info where to find the report - add_custom_command(TARGET ${Coverage_NAME} POST_BUILD - COMMAND echo - COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}" - ) -endfunction() # setup_target_for_coverage_gcovr + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD COMMAND echo + COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}") +endfunction () # setup_target_for_coverage_gcovr -function(add_code_coverage_to_target name scope) +function (add_code_coverage_to_target name scope) separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}") separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}") separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}") separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}") # Add compiler options to the target - target_compile_options(${name} ${scope} - $<$:${COVERAGE_CXX_COMPILER_FLAGS}> - $<$:${COVERAGE_C_COMPILER_FLAGS}>) + target_compile_options(${name} ${scope} $<$:${COVERAGE_CXX_COMPILER_FLAGS}> + $<$:${COVERAGE_C_COMPILER_FLAGS}>) - target_link_libraries (${name} ${scope} - $<$:${COVERAGE_CXX_LINKER_FLAGS} gcov> - $<$:${COVERAGE_C_LINKER_FLAGS} gcov> - ) -endfunction() # add_code_coverage_to_target + target_link_libraries( + ${name} + ${scope} + $<$:${COVERAGE_CXX_LINKER_FLAGS} + gcov> + $<$:${COVERAGE_C_LINKER_FLAGS} + gcov>) +endfunction () # add_code_coverage_to_target diff --git a/cmake/XrplCore.cmake b/cmake/XrplCore.cmake index 2e50cd2f7a..0651b8e0d8 100644 --- a/cmake/XrplCore.cmake +++ b/cmake/XrplCore.cmake @@ -10,63 +10,44 @@ include(target_protobuf_sources) # so we just build them as a separate library. 
add_library(xrpl.libpb) set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF) -target_protobuf_sources(xrpl.libpb xrpl/proto - LANGUAGE cpp - IMPORT_DIRS include/xrpl/proto - PROTOS include/xrpl/proto/xrpl.proto -) +target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto + PROTOS include/xrpl/proto/xrpl.proto) file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto") -target_protobuf_sources(xrpl.libpb xrpl/proto - LANGUAGE cpp - IMPORT_DIRS include/xrpl/proto - PROTOS "${protos}" -) -target_protobuf_sources(xrpl.libpb xrpl/proto - LANGUAGE grpc - IMPORT_DIRS include/xrpl/proto - PROTOS "${protos}" - PLUGIN protoc-gen-grpc=$ - GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc -) +target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto PROTOS "${protos}") +target_protobuf_sources( + xrpl.libpb xrpl/proto + LANGUAGE grpc + IMPORT_DIRS include/xrpl/proto + PROTOS "${protos}" + PLUGIN protoc-gen-grpc=$ + GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc) -target_compile_options(xrpl.libpb - PUBLIC - $<$:-wd4996> - $<$: - --system-header-prefix="google/protobuf" - -Wno-deprecated-dynamic-exception-spec - > - PRIVATE - $<$:-wd4065> - $<$>:-Wno-deprecated-declarations> -) +target_compile_options( + xrpl.libpb PUBLIC $<$:-wd4996> $<$: --system-header-prefix="google/protobuf" + -Wno-deprecated-dynamic-exception-spec > + PRIVATE $<$:-wd4065> $<$>:-Wno-deprecated-declarations>) -target_link_libraries(xrpl.libpb - PUBLIC - protobuf::libprotobuf - gRPC::grpc++ -) +target_link_libraries(xrpl.libpb PUBLIC protobuf::libprotobuf gRPC::grpc++) # TODO: Clean up the number of library targets later. add_library(xrpl.imports.main INTERFACE) -target_link_libraries(xrpl.imports.main - INTERFACE - absl::random_random - date::date - ed25519::ed25519 - LibArchive::LibArchive - OpenSSL::Crypto - Xrpl::boost - Xrpl::libs - Xrpl::opts - Xrpl::syslibs - secp256k1::secp256k1 - xrpl.libpb - xxHash::xxhash - $<$:antithesis-sdk-cpp> -) +target_link_libraries( + xrpl.imports.main + INTERFACE absl::random_random + date::date + ed25519::ed25519 + LibArchive::LibArchive + OpenSSL::Crypto + Xrpl::boost + Xrpl::libs + Xrpl::opts + Xrpl::syslibs + secp256k1::secp256k1 + xrpl.libpb + xxHash::xxhash + $<$:antithesis-sdk-cpp>) include(add_module) include(target_link_modules) @@ -88,18 +69,11 @@ target_link_libraries(xrpl.libxrpl.crypto PUBLIC xrpl.libxrpl.basics) # Level 04 add_module(xrpl protocol) -target_link_libraries(xrpl.libxrpl.protocol PUBLIC - xrpl.libxrpl.crypto - xrpl.libxrpl.json -) +target_link_libraries(xrpl.libxrpl.protocol PUBLIC xrpl.libxrpl.crypto xrpl.libxrpl.json) # Level 05 add_module(xrpl core) -target_link_libraries(xrpl.libxrpl.core PUBLIC - xrpl.libxrpl.basics - xrpl.libxrpl.json - xrpl.libxrpl.protocol -) +target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol) # Level 06 add_module(xrpl resource) @@ -107,62 +81,45 @@ target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol) # Level 07 add_module(xrpl net) -target_link_libraries(xrpl.libxrpl.net PUBLIC - xrpl.libxrpl.basics - xrpl.libxrpl.json - xrpl.libxrpl.protocol - xrpl.libxrpl.resource -) +target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol + xrpl.libxrpl.resource) add_module(xrpl server) target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol) add_module(xrpl nodestore) -target_link_libraries(xrpl.libxrpl.nodestore PUBLIC - xrpl.libxrpl.basics - 
xrpl.libxrpl.json - xrpl.libxrpl.protocol -) +target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol) add_module(xrpl shamap) -target_link_libraries(xrpl.libxrpl.shamap PUBLIC - xrpl.libxrpl.basics - xrpl.libxrpl.crypto - xrpl.libxrpl.protocol - xrpl.libxrpl.nodestore -) +target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto xrpl.libxrpl.protocol + xrpl.libxrpl.nodestore) add_module(xrpl ledger) -target_link_libraries(xrpl.libxrpl.ledger PUBLIC - xrpl.libxrpl.basics - xrpl.libxrpl.json - xrpl.libxrpl.protocol -) +target_link_libraries(xrpl.libxrpl.ledger PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol) add_library(xrpl.libxrpl) set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl) add_library(xrpl::libxrpl ALIAS xrpl.libxrpl) -file(GLOB_RECURSE sources CONFIGURE_DEPENDS - "${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp" -) +file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp") target_sources(xrpl.libxrpl PRIVATE ${sources}) -target_link_modules(xrpl PUBLIC - basics - beast - core - crypto - json - protocol - resource - server - nodestore - shamap - net - ledger -) +target_link_modules( + xrpl + PUBLIC + basics + beast + core + crypto + json + protocol + resource + server + nodestore + shamap + net + ledger) # All headers in libxrpl are in modules. # Uncomment this stanza if you have not yet moved new headers into a module. @@ -173,63 +130,42 @@ target_link_modules(xrpl PUBLIC # $ # $) -if(xrpld) - add_executable(xrpld) - if(tests) - target_compile_definitions(xrpld PUBLIC ENABLE_TESTS) - target_compile_definitions(xrpld PRIVATE - UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE} - ) - endif() - target_include_directories(xrpld - PRIVATE - $ - ) +if (xrpld) + add_executable(xrpld) + if (tests) + target_compile_definitions(xrpld PUBLIC ENABLE_TESTS) + target_compile_definitions(xrpld PRIVATE UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}) + endif () + target_include_directories(xrpld PRIVATE $) - file(GLOB_RECURSE sources CONFIGURE_DEPENDS - "${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp" - ) - target_sources(xrpld PRIVATE ${sources}) - - if(tests) - file(GLOB_RECURSE sources CONFIGURE_DEPENDS - "${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp" - ) + file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp") target_sources(xrpld PRIVATE ${sources}) - endif() - target_link_libraries(xrpld - Xrpl::boost - Xrpl::opts - Xrpl::libs - xrpl.libxrpl - ) - exclude_if_included(xrpld) - # define a macro for tests that might need to - # be excluded or run differently in CI environment - if(is_ci) - target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI) - endif () + if (tests) + file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp") + target_sources(xrpld PRIVATE ${sources}) + endif () - if(voidstar) - target_compile_options(xrpld - PRIVATE - -fsanitize-coverage=trace-pc-guard - ) - # xrpld requires access to antithesis-sdk-cpp implementation file - # antithesis_instrumentation.h, which is not exported as INTERFACE - target_include_directories(xrpld - PRIVATE - ${CMAKE_SOURCE_DIR}/external/antithesis-sdk - ) - endif() + target_link_libraries(xrpld Xrpl::boost Xrpl::opts Xrpl::libs xrpl.libxrpl) + exclude_if_included(xrpld) + # define a macro for tests that might need to + # be excluded or run differently in CI environment + if (is_ci) + target_compile_definitions(xrpld PRIVATE 
XRPL_RUNNING_IN_CI) + endif () - # any files that don't play well with unity should be added here - if(tests) - set_source_files_properties( - # these two seem to produce conflicts in beast teardown template methods - src/test/rpc/ValidatorRPC_test.cpp - src/test/ledger/Invariants_test.cpp - PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE) - endif() -endif() + if (voidstar) + target_compile_options(xrpld PRIVATE -fsanitize-coverage=trace-pc-guard) + # xrpld requires access to antithesis-sdk-cpp implementation file + # antithesis_instrumentation.h, which is not exported as INTERFACE + target_include_directories(xrpld PRIVATE ${CMAKE_SOURCE_DIR}/external/antithesis-sdk) + endif () + + # any files that don't play well with unity should be added here + if (tests) + set_source_files_properties( + # these two seem to produce conflicts in beast teardown template methods + src/test/rpc/ValidatorRPC_test.cpp src/test/ledger/Invariants_test.cpp PROPERTIES SKIP_UNITY_BUILD_INCLUSION + TRUE) + endif () +endif () diff --git a/cmake/XrplDocs.cmake b/cmake/XrplDocs.cmake index a99bbe076b..69581a99c7 100644 --- a/cmake/XrplDocs.cmake +++ b/cmake/XrplDocs.cmake @@ -2,45 +2,44 @@ docs target (optional) #]===================================================================] -if(NOT only_docs) - return() -endif() +if (NOT only_docs) + return() +endif () find_package(Doxygen) -if(NOT TARGET Doxygen::doxygen) - message(STATUS "doxygen executable not found -- skipping docs target") - return() -endif() +if (NOT TARGET Doxygen::doxygen) + message(STATUS "doxygen executable not found -- skipping docs target") + return() +endif () set(doxygen_output_directory "${CMAKE_BINARY_DIR}/docs") set(doxygen_include_path "${CMAKE_CURRENT_SOURCE_DIR}/src") set(doxygen_index_file "${doxygen_output_directory}/html/index.html") set(doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile") -file(GLOB_RECURSE doxygen_input - docs/*.md - include/*.h - include/*.cpp - include/*.md - src/*.h - src/*.cpp - src/*.md - Builds/*.md - *.md) -list(APPEND doxygen_input - external/README.md - ) +file(GLOB_RECURSE + doxygen_input + docs/*.md + include/*.h + include/*.cpp + include/*.md + src/*.h + src/*.cpp + src/*.md + Builds/*.md + *.md) +list(APPEND doxygen_input external/README.md) set(dependencies "${doxygen_input}" "${doxyfile}") -function(verbose_find_path variable name) - # find_path sets a CACHE variable, so don't try using a "local" variable. - find_path(${variable} "${name}" ${ARGN}) - if(NOT ${variable}) - message(NOTICE "could not find ${name}") - else() - message(STATUS "found ${name}: ${${variable}}/${name}") - endif() -endfunction() +function (verbose_find_path variable name) + # find_path sets a CACHE variable, so don't try using a "local" variable. 
+ find_path(${variable} "${name}" ${ARGN}) + if (NOT ${variable}) + message(NOTICE "could not find ${name}") + else () + message(STATUS "found ${name}: ${${variable}}/${name}") + endif () +endfunction () verbose_find_path(doxygen_plantuml_jar_path plantuml.jar PATH_SUFFIXES share/plantuml) verbose_find_path(doxygen_dot_path dot) @@ -48,36 +47,26 @@ verbose_find_path(doxygen_dot_path dot) # https://en.cppreference.com/w/Cppreference:Archives # https://stackoverflow.com/questions/60822559/how-to-move-a-file-download-from-configure-step-to-build-step set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake") -file(WRITE - "${download_script}" - "file(DOWNLOAD \ +file(WRITE "${download_script}" + "file(DOWNLOAD \ https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \ ${CMAKE_BINARY_DIR}/docs/cppreference.zip \ EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \ )\n \ execute_process( \ COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \ - )\n" -) + )\n") set(tagfile "${CMAKE_BINARY_DIR}/docs/cppreference-doxygen-web.tag.xml") -add_custom_command( - OUTPUT "${tagfile}" - COMMAND "${CMAKE_COMMAND}" -P "${download_script}" - WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs" -) +add_custom_command(OUTPUT "${tagfile}" COMMAND "${CMAKE_COMMAND}" -P "${download_script}" + WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs") set(doxygen_tagfiles "${tagfile}=http://en.cppreference.com/w/") add_custom_command( - OUTPUT "${doxygen_index_file}" - COMMAND "${CMAKE_COMMAND}" -E env - "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}" - "DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" - "DOXYGEN_TAGFILES=${doxygen_tagfiles}" - "DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" - "DOXYGEN_DOT_PATH=${doxygen_dot_path}" - "${DOXYGEN_EXECUTABLE}" "${doxyfile}" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - DEPENDS "${dependencies}" "${tagfile}") -add_custom_target(docs - DEPENDS "${doxygen_index_file}" - SOURCES "${dependencies}") + OUTPUT "${doxygen_index_file}" + COMMAND "${CMAKE_COMMAND}" -E env "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}" + "DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" "DOXYGEN_TAGFILES=${doxygen_tagfiles}" + "DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" "DOXYGEN_DOT_PATH=${doxygen_dot_path}" + "${DOXYGEN_EXECUTABLE}" "${doxyfile}" + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + DEPENDS "${dependencies}" "${tagfile}") +add_custom_target(docs DEPENDS "${doxygen_index_file}" SOURCES "${dependencies}") diff --git a/cmake/XrplInstall.cmake b/cmake/XrplInstall.cmake index 0599a8268c..141dc56089 100644 --- a/cmake/XrplInstall.cmake +++ b/cmake/XrplInstall.cmake @@ -6,61 +6,51 @@ include(create_symbolic_link) # If no suffix is defined for executables (e.g. Windows uses .exe but Linux # and macOS use none), then explicitly set it to the empty string. 
-if(NOT DEFINED suffix) - set(suffix "") -endif() +if (NOT DEFINED suffix) + set(suffix "") +endif () -install ( - TARGETS - common - opts - xrpl_boost - xrpl_libs - xrpl_syslibs - xrpl.imports.main - xrpl.libpb - xrpl.libxrpl - xrpl.libxrpl.basics - xrpl.libxrpl.beast - xrpl.libxrpl.core - xrpl.libxrpl.crypto - xrpl.libxrpl.json - xrpl.libxrpl.ledger - xrpl.libxrpl.net - xrpl.libxrpl.nodestore - xrpl.libxrpl.protocol - xrpl.libxrpl.resource - xrpl.libxrpl.server - xrpl.libxrpl.shamap - antithesis-sdk-cpp - EXPORT XrplExports - LIBRARY DESTINATION lib - ARCHIVE DESTINATION lib - RUNTIME DESTINATION bin - INCLUDES DESTINATION include) +install(TARGETS common + opts + xrpl_boost + xrpl_libs + xrpl_syslibs + xrpl.imports.main + xrpl.libpb + xrpl.libxrpl + xrpl.libxrpl.basics + xrpl.libxrpl.beast + xrpl.libxrpl.core + xrpl.libxrpl.crypto + xrpl.libxrpl.json + xrpl.libxrpl.ledger + xrpl.libxrpl.net + xrpl.libxrpl.nodestore + xrpl.libxrpl.protocol + xrpl.libxrpl.resource + xrpl.libxrpl.server + xrpl.libxrpl.shamap + antithesis-sdk-cpp + EXPORT XrplExports + LIBRARY DESTINATION lib + ARCHIVE DESTINATION lib + RUNTIME DESTINATION bin + INCLUDES + DESTINATION include) -install( - DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl" - DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" -) +install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") -install (EXPORT XrplExports - FILE XrplTargets.cmake - NAMESPACE Xrpl:: - DESTINATION lib/cmake/xrpl) -include (CMakePackageConfigHelpers) -write_basic_package_version_file ( - XrplConfigVersion.cmake - VERSION ${xrpld_version} - COMPATIBILITY SameMajorVersion) +install(EXPORT XrplExports FILE XrplTargets.cmake NAMESPACE Xrpl:: DESTINATION lib/cmake/xrpl) +include(CMakePackageConfigHelpers) +write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version} COMPATIBILITY SameMajorVersion) if (is_root_project AND TARGET xrpld) - install (TARGETS xrpld RUNTIME DESTINATION bin) - set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON) - # sample configs should not overwrite existing files - # install if-not-exists workaround as suggested by - # https://cmake.org/Bug/view.php?id=12646 - install(CODE " + install(TARGETS xrpld RUNTIME DESTINATION bin) + set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON) + # sample configs should not overwrite existing files + # install if-not-exists workaround as suggested by + # https://cmake.org/Bug/view.php?id=12646 + install(CODE " macro (copy_if_not_exists SRC DEST NEWNAME) if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\") file (INSTALL FILE_PERMISSIONS OWNER_READ OWNER_WRITE DESTINATION \"\${CMAKE_INSTALL_PREFIX}/\${DEST}\" FILES \"\${SRC}\" RENAME \"\${NEWNAME}\") @@ -71,7 +61,7 @@ if (is_root_project AND TARGET xrpld) copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg\" etc xrpld.cfg) copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt) ") - install(CODE " + install(CODE " set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\") include(create_symbolic_link) create_symbolic_link(xrpld${suffix} \ @@ -79,8 +69,5 @@ if (is_root_project AND TARGET xrpld) ") endif () -install ( - FILES - ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake - ${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake - DESTINATION lib/cmake/xrpl) +install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake ${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake + DESTINATION 
lib/cmake/xrpl) diff --git a/cmake/add_module.cmake b/cmake/add_module.cmake index bcfce1bf60..a3cb247fd2 100644 --- a/cmake/add_module.cmake +++ b/cmake/add_module.cmake @@ -12,26 +12,14 @@ include(isolate_headers) # add_module(parent a) # add_module(parent b) # target_link_libraries(project.libparent.b PUBLIC project.libparent.a) -function(add_module parent name) - set(target ${PROJECT_NAME}.lib${parent}.${name}) - add_library(${target} OBJECT) - file(GLOB_RECURSE sources CONFIGURE_DEPENDS - "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp" - ) - target_sources(${target} PRIVATE ${sources}) - target_include_directories(${target} PUBLIC - "$" - ) - isolate_headers( - ${target} - "${CMAKE_CURRENT_SOURCE_DIR}/include" - "${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}" - PUBLIC - ) - isolate_headers( - ${target} - "${CMAKE_CURRENT_SOURCE_DIR}/src" - "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}" - PRIVATE - ) -endfunction() +function (add_module parent name) + set(target ${PROJECT_NAME}.lib${parent}.${name}) + add_library(${target} OBJECT) + file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp") + target_sources(${target} PRIVATE ${sources}) + target_include_directories(${target} PUBLIC "$") + isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/include" + "${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}" PUBLIC) + isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src" "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}" + PRIVATE) +endfunction () diff --git a/cmake/isolate_headers.cmake b/cmake/isolate_headers.cmake index 0a5a43a6a2..ef53b3f20a 100644 --- a/cmake/isolate_headers.cmake +++ b/cmake/isolate_headers.cmake @@ -37,12 +37,12 @@ include(create_symbolic_link) # `${CMAKE_CURRENT_BINARY_DIR}/include/${target}`. 
# # isolate_headers(target A B scope) -function(isolate_headers target A B scope) - file(RELATIVE_PATH C "${A}" "${B}") - set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}") - set(Y "${X}/${C}") - cmake_path(GET Y PARENT_PATH parent) - file(MAKE_DIRECTORY "${parent}") - create_symbolic_link("${B}" "${Y}") - target_include_directories(${target} ${scope} "$") -endfunction() +function (isolate_headers target A B scope) + file(RELATIVE_PATH C "${A}" "${B}") + set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}") + set(Y "${X}/${C}") + cmake_path(GET Y PARENT_PATH parent) + file(MAKE_DIRECTORY "${parent}") + create_symbolic_link("${B}" "${Y}") + target_include_directories(${target} ${scope} "$") +endfunction () diff --git a/cmake/target_link_modules.cmake b/cmake/target_link_modules.cmake index acbf67903a..bcd80591b0 100644 --- a/cmake/target_link_modules.cmake +++ b/cmake/target_link_modules.cmake @@ -6,19 +6,19 @@ # target_link_libraries(project.libparent.b PUBLIC project.libparent.a) # add_library(project.libparent) # target_link_modules(parent PUBLIC a b) -function(target_link_modules parent scope) - set(library ${PROJECT_NAME}.lib${parent}) - foreach(name ${ARGN}) - set(module ${library}.${name}) - get_target_property(sources ${library} SOURCES) - list(LENGTH sources before) - get_target_property(dupes ${module} SOURCES) - list(LENGTH dupes expected) - list(REMOVE_ITEM sources ${dupes}) - list(LENGTH sources after) - math(EXPR actual "${before} - ${after}") - message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}") - set_target_properties(${library} PROPERTIES SOURCES "${sources}") - target_link_libraries(${library} ${scope} ${module}) - endforeach() -endfunction() +function (target_link_modules parent scope) + set(library ${PROJECT_NAME}.lib${parent}) + foreach (name ${ARGN}) + set(module ${library}.${name}) + get_target_property(sources ${library} SOURCES) + list(LENGTH sources before) + get_target_property(dupes ${module} SOURCES) + list(LENGTH dupes expected) + list(REMOVE_ITEM sources ${dupes}) + list(LENGTH sources after) + math(EXPR actual "${before} - ${after}") + message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}") + set_target_properties(${library} PROPERTIES SOURCES "${sources}") + target_link_libraries(${library} ${scope} ${module}) + endforeach () +endfunction () diff --git a/cmake/target_protobuf_sources.cmake b/cmake/target_protobuf_sources.cmake index da2ef6dc9a..bb5de02b53 100644 --- a/cmake/target_protobuf_sources.cmake +++ b/cmake/target_protobuf_sources.cmake @@ -35,28 +35,20 @@ find_package(Protobuf REQUIRED) # This prefix should appear at the start of all your consumer includes. # ARGN: # A list of .proto files. -function(target_protobuf_sources target prefix) - set(dir "${CMAKE_CURRENT_BINARY_DIR}/pb-${target}") - file(MAKE_DIRECTORY "${dir}/${prefix}") +function (target_protobuf_sources target prefix) + set(dir "${CMAKE_CURRENT_BINARY_DIR}/pb-${target}") + file(MAKE_DIRECTORY "${dir}/${prefix}") - protobuf_generate( - TARGET ${target} - PROTOC_OUT_DIR "${dir}/${prefix}" - "${ARGN}" - ) - target_include_directories(${target} SYSTEM PUBLIC - # Allows #include used by consumer files. - $ - # Allows #include "path/to/file.proto" used by generated files. - $ - # Allows #include used by consumer files. - $ - # Allows #include "path/to/file.proto" used by generated files. 
- $ - ) - install( - DIRECTORY ${dir}/ - DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} - FILES_MATCHING PATTERN "*.h" - ) -endfunction() + protobuf_generate(TARGET ${target} PROTOC_OUT_DIR "${dir}/${prefix}" "${ARGN}") + target_include_directories( + ${target} SYSTEM + PUBLIC # Allows #include used by consumer files. + $ + # Allows #include "path/to/file.proto" used by generated files. + $ + # Allows #include used by consumer files. + $ + # Allows #include "path/to/file.proto" used by generated files. + $) + install(DIRECTORY ${dir}/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} FILES_MATCHING PATTERN "*.h") +endfunction () diff --git a/tests/conan/CMakeLists.txt b/tests/conan/CMakeLists.txt index f1b37e7a69..871c4c60c0 100644 --- a/tests/conan/CMakeLists.txt +++ b/tests/conan/CMakeLists.txt @@ -3,11 +3,7 @@ cmake_minimum_required(VERSION 3.21) set(name example) set(version 0.1.0) -project( - ${name} - VERSION ${version} - LANGUAGES CXX -) +project(${name} VERSION ${version} LANGUAGES CXX) find_package(xrpl CONFIG REQUIRED) From ff4520cc45cd6ce00ac59e50b49c84af3ec43b94 Mon Sep 17 00:00:00 2001 From: Ayaz Salikhov Date: Mon, 2 Feb 2026 19:37:06 +0000 Subject: [PATCH 3/9] ci: Update hashes of XRPLF/actions (#6316) This updates the hashes of all XRPLF/actions to their latest versions. --- .github/workflows/pre-commit.yml | 2 +- .github/workflows/publish-docs.yml | 2 +- .github/workflows/reusable-build-test-config.yml | 6 +++--- .github/workflows/upload-conan-deps.yml | 6 +++--- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 6b8fd9955e..f43275201c 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -11,7 +11,7 @@ on: jobs: # Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks. 
run-hooks: - uses: XRPLF/actions/.github/workflows/pre-commit.yml@282890f46d6921249d5659dd38babcb0bd8aef48 + uses: XRPLF/actions/.github/workflows/pre-commit.yml@320be44621ca2a080f05aeb15817c44b84518108 with: runs_on: ubuntu-latest container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-ab4d1f0" }' diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index c37a82a2f3..f61559d6d3 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -36,7 +36,7 @@ jobs: uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 - name: Get number of processors - uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6 + uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf id: nproc with: subtract: ${{ env.NPROC_SUBTRACT }} diff --git a/.github/workflows/reusable-build-test-config.yml b/.github/workflows/reusable-build-test-config.yml index d298c85726..85b973ea0c 100644 --- a/.github/workflows/reusable-build-test-config.yml +++ b/.github/workflows/reusable-build-test-config.yml @@ -101,13 +101,13 @@ jobs: steps: - name: Cleanup workspace (macOS and Windows) if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }} - uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6 + uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf - name: Checkout repository uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 - name: Prepare runner - uses: XRPLF/actions/prepare-runner@f05cab7b8541eee6473aa42beb9d2fe35608a190 + uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d with: enable_ccache: ${{ inputs.ccache_enabled }} @@ -119,7 +119,7 @@ jobs: uses: ./.github/actions/print-env - name: Get number of processors - uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6 + uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf id: nproc with: subtract: ${{ inputs.nproc_subtract }} diff --git a/.github/workflows/upload-conan-deps.yml b/.github/workflows/upload-conan-deps.yml index 2dada1ef08..60696a9769 100644 --- a/.github/workflows/upload-conan-deps.yml +++ b/.github/workflows/upload-conan-deps.yml @@ -64,13 +64,13 @@ jobs: steps: - name: Cleanup workspace (macOS and Windows) if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }} - uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6 + uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf - name: Checkout repository uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 - name: Prepare runner - uses: XRPLF/actions/prepare-runner@f05cab7b8541eee6473aa42beb9d2fe35608a190 + uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d with: enable_ccache: false @@ -78,7 +78,7 @@ jobs: uses: ./.github/actions/print-env - name: Get number of processors - uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6 + uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf id: nproc with: subtract: ${{ env.NPROC_SUBTRACT }} From fe31cdc9f62b2be8981e02f0075c9f9dc0d115db Mon Sep 17 00:00:00 2001 From: Ed Hennis Date: Mon, 2 Feb 2026 19:57:10 -0400 Subject: [PATCH 4/9] chore: Add upper-case match for ARM64 in CompilationEnv (#6315) --- cmake/CompilationEnv.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/CompilationEnv.cmake b/cmake/CompilationEnv.cmake index 39ec383398..59b903c13a 100644 --- 
a/cmake/CompilationEnv.cmake
+++ b/cmake/CompilationEnv.cmake
@@ -51,7 +51,7 @@ set(is_amd64 FALSE)
 set(is_arm64 FALSE)
 if (CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64")
     set(is_amd64 TRUE)
-elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64")
+elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64")
     set(is_arm64 TRUE)
 else ()
     message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}")

From b1824301789f0e03add198e1664182711565d6e9 Mon Sep 17 00:00:00 2001
From: Bart
Date: Mon, 2 Feb 2026 20:15:56 -0500
Subject: [PATCH 5/9] fix: Restore config changes that broke standalone mode (#6301)

When support was added for `xrpld.cfg` in addition to `rippled.cfg` in
https://github.com/XRPLF/rippled/pull/6098, as part of an effort to rename
occurrences of ripple(d) to xrpl(d), the clearing and creation of the data
directory were modified in a way that, at the time, appeared to result in an
equivalent code flow. This has turned out not to be true, so this change
restores the original behavior by reverting the two modifications to
`Config.cpp` that currently break running the binary in standalone mode.
---
 src/xrpld/core/detail/Config.cpp | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/xrpld/core/detail/Config.cpp b/src/xrpld/core/detail/Config.cpp
index 74dfa6d5a8..0a60416af7 100644
--- a/src/xrpld/core/detail/Config.cpp
+++ b/src/xrpld/core/detail/Config.cpp
@@ -358,9 +358,11 @@ Config::setup(std::string const& strConf, bool bQuiet, bool bSilent, bool bStand
         std::string const dbPath(legacy("database_path"));
         if (!dbPath.empty())
             dataDir = boost::filesystem::path(dbPath);
+        else if (RUN_STANDALONE)
+            dataDir.clear();
     }
-    if (!RUN_STANDALONE)
+    if (!dataDir.empty())
     {
         boost::system::error_code ec;
         boost::filesystem::create_directories(dataDir, ec);

From 6d369e0f0250a81077214bfeef00fae0ffbf15bd Mon Sep 17 00:00:00 2001
From: Mayukha Vadari
Date: Mon, 2 Feb 2026 21:12:26 -0500
Subject: [PATCH 6/9] docs: Update API changelog, add APIv2+APIv3 version documentation (#6308)

This change cleans up the `API-CHANGELOG.md` file. It moves the
version-specific documentation to other files and fleshes out the changelog
with all the API-related changes in each version.
---
 API-CHANGELOG.md | 200 ++++++++++++++++++++++++-----------------
 API-VERSION-2.md |  66 ++++++++++++++++
 API-VERSION-3.md |  27 +++++++
 CONTRIBUTING.md  |   3 +-
 4 files changed, 199 insertions(+), 97 deletions(-)
 create mode 100644 API-VERSION-2.md
 create mode 100644 API-VERSION-3.md

diff --git a/API-CHANGELOG.md b/API-CHANGELOG.md
index dd3fcd018b..c7a31d27fa 100644
--- a/API-CHANGELOG.md
+++ b/API-CHANGELOG.md
@@ -6,90 +6,85 @@ For info about how [API versioning](https://xrpl.org/request-formatting.html#api
 The API version controls the API behavior you see. This includes what properties you see in responses, what parameters you're permitted to send in requests, and so on. You specify the API version in each of your requests. When a breaking change is introduced to the `rippled` API, a new version is released. To avoid breaking your code, you should set (or increase) your version when you're ready to upgrade.

+The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
+
 For a log of breaking changes, see the **API Version [number]** headings.
In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release. +## API Version 3 (Beta) + +API version 3 is currently a beta API. It requires enabling `[beta_rpc_api]` in the rippled configuration to use. See [API-VERSION-3.md](API-VERSION-3.md) for the full list of changes in API version 3. + ## API Version 2 -API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request. - -#### Removed methods - -In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759) - -- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`. -- `ledger_header` - Instead, use the `ledger` method. - -#### Modifications to JSON transaction element in V2 - -In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775) -This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727) - -- JSON transaction element is named `tx_json` -- Binary transaction element is named `tx_blob` -- JSON transaction metadata element is named `meta` -- Binary transaction metadata element is named `meta_blob` - -Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible: - -- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element. -- `ledger_index` - Ledger index (only set on validated ledgers) -- `ledger_hash` - Ledger hash (only set on closed or validated ledgers) -- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers) -- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false` - -This change affects the following methods: - -- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements -- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements -- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements -- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements -- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element. - -#### Modification to `Payment` transaction JSON schema - -When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. 
(https://github.com/XRPLF/rippled/pull/4733) - -- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733) - - On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients. - - On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error. - - On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2. - - On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2. - -#### Modifications to account_info response - -- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) (https://github.com/XRPLF/rippled/pull/3770) -- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585) - - (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.) - -#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response - -- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571) - - The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579) -- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has: - - returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288) -- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620) - -#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response - -- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620) +API version 2 is available in `rippled` version 2.0.0 and later. See [API-VERSION-2.md](API-VERSION-2.md) for the full list of changes in API version 2. ## API Version 1 This version is supported by all `rippled` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified. -The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code. +## XRP Ledger server version 3.1.0 -### Inconsistency: server_info - network_id +[Version 3.1.0](https://github.com/XRPLF/rippled/releases/tag/3.1.0) was released on Jan 27, 2026. 
-The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead. +### Additions in 3.1.0 + +- `vault_info`: New RPC method to retrieve information about a specific vault (part of XLS-66 Lending Protocol). ([#6156](https://github.com/XRPLF/rippled/pull/6156)) + +## XRP Ledger server version 3.0.0 + +[Version 3.0.0](https://github.com/XRPLF/rippled/releases/tag/3.0.0) was released on Dec 9, 2025. + +### Additions in 3.0.0 + +- `ledger_entry`: Supports all ledger entry types with dedicated parsers. ([#5237](https://github.com/XRPLF/rippled/pull/5237)) +- `ledger_entry`: New error codes `entryNotFound` and `unexpectedLedgerType` for more specific error handling. ([#5237](https://github.com/XRPLF/rippled/pull/5237)) +- `ledger_entry`: Improved error messages with more context (e.g., specifying which field is invalid or missing). ([#5237](https://github.com/XRPLF/rippled/pull/5237)) +- `ledger_entry`: Assorted bug fixes in RPC processing. ([#5237](https://github.com/XRPLF/rippled/pull/5237)) +- `simulate`: Supports additional metadata in the response. ([#5754](https://github.com/XRPLF/rippled/pull/5754)) + +## XRP Ledger server version 2.6.2 + +[Version 2.6.2](https://github.com/XRPLF/rippled/releases/tag/2.6.2) was released on Nov 19, 2025. + +This release contains bug fixes only and no API changes. + +## XRP Ledger server version 2.6.1 + +[Version 2.6.1](https://github.com/XRPLF/rippled/releases/tag/2.6.1) was released on Sep 30, 2025. + +This release contains bug fixes only and no API changes. + +## XRP Ledger server version 2.6.0 + +[Version 2.6.0](https://github.com/XRPLF/rippled/releases/tag/2.6.0) was released on Aug 27, 2025. + +### Additions in 2.6.0 + +- `account_info`: Added `allowTrustLineLocking` flag in response. ([#5525](https://github.com/XRPLF/rippled/pull/5525)) +- `ledger`: Removed the type filter from the RPC command. ([#4934](https://github.com/XRPLF/rippled/pull/4934)) +- `subscribe` (`validations` stream): `network_id` is now included. ([#5579](https://github.com/XRPLF/rippled/pull/5579)) +- `subscribe` (`transactions` stream): `nftoken_id`, `nftoken_ids`, and `offer_id` are now included in transaction metadata. ([#5230](https://github.com/XRPLF/rippled/pull/5230)) + +## XRP Ledger server version 2.5.1 + +[Version 2.5.1](https://github.com/XRPLF/rippled/releases/tag/2.5.1) was released on Sep 17, 2025. + +This release contains bug fixes only and no API changes. ## XRP Ledger server version 2.5.0 -As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu). +[Version 2.5.0](https://github.com/XRPLF/rippled/releases/tag/2.5.0) was released on Jun 24, 2025. ### Additions and bugfixes in 2.5.0 -- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled. +- `tx`: Added `ctid` field to the response and improved error handling. ([#4738](https://github.com/XRPLF/rippled/pull/4738)) +- `ledger_entry`: Improved error messages in `permissioned_domain`. ([#5344](https://github.com/XRPLF/rippled/pull/5344)) +- `simulate`: Improved multi-sign usage. 
([#5479](https://github.com/XRPLF/rippled/pull/5479)) +- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled. ([#5385](https://github.com/XRPLF/rippled/pull/5385)) +- `subscribe` (admin): Removed webhook queue limit to prevent dropping notifications; reduced HTTP timeout from 10 minutes to 30 seconds. ([#5163](https://github.com/XRPLF/rippled/pull/5163)) +- `ledger_data` (gRPC): Fixed crashing issue with some invalid markers. ([#5137](https://github.com/XRPLF/rippled/pull/5137)) +- `account_lines`: Fixed error with `no_ripple` and `no_ripple_peer` sometimes showing up incorrectly. ([#5345](https://github.com/XRPLF/rippled/pull/5345)) +- `account_tx`: Fixed issue with incorrect CTIDs. ([#5408](https://github.com/XRPLF/rippled/pull/5408)) ## XRP Ledger server version 2.4.0 @@ -97,11 +92,19 @@ As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release ver ### Additions and bugfixes in 2.4.0 -- `ledger_entry`: `state` is added an alias for `ripple_state`. -- `ledger_entry`: Enables case-insensitive filtering by canonical name in addition to case-sensitive filtering by RPC name. -- `validators`: Added new field `validator_list_threshold` in response. -- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate) -- Signing methods autofill fees better and properly handle transactions that don't have a base fee, and will also autofill the `NetworkID` field. +- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate). ([#5069](https://github.com/XRPLF/rippled/pull/5069)) +- Signing methods (`sign`, `sign_for`, `submit`): Autofill fees better, properly handle transactions without a base fee, and autofill the `NetworkID` field. ([#5069](https://github.com/XRPLF/rippled/pull/5069)) +- `ledger_entry`: `state` is added as an alias for `ripple_state`. ([#5199](https://github.com/XRPLF/rippled/pull/5199)) +- `ledger`, `ledger_data`, `account_objects`: Support filtering ledger entry types by their canonical names (case-insensitive). ([#5271](https://github.com/XRPLF/rippled/pull/5271)) +- `validators`: Added new field `validator_list_threshold` in response. ([#5112](https://github.com/XRPLF/rippled/pull/5112)) +- `server_info`: Added git commit hash info on admin connection. ([#5225](https://github.com/XRPLF/rippled/pull/5225)) +- `server_definitions`: Changed larger `UInt` serialized types to `Hash`. ([#5231](https://github.com/XRPLF/rippled/pull/5231)) + +## XRP Ledger server version 2.3.1 + +[Version 2.3.1](https://github.com/XRPLF/rippled/releases/tag/2.3.1) was released on Jan 29, 2025. + +This release contains bug fixes only and no API changes. ## XRP Ledger server version 2.3.0 @@ -109,19 +112,30 @@ As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release ver ### Breaking changes in 2.3.0 -- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs). - -Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC. +- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs). 
Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC. ### Additions and bugfixes in 2.3.0 -- `book_changes`: Returns a `validated` field in its response, which was missing in prior versions. +- `book_changes`: Returns a `validated` field in its response. ([#5096](https://github.com/XRPLF/rippled/pull/5096)) +- `book_changes`: Accepts shortcut strings (`current`, `closed`, `validated`) for the `ledger_index` parameter. ([#5096](https://github.com/XRPLF/rippled/pull/5096)) +- `server_definitions`: Include `index` in response. ([#5190](https://github.com/XRPLF/rippled/pull/5190)) +- `account_nfts`: Fix issue where unassociated marker would return incorrect results. ([#5045](https://github.com/XRPLF/rippled/pull/5045)) +- `account_objects`: Fix issue where invalid marker would not return an error. ([#5046](https://github.com/XRPLF/rippled/pull/5046)) +- `account_objects`: Disallow filtering by ledger entry types that an account cannot hold. ([#5056](https://github.com/XRPLF/rippled/pull/5056)) +- `tx`: Allow lowercase CTID. ([#5049](https://github.com/XRPLF/rippled/pull/5049)) +- `feature`: Better error handling for invalid values of `feature`. ([#5063](https://github.com/XRPLF/rippled/pull/5063)) ## XRP Ledger server version 2.2.0 [Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive): -- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781)) +- `feature`: Add a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781)) + +## XRP Ledger server version 2.0.1 + +[Version 2.0.1](https://github.com/XRPLF/rippled/releases/tag/2.0.1) was released on Jan 29, 2024. The following additions are non-breaking: + +- `path_find`: Fixes unbounded memory growth. ([#4822](https://github.com/XRPLF/rippled/pull/4822)) ## XRP Ledger server version 2.0.0 @@ -129,24 +143,18 @@ Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC. - `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries. - In `Payment` transactions, `DeliverMax` has been added. This is a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking. -- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting). - -## XRP Ledger server version 2.2.0 - -The following is a non-breaking addition to the API. - -- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781)) +- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting). The full list of changes is in [API-VERSION-2.md](API-VERSION-2.md). 
## XRP Ledger server version 1.12.0 -[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive). +[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive): -- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. (https://github.com/XRPLF/rippled/pull/4427) +- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. ([#4427](https://github.com/XRPLF/rippled/pull/4427)) - `ports` contains objects, each containing a `port` for the listening port (a number string), and a `protocol` array listing the supported protocols on that port. - This allows crawlers to build a more detailed topology without needing to port-scan nodes. - (For peers and other non-admin clients, the info about admin ports is excluded.) -- Clawback: The following additions are gated by the Clawback amendment (`featureClawback`). (https://github.com/XRPLF/rippled/pull/4553) - - Adds an [AccountRoot flag](https://xrpl.org/accountroot.html#accountroot-flags) called `lsfAllowTrustLineClawback` (https://github.com/XRPLF/rippled/pull/4617) +- Clawback: The following additions are gated by the Clawback amendment (`featureClawback`). ([#4553](https://github.com/XRPLF/rippled/pull/4553)) + - Adds an [AccountRoot flag](https://xrpl.org/accountroot.html#accountroot-flags) called `lsfAllowTrustLineClawback`. ([#4617](https://github.com/XRPLF/rippled/pull/4617)) - Adds the corresponding `asfAllowTrustLineClawback` [AccountSet Flag](https://xrpl.org/accountset.html#accountset-flags) as well. - Clawback is disabled by default, so if an issuer desires the ability to claw back funds, they must use an `AccountSet` transaction to set the AllowTrustLineClawback flag. They must do this before creating any trust lines, offers, escrows, payment channels, or checks. - Adds the [Clawback transaction type](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-39d-clawback/README.md#331-clawback-transaction), containing these fields: @@ -181,16 +189,16 @@ The following is a non-breaking addition to the API. ### Breaking changes in 1.11 -- Added the ability to mark amendments as obsolete. For the `feature` admin API, there is a new possible value for the `vetoed` field. (https://github.com/XRPLF/rippled/pull/4291) +- Added the ability to mark amendments as obsolete. For the `feature` admin API, there is a new possible value for the `vetoed` field. ([#4291](https://github.com/XRPLF/rippled/pull/4291)) - The value of `vetoed` can now be `true`, `false`, or `"Obsolete"`. -- Removed the acceptance of seeds or public keys in place of account addresses. (https://github.com/XRPLF/rippled/pull/4404) +- Removed the acceptance of seeds or public keys in place of account addresses. ([#4404](https://github.com/XRPLF/rippled/pull/4404)) - This simplifies the API and encourages better security practices (i.e. seeds should never be sent over the network). 
-- For the `ledger_data` method, when all entries are filtered out, the `state` field of the response is now an empty list (in other words, an empty array, `[]`). (Previously, it would return `null`.) While this is technically a breaking change, the new behavior is consistent with the documentation, so this is considered only a bug fix. (https://github.com/XRPLF/rippled/pull/4398) +- For the `ledger_data` method, when all entries are filtered out, the `state` field of the response is now an empty list (in other words, an empty array, `[]`). (Previously, it would return `null`.) While this is technically a breaking change, the new behavior is consistent with the documentation, so this is considered only a bug fix. ([#4398](https://github.com/XRPLF/rippled/pull/4398)) - If and when the `fixNFTokenRemint` amendment activates, there will be a new AccountRoot field, `FirstNFTSequence`. This field is set to the current account sequence when the account issues their first NFT. If an account has not issued any NFTs, then the field is not set. ([#4406](https://github.com/XRPLF/rippled/pull/4406)) - There is a new account deletion restriction: an account can only be deleted if `FirstNFTSequence` + `MintedNFTokens` + `256` is less than the current ledger sequence. - This is potentially a breaking change if clients have logic for determining whether an account can be deleted. - NetworkID - - For sidechains and networks with a network ID greater than 1024, there is a new [transaction common field](https://xrpl.org/transaction-common-fields.html), `NetworkID`. (https://github.com/XRPLF/rippled/pull/4370) + - For sidechains and networks with a network ID greater than 1024, there is a new [transaction common field](https://xrpl.org/transaction-common-fields.html), `NetworkID`. ([#4370](https://github.com/XRPLF/rippled/pull/4370)) - This field helps to prevent replay attacks and is now required for chains whose network ID is 1025 or higher. - The field must be omitted for Mainnet, so there is no change for Mainnet users. - There are three new local error codes: @@ -200,10 +208,10 @@ The following is a non-breaking addition to the API. ### Additions and bug fixes in 1.11 -- Added `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `tx` and `account_tx` responses. (https://github.com/XRPLF/rippled/pull/4447) -- Added an `account_flags` object to the `account_info` method response. (https://github.com/XRPLF/rippled/pull/4459) -- Added `NFTokenPages` to the `account_objects` RPC. (https://github.com/XRPLF/rippled/pull/4352) -- Fixed: `marker` returned from the `account_lines` command would not work on subsequent commands. (https://github.com/XRPLF/rippled/pull/4361) +- Added `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `tx` and `account_tx` responses. ([#4447](https://github.com/XRPLF/rippled/pull/4447)) +- Added an `account_flags` object to the `account_info` method response. ([#4459](https://github.com/XRPLF/rippled/pull/4459)) +- Added `NFTokenPages` to the `account_objects` RPC. ([#4352](https://github.com/XRPLF/rippled/pull/4352)) +- Fixed: `marker` returned from the `account_lines` command would not work on subsequent commands. ([#4361](https://github.com/XRPLF/rippled/pull/4361)) ## XRP Ledger server version 1.10.0 diff --git a/API-VERSION-2.md b/API-VERSION-2.md new file mode 100644 index 0000000000..2296795271 --- /dev/null +++ b/API-VERSION-2.md @@ -0,0 +1,66 @@ +# API Version 2 + +API version 2 is available in `rippled` version 2.0.0 and later. 
To use this API, clients specify `"api_version" : 2` in each request. + +For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior. + +## Removed methods + +In API version 2, the following deprecated methods are no longer available: ([#4759](https://github.com/XRPLF/rippled/pull/4759)) + +- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`. +- `ledger_header` - Instead, use the `ledger` method. + +## Modifications to JSON transaction element in API version 2 + +In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. ([#4775](https://github.com/XRPLF/rippled/pull/4775)) +This helps to unify the JSON serialization format of transactions. ([clio#722](https://github.com/XRPLF/clio/issues/722), [#4727](https://github.com/XRPLF/rippled/issues/4727)) + +- JSON transaction element is named `tx_json` +- Binary transaction element is named `tx_blob` +- JSON transaction metadata element is named `meta` +- Binary transaction metadata element is named `meta_blob` + +Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible: + +- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element. +- `ledger_index` - Ledger index (only set on validated ledgers) +- `ledger_hash` - Ledger hash (only set on closed or validated ledgers) +- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers) +- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false` + +This change affects the following methods: + +- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements +- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements +- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements +- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements +- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element. + +## Modifications to `Payment` transaction JSON schema + +When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. 
([#4733](https://github.com/XRPLF/rippled/pull/4733)) + +- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: ([#4733](https://github.com/XRPLF/rippled/pull/4733)) + - On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients. + - On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error. + - On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2. + - On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2. + +## Modifications to account_info response + +- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) ([#3770](https://github.com/XRPLF/rippled/pull/3770)) +- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. ([#4585](https://github.com/XRPLF/rippled/pull/4585)) + - (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.) + +## Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response + +- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. ([#4571](https://github.com/XRPLF/rippled/pull/4571)) + - The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. ([#4545](https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)) +- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has: + - returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. ([#4288](https://github.com/XRPLF/rippled/issues/4288)) +- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. ([#4620](https://github.com/XRPLF/rippled/pull/4620)) + +## Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response + +- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. ([#4620](https://github.com/XRPLF/rippled/pull/4620)) diff --git a/API-VERSION-3.md b/API-VERSION-3.md new file mode 100644 index 0000000000..46dc3f504d --- /dev/null +++ b/API-VERSION-3.md @@ -0,0 +1,27 @@ +# API Version 3 + +API version 3 is currently a **beta API**. It requires enabling `[beta_rpc_api]` in the rippled configuration to use. To use this API, clients specify `"api_version" : 3` in each request. + +For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). 
API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior. + +## Breaking Changes + +### Modifications to `amm_info` + +The order of error checks has been changed to provide more specific error messages. ([#4924](https://github.com/XRPLF/rippled/pull/4924)) + +- **Before (API v2)**: When sending an invalid account or asset to `amm_info` while other parameters are not set as expected, the method returns a generic `rpcINVALID_PARAMS` error. +- **After (API v3)**: The same scenario returns a more specific error: `rpcISSUE_MALFORMED` for malformed assets or `rpcACT_MALFORMED` for malformed accounts. + +### Modifications to `ledger_entry` + +Added support for string shortcuts to look up fixed-location ledger entries using the `"index"` parameter. ([#5644](https://github.com/XRPLF/rippled/pull/5644)) + +In API version 3, the following string values can be used with the `"index"` parameter: + +- `"index": "amendments"` - Returns the `Amendments` ledger entry +- `"index": "fee"` - Returns the `FeeSettings` ledger entry +- `"index": "nunl"` - Returns the `NegativeUNL` ledger entry +- `"index": "hashes"` - Returns the "short" `LedgerHashes` ledger entry (recent ledger hashes) + +These shortcuts are only available in API version 3 and later. In API versions 1 and 2, these string values would result in an error. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4f99972713..808d553e17 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -872,7 +872,8 @@ git push --delete upstream-push master-next 11. [Create a new release on Github](https://github.com/XRPLF/rippled/releases). Be sure that "Set as the latest release" is checked. -12. Finally [reverse merge the release into `develop`](#follow-up-reverse-merge). +12. Open a PR to update the [API-CHANGELOG](API-CHANGELOG.md) and `API-VERSION-[n].md` with the changes for this release (if any are missing). +13. Finally, [reverse merge the release into `develop`](#follow-up-reverse-merge). #### Special cases: point releases, hotfixes, etc. From b814a09a08f47c99b5fdb9a2970e9f7bffd41ddc Mon Sep 17 00:00:00 2001 From: Vito Tumas <5780819+Tapanito@users.noreply.github.com> Date: Tue, 3 Feb 2026 16:13:10 +0100 Subject: [PATCH 7/9] chore: Add .zed editor config directory to .gitignore (#6317) This change adds the project configuration directory to `.gitignore` for the `zed` editor. As per the [documentation](https://zed.dev/docs/remote-development?highlight=.zed#zed-settings), the project configuration files are stored in the `.zed` directory at the project root dir. --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index b899cf8436..2692d707e7 100644 --- a/.gitignore +++ b/.gitignore @@ -64,6 +64,9 @@ DerivedData /.vs/ /.vscode/ +# zed IDE. +/.zed/ + # AI tools. /.augment /.claude From 78136830914f39024da0665806756e216b562d09 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Tue, 3 Feb 2026 16:37:24 +0000 Subject: [PATCH 8/9] fix: Deletes expired NFToken offers from ledger (#5707) This change introduces the `fixExpiredNFTokenOfferRemoval` amendment that allows expired offers to pass through `preclaim()` and be deleted in `doApply()`, following the same pattern used for expired credentials. 
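For illustration, the control flow is roughly the following. This is a
minimal, self-contained sketch, not the rippled implementation: the type
names, helpers, and the `fixEnabled` switch below are hypothetical stand-ins,
and the real transactor operates on actual ledger objects and amendment
checks. The point is only the ordering: the expired offer is detected late
enough that it can be deleted, while the transaction still reports
`tecEXPIRED`.

```cpp
#include <cstdint>
#include <iostream>
#include <map>
#include <optional>

// Hypothetical stand-ins for ledger state and engine result codes.
enum class Result { success, expired, notFound };

struct Offer
{
    std::optional<std::uint32_t> expiration;
};

using Ledger = std::map<int, Offer>; // offer ID -> offer

bool
hasExpired(std::uint32_t closeTime, std::optional<std::uint32_t> expiration)
{
    return expiration && *expiration <= closeTime;
}

// Old behavior: the preclaim-style check rejects an expired offer, so the
// stale entry is never removed. New behavior: let it through.
Result
preclaim(Ledger const& ledger, int offerID, bool fixEnabled, std::uint32_t closeTime)
{
    auto const it = ledger.find(offerID);
    if (it == ledger.end())
        return Result::notFound;
    if (!fixEnabled && hasExpired(closeTime, it->second.expiration))
        return Result::expired;
    return Result::success;
}

// The doApply-style step deletes the expired offer from the ledger and still
// reports the expiration, mirroring how expired credentials are handled.
Result
doApply(Ledger& ledger, int offerID, std::uint32_t closeTime)
{
    auto it = ledger.find(offerID);
    if (it == ledger.end())
        return Result::notFound;
    if (hasExpired(closeTime, it->second.expiration))
    {
        ledger.erase(it);
        return Result::expired;
    }
    // Normal offer acceptance would happen here.
    return Result::success;
}

int
main()
{
    Ledger ledger{{1, Offer{100}}};      // offer 1 expires at "time" 100
    std::uint32_t const closeTime = 200; // well past the expiration

    if (preclaim(ledger, 1, /*fixEnabled=*/true, closeTime) == Result::success)
        doApply(ledger, 1, closeTime);

    // With the fix enabled the stale offer is gone; with it disabled the
    // offer would still be sitting in the ledger.
    std::cout << "offers left in ledger: " << ledger.size() << '\n';
}
```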
--- include/xrpl/protocol/detail/features.macro | 1 + src/test/app/NFToken_test.cpp | 412 ++++++++++++------ .../app/tx/detail/NFTokenAcceptOffer.cpp | 47 +- 3 files changed, 328 insertions(+), 132 deletions(-) diff --git a/include/xrpl/protocol/detail/features.macro b/include/xrpl/protocol/detail/features.macro index 0c952bf59b..d8498ffa2f 100644 --- a/include/xrpl/protocol/detail/features.macro +++ b/include/xrpl/protocol/detail/features.macro @@ -16,6 +16,7 @@ // Add new amendments to the top of this list. // Keep it sorted in reverse chronological order. +XRPL_FIX (ExpiredNFTokenOfferRemoval, Supported::yes, VoteBehavior::DefaultNo) XRPL_FIX (BatchInnerSigs, Supported::yes, VoteBehavior::DefaultNo) XRPL_FEATURE(LendingProtocol, Supported::yes, VoteBehavior::DefaultNo) XRPL_FEATURE(PermissionDelegationV1_1, Supported::no, VoteBehavior::DefaultNo) diff --git a/src/test/app/NFToken_test.cpp b/src/test/app/NFToken_test.cpp index 51590903e3..2ab2e2a94c 100644 --- a/src/test/app/NFToken_test.cpp +++ b/src/test/app/NFToken_test.cpp @@ -876,42 +876,48 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite env.fund(XRP(1000), alice, buyer, gw); env.close(); BEAST_EXPECT(ownerCount(env, alice) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == 0); uint256 const nftAlice0ID = token::getNextID(env, alice, 0, tfTransferable); env(token::mint(alice, 0u), txflags(tfTransferable)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 1); + uint8_t aliceCount = 1; + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); uint256 const nftXrpOnlyID = token::getNextID(env, alice, 0, tfOnlyXRP | tfTransferable); env(token::mint(alice, 0), txflags(tfOnlyXRP | tfTransferable)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 1); + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); uint256 nftNoXferID = token::getNextID(env, alice, 0); env(token::mint(alice, 0)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 1); + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); // alice creates sell offers for her nfts. uint256 const plainOfferIndex = keylet::nftoffer(alice, env.seq(alice)).key; env(token::createOffer(alice, nftAlice0ID, XRP(10)), txflags(tfSellNFToken)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 2); + aliceCount++; + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); uint256 const audOfferIndex = keylet::nftoffer(alice, env.seq(alice)).key; env(token::createOffer(alice, nftAlice0ID, gwAUD(30)), txflags(tfSellNFToken)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 3); + aliceCount++; + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); uint256 const xrpOnlyOfferIndex = keylet::nftoffer(alice, env.seq(alice)).key; env(token::createOffer(alice, nftXrpOnlyID, XRP(20)), txflags(tfSellNFToken)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 4); + aliceCount++; + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); uint256 const noXferOfferIndex = keylet::nftoffer(alice, env.seq(alice)).key; env(token::createOffer(alice, nftNoXferID, XRP(30)), txflags(tfSellNFToken)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 5); + aliceCount++; + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); // alice creates a sell offer that will expire soon. 
uint256 const aliceExpOfferIndex = keylet::nftoffer(alice, env.seq(alice)).key; @@ -919,7 +925,17 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite txflags(tfSellNFToken), token::expiration(lastClose(env) + 5)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 6); + aliceCount++; + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); + + // buyer creates a Buy offer that will expire soon. + uint256 const buyerExpOfferIndex = keylet::nftoffer(buyer, env.seq(buyer)).key; + env(token::createOffer(buyer, nftAlice0ID, XRP(40)), + token::owner(alice), + token::expiration(lastClose(env) + 5)); + env.close(); + uint8_t buyerCount = 1; + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); //---------------------------------------------------------------------- // preflight @@ -927,12 +943,12 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite // Set a negative fee. env(token::acceptSellOffer(buyer, noXferOfferIndex), fee(STAmount(10ull, true)), ter(temBAD_FEE)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Set an invalid flag. env(token::acceptSellOffer(buyer, noXferOfferIndex), txflags(0x00008000), ter(temINVALID_FLAG)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Supply nether an sfNFTokenBuyOffer nor an sfNFTokenSellOffer field. { @@ -940,7 +956,7 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite jv.removeMember(sfNFTokenSellOffer.jsonName); env(jv, ter(temMALFORMED)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); } // A buy offer may not contain a sfNFTokenBrokerFee field. @@ -949,7 +965,7 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite jv[sfNFTokenBrokerFee.jsonName] = STAmount(500000).getJson(JsonOptions::none); env(jv, ter(temMALFORMED)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); } // A sell offer may not contain a sfNFTokenBrokerFee field. @@ -958,7 +974,7 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite jv[sfNFTokenBrokerFee.jsonName] = STAmount(500000).getJson(JsonOptions::none); env(jv, ter(temMALFORMED)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); } // A brokered offer may not contain a negative or zero brokerFee. @@ -966,7 +982,7 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite token::brokerFee(gwAUD(0)), ter(temMALFORMED)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); //---------------------------------------------------------------------- // preclaim @@ -974,33 +990,48 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite // The buy offer must be non-zero. env(token::acceptBuyOffer(buyer, beast::zero), ter(tecOBJECT_NOT_FOUND)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // The buy offer must be present in the ledger. uint256 const missingOfferIndex = keylet::nftoffer(alice, 1).key; env(token::acceptBuyOffer(buyer, missingOfferIndex), ter(tecOBJECT_NOT_FOUND)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // The buy offer must not have expired. 
- env(token::acceptBuyOffer(buyer, aliceExpOfferIndex), ter(tecEXPIRED)); + // NOTE: this is only a preclaim check with the + // fixExpiredNFTokenOfferRemoval amendment disabled. + env(token::acceptBuyOffer(alice, buyerExpOfferIndex), ter(tecEXPIRED)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + if (features[fixExpiredNFTokenOfferRemoval]) + { + buyerCount--; + } + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // The sell offer must be non-zero. env(token::acceptSellOffer(buyer, beast::zero), ter(tecOBJECT_NOT_FOUND)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // The sell offer must be present in the ledger. env(token::acceptSellOffer(buyer, missingOfferIndex), ter(tecOBJECT_NOT_FOUND)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // The sell offer must not have expired. + // NOTE: this is only a preclaim check with the + // fixExpiredNFTokenOfferRemoval amendment disabled. env(token::acceptSellOffer(buyer, aliceExpOfferIndex), ter(tecEXPIRED)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + // Alice's count is decremented by one when the expired offer is + // removed. + if (features[fixExpiredNFTokenOfferRemoval]) + { + aliceCount--; + } + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); //---------------------------------------------------------------------- // preclaim brokered @@ -1012,8 +1043,13 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite env.close(); env(pay(gw, buyer, gwAUD(30))); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 7); - BEAST_EXPECT(ownerCount(env, buyer) == 1); + aliceCount++; + buyerCount++; + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); + + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // We're about to exercise offer brokering, so we need // corresponding buy and sell offers. @@ -1022,35 +1058,38 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite uint256 const buyerOfferIndex = keylet::nftoffer(buyer, env.seq(buyer)).key; env(token::createOffer(buyer, nftAlice0ID, gwAUD(29)), token::owner(alice)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + buyerCount++; + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // gw attempts to broker offers that are not for the same token. env(token::brokerOffers(gw, buyerOfferIndex, xrpOnlyOfferIndex), ter(tecNFTOKEN_BUY_SELL_MISMATCH)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // gw attempts to broker offers that are not for the same currency. env(token::brokerOffers(gw, buyerOfferIndex, plainOfferIndex), ter(tecNFTOKEN_BUY_SELL_MISMATCH)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // In a brokered offer, the buyer must offer greater than or // equal to the selling price. env(token::brokerOffers(gw, buyerOfferIndex, audOfferIndex), ter(tecINSUFFICIENT_PAYMENT)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Remove buyer's offer. 
env(token::cancelOffer(buyer, {buyerOfferIndex})); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 1); + buyerCount--; + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); } { // buyer creates a buy offer for one of alice's nfts. uint256 const buyerOfferIndex = keylet::nftoffer(buyer, env.seq(buyer)).key; env(token::createOffer(buyer, nftAlice0ID, gwAUD(31)), token::owner(alice)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + buyerCount++; + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Broker sets their fee in a denomination other than the one // used by the offers @@ -1058,14 +1097,14 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite token::brokerFee(XRP(40)), ter(tecNFTOKEN_BUY_SELL_MISMATCH)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Broker fee way too big. env(token::brokerOffers(gw, buyerOfferIndex, audOfferIndex), token::brokerFee(gwAUD(31)), ter(tecINSUFFICIENT_PAYMENT)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Broker fee is smaller, but still too big once the offer // seller's minimum is taken into account. @@ -1073,12 +1112,13 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite token::brokerFee(gwAUD(1.5)), ter(tecINSUFFICIENT_PAYMENT)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Remove buyer's offer. env(token::cancelOffer(buyer, {buyerOfferIndex})); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 1); + buyerCount--; + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); } //---------------------------------------------------------------------- // preclaim buy @@ -1087,17 +1127,18 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite uint256 const buyerOfferIndex = keylet::nftoffer(buyer, env.seq(buyer)).key; env(token::createOffer(buyer, nftAlice0ID, gwAUD(30)), token::owner(alice)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + buyerCount++; + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Don't accept a buy offer if the sell flag is set. env(token::acceptBuyOffer(buyer, plainOfferIndex), ter(tecNFTOKEN_OFFER_TYPE_MISMATCH)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 7); + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); // An account can't accept its own offer. env(token::acceptBuyOffer(buyer, buyerOfferIndex), ter(tecCANT_ACCEPT_OWN_NFTOKEN_OFFER)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // An offer acceptor must have enough funds to pay for the offer. env(pay(buyer, gw, gwAUD(30))); @@ -1105,7 +1146,7 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite BEAST_EXPECT(env.balance(buyer, gwAUD) == gwAUD(0)); env(token::acceptBuyOffer(alice, buyerOfferIndex), ter(tecINSUFFICIENT_FUNDS)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // alice gives her NFT to gw, so alice no longer owns nftAlice0. 
{ @@ -1114,7 +1155,7 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite env.close(); env(token::acceptSellOffer(gw, offerIndex)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 7); + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); } env(pay(gw, buyer, gwAUD(30))); env.close(); @@ -1122,12 +1163,13 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite // alice can't accept a buy offer for an NFT she no longer owns. env(token::acceptBuyOffer(alice, buyerOfferIndex), ter(tecNO_PERMISSION)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Remove buyer's offer. env(token::cancelOffer(buyer, {buyerOfferIndex})); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 1); + buyerCount--; + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); } //---------------------------------------------------------------------- // preclaim sell @@ -1136,23 +1178,24 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite uint256 const buyerOfferIndex = keylet::nftoffer(buyer, env.seq(buyer)).key; env(token::createOffer(buyer, nftXrpOnlyID, XRP(30)), token::owner(alice)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + buyerCount++; + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // Don't accept a sell offer without the sell flag set. env(token::acceptSellOffer(alice, buyerOfferIndex), ter(tecNFTOKEN_OFFER_TYPE_MISMATCH)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 7); + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); // An account can't accept its own offer. env(token::acceptSellOffer(alice, plainOfferIndex), ter(tecCANT_ACCEPT_OWN_NFTOKEN_OFFER)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // The seller must currently be in possession of the token they // are selling. alice gave nftAlice0ID to gw. env(token::acceptSellOffer(buyer, plainOfferIndex), ter(tecNO_PERMISSION)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); // gw gives nftAlice0ID back to alice. That allows us to check // buyer attempting to accept one of alice's offers with @@ -1163,14 +1206,14 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite env.close(); env(token::acceptSellOffer(alice, offerIndex)); env.close(); - BEAST_EXPECT(ownerCount(env, alice) == 7); + BEAST_EXPECT(ownerCount(env, alice) == aliceCount); } env(pay(buyer, gw, gwAUD(30))); env.close(); BEAST_EXPECT(env.balance(buyer, gwAUD) == gwAUD(0)); env(token::acceptSellOffer(buyer, audOfferIndex), ter(tecINSUFFICIENT_FUNDS)); env.close(); - BEAST_EXPECT(ownerCount(env, buyer) == 2); + BEAST_EXPECT(ownerCount(env, buyer) == buyerCount); } //---------------------------------------------------------------------- @@ -2769,6 +2812,7 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite uint256 const nftokenID1 = token::getNextID(env, issuer, 0, tfTransferable); env(token::mint(minter, 0), token::issuer(issuer), txflags(tfTransferable)); env.close(); + uint8_t issuerCount, minterCount, buyerCount; // Test how adding an Expiration field to an offer affects permissions // for cancelling offers. 
@@ -2792,9 +2836,12 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             uint256 const offerBuyerToMinter = keylet::nftoffer(buyer, env.seq(buyer)).key;
             env(token::createOffer(buyer, nftokenID0, drops(1)), token::owner(minter), token::expiration(expiration));
             env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 1);
-            BEAST_EXPECT(ownerCount(env, minter) == 3);
-            BEAST_EXPECT(ownerCount(env, buyer) == 1);
+            issuerCount = 1;
+            minterCount = 3;
+            buyerCount = 1;
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // Test who gets to cancel the offers. Anyone outside of the
             // offer-owner/destination pair should not be able to cancel
@@ -2806,32 +2853,36 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             env(token::cancelOffer(buyer, {offerIssuerToMinter}), ter(tecNO_PERMISSION));
             env.close();
             BEAST_EXPECT(lastClose(env) < expiration);
-            BEAST_EXPECT(ownerCount(env, issuer) == 1);
-            BEAST_EXPECT(ownerCount(env, minter) == 3);
-            BEAST_EXPECT(ownerCount(env, buyer) == 1);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // The offer creator can cancel their own unexpired offer.
             env(token::cancelOffer(minter, {offerMinterToAnyone}));
+            minterCount--;

             // The destination of a sell offer can cancel the NFT owner's
             // unexpired offer.
             env(token::cancelOffer(issuer, {offerMinterToIssuer}));
+            minterCount--;

             // Close enough ledgers to get past the expiration.
             while (lastClose(env) < expiration)
                 env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 1);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 1);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // Anyone can cancel expired offers.
             env(token::cancelOffer(issuer, {offerBuyerToMinter}));
+            buyerCount--;
             env(token::cancelOffer(buyer, {offerIssuerToMinter}));
+            issuerCount--;
             env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 0);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);
         }

         // Show that:
         // 1. An unexpired sell offer with an expiration can be accepted.
@@ -2844,44 +2895,70 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             env(token::createOffer(minter, nftokenID0, drops(1)), token::expiration(expiration), txflags(tfSellNFToken));
+            minterCount++;
             uint256 const offer1 = keylet::nftoffer(minter, env.seq(minter)).key;
             env(token::createOffer(minter, nftokenID1, drops(1)), token::expiration(expiration), txflags(tfSellNFToken));
+            minterCount++;
             env.close();
             BEAST_EXPECT(lastClose(env) < expiration);
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 3);
-            BEAST_EXPECT(ownerCount(env, buyer) == 0);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // Anyone can accept an unexpired sell offer.
             env(token::acceptSellOffer(buyer, offer0));
+            minterCount--;
+            buyerCount++;

             // Close enough ledgers to get past the expiration.
             while (lastClose(env) < expiration)
                 env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 2);
-            BEAST_EXPECT(ownerCount(env, buyer) == 1);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // No one can accept an expired sell offer.
             env(token::acceptSellOffer(buyer, offer1), ter(tecEXPIRED));
-            env(token::acceptSellOffer(issuer, offer1), ter(tecEXPIRED));
+
+            // With fixExpiredNFTokenOfferRemoval amendment, the first accept
+            // attempt deletes the expired offer. Without the amendment,
+            // the offer remains and we can try to accept it again.
+            if (features[fixExpiredNFTokenOfferRemoval])
+            {
+                // After amendment: offer was deleted by first accept attempt
+                minterCount--;
+                env(token::acceptSellOffer(issuer, offer1), ter(tecOBJECT_NOT_FOUND));
+            }
+            else
+            {
+                // Before amendment: offer still exists, second accept also
+                // fails
+                env(token::acceptSellOffer(issuer, offer1), ter(tecEXPIRED));
+            }
             env.close();
-            // The expired sell offer is still in the ledger.
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 2);
-            BEAST_EXPECT(ownerCount(env, buyer) == 1);
+            // Check if the expired sell offer behavior matches amendment status
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

-            // Anyone can cancel the expired sell offer.
-            env(token::cancelOffer(issuer, {offer1}));
-            env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 1);
+            if (!features[fixExpiredNFTokenOfferRemoval])
+            {
+                // Before amendment: expired offer still exists and needs to be
+                // cancelled
+                env(token::cancelOffer(issuer, {offer1}));
+                env.close();
+                minterCount--;
+            }
+            // Ensure that owner counts are correct with and without the
+            // amendment
+            BEAST_EXPECT(ownerCount(env, issuer) == 0 && issuerCount == 0);
+            BEAST_EXPECT(ownerCount(env, minter) == 1 && minterCount == 1);
+            BEAST_EXPECT(ownerCount(env, buyer) == 1 && buyerCount == 1);

             // Transfer nftokenID0 back to minter so we start the next test in
             // a simple place.
@@ -2889,10 +2966,11 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             env(token::createOffer(buyer, nftokenID0, XRP(0)), txflags(tfSellNFToken), token::destination(minter));
             env.close();
             env(token::acceptSellOffer(minter, offerSellBack));
+            buyerCount--;
             env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 0);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);
         }

         // Show that:
         // 1. An unexpired buy offer with an expiration can be accepted.
@@ -2903,14 +2981,16 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             uint256 const offer0 = keylet::nftoffer(buyer, env.seq(buyer)).key;
             env(token::createOffer(buyer, nftokenID0, drops(1)), token::owner(minter), token::expiration(expiration));
+            buyerCount++;
             uint256 const offer1 = keylet::nftoffer(buyer, env.seq(buyer)).key;
             env(token::createOffer(buyer, nftokenID1, drops(1)), token::owner(minter), token::expiration(expiration));
+            buyerCount++;
             env.close();
             BEAST_EXPECT(lastClose(env) < expiration);
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 2);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // An unexpired buy offer can be accepted.
             env(token::acceptBuyOffer(minter, offer0));
@@ -2919,26 +2999,48 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             while (lastClose(env) < expiration)
                 env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 2);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // An expired buy offer cannot be accepted.
             env(token::acceptBuyOffer(minter, offer1), ter(tecEXPIRED));
-            env(token::acceptBuyOffer(issuer, offer1), ter(tecEXPIRED));
+
+            // With fixExpiredNFTokenOfferRemoval amendment, the first accept
+            // attempt deletes the expired offer. Without the amendment,
+            // the offer remains and we can try to accept it again.
+            if (features[fixExpiredNFTokenOfferRemoval])
+            {
+                // After amendment: offer was deleted by first accept attempt
+                buyerCount--;
+                env(token::acceptBuyOffer(issuer, offer1), ter(tecOBJECT_NOT_FOUND));
+            }
+            else
+            {
+                // Before amendment: offer still exists, second accept also
+                // fails
+                env(token::acceptBuyOffer(issuer, offer1), ter(tecEXPIRED));
+            }
             env.close();
-            // The expired buy offer is still in the ledger.
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 2);
+            // Check if the expired buy offer behavior matches amendment status
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

-            // Anyone can cancel the expired buy offer.
-            env(token::cancelOffer(issuer, {offer1}));
-            env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 1);
+            if (!features[fixExpiredNFTokenOfferRemoval])
+            {
+                // Before amendment: expired offer still exists and can be
+                // cancelled
+                env(token::cancelOffer(issuer, {offer1}));
+                env.close();
+                buyerCount--;
+            }
+            // Ensure that owner counts are the same with and without the
+            // amendment
+            BEAST_EXPECT(ownerCount(env, issuer) == 0 && issuerCount == 0);
+            BEAST_EXPECT(ownerCount(env, minter) == 1 && minterCount == 1);
+            BEAST_EXPECT(ownerCount(env, buyer) == 1 && buyerCount == 1);

             // Transfer nftokenID0 back to minter so we start the next test in
             // a simple place.
@@ -2947,9 +3049,10 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             env.close();
             env(token::acceptSellOffer(minter, offerSellBack));
             env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 0);
+            buyerCount--;
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);
         }

         // Show that in brokered mode:
         // 1. An unexpired sell offer with an expiration can be accepted.
@@ -2962,50 +3065,74 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             env(token::createOffer(minter, nftokenID0, drops(1)), token::expiration(expiration), txflags(tfSellNFToken));
+            minterCount++;
             uint256 const sellOffer1 = keylet::nftoffer(minter, env.seq(minter)).key;
             env(token::createOffer(minter, nftokenID1, drops(1)), token::expiration(expiration), txflags(tfSellNFToken));
+            minterCount++;
             uint256 const buyOffer0 = keylet::nftoffer(buyer, env.seq(buyer)).key;
             env(token::createOffer(buyer, nftokenID0, drops(1)), token::owner(minter));
+            buyerCount++;
             uint256 const buyOffer1 = keylet::nftoffer(buyer, env.seq(buyer)).key;
             env(token::createOffer(buyer, nftokenID1, drops(1)), token::owner(minter));
+            buyerCount++;
             env.close();
             BEAST_EXPECT(lastClose(env) < expiration);
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 3);
-            BEAST_EXPECT(ownerCount(env, buyer) == 2);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // An unexpired offer can be brokered.
             env(token::brokerOffers(issuer, buyOffer0, sellOffer0));
+            minterCount--;

             // Close enough ledgers to get past the expiration.
             while (lastClose(env) < expiration)
                 env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 2);
-            BEAST_EXPECT(ownerCount(env, buyer) == 2);
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);

             // If the sell offer is expired it cannot be brokered.
             env(token::brokerOffers(issuer, buyOffer1, sellOffer1), ter(tecEXPIRED));
             env.close();
-            // The expired sell offer is still in the ledger.
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 2);
-            BEAST_EXPECT(ownerCount(env, buyer) == 2);
+            if (features[fixExpiredNFTokenOfferRemoval])
+            {
+                // With amendment: expired offers are deleted
+                minterCount--;
+            }

-            // Anyone can cancel the expired sell offer.
-            env(token::cancelOffer(buyer, {buyOffer1, sellOffer1}));
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);
+
+            if (features[fixExpiredNFTokenOfferRemoval])
+            {
+                // The sell offer was deleted, so there is no need to cancel it.
+                // The buy offer still exists, so we can cancel it.
+                env(token::cancelOffer(buyer, {buyOffer1}));
+                buyerCount--;
+            }
+            else
+            {
+                // Anyone can cancel the expired offers
+                env(token::cancelOffer(buyer, {buyOffer1, sellOffer1}));
+                minterCount--;
+                buyerCount--;
+            }
             env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 1);
+            // Ensure that owner counts are the same with and without the
+            // amendment
+            BEAST_EXPECT(ownerCount(env, issuer) == 0 && issuerCount == 0);
+            BEAST_EXPECT(ownerCount(env, minter) == 1 && minterCount == 1);
+            BEAST_EXPECT(ownerCount(env, buyer) == 1 && buyerCount == 1);

             // Transfer nftokenID0 back to minter so we start the next test in
             // a simple place.
@@ -3014,9 +3141,10 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             env.close();
             env(token::acceptSellOffer(minter, offerSellBack));
             env.close();
-            BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 1);
-            BEAST_EXPECT(ownerCount(env, buyer) == 0);
+            buyerCount--;
+            BEAST_EXPECT(ownerCount(env, issuer) == issuerCount);
+            BEAST_EXPECT(ownerCount(env, minter) == minterCount);
+            BEAST_EXPECT(ownerCount(env, buyer) == buyerCount);
         }

         // Show that in brokered mode:
         // 1. An unexpired buy offer with an expiration can be accepted.
@@ -3054,17 +3182,28 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             BEAST_EXPECT(ownerCount(env, minter) == 2);
             BEAST_EXPECT(ownerCount(env, buyer) == 2);

-            // If the buy offer is expired it cannot be brokered.
             env(token::brokerOffers(issuer, buyOffer1, sellOffer1), ter(tecEXPIRED));
             env.close();
-            // The expired buy offer is still in the ledger.
             BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 2);
-            BEAST_EXPECT(ownerCount(env, buyer) == 2);
-
-            // Anyone can cancel the expired buy offer.
-            env(token::cancelOffer(minter, {buyOffer1, sellOffer1}));
+            if (features[fixExpiredNFTokenOfferRemoval])
+            {
+                // After amendment: the expired buy offer was deleted during the
+                // broker attempt
+                BEAST_EXPECT(ownerCount(env, minter) == 2);
+                BEAST_EXPECT(ownerCount(env, buyer) == 1);
+                // The buy offer was deleted, so no need to cancel it
+                // The sell offer still exists, so we can cancel it
+                env(token::cancelOffer(minter, {sellOffer1}));
+            }
+            else
+            {
+                // Before amendment: expired offers still exist in ledger
+                BEAST_EXPECT(ownerCount(env, minter) == 2);
+                BEAST_EXPECT(ownerCount(env, buyer) == 2);
+                // Anyone can cancel the expired offers
+                env(token::cancelOffer(minter, {buyOffer1, sellOffer1}));
+            }
             env.close();
             BEAST_EXPECT(ownerCount(env, issuer) == 0);
             BEAST_EXPECT(ownerCount(env, minter) == 1);
@@ -3122,17 +3261,19 @@ class NFTokenBaseUtil_test : public beast::unit_test::suite
             BEAST_EXPECT(ownerCount(env, minter) == 2);
             BEAST_EXPECT(ownerCount(env, buyer) == 2);

-            // If the offers are expired they cannot be brokered.
             env(token::brokerOffers(issuer, buyOffer1, sellOffer1), ter(tecEXPIRED));
             env.close();
             // The expired offers are still in the ledger.
             BEAST_EXPECT(ownerCount(env, issuer) == 0);
-            BEAST_EXPECT(ownerCount(env, minter) == 2);
-            BEAST_EXPECT(ownerCount(env, buyer) == 2);
-
-            // Anyone can cancel the expired offers.
-            env(token::cancelOffer(issuer, {buyOffer1, sellOffer1}));
+            if (!features[fixExpiredNFTokenOfferRemoval])
+            {
+                // Before amendment: expired offers still exist in ledger
+                BEAST_EXPECT(ownerCount(env, minter) == 2);
+                BEAST_EXPECT(ownerCount(env, buyer) == 2);
+                // Anyone can cancel the expired offers
+                env(token::cancelOffer(issuer, {buyOffer1, sellOffer1}));
+            }
             env.close();
             BEAST_EXPECT(ownerCount(env, issuer) == 0);
             BEAST_EXPECT(ownerCount(env, minter) == 1);
@@ -6736,7 +6877,9 @@ public:
     void
     run() override
    {
-        testWithFeats(allFeatures - fixNFTokenReserve - featureNFTokenMintOffer - featureDynamicNFT);
+        testWithFeats(
+            allFeatures - fixNFTokenReserve - featureNFTokenMintOffer - featureDynamicNFT -
+            fixExpiredNFTokenOfferRemoval);
     }
 };

@@ -6767,6 +6910,15 @@ class NFTokenWOModify_test : public NFTokenBaseUtil_test
     }
 };

+class NFTokenWOExpiredOfferRemoval_test : public NFTokenBaseUtil_test
+{
+    void
+    run() override
+    {
+        testWithFeats(allFeatures - fixExpiredNFTokenOfferRemoval);
+    }
+};
+
 class NFTokenAllFeatures_test : public NFTokenBaseUtil_test
 {
     void
diff --git a/src/xrpld/app/tx/detail/NFTokenAcceptOffer.cpp b/src/xrpld/app/tx/detail/NFTokenAcceptOffer.cpp
index 4e91bdc1b6..349dc2c1ea 100644
--- a/src/xrpld/app/tx/detail/NFTokenAcceptOffer.cpp
+++ b/src/xrpld/app/tx/detail/NFTokenAcceptOffer.cpp
@@ -53,7 +53,17 @@ NFTokenAcceptOffer::preclaim(PreclaimContext const& ctx)
             return {nullptr, tecOBJECT_NOT_FOUND};

         if (hasExpired(ctx.view, (*offerSLE)[~sfExpiration]))
-            return {nullptr, tecEXPIRED};
+        {
+            // Before fixExpiredNFTokenOfferRemoval amendment, expired
+            // offers caused tecEXPIRED in preclaim, leaving them on ledger
+            // forever. After the amendment, we allow expired offers to
+            // reach doApply() where they get deleted and tecEXPIRED is
+            // returned.
+            if (!ctx.view.rules().enabled(fixExpiredNFTokenOfferRemoval))
+                return {nullptr, tecEXPIRED};
+            // Amendment enabled: return the expired offer to be handled in
+            // doApply
+        }

         if ((*offerSLE)[sfAmount].negative())
             return {nullptr, temBAD_OFFER};
@@ -299,7 +309,7 @@ NFTokenAcceptOffer::pay(AccountID const& from, AccountID const& to, STAmount con
 {
     // This should never happen, but it's easy and quick to check.
     if (amount < beast::zero)
-        return tecINTERNAL;
+        return tecINTERNAL;  // LCOV_EXCL_LINE

     auto const result = accountSend(view(), from, to, amount, j_);

@@ -410,6 +420,39 @@ NFTokenAcceptOffer::doApply()
     auto bo = loadToken(ctx_.tx[~sfNFTokenBuyOffer]);
     auto so = loadToken(ctx_.tx[~sfNFTokenSellOffer]);

+    // With fixExpiredNFTokenOfferRemoval amendment, check for expired offers
+    // and delete them, returning tecEXPIRED. This ensures expired offers
+    // are properly cleaned up from the ledger.
+    if (view().rules().enabled(fixExpiredNFTokenOfferRemoval))
+    {
+        bool foundExpired = false;
+
+        auto const deleteOfferIfExpired = [this, &foundExpired](std::shared_ptr<SLE> const& offer) -> TER {
+            if (offer && hasExpired(view(), (*offer)[~sfExpiration]))
+            {
+                JLOG(j_.trace()) << "Offer is expired, deleting: " << offer->key();
+                if (!nft::deleteTokenOffer(view(), offer))
+                {
+                    // LCOV_EXCL_START
+                    JLOG(j_.fatal()) << "Unable to delete expired offer '" << offer->key() << "': ignoring";
+                    return tecINTERNAL;
+                    // LCOV_EXCL_STOP
+                }
+                JLOG(j_.trace()) << "Deleted offer " << offer->key();
+                foundExpired = true;
+            }
+            return tesSUCCESS;
+        };
+
+        if (auto const r = deleteOfferIfExpired(bo); !isTesSuccess(r))
+            return r;
+        if (auto const r = deleteOfferIfExpired(so); !isTesSuccess(r))
+            return r;
+
+        if (foundExpired)
+            return tecEXPIRED;
+    }
+
     if (bo && !nft::deleteTokenOffer(view(), bo))
     {
         // LCOV_EXCL_START

From 6c1a92fe935a226a0fcea10778bae88cb74726c3 Mon Sep 17 00:00:00 2001
From: Jingchen
Date: Tue, 3 Feb 2026 19:08:27 +0000
Subject: [PATCH 9/9] refactor: Add ServiceRegistry to help modularization
 (#6222)

Currently we're passing the `Application` object around, where the
`Application` class acts more like a service registry that gives other
classes access to other services. In order to allow modularization, we
should replace `Application` with a service registry class so that
modules depending on `Application` for other services can be moved
easily.

This change adds the `ServiceRegistry` class.
---
 .../scripts/levelization/results/ordering.txt |   1 +
 include/xrpl/core/ServiceRegistry.h           | 202 ++++++++++++++++++
 src/xrpld/app/main/Application.h              |  83 +-------
 3 files changed, 205 insertions(+), 81 deletions(-)
 create mode 100644 include/xrpl/core/ServiceRegistry.h

diff --git a/.github/scripts/levelization/results/ordering.txt b/.github/scripts/levelization/results/ordering.txt
index 8d17e1167f..88a3441fa1 100644
--- a/.github/scripts/levelization/results/ordering.txt
+++ b/.github/scripts/levelization/results/ordering.txt
@@ -153,6 +153,7 @@ tests.libxrpl > xrpl.json
 tests.libxrpl > xrpl.net
 xrpl.core > xrpl.basics
 xrpl.core > xrpl.json
+xrpl.core > xrpl.ledger
 xrpl.json > xrpl.basics
 xrpl.ledger > xrpl.basics
 xrpl.ledger > xrpl.protocol
diff --git a/include/xrpl/core/ServiceRegistry.h b/include/xrpl/core/ServiceRegistry.h
new file mode 100644
index 0000000000..a70d96292c
--- /dev/null
+++ b/include/xrpl/core/ServiceRegistry.h
@@ -0,0 +1,202 @@
+#ifndef XRPL_CORE_SERVICEREGISTRY_H_INCLUDED
+#define XRPL_CORE_SERVICEREGISTRY_H_INCLUDED
+
+#include
+#include
+#include
+#include
+
+namespace xrpl {
+
+// Forward declarations
+namespace NodeStore {
+class Database;
+}
+namespace Resource {
+class Manager;
+}
+namespace perf {
+class PerfLog;
+}
+
+class AcceptedLedger;
+class AmendmentTable;
+class Cluster;
+class CollectorManager;
+class DatabaseCon;
+class Family;
+class HashRouter;
+class InboundLedgers;
+class InboundTransactions;
+class JobQueue;
+class LedgerCleaner;
+class LedgerMaster;
+class LedgerReplayer;
+class LoadFeeTrack;
+class LoadManager;
+class ManifestCache;
+class NetworkOPs;
+class OpenLedger;
+class OrderBookDB;
+class Overlay;
+class PathRequests;
+class PeerReservationTable;
+class PendingSaves;
+class RelationalDatabase;
+class ServerHandler;
+class SHAMapStore;
+class TimeKeeper;
+class TransactionMaster;
+class TxQ;
+class ValidatorList;
+class ValidatorSite;
+
+template <class Adaptor>
+class Validations;
+class RCLValidationsAdaptor;
+using RCLValidations = Validations<RCLValidationsAdaptor>;
+
+using NodeCache = TaggedCache<SHAMapHash, Blob>;
+
+/** Service registry for dependency injection.
+
+    This abstract interface provides access to various services and components
+    used throughout the application. It separates the service locator pattern
+    from the Application lifecycle management.
+
+    Components that need access to services can hold a reference to
+    ServiceRegistry rather than Application when they only need service
+    access and not lifecycle management.
+*/
+class ServiceRegistry
+{
+public:
+    ServiceRegistry() = default;
+    virtual ~ServiceRegistry() = default;
+
+    // Core infrastructure services
+    virtual CollectorManager&
+    getCollectorManager() = 0;
+
+    virtual Family&
+    getNodeFamily() = 0;
+
+    virtual TimeKeeper&
+    timeKeeper() = 0;
+
+    virtual JobQueue&
+    getJobQueue() = 0;
+
+    virtual NodeCache&
+    getTempNodeCache() = 0;
+
+    virtual CachedSLEs&
+    cachedSLEs() = 0;
+
+    // Protocol and validation services
+    virtual AmendmentTable&
+    getAmendmentTable() = 0;
+
+    virtual HashRouter&
+    getHashRouter() = 0;
+
+    virtual LoadFeeTrack&
+    getFeeTrack() = 0;
+
+    virtual LoadManager&
+    getLoadManager() = 0;
+
+    virtual RCLValidations&
+    getValidations() = 0;
+
+    virtual ValidatorList&
+    validators() = 0;
+
+    virtual ValidatorSite&
+    validatorSites() = 0;
+
+    virtual ManifestCache&
+    validatorManifests() = 0;
+
+    virtual ManifestCache&
+    publisherManifests() = 0;
+
+    // Network services
+    virtual Overlay&
+    overlay() = 0;
+
+    virtual Cluster&
+    cluster() = 0;
+
+    virtual PeerReservationTable&
+    peerReservations() = 0;
+
+    virtual Resource::Manager&
+    getResourceManager() = 0;
+
+    // Storage services
+    virtual NodeStore::Database&
+    getNodeStore() = 0;
+
+    virtual SHAMapStore&
+    getSHAMapStore() = 0;
+
+    virtual RelationalDatabase&
+    getRelationalDatabase() = 0;
+
+    // Ledger services
+    virtual InboundLedgers&
+    getInboundLedgers() = 0;
+
+    virtual InboundTransactions&
+    getInboundTransactions() = 0;
+
+    virtual TaggedCache<uint256, AcceptedLedger>&
+    getAcceptedLedgerCache() = 0;
+
+    virtual LedgerMaster&
+    getLedgerMaster() = 0;
+
+    virtual LedgerCleaner&
+    getLedgerCleaner() = 0;
+
+    virtual LedgerReplayer&
+    getLedgerReplayer() = 0;
+
+    virtual PendingSaves&
+    pendingSaves() = 0;
+
+    virtual OpenLedger&
+    openLedger() = 0;
+
+    virtual OpenLedger const&
+    openLedger() const = 0;
+
+    // Transaction and operation services
+    virtual NetworkOPs&
+    getOPs() = 0;
+
+    virtual OrderBookDB&
+    getOrderBookDB() = 0;
+
+    virtual TransactionMaster&
+    getMasterTransaction() = 0;
+
+    virtual TxQ&
+    getTxQ() = 0;
+
+    virtual PathRequests&
+    getPathRequests() = 0;
+
+    // Server services
+    virtual ServerHandler&
+    getServerHandler() = 0;
+
+    virtual perf::PerfLog&
+    getPerfLog() = 0;
+};
+
+}  // namespace xrpl
+
+#endif
diff --git a/src/xrpld/app/main/Application.h b/src/xrpld/app/main/Application.h
index 07edd5f558..bb8bac8bbb 100644
--- a/src/xrpld/app/main/Application.h
+++ b/src/xrpld/app/main/Application.h
@@ -6,6 +6,7 @@
 #include
 #include
+#include <xrpl/core/ServiceRegistry.h>
 #include
 #include

@@ -91,7 +92,7 @@ class Validations;
 class RCLValidationsAdaptor;
 using RCLValidations = Validations<RCLValidationsAdaptor>;

-class Application : public beast::PropertyStream::Source
+class Application : public ServiceRegistry, public beast::PropertyStream::Source
 {
 public:
     /* VFALCO NOTE
@@ -146,92 +147,12 @@ public:
     virtual boost::asio::io_context&
     getIOContext() = 0;

-    virtual CollectorManager&
-    getCollectorManager() = 0;
-    virtual Family&
-    getNodeFamily() = 0;
-    virtual TimeKeeper&
-    timeKeeper() = 0;
-    virtual JobQueue&
-    getJobQueue() = 0;
-    virtual NodeCache&
-    getTempNodeCache() = 0;
-    virtual CachedSLEs&
-    cachedSLEs() = 0;
-    virtual AmendmentTable&
-    getAmendmentTable() = 0;
-    virtual HashRouter&
-    getHashRouter() = 0;
-    virtual LoadFeeTrack&
-    getFeeTrack() = 0;
-    virtual LoadManager&
-    getLoadManager() = 0;
-    virtual Overlay&
-    overlay() = 0;
-    virtual TxQ&
-    getTxQ() = 0;
-    virtual ValidatorList&
-    validators() = 0;
-    virtual ValidatorSite&
-    validatorSites() = 0;
-    virtual ManifestCache&
-    validatorManifests() = 0;
-    virtual ManifestCache&
-    publisherManifests() = 0;
-    virtual Cluster&
-    cluster() = 0;
-    virtual PeerReservationTable&
-    peerReservations() = 0;
-    virtual RCLValidations&
-    getValidations() = 0;
-    virtual NodeStore::Database&
-    getNodeStore() = 0;
-    virtual InboundLedgers&
-    getInboundLedgers() = 0;
-    virtual InboundTransactions&
-    getInboundTransactions() = 0;
-
-    virtual TaggedCache<uint256, AcceptedLedger>&
-    getAcceptedLedgerCache() = 0;
-
-    virtual LedgerMaster&
-    getLedgerMaster() = 0;
-    virtual LedgerCleaner&
-    getLedgerCleaner() = 0;
-    virtual LedgerReplayer&
-    getLedgerReplayer() = 0;
-    virtual NetworkOPs&
-    getOPs() = 0;
-    virtual OrderBookDB&
-    getOrderBookDB() = 0;
-    virtual ServerHandler&
-    getServerHandler() = 0;
-    virtual TransactionMaster&
-    getMasterTransaction() = 0;
-    virtual perf::PerfLog&
-    getPerfLog() = 0;
-
     virtual std::pair<PublicKey, SecretKey> const&
     nodeIdentity() = 0;

     virtual std::optional<PublicKey>
     getValidationPublicKey() const = 0;

-    virtual Resource::Manager&
-    getResourceManager() = 0;
-    virtual PathRequests&
-    getPathRequests() = 0;
-    virtual SHAMapStore&
-    getSHAMapStore() = 0;
-    virtual PendingSaves&
-    pendingSaves() = 0;
-    virtual OpenLedger&
-    openLedger() = 0;
-    virtual OpenLedger const&
-    openLedger() const = 0;
-    virtual RelationalDatabase&
-    getRelationalDatabase() = 0;
-
     virtual std::chrono::milliseconds
     getIOLatency() = 0;