Mirror of https://github.com/XRPLF/rippled.git — synced 2026-01-31 03:55:27 +00:00

Compare commits (35 commits): bthomee/co... → ximinez/fi...
| Author | SHA1 | Date |
|---|---|---|
| | 4f98033c84 | |
| | 0f4b201379 | |
| | 561f2a29d0 | |
| | 2e14e453ae | |
| | b54539fb59 | |
| | 83036e4f77 | |
| | 0f231c3cd2 | |
| | 58febcb683 | |
| | 754757bac9 | |
| | 859467455c | |
| | 60dc20042c | |
| | 108b3e5e2c | |
| | 599f197109 | |
| | 36000188b1 | |
| | 944d758a39 | |
| | de661f8ef8 | |
| | 8b70664d4c | |
| | 4f03625b75 | |
| | fb0379f93d | |
| | 8e795197c9 | |
| | cf7665137e | |
| | 0aa43e1772 | |
| | 0834c23f27 | |
| | cd62ba13ab | |
| | 31fc348446 | |
| | 009f17b9bf | |
| | b0872bae95 | |
| | 0c69b23b93 | |
| | a2e93188fc | |
| | 5406d28357 | |
| | 7804c09494 | |
| | 79d294bd2d | |
| | acace507d0 | |
| | 98732100fb | |
| | b186516d0a | |
@@ -1,247 +0,0 @@
|
||||
_help_parse: Options affecting listfile parsing
|
||||
parse:
|
||||
_help_additional_commands:
|
||||
- Specify structure for custom cmake functions
|
||||
additional_commands:
|
||||
target_protobuf_sources:
|
||||
pargs:
|
||||
- target
|
||||
- prefix
|
||||
kwargs:
|
||||
PROTOS: "*"
|
||||
LANGUAGE: cpp
|
||||
IMPORT_DIRS: "*"
|
||||
GENERATE_EXTENSIONS: "*"
|
||||
PLUGIN: "*"
|
||||
_help_override_spec:
|
||||
- Override configurations per-command where available
|
||||
override_spec: {}
|
||||
_help_vartags:
|
||||
- Specify variable tags.
|
||||
vartags: []
|
||||
_help_proptags:
|
||||
- Specify property tags.
|
||||
proptags: []
|
||||
_help_format: Options affecting formatting.
|
||||
format:
|
||||
_help_disable:
|
||||
- Disable formatting entirely, making cmake-format a no-op
|
||||
disable: false
|
||||
_help_line_width:
|
||||
- How wide to allow formatted cmake files
|
||||
line_width: 120
|
||||
_help_tab_size:
|
||||
- How many spaces to tab for indent
|
||||
tab_size: 4
|
||||
_help_use_tabchars:
|
||||
- If true, lines are indented using tab characters (utf-8
|
||||
- 0x09) instead of <tab_size> space characters (utf-8 0x20).
|
||||
- In cases where the layout would require a fractional tab
|
||||
- character, the behavior of the fractional indentation is
|
||||
- governed by <fractional_tab_policy>
|
||||
use_tabchars: false
|
||||
_help_fractional_tab_policy:
|
||||
- If <use_tabchars> is True, then the value of this variable
|
||||
- indicates how fractional indentions are handled during
|
||||
- whitespace replacement. If set to 'use-space', fractional
|
||||
- indentation is left as spaces (utf-8 0x20). If set to
|
||||
- "`round-up` fractional indentation is replaced with a single"
|
||||
- tab character (utf-8 0x09) effectively shifting the column
|
||||
- to the next tabstop
|
||||
fractional_tab_policy: use-space
|
||||
_help_max_subgroups_hwrap:
|
||||
- If an argument group contains more than this many sub-groups
|
||||
- (parg or kwarg groups) then force it to a vertical layout.
|
||||
max_subgroups_hwrap: 4
|
||||
_help_max_pargs_hwrap:
|
||||
- If a positional argument group contains more than this many
|
||||
- arguments, then force it to a vertical layout.
|
||||
max_pargs_hwrap: 5
|
||||
_help_max_rows_cmdline:
|
||||
- If a cmdline positional group consumes more than this many
|
||||
- lines without nesting, then invalidate the layout (and nest)
|
||||
max_rows_cmdline: 2
|
||||
_help_separate_ctrl_name_with_space:
|
||||
- If true, separate flow control names from their parentheses
|
||||
- with a space
|
||||
separate_ctrl_name_with_space: true
|
||||
_help_separate_fn_name_with_space:
|
||||
- If true, separate function names from parentheses with a
|
||||
- space
|
||||
separate_fn_name_with_space: false
|
||||
_help_dangle_parens:
|
||||
- If a statement is wrapped to more than one line, than dangle
|
||||
- the closing parenthesis on its own line.
|
||||
dangle_parens: false
|
||||
_help_dangle_align:
|
||||
- If the trailing parenthesis must be 'dangled' on its on
|
||||
- "line, then align it to this reference: `prefix`: the start"
|
||||
- "of the statement, `prefix-indent`: the start of the"
|
||||
- "statement, plus one indentation level, `child`: align to"
|
||||
- the column of the arguments
|
||||
dangle_align: prefix
|
||||
_help_min_prefix_chars:
|
||||
- If the statement spelling length (including space and
|
||||
- parenthesis) is smaller than this amount, then force reject
|
||||
- nested layouts.
|
||||
min_prefix_chars: 18
|
||||
_help_max_prefix_chars:
|
||||
- If the statement spelling length (including space and
|
||||
- parenthesis) is larger than the tab width by more than this
|
||||
- amount, then force reject un-nested layouts.
|
||||
max_prefix_chars: 10
|
||||
_help_max_lines_hwrap:
|
||||
- If a candidate layout is wrapped horizontally but it exceeds
|
||||
- this many lines, then reject the layout.
|
||||
max_lines_hwrap: 2
|
||||
_help_line_ending:
|
||||
- What style line endings to use in the output.
|
||||
line_ending: unix
|
||||
_help_command_case:
|
||||
- Format command names consistently as 'lower' or 'upper' case
|
||||
command_case: canonical
|
||||
_help_keyword_case:
|
||||
- Format keywords consistently as 'lower' or 'upper' case
|
||||
keyword_case: unchanged
|
||||
_help_always_wrap:
|
||||
- A list of command names which should always be wrapped
|
||||
always_wrap: []
|
||||
_help_enable_sort:
|
||||
- If true, the argument lists which are known to be sortable
|
||||
- will be sorted lexicographicall
|
||||
enable_sort: true
|
||||
_help_autosort:
|
||||
- If true, the parsers may infer whether or not an argument
|
||||
- list is sortable (without annotation).
|
||||
autosort: true
|
||||
_help_require_valid_layout:
|
||||
- By default, if cmake-format cannot successfully fit
|
||||
- everything into the desired linewidth it will apply the
|
||||
- last, most aggressive attempt that it made. If this flag is
|
||||
- True, however, cmake-format will print error, exit with non-
|
||||
- zero status code, and write-out nothing
|
||||
require_valid_layout: false
|
||||
_help_layout_passes:
|
||||
- A dictionary mapping layout nodes to a list of wrap
|
||||
- decisions. See the documentation for more information.
|
||||
layout_passes: {}
|
||||
_help_markup: Options affecting comment reflow and formatting.
|
||||
markup:
|
||||
_help_bullet_char:
|
||||
- What character to use for bulleted lists
|
||||
bullet_char: "-"
|
||||
_help_enum_char:
|
||||
- What character to use as punctuation after numerals in an
|
||||
- enumerated list
|
||||
enum_char: .
|
||||
_help_first_comment_is_literal:
|
||||
- If comment markup is enabled, don't reflow the first comment
|
||||
- block in each listfile. Use this to preserve formatting of
|
||||
- your copyright/license statements.
|
||||
first_comment_is_literal: false
|
||||
_help_literal_comment_pattern:
|
||||
- If comment markup is enabled, don't reflow any comment block
|
||||
- which matches this (regex) pattern. Default is `None`
|
||||
- (disabled).
|
||||
literal_comment_pattern: null
|
||||
_help_fence_pattern:
|
||||
- Regular expression to match preformat fences in comments
|
||||
- default= ``r'^\s*([`~]{3}[`~]*)(.*)$'``
|
||||
fence_pattern: ^\s*([`~]{3}[`~]*)(.*)$
|
||||
_help_ruler_pattern:
|
||||
- Regular expression to match rulers in comments default=
|
||||
- '``r''^\s*[^\w\s]{3}.*[^\w\s]{3}$''``'
|
||||
ruler_pattern: ^\s*[^\w\s]{3}.*[^\w\s]{3}$
|
||||
_help_explicit_trailing_pattern:
|
||||
- If a comment line matches starts with this pattern then it
|
||||
- is explicitly a trailing comment for the preceding
|
||||
- argument. Default is '#<'
|
||||
explicit_trailing_pattern: "#<"
|
||||
_help_hashruler_min_length:
|
||||
- If a comment line starts with at least this many consecutive
|
||||
- hash characters, then don't lstrip() them off. This allows
|
||||
- for lazy hash rulers where the first hash char is not
|
||||
- separated by space
|
||||
hashruler_min_length: 10
|
||||
_help_canonicalize_hashrulers:
|
||||
- If true, then insert a space between the first hash char and
|
||||
- remaining hash chars in a hash ruler, and normalize its
|
||||
- length to fill the column
|
||||
canonicalize_hashrulers: true
|
||||
_help_enable_markup:
|
||||
- enable comment markup parsing and reflow
|
||||
enable_markup: false
|
||||
_help_lint: Options affecting the linter
|
||||
lint:
|
||||
_help_disabled_codes:
|
||||
- a list of lint codes to disable
|
||||
disabled_codes: []
|
||||
_help_function_pattern:
|
||||
- regular expression pattern describing valid function names
|
||||
function_pattern: "[0-9a-z_]+"
|
||||
_help_macro_pattern:
|
||||
- regular expression pattern describing valid macro names
|
||||
macro_pattern: "[0-9A-Z_]+"
|
||||
_help_global_var_pattern:
|
||||
- regular expression pattern describing valid names for
|
||||
- variables with global (cache) scope
|
||||
global_var_pattern: "[A-Z][0-9A-Z_]+"
|
||||
_help_internal_var_pattern:
|
||||
- regular expression pattern describing valid names for
|
||||
- variables with global scope (but internal semantic)
|
||||
internal_var_pattern: _[A-Z][0-9A-Z_]+
|
||||
_help_local_var_pattern:
|
||||
- regular expression pattern describing valid names for
|
||||
- variables with local scope
|
||||
local_var_pattern: "[a-z][a-z0-9_]+"
|
||||
_help_private_var_pattern:
|
||||
- regular expression pattern describing valid names for
|
||||
- privatedirectory variables
|
||||
private_var_pattern: _[0-9a-z_]+
|
||||
_help_public_var_pattern:
|
||||
- regular expression pattern describing valid names for public
|
||||
- directory variables
|
||||
public_var_pattern: "[A-Z][0-9A-Z_]+"
|
||||
_help_argument_var_pattern:
|
||||
- regular expression pattern describing valid names for
|
||||
- function/macro arguments and loop variables.
|
||||
argument_var_pattern: "[a-z][a-z0-9_]+"
|
||||
_help_keyword_pattern:
|
||||
- regular expression pattern describing valid names for
|
||||
- keywords used in functions or macros
|
||||
keyword_pattern: "[A-Z][0-9A-Z_]+"
|
||||
_help_max_conditionals_custom_parser:
|
||||
- In the heuristic for C0201, how many conditionals to match
|
||||
- within a loop in before considering the loop a parser.
|
||||
max_conditionals_custom_parser: 2
|
||||
_help_min_statement_spacing:
|
||||
- Require at least this many newlines between statements
|
||||
min_statement_spacing: 1
|
||||
_help_max_statement_spacing:
|
||||
- Require no more than this many newlines between statements
|
||||
max_statement_spacing: 2
|
||||
max_returns: 6
|
||||
max_branches: 12
|
||||
max_arguments: 5
|
||||
max_localvars: 15
|
||||
max_statements: 50
|
||||
_help_encode: Options affecting file encoding
|
||||
encode:
|
||||
_help_emit_byteorder_mark:
|
||||
- If true, emit the unicode byte-order mark (BOM) at the start
|
||||
- of the file
|
||||
emit_byteorder_mark: false
|
||||
_help_input_encoding:
|
||||
- Specify the encoding of the input file. Defaults to utf-8
|
||||
input_encoding: utf-8
|
||||
_help_output_encoding:
|
||||
- Specify the encoding of the output file. Defaults to utf-8.
|
||||
- Note that cmake only claims to support utf-8 so be careful
|
||||
- when using anything else
|
||||
output_encoding: utf-8
|
||||
_help_misc: Miscellaneous configurations options.
|
||||
misc:
|
||||
_help_per_command:
|
||||
- A dictionary containing any per-command configuration
|
||||
- overrides. Currently only `command_case` is supported.
|
||||
per_command: {}
|
||||
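The deleted configuration above told cmake-format how to parse the repository's custom target_protobuf_sources() helper (two positional arguments, then the listed keyword arguments). For orientation only, a minimal sketch of a call shaped to match that parse spec; the target and proto path here are hypothetical, inferred from the config rather than taken from the repository:

```cmake
# Hypothetical call matching the parse spec above:
#   pargs: <target> <prefix>; kwargs: PROTOS, LANGUAGE, IMPORT_DIRS, GENERATE_EXTENSIONS, PLUGIN
target_protobuf_sources(xrpl.libpb xrpl/proto
  LANGUAGE cpp
  IMPORT_DIRS include
  PROTOS "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl/proto/example.proto"
)
```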
@@ -101,7 +101,6 @@ words:
- gpgcheck
- gpgkey
- hotwallet
- hwrap
- ifndef
- inequation
- insuf
@@ -115,8 +114,6 @@ words:
- keylet
- keylets
- keyvadb
- kwarg
- kwargs
- ledgerentry
- ledgerhash
- ledgerindex
@@ -166,7 +163,6 @@ words:
- nunl
- Nyffenegger
- ostr
- pargs
- partitioner
- paychan
- paychans
@@ -26,12 +26,6 @@ repos:
args: [--style=file]
"types_or": [c++, c, proto]

- repo: https://github.com/cheshirekow/cmake-format-precommit
  rev: e2c2116d86a80e72e7146a06e68b7c228afc6319 # frozen: v0.6.13
  hooks:
  - id: cmake-format
    additional_dependencies: [PyYAML]

- repo: https://github.com/rbubley/mirrors-prettier
  rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
  hooks:
CMakeLists.txt (136)
@@ -1,16 +1,16 @@
cmake_minimum_required(VERSION 3.16)

if (POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif ()
if (POLICY CMP0077)
cmake_policy(SET CMP0077 NEW)
endif ()
if(POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif()
if(POLICY CMP0077)
cmake_policy(SET CMP0077 NEW)
endif()

# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
if (DEFINED CMAKE_MODULE_PATH)
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
endif ()
if(DEFINED CMAKE_MODULE_PATH)
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
endif()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")

project(xrpl)
@@ -20,65 +20,65 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)

include(CompilationEnv)

if (is_gcc)
if(is_gcc)
# GCC-specific fixes
add_compile_options(-Wno-unknown-pragmas -Wno-subobject-linkage)
# -Wno-subobject-linkage can be removed when we upgrade GCC version to at least 13.3
elseif (is_clang)
elseif(is_clang)
# Clang-specific fixes
add_compile_options(-Wno-unknown-warning-option) # Ignore unknown warning options
elseif (is_msvc)
elseif(is_msvc)
# MSVC-specific fixes
add_compile_options(/wd4068) # Ignore unknown pragmas
endif ()
endif()

# Enable ccache to speed up builds.
include(Ccache)

# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if (Git_FOUND)
if(Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if (gch)
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if(gch)
set(GIT_COMMIT_HASH "${gch}")
message(STATUS gch: ${GIT_COMMIT_HASH})
add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif ()
endif()

execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if (gb)
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if(gb)
set(GIT_BRANCH "${gb}")
message(STATUS gb: ${GIT_BRANCH})
add_definitions(-DGIT_BRANCH="${GIT_BRANCH}")
endif ()
endif () # git
endif()
endif() #git

if (thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS
-DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS)
add_compile_options("-stdlib=libc++")
add_link_options("-stdlib=libc++")
endif ()
if(thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS)
add_compile_options("-stdlib=libc++")
add_link_options("-stdlib=libc++")
endif()

include(CheckCXXCompilerFlag)
include(FetchContent)
include(ExternalProject)
include(CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
include (CheckCXXCompilerFlag)
include (FetchContent)
include (ExternalProject)
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
if (target)
message(FATAL_ERROR "The target option has been removed - use native cmake options to control build")
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()

include(XrplSanity)
include(XrplVersion)
include(XrplSettings)
# this check has to remain in the top-level cmake because of the early return statement
# this check has to remain in the top-level cmake
# because of the early return statement
if (packages_only)
if (NOT TARGET rpm)
message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?")
endif ()
return()
if (NOT TARGET rpm)
message (FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?")
endif()
return ()
endif ()
include(XrplCompiler)
include(XrplSanitizers)
@@ -86,9 +86,11 @@ include(XrplInterface)

option(only_docs "Include only the docs target?" FALSE)
include(XrplDocs)
if (only_docs)
return()
endif ()
if(only_docs)
return()
endif()

###

include(deps/Boost)

@@ -105,43 +107,45 @@ find_package(SOCI REQUIRED)
find_package(SQLite3 REQUIRED)
find_package(xxHash REQUIRED)

target_link_libraries(
xrpl_libs
INTERFACE ed25519::ed25519
lz4::lz4
OpenSSL::Crypto
OpenSSL::SSL
secp256k1::secp256k1
soci::soci
SQLite::SQLite3)
target_link_libraries(xrpl_libs INTERFACE
ed25519::ed25519
lz4::lz4
OpenSSL::Crypto
OpenSSL::SSL
secp256k1::secp256k1
soci::soci
SQLite::SQLite3
)

option(rocksdb "Enable RocksDB" ON)
if (rocksdb)
if(rocksdb)
find_package(RocksDB REQUIRED)
set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1)
set_target_properties(RocksDB::rocksdb PROPERTIES
INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
)
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
endif ()
endif()

# Work around changes to Conan recipe for now.
if (TARGET nudb::core)
set(nudb nudb::core)
elseif (TARGET NuDB::nudb)
set(nudb NuDB::nudb)
else ()
message(FATAL_ERROR "unknown nudb target")
endif ()
if(TARGET nudb::core)
set(nudb nudb::core)
elseif(TARGET NuDB::nudb)
set(nudb NuDB::nudb)
else()
message(FATAL_ERROR "unknown nudb target")
endif()
target_link_libraries(xrpl_libs INTERFACE ${nudb})

if (coverage)
include(XrplCov)
endif ()
if(coverage)
include(XrplCov)
endif()

set(PROJECT_EXPORT_SET XrplExports)
include(XrplCore)
include(XrplInstall)
include(XrplValidatorKeys)

if (tests)
include(CTest)
add_subdirectory(src/tests/libxrpl)
endif ()
if(tests)
include(CTest)
add_subdirectory(src/tests/libxrpl)
endif()
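The hunks above reflow, among other things, the block that bakes the current Git commit hash and branch into every translation unit. Pulled out on its own, the pattern looks like the following sketch (taken directly from the snippet above; only the surrounding context is trimmed):

```cmake
# Capture the current commit hash and expose it as a preprocessor macro.
find_package(Git)
if(Git_FOUND)
  execute_process(
    COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
    OUTPUT_STRIP_TRAILING_WHITESPACE
    OUTPUT_VARIABLE gch)
  if(gch)
    # Every source file can now read the hash via the GIT_COMMIT_HASH macro.
    add_definitions(-DGIT_COMMIT_HASH="${gch}")
  endif()
endif()
```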
@@ -1,29 +1,30 @@
macro (exclude_from_default target_)
set_target_properties(${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties(${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
endmacro ()

macro (exclude_if_included target_)
get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
exclude_from_default(${target_})
endif ()
get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
exclude_from_default (${target_})
endif ()
endmacro ()

find_package(Git)

function (git_branch branch_val)
if (NOT GIT_FOUND)
return()
endif ()
set(_branch "")
execute_process(COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE _git_exit_code
OUTPUT_VARIABLE _temp_branch
OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET)
if (_git_exit_code EQUAL 0)
set(_branch ${_temp_branch})
endif ()
set(${branch_val} "${_branch}" PARENT_SCOPE)
if (NOT GIT_FOUND)
return ()
endif ()
set (_branch "")
execute_process (COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE _git_exit_code
OUTPUT_VARIABLE _temp_branch
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET)
if (_git_exit_code EQUAL 0)
set (_branch ${_temp_branch})
endif ()
set (${branch_val} "${_branch}" PARENT_SCOPE)
endfunction ()
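git_branch() above returns its result through the named output variable using PARENT_SCOPE, so a caller passes the variable name rather than a value. A small usage sketch (the status message is illustrative, not from the repository):

```cmake
# _branch_name is set to the current branch, or to "" if git is unavailable/fails.
git_branch(_branch_name)
if(_branch_name)
  message(STATUS "Building from branch: ${_branch_name}")
endif()
```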
@@ -15,17 +15,18 @@ endif ()
# https://github.com/ccache/ccache/wiki/MS-Visual-Studio#usage-with-cmake.
if ("${CCACHE_PATH}" MATCHES "chocolatey")
message(DEBUG "Ccache path: ${CCACHE_PATH}")
# Chocolatey uses a shim executable that we cannot use directly, in which case we have to find the executable it
# points to. If we cannot find the target executable then we cannot use ccache.
# Chocolatey uses a shim executable that we cannot use directly, in which
# case we have to find the executable it points to. If we cannot find the
# target executable then we cannot use ccache.
find_program(BASH_PATH "bash")
if (NOT BASH_PATH)
message(WARNING "Could not find bash.")
return()
endif ()

execute_process(COMMAND bash -c
"export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1"
OUTPUT_VARIABLE CCACHE_PATH)
execute_process(
COMMAND bash -c "export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1"
OUTPUT_VARIABLE CCACHE_PATH)

if (NOT CCACHE_PATH)
message(WARNING "Could not find ccache target.")
@@ -36,14 +37,21 @@ endif ()
message(STATUS "Found ccache: ${CCACHE_PATH}")

# Tell cmake to use ccache for compiling with Visual Studio.
file(COPY_FILE ${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe ONLY_IF_DIFFERENT)
set(CMAKE_VS_GLOBALS "CLToolExe=cl.exe" "CLToolPath=${CMAKE_BINARY_DIR}" "TrackFileAccess=false"
"UseMultiToolTask=true")
file(COPY_FILE
${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe
ONLY_IF_DIFFERENT)
set(CMAKE_VS_GLOBALS
"CLToolExe=cl.exe"
"CLToolPath=${CMAKE_BINARY_DIR}"
"TrackFileAccess=false"
"UseMultiToolTask=true")

# By default Visual Studio generators will use /Zi to capture debug information, which is not compatible with ccache, so
# tell it to use /Z7 instead.
# By default Visual Studio generators will use /Zi to capture debug information,
# which is not compatible with ccache, so tell it to use /Z7 instead.
if (MSVC)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
string(REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
foreach (var_
CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
string (REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
endforeach ()
endif ()
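The module above resolves the real ccache binary behind the Chocolatey shim and routes Visual Studio compilations through it via CMAKE_VS_GLOBALS. For non-MSVC generators the conventional hookup is the compiler-launcher variables; a hedged sketch of that approach (not shown in the diff itself, included only for context):

```cmake
# Assumed sketch: route compilations through ccache on Makefile/Ninja generators.
find_program(CCACHE_PATH ccache)
if(CCACHE_PATH)
  set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PATH}")
  set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PATH}")
endif()
```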
@@ -172,47 +172,51 @@ include(CMakeParseArguments)
option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)

# Check prereqs
find_program(GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)
find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)

if (DEFINED CODE_COVERAGE_GCOV_TOOL)
set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif (DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if (APPLE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program(LLVMCOV_PATH llvm-cov)
endif ()
if (LLVMCOV_PATH)
set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
endif ()
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
find_program(GCOV_PATH gcov)
set(GCOV_TOOL "${GCOV_PATH}")
endif ()
if(DEFINED CODE_COVERAGE_GCOV_TOOL)
set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if(APPLE)
execute_process( COMMAND xcrun -f llvm-cov
OUTPUT_VARIABLE LLVMCOV_PATH
OUTPUT_STRIP_TRAILING_WHITESPACE
)
else()
find_program( LLVMCOV_PATH llvm-cov )
endif()
if(LLVMCOV_PATH)
set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
endif()
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
find_program( GCOV_PATH gcov )
set(GCOV_TOOL "${GCOV_PATH}")
endif()

# Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach (LANG ${LANGUAGES})
if ("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if ("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif ()
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES
"(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif ()
endforeach ()
foreach(LANG ${LANGUAGES})
if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif()
elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif()
endforeach()

set(COVERAGE_COMPILER_FLAGS "-g --coverage" CACHE INTERNAL "")
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")

set(COVERAGE_CXX_COMPILER_FLAGS "")
set(COVERAGE_C_COMPILER_FLAGS "")
set(COVERAGE_CXX_LINKER_FLAGS "")
set(COVERAGE_C_LINKER_FLAGS "")

if (CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag)
include(CheckCCompilerFlag)
include(CheckLinkerFlag)
@@ -223,51 +227,51 @@ if (CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})

check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if (HAVE_cxx_fprofile_abs_path)
if(HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path")
endif ()
endif()

check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if (HAVE_c_fprofile_abs_path)
if(HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path")
endif ()
endif()

check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path)
if (HAVE_cxx_linker_fprofile_abs_path)
if(HAVE_cxx_linker_fprofile_abs_path)
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path")
endif ()
endif()

check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path)
if (HAVE_c_linker_fprofile_abs_path)
if(HAVE_c_linker_fprofile_abs_path)
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path")
endif ()
endif()

check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
if (HAVE_cxx_fprofile_update)
if(HAVE_cxx_fprofile_update)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic")
endif ()
endif()

check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
if (HAVE_c_fprofile_update)
if(HAVE_c_fprofile_update)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic")
endif ()
endif()

check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update)
if (HAVE_cxx_linker_fprofile_update)
if(HAVE_cxx_linker_fprofile_update)
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic")
endif ()
endif()

check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update)
if (HAVE_c_linker_fprofile_update)
if(HAVE_c_linker_fprofile_update)
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic")
endif ()
endif()

endif ()
endif()

get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if (NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif () # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)

# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
@@ -291,186 +295,193 @@ endif () # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# GCVOR command.
function (setup_target_for_coverage_gcovr)
function(setup_target_for_coverage_gcovr)
set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})

if (NOT GCOV_TOOL)
if(NOT GCOV_TOOL)
message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
endif ()
endif()

if (NOT GCOVR_PATH)
if(NOT GCOVR_PATH)
message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
endif ()
endif()

# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if (DEFINED Coverage_BASE_DIRECTORY)
if(DEFINED Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else ()
else()
set(BASEDIR ${PROJECT_SOURCE_DIR})
endif ()
endif()

if (NOT DEFINED Coverage_FORMAT)
if(NOT DEFINED Coverage_FORMAT)
set(Coverage_FORMAT xml)
endif ()
endif()

if (NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS)
if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS)
message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set")
endif ()
endif()

if ("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else ()
if ((Coverage_FORMAT STREQUAL "html-details") OR (Coverage_FORMAT STREQUAL "html-nested"))
else()
if((Coverage_FORMAT STREQUAL "html-details")
OR (Coverage_FORMAT STREQUAL "html-nested"))
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif (Coverage_FORMAT STREQUAL "html-single")
elseif(Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls"))
elseif((Coverage_FORMAT STREQUAL "json-summary")
OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls"))
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif (Coverage_FORMAT STREQUAL "txt")
elseif(Coverage_FORMAT STREQUAL "txt")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
elseif (Coverage_FORMAT STREQUAL "csv")
elseif(Coverage_FORMAT STREQUAL "csv")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
elseif (Coverage_FORMAT STREQUAL "lcov")
elseif(Coverage_FORMAT STREQUAL "lcov")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.lcov)
else ()
else()
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
endif ()
endif ()
endif()
endif()

if ((Coverage_FORMAT STREQUAL "cobertura") OR (Coverage_FORMAT STREQUAL "xml"))
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty)
if((Coverage_FORMAT STREQUAL "cobertura")
OR (Coverage_FORMAT STREQUAL "xml"))
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
set(Coverage_FORMAT cobertura) # overwrite xml
elseif (Coverage_FORMAT STREQUAL "sonarqube")
list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "jacoco")
list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty)
elseif (Coverage_FORMAT STREQUAL "clover")
list(APPEND GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty)
elseif (Coverage_FORMAT STREQUAL "lcov")
list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "json-summary")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}")
elseif(Coverage_FORMAT STREQUAL "sonarqube")
list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "jacoco")
list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty )
elseif(Coverage_FORMAT STREQUAL "clover")
list(APPEND GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty )
elseif(Coverage_FORMAT STREQUAL "lcov")
list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "json-summary")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
elseif (Coverage_FORMAT STREQUAL "json-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}")
elseif(Coverage_FORMAT STREQUAL "json-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
elseif (Coverage_FORMAT STREQUAL "coveralls")
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}")
elseif(Coverage_FORMAT STREQUAL "coveralls")
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
elseif (Coverage_FORMAT STREQUAL "csv")
list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "txt")
list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "html-single")
list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}")
elseif(Coverage_FORMAT STREQUAL "csv")
list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "txt")
list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "html-single")
list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
elseif (Coverage_FORMAT STREQUAL "html-nested")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "html-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}")
else ()
elseif(Coverage_FORMAT STREQUAL "html-nested")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "html-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
else()
message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
endif ()
endif()

# Collect excludes (CMake 3.4+: Also compute absolute paths)
set(GCOVR_EXCLUDES "")
foreach (EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
if (CMAKE_VERSION VERSION_GREATER 3.4)
foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
endif ()
endif()
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
endforeach ()
endforeach()
list(REMOVE_DUPLICATES GCOVR_EXCLUDES)

# Combine excludes to several -e arguments
set(GCOVR_EXCLUDE_ARGS "")
foreach (EXCLUDE ${GCOVR_EXCLUDES})
foreach(EXCLUDE ${GCOVR_EXCLUDES})
list(APPEND GCOVR_EXCLUDE_ARGS "-e")
list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
endforeach ()
endforeach()

# Set up commands which will be run to generate coverage data
# If EXECUTABLE is not set, the user is expected to run the tests manually
# before running the coverage target NAME
if (DEFINED Coverage_EXECUTABLE)
set(GCOVR_EXEC_TESTS_CMD ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS})
endif ()
if(DEFINED Coverage_EXECUTABLE)
set(GCOVR_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
endif()

# Create folder
if (DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
endif ()
if(DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD
${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
endif()

# Running gcovr
set(GCOVR_CMD
${GCOVR_PATH}
--gcov-executable
${GCOV_TOOL}
--gcov-executable ${GCOV_TOOL}
--gcov-ignore-parse-errors=negative_hits.warn_once_per_file
-r
${BASEDIR}
-r ${BASEDIR}
${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS}
--object-directory=${PROJECT_BINARY_DIR})
--object-directory=${PROJECT_BINARY_DIR}
)

if (CODE_COVERAGE_VERBOSE)
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")

if (NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "")
if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "")
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
endif ()
endif()

if (NOT "${GCOVR_FOLDER_CMD}" STREQUAL "")
if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "")
message(STATUS "Command to create a folder: ")
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
endif ()
endif()

message(STATUS "Command to generate gcovr coverage data: ")
string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
message(STATUS "${GCOVR_CMD_SPACED}")
endif ()
endif()

add_custom_target(${Coverage_NAME}
COMMAND ${GCOVR_EXEC_TESTS_CMD}
COMMAND ${GCOVR_FOLDER_CMD}
COMMAND ${GCOVR_CMD}
BYPRODUCTS ${GCOVR_OUTPUT_FILE}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Running gcovr to produce code coverage report.")
COMMAND ${GCOVR_EXEC_TESTS_CMD}
COMMAND ${GCOVR_FOLDER_CMD}
COMMAND ${GCOVR_CMD}

BYPRODUCTS ${GCOVR_OUTPUT_FILE}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Running gcovr to produce code coverage report."
)

# Show info where to find the report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD COMMAND echo
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}")
endfunction () # setup_target_for_coverage_gcovr
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND echo
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
endfunction() # setup_target_for_coverage_gcovr

function (add_code_coverage_to_target name scope)
function(add_code_coverage_to_target name scope)
separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}")
separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}")
separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}")
separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")

# Add compiler options to the target
target_compile_options(${name} ${scope} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
target_compile_options(${name} ${scope}
$<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)

target_link_libraries(
${name}
${scope}
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS}
gcov>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS}
gcov>)
endfunction () # add_code_coverage_to_target
target_link_libraries (${name} ${scope}
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} gcov>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS} gcov>
)
endfunction() # add_code_coverage_to_target
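setup_target_for_coverage_gcovr() above creates a custom target that runs the given executable, then invokes gcovr in the chosen FORMAT; add_code_coverage_to_target() injects the instrumentation flags into a target. A typical wiring, as a hedged sketch — the target name xrpl.test and its --unittest argument are illustrative, not taken from the repository:

```cmake
# Instrument the test target, then expose a `coverage` target that runs it and reports.
add_code_coverage_to_target(xrpl.test PRIVATE)
setup_target_for_coverage_gcovr(
  NAME coverage
  FORMAT html-details
  EXECUTABLE xrpl.test
  EXECUTABLE_ARGS --unittest
  EXCLUDE "external/*"
  DEPENDENCIES xrpl.test
)
```

Building the `coverage` target then leaves the report at ${PROJECT_BINARY_DIR}/coverage/index.html, per the html-details branch above.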
@@ -1,7 +1,8 @@
# Shared detection of compiler, operating system, and architecture.
#
# This module centralizes environment detection so that other CMake modules can use the same variables instead of
# repeating checks on CMAKE_* and built-in platform variables.
# Shared detection of compiler, operating system, and architecture.
#
# This module centralizes environment detection so that other
# CMake modules can use the same variables instead of repeating
# checks on CMAKE_* and built-in platform variables.

# Only run once per configure step.
include_guard(GLOBAL)
@@ -14,20 +15,21 @@ set(is_gcc FALSE)
set(is_msvc FALSE)
set(is_xcode FALSE)

if (CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") # Clang or AppleClang
set(is_clang TRUE)
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set(is_gcc TRUE)
elseif (MSVC)
set(is_msvc TRUE)
else ()
message(FATAL_ERROR "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}")
endif ()
if(CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") # Clang or AppleClang
set(is_clang TRUE)
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set(is_gcc TRUE)
elseif(MSVC)
set(is_msvc TRUE)
else()
message(FATAL_ERROR "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}")
endif()

# Xcode generator detection
if (CMAKE_GENERATOR STREQUAL "Xcode")
set(is_xcode TRUE)
endif ()
if(CMAKE_GENERATOR STREQUAL "Xcode")
set(is_xcode TRUE)
endif()

# --------------------------------------------------------------------
# Operating system detection
@@ -36,23 +38,23 @@ set(is_linux FALSE)
set(is_windows FALSE)
set(is_macos FALSE)

if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
set(is_linux TRUE)
elseif (CMAKE_SYSTEM_NAME STREQUAL "Windows")
set(is_windows TRUE)
elseif (CMAKE_SYSTEM_NAME STREQUAL "Darwin")
set(is_macos TRUE)
endif ()
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
set(is_linux TRUE)
elseif(CMAKE_SYSTEM_NAME STREQUAL "Windows")
set(is_windows TRUE)
elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
set(is_macos TRUE)
endif()

# --------------------------------------------------------------------
# Architecture
# --------------------------------------------------------------------
set(is_amd64 FALSE)
set(is_arm64 FALSE)
if (CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64")
set(is_amd64 TRUE)
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64")
set(is_arm64 TRUE)
else ()
message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif ()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64")
set(is_amd64 TRUE)
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64")
set(is_arm64 TRUE)
else()
message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
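CompilationEnv above exposes plain boolean variables (is_gcc, is_clang, is_msvc, is_linux, is_windows, is_macos, is_amd64, is_arm64, is_xcode) so other modules can branch on them instead of repeating CMAKE_* checks. An illustrative consumer, with a hypothetical definition name chosen only for the example:

```cmake
include(CompilationEnv)

if(is_linux AND is_arm64)
  # Hypothetical example: only Linux/arm64 builds get this definition.
  add_compile_definitions(XRPL_LINUX_ARM64=1)
endif()
```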
@@ -1,16 +1,25 @@
include(isolate_headers)

function (xrpl_add_test name)
set(target ${PROJECT_NAME}.test.${name})
function(xrpl_add_test name)
set(target ${PROJECT_NAME}.test.${name})

file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp")
add_executable(${target} ${ARGN} ${sources})
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
)
add_executable(${target} ${ARGN} ${sources})

isolate_headers(${target} "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests/${name}" PRIVATE)
isolate_headers(
${target}
"${CMAKE_SOURCE_DIR}"
"${CMAKE_SOURCE_DIR}/tests/${name}"
PRIVATE
)

# Make sure the test isn't optimized away in unity builds
set_target_properties(${target} PROPERTIES UNITY_BUILD_MODE GROUP UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
# Make sure the test isn't optimized away in unity builds
set_target_properties(${target} PROPERTIES
UNITY_BUILD_MODE GROUP
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed

add_test(NAME ${target} COMMAND ${target})
endfunction ()
add_test(NAME ${target} COMMAND ${target})
endfunction()
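xrpl_add_test() above globs <name>.cpp and <name>/*.cpp next to the caller into an executable named ${PROJECT_NAME}.test.<name> and registers it with CTest. A sketch of a call site, assuming the project name is xrpl; the module name "basics" and the linked library are illustrative:

```cmake
# Creates target xrpl.test.basics from tests under this directory and adds a CTest entry.
xrpl_add_test(basics)
target_link_libraries(xrpl.test.basics PRIVATE xrpl.libxrpl)
```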
@@ -8,155 +8,153 @@ include(CompilationEnv)
|
||||
TODO some/most of these common settings belong in a
|
||||
toolchain file, especially the ABI-impacting ones
|
||||
#]=========================================================]
|
||||
add_library(common INTERFACE)
|
||||
add_library(Xrpl::common ALIAS common)
|
||||
add_library (common INTERFACE)
|
||||
add_library (Xrpl::common ALIAS common)
|
||||
include(XrplSanitizers)
|
||||
# add a single global dependency on this interface lib
|
||||
link_libraries(Xrpl::common)
|
||||
link_libraries (Xrpl::common)
|
||||
# Respect CMAKE_POSITION_INDEPENDENT_CODE setting (may be set by Conan toolchain)
|
||||
if (NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE)
|
||||
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
|
||||
endif ()
|
||||
set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE})
|
||||
if(NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE)
|
||||
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
|
||||
endif()
|
||||
set_target_properties (common
|
||||
PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE})
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
target_compile_definitions(
|
||||
common
|
||||
INTERFACE $<$<CONFIG:Debug>:DEBUG
|
||||
_DEBUG>
|
||||
#[===[
|
||||
target_compile_definitions (common
|
||||
INTERFACE
|
||||
$<$<CONFIG:Debug>:DEBUG _DEBUG>
|
||||
#[===[
|
||||
NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
|
||||
explicitly except for the special case of (profile ON) and (assert OFF).
|
||||
Presumably this is because we don't want profile builds asserting unless
|
||||
asserts were specifically requested.
|
||||
]===]
|
||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
|
||||
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
|
||||
OPENSSL_SUPPRESS_DEPRECATED)
|
||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
|
||||
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
|
||||
OPENSSL_SUPPRESS_DEPRECATED
|
||||
)
|
||||
|
||||
if (MSVC)
|
||||
# remove existing exception flag since we set it to -EHa
|
||||
string(REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
# remove existing exception flag since we set it to -EHa
|
||||
string (REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
|
||||
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
|
||||
foreach (var_
|
||||
CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
|
||||
CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
|
||||
|
||||
# also remove dynamic runtime
|
||||
string(REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")
|
||||
# also remove dynamic runtime
|
||||
string (REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")
|
||||
|
||||
# /ZI (Edit & Continue debugging information) is incompatible with Gy-
|
||||
string(REPLACE "/ZI" "/Zi" ${var_} "${${var_}}")
|
||||
# /ZI (Edit & Continue debugging information) is incompatible with Gy-
|
||||
string (REPLACE "/ZI" "/Zi" ${var_} "${${var_}}")
|
||||
|
||||
# omit debug info completely under CI (not needed)
|
||||
if (is_ci)
|
||||
string(REPLACE "/Zi" " " ${var_} "${${var_}}")
|
||||
string(REPLACE "/Z7" " " ${var_} "${${var_}}")
|
||||
endif ()
|
||||
endforeach ()
|
||||
# omit debug info completely under CI (not needed)
|
||||
if (is_ci)
|
||||
string (REPLACE "/Zi" " " ${var_} "${${var_}}")
|
||||
string (REPLACE "/Z7" " " ${var_} "${${var_}}")
|
||||
endif ()
|
||||
endforeach ()
|
||||
|
||||
target_compile_options(
|
||||
common
|
||||
INTERFACE # Increase object file max size
|
||||
-bigobj
|
||||
# Floating point behavior
|
||||
-fp:precise
|
||||
# __cdecl calling convention
|
||||
-Gd
|
||||
# Minimal rebuild: disabled
|
||||
-Gm-
|
||||
# Function level linking: disabled
|
||||
-Gy-
|
||||
# Multiprocessor compilation
|
||||
-MP
|
||||
# pragma omp: disabled
|
||||
-openmp-
|
||||
# No error reporting to Internet
|
||||
-errorReport:none
|
||||
# Suppress login banner
|
||||
-nologo
|
||||
# Disable signed/unsigned comparison warnings
|
||||
-wd4018
|
||||
# Disable float to int possible loss of data warnings
|
||||
-wd4244
|
||||
# Disable size_t to T possible loss of data warnings
|
||||
-wd4267
|
||||
# Disable C4800(int to bool performance)
|
||||
-wd4800
|
||||
# Decorated name length exceeded, name was truncated
|
||||
-wd4503
|
||||
$<$<COMPILE_LANGUAGE:CXX>:
|
||||
-EHa
|
||||
-GR
|
||||
>
|
||||
$<$<CONFIG:Release>:-Ox>
|
||||
$<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:
|
||||
-GS
|
||||
-Zc:forScope
|
||||
>
|
||||
# static runtime
|
||||
$<$<CONFIG:Debug>:-MTd>
|
||||
$<$<NOT:$<CONFIG:Debug>>:-MT>
|
||||
$<$<BOOL:${werr}>:-WX>)
|
||||
target_compile_definitions(
|
||||
common
|
||||
INTERFACE _WIN32_WINNT=0x6000
|
||||
_SCL_SECURE_NO_WARNINGS
|
||||
_CRT_SECURE_NO_WARNINGS
|
||||
WIN32_CONSOLE
|
||||
WIN32_LEAN_AND_MEAN
|
||||
NOMINMAX
|
||||
# TODO: Resolve these warnings, don't just silence them
|
||||
_SILENCE_ALL_CXX17_DEPRECATION_WARNINGS
|
||||
$<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:_CRTDBG_MAP_ALLOC>)
|
||||
target_link_libraries(common INTERFACE -errorreport:none -machine:X64)
|
||||
target_compile_options (common
|
||||
INTERFACE
|
||||
-bigobj # Increase object file max size
|
||||
-fp:precise # Floating point behavior
|
||||
-Gd # __cdecl calling convention
|
||||
-Gm- # Minimal rebuild: disabled
|
||||
-Gy- # Function level linking: disabled
|
||||
-MP # Multiprocessor compilation
|
||||
-openmp- # pragma omp: disabled
|
||||
-errorReport:none # No error reporting to Internet
|
||||
-nologo # Suppress login banner
|
||||
-wd4018 # Disable signed/unsigned comparison warnings
|
||||
-wd4244 # Disable float to int possible loss of data warnings
|
||||
-wd4267 # Disable size_t to T possible loss of data warnings
|
||||
-wd4800 # Disable C4800(int to bool performance)
|
||||
-wd4503 # Decorated name length exceeded, name was truncated
|
||||
$<$<COMPILE_LANGUAGE:CXX>:
|
||||
-EHa
|
||||
-GR
|
||||
>
|
||||
$<$<CONFIG:Release>:-Ox>
|
||||
$<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:
|
||||
-GS
|
||||
-Zc:forScope
|
||||
>
|
||||
# static runtime
|
||||
$<$<CONFIG:Debug>:-MTd>
|
||||
$<$<NOT:$<CONFIG:Debug>>:-MT>
|
||||
$<$<BOOL:${werr}>:-WX>
|
||||
)
|
||||
target_compile_definitions (common
INTERFACE
_WIN32_WINNT=0x6000
_SCL_SECURE_NO_WARNINGS
_CRT_SECURE_NO_WARNINGS
WIN32_CONSOLE
WIN32_LEAN_AND_MEAN
NOMINMAX
# TODO: Resolve these warnings, don't just silence them
_SILENCE_ALL_CXX17_DEPRECATION_WARNINGS
$<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:_CRTDBG_MAP_ALLOC>)
target_link_libraries (common
INTERFACE
-errorreport:none
-machine:X64)
else ()
target_compile_options(
common
INTERFACE -Wall
-Wdeprecated
$<$<BOOL:${is_clang}>:-Wno-deprecated-declarations>
$<$<BOOL:${wextra}>:-Wextra
-Wno-unused-parameter>
$<$<BOOL:${werr}>:-Werror>
-fstack-protector
-Wno-sign-compare
-Wno-unused-but-set-variable
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
# tweak gcc optimization for debug
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>
# Add debug symbols to release config
$<$<CONFIG:Release>:-g>)
target_link_libraries(
common
INTERFACE -rdynamic
$<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id>
# link to static libc/c++ iff: * static option set and * NOT APPLE (AppleClang does not support static
# libc/c++) and * NOT SANITIZERS (sanitizers typically don't work with static libc/c++)
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${SANITIZERS_ENABLED}>>>:
-static-libstdc++
-static-libgcc
>)
target_compile_options (common
INTERFACE
-Wall
-Wdeprecated
$<$<BOOL:${is_clang}>:-Wno-deprecated-declarations>
$<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
$<$<BOOL:${werr}>:-Werror>
-fstack-protector
-Wno-sign-compare
-Wno-unused-but-set-variable
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
# tweak gcc optimization for debug
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>
# Add debug symbols to release config
$<$<CONFIG:Release>:-g>)
target_link_libraries (common
INTERFACE
-rdynamic
$<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id>
# link to static libc/c++ iff:
# * static option set and
# * NOT APPLE (AppleClang does not support static libc/c++) and
# * NOT SANITIZERS (sanitizers typically don't work with static libc/c++)
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${SANITIZERS_ENABLED}>>>:
-static-libstdc++
-static-libgcc
>)
endif ()

# Antithesis instrumentation will only be built and deployed using machines running Linux.
if (voidstar)
if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...")
elseif (NOT is_linux)
message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
endif ()
if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...")
elseif (NOT is_linux)
message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
endif ()
endif ()

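# Illustrative configure line for the guards above; a minimal sketch assuming a
# clang 16+ toolchain on a Linux host (the compiler name is an example, not mandated here):
#   cmake -DCMAKE_BUILD_TYPE=Debug -Dvoidstar=ON -DCMAKE_CXX_COMPILER=clang++ ..
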
if (use_mold)
# use mold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "mold")
target_link_libraries(common INTERFACE -fuse-ld=mold)
endif ()
unset(LD_VERSION)
# use mold linker if available
execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "mold")
target_link_libraries (common INTERFACE -fuse-ld=mold)
endif ()
unset (LD_VERSION)
elseif (use_gold AND is_gcc)
# use gold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
# use gold linker if available
execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
#[=========================================================[
NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
default instead of DT_RPATH, so you might have slightly
@@ -170,31 +168,34 @@ elseif (use_gold AND is_gcc)
disabling would be to figure out all the settings
required to make gold play nicely with jemalloc.
#]=========================================================]
if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc))
target_link_libraries(
common
INTERFACE -fuse-ld=gold
-Wl,--no-as-needed
#[=========================================================[
if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc))
target_link_libraries (common
INTERFACE
-fuse-ld=gold
-Wl,--no-as-needed
#[=========================================================[
see https://bugs.launchpad.net/ubuntu/+source/eglibc/+bug/1253638/comments/5
DT_RUNPATH does not work great for transitive
dependencies (of which boost has a few) - so just
switch to DT_RPATH if doing dynamic linking with gold
#]=========================================================]
$<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>)
endif ()
unset(LD_VERSION)
$<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>)
endif ()
unset (LD_VERSION)
elseif (use_lld)
# use lld linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "LLD")
target_link_libraries(common INTERFACE -fuse-ld=lld)
endif ()
unset(LD_VERSION)
endif ()
# use lld linker if available
execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "LLD")
target_link_libraries (common INTERFACE -fuse-ld=lld)
endif ()
unset (LD_VERSION)
endif()

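# Usage sketch for the linker detection above: each branch probes the compiler with
# -fuse-ld=<linker> -Wl,--version and only adds the flag when the probe output names
# that linker. Illustrative configure lines (the flags are the options defined later in this diff):
#   cmake -Duse_mold=ON ..   # prefer mold when the toolchain can run it
#   cmake -Duse_gold=ON ..   # gold is gcc-only here and skipped when jemalloc is enabled
#   cmake -Duse_lld=ON ..    # lld detection is offered for clang builds
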
if (assert)
foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE)
string(REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}")
endforeach ()
foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE)
STRING (REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}")
endforeach ()
endif ()

@@ -1,52 +1,54 @@
include(CMakeFindDependencyMacro)
include (CMakeFindDependencyMacro)
# need to represent system dependencies of the lib here
#[=========================================================[
Boost
#]=========================================================]
if (static OR APPLE OR MSVC)
set(Boost_USE_STATIC_LIBS ON)
set (Boost_USE_STATIC_LIBS ON)
endif ()
set(Boost_USE_MULTITHREADED ON)
set (Boost_USE_MULTITHREADED ON)
if (static OR MSVC)
set(Boost_USE_STATIC_RUNTIME ON)
set (Boost_USE_STATIC_RUNTIME ON)
else ()
set(Boost_USE_STATIC_RUNTIME OFF)
set (Boost_USE_STATIC_RUNTIME OFF)
endif ()
find_dependency(Boost
COMPONENTS
chrono
container
context
coroutine
date_time
filesystem
program_options
regex
system
thread)
find_dependency (Boost
COMPONENTS
chrono
container
context
coroutine
date_time
filesystem
program_options
regex
system
thread)
#[=========================================================[
OpenSSL
#]=========================================================]
if (NOT DEFINED OPENSSL_ROOT_DIR)
if (DEFINED ENV{OPENSSL_ROOT})
set(OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
elseif (APPLE)
find_program(homebrew brew)
if (homebrew)
execute_process(COMMAND ${homebrew} --prefix openssl OUTPUT_VARIABLE OPENSSL_ROOT_DIR
OUTPUT_STRIP_TRAILING_WHITESPACE)
endif ()
if (DEFINED ENV{OPENSSL_ROOT})
set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
elseif (APPLE)
find_program (homebrew brew)
if (homebrew)
execute_process (COMMAND ${homebrew} --prefix openssl
OUTPUT_VARIABLE OPENSSL_ROOT_DIR
OUTPUT_STRIP_TRAILING_WHITESPACE)
endif ()
file(TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
endif ()
file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
endif ()

if (static OR APPLE OR MSVC)
set(OPENSSL_USE_STATIC_LIBS ON)
set (OPENSSL_USE_STATIC_LIBS ON)
endif ()
set(OPENSSL_MSVC_STATIC_RT ON)
find_dependency(OpenSSL REQUIRED)
find_dependency(ZLIB)
find_dependency(date)
set (OPENSSL_MSVC_STATIC_RT ON)
find_dependency (OpenSSL REQUIRED)
find_dependency (ZLIB)
find_dependency (date)
if (TARGET ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif ()

@@ -10,44 +10,63 @@ include(target_protobuf_sources)
# so we just build them as a separate library.
add_library(xrpl.libpb)
set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto
PROTOS include/xrpl/proto/xrpl.proto)
target_protobuf_sources(xrpl.libpb xrpl/proto
LANGUAGE cpp
IMPORT_DIRS include/xrpl/proto
PROTOS include/xrpl/proto/xrpl.proto
)

file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto PROTOS "${protos}")
target_protobuf_sources(
xrpl.libpb xrpl/proto
LANGUAGE grpc
IMPORT_DIRS include/xrpl/proto
PROTOS "${protos}"
PLUGIN protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc)
target_protobuf_sources(xrpl.libpb xrpl/proto
LANGUAGE cpp
IMPORT_DIRS include/xrpl/proto
PROTOS "${protos}"
)
target_protobuf_sources(xrpl.libpb xrpl/proto
LANGUAGE grpc
IMPORT_DIRS include/xrpl/proto
PROTOS "${protos}"
PLUGIN protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc
)

target_compile_options(
xrpl.libpb PUBLIC $<$<BOOL:${is_msvc}>:-wd4996> $<$<BOOL:${is_xcode}>: --system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec >
PRIVATE $<$<BOOL:${is_msvc}>:-wd4065> $<$<NOT:$<BOOL:${is_msvc}>>:-Wno-deprecated-declarations>)
target_compile_options(xrpl.libpb
PUBLIC
$<$<BOOL:${is_msvc}>:-wd4996>
$<$<BOOL:${is_xcode}>:
--system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec
>
PRIVATE
$<$<BOOL:${is_msvc}>:-wd4065>
$<$<NOT:$<BOOL:${is_msvc}>>:-Wno-deprecated-declarations>
)

target_link_libraries(xrpl.libpb PUBLIC protobuf::libprotobuf gRPC::grpc++)
target_link_libraries(xrpl.libpb
PUBLIC
protobuf::libprotobuf
gRPC::grpc++
)

# TODO: Clean up the number of library targets later.
add_library(xrpl.imports.main INTERFACE)

target_link_libraries(
xrpl.imports.main
INTERFACE absl::random_random
date::date
ed25519::ed25519
LibArchive::LibArchive
OpenSSL::Crypto
Xrpl::boost
Xrpl::libs
Xrpl::opts
Xrpl::syslibs
secp256k1::secp256k1
xrpl.libpb
xxHash::xxhash
$<$<BOOL:${voidstar}>:antithesis-sdk-cpp>)
target_link_libraries(xrpl.imports.main
INTERFACE
absl::random_random
date::date
ed25519::ed25519
LibArchive::LibArchive
OpenSSL::Crypto
Xrpl::boost
Xrpl::libs
Xrpl::opts
Xrpl::syslibs
secp256k1::secp256k1
xrpl.libpb
xxHash::xxhash
$<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
)

include(add_module)
include(target_link_modules)
@@ -69,11 +88,18 @@ target_link_libraries(xrpl.libxrpl.crypto PUBLIC xrpl.libxrpl.basics)

# Level 04
add_module(xrpl protocol)
target_link_libraries(xrpl.libxrpl.protocol PUBLIC xrpl.libxrpl.crypto xrpl.libxrpl.json)
target_link_libraries(xrpl.libxrpl.protocol PUBLIC
xrpl.libxrpl.crypto
xrpl.libxrpl.json
)

# Level 05
add_module(xrpl core)
target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.core PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)

# Level 06
add_module(xrpl resource)
@@ -81,45 +107,62 @@ target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)

# Level 07
add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol
xrpl.libxrpl.resource)
target_link_libraries(xrpl.libxrpl.net PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
)

add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)

add_module(xrpl nodestore)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)

add_module(xrpl shamap)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto xrpl.libxrpl.protocol
xrpl.libxrpl.nodestore)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.crypto
xrpl.libxrpl.protocol
xrpl.libxrpl.nodestore
)

add_module(xrpl ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)

add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)

add_library(xrpl::libxrpl ALIAS xrpl.libxrpl)

file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp")
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp"
)
target_sources(xrpl.libxrpl PRIVATE ${sources})

target_link_modules(
xrpl
PUBLIC
basics
beast
core
crypto
json
protocol
resource
server
nodestore
shamap
net
ledger)
target_link_modules(xrpl PUBLIC
basics
beast
core
crypto
json
protocol
resource
server
nodestore
shamap
net
ledger
)

# All headers in libxrpl are in modules.
# Uncomment this stanza if you have not yet moved new headers into a module.
@@ -130,42 +173,63 @@ target_link_modules(
# $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
# $<INSTALL_INTERFACE:include>)

if (xrpld)
add_executable(xrpld)
if (tests)
target_compile_definitions(xrpld PUBLIC ENABLE_TESTS)
target_compile_definitions(xrpld PRIVATE UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE})
endif ()
target_include_directories(xrpld PRIVATE $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>)
if(xrpld)
add_executable(xrpld)
if(tests)
target_compile_definitions(xrpld PUBLIC ENABLE_TESTS)
target_compile_definitions(xrpld PRIVATE
UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
)
endif()
target_include_directories(xrpld
PRIVATE
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
)

file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp")
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
)
target_sources(xrpld PRIVATE ${sources})

if(tests)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
)
target_sources(xrpld PRIVATE ${sources})
endif()

if (tests)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp")
target_sources(xrpld PRIVATE ${sources})
endif ()
target_link_libraries(xrpld
Xrpl::boost
Xrpl::opts
Xrpl::libs
xrpl.libxrpl
)
exclude_if_included(xrpld)
# define a macro for tests that might need to
# be excluded or run differently in CI environment
if(is_ci)
target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI)
endif ()

target_link_libraries(xrpld Xrpl::boost Xrpl::opts Xrpl::libs xrpl.libxrpl)
exclude_if_included(xrpld)
# define a macro for tests that might need to
# be excluded or run differently in CI environment
if (is_ci)
target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI)
endif ()
if(voidstar)
target_compile_options(xrpld
PRIVATE
-fsanitize-coverage=trace-pc-guard
)
# xrpld requires access to antithesis-sdk-cpp implementation file
# antithesis_instrumentation.h, which is not exported as INTERFACE
target_include_directories(xrpld
PRIVATE
${CMAKE_SOURCE_DIR}/external/antithesis-sdk
)
endif()

if (voidstar)
target_compile_options(xrpld PRIVATE -fsanitize-coverage=trace-pc-guard)
# xrpld requires access to antithesis-sdk-cpp implementation file
# antithesis_instrumentation.h, which is not exported as INTERFACE
target_include_directories(xrpld PRIVATE ${CMAKE_SOURCE_DIR}/external/antithesis-sdk)
endif ()

# any files that don't play well with unity should be added here
if (tests)
set_source_files_properties(
# these two seem to produce conflicts in beast teardown template methods
src/test/rpc/ValidatorRPC_test.cpp src/test/ledger/Invariants_test.cpp PROPERTIES SKIP_UNITY_BUILD_INCLUSION
TRUE)
endif ()
endif ()
# any files that don't play well with unity should be added here
if(tests)
set_source_files_properties(
# these two seem to produce conflicts in beast teardown template methods
src/test/rpc/ValidatorRPC_test.cpp
src/test/ledger/Invariants_test.cpp
PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
endif()
endif()

@@ -2,51 +2,40 @@
coverage report target
#]===================================================================]

if (NOT coverage)
message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
endif ()
if(NOT coverage)
message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
endif()

if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
return()
endif ()
if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
return()
endif()

include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT)

include(CodeCoverage)

# The instructions for these commands come from the `CodeCoverage` module, which was copied from
# https://github.com/bilke/cmake-modules, commit fb7d2a3, then locally changed (see CHANGES: section in
# `CodeCoverage.cmake`)
# The instructions for these commands come from the `CodeCoverage` module,
# which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
# then locally changed (see CHANGES: section in `CodeCoverage.cmake`)

set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
if (NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
separate_arguments(GCOVR_ADDITIONAL_ARGS)
endif ()
if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
separate_arguments(GCOVR_ADDITIONAL_ARGS)
endif()

list(APPEND
GCOVR_ADDITIONAL_ARGS
--exclude-throw-branches
--exclude-noncode-lines
--exclude-unreachable-branches
-s
-j
${PROCESSOR_COUNT})
list(APPEND GCOVR_ADDITIONAL_ARGS
--exclude-throw-branches
--exclude-noncode-lines
--exclude-unreachable-branches -s
-j ${PROCESSOR_COUNT})

setup_target_for_coverage_gcovr(
NAME
coverage
FORMAT
${coverage_format}
EXCLUDE
"src/test"
"src/tests"
"include/xrpl/beast/test"
"include/xrpl/beast/unit_test"
"${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
DEPENDENCIES
xrpld
xrpl.tests)
NAME coverage
FORMAT ${coverage_format}
EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
DEPENDENCIES xrpld xrpl.tests
)

add_code_coverage_to_target(opts INTERFACE)

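# Usage sketch for the coverage target above (assumes a gcc/clang build with gcovr installed):
#   cmake -Dcoverage=ON -Dcoverage_format=html-details ..
#   cmake --build . --target coverage
# coverage_extra_args is forwarded to gcovr on top of the defaults appended above.
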
@@ -2,44 +2,45 @@
docs target (optional)
#]===================================================================]

if (NOT only_docs)
return()
endif ()
if(NOT only_docs)
return()
endif()

find_package(Doxygen)
if (NOT TARGET Doxygen::doxygen)
message(STATUS "doxygen executable not found -- skipping docs target")
return()
endif ()
if(NOT TARGET Doxygen::doxygen)
message(STATUS "doxygen executable not found -- skipping docs target")
return()
endif()

set(doxygen_output_directory "${CMAKE_BINARY_DIR}/docs")
set(doxygen_include_path "${CMAKE_CURRENT_SOURCE_DIR}/src")
set(doxygen_index_file "${doxygen_output_directory}/html/index.html")
set(doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile")

file(GLOB_RECURSE
doxygen_input
docs/*.md
include/*.h
include/*.cpp
include/*.md
src/*.h
src/*.cpp
src/*.md
Builds/*.md
*.md)
list(APPEND doxygen_input external/README.md)
file(GLOB_RECURSE doxygen_input
docs/*.md
include/*.h
include/*.cpp
include/*.md
src/*.h
src/*.cpp
src/*.md
Builds/*.md
*.md)
list(APPEND doxygen_input
external/README.md
)
set(dependencies "${doxygen_input}" "${doxyfile}")

function (verbose_find_path variable name)
# find_path sets a CACHE variable, so don't try using a "local" variable.
find_path(${variable} "${name}" ${ARGN})
if (NOT ${variable})
message(NOTICE "could not find ${name}")
else ()
message(STATUS "found ${name}: ${${variable}}/${name}")
endif ()
endfunction ()
function(verbose_find_path variable name)
# find_path sets a CACHE variable, so don't try using a "local" variable.
find_path(${variable} "${name}" ${ARGN})
if(NOT ${variable})
message(NOTICE "could not find ${name}")
else()
message(STATUS "found ${name}: ${${variable}}/${name}")
endif()
endfunction()

verbose_find_path(doxygen_plantuml_jar_path plantuml.jar PATH_SUFFIXES share/plantuml)
verbose_find_path(doxygen_dot_path dot)
@@ -47,26 +48,36 @@ verbose_find_path(doxygen_dot_path dot)
# https://en.cppreference.com/w/Cppreference:Archives
# https://stackoverflow.com/questions/60822559/how-to-move-a-file-download-from-configure-step-to-build-step
set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake")
file(WRITE "${download_script}"
"file(DOWNLOAD \
file(WRITE
"${download_script}"
"file(DOWNLOAD \
https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \
${CMAKE_BINARY_DIR}/docs/cppreference.zip \
EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \
)\n \
execute_process( \
COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \
)\n")
)\n"
)
set(tagfile "${CMAKE_BINARY_DIR}/docs/cppreference-doxygen-web.tag.xml")
add_custom_command(OUTPUT "${tagfile}" COMMAND "${CMAKE_COMMAND}" -P "${download_script}"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs")
add_custom_command(
OUTPUT "${tagfile}"
COMMAND "${CMAKE_COMMAND}" -P "${download_script}"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs"
)
set(doxygen_tagfiles "${tagfile}=http://en.cppreference.com/w/")

add_custom_command(
OUTPUT "${doxygen_index_file}"
COMMAND "${CMAKE_COMMAND}" -E env "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}"
"DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" "DOXYGEN_TAGFILES=${doxygen_tagfiles}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" "DOXYGEN_DOT_PATH=${doxygen_dot_path}"
"${DOXYGEN_EXECUTABLE}" "${doxyfile}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
DEPENDS "${dependencies}" "${tagfile}")
add_custom_target(docs DEPENDS "${doxygen_index_file}" SOURCES "${dependencies}")
OUTPUT "${doxygen_index_file}"
COMMAND "${CMAKE_COMMAND}" -E env
"DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}"
"DOXYGEN_INCLUDE_PATH=${doxygen_include_path}"
"DOXYGEN_TAGFILES=${doxygen_tagfiles}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}"
"DOXYGEN_DOT_PATH=${doxygen_dot_path}"
"${DOXYGEN_EXECUTABLE}" "${doxyfile}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
DEPENDS "${dependencies}" "${tagfile}")
add_custom_target(docs
DEPENDS "${doxygen_index_file}"
SOURCES "${dependencies}")

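# Build sketch for the docs target above (assumes doxygen, and optionally plantuml/dot, are installed):
#   cmake -Donly_docs=ON ..
#   cmake --build . --target docs
# The generated entry point is ${CMAKE_BINARY_DIR}/docs/html/index.html.
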
@@ -6,51 +6,61 @@ include(create_symbolic_link)

# If no suffix is defined for executables (e.g. Windows uses .exe but Linux
# and macOS use none), then explicitly set it to the empty string.
if (NOT DEFINED suffix)
set(suffix "")
endif ()
if(NOT DEFINED suffix)
set(suffix "")
endif()

install(TARGETS common
opts
xrpl_boost
xrpl_libs
xrpl_syslibs
xrpl.imports.main
xrpl.libpb
xrpl.libxrpl
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.core
xrpl.libxrpl.crypto
xrpl.libxrpl.json
xrpl.libxrpl.ledger
xrpl.libxrpl.net
xrpl.libxrpl.nodestore
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl.shamap
antithesis-sdk-cpp
EXPORT XrplExports
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES
DESTINATION include)
install (
TARGETS
common
opts
xrpl_boost
xrpl_libs
xrpl_syslibs
xrpl.imports.main
xrpl.libpb
xrpl.libxrpl
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.core
xrpl.libxrpl.crypto
xrpl.libxrpl.json
xrpl.libxrpl.ledger
xrpl.libxrpl.net
xrpl.libxrpl.nodestore
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl.shamap
antithesis-sdk-cpp
EXPORT XrplExports
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES DESTINATION include)

install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
install(
DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)

install(EXPORT XrplExports FILE XrplTargets.cmake NAMESPACE Xrpl:: DESTINATION lib/cmake/xrpl)
include(CMakePackageConfigHelpers)
write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version} COMPATIBILITY SameMajorVersion)
install (EXPORT XrplExports
FILE XrplTargets.cmake
NAMESPACE Xrpl::
DESTINATION lib/cmake/xrpl)
include (CMakePackageConfigHelpers)
write_basic_package_version_file (
XrplConfigVersion.cmake
VERSION ${xrpld_version}
COMPATIBILITY SameMajorVersion)

if (is_root_project AND TARGET xrpld)
install(TARGETS xrpld RUNTIME DESTINATION bin)
set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
# sample configs should not overwrite existing files
# install if-not-exists workaround as suggested by
# https://cmake.org/Bug/view.php?id=12646
install(CODE "
install (TARGETS xrpld RUNTIME DESTINATION bin)
set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
# sample configs should not overwrite existing files
# install if-not-exists workaround as suggested by
# https://cmake.org/Bug/view.php?id=12646
install(CODE "
macro (copy_if_not_exists SRC DEST NEWNAME)
if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
file (INSTALL FILE_PERMISSIONS OWNER_READ OWNER_WRITE DESTINATION \"\${CMAKE_INSTALL_PREFIX}/\${DEST}\" FILES \"\${SRC}\" RENAME \"\${NEWNAME}\")
@@ -61,7 +71,7 @@ if (is_root_project AND TARGET xrpld)
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg\" etc xrpld.cfg)
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
")
install(CODE "
install(CODE "
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link)
create_symbolic_link(xrpld${suffix} \
@@ -69,5 +79,8 @@ if (is_root_project AND TARGET xrpld)
")
endif ()

install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake ${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
DESTINATION lib/cmake/xrpl)
install (
FILES
${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
DESTINATION lib/cmake/xrpl)

@@ -5,79 +5,84 @@
include(CompilationEnv)

# Set defaults for optional variables to avoid uninitialized variable warnings
if (NOT DEFINED voidstar)
set(voidstar OFF)
endif ()
if(NOT DEFINED voidstar)
set(voidstar OFF)
endif()

add_library(opts INTERFACE)
add_library(Xrpl::opts ALIAS opts)
target_compile_definitions(
opts
INTERFACE BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT
BOOST_CONTAINER_FWD_BAD_DEQUE
HAS_UNCAUGHT_EXCEPTIONS=1
$<$<BOOL:${boost_show_deprecated}>:
BOOST_ASIO_NO_DEPRECATED
BOOST_FILESYSTEM_NO_DEPRECATED
>
$<$<NOT:$<BOOL:${boost_show_deprecated}>>:
BOOST_COROUTINES_NO_DEPRECATION_WARNING
BOOST_BEAST_ALLOW_DEPRECATED
BOOST_FILESYSTEM_DEPRECATED
>
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
$<$<BOOL:${single_io_service_thread}>:XRPL_SINGLE_IO_SERVICE_THREAD=1>
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
target_compile_options(
opts
INTERFACE $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized> $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
add_library (opts INTERFACE)
add_library (Xrpl::opts ALIAS opts)
target_compile_definitions (opts
INTERFACE
BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT
BOOST_CONTAINER_FWD_BAD_DEQUE
HAS_UNCAUGHT_EXCEPTIONS=1
$<$<BOOL:${boost_show_deprecated}>:
BOOST_ASIO_NO_DEPRECATED
BOOST_FILESYSTEM_NO_DEPRECATED
>
$<$<NOT:$<BOOL:${boost_show_deprecated}>>:
BOOST_COROUTINES_NO_DEPRECATION_WARNING
BOOST_BEAST_ALLOW_DEPRECATED
BOOST_FILESYSTEM_DEPRECATED
>
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
$<$<BOOL:${single_io_service_thread}>:XRPL_SINGLE_IO_SERVICE_THREAD=1>
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
target_compile_options (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

target_link_libraries(opts INTERFACE $<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries (opts
INTERFACE
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

if (jemalloc)
find_package(jemalloc REQUIRED)
target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC)
target_link_libraries(opts INTERFACE jemalloc::jemalloc)
if(jemalloc)
find_package(jemalloc REQUIRED)
target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC)
target_link_libraries(opts INTERFACE jemalloc::jemalloc)
endif ()

#[===================================================================[
xrpld transitive library deps via an interface library
#]===================================================================]

add_library(xrpl_syslibs INTERFACE)
add_library(Xrpl::syslibs ALIAS xrpl_syslibs)
target_link_libraries(
xrpl_syslibs
INTERFACE $<$<BOOL:${is_msvc}>:
legacy_stdio_definitions.lib
Shlwapi
kernel32
user32
gdi32
winspool
comdlg32
advapi32
shell32
ole32
oleaut32
uuid
odbc32
odbccp32
crypt32
>
$<$<NOT:$<BOOL:${is_msvc}>>:dl>
$<$<NOT:$<OR:$<BOOL:${is_msvc}>,$<BOOL:${is_macos}>>>:rt>)
add_library (xrpl_syslibs INTERFACE)
add_library (Xrpl::syslibs ALIAS xrpl_syslibs)
target_link_libraries (xrpl_syslibs
INTERFACE
$<$<BOOL:${is_msvc}>:
legacy_stdio_definitions.lib
Shlwapi
kernel32
user32
gdi32
winspool
comdlg32
advapi32
shell32
ole32
oleaut32
uuid
odbc32
odbccp32
crypt32
>
$<$<NOT:$<BOOL:${is_msvc}>>:dl>
$<$<NOT:$<OR:$<BOOL:${is_msvc}>,$<BOOL:${is_macos}>>>:rt>)

if (NOT is_msvc)
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads)
target_link_libraries(xrpl_syslibs INTERFACE Threads::Threads)
set (THREADS_PREFER_PTHREAD_FLAG ON)
find_package (Threads)
target_link_libraries (xrpl_syslibs INTERFACE Threads::Threads)
endif ()

add_library(xrpl_libs INTERFACE)
add_library(Xrpl::libs ALIAS xrpl_libs)
target_link_libraries(xrpl_libs INTERFACE Xrpl::syslibs)
add_library (xrpl_libs INTERFACE)
add_library (Xrpl::libs ALIAS xrpl_libs)
target_link_libraries (xrpl_libs INTERFACE Xrpl::syslibs)

@@ -44,23 +44,23 @@ include(CompilationEnv)

# Read environment variable
set(SANITIZERS "")
if (DEFINED ENV{SANITIZERS})
set(SANITIZERS "$ENV{SANITIZERS}")
endif ()
if(DEFINED ENV{SANITIZERS})
set(SANITIZERS "$ENV{SANITIZERS}")
endif()

# Set SANITIZERS_ENABLED flag for use in other modules
if (SANITIZERS MATCHES "address|thread|undefinedbehavior")
if(SANITIZERS MATCHES "address|thread|undefinedbehavior")
set(SANITIZERS_ENABLED TRUE)
else ()
else()
set(SANITIZERS_ENABLED FALSE)
return()
endif ()
endif()

# Sanitizers are not supported on Windows/MSVC
if (is_msvc)
if(is_msvc)
message(FATAL_ERROR "Sanitizers are not supported on Windows/MSVC. "
"Please unset the SANITIZERS environment variable.")
endif ()
"Please unset the SANITIZERS environment variable.")
endif()

message(STATUS "Configuring sanitizers: ${SANITIZERS}")

@@ -74,24 +74,24 @@ set(san_list "${SANITIZERS}")
string(REPLACE "," ";" san_list "${san_list}")
separate_arguments(san_list)

foreach (san IN LISTS san_list)
if (san STREQUAL "address")
foreach(san IN LISTS san_list)
if(san STREQUAL "address")
set(enable_asan TRUE)
elseif (san STREQUAL "thread")
elseif(san STREQUAL "thread")
set(enable_tsan TRUE)
elseif (san STREQUAL "undefinedbehavior")
elseif(san STREQUAL "undefinedbehavior")
set(enable_ubsan TRUE)
else ()
else()
message(FATAL_ERROR "Unsupported sanitizer type: ${san}"
"Supported: address, thread, undefinedbehavior and their combinations.")
endif ()
endforeach ()
"Supported: address, thread, undefinedbehavior and their combinations.")
endif()
endforeach()

# Validate sanitizer compatibility
if (enable_asan AND enable_tsan)
if(enable_asan AND enable_tsan)
message(FATAL_ERROR "AddressSanitizer and ThreadSanitizer are incompatible and cannot be enabled simultaneously. "
"Use 'address' or 'thread', optionally with 'undefinedbehavior'.")
endif ()
"Use 'address' or 'thread', optionally with 'undefinedbehavior'.")
endif()

# Frame pointer is required for meaningful stack traces. Sanitizers recommend minimum of -O1 for reasonable performance
set(SANITIZERS_COMPILE_FLAGS "-fno-omit-frame-pointer" "-O1")
@@ -99,31 +99,31 @@ set(SANITIZERS_COMPILE_FLAGS "-fno-omit-frame-pointer" "-O1")
# Build the sanitizer flags list
set(SANITIZER_TYPES)

if (enable_asan)
if(enable_asan)
list(APPEND SANITIZER_TYPES "address")
elseif (enable_tsan)
elseif(enable_tsan)
list(APPEND SANITIZER_TYPES "thread")
endif ()
endif()

if (enable_ubsan)
if(enable_ubsan)
# UB sanitizer flags
list(APPEND SANITIZER_TYPES "undefined" "float-divide-by-zero")
if (is_clang)
# Clang supports additional UB checks. More info here
# https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html
if(is_clang)
# Clang supports additional UB checks. More info here https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html
list(APPEND SANITIZER_TYPES "unsigned-integer-overflow")
endif ()
endif ()
endif()
endif()

# Configure code model for GCC on amd64 Use large code model for ASAN to avoid relocation errors Use medium code model
# for TSAN (large is not compatible with TSAN)
# Configure code model for GCC on amd64
# Use large code model for ASAN to avoid relocation errors
# Use medium code model for TSAN (large is not compatible with TSAN)
set(SANITIZERS_RELOCATION_FLAGS)

# Compiler-specific configuration
if (is_gcc)
# Disable mold, gold and lld linkers for GCC with sanitizers Use default linker (bfd/ld) which is more lenient with
# mixed code models This is needed since the size of instrumented binary exceeds the limits set by mold, lld and
# gold linkers
if(is_gcc)
# Disable mold, gold and lld linkers for GCC with sanitizers
# Use default linker (bfd/ld) which is more lenient with mixed code models
# This is needed since the size of instrumented binary exceeds the limits set by mold, lld and gold linkers
set(use_mold OFF CACHE BOOL "Use mold linker" FORCE)
set(use_gold OFF CACHE BOOL "Use gold linker" FORCE)
set(use_lld OFF CACHE BOOL "Use lld linker" FORCE)
@@ -132,17 +132,17 @@ if (is_gcc)
# Suppress false positive warnings in GCC with stringop-overflow
list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-stringop-overflow")

if (is_amd64 AND enable_asan)
if(is_amd64 AND enable_asan)
message(STATUS " Using large code model (-mcmodel=large)")
list(APPEND SANITIZERS_COMPILE_FLAGS "-mcmodel=large")
list(APPEND SANITIZERS_RELOCATION_FLAGS "-mcmodel=large")
elseif (enable_tsan)
elseif(enable_tsan)
# GCC doesn't support atomic_thread_fence with tsan. Suppress warnings.
list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-tsan")
message(STATUS " Using medium code model (-mcmodel=medium)")
list(APPEND SANITIZERS_COMPILE_FLAGS "-mcmodel=medium")
list(APPEND SANITIZERS_RELOCATION_FLAGS "-mcmodel=medium")
endif ()
endif()

# Join sanitizer flags with commas for -fsanitize option
list(JOIN SANITIZER_TYPES "," SANITIZER_TYPES_STR)
@@ -151,12 +151,13 @@ if (is_gcc)
list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
set(SANITIZERS_LINK_FLAGS "${SANITIZERS_RELOCATION_FLAGS}" "-fsanitize=${SANITIZER_TYPES_STR}")

elseif (is_clang)
# Add ignorelist for Clang (GCC doesn't support this) Use CMAKE_SOURCE_DIR to get the path to the ignorelist
elseif(is_clang)
# Add ignorelist for Clang (GCC doesn't support this)
# Use CMAKE_SOURCE_DIR to get the path to the ignorelist
set(IGNORELIST_PATH "${CMAKE_SOURCE_DIR}/sanitizers/suppressions/sanitizer-ignorelist.txt")
if (NOT EXISTS "${IGNORELIST_PATH}")
if(NOT EXISTS "${IGNORELIST_PATH}")
message(FATAL_ERROR "Sanitizer ignorelist not found: ${IGNORELIST_PATH}")
endif ()
endif()

list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize-ignorelist=${IGNORELIST_PATH}")
message(STATUS " Using sanitizer ignorelist: ${IGNORELIST_PATH}")
@@ -167,31 +168,34 @@ elseif (is_clang)
# Add sanitizer to compile and link flags
list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
set(SANITIZERS_LINK_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
endif ()
endif()

message(STATUS " Compile flags: ${SANITIZERS_COMPILE_FLAGS}")
message(STATUS " Link flags: ${SANITIZERS_LINK_FLAGS}")

# Apply the sanitizer flags to the 'common' interface library This is the same library used by XrplCompiler.cmake
target_compile_options(common INTERFACE $<$<COMPILE_LANGUAGE:CXX>:${SANITIZERS_COMPILE_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${SANITIZERS_COMPILE_FLAGS}>)
# Apply the sanitizer flags to the 'common' interface library
# This is the same library used by XrplCompiler.cmake
target_compile_options(common INTERFACE
$<$<COMPILE_LANGUAGE:CXX>:${SANITIZERS_COMPILE_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${SANITIZERS_COMPILE_FLAGS}>
)

# Apply linker flags
target_link_options(common INTERFACE ${SANITIZERS_LINK_FLAGS})

# Define SANITIZERS macro for BuildInfo.cpp
set(sanitizers_list)
if (enable_asan)
if(enable_asan)
list(APPEND sanitizers_list "ASAN")
endif ()
if (enable_tsan)
endif()
if(enable_tsan)
list(APPEND sanitizers_list "TSAN")
endif ()
if (enable_ubsan)
endif()
if(enable_ubsan)
list(APPEND sanitizers_list "UBSAN")
endif ()
endif()

if (sanitizers_list)
if(sanitizers_list)
list(JOIN sanitizers_list "." sanitizers_str)
target_compile_definitions(common INTERFACE SANITIZERS=${sanitizers_str})
endif ()
endif()

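# Illustrative ways to drive the sanitizer configuration above via the environment:
#   SANITIZERS=address,undefinedbehavior cmake ..
#   SANITIZERS=thread cmake ..
# The value is split on commas, unknown names are rejected, and combining
# address with thread is refused as incompatible.
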
@@ -6,39 +6,40 @@ include(CompilationEnv)

get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)

set(CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)
set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)
if (NOT is_multiconfig)
if (NOT CMAKE_BUILD_TYPE)
message(STATUS "Build type not specified - defaulting to Release")
set(CMAKE_BUILD_TYPE Release CACHE STRING "build type" FORCE)
elseif (NOT (CMAKE_BUILD_TYPE STREQUAL Debug OR CMAKE_BUILD_TYPE STREQUAL Release))
# for simplicity, these are the only two config types we care about. Limiting the build types simplifies dealing
# with external project builds especially
message(FATAL_ERROR " *** Only Debug or Release build types are currently supported ***")
endif ()
if (NOT CMAKE_BUILD_TYPE)
message (STATUS "Build type not specified - defaulting to Release")
set (CMAKE_BUILD_TYPE Release CACHE STRING "build type" FORCE)
elseif (NOT (CMAKE_BUILD_TYPE STREQUAL Debug OR CMAKE_BUILD_TYPE STREQUAL Release))
# for simplicity, these are the only two config types we care about. Limiting
# the build types simplifies dealing with external project builds especially
message (FATAL_ERROR " *** Only Debug or Release build types are currently supported ***")
endif ()
endif ()

if (is_clang) # both Clang and AppleClang
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
message(FATAL_ERROR "This project requires clang 16 or later")
endif ()
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
message (FATAL_ERROR "This project requires clang 16 or later")
endif ()
elseif (is_gcc)
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
message(FATAL_ERROR "This project requires GCC 12 or later")
endif ()
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
message (FATAL_ERROR "This project requires GCC 12 or later")
endif ()
endif ()

# check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message(FATAL_ERROR "Builds (in-source) are not allowed in "
"${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles "
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.")
message (FATAL_ERROR "Builds (in-source) are not allowed in "
"${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles "
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.")
endif ()

if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
message(FATAL_ERROR "Visual Studio 32-bit build is not supported.")
message (FATAL_ERROR "Visual Studio 32-bit build is not supported.")
endif ()

if (APPLE AND NOT HOMEBREW)
find_program(HOMEBREW brew)
find_program (HOMEBREW brew)
endif ()

@@ -5,110 +5,125 @@
include(CompilationEnv)

set(is_ci FALSE)
if (DEFINED ENV{CI})
if ("$ENV{CI}" STREQUAL "true")
set(is_ci TRUE)
endif ()
endif ()
if(DEFINED ENV{CI})
if("$ENV{CI}" STREQUAL "true")
set(is_ci TRUE)
endif()
endif()

get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
set(is_root_project OFF)
else ()
set(is_root_project ON)
endif ()
if(has_parent)
set(is_root_project OFF)
else()
set(is_root_project ON)
endif()

option(assert "Enables asserts, even in release builds" OFF)

option(xrpld "Build xrpld" ON)

option(tests "Build tests" ON)
if (tests)
# This setting allows making a separate workflow to test fees other than default 10
if (NOT UNIT_TEST_REFERENCE_FEE)
set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "")
endif ()
endif ()
if(tests)
# This setting allows making a separate workflow to test fees other than default 10
if(NOT UNIT_TEST_REFERENCE_FEE)
set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "")
endif()
endif()

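# Example override of the cache variable above (the value 200 is purely illustrative):
#   cmake -Dtests=ON -DUNIT_TEST_REFERENCE_FEE=200 ..
# The value is forwarded to the xrpld test build as the UNIT_TEST_REFERENCE_FEE definition.
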
option(unity "Creates a build using UNITY support in cmake." OFF)
if (unity)
if (NOT is_ci)
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
endif ()
set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif ()
if(unity)
if(NOT is_ci)
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
endif()
set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif()

if (is_clang AND is_linux)
option(voidstar "Enable Antithesis instrumentation." OFF)
endif ()
if(is_clang AND is_linux)
option(voidstar "Enable Antithesis instrumentation." OFF)
endif()

if (is_gcc OR is_clang)
include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT)
if(is_gcc OR is_clang)
include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT)

option(coverage "Generates coverage info." OFF)
option(profile "Add profiling flags" OFF)
set(coverage_format "html-details" CACHE STRING "Output format of the coverage report.")
set(coverage_extra_args "" CACHE STRING "Additional arguments to pass to gcovr.")
option(wextra "compile with extra gcc/clang warnings enabled" ON)
else ()
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif ()
option(coverage "Generates coverage info." OFF)
option(profile "Add profiling flags" OFF)
set(coverage_format "html-details" CACHE STRING
"Output format of the coverage report.")
set(coverage_extra_args "" CACHE STRING
"Additional arguments to pass to gcovr.")
option(wextra "compile with extra gcc/clang warnings enabled" ON)
else()
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif()

if (is_linux AND NOT SANITIZER)
option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
option(static "link protobuf, openssl, libc++, and boost statically" ON)
option(perf "Enables flags that assist with perf recording" OFF)
option(use_gold "enables detection of gold (binutils) linker" ON)
option(use_mold "enables detection of mold (binutils) linker" ON)
# Set a default value for the log flag based on the build type. This provides a sensible default (on for debug, off
# for release) while still allowing the user to override it for any build.
if (CMAKE_BUILD_TYPE STREQUAL "Debug")
set(TRUNCATED_LOGS_DEFAULT ON)
else ()
set(TRUNCATED_LOGS_DEFAULT OFF)
endif ()
option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux." ${TRUNCATED_LOGS_DEFAULT})
if (TRUNCATED_THREAD_NAME_LOGS)
add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS)
endif ()
else ()
# we are not ready to allow shared-libs on windows because it would require export declarations. On macos it's more
# feasible, but static openssl produces odd linker errors, thus we disable shared lib builds for now.
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE)
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif ()
if(is_linux AND NOT SANITIZER)
option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
option(static "link protobuf, openssl, libc++, and boost statically" ON)
option(perf "Enables flags that assist with perf recording" OFF)
option(use_gold "enables detection of gold (binutils) linker" ON)
option(use_mold "enables detection of mold (binutils) linker" ON)
# Set a default value for the log flag based on the build type.
# This provides a sensible default (on for debug, off for release)
# while still allowing the user to override it for any build.
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(TRUNCATED_LOGS_DEFAULT ON)
else()
set(TRUNCATED_LOGS_DEFAULT OFF)
endif()
option(TRUNCATED_THREAD_NAME_LOGS
"Show warnings about truncated thread names on Linux."
${TRUNCATED_LOGS_DEFAULT}
)
if(TRUNCATED_THREAD_NAME_LOGS)
add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS)
endif()
else()
# we are not ready to allow shared-libs on windows because it would require
# export declarations. On macos it's more feasible, but static openssl
# produces odd linker errors, thus we disable shared lib builds for now.
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE)
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()

if (is_clang)
option(use_lld "enables detection of lld linker" ON)
else ()
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif ()
if(is_clang)
option(use_lld "enables detection of lld linker" ON)
else()
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()

option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf "Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc "Force a local build of gRPC instead of looking for an installed version." OFF)
option(local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc
"Force a local build of gRPC instead of looking for an installed version." OFF)

# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline "Prevents unit test definitions from being inserted into global table" OFF)
option(single_io_service_thread "Restricts the number of threads calling io_context::run to one. \
This can be useful when debugging." OFF)
option(boost_show_deprecated "Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls." OFF)
option(beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table"
OFF)
option(single_io_service_thread
"Restricts the number of threads calling io_context::run to one. \
This can be useful when debugging."
OFF)
option(boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
OFF)

if (WIN32)
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else ()
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif ()
if(WIN32)
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()

if (coverage)
message(STATUS "coverage build requested - forcing Debug build")
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif ()
if(coverage)
message(STATUS "coverage build requested - forcing Debug build")
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif()

@@ -1,17 +1,20 @@
option(validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)

if (validator_keys)
git_branch(current_branch)
# default to tracking VK master branch unless we are on release
if (NOT (current_branch STREQUAL "release"))
set(current_branch "master")
endif ()
message(STATUS "Tracking ValidatorKeys branch: ${current_branch}")
git_branch (current_branch)
# default to tracking VK master branch unless we are on release
if (NOT (current_branch STREQUAL "release"))
set (current_branch "master")
endif ()
message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")

FetchContent_Declare(validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}")
FetchContent_MakeAvailable(validator_keys)
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
FetchContent_Declare (
validator_keys
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}"
)
FetchContent_MakeAvailable(validator_keys)
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})

endif ()

@@ -3,13 +3,13 @@
|
||||
#]===================================================================]
|
||||
|
||||
file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
|
||||
foreach (line_ ${BUILD_INFO})
|
||||
if (line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
|
||||
set(xrpld_version ${CMAKE_MATCH_1})
|
||||
endif ()
|
||||
endforeach ()
|
||||
if (xrpld_version)
|
||||
message(STATUS "xrpld version: ${xrpld_version}")
|
||||
else ()
|
||||
message(FATAL_ERROR "unable to determine xrpld version")
|
||||
endif ()
|
||||
foreach(line_ ${BUILD_INFO})
|
||||
if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
|
||||
set(xrpld_version ${CMAKE_MATCH_1})
|
||||
endif()
|
||||
endforeach()
|
||||
if(xrpld_version)
|
||||
message(STATUS "xrpld version: ${xrpld_version}")
|
||||
else()
|
||||
message(FATAL_ERROR "unable to determine xrpld version")
|
||||
endif()
|
||||
|
||||
@@ -12,14 +12,26 @@ include(isolate_headers)
|
||||
# add_module(parent a)
|
||||
# add_module(parent b)
|
||||
# target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
|
||||
function (add_module parent name)
|
||||
set(target ${PROJECT_NAME}.lib${parent}.${name})
|
||||
add_library(${target} OBJECT)
|
||||
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp")
|
||||
target_sources(${target} PRIVATE ${sources})
|
||||
target_include_directories(${target} PUBLIC "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>")
|
||||
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/include"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}" PUBLIC)
|
||||
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src" "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
|
||||
PRIVATE)
|
||||
endfunction ()
|
||||
function(add_module parent name)
|
||||
set(target ${PROJECT_NAME}.lib${parent}.${name})
|
||||
add_library(${target} OBJECT)
|
||||
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp"
|
||||
)
|
||||
target_sources(${target} PRIVATE ${sources})
|
||||
target_include_directories(${target} PUBLIC
|
||||
"$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>"
|
||||
)
|
||||
isolate_headers(
|
||||
${target}
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/include"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}"
|
||||
PUBLIC
|
||||
)
|
||||
isolate_headers(
|
||||
${target}
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/src"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
|
||||
PRIVATE
|
||||
)
|
||||
endfunction()
|
||||
|
||||
@@ -1,19 +1,20 @@
|
||||
# file(CREATE_SYMLINK) only works on Windows with administrator privileges. https://stackoverflow.com/a/61244115/618906
|
||||
function (create_symbolic_link target link)
|
||||
if (WIN32)
|
||||
if (NOT IS_SYMLINK "${link}")
|
||||
if (NOT IS_ABSOLUTE "${target}")
|
||||
# Relative links do not work on Windows.
|
||||
set(target "${link}/../${target}")
|
||||
endif ()
|
||||
file(TO_NATIVE_PATH "${target}" target)
|
||||
file(TO_NATIVE_PATH "${link}" link)
|
||||
execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}")
|
||||
endif ()
|
||||
else ()
|
||||
file(CREATE_LINK "${target}" "${link}" SYMBOLIC)
|
||||
endif ()
|
||||
if (NOT IS_SYMLINK "${link}")
|
||||
message(ERROR "failed to create symlink: <${link}>")
|
||||
endif ()
|
||||
endfunction ()
|
||||
# file(CREATE_SYMLINK) only works on Windows with administrator privileges.
|
||||
# https://stackoverflow.com/a/61244115/618906
|
||||
function(create_symbolic_link target link)
|
||||
if(WIN32)
|
||||
if(NOT IS_SYMLINK "${link}")
|
||||
if(NOT IS_ABSOLUTE "${target}")
|
||||
# Relative links do not work on Windows.
|
||||
set(target "${link}/../${target}")
|
||||
endif()
|
||||
file(TO_NATIVE_PATH "${target}" target)
|
||||
file(TO_NATIVE_PATH "${link}" link)
|
||||
execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}")
|
||||
endif()
|
||||
else()
|
||||
file(CREATE_LINK "${target}" "${link}" SYMBOLIC)
|
||||
endif()
|
||||
if(NOT IS_SYMLINK "${link}")
|
||||
message(ERROR "failed to create symlink: <${link}>")
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
@@ -2,43 +2,48 @@ include(CompilationEnv)
|
||||
include(XrplSanitizers)
|
||||
|
||||
find_package(Boost REQUIRED
|
||||
COMPONENTS chrono
|
||||
container
|
||||
coroutine
|
||||
date_time
|
||||
filesystem
|
||||
json
|
||||
program_options
|
||||
regex
|
||||
system
|
||||
thread)
|
||||
COMPONENTS
|
||||
chrono
|
||||
container
|
||||
coroutine
|
||||
date_time
|
||||
filesystem
|
||||
json
|
||||
program_options
|
||||
regex
|
||||
system
|
||||
thread
|
||||
)
|
||||
|
||||
add_library(xrpl_boost INTERFACE)
|
||||
add_library(Xrpl::boost ALIAS xrpl_boost)
|
||||
|
||||
target_link_libraries(
|
||||
xrpl_boost
|
||||
INTERFACE Boost::headers
|
||||
Boost::chrono
|
||||
Boost::container
|
||||
Boost::coroutine
|
||||
Boost::date_time
|
||||
Boost::filesystem
|
||||
Boost::json
|
||||
Boost::process
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
Boost::thread)
|
||||
if (Boost_COMPILER)
|
||||
target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
|
||||
endif ()
|
||||
if (SANITIZERS_ENABLED AND is_clang)
|
||||
# TODO: gcc does not support -fsanitize-blacklist...can we do something else for gcc ?
|
||||
if (NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
|
||||
get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
|
||||
endif ()
|
||||
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
|
||||
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
|
||||
target_compile_options(opts INTERFACE # ignore boost headers for sanitizing
|
||||
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
|
||||
endif ()
|
||||
target_link_libraries(xrpl_boost
|
||||
INTERFACE
|
||||
Boost::headers
|
||||
Boost::chrono
|
||||
Boost::container
|
||||
Boost::coroutine
|
||||
Boost::date_time
|
||||
Boost::filesystem
|
||||
Boost::json
|
||||
Boost::process
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
Boost::thread)
|
||||
if(Boost_COMPILER)
|
||||
target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
|
||||
endif()
|
||||
if(SANITIZERS_ENABLED AND is_clang)
|
||||
# TODO: gcc does not support -fsanitize-blacklist...can we do something else
|
||||
# for gcc ?
|
||||
if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
|
||||
get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
|
||||
endif()
|
||||
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
|
||||
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
|
||||
target_compile_options(opts
|
||||
INTERFACE
|
||||
# ignore boost headers for sanitizing
|
||||
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
|
||||
endif()
|
||||
|
||||
@@ -37,12 +37,12 @@ include(create_symbolic_link)
|
||||
# `${CMAKE_CURRENT_BINARY_DIR}/include/${target}`.
|
||||
#
|
||||
# isolate_headers(target A B scope)
|
||||
function (isolate_headers target A B scope)
|
||||
file(RELATIVE_PATH C "${A}" "${B}")
|
||||
set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}")
|
||||
set(Y "${X}/${C}")
|
||||
cmake_path(GET Y PARENT_PATH parent)
|
||||
file(MAKE_DIRECTORY "${parent}")
|
||||
create_symbolic_link("${B}" "${Y}")
|
||||
target_include_directories(${target} ${scope} "$<BUILD_INTERFACE:${X}>")
|
||||
endfunction ()
|
||||
function(isolate_headers target A B scope)
|
||||
file(RELATIVE_PATH C "${A}" "${B}")
|
||||
set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}")
|
||||
set(Y "${X}/${C}")
|
||||
cmake_path(GET Y PARENT_PATH parent)
|
||||
file(MAKE_DIRECTORY "${parent}")
|
||||
create_symbolic_link("${B}" "${Y}")
|
||||
target_include_directories(${target} ${scope} "$<BUILD_INTERFACE:${X}>")
|
||||
endfunction()
|
||||
|
||||
@@ -6,19 +6,19 @@
|
||||
# target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
|
||||
# add_library(project.libparent)
|
||||
# target_link_modules(parent PUBLIC a b)
|
||||
function (target_link_modules parent scope)
|
||||
set(library ${PROJECT_NAME}.lib${parent})
|
||||
foreach (name ${ARGN})
|
||||
set(module ${library}.${name})
|
||||
get_target_property(sources ${library} SOURCES)
|
||||
list(LENGTH sources before)
|
||||
get_target_property(dupes ${module} SOURCES)
|
||||
list(LENGTH dupes expected)
|
||||
list(REMOVE_ITEM sources ${dupes})
|
||||
list(LENGTH sources after)
|
||||
math(EXPR actual "${before} - ${after}")
|
||||
message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}")
|
||||
set_target_properties(${library} PROPERTIES SOURCES "${sources}")
|
||||
target_link_libraries(${library} ${scope} ${module})
|
||||
endforeach ()
|
||||
endfunction ()
|
||||
function(target_link_modules parent scope)
|
||||
set(library ${PROJECT_NAME}.lib${parent})
|
||||
foreach(name ${ARGN})
|
||||
set(module ${library}.${name})
|
||||
get_target_property(sources ${library} SOURCES)
|
||||
list(LENGTH sources before)
|
||||
get_target_property(dupes ${module} SOURCES)
|
||||
list(LENGTH dupes expected)
|
||||
list(REMOVE_ITEM sources ${dupes})
|
||||
list(LENGTH sources after)
|
||||
math(EXPR actual "${before} - ${after}")
|
||||
message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}")
|
||||
set_target_properties(${library} PROPERTIES SOURCES "${sources}")
|
||||
target_link_libraries(${library} ${scope} ${module})
|
||||
endforeach()
|
||||
endfunction()
|
||||
|
||||
@@ -35,20 +35,28 @@ find_package(Protobuf REQUIRED)
|
||||
# This prefix should appear at the start of all your consumer includes.
|
||||
# ARGN:
|
||||
# A list of .proto files.
|
||||
function (target_protobuf_sources target prefix)
|
||||
set(dir "${CMAKE_CURRENT_BINARY_DIR}/pb-${target}")
|
||||
file(MAKE_DIRECTORY "${dir}/${prefix}")
|
||||
function(target_protobuf_sources target prefix)
|
||||
set(dir "${CMAKE_CURRENT_BINARY_DIR}/pb-${target}")
|
||||
file(MAKE_DIRECTORY "${dir}/${prefix}")
|
||||
|
||||
protobuf_generate(TARGET ${target} PROTOC_OUT_DIR "${dir}/${prefix}" "${ARGN}")
|
||||
target_include_directories(
|
||||
${target} SYSTEM
|
||||
PUBLIC # Allows #include <package/path/to/file.proto> used by consumer files.
|
||||
$<BUILD_INTERFACE:${dir}>
|
||||
# Allows #include "path/to/file.proto" used by generated files.
|
||||
$<BUILD_INTERFACE:${dir}/${prefix}>
|
||||
# Allows #include <package/path/to/file.proto> used by consumer files.
|
||||
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
|
||||
# Allows #include "path/to/file.proto" used by generated files.
|
||||
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/${prefix}>)
|
||||
install(DIRECTORY ${dir}/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} FILES_MATCHING PATTERN "*.h")
|
||||
endfunction ()
|
||||
protobuf_generate(
|
||||
TARGET ${target}
|
||||
PROTOC_OUT_DIR "${dir}/${prefix}"
|
||||
"${ARGN}"
|
||||
)
|
||||
target_include_directories(${target} SYSTEM PUBLIC
|
||||
# Allows #include <package/path/to/file.proto> used by consumer files.
|
||||
$<BUILD_INTERFACE:${dir}>
|
||||
# Allows #include "path/to/file.proto" used by generated files.
|
||||
$<BUILD_INTERFACE:${dir}/${prefix}>
|
||||
# Allows #include <package/path/to/file.proto> used by consumer files.
|
||||
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
|
||||
# Allows #include "path/to/file.proto" used by generated files.
|
||||
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/${prefix}>
|
||||
)
|
||||
install(
|
||||
DIRECTORY ${dir}/
|
||||
DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
FILES_MATCHING PATTERN "*.h"
|
||||
)
|
||||
endfunction()
|
||||
|
||||
include/xrpl/basics/CanProcess.h (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2024 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef RIPPLE_BASICS_CANPROCESS_H_INCLUDED
|
||||
#define RIPPLE_BASICS_CANPROCESS_H_INCLUDED
|
||||
|
||||
#include <functional>
|
||||
#include <mutex>
|
||||
#include <set>
|
||||
|
||||
/** RAII class to check if an Item is already being processed on another thread,
|
||||
* as indicated by its presence in a Collection.
|
||||
*
|
||||
* If the Item is not in the Collection, it will be added under lock in the
|
||||
* ctor, and removed under lock in the dtor. The object will be considered
|
||||
* "usable" and evaluate to `true`.
|
||||
*
|
||||
* If the Item is in the Collection, no changes will be made to the collection,
|
||||
* and the CanProcess object will be considered "unusable".
|
||||
*
|
||||
* It's up to the caller to decide what "usable" and "unusable" mean. (e.g.
|
||||
* Process or skip a block of code, or set a flag.)
|
||||
*
|
||||
* The current use is to avoid lock contention that would be involved in
|
||||
* processing something associated with the Item.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* void IncomingLedgers::acquireAsync(LedgerHash const& hash, ...)
|
||||
* {
|
||||
* if (CanProcess check{acquiresMutex_, pendingAcquires_, hash})
|
||||
* {
|
||||
* acquire(hash, ...);
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* bool
|
||||
* NetworkOPsImp::recvValidation(
|
||||
* std::shared_ptr<STValidation> const& val,
|
||||
* std::string const& source)
|
||||
* {
|
||||
* CanProcess check(
|
||||
* validationsMutex_, pendingValidations_, val->getLedgerHash());
|
||||
* BypassAccept bypassAccept =
|
||||
* check ? BypassAccept::no : BypassAccept::yes;
|
||||
* handleNewValidation(app_, val, source, bypassAccept, m_journal);
|
||||
* }
|
||||
*
|
||||
*/
|
||||
class CanProcess
|
||||
{
|
||||
public:
|
||||
template <class Mutex, class Collection, class Item>
|
||||
CanProcess(Mutex& mtx, Collection& collection, Item const& item) : cleanup_(insert(mtx, collection, item))
|
||||
{
|
||||
}
|
||||
|
||||
~CanProcess()
|
||||
{
|
||||
if (cleanup_)
|
||||
cleanup_();
|
||||
}
|
||||
|
||||
explicit
|
||||
operator bool() const
|
||||
{
|
||||
return static_cast<bool>(cleanup_);
|
||||
}
|
||||
|
||||
private:
|
||||
template <bool useIterator, class Mutex, class Collection, class Item>
|
||||
std::function<void()>
|
||||
doInsert(Mutex& mtx, Collection& collection, Item const& item)
|
||||
{
|
||||
std::unique_lock<Mutex> lock(mtx);
|
||||
// TODO: Use structured binding once LLVM 16 is the minimum supported
|
||||
// version. See also: https://github.com/llvm/llvm-project/issues/48582
|
||||
// https://github.com/llvm/llvm-project/commit/127bf44385424891eb04cff8e52d3f157fc2cb7c
|
||||
auto const insertResult = collection.insert(item);
|
||||
auto const it = insertResult.first;
|
||||
if (!insertResult.second)
|
||||
return {};
|
||||
if constexpr (useIterator)
|
||||
return [&, it]() {
|
||||
std::unique_lock<Mutex> lock(mtx);
|
||||
collection.erase(it);
|
||||
};
|
||||
else
|
||||
return [&]() {
|
||||
std::unique_lock<Mutex> lock(mtx);
|
||||
collection.erase(item);
|
||||
};
|
||||
}
|
||||
|
||||
// Generic insert() function doesn't use iterators because they may get
|
||||
// invalidated
|
||||
template <class Mutex, class Collection, class Item>
|
||||
std::function<void()>
|
||||
insert(Mutex& mtx, Collection& collection, Item const& item)
|
||||
{
|
||||
return doInsert<false>(mtx, collection, item);
|
||||
}
|
||||
|
||||
// Specialize insert() for std::set, which does not invalidate iterators for
|
||||
// insert and erase
|
||||
template <class Mutex, class Item>
|
||||
std::function<void()>
|
||||
insert(Mutex& mtx, std::set<Item>& collection, Item const& item)
|
||||
{
|
||||
return doInsert<true>(mtx, collection, item);
|
||||
}
|
||||
|
||||
// If set, then the item is "usable"
|
||||
std::function<void()> cleanup_;
|
||||
};
|
||||
|
||||
#endif
|
||||
@@ -286,8 +286,18 @@ message TMLedgerData {
|
||||
required uint32 ledgerSeq = 2;
|
||||
required TMLedgerInfoType type = 3;
|
||||
repeated TMLedgerNode nodes = 4;
|
||||
// If the peer supports "responseCookies", this field will
|
||||
// never be populated.
|
||||
optional uint32 requestCookie = 5;
|
||||
optional TMReplyError error = 6;
|
||||
// The old field is called "requestCookie", but this is
|
||||
// a response, so this name makes more sense
|
||||
repeated uint32 responseCookies = 7;
|
||||
// If a TMGetLedger request was received without a "requestCookie",
|
||||
// and the peer supports it, this flag will be set to true to
|
||||
// indicate that the receiver should process the result in addition
|
||||
// to forwarding it to its "responseCookies" peers.
|
||||
optional bool directResponse = 8;
|
||||
}
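
Illustrative sketch (not part of the patch): one way a responder might populate the cookie fields above, depending on whether the destination peer supports the LedgerDataCookies feature; it condenses the sendToMultiple logic that appears further down in this changeset. The accessor names follow the standard protobuf C++ generated API for the fields shown, and the generated-header name is an assumption.

// Illustrative only: choose between the legacy requestCookie field and the
// new responseCookies/directResponse fields introduced above.
#include "ripple.pb.h"  // assumed name of the generated protobuf header

#include <cstdint>
#include <optional>
#include <vector>

void
fillCookies(
    protocol::TMLedgerData& reply,
    bool peerSupportsCookies,
    std::vector<std::optional<std::uint32_t>> const& cookies)
{
    if (peerSupportsCookies)
    {
        // One message can carry every cookie plus the direct-response flag.
        for (auto const& cookie : cookies)
        {
            if (cookie)
                reply.add_responsecookies(*cookie);
            else
                reply.set_directresponse(true);
        }
    }
    else if (!cookies.empty() && cookies.front())
    {
        // Legacy peers understand only the single requestCookie field, so a
        // real sender has to emit one copy of the message per cookie.
        reply.set_requestcookie(*cookies.front());
    }
}
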
|
||||
|
||||
message TMPing {
|
||||
|
||||
@@ -36,6 +36,8 @@ struct LedgerHeader
|
||||
|
||||
// If validated is false, it means "not yet validated."
|
||||
// Once validated is true, it will never be set false at a later time.
|
||||
// NOTE: If you are accessing this directly, you are probably doing it
|
||||
// wrong. Use LedgerMaster::isValidated().
|
||||
// VFALCO TODO Make this not mutable
|
||||
bool mutable validated = false;
|
||||
bool accepted = false;
|
||||
|
||||
@@ -387,6 +387,33 @@ class HashRouter_test : public beast::unit_test::suite
|
||||
BEAST_EXPECT(!any(HF::UNDEFINED));
|
||||
}
|
||||
|
||||
void
|
||||
testProcessPeer()
|
||||
{
|
||||
using namespace std::chrono_literals;
|
||||
TestStopwatch stopwatch;
|
||||
HashRouter router(getSetup(5s, 5s), stopwatch);
|
||||
uint256 const key(1);
|
||||
HashRouter::PeerShortID peer1 = 1;
|
||||
HashRouter::PeerShortID peer2 = 2;
|
||||
auto const timeout = 2s;
|
||||
|
||||
BEAST_EXPECT(router.shouldProcessForPeer(key, peer1, timeout));
|
||||
BEAST_EXPECT(!router.shouldProcessForPeer(key, peer1, timeout));
|
||||
++stopwatch;
|
||||
BEAST_EXPECT(!router.shouldProcessForPeer(key, peer1, timeout));
|
||||
BEAST_EXPECT(router.shouldProcessForPeer(key, peer2, timeout));
|
||||
BEAST_EXPECT(!router.shouldProcessForPeer(key, peer2, timeout));
|
||||
++stopwatch;
|
||||
BEAST_EXPECT(router.shouldProcessForPeer(key, peer1, timeout));
|
||||
BEAST_EXPECT(!router.shouldProcessForPeer(key, peer2, timeout));
|
||||
++stopwatch;
|
||||
BEAST_EXPECT(router.shouldProcessForPeer(key, peer2, timeout));
|
||||
++stopwatch;
|
||||
BEAST_EXPECT(router.shouldProcessForPeer(key, peer1, timeout));
|
||||
BEAST_EXPECT(!router.shouldProcessForPeer(key, peer2, timeout));
|
||||
}
|
||||
|
||||
public:
|
||||
void
|
||||
run() override
|
||||
@@ -399,6 +426,7 @@ public:
|
||||
testProcess();
|
||||
testSetup();
|
||||
testFlagsOps();
|
||||
testProcessPeer();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -288,6 +288,11 @@ public:
|
||||
{
|
||||
return false;
|
||||
}
|
||||
std::set<std::optional<uint64_t>>
|
||||
releaseRequestCookies(uint256 const& requestHash) override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::string const&
|
||||
fingerprint() const override
|
||||
|
||||
@@ -64,7 +64,9 @@ public:
|
||||
BEAST_EXPECT(negotiateProtocolVersion("RTXP/1.2") == std::nullopt);
|
||||
BEAST_EXPECT(negotiateProtocolVersion("RTXP/1.2, XRPL/2.0, XRPL/2.1") == make_protocol(2, 1));
|
||||
BEAST_EXPECT(negotiateProtocolVersion("XRPL/2.2") == make_protocol(2, 2));
|
||||
BEAST_EXPECT(negotiateProtocolVersion("RTXP/1.2, XRPL/2.2, XRPL/2.3, XRPL/999.999") == make_protocol(2, 2));
|
||||
BEAST_EXPECT(
|
||||
negotiateProtocolVersion("RTXP/1.2, XRPL/2.2, XRPL/2.3, XRPL/2.4, XRPL/999.999") ==
|
||||
make_protocol(2, 3));
|
||||
BEAST_EXPECT(negotiateProtocolVersion("XRPL/999.999, WebSocket/1.0") == std::nullopt);
|
||||
BEAST_EXPECT(negotiateProtocolVersion("") == std::nullopt);
|
||||
}
|
||||
|
||||
@@ -167,6 +167,11 @@ public:
|
||||
removeTxQueue(uint256 const&) override
|
||||
{
|
||||
}
|
||||
std::set<std::optional<uint64_t>>
|
||||
releaseRequestCookies(uint256 const& requestHash) override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
/** Manually advanced clock. */
|
||||
|
||||
@@ -8,8 +8,12 @@ add_custom_target(xrpl.tests)
|
||||
|
||||
# Test helpers
|
||||
add_library(xrpl.helpers.test STATIC)
|
||||
target_sources(xrpl.helpers.test PRIVATE helpers/TestSink.cpp)
|
||||
target_include_directories(xrpl.helpers.test PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
target_sources(xrpl.helpers.test PRIVATE
|
||||
helpers/TestSink.cpp
|
||||
)
|
||||
target_include_directories(xrpl.helpers.test PUBLIC
|
||||
${CMAKE_CURRENT_SOURCE_DIR}
|
||||
)
|
||||
target_link_libraries(xrpl.helpers.test PRIVATE xrpl.libxrpl)
|
||||
|
||||
# Common library dependencies for the rest of the tests.
|
||||
@@ -30,8 +34,8 @@ target_link_libraries(xrpl.test.json PRIVATE xrpl.imports.test)
|
||||
add_dependencies(xrpl.tests xrpl.test.json)
|
||||
|
||||
# Network unit tests are currently not supported on Windows
|
||||
if (NOT WIN32)
|
||||
xrpl_add_test(net)
|
||||
target_link_libraries(xrpl.test.net PRIVATE xrpl.imports.test)
|
||||
add_dependencies(xrpl.tests xrpl.test.net)
|
||||
endif ()
|
||||
if(NOT WIN32)
|
||||
xrpl_add_test(net)
|
||||
target_link_libraries(xrpl.test.net PRIVATE xrpl.imports.test)
|
||||
add_dependencies(xrpl.tests xrpl.test.net)
|
||||
endif()
|
||||
|
||||
@@ -918,7 +918,7 @@ void
|
||||
RCLConsensus::Adaptor::updateOperatingMode(std::size_t const positions) const
|
||||
{
|
||||
if (!positions && app_.getOPs().isFull())
|
||||
app_.getOPs().setMode(OperatingMode::CONNECTED);
|
||||
app_.getOPs().setMode(OperatingMode::CONNECTED, "updateOperatingMode: no positions");
|
||||
}
|
||||
|
||||
void
|
||||
|
||||
@@ -172,6 +172,24 @@ private:
|
||||
std::unique_ptr<PeerSet> mPeerSet;
|
||||
};
|
||||
|
||||
inline std::string
|
||||
to_string(InboundLedger::Reason reason)
|
||||
{
|
||||
using enum InboundLedger::Reason;
|
||||
switch (reason)
|
||||
{
|
||||
case HISTORY:
|
||||
return "HISTORY";
|
||||
case GENERIC:
|
||||
return "GENERIC";
|
||||
case CONSENSUS:
|
||||
return "CONSENSUS";
|
||||
default:
|
||||
UNREACHABLE("ripple::to_string(InboundLedger::Reason) : unknown value");
|
||||
return "unknown";
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace xrpl
|
||||
|
||||
#endif
|
||||
|
||||
@@ -344,7 +344,14 @@ InboundLedger::onTimer(bool wasProgress, ScopedLockType&)
|
||||
|
||||
if (!wasProgress)
|
||||
{
|
||||
checkLocal();
|
||||
if (checkLocal())
|
||||
{
|
||||
// Done. Something else (probably consensus) built the ledger
|
||||
// locally while waiting for data (or possibly before requesting)
|
||||
XRPL_ASSERT(isDone(), "ripple::InboundLedger::onTimer : done");
|
||||
JLOG(journal_.info()) << "Finished while waiting " << hash_;
|
||||
return;
|
||||
}
|
||||
|
||||
mByHash = true;
|
||||
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
#include <xrpld/app/main/Application.h>
|
||||
#include <xrpld/app/misc/NetworkOPs.h>
|
||||
|
||||
#include <xrpl/basics/CanProcess.h>
|
||||
#include <xrpl/basics/DecayingSample.h>
|
||||
#include <xrpl/basics/Log.h>
|
||||
#include <xrpl/basics/scope.h>
|
||||
#include <xrpl/beast/container/aged_map.h>
|
||||
#include <xrpl/core/JobQueue.h>
|
||||
#include <xrpl/core/PerfLog.h>
|
||||
@@ -53,10 +53,77 @@ public:
|
||||
auto doAcquire = [&, seq, reason]() -> std::shared_ptr<Ledger const> {
|
||||
XRPL_ASSERT(hash.isNonZero(), "xrpl::InboundLedgersImp::acquire::doAcquire : nonzero hash");
|
||||
|
||||
// probably not the right rule
|
||||
if (app_.getOPs().isNeedNetworkLedger() && (reason != InboundLedger::Reason::GENERIC) &&
|
||||
(reason != InboundLedger::Reason::CONSENSUS))
|
||||
bool const needNetworkLedger = app_.getOPs().isNeedNetworkLedger();
|
||||
bool const shouldAcquire = [&]() {
|
||||
if (!needNetworkLedger)
|
||||
return true;
|
||||
if (reason == InboundLedger::Reason::GENERIC)
|
||||
return true;
|
||||
if (reason == InboundLedger::Reason::CONSENSUS)
|
||||
return true;
|
||||
return false;
|
||||
}();
|
||||
|
||||
std::stringstream ss;
|
||||
ss << "InboundLedger::acquire: "
|
||||
<< "Request: " << to_string(hash) << ", " << seq
|
||||
<< " NeedNetworkLedger: " << (needNetworkLedger ? "yes" : "no") << " Reason: " << to_string(reason)
|
||||
<< " Should acquire: " << (shouldAcquire ? "true." : "false.");
|
||||
|
||||
/* Acquiring ledgers is somewhat expensive. It requires lots of
|
||||
* computation and network communication. Avoid it when it's not
|
||||
* appropriate. Every validation from a peer for a ledger that
|
||||
* we do not have locally results in a call to this function: even
|
||||
* if we are moments away from validating the same ledger.
|
||||
*/
|
||||
bool const shouldBroadcast = [&]() {
|
||||
// If the node is not in "full" state, it needs to sync to
|
||||
// the network, and doesn't have the necessary tx's and
|
||||
// ledger entries to build the ledger.
|
||||
bool const isFull = app_.getOPs().isFull();
|
||||
// If everything else is ok, don't try to acquire the ledger
|
||||
// if the requested seq is in the near future relative to
|
||||
// the validated ledger. If the requested ledger is between
|
||||
// 1 and 19 inclusive ledgers ahead of the valid ledger this
|
||||
// node has not built it yet, but it's possible/likely it
|
||||
// has the tx's necessary to build it and get caught up.
|
||||
// Plus it might not become validated. On the other hand, if
|
||||
// it's more than 20 in the future, this node should request
|
||||
// it so that it can jump ahead and get caught up.
|
||||
LedgerIndex const validSeq = app_.getLedgerMaster().getValidLedgerIndex();
|
||||
constexpr std::size_t lagLeeway = 20;
|
||||
bool const nearFuture = (seq > validSeq) && (seq < validSeq + lagLeeway);
|
||||
// If everything else is ok, don't try to acquire the ledger
|
||||
// if the request is related to consensus. (Note that
|
||||
// consensus calls usually pass a seq of 0, so nearFuture
|
||||
// will be false other than on a brand new network.)
|
||||
bool const consensus = reason == InboundLedger::Reason::CONSENSUS;
|
||||
ss << " Evaluating whether to broadcast requests to peers"
|
||||
<< ". full: " << (isFull ? "true" : "false") << ". ledger sequence " << seq
|
||||
<< ". Valid sequence: " << validSeq << ". Lag leeway: " << lagLeeway
|
||||
<< ". request for near future ledger: " << (nearFuture ? "true" : "false")
|
||||
<< ". Consensus: " << (consensus ? "true" : "false");
|
||||
|
||||
// If the node is not synced, send requests.
|
||||
if (!isFull)
|
||||
return true;
|
||||
// If the ledger is in the near future, do NOT send requests.
|
||||
// This node is probably about to build it.
|
||||
if (nearFuture)
|
||||
return false;
|
||||
// If the request is because of consensus, do NOT send requests.
|
||||
// This node is probably about to build it.
|
||||
if (consensus)
|
||||
return false;
|
||||
return true;
|
||||
}();
|
||||
ss << ". Would broadcast to peers? " << (shouldBroadcast ? "true." : "false.");
|
||||
|
||||
if (!shouldAcquire)
|
||||
{
|
||||
JLOG(j_.debug()) << "Abort(rule): " << ss.str();
|
||||
return {};
|
||||
}
|
||||
|
||||
bool isNew = true;
|
||||
std::shared_ptr<InboundLedger> inbound;
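
Illustrative sketch (not part of the patch): the shouldBroadcast lambda above condensed into a standalone predicate, to make the rule easier to scan. The Reason enum and the LedgerIndex alias are simplified stand-ins for the real InboundLedger::Reason and LedgerIndex types.

// Condensed restatement of the broadcast decision (illustrative only).
#include <cstddef>
#include <cstdint>

using LedgerIndex = std::uint32_t;

enum class Reason { HISTORY, GENERIC, CONSENSUS };

bool
shouldBroadcastRequest(
    bool isFull,           // node is synced ("full") with the network
    LedgerIndex seq,       // requested ledger sequence (0 if unknown)
    LedgerIndex validSeq,  // most recent validated ledger sequence
    Reason reason)
{
    constexpr std::size_t lagLeeway = 20;
    bool const nearFuture = (seq > validSeq) && (seq < validSeq + lagLeeway);

    if (!isFull)
        return true;   // not synced: ask peers for the data
    if (nearFuture)
        return false;  // this node will probably build the ledger itself
    if (reason == Reason::CONSENSUS)
        return false;  // consensus will supply it shortly
    return true;
}
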
|
||||
@@ -64,6 +131,7 @@ public:
|
||||
ScopedLockType sl(mLock);
|
||||
if (stopping_)
|
||||
{
|
||||
JLOG(j_.debug()) << "Abort(stopping): " << ss.str();
|
||||
return {};
|
||||
}
|
||||
|
||||
@@ -82,46 +150,50 @@ public:
|
||||
++mCounter;
|
||||
}
|
||||
}
|
||||
ss << " IsNew: " << (isNew ? "true" : "false");
|
||||
|
||||
if (inbound->isFailed())
|
||||
{
|
||||
JLOG(j_.debug()) << "Abort(failed): " << ss.str();
|
||||
return {};
|
||||
}
|
||||
|
||||
if (!isNew)
|
||||
inbound->update(seq);
|
||||
|
||||
if (!inbound->isComplete())
|
||||
{
|
||||
JLOG(j_.debug()) << "InProgress: " << ss.str();
|
||||
return {};
|
||||
}
|
||||
|
||||
JLOG(j_.debug()) << "Complete: " << ss.str();
|
||||
return inbound->getLedger();
|
||||
};
|
||||
using namespace std::chrono_literals;
|
||||
std::shared_ptr<Ledger const> ledger =
|
||||
perf::measureDurationAndLog(doAcquire, "InboundLedgersImp::acquire", 500ms, j_);
|
||||
|
||||
return ledger;
|
||||
return perf::measureDurationAndLog(doAcquire, "InboundLedgersImp::acquire", 500ms, j_);
|
||||
}
|
||||
|
||||
void
|
||||
acquireAsync(uint256 const& hash, std::uint32_t seq, InboundLedger::Reason reason) override
|
||||
{
|
||||
std::unique_lock lock(acquiresMutex_);
|
||||
try
|
||||
if (CanProcess const check{acquiresMutex_, pendingAcquires_, hash})
|
||||
{
|
||||
if (pendingAcquires_.contains(hash))
|
||||
return;
|
||||
pendingAcquires_.insert(hash);
|
||||
scope_unlock unlock(lock);
|
||||
acquire(hash, seq, reason);
|
||||
try
|
||||
{
|
||||
acquire(hash, seq, reason);
|
||||
}
|
||||
catch (std::exception const& e)
|
||||
{
|
||||
JLOG(j_.warn()) << "Exception thrown for acquiring new inbound ledger " << hash << ": " << e.what();
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
JLOG(j_.warn()) << "Unknown exception thrown for acquiring new "
|
||||
"inbound ledger "
|
||||
<< hash;
|
||||
}
|
||||
}
|
||||
catch (std::exception const& e)
|
||||
{
|
||||
JLOG(j_.warn()) << "Exception thrown for acquiring new inbound ledger " << hash << ": " << e.what();
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
JLOG(j_.warn()) << "Unknown exception thrown for acquiring new inbound ledger " << hash;
|
||||
}
|
||||
pendingAcquires_.erase(hash);
|
||||
}
|
||||
|
||||
std::shared_ptr<InboundLedger>
|
||||
|
||||
@@ -890,8 +890,8 @@ LedgerMaster::checkAccept(std::shared_ptr<Ledger const> const& ledger)
|
||||
return;
|
||||
}
|
||||
|
||||
JLOG(m_journal.info()) << "Advancing accepted ledger to " << ledger->header().seq << " with >= " << minVal
|
||||
<< " validations";
|
||||
JLOG(m_journal.info()) << "Advancing accepted ledger to " << ledger->header().seq << " ("
|
||||
<< to_short_string(ledger->header().hash) << ") with >= " << minVal << " validations";
|
||||
|
||||
ledger->setValidated();
|
||||
ledger->setFull();
|
||||
|
||||
@@ -13,7 +13,8 @@ TimeoutCounter::TimeoutCounter(
|
||||
QueueJobParameter&& jobParameter,
|
||||
beast::Journal journal)
|
||||
: app_(app)
|
||||
, journal_(journal)
|
||||
, sink_(journal, to_short_string(hash) + " ")
|
||||
, journal_(sink_)
|
||||
, hash_(hash)
|
||||
, timeouts_(0)
|
||||
, complete_(false)
|
||||
@@ -33,6 +34,7 @@ TimeoutCounter::setTimer(ScopedLockType& sl)
|
||||
{
|
||||
if (isDone())
|
||||
return;
|
||||
JLOG(journal_.debug()) << "Setting timer for " << timerInterval_.count() << "ms";
|
||||
timer_.expires_after(timerInterval_);
|
||||
timer_.async_wait([wptr = pmDowncast()](boost::system::error_code const& ec) {
|
||||
if (ec == boost::asio::error::operation_aborted)
|
||||
@@ -40,6 +42,9 @@ TimeoutCounter::setTimer(ScopedLockType& sl)
|
||||
|
||||
if (auto ptr = wptr.lock())
|
||||
{
|
||||
JLOG(ptr->journal_.debug()) << "timer: ec: " << ec
|
||||
<< " (operation_aborted: " << boost::asio::error::operation_aborted << " - "
|
||||
<< (ec == boost::asio::error::operation_aborted ? "aborted" : "other") << ")";
|
||||
ScopedLockType sl(ptr->mtx_);
|
||||
ptr->queueJob(sl);
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
#include <xrpld/app/main/Application.h>
|
||||
|
||||
#include <xrpl/beast/utility/Journal.h>
|
||||
#include <xrpl/beast/utility/WrappedSink.h>
|
||||
#include <xrpl/core/Job.h>
|
||||
|
||||
#include <boost/asio/basic_waitable_timer.hpp>
|
||||
@@ -104,6 +105,7 @@ protected:
|
||||
// Used in this class for access to boost::asio::io_context and
|
||||
// xrpl::Overlay. Used in subtypes for the kitchen sink.
|
||||
Application& app_;
|
||||
beast::WrappedSink sink_;
|
||||
beast::Journal journal_;
|
||||
mutable std::recursive_mutex mtx_;
|
||||
|
||||
|
||||
@@ -71,6 +71,16 @@ HashRouter::shouldProcess(
|
||||
return s.shouldProcess(suppressionMap_.clock().now(), tx_interval);
|
||||
}
|
||||
|
||||
bool
|
||||
HashRouter::shouldProcessForPeer(uint256 const& key, PeerShortID peer, std::chrono::seconds interval)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
|
||||
auto& entry = emplace(key).first;
|
||||
|
||||
return entry.shouldProcessForPeer(peer, suppressionMap_.clock().now(), interval);
|
||||
}
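
Illustrative sketch (not part of the patch): a simplified stand-alone model of the per-peer suppression rule implemented above, using std::string keys and int peer ids in place of uint256 and PeerShortID. The usage comment mirrors how PeerImp::onMessage(TMGetLedger) calls it later in this changeset.

// Simplified model of HashRouter::shouldProcessForPeer (illustrative only).
#include <chrono>
#include <map>
#include <string>
#include <utility>

class MiniRouter
{
public:
    using Clock = std::chrono::steady_clock;

    // Returns true the first time (key, peer) is seen within `interval`;
    // further calls for the same pair are suppressed until the window expires.
    bool
    shouldProcessForPeer(std::string const& key, int peer, std::chrono::seconds interval)
    {
        auto const now = Clock::now();
        auto& last = lastSeen_[{key, peer}];
        if (last != Clock::time_point{} && last + interval > now)
            return false;
        last = now;
        return true;
    }

private:
    std::map<std::pair<std::string, int>, Clock::time_point> lastSeen_;
};

// Usage mirrors the TMGetLedger handler below: hash the request and drop
// duplicates from the same peer for a fixed window.
//   if (!router.shouldProcessForPeer(messageHash, peerId, std::chrono::seconds{15}))
//       return;  // duplicate from this peer; ignore or penalize
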
|
||||
|
||||
HashRouterFlags
|
||||
HashRouter::getFlags(uint256 const& key)
|
||||
{
|
||||
@@ -142,4 +152,13 @@ setup_HashRouter(Config const& config)
|
||||
return setup;
|
||||
}
|
||||
|
||||
auto
|
||||
HashRouter::getPeers(uint256 const& key) -> std::set<PeerShortID>
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
|
||||
auto& s = emplace(key).first;
|
||||
return s.peekPeerSet();
|
||||
}
|
||||
|
||||
} // namespace xrpl
|
||||
|
||||
@@ -140,6 +140,13 @@ private:
|
||||
return std::move(peers_);
|
||||
}
|
||||
|
||||
/** Return set of peers waiting for reply. Leaves list unchanged. */
|
||||
std::set<PeerShortID> const&
|
||||
peekPeerSet()
|
||||
{
|
||||
return peers_;
|
||||
}
|
||||
|
||||
/** Return seated relay time point if the message has been relayed */
|
||||
std::optional<Stopwatch::time_point>
|
||||
relayed() const
|
||||
@@ -171,6 +178,17 @@ private:
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
shouldProcessForPeer(PeerShortID peer, Stopwatch::time_point now, std::chrono::seconds interval)
|
||||
{
|
||||
if (peerProcessed_.contains(peer) && ((peerProcessed_[peer] + interval) > now))
|
||||
return false;
|
||||
// Peer may already be in the list, but adding it again doesn't hurt
|
||||
addPeer(peer);
|
||||
peerProcessed_[peer] = now;
|
||||
return true;
|
||||
}
|
||||
|
||||
private:
|
||||
HashRouterFlags flags_ = HashRouterFlags::UNDEFINED;
|
||||
std::set<PeerShortID> peers_;
|
||||
@@ -178,6 +196,7 @@ private:
|
||||
// than one flag needs to expire independently.
|
||||
std::optional<Stopwatch::time_point> relayed_;
|
||||
std::optional<Stopwatch::time_point> processed_;
|
||||
std::map<PeerShortID, Stopwatch::time_point> peerProcessed_;
|
||||
};
|
||||
|
||||
public:
|
||||
@@ -200,7 +219,7 @@ public:
|
||||
|
||||
/** Add a suppression peer and get message's relay status.
|
||||
* Return pair:
|
||||
* element 1: true if the peer is added.
|
||||
* element 1: true if the key is added.
|
||||
* element 2: optional is seated to the relay time point or
|
||||
* is unseated if has not relayed yet. */
|
||||
std::pair<bool, std::optional<Stopwatch::time_point>>
|
||||
@@ -213,6 +232,15 @@ public:
|
||||
bool
|
||||
shouldProcess(uint256 const& key, PeerShortID peer, HashRouterFlags& flags, std::chrono::seconds tx_interval);
|
||||
|
||||
/** Determines whether the hashed item should be processed for the given
|
||||
peer. Could be an incoming or outgoing message.
|
||||
|
||||
Items filtered with this function should only be processed for the given
|
||||
peer once. Unlike shouldProcess, it can be processed for other peers.
|
||||
*/
|
||||
bool
|
||||
shouldProcessForPeer(uint256 const& key, PeerShortID peer, std::chrono::seconds interval);
|
||||
|
||||
/** Set the flags on a hash.
|
||||
|
||||
@return `true` if the flags were changed. `false` if unchanged.
|
||||
@@ -238,6 +266,11 @@ public:
|
||||
std::optional<std::set<PeerShortID>>
|
||||
shouldRelay(uint256 const& key);
|
||||
|
||||
/** Returns a copy of the set of peers in the Entry for the key
|
||||
*/
|
||||
std::set<PeerShortID>
|
||||
getPeers(uint256 const& key);
|
||||
|
||||
private:
|
||||
// pair.second indicates whether the entry was created
|
||||
std::pair<Entry&, bool>
|
||||
|
||||
@@ -33,10 +33,10 @@
|
||||
#include <xrpld/rpc/MPTokenIssuanceID.h>
|
||||
#include <xrpld/rpc/ServerHandler.h>
|
||||
|
||||
#include <xrpl/basics/CanProcess.h>
|
||||
#include <xrpl/basics/UptimeClock.h>
|
||||
#include <xrpl/basics/mulDiv.h>
|
||||
#include <xrpl/basics/safe_cast.h>
|
||||
#include <xrpl/basics/scope.h>
|
||||
#include <xrpl/beast/utility/rngfill.h>
|
||||
#include <xrpl/core/PerfLog.h>
|
||||
#include <xrpl/crypto/RFC1751.h>
|
||||
@@ -378,7 +378,7 @@ public:
|
||||
isFull() override;
|
||||
|
||||
void
|
||||
setMode(OperatingMode om) override;
|
||||
setMode(OperatingMode om, char const* reason) override;
|
||||
|
||||
bool
|
||||
isBlocked() override;
|
||||
@@ -809,7 +809,7 @@ NetworkOPsImp::strOperatingMode(bool const admin /* = false */) const
|
||||
inline void
|
||||
NetworkOPsImp::setStandAlone()
|
||||
{
|
||||
setMode(OperatingMode::FULL);
|
||||
setMode(OperatingMode::FULL, "setStandAlone");
|
||||
}
|
||||
|
||||
inline void
|
||||
@@ -945,7 +945,7 @@ NetworkOPsImp::processHeartbeatTimer()
|
||||
{
|
||||
if (mMode != OperatingMode::DISCONNECTED)
|
||||
{
|
||||
setMode(OperatingMode::DISCONNECTED);
|
||||
setMode(OperatingMode::DISCONNECTED, "Heartbeat: insufficient peers");
|
||||
std::stringstream ss;
|
||||
ss << "Node count (" << numPeers << ") has fallen "
|
||||
<< "below required minimum (" << minPeerCount_ << ").";
|
||||
@@ -969,7 +969,7 @@ NetworkOPsImp::processHeartbeatTimer()
|
||||
|
||||
if (mMode == OperatingMode::DISCONNECTED)
|
||||
{
|
||||
setMode(OperatingMode::CONNECTED);
|
||||
setMode(OperatingMode::CONNECTED, "Heartbeat: sufficient peers");
|
||||
JLOG(m_journal.info()) << "Node count (" << numPeers << ") is sufficient.";
|
||||
CLOG(clog.ss()) << "setting mode to CONNECTED based on " << numPeers << " peers. ";
|
||||
}
|
||||
@@ -979,9 +979,9 @@ NetworkOPsImp::processHeartbeatTimer()
|
||||
auto origMode = mMode.load();
|
||||
CLOG(clog.ss()) << "mode: " << strOperatingMode(origMode, true);
|
||||
if (mMode == OperatingMode::SYNCING)
|
||||
setMode(OperatingMode::SYNCING);
|
||||
setMode(OperatingMode::SYNCING, "Heartbeat: check syncing");
|
||||
else if (mMode == OperatingMode::CONNECTED)
|
||||
setMode(OperatingMode::CONNECTED);
|
||||
setMode(OperatingMode::CONNECTED, "Heartbeat: check connected");
|
||||
auto newMode = mMode.load();
|
||||
if (origMode != newMode)
|
||||
{
|
||||
@@ -1650,7 +1650,7 @@ void
|
||||
NetworkOPsImp::setAmendmentBlocked()
|
||||
{
|
||||
amendmentBlocked_ = true;
|
||||
setMode(OperatingMode::CONNECTED);
|
||||
setMode(OperatingMode::CONNECTED, "setAmendmentBlocked");
|
||||
}
|
||||
|
||||
inline bool
|
||||
@@ -1681,7 +1681,7 @@ void
|
||||
NetworkOPsImp::setUNLBlocked()
|
||||
{
|
||||
unlBlocked_ = true;
|
||||
setMode(OperatingMode::CONNECTED);
|
||||
setMode(OperatingMode::CONNECTED, "setUNLBlocked");
|
||||
}
|
||||
|
||||
inline void
|
||||
@@ -1776,7 +1776,7 @@ NetworkOPsImp::checkLastClosedLedger(Overlay::PeerSequence const& peerList, uint
|
||||
|
||||
if ((mMode == OperatingMode::TRACKING) || (mMode == OperatingMode::FULL))
|
||||
{
|
||||
setMode(OperatingMode::CONNECTED);
|
||||
setMode(OperatingMode::CONNECTED, "check LCL: not on consensus ledger");
|
||||
}
|
||||
|
||||
if (consensus)
|
||||
@@ -1856,8 +1856,8 @@ NetworkOPsImp::beginConsensus(uint256 const& networkClosed, std::unique_ptr<std:
|
||||
// this shouldn't happen unless we jump ledgers
|
||||
if (mMode == OperatingMode::FULL)
|
||||
{
|
||||
JLOG(m_journal.warn()) << "Don't have LCL, going to tracking";
|
||||
setMode(OperatingMode::TRACKING);
|
||||
JLOG(m_journal.warn()) << "beginConsensus Don't have LCL, going to tracking";
|
||||
setMode(OperatingMode::TRACKING, "beginConsensus: No LCL");
|
||||
CLOG(clog) << "beginConsensus Don't have LCL, going to tracking. ";
|
||||
}
|
||||
|
||||
@@ -1981,7 +1981,7 @@ NetworkOPsImp::endConsensus(std::unique_ptr<std::stringstream> const& clog)
|
||||
// validations we have for LCL. If the ledger is good enough, go to
|
||||
// TRACKING - TODO
|
||||
if (!needNetworkLedger_)
|
||||
setMode(OperatingMode::TRACKING);
|
||||
setMode(OperatingMode::TRACKING, "endConsensus: check tracking");
|
||||
}
|
||||
|
||||
if (((mMode == OperatingMode::CONNECTED) || (mMode == OperatingMode::TRACKING)) && !ledgerChange)
|
||||
@@ -1992,7 +1992,7 @@ NetworkOPsImp::endConsensus(std::unique_ptr<std::stringstream> const& clog)
|
||||
auto current = m_ledgerMaster.getCurrentLedger();
|
||||
if (app_.timeKeeper().now() < (current->header().parentCloseTime + 2 * current->header().closeTimeResolution))
|
||||
{
|
||||
setMode(OperatingMode::FULL);
|
||||
setMode(OperatingMode::FULL, "endConsensus: check full");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2004,7 +2004,7 @@ NetworkOPsImp::consensusViewChange()
|
||||
{
|
||||
if ((mMode == OperatingMode::FULL) || (mMode == OperatingMode::TRACKING))
|
||||
{
|
||||
setMode(OperatingMode::CONNECTED);
|
||||
setMode(OperatingMode::CONNECTED, "consensusViewChange");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2302,7 +2302,7 @@ NetworkOPsImp::pubPeerStatus(std::function<Json::Value(void)> const& func)
|
||||
}
|
||||
|
||||
void
|
||||
NetworkOPsImp::setMode(OperatingMode om)
|
||||
NetworkOPsImp::setMode(OperatingMode om, char const* reason)
|
||||
{
|
||||
using namespace std::chrono_literals;
|
||||
if (om == OperatingMode::CONNECTED)
|
||||
@@ -2322,11 +2322,12 @@ NetworkOPsImp::setMode(OperatingMode om)
|
||||
if (mMode == om)
|
||||
return;
|
||||
|
||||
auto const sink = om < mMode ? m_journal.warn() : m_journal.info();
|
||||
mMode = om;
|
||||
|
||||
accounting_.mode(om);
|
||||
|
||||
JLOG(m_journal.info()) << "STATE->" << strOperatingMode();
|
||||
JLOG(sink) << "STATE->" << strOperatingMode() << " - " << reason;
|
||||
pubServer();
|
||||
}
|
||||
|
||||
@@ -2335,31 +2336,23 @@ NetworkOPsImp::recvValidation(std::shared_ptr<STValidation> const& val, std::str
|
||||
{
|
||||
JLOG(m_journal.trace()) << "recvValidation " << val->getLedgerHash() << " from " << source;
|
||||
|
||||
std::unique_lock lock(validationsMutex_);
|
||||
BypassAccept bypassAccept = BypassAccept::no;
|
||||
try
|
||||
{
|
||||
if (pendingValidations_.contains(val->getLedgerHash()))
|
||||
bypassAccept = BypassAccept::yes;
|
||||
else
|
||||
pendingValidations_.insert(val->getLedgerHash());
|
||||
scope_unlock unlock(lock);
|
||||
handleNewValidation(app_, val, source, bypassAccept, m_journal);
|
||||
CanProcess const check(validationsMutex_, pendingValidations_, val->getLedgerHash());
|
||||
try
|
||||
{
|
||||
BypassAccept bypassAccept = check ? BypassAccept::no : BypassAccept::yes;
|
||||
handleNewValidation(app_, val, source, bypassAccept, m_journal);
|
||||
}
|
||||
catch (std::exception const& e)
|
||||
{
|
||||
JLOG(m_journal.warn()) << "Exception thrown for handling new validation " << val->getLedgerHash() << ": "
|
||||
<< e.what();
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
JLOG(m_journal.warn()) << "Unknown exception thrown for handling new validation " << val->getLedgerHash();
|
||||
}
|
||||
}
|
||||
catch (std::exception const& e)
|
||||
{
|
||||
JLOG(m_journal.warn()) << "Exception thrown for handling new validation " << val->getLedgerHash() << ": "
|
||||
<< e.what();
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
JLOG(m_journal.warn()) << "Unknown exception thrown for handling new validation " << val->getLedgerHash();
|
||||
}
|
||||
if (bypassAccept == BypassAccept::no)
|
||||
{
|
||||
pendingValidations_.erase(val->getLedgerHash());
|
||||
}
|
||||
lock.unlock();
|
||||
|
||||
pubValidation(val);
|
||||
|
||||
|
||||
@@ -180,7 +180,7 @@ public:
|
||||
virtual bool
|
||||
isFull() = 0;
|
||||
virtual void
|
||||
setMode(OperatingMode om) = 0;
|
||||
setMode(OperatingMode om, char const* reason) = 0;
|
||||
virtual bool
|
||||
isBlocked() = 0;
|
||||
virtual bool
|
||||
|
||||
@@ -358,11 +358,9 @@ Config::setup(std::string const& strConf, bool bQuiet, bool bSilent, bool bStand
|
||||
std::string const dbPath(legacy("database_path"));
|
||||
if (!dbPath.empty())
|
||||
dataDir = boost::filesystem::path(dbPath);
|
||||
else if (RUN_STANDALONE)
|
||||
dataDir.clear();
|
||||
}
|
||||
|
||||
if (!dataDir.empty())
|
||||
if (!RUN_STANDALONE)
|
||||
{
|
||||
boost::system::error_code ec;
|
||||
boost::filesystem::create_directories(dataDir, ec);
|
||||
|
||||
@@ -14,11 +14,7 @@ namespace Resource {
|
||||
class Charge;
|
||||
}
|
||||
|
||||
enum class ProtocolFeature {
|
||||
ValidatorListPropagation,
|
||||
ValidatorList2Propagation,
|
||||
LedgerReplay,
|
||||
};
|
||||
enum class ProtocolFeature { ValidatorListPropagation, ValidatorList2Propagation, LedgerReplay, LedgerDataCookies };
|
||||
|
||||
/** Represents a peer connection in the overlay. */
|
||||
class Peer
|
||||
@@ -117,6 +113,13 @@ public:
|
||||
|
||||
virtual bool
|
||||
txReduceRelayEnabled() const = 0;
|
||||
|
||||
//
|
||||
// Messages
|
||||
//
|
||||
|
||||
virtual std::set<std::optional<uint64_t>>
|
||||
releaseRequestCookies(uint256 const& requestHash) = 0;
|
||||
};
|
||||
|
||||
} // namespace xrpl
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
#include <xrpld/app/tx/apply.h>
|
||||
#include <xrpld/overlay/Cluster.h>
|
||||
#include <xrpld/overlay/detail/PeerImp.h>
|
||||
#include <xrpld/overlay/detail/ProtocolMessage.h>
|
||||
#include <xrpld/overlay/detail/Tuning.h>
|
||||
|
||||
#include <xrpl/basics/UptimeClock.h>
|
||||
@@ -45,6 +46,8 @@ std::chrono::seconds constexpr peerTimerInterval{60};
|
||||
/** The timeout for a shutdown timer */
|
||||
std::chrono::seconds constexpr shutdownTimerInterval{5};
|
||||
|
||||
/** How often we process duplicate incoming TMGetLedger messages */
|
||||
std::chrono::seconds constexpr getledgerInterval{15};
|
||||
} // namespace
|
||||
|
||||
// TODO: Remove this exclusion once unit tests are added after the hotfix
|
||||
@@ -461,6 +464,8 @@ PeerImp::supportsFeature(ProtocolFeature f) const
|
||||
return protocol_ >= make_protocol(2, 2);
|
||||
case ProtocolFeature::LedgerReplay:
|
||||
return ledgerReplayEnabled_;
|
||||
case ProtocolFeature::LedgerDataCookies:
|
||||
return protocol_ >= make_protocol(2, 3);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
@@ -1337,8 +1342,9 @@ PeerImp::handleTransaction(std::shared_ptr<protocol::TMTransaction> const& m, bo
|
||||
void
|
||||
PeerImp::onMessage(std::shared_ptr<protocol::TMGetLedger> const& m)
|
||||
{
|
||||
auto badData = [&](std::string const& msg) {
|
||||
fee_.update(Resource::feeInvalidData, "get_ledger " + msg);
|
||||
auto badData = [&](std::string const& msg, bool chargefee = true) {
|
||||
if (chargefee)
|
||||
fee_.update(Resource::feeInvalidData, "get_ledger " + msg);
|
||||
JLOG(p_journal_.warn()) << "TMGetLedger: " << msg;
|
||||
};
|
||||
auto const itype{m->itype()};
|
||||
@@ -1411,11 +1417,66 @@ PeerImp::onMessage(std::shared_ptr<protocol::TMGetLedger> const& m)
|
||||
}
|
||||
}
|
||||
|
||||
// Drop duplicate requests from the same peer for at least
// `getledgerInterval` seconds.
// Append a little junk to prevent the hash of an incoming message
// from matching the hash of the same outgoing message.
// `shouldProcessForPeer` does not distinguish between incoming and
|
||||
// outgoing, and some of the message relay logic checks the hash to see
|
||||
// if the message has been relayed already. If the hashes are the same,
|
||||
// a duplicate will be detected when sending the message is attempted,
|
||||
// so it will fail.
|
||||
auto const messageHash = sha512Half(*m, nullptr);
|
||||
// Request cookies are not included in the hash. Track them here.
|
||||
auto const requestCookie = [&m]() -> std::optional<uint64_t> {
|
||||
if (m->has_requestcookie())
|
||||
return m->requestcookie();
|
||||
return std::nullopt;
|
||||
}();
|
||||
auto const [inserted, pending] = [&] {
|
||||
std::lock_guard lock{cookieLock_};
|
||||
auto& cookies = messageRequestCookies_[messageHash];
|
||||
bool const pending = !cookies.empty();
|
||||
return std::pair{cookies.emplace(requestCookie).second, pending};
|
||||
}();
|
||||
// Check if the request has been seen from this peer.
|
||||
if (!app_.getHashRouter().shouldProcessForPeer(messageHash, id_, getledgerInterval))
|
||||
{
|
||||
// This request has already been seen from this peer.
|
||||
// Has it been seen with this request cookie (or lack thereof)?
|
||||
|
||||
if (inserted)
|
||||
{
|
||||
// This is a duplicate request, but with a new cookie. When a
|
||||
// response is ready, one will be sent for each request cookie.
|
||||
JLOG(p_journal_.debug()) << "TMGetLedger: duplicate request with new request cookie: "
|
||||
<< requestCookie.value_or(0) << ". Job pending: " << (pending ? "yes" : "no")
|
||||
<< ": " << messageHash;
|
||||
if (pending)
|
||||
{
|
||||
// Don't bother queueing up a new job if other requests are
|
||||
// already pending. This should limit entries in the job queue
|
||||
// to one per peer per unique request.
|
||||
JLOG(p_journal_.debug()) << "TMGetLedger: Suppressing recvGetLedger job, since one "
|
||||
"is pending: "
|
||||
<< messageHash;
|
||||
return;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Don't punish nodes that don't know any better
|
||||
return badData(
|
||||
"duplicate request: " + to_string(messageHash), supportsFeature(ProtocolFeature::LedgerDataCookies));
|
||||
}
|
||||
}
|
||||
|
||||
// Queue a job to process the request
|
||||
JLOG(p_journal_.debug()) << "TMGetLedger: Adding recvGetLedger job: " << messageHash;
|
||||
std::weak_ptr<PeerImp> weak = shared_from_this();
|
||||
app_.getJobQueue().addJob(jtLEDGER_REQ, "RcvGetLedger", [weak, m]() {
|
||||
app_.getJobQueue().addJob(jtLEDGER_REQ, "RcvGetLedger", [weak, m, messageHash]() {
|
||||
if (auto peer = weak.lock())
|
||||
peer->processLedgerRequest(m);
|
||||
peer->processLedgerRequest(m, messageHash);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1514,8 +1575,9 @@ PeerImp::onMessage(std::shared_ptr<protocol::TMReplayDeltaResponse> const& m)
|
||||
void
|
||||
PeerImp::onMessage(std::shared_ptr<protocol::TMLedgerData> const& m)
|
||||
{
|
||||
auto badData = [&](std::string const& msg) {
|
||||
fee_.update(Resource::feeInvalidData, msg);
|
||||
auto badData = [&](std::string const& msg, bool charge = true) {
|
||||
if (charge)
|
||||
fee_.update(Resource::feeInvalidData, msg);
|
||||
JLOG(p_journal_.warn()) << "TMLedgerData: " << msg;
|
||||
};
|
||||
|
||||
@@ -1561,23 +1623,94 @@ PeerImp::onMessage(std::shared_ptr<protocol::TMLedgerData> const& m)
|
||||
return badData("Invalid Ledger/TXset nodes " + std::to_string(m->nodes_size()));
|
||||
}
|
||||
|
||||
// If there is a request cookie, attempt to relay the message
|
||||
if (m->has_requestcookie())
|
||||
auto const messageHash = sha512Half(*m);
|
||||
if (!app_.getHashRouter().addSuppressionPeer(messageHash, id_))
|
||||
{
|
||||
if (auto peer = overlay_.findPeerByShortID(m->requestcookie()))
|
||||
// Don't punish nodes that don't know any better
|
||||
return badData(
|
||||
"Duplicate message: " + to_string(messageHash), supportsFeature(ProtocolFeature::LedgerDataCookies));
|
||||
}
|
||||
|
||||
bool const routed = m->has_directresponse() || m->responsecookies_size() || m->has_requestcookie();
|
||||
|
||||
{
|
||||
// Check if this message needs to be forwarded to one or more peers.
|
||||
// Maximum of one of the relevant fields should be populated.
|
||||
XRPL_ASSERT(
|
||||
!m->has_requestcookie() || !m->responsecookies_size(),
|
||||
"ripple::PeerImp::onMessage(TMLedgerData) : valid cookie fields");
|
||||
|
||||
// Make a copy of the response cookies, then wipe the list so it can be
|
||||
// forwarded cleanly
|
||||
auto const responseCookies = m->responsecookies();
|
||||
m->clear_responsecookies();
|
||||
// Flag indicating if this response should be processed locally,
|
||||
// possibly in addition to being forwarded.
|
||||
bool const directResponse = m->has_directresponse() && m->directresponse();
|
||||
m->clear_directresponse();
|
||||
|
||||
auto const relay = [this, m, &messageHash](auto const cookie) {
|
||||
if (auto peer = overlay_.findPeerByShortID(cookie))
|
||||
{
|
||||
XRPL_ASSERT(
|
||||
!m->has_requestcookie() && !m->responsecookies_size(),
|
||||
"ripple::PeerImp::onMessage(TMLedgerData) relay : no "
|
||||
"cookies");
|
||||
if (peer->supportsFeature(ProtocolFeature::LedgerDataCookies))
|
||||
// Setting this flag is not _strictly_ necessary for peers
|
||||
// that support it if there are no cookies included in the
|
||||
// message, but it is more accurate.
|
||||
m->set_directresponse(true);
|
||||
else
|
||||
m->clear_directresponse();
|
||||
peer->send(std::make_shared<Message>(*m, protocol::mtLEDGER_DATA));
|
||||
}
|
||||
else
|
||||
JLOG(p_journal_.info()) << "Unable to route TX/ledger data reply to peer [" << cookie
|
||||
<< "]: " << messageHash;
|
||||
};
|
||||
// If there is a request cookie, attempt to relay the message
|
||||
if (m->has_requestcookie())
|
||||
{
|
||||
XRPL_ASSERT(
|
||||
responseCookies.empty(),
|
||||
"ripple::PeerImp::onMessage(TMLedgerData) : no response "
|
||||
"cookies");
|
||||
m->clear_requestcookie();
|
||||
peer->send(std::make_shared<Message>(*m, protocol::mtLEDGER_DATA));
|
||||
relay(m->requestcookie());
|
||||
if (!directResponse && responseCookies.empty())
|
||||
return;
|
||||
}
|
||||
else
|
||||
// If there's a list of request cookies, attempt to relay the message to
|
||||
// all of them.
|
||||
if (responseCookies.size())
|
||||
{
|
||||
JLOG(p_journal_.info()) << "Unable to route TX/ledger data reply";
|
||||
for (auto const cookie : responseCookies)
|
||||
relay(cookie);
|
||||
if (!directResponse)
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Now that any forwarding is done check the base message (data only, no
|
||||
// routing info for duplicates)
|
||||
if (routed)
|
||||
{
|
||||
m->clear_directresponse();
|
||||
XRPL_ASSERT(
|
||||
!m->has_requestcookie() && !m->responsecookies_size(),
|
||||
"ripple::PeerImp::onMessage(TMLedgerData) : no cookies");
|
||||
auto const baseMessageHash = sha512Half(*m);
|
||||
if (!app_.getHashRouter().addSuppressionPeer(baseMessageHash, id_))
|
||||
{
|
||||
// Don't punish nodes that don't know any better
|
||||
return badData(
|
||||
"Duplicate message: " + to_string(baseMessageHash),
|
||||
supportsFeature(ProtocolFeature::LedgerDataCookies));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
uint256 const ledgerHash{m->ledgerhash()};
|
||||
|
||||
// Otherwise check if received data for a candidate transaction set
|
||||
if (m->type() == protocol::liTS_CANDIDATE)
|
||||
{
|
||||
@@ -2849,16 +2982,21 @@ PeerImp::checkValidation(
// the TX tree with the specified root hash.
//
static std::shared_ptr<PeerImp>
getPeerWithTree(OverlayImpl& ov, uint256 const& rootHash, PeerImp const* skip)
getPeerWithTree(
    OverlayImpl& ov,
    uint256 const& rootHash,
    PeerImp const* skip,
    std::function<bool(Peer::id_t)> shouldProcessCallback)
{
    std::shared_ptr<PeerImp> ret;
    int retScore = 0;

    XRPL_ASSERT(shouldProcessCallback, "ripple::getPeerWithTree : callback provided");
    ov.for_each([&](std::shared_ptr<PeerImp>&& p) {
        if (p->hasTxSet(rootHash) && p.get() != skip)
        {
            auto score = p->getScore(true);
            if (!ret || (score > retScore))
            if (!ret || (score > retScore && shouldProcessCallback(p->id())))
            {
                ret = std::move(p);
                retScore = score;
@@ -2873,16 +3011,22 @@ getPeerWithTree(OverlayImpl& ov, uint256 const& rootHash, PeerImp const* skip)
// have the ledger and how responsive it is.
//
static std::shared_ptr<PeerImp>
getPeerWithLedger(OverlayImpl& ov, uint256 const& ledgerHash, LedgerIndex ledger, PeerImp const* skip)
getPeerWithLedger(
    OverlayImpl& ov,
    uint256 const& ledgerHash,
    LedgerIndex ledger,
    PeerImp const* skip,
    std::function<bool(Peer::id_t)> shouldProcessCallback)
{
    std::shared_ptr<PeerImp> ret;
    int retScore = 0;

    XRPL_ASSERT(shouldProcessCallback, "ripple::getPeerWithLedger : callback provided");
    ov.for_each([&](std::shared_ptr<PeerImp>&& p) {
        if (p->hasLedger(ledgerHash, ledger) && p.get() != skip)
        {
            auto score = p->getScore(true);
            if (!ret || (score > retScore))
            if (!ret || (score > retScore && shouldProcessCallback(p->id())))
            {
                ret = std::move(p);
                retScore = score;
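Both helpers now take a shouldProcessCallback so that a peer which was already asked for this item recently can be skipped when choosing where to relay. A small standalone sketch of that selection pattern, with a hypothetical PeerInfo type instead of OverlayImpl; note the sketch applies the eligibility filter to every candidate, while the diff above only consults it during the score comparison:

#include <cstdint>
#include <functional>
#include <iostream>
#include <optional>
#include <vector>

struct PeerInfo
{
    std::uint32_t id;
    int score;     // responsiveness metric, higher is better
    bool hasItem;  // e.g. "has this ledger" or "has this tx set"
};

// Pick the highest-scoring peer that has the item, is not `skip`, and passes
// the eligibility callback (e.g. "not asked within the last N seconds").
std::optional<PeerInfo> pickPeer(
    std::vector<PeerInfo> const& peers,
    std::uint32_t skip,
    std::function<bool(std::uint32_t)> eligible)
{
    std::optional<PeerInfo> best;
    for (auto const& p : peers)
    {
        if (!p.hasItem || p.id == skip || !eligible(p.id))
            continue;
        if (!best || p.score > best->score)
            best = p;
    }
    return best;
}

int main()
{
    std::vector<PeerInfo> peers{{1, 10, true}, {2, 50, true}, {3, 70, false}};
    auto best = pickPeer(peers, /*skip=*/0, [](std::uint32_t id) { return id != 2; });
    if (best)
        std::cout << "relay to peer " << best->id << '\n';  // peer 1
}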
@@ -2894,7 +3038,10 @@ getPeerWithLedger(OverlayImpl& ov, uint256 const& ledgerHash, LedgerIndex ledger
}

void
PeerImp::sendLedgerBase(std::shared_ptr<Ledger const> const& ledger, protocol::TMLedgerData& ledgerData)
PeerImp::sendLedgerBase(
    std::shared_ptr<Ledger const> const& ledger,
    protocol::TMLedgerData& ledgerData,
    PeerCookieMap const& destinations)
{
    JLOG(p_journal_.trace()) << "sendLedgerBase: Base data";

@@ -2924,14 +3071,90 @@ PeerImp::sendLedgerBase(std::shared_ptr<Ledger const> const& ledger, protocol::T
        }
    }

    auto message{std::make_shared<Message>(ledgerData, protocol::mtLEDGER_DATA)};
    send(message);
    sendToMultiple(ledgerData, destinations);
}

void
PeerImp::sendToMultiple(protocol::TMLedgerData& ledgerData, PeerCookieMap const& destinations)
{
    bool foundSelf = false;
    for (auto const& [peer, cookies] : destinations)
    {
        if (peer.get() == this)
            foundSelf = true;
        bool const multipleCookies = peer->supportsFeature(ProtocolFeature::LedgerDataCookies);
        std::vector<std::uint64_t> sendCookies;

        bool directResponse = false;
        if (!multipleCookies)
        {
            JLOG(p_journal_.debug()) << "sendToMultiple: Sending " << cookies.size()
                                     << " TMLedgerData messages to peer [" << peer->id()
                                     << "]: " << sha512Half(ledgerData);
        }
        for (auto const& cookie : cookies)
        {
            // Unfortunately, need a separate Message object for every
            // combination
            if (cookie)
            {
                if (multipleCookies)
                {
                    // Save this one for later to send a single message
                    sendCookies.emplace_back(*cookie);
                    continue;
                }

                // Feature not supported, so send a single message with a
                // single cookie
                ledgerData.set_requestcookie(*cookie);
            }
            else
            {
                if (multipleCookies)
                {
                    // Set this flag later on the single message
                    directResponse = true;
                    continue;
                }

                ledgerData.clear_requestcookie();
            }
            XRPL_ASSERT(
                !multipleCookies,
                "ripple::PeerImp::sendToMultiple : ledger data cookies "
                "unsupported");
            auto message{std::make_shared<Message>(ledgerData, protocol::mtLEDGER_DATA)};
            peer->send(message);
        }
        if (multipleCookies)
        {
            // Send a single message with all the cookies and/or the direct
            // response flag, so the receiver can farm out the single message to
            // multiple peers and/or itself
            XRPL_ASSERT(
                sendCookies.size() || directResponse, "ripple::PeerImp::sendToMultiple : valid response options");
            ledgerData.clear_requestcookie();
            ledgerData.clear_responsecookies();
            ledgerData.set_directresponse(directResponse);
            for (auto const& cookie : sendCookies)
                ledgerData.add_responsecookies(cookie);
            auto message{std::make_shared<Message>(ledgerData, protocol::mtLEDGER_DATA)};
            peer->send(message);

            JLOG(p_journal_.debug()) << "sendToMultiple: Sent 1 TMLedgerData message to peer [" << peer->id()
                                     << "]: including " << (directResponse ? "the direct response flag and " : "")
                                     << sendCookies.size() << " response cookies. "
                                     << ": " << sha512Half(ledgerData);
        }
    }
    XRPL_ASSERT(foundSelf, "ripple::PeerImp::sendToMultiple : current peer included");
}

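sendToMultiple batches all cookies for a peer that advertises the LedgerDataCookies feature into one message, and falls back to one message per cookie for older peers. A standalone sketch of that grouping decision, with a hypothetical Msg struct rather than the protobuf types:

#include <cstdint>
#include <iostream>
#include <optional>
#include <set>
#include <vector>

struct Msg
{
    std::vector<std::uint64_t> responseCookies;  // batched routing cookies
    std::optional<std::uint64_t> requestCookie;  // legacy single cookie
    bool directResponse = false;
};

// Returns the list of messages that would be sent to one destination peer.
std::vector<Msg> buildMessages(
    bool peerSupportsBatching,
    std::set<std::optional<std::uint64_t>> const& cookies)
{
    std::vector<Msg> out;
    if (peerSupportsBatching)
    {
        // One message carrying every cookie plus the direct-response flag.
        Msg m;
        for (auto const& c : cookies)
        {
            if (c)
                m.responseCookies.push_back(*c);
            else
                m.directResponse = true;  // empty cookie == "process locally"
        }
        out.push_back(m);
    }
    else
    {
        // Legacy peer: one message per cookie (or per local-processing slot).
        for (auto const& c : cookies)
        {
            Msg m;
            m.requestCookie = c;
            out.push_back(m);
        }
    }
    return out;
}

int main()
{
    std::set<std::optional<std::uint64_t>> cookies{std::nullopt, 7, 9};
    std::cout << "batching peer: " << buildMessages(true, cookies).size() << " message(s)\n";   // 1
    std::cout << "legacy peer:   " << buildMessages(false, cookies).size() << " message(s)\n";  // 3
}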
std::shared_ptr<Ledger const>
PeerImp::getLedger(std::shared_ptr<protocol::TMGetLedger> const& m)
PeerImp::getLedger(std::shared_ptr<protocol::TMGetLedger> const& m, uint256 const& mHash)
{
    JLOG(p_journal_.trace()) << "getLedger: Ledger";
    JLOG(p_journal_.trace()) << "getLedger: Ledger " << mHash;

    std::shared_ptr<Ledger const> ledger;

@@ -2947,16 +3170,23 @@ PeerImp::getLedger(std::shared_ptr<protocol::TMGetLedger> const& m)
            if (m->has_querytype() && !m->has_requestcookie())
            {
                // Attempt to relay the request to a peer
                if (auto const peer =
                        getPeerWithLedger(overlay_, ledgerHash, m->has_ledgerseq() ? m->ledgerseq() : 0, this))
                // Note repeated messages will not relay to the same peer
                // before `getLedgerInterval` seconds. This prevents one
                // peer from getting flooded, and distributes the request
                // load. If a request has been relayed to all eligible
                // peers, then this message will not be relayed.
                if (auto const peer = getPeerWithLedger(
                        overlay_, ledgerHash, m->has_ledgerseq() ? m->ledgerseq() : 0, this, [&](Peer::id_t id) {
                            return app_.getHashRouter().shouldProcessForPeer(mHash, id, getledgerInterval);
                        }))
                {
                    m->set_requestcookie(id());
                    peer->send(std::make_shared<Message>(*m, protocol::mtGET_LEDGER));
                    JLOG(p_journal_.debug()) << "getLedger: Request relayed to peer";
                    JLOG(p_journal_.debug()) << "getLedger: Request relayed to peer [" << peer->id() << "]: " << mHash;
                    return ledger;
                }

                JLOG(p_journal_.trace()) << "getLedger: Failed to find peer to relay request";
                JLOG(p_journal_.trace()) << "getLedger: Don't have ledger with hash " << ledgerHash << ": " << mHash;
            }
        }
    }
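The comment above describes per-peer rate limiting keyed by the request hash: the same request will not be relayed to the same peer again until an interval has elapsed, which spreads the load across eligible peers. A minimal standalone sketch of that idea, assuming a steady clock and a map keyed by (hash, peer); this is an illustration, not the actual HashRouter::shouldProcessForPeer implementation:

#include <chrono>
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <utility>

using Clock = std::chrono::steady_clock;

class RelaySuppression
{
public:
    // Returns true if `hash` may be relayed to `peer` now, and records the
    // attempt; returns false if the same (hash, peer) pair was used within
    // `interval`.
    bool shouldProcessForPeer(
        std::string const& hash,
        std::uint32_t peer,
        std::chrono::seconds interval)
    {
        auto const now = Clock::now();
        auto const key = std::make_pair(hash, peer);
        auto it = lastSent_.find(key);
        if (it != lastSent_.end() && now - it->second < interval)
            return false;
        lastSent_[key] = now;
        return true;
    }

private:
    std::map<std::pair<std::string, std::uint32_t>, Clock::time_point> lastSent_;
};

int main()
{
    using namespace std::chrono_literals;
    RelaySuppression router;
    std::cout << std::boolalpha;
    std::cout << router.shouldProcessForPeer("req-abc", 7, 30s) << '\n';  // true
    std::cout << router.shouldProcessForPeer("req-abc", 7, 30s) << '\n';  // false: too soon
    std::cout << router.shouldProcessForPeer("req-abc", 8, 30s) << '\n';  // true: different peer
}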
@@ -2965,14 +3195,15 @@ PeerImp::getLedger(std::shared_ptr<protocol::TMGetLedger> const& m)
            // Attempt to find ledger by sequence
            if (m->ledgerseq() < app_.getLedgerMaster().getEarliestFetch())
            {
                JLOG(p_journal_.debug()) << "getLedger: Early ledger sequence request";
                JLOG(p_journal_.debug()) << "getLedger: Early ledger sequence request " << mHash;
            }
            else
            {
                ledger = app_.getLedgerMaster().getLedgerBySeq(m->ledgerseq());
                if (!ledger)
                {
                    JLOG(p_journal_.debug()) << "getLedger: Don't have ledger with sequence " << m->ledgerseq();
                    JLOG(p_journal_.debug()) << "getLedger: Don't have ledger with sequence " << m->ledgerseq() << ": "
                                             << mHash;
                }
            }
        }
@@ -2994,27 +3225,27 @@ PeerImp::getLedger(std::shared_ptr<protocol::TMGetLedger> const& m)
                charge(Resource::feeMalformedRequest, "get_ledger ledgerSeq");

                ledger.reset();
                JLOG(p_journal_.warn()) << "getLedger: Invalid ledger sequence " << ledgerSeq;
                JLOG(p_journal_.warn()) << "getLedger: Invalid ledger sequence " << ledgerSeq << ": " << mHash;
            }
        }
        else if (ledgerSeq < app_.getLedgerMaster().getEarliestFetch())
        {
            ledger.reset();
            JLOG(p_journal_.debug()) << "getLedger: Early ledger sequence request " << ledgerSeq;
            JLOG(p_journal_.debug()) << "getLedger: Early ledger sequence request " << ledgerSeq << ": " << mHash;
        }
    }
    else
    {
        JLOG(p_journal_.debug()) << "getLedger: Unable to find ledger";
        JLOG(p_journal_.debug()) << "getLedger: Unable to find ledger " << mHash;
    }

    return ledger;
}

std::shared_ptr<SHAMap const>
PeerImp::getTxSet(std::shared_ptr<protocol::TMGetLedger> const& m) const
PeerImp::getTxSet(std::shared_ptr<protocol::TMGetLedger> const& m, uint256 const& mHash) const
{
    JLOG(p_journal_.trace()) << "getTxSet: TX set";
    JLOG(p_journal_.trace()) << "getTxSet: TX set " << mHash;

    uint256 const txSetHash{m->ledgerhash()};
    std::shared_ptr<SHAMap> shaMap{app_.getInboundTransactions().getSet(txSetHash, false)};
@@ -3023,20 +3254,27 @@ PeerImp::getTxSet(std::shared_ptr<protocol::TMGetLedger> const& m) const
        if (m->has_querytype() && !m->has_requestcookie())
        {
            // Attempt to relay the request to a peer
            if (auto const peer = getPeerWithTree(overlay_, txSetHash, this))
            // Note repeated messages will not relay to the same peer
            // before `getLedgerInterval` seconds. This prevents one
            // peer from getting flooded, and distributes the request
            // load. If a request has been relayed to all eligible
            // peers, then this message will not be relayed.
            if (auto const peer = getPeerWithTree(overlay_, txSetHash, this, [&](Peer::id_t id) {
                    return app_.getHashRouter().shouldProcessForPeer(mHash, id, getledgerInterval);
                }))
            {
                m->set_requestcookie(id());
                peer->send(std::make_shared<Message>(*m, protocol::mtGET_LEDGER));
                JLOG(p_journal_.debug()) << "getTxSet: Request relayed";
                JLOG(p_journal_.debug()) << "getTxSet: Request relayed to peer [" << peer->id() << "]: " << mHash;
            }
            else
            {
                JLOG(p_journal_.debug()) << "getTxSet: Failed to find relay peer";
                JLOG(p_journal_.debug()) << "getTxSet: Failed to find relay peer: " << mHash;
            }
        }
        else
        {
            JLOG(p_journal_.debug()) << "getTxSet: Failed to find TX set";
            JLOG(p_journal_.debug()) << "getTxSet: Failed to find TX set " << mHash;
        }
    }

@@ -3044,7 +3282,7 @@ PeerImp::getTxSet(std::shared_ptr<protocol::TMGetLedger> const& m) const
}

void
PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m)
PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m, uint256 const& mHash)
{
    // Do not resource charge a peer responding to a relay
    if (!m->has_requestcookie())
@@ -3057,9 +3295,71 @@ PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m)
    bool fatLeaves{true};
    auto const itype{m->itype()};

    auto getDestinations = [&] {
        // If a ledger data message is generated, it's going to be sent to every
        // peer that is waiting for it.

        PeerCookieMap result;

        std::size_t numCookies = 0;
        {
            // Don't do the work under this peer if this peer is not waiting for
            // any replies
            auto myCookies = releaseRequestCookies(mHash);
            if (myCookies.empty())
            {
                JLOG(p_journal_.debug()) << "TMGetLedger: peer is no longer "
                                            "waiting for response to request: "
                                         << mHash;
                return result;
            }
            numCookies += myCookies.size();
            result[shared_from_this()] = myCookies;
        }

        std::set<HashRouter::PeerShortID> const peers = app_.getHashRouter().getPeers(mHash);
        for (auto const peerID : peers)
        {
            // This loop does not need to be done under the HashRouter
            // lock because findPeerByShortID and releaseRequestCookies
            // are thread safe, and everything else is local
            if (auto p = overlay_.findPeerByShortID(peerID))
            {
                auto cookies = p->releaseRequestCookies(mHash);
                numCookies += cookies.size();
                if (result.contains(p))
                {
                    // Unlikely, but if a request came in to this peer while
                    // iterating, add the items instead of copying /
                    // overwriting.
                    XRPL_ASSERT(
                        p.get() == this,
                        "ripple::PeerImp::processLedgerRequest : found self in "
                        "map");
                    for (auto const& cookie : cookies)
                        result[p].emplace(cookie);
                }
                else if (cookies.size())
                    result[p] = cookies;
            }
        }

        JLOG(p_journal_.debug()) << "TMGetLedger: Processing request for " << result.size() << " peers. Will send "
                                 << numCookies << " messages if successful: " << mHash;

        return result;
    };
    // Will only populate this if we're going to do work.
    PeerCookieMap destinations;

    if (itype == protocol::liTS_CANDIDATE)
    {
        if (sharedMap = getTxSet(m); !sharedMap)
        destinations = getDestinations();
        if (destinations.empty())
            // Nowhere to send the response!
            return;

        if (sharedMap = getTxSet(m, mHash); !sharedMap)
            return;
        map = sharedMap.get();

@@ -3067,8 +3367,6 @@ PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m)
        ledgerData.set_ledgerseq(0);
        ledgerData.set_ledgerhash(m->ledgerhash());
        ledgerData.set_type(protocol::liTS_CANDIDATE);
        if (m->has_requestcookie())
            ledgerData.set_requestcookie(m->requestcookie());

        // We'll already have most transactions
        fatLeaves = false;
@@ -3086,7 +3384,12 @@ PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m)
            return;
        }

        if (ledger = getLedger(m); !ledger)
        destinations = getDestinations();
        if (destinations.empty())
            // Nowhere to send the response!
            return;

        if (ledger = getLedger(m, mHash); !ledger)
            return;

        // Fill out the reply
@@ -3094,13 +3397,11 @@ PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m)
        ledgerData.set_ledgerhash(ledgerHash.begin(), ledgerHash.size());
        ledgerData.set_ledgerseq(ledger->header().seq);
        ledgerData.set_type(itype);
        if (m->has_requestcookie())
            ledgerData.set_requestcookie(m->requestcookie());

        switch (itype)
        {
            case protocol::liBASE:
                sendLedgerBase(ledger, ledgerData);
                sendLedgerBase(ledger, ledgerData, destinations);
                return;

            case protocol::liTX_NODE:
@@ -3203,7 +3504,7 @@ PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m)
    if (ledgerData.nodes_size() == 0)
        return;

    send(std::make_shared<Message>(ledgerData, protocol::mtLEDGER_DATA));
    sendToMultiple(ledgerData, destinations);
}
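getDestinations gathers, for every peer that registered interest in this request hash, the cookies it is still waiting on, and the expensive ledger work is skipped entirely when nobody is waiting any more. A standalone sketch of that aggregation step, with a hypothetical Registry standing in for HashRouter plus the per-peer cookie stores:

#include <cstdint>
#include <iostream>
#include <map>
#include <optional>
#include <set>
#include <string>
#include <utility>

using PeerId = std::uint32_t;
using Cookie = std::optional<std::uint64_t>;  // nullopt == "reply to the peer itself"
using PeerCookieMap = std::map<PeerId, std::set<Cookie>>;

// Hypothetical registry of which peers asked for which request hash, and the
// cookies each of them is still waiting on.
struct Registry
{
    std::map<std::string, PeerCookieMap> waiting;

    // Take (and forget) the cookies a peer is waiting on for `hash`.
    std::set<Cookie> release(std::string const& hash, PeerId peer)
    {
        std::set<Cookie> out;
        auto it = waiting.find(hash);
        if (it != waiting.end())
        {
            if (auto p = it->second.find(peer); p != it->second.end())
            {
                out.swap(p->second);
                it->second.erase(p);
            }
        }
        return out;
    }
};

// Build the destination map: empty means there is nothing left to answer.
PeerCookieMap getDestinations(Registry& reg, std::string const& hash, std::set<PeerId> const& peers)
{
    PeerCookieMap result;
    for (auto const peer : peers)
        if (auto cookies = reg.release(hash, peer); !cookies.empty())
            result[peer] = std::move(cookies);
    return result;
}

int main()
{
    Registry reg;
    reg.waiting["req-1"][1] = {std::nullopt};
    reg.waiting["req-1"][2] = {Cookie{7}, Cookie{9}};

    auto dests = getDestinations(reg, "req-1", {1, 2, 3});
    std::cout << dests.size() << " peer(s) still waiting\n";               // 2
    std::cout << getDestinations(reg, "req-1", {1, 2, 3}).size() << '\n';  // 0: already released
}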

int
@@ -3251,6 +3552,19 @@ PeerImp::isHighLatency() const
    return latency_ >= peerHighLatency;
}

std::set<std::optional<uint64_t>>
PeerImp::releaseRequestCookies(uint256 const& requestHash)
{
    std::set<std::optional<uint64_t>> result;
    std::lock_guard lock(cookieLock_);
    if (messageRequestCookies_.contains(requestHash))
    {
        std::swap(result, messageRequestCookies_[requestHash]);
        messageRequestCookies_.erase(requestHash);
    }
    return result;
};
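releaseRequestCookies hands back everything recorded for a request hash and clears it in one pass under the cookie mutex, so each pending cookie is consumed exactly once even with concurrent callers. The same extract-and-clear pattern in isolation, standard library only; the class and member names below are stand-ins, not PeerImp's:

#include <cstdint>
#include <iostream>
#include <map>
#include <mutex>
#include <optional>
#include <set>

class CookieStore
{
public:
    void add(std::uint64_t hash, std::optional<std::uint64_t> cookie)
    {
        std::lock_guard lock(mutex_);
        cookies_[hash].insert(cookie);
    }

    // Return all cookies recorded for `hash` and forget them, atomically with
    // respect to concurrent add() calls.
    std::set<std::optional<std::uint64_t>> release(std::uint64_t hash)
    {
        std::set<std::optional<std::uint64_t>> result;
        std::lock_guard lock(mutex_);
        if (auto it = cookies_.find(hash); it != cookies_.end())
        {
            result.swap(it->second);
            cookies_.erase(it);
        }
        return result;
    }

private:
    std::mutex mutex_;
    std::map<std::uint64_t, std::set<std::optional<std::uint64_t>>> cookies_;
};

int main()
{
    CookieStore store;
    store.add(0xabc, std::nullopt);  // "answer me directly"
    store.add(0xabc, 42);            // "forward to whoever used cookie 42"
    std::cout << store.release(0xabc).size() << '\n';  // 2
    std::cout << store.release(0xabc).size() << '\n';  // 0: already consumed
}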
void
PeerImp::Metrics::add_message(std::uint64_t bytes)
{

@@ -248,6 +248,13 @@ private:
    bool ledgerReplayEnabled_ = false;
    LedgerReplayMsgHandler ledgerReplayMsgHandler_;

    // Track message requests and responses
    // TODO: Use an expiring cache or something
    using MessageCookieMap = std::map<uint256, std::set<std::optional<uint64_t>>>;
    using PeerCookieMap = std::map<std::shared_ptr<Peer>, std::set<std::optional<uint64_t>>>;
    std::mutex mutable cookieLock_;
    MessageCookieMap messageRequestCookies_;

    friend class OverlayImpl;

    class Metrics
@@ -489,6 +496,13 @@ public:
        return txReduceRelayEnabled_;
    }

    //
    // Messages
    //

    std::set<std::optional<uint64_t>>
    releaseRequestCookies(uint256 const& requestHash) override;

private:
    /**
     * @brief Handles a failure associated with a specific error code.
@@ -774,16 +788,22 @@ private:
        std::shared_ptr<protocol::TMValidation> const& packet);

    void
    sendLedgerBase(std::shared_ptr<Ledger const> const& ledger, protocol::TMLedgerData& ledgerData);

    std::shared_ptr<Ledger const>
    getLedger(std::shared_ptr<protocol::TMGetLedger> const& m);

    std::shared_ptr<SHAMap const>
    getTxSet(std::shared_ptr<protocol::TMGetLedger> const& m) const;
    sendLedgerBase(
        std::shared_ptr<Ledger const> const& ledger,
        protocol::TMLedgerData& ledgerData,
        PeerCookieMap const& destinations);

    void
    processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m);
    sendToMultiple(protocol::TMLedgerData& ledgerData, PeerCookieMap const& destinations);

    std::shared_ptr<Ledger const>
    getLedger(std::shared_ptr<protocol::TMGetLedger> const& m, uint256 const& mHash);

    std::shared_ptr<SHAMap const>
    getTxSet(std::shared_ptr<protocol::TMGetLedger> const& m, uint256 const& mHash) const;

    void
    processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m, uint256 const& mHash);
};

//------------------------------------------------------------------------------

@@ -1,8 +1,11 @@
#include <xrpld/app/main/Application.h>
#include <xrpld/app/misc/HashRouter.h>
#include <xrpld/core/JobQueue.h>
#include <xrpld/overlay/Overlay.h>
#include <xrpld/overlay/PeerSet.h>

#include <xrpl/core/JobQueue.h>
#include <xrpl/protocol/digest.h>

namespace xrpl {

@@ -81,16 +84,44 @@ PeerSetImpl::sendRequest(
    std::shared_ptr<Peer> const& peer)
{
    auto packet = std::make_shared<Message>(message, type);

    auto const messageHash = [&]() {
        auto const packetBuffer = packet->getBuffer(compression::Compressed::Off);
        return sha512Half(Slice(packetBuffer.data(), packetBuffer.size()));
    }();

    // Allow messages to be re-sent to the same peer after a delay
    using namespace std::chrono_literals;
    constexpr std::chrono::seconds interval = 30s;

    if (peer)
    {
        peer->send(packet);
        if (app_.getHashRouter().shouldProcessForPeer(messageHash, peer->id(), interval))
        {
            JLOG(journal_.trace()) << "Sending " << protocolMessageName(type) << " message to [" << peer->id()
                                   << "]: " << messageHash;
            peer->send(packet);
        }
        else
            JLOG(journal_.debug()) << "Suppressing sending duplicate " << protocolMessageName(type) << " message to ["
                                   << peer->id() << "]: " << messageHash;
        return;
    }

    for (auto id : peers_)
    {
        if (auto p = app_.overlay().findPeerByShortID(id))
            p->send(packet);
        {
            if (app_.getHashRouter().shouldProcessForPeer(messageHash, p->id(), interval))
            {
                JLOG(journal_.trace()) << "Sending " << protocolMessageName(type) << " message to [" << p->id()
                                       << "]: " << messageHash;
                p->send(packet);
            }
            else
                JLOG(journal_.debug()) << "Suppressing sending duplicate " << protocolMessageName(type)
                                       << " message to [" << p->id() << "]: " << messageHash;
        }
    }
}

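sendRequest now serializes and hashes the packet once, then consults the per-peer suppression before every send, so a re-broadcast of the identical request within the 30-second interval is skipped for peers that already received it. A standalone sketch of that send loop, reusing the interval idea from the earlier suppression sketch; the packet string and peer list are hypothetical:

#include <chrono>
#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

using Clock = std::chrono::steady_clock;

int main()
{
    using namespace std::chrono_literals;

    // Hash the serialized request once; every peer is checked against the same key.
    std::string const packet = "GET_LEDGER seq=90000000";
    std::size_t const messageHash = std::hash<std::string>{}(packet);

    // (hash, peer) -> last send time; stand-in for the shared suppression state.
    std::map<std::pair<std::size_t, std::uint32_t>, Clock::time_point> lastSent;
    auto shouldSend = [&](std::uint32_t peer, std::chrono::seconds interval) {
        auto const now = Clock::now();
        auto& stamp = lastSent[{messageHash, peer}];
        if (stamp != Clock::time_point{} && now - stamp < interval)
            return false;  // duplicate within the interval: suppress
        stamp = now;
        return true;
    };

    std::vector<std::uint32_t> const peers{1, 2, 3};
    for (int round = 0; round < 2; ++round)
        for (auto const peer : peers)
            std::cout << "round " << round << ", peer " << peer << ": "
                      << (shouldSend(peer, 30s) ? "send" : "suppress") << '\n';
    // Round 0 sends to everyone; round 1, run immediately after, suppresses all three.
}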
@@ -24,6 +24,12 @@ protocolMessageType(protocol::TMGetLedger const&)
    return protocol::mtGET_LEDGER;
}

inline protocol::MessageType
protocolMessageType(protocol::TMLedgerData const&)
{
    return protocol::mtLEDGER_DATA;
}

inline protocol::MessageType
protocolMessageType(protocol::TMReplayDeltaRequest const&)
{
@@ -425,4 +431,64 @@ invokeProtocolMessage(Buffers const& buffers, Handler& handler, std::size_t& hin

} // namespace xrpl

namespace protocol {

template <class Hasher>
void
hash_append(Hasher& h, TMGetLedger const& msg)
{
    using beast::hash_append;
    using namespace ripple;
    hash_append(h, safe_cast<int>(protocolMessageType(msg)));
    hash_append(h, safe_cast<int>(msg.itype()));
    if (msg.has_ltype())
        hash_append(h, safe_cast<int>(msg.ltype()));

    if (msg.has_ledgerhash())
        hash_append(h, msg.ledgerhash());

    if (msg.has_ledgerseq())
        hash_append(h, msg.ledgerseq());

    for (auto const& nodeId : msg.nodeids())
        hash_append(h, nodeId);
    hash_append(h, msg.nodeids_size());

    // Do NOT include the request cookie. It does not affect the content of the
    // request, but only where to route the results.
    // if (msg.has_requestcookie())
    //     hash_append(h, msg.requestcookie());

    if (msg.has_querytype())
        hash_append(h, safe_cast<int>(msg.querytype()));

    if (msg.has_querydepth())
        hash_append(h, msg.querydepth());
}

template <class Hasher>
void
hash_append(Hasher& h, TMLedgerData const& msg)
{
    using beast::hash_append;
    using namespace ripple;
    hash_append(h, safe_cast<int>(protocolMessageType(msg)));
    hash_append(h, msg.ledgerhash());
    hash_append(h, msg.ledgerseq());
    hash_append(h, safe_cast<int>(msg.type()));
    for (auto const& node : msg.nodes())
    {
        hash_append(h, node.nodedata());
        if (node.has_nodeid())
            hash_append(h, node.nodeid());
    }
    hash_append(h, msg.nodes_size());
    if (msg.has_requestcookie())
        hash_append(h, msg.requestcookie());
    if (msg.has_error())
        hash_append(h, safe_cast<int>(msg.error()));
}

} // namespace protocol

#endif

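The TMGetLedger overload above deliberately skips the request cookie, as its comment explains: the cookie only says where to route the results, so two requests that differ only in routing hash identically and can be deduplicated. A tiny standalone illustration of that design choice, using a hypothetical GetLedgerReq struct and an ad-hoc combiner rather than beast's hash_append:

#include <cstdint>
#include <functional>
#include <iostream>
#include <optional>
#include <string>

struct GetLedgerReq
{
    std::string ledgerHash;
    std::uint32_t ledgerSeq = 0;
    std::optional<std::uint64_t> requestCookie;  // routing only, excluded below
};

// Order-dependent combiner in the spirit of hash_append: feed each content
// field, never the routing cookie.
std::size_t contentHash(GetLedgerReq const& r)
{
    std::size_t h = std::hash<std::string>{}(r.ledgerHash);
    h = h * 1000003u ^ std::hash<std::uint32_t>{}(r.ledgerSeq);
    return h;
}

int main()
{
    GetLedgerReq a{"ABCD1234", 90'000'000, std::nullopt};
    GetLedgerReq b{"ABCD1234", 90'000'000, 42};  // same request, different route
    std::cout << std::boolalpha
              << (contentHash(a) == contentHash(b)) << '\n';  // true: deduplicable
}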
@@ -21,7 +21,9 @@ namespace xrpl {
constexpr ProtocolVersion const supportedProtocolList[]
{
    {2, 1},
    {2, 2}
    {2, 2},
    // Adds TMLedgerData::responseCookies and directResponse
    {2, 3}
};
// clang-format on

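The list above advertises a new 2.3 protocol version so a peer can tell whether TMLedgerData's responseCookies and directResponse fields will be understood. Handshakes of this kind typically settle on the highest version both sides list; a small standalone sketch of that selection, not rippled's actual negotiation code:

#include <algorithm>
#include <iostream>
#include <iterator>
#include <optional>
#include <utility>
#include <vector>

using Version = std::pair<int, int>;  // {major, minor}

// Pick the highest version that appears in both advertised lists.
std::optional<Version> negotiate(std::vector<Version> a, std::vector<Version> b)
{
    std::sort(a.begin(), a.end());
    std::sort(b.begin(), b.end());
    std::vector<Version> common;
    std::set_intersection(a.begin(), a.end(), b.begin(), b.end(), std::back_inserter(common));
    if (common.empty())
        return std::nullopt;
    return common.back();
}

int main()
{
    // An upgraded node offers 2.1-2.3; an older node only knows 2.1-2.2.
    auto const v = negotiate({{2, 1}, {2, 2}, {2, 3}}, {{2, 1}, {2, 2}});
    if (v)
        std::cout << "speaking " << v->first << "." << v->second << '\n';  // 2.2
}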
@@ -3,7 +3,11 @@ cmake_minimum_required(VERSION 3.21)
set(name example)
set(version 0.1.0)

project(${name} VERSION ${version} LANGUAGES CXX)
project(
    ${name}
    VERSION ${version}
    LANGUAGES CXX
)

find_package(xrpl CONFIG REQUIRED)