Compare commits

..

1 Commits

Author SHA1 Message Date
Bart
0f557df7f1 ci: Run pipelines on all macOS runners 2026-01-07 12:28:59 -05:00
1121 changed files with 69731 additions and 38908 deletions

View File

@@ -37,7 +37,7 @@ BinPackParameters: false
BreakBeforeBinaryOperators: false BreakBeforeBinaryOperators: false
BreakBeforeTernaryOperators: true BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true BreakConstructorInitializersBeforeComma: true
ColumnLimit: 120 ColumnLimit: 80
CommentPragmas: "^ IWYU pragma:" CommentPragmas: "^ IWYU pragma:"
ConstructorInitializerAllOnOneLineOrOnePerLine: true ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4 ConstructorInitializerIndentWidth: 4

View File

@@ -1,247 +0,0 @@
_help_parse: Options affecting listfile parsing
parse:
_help_additional_commands:
- Specify structure for custom cmake functions
additional_commands:
target_protobuf_sources:
pargs:
- target
- prefix
kwargs:
PROTOS: "*"
LANGUAGE: cpp
IMPORT_DIRS: "*"
GENERATE_EXTENSIONS: "*"
PLUGIN: "*"
_help_override_spec:
- Override configurations per-command where available
override_spec: {}
_help_vartags:
- Specify variable tags.
vartags: []
_help_proptags:
- Specify property tags.
proptags: []
_help_format: Options affecting formatting.
format:
_help_disable:
- Disable formatting entirely, making cmake-format a no-op
disable: false
_help_line_width:
- How wide to allow formatted cmake files
line_width: 120
_help_tab_size:
- How many spaces to tab for indent
tab_size: 4
_help_use_tabchars:
- If true, lines are indented using tab characters (utf-8
- 0x09) instead of <tab_size> space characters (utf-8 0x20).
- In cases where the layout would require a fractional tab
- character, the behavior of the fractional indentation is
- governed by <fractional_tab_policy>
use_tabchars: false
_help_fractional_tab_policy:
- If <use_tabchars> is True, then the value of this variable
- indicates how fractional indentions are handled during
- whitespace replacement. If set to 'use-space', fractional
- indentation is left as spaces (utf-8 0x20). If set to
- "`round-up` fractional indentation is replaced with a single"
- tab character (utf-8 0x09) effectively shifting the column
- to the next tabstop
fractional_tab_policy: use-space
_help_max_subgroups_hwrap:
- If an argument group contains more than this many sub-groups
- (parg or kwarg groups) then force it to a vertical layout.
max_subgroups_hwrap: 4
_help_max_pargs_hwrap:
- If a positional argument group contains more than this many
- arguments, then force it to a vertical layout.
max_pargs_hwrap: 5
_help_max_rows_cmdline:
- If a cmdline positional group consumes more than this many
- lines without nesting, then invalidate the layout (and nest)
max_rows_cmdline: 2
_help_separate_ctrl_name_with_space:
- If true, separate flow control names from their parentheses
- with a space
separate_ctrl_name_with_space: true
_help_separate_fn_name_with_space:
- If true, separate function names from parentheses with a
- space
separate_fn_name_with_space: false
_help_dangle_parens:
- If a statement is wrapped to more than one line, than dangle
- the closing parenthesis on its own line.
dangle_parens: false
_help_dangle_align:
- If the trailing parenthesis must be 'dangled' on its on
- "line, then align it to this reference: `prefix`: the start"
- "of the statement, `prefix-indent`: the start of the"
- "statement, plus one indentation level, `child`: align to"
- the column of the arguments
dangle_align: prefix
_help_min_prefix_chars:
- If the statement spelling length (including space and
- parenthesis) is smaller than this amount, then force reject
- nested layouts.
min_prefix_chars: 18
_help_max_prefix_chars:
- If the statement spelling length (including space and
- parenthesis) is larger than the tab width by more than this
- amount, then force reject un-nested layouts.
max_prefix_chars: 10
_help_max_lines_hwrap:
- If a candidate layout is wrapped horizontally but it exceeds
- this many lines, then reject the layout.
max_lines_hwrap: 2
_help_line_ending:
- What style line endings to use in the output.
line_ending: unix
_help_command_case:
- Format command names consistently as 'lower' or 'upper' case
command_case: canonical
_help_keyword_case:
- Format keywords consistently as 'lower' or 'upper' case
keyword_case: unchanged
_help_always_wrap:
- A list of command names which should always be wrapped
always_wrap: []
_help_enable_sort:
- If true, the argument lists which are known to be sortable
- will be sorted lexicographicall
enable_sort: true
_help_autosort:
- If true, the parsers may infer whether or not an argument
- list is sortable (without annotation).
autosort: true
_help_require_valid_layout:
- By default, if cmake-format cannot successfully fit
- everything into the desired linewidth it will apply the
- last, most aggressive attempt that it made. If this flag is
- True, however, cmake-format will print error, exit with non-
- zero status code, and write-out nothing
require_valid_layout: false
_help_layout_passes:
- A dictionary mapping layout nodes to a list of wrap
- decisions. See the documentation for more information.
layout_passes: {}
_help_markup: Options affecting comment reflow and formatting.
markup:
_help_bullet_char:
- What character to use for bulleted lists
bullet_char: "-"
_help_enum_char:
- What character to use as punctuation after numerals in an
- enumerated list
enum_char: .
_help_first_comment_is_literal:
- If comment markup is enabled, don't reflow the first comment
- block in each listfile. Use this to preserve formatting of
- your copyright/license statements.
first_comment_is_literal: false
_help_literal_comment_pattern:
- If comment markup is enabled, don't reflow any comment block
- which matches this (regex) pattern. Default is `None`
- (disabled).
literal_comment_pattern: null
_help_fence_pattern:
- Regular expression to match preformat fences in comments
- default= ``r'^\s*([`~]{3}[`~]*)(.*)$'``
fence_pattern: ^\s*([`~]{3}[`~]*)(.*)$
_help_ruler_pattern:
- Regular expression to match rulers in comments default=
- '``r''^\s*[^\w\s]{3}.*[^\w\s]{3}$''``'
ruler_pattern: ^\s*[^\w\s]{3}.*[^\w\s]{3}$
_help_explicit_trailing_pattern:
- If a comment line matches starts with this pattern then it
- is explicitly a trailing comment for the preceding
- argument. Default is '#<'
explicit_trailing_pattern: "#<"
_help_hashruler_min_length:
- If a comment line starts with at least this many consecutive
- hash characters, then don't lstrip() them off. This allows
- for lazy hash rulers where the first hash char is not
- separated by space
hashruler_min_length: 10
_help_canonicalize_hashrulers:
- If true, then insert a space between the first hash char and
- remaining hash chars in a hash ruler, and normalize its
- length to fill the column
canonicalize_hashrulers: true
_help_enable_markup:
- enable comment markup parsing and reflow
enable_markup: false
_help_lint: Options affecting the linter
lint:
_help_disabled_codes:
- a list of lint codes to disable
disabled_codes: []
_help_function_pattern:
- regular expression pattern describing valid function names
function_pattern: "[0-9a-z_]+"
_help_macro_pattern:
- regular expression pattern describing valid macro names
macro_pattern: "[0-9A-Z_]+"
_help_global_var_pattern:
- regular expression pattern describing valid names for
- variables with global (cache) scope
global_var_pattern: "[A-Z][0-9A-Z_]+"
_help_internal_var_pattern:
- regular expression pattern describing valid names for
- variables with global scope (but internal semantic)
internal_var_pattern: _[A-Z][0-9A-Z_]+
_help_local_var_pattern:
- regular expression pattern describing valid names for
- variables with local scope
local_var_pattern: "[a-z][a-z0-9_]+"
_help_private_var_pattern:
- regular expression pattern describing valid names for
- privatedirectory variables
private_var_pattern: _[0-9a-z_]+
_help_public_var_pattern:
- regular expression pattern describing valid names for public
- directory variables
public_var_pattern: "[A-Z][0-9A-Z_]+"
_help_argument_var_pattern:
- regular expression pattern describing valid names for
- function/macro arguments and loop variables.
argument_var_pattern: "[a-z][a-z0-9_]+"
_help_keyword_pattern:
- regular expression pattern describing valid names for
- keywords used in functions or macros
keyword_pattern: "[A-Z][0-9A-Z_]+"
_help_max_conditionals_custom_parser:
- In the heuristic for C0201, how many conditionals to match
- within a loop in before considering the loop a parser.
max_conditionals_custom_parser: 2
_help_min_statement_spacing:
- Require at least this many newlines between statements
min_statement_spacing: 1
_help_max_statement_spacing:
- Require no more than this many newlines between statements
max_statement_spacing: 2
max_returns: 6
max_branches: 12
max_arguments: 5
max_localvars: 15
max_statements: 50
_help_encode: Options affecting file encoding
encode:
_help_emit_byteorder_mark:
- If true, emit the unicode byte-order mark (BOM) at the start
- of the file
emit_byteorder_mark: false
_help_input_encoding:
- Specify the encoding of the input file. Defaults to utf-8
input_encoding: utf-8
_help_output_encoding:
- Specify the encoding of the output file. Defaults to utf-8.
- Note that cmake only claims to support utf-8 so be careful
- when using anything else
output_encoding: utf-8
_help_misc: Miscellaneous configurations options.
misc:
_help_per_command:
- A dictionary containing any per-command configuration
- overrides. Currently only `command_case` is supported.
per_command: {}

View File

@@ -28,7 +28,6 @@ ignoreRegExpList:
- /[\['"`]-[DWw][a-zA-Z0-9_-]+['"`\]]/g # compile flags - /[\['"`]-[DWw][a-zA-Z0-9_-]+['"`\]]/g # compile flags
suggestWords: suggestWords:
- xprl->xrpl - xprl->xrpl
- xprld->xrpld
- unsynched->unsynced - unsynched->unsynced
- synched->synced - synched->synced
- synch->sync - synch->sync
@@ -52,7 +51,6 @@ words:
- Btrfs - Btrfs
- canonicality - canonicality
- checkme - checkme
- choco
- chrono - chrono
- citardauq - citardauq
- clawback - clawback
@@ -62,7 +60,6 @@ words:
- compr - compr
- conanfile - conanfile
- conanrun - conanrun
- confs
- connectability - connectability
- coro - coro
- coros - coros
@@ -71,7 +68,6 @@ words:
- cryptoconditional - cryptoconditional
- cryptoconditions - cryptoconditions
- csprng - csprng
- ctest
- ctid - ctid
- currenttxhash - currenttxhash
- daria - daria
@@ -87,21 +83,19 @@ words:
- doxyfile - doxyfile
- dxrpl - dxrpl
- endmacro - endmacro
- endpointv
- exceptioned - exceptioned
- Falco - Falco
- finalizers - finalizers
- firewalled - firewalled
- fmtdur - fmtdur
- fsanitize
- funclets - funclets
- gcov - gcov
- gcovr - gcovr
- ghead
- Gnutella - Gnutella
- gpgcheck - gpgcheck
- gpgkey - gpgkey
- hotwallet - hotwallet
- hwrap
- ifndef - ifndef
- inequation - inequation
- insuf - insuf
@@ -109,14 +103,11 @@ words:
- iou - iou
- ious - ious
- isrdc - isrdc
- itype
- jemalloc - jemalloc
- jlog - jlog
- keylet - keylet
- keylets - keylets
- keyvadb - keyvadb
- kwarg
- kwargs
- ledgerentry - ledgerentry
- ledgerhash - ledgerhash
- ledgerindex - ledgerindex
@@ -132,7 +123,6 @@ words:
- lseq - lseq
- lsmf - lsmf
- ltype - ltype
- mcmodel
- MEMORYSTATUSEX - MEMORYSTATUSEX
- Merkle - Merkle
- Metafuncton - Metafuncton
@@ -166,7 +156,6 @@ words:
- nunl - nunl
- Nyffenegger - Nyffenegger
- ostr - ostr
- pargs
- partitioner - partitioner
- paychan - paychan
- paychans - paychans
@@ -202,12 +191,10 @@ words:
- roundings - roundings
- sahyadri - sahyadri
- Satoshi - Satoshi
- scons
- secp - secp
- sendq - sendq
- seqit - seqit
- sf - sf
- SFIELD
- shamap - shamap
- shamapitem - shamapitem
- sidechain - sidechain
@@ -234,8 +221,6 @@ words:
- takergets - takergets
- takerpays - takerpays
- ters - ters
- TMEndpointv2
- trixie
- tx - tx
- txid - txid
- txids - txids
@@ -243,8 +228,6 @@ words:
- txn - txn
- txns - txns
- txs - txs
- UBSAN
- ubsan
- umant - umant
- unacquired - unacquired
- unambiguity - unambiguity
@@ -280,7 +263,6 @@ words:
- xbridge - xbridge
- xchain - xchain
- ximinez - ximinez
- EXPECT_STREQ
- XMACRO - XMACRO
- xrpkuwait - xrpkuwait
- xrpl - xrpl

1
.gitattributes vendored
View File

@@ -1,6 +1,5 @@
# Set default behaviour, in case users don't have core.autocrlf set. # Set default behaviour, in case users don't have core.autocrlf set.
#* text=auto #* text=auto
# cspell: disable
# Visual Studio # Visual Studio
*.sln text eol=crlf *.sln text eol=crlf

View File

@@ -18,10 +18,6 @@ inputs:
description: "The logging verbosity." description: "The logging verbosity."
required: false required: false
default: "verbose" default: "verbose"
sanitizers:
description: "The sanitizers to enable."
required: false
default: ""
runs: runs:
using: composite using: composite
@@ -33,11 +29,9 @@ runs:
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }} BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
BUILD_TYPE: ${{ inputs.build_type }} BUILD_TYPE: ${{ inputs.build_type }}
LOG_VERBOSITY: ${{ inputs.log_verbosity }} LOG_VERBOSITY: ${{ inputs.log_verbosity }}
SANITIZERS: ${{ inputs.sanitizers }}
run: | run: |
echo 'Installing dependencies.' echo 'Installing dependencies.'
conan install \ conan install \
--profile ci \
--build="${BUILD_OPTION}" \ --build="${BUILD_OPTION}" \
--options:host='&:tests=True' \ --options:host='&:tests=True' \
--options:host='&:xrpld=True' \ --options:host='&:xrpld=True' \

View File

@@ -1,44 +0,0 @@
name: Generate build version number
description: "Generate build version number."
outputs:
version:
description: "The generated build version number."
value: ${{ steps.version.outputs.version }}
runs:
using: composite
steps:
# When a tag is pushed, the version is used as-is.
- name: Generate version for tag event
if: ${{ github.event_name == 'tag' }}
shell: bash
env:
VERSION: ${{ github.ref_name }}
run: echo "VERSION=${VERSION}" >> "${GITHUB_ENV}"
# When a tag is not pushed, then the version (e.g. 1.2.3-b0) is extracted
# from the BuildInfo.cpp file and the shortened commit hash appended to it.
# We use a plus sign instead of a hyphen because Conan recipe versions do
# not support two hyphens.
- name: Generate version for non-tag event
if: ${{ github.event_name != 'tag' }}
shell: bash
run: |
echo 'Extracting version from BuildInfo.cpp.'
VERSION="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')"
if [[ -z "${VERSION}" ]]; then
echo 'Unable to extract version from BuildInfo.cpp.'
exit 1
fi
echo 'Appending shortened commit hash to version.'
SHA='${{ github.sha }}'
VERSION="${VERSION}+${SHA:0:7}"
echo "VERSION=${VERSION}" >> "${GITHUB_ENV}"
- name: Output version
id: version
shell: bash
run: echo "version=${VERSION}" >> "${GITHUB_OUTPUT}"

View File

@@ -2,11 +2,11 @@ name: Setup Conan
description: "Set up Conan configuration, profile, and remote." description: "Set up Conan configuration, profile, and remote."
inputs: inputs:
remote_name: conan_remote_name:
description: "The name of the Conan remote to use." description: "The name of the Conan remote to use."
required: false required: false
default: xrplf default: xrplf
remote_url: conan_remote_url:
description: "The URL of the Conan endpoint to use." description: "The URL of the Conan endpoint to use."
required: false required: false
default: https://conan.ripplex.io default: https://conan.ripplex.io
@@ -28,19 +28,19 @@ runs:
shell: bash shell: bash
run: | run: |
echo 'Installing profile.' echo 'Installing profile.'
conan config install conan/profiles/ -tf $(conan config home)/profiles/ conan config install conan/profiles/default -tf $(conan config home)/profiles/
echo 'Conan profile:' echo 'Conan profile:'
conan profile show --profile ci conan profile show
- name: Set up Conan remote - name: Set up Conan remote
shell: bash shell: bash
env: env:
REMOTE_NAME: ${{ inputs.remote_name }} CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
REMOTE_URL: ${{ inputs.remote_url }} CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
run: | run: |
echo "Adding Conan remote '${REMOTE_NAME}' at '${REMOTE_URL}'." echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
conan remote add --index 0 --force "${REMOTE_NAME}" "${REMOTE_URL}" conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"
echo 'Listing Conan remotes.' echo 'Listing Conan remotes.'
conan remote list conan remote list

View File

@@ -70,7 +70,7 @@ that `test` code should _never_ be included in `xrpl` or `xrpld` code.)
## Validation ## Validation
The [levelization](generate.py) script takes no parameters, The [levelization](generate.sh) script takes no parameters,
reads no environment variables, and can be run from any directory, reads no environment variables, and can be run from any directory,
as long as it is in the expected location in the rippled repo. as long as it is in the expected location in the rippled repo.
It can be run at any time from within a checked out repo, and will It can be run at any time from within a checked out repo, and will
@@ -84,7 +84,7 @@ It generates many files of [results](results):
to the destination module, de-duped, and with frequency counts. to the destination module, de-duped, and with frequency counts.
- `includes/`: A directory where each file represents a module and - `includes/`: A directory where each file represents a module and
contains a list of modules and counts that the module _includes_. contains a list of modules and counts that the module _includes_.
- `included_by/`: Similar to `includes/`, but the other way around. Each - `includedby/`: Similar to `includes/`, but the other way around. Each
file represents a module and contains a list of modules and counts file represents a module and contains a list of modules and counts
that _include_ the module. that _include_ the module.
- [`loops.txt`](results/loops.txt): A list of direct loops detected - [`loops.txt`](results/loops.txt): A list of direct loops detected
@@ -104,7 +104,7 @@ It generates many files of [results](results):
Github Actions workflow to test that levelization loops haven't Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or they are improvements or not, so if you have resolved any issues or
done anything else to improve levelization, run `generate.py`, done anything else to improve levelization, run `levelization.sh`,
and commit the updated results. and commit the updated results.
The `loops.txt` and `ordering.txt` files relate the modules The `loops.txt` and `ordering.txt` files relate the modules
@@ -128,7 +128,7 @@ The committed files hide the detailed values intentionally, to
prevent false alarms and merging issues, and because it's easy to prevent false alarms and merging issues, and because it's easy to
get those details locally. get those details locally.
1. Run `generate.py` 1. Run `levelization.sh`
2. Grep the modules in `paths.txt`. 2. Grep the modules in `paths.txt`.
- For example, if a cycle is found `A ~= B`, simply `grep -w - For example, if a cycle is found `A ~= B`, simply `grep -w
A .github/scripts/levelization/results/paths.txt | grep -w B` A .github/scripts/levelization/results/paths.txt | grep -w B`

View File

@@ -1,369 +0,0 @@
#!/usr/bin/env python3
"""
Usage: generate.py
This script takes no parameters, and can be run from any directory,
as long as it is in the expected.
location in the repo.
"""
import os
import re
import subprocess
import sys
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Tuple, Set, Optional
# Compile regex patterns once at module level
INCLUDE_PATTERN = re.compile(r"^\s*#include.*/.*\.h")
INCLUDE_PATH_PATTERN = re.compile(r'[<"]([^>"]+)[>"]')
def dictionary_sort_key(s: str) -> str:
"""
Create a sort key that mimics 'sort -d' (dictionary order).
Dictionary order only considers blanks and alphanumeric characters.
This means punctuation like '.' is ignored during sorting.
"""
# Keep only alphanumeric characters and spaces
return "".join(c for c in s if c.isalnum() or c.isspace())
def get_level(file_path: str) -> str:
"""
Extract the level from a file path (second and third directory components).
Equivalent to bash: cut -d/ -f 2,3
Examples:
src/xrpld/app/main.cpp -> xrpld.app
src/libxrpl/protocol/STObject.cpp -> libxrpl.protocol
include/xrpl/basics/base_uint.h -> xrpl.basics
"""
parts = file_path.split("/")
# Get fields 2 and 3 (indices 1 and 2 in 0-based indexing)
if len(parts) >= 3:
level = f"{parts[1]}/{parts[2]}"
elif len(parts) >= 2:
level = f"{parts[1]}/toplevel"
else:
level = file_path
# If the "level" indicates a file, cut off the filename
if "." in level.split("/")[-1]: # Avoid Path object creation
# Use the "toplevel" label as a workaround for `sort`
# inconsistencies between different utility versions
level = level.rsplit("/", 1)[0] + "/toplevel"
return level.replace("/", ".")
def extract_include_level(include_line: str) -> Optional[str]:
"""
Extract the include path from an #include directive.
Gets the first two directory components from the include path.
Equivalent to bash: cut -d/ -f 1,2
Examples:
#include <xrpl/basics/base_uint.h> -> xrpl.basics
#include "xrpld/app/main/Application.h" -> xrpld.app
"""
# Remove everything before the quote or angle bracket
match = INCLUDE_PATH_PATTERN.search(include_line)
if not match:
return None
include_path = match.group(1)
parts = include_path.split("/")
# Get first two fields (indices 0 and 1)
if len(parts) >= 2:
include_level = f"{parts[0]}/{parts[1]}"
else:
include_level = include_path
# If the "includelevel" indicates a file, cut off the filename
if "." in include_level.split("/")[-1]: # Avoid Path object creation
include_level = include_level.rsplit("/", 1)[0] + "/toplevel"
return include_level.replace("/", ".")
def find_repo_root(start_path: Path, depth_limit: int = 10) -> Path:
"""
Find the repository root by looking for .git directory or src/include folders.
Walks up the directory tree from the start path.
"""
current = start_path.resolve()
# Walk up the directory tree
for _ in range(depth_limit): # Limit search depth to prevent infinite loops
# Check if this directory has src or include folders
has_src = (current / "src").exists()
has_include = (current / "include").exists()
if has_src or has_include:
return current
# Check if this is a git repository root
if (current / ".git").exists():
# Check if it has src or include nearby
if has_src or has_include:
return current
# Move up one level
parent = current.parent
if parent == current: # Reached filesystem root
break
current = parent
# If we couldn't find it, raise an error
raise RuntimeError(
"Could not find repository root. "
"Expected to find a directory containing 'src' and/or 'include' folders."
)
def get_scan_directories(repo_root: Path) -> List[Path]:
"""
Get the list of directories to scan for include files.
Returns paths that actually exist.
"""
directories = []
for dir_name in ["include", "src"]:
dir_path = repo_root / dir_name
if dir_path.exists() and dir_path.is_dir():
directories.append(dir_path)
if not directories:
raise RuntimeError(f"No 'src' or 'include' directories found in {repo_root}")
return directories
def main():
# Change to the script's directory
script_dir = Path(__file__).parent.resolve()
os.chdir(script_dir)
# If the shell is interactive, clean up any flotsam before analyzing
# Match bash behavior: check if PS1 is set (indicates interactive shell)
# When running a script, PS1 is not set even if stdin/stdout are TTYs
if os.environ.get("PS1"):
try:
subprocess.run(["git", "clean", "-ix"], check=False, timeout=30)
except (subprocess.TimeoutExpired, KeyboardInterrupt):
print("Skipping git clean...")
except Exception:
# If git clean fails for any reason, just continue
pass
# Clean up and create results directory
results_dir = script_dir / "results"
if results_dir.exists():
import shutil
shutil.rmtree(results_dir)
results_dir.mkdir()
# Find the repository root by searching for src and include directories
try:
repo_root = find_repo_root(script_dir)
scan_dirs = get_scan_directories(repo_root)
print(f"Found repository root: {repo_root}")
print(f"Scanning directories:")
for scan_dir in scan_dirs:
print(f" - {scan_dir.relative_to(repo_root)}")
except RuntimeError as e:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
print("\nScanning for raw includes...")
# Find all #include directives
raw_includes: List[Tuple[str, str]] = []
rawincludes_file = results_dir / "rawincludes.txt"
# Write to file as we go to avoid storing everything in memory
with open(rawincludes_file, "w", buffering=8192) as raw_f:
for dir_path in scan_dirs:
print(f" Scanning {dir_path.relative_to(repo_root)}...")
for file_path in dir_path.rglob("*"):
if not file_path.is_file():
continue
try:
rel_path_str = str(file_path.relative_to(repo_root))
# Read file with larger buffer for better performance
with open(
file_path,
"r",
encoding="utf-8",
errors="ignore",
buffering=8192,
) as f:
for line in f:
# Quick check before regex
if "#include" not in line or "boost" in line:
continue
if INCLUDE_PATTERN.match(line):
line_stripped = line.strip()
entry = f"{rel_path_str}:{line_stripped}\n"
print(entry, end="")
raw_f.write(entry)
raw_includes.append((rel_path_str, line_stripped))
except Exception as e:
print(f"Error reading {file_path}: {e}", file=sys.stderr)
# Build levelization paths and count directly (no need to sort first)
print("Build levelization paths")
path_counts: Dict[Tuple[str, str], int] = defaultdict(int)
for file_path, include_line in raw_includes:
level = get_level(file_path)
include_level = extract_include_level(include_line)
if include_level and level != include_level:
path_counts[(level, include_level)] += 1
# Sort and deduplicate paths (using dictionary order like bash 'sort -d')
print("Sort and deduplicate paths")
paths_file = results_dir / "paths.txt"
with open(paths_file, "w") as f:
# Sort using dictionary order: only alphanumeric and spaces matter
sorted_items = sorted(
path_counts.items(),
key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
)
for (level, include_level), count in sorted_items:
line = f"{count:7} {level} {include_level}\n"
print(line.rstrip())
f.write(line)
# Split into flat-file database
print("Split into flat-file database")
includes_dir = results_dir / "includes"
included_by_dir = results_dir / "included_by"
includes_dir.mkdir()
included_by_dir.mkdir()
# Batch writes by grouping data first to avoid repeated file opens
includes_data: Dict[str, List[Tuple[str, int]]] = defaultdict(list)
included_by_data: Dict[str, List[Tuple[str, int]]] = defaultdict(list)
# Process in sorted order to match bash script behavior (dictionary order)
sorted_items = sorted(
path_counts.items(),
key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
)
for (level, include_level), count in sorted_items:
includes_data[level].append((include_level, count))
included_by_data[include_level].append((level, count))
# Write all includes files in sorted order (dictionary order)
for level in sorted(includes_data.keys(), key=dictionary_sort_key):
entries = includes_data[level]
with open(includes_dir / level, "w") as f:
for include_level, count in entries:
line = f"{include_level} {count}\n"
print(line.rstrip())
f.write(line)
# Write all included_by files in sorted order (dictionary order)
for include_level in sorted(included_by_data.keys(), key=dictionary_sort_key):
entries = included_by_data[include_level]
with open(included_by_dir / include_level, "w") as f:
for level, count in entries:
line = f"{level} {count}\n"
print(line.rstrip())
f.write(line)
# Search for loops
print("Search for loops")
loops_file = results_dir / "loops.txt"
ordering_file = results_dir / "ordering.txt"
loops_found: Set[Tuple[str, str]] = set()
# Pre-load all include files into memory to avoid repeated I/O
# This is the biggest optimization - we were reading files repeatedly in nested loops
# Use list of tuples to preserve file order
includes_cache: Dict[str, List[Tuple[str, int]]] = {}
includes_lookup: Dict[str, Dict[str, int]] = {} # For fast lookup
# Note: bash script uses 'for source in *' which uses standard glob sorting,
# NOT dictionary order. So we use standard sorted() here, not dictionary_sort_key.
for include_file in sorted(includes_dir.iterdir(), key=lambda p: p.name):
if not include_file.is_file():
continue
includes_cache[include_file.name] = []
includes_lookup[include_file.name] = {}
with open(include_file, "r") as f:
for line in f:
parts = line.strip().split()
if len(parts) >= 2:
include_name = parts[0]
include_count = int(parts[1])
includes_cache[include_file.name].append(
(include_name, include_count)
)
includes_lookup[include_file.name][include_name] = include_count
with open(loops_file, "w", buffering=8192) as loops_f, open(
ordering_file, "w", buffering=8192
) as ordering_f:
# Use standard sorting to match bash glob expansion 'for source in *'
for source in sorted(includes_cache.keys()):
source_includes = includes_cache[source]
for include, include_freq in source_includes:
# Check if include file exists and references source
if include not in includes_lookup:
continue
source_freq = includes_lookup[include].get(source)
if source_freq is not None:
# Found a loop
loop_key = tuple(sorted([source, include]))
if loop_key in loops_found:
continue
loops_found.add(loop_key)
loops_f.write(f"Loop: {source} {include}\n")
# If the counts are close, indicate that the two modules are
# on the same level, though they shouldn't be
diff = include_freq - source_freq
if diff > 3:
loops_f.write(f" {source} > {include}\n\n")
elif diff < -3:
loops_f.write(f" {include} > {source}\n\n")
elif source_freq == include_freq:
loops_f.write(f" {include} == {source}\n\n")
else:
loops_f.write(f" {include} ~= {source}\n\n")
else:
ordering_f.write(f"{source} > {include}\n")
# Print results
print("\nOrdering:")
with open(ordering_file, "r") as f:
print(f.read(), end="")
print("\nLoops:")
with open(loops_file, "r") as f:
print(f.read(), end="")
if __name__ == "__main__":
main()

130
.github/scripts/levelization/generate.sh vendored Executable file
View File

@@ -0,0 +1,130 @@
#!/bin/bash
# Usage: generate.sh
# This script takes no parameters, reads no environment variables,
# and can be run from any directory, as long as it is in the expected
# location in the repo.
#
# It builds a flat-file "levelization" database of which top-level
# modules include which others, then reports dependency loops
# (results/loops.txt) and a suggested ordering (results/ordering.txt).

# Work relative to the directory containing this script.
pushd $( dirname $0 )

if [ -v PS1 ]
then
    # if the shell is interactive, clean up any flotsam before analyzing
    git clean -ix
fi

# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C

# Start from a clean results directory on every run.
rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"

# From the repo root, collect every non-boost #include of a project
# header under include/ and src/. grep -r output is "file:include-line".
pushd ../../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
    grep -v boost | tee ${includes}
popd

pushd results
# The raw includes are "file:include" pairs, so split read fields on ':'.
oldifs=${IFS}
IFS=:
mkdir includes
mkdir includedby
echo Build levelization paths
exec 3< ${includes} # open rawincludes.txt for input
while read -r -u 3 file include
do
    # The module "level" is path components 2-3 (e.g. src/xrpld/app).
    level=$( echo ${file} | cut -d/ -f 2,3 )
    # If the "level" indicates a file, cut off the filename
    if [[ "${level##*.}" != "${level}" ]]
    then
        # Use the "toplevel" label as a workaround for `sort`
        # inconsistencies between different utility versions
        level="$( dirname ${level} )/toplevel"
    fi
    level=$( echo ${level} | tr '/' '.' )
    # Strip the include quoting ("..." or <...>), then take the first two
    # path components as the included module's level.
    includelevel=$( echo ${include} | sed 's/.*["<]//; s/[">].*//' | \
        cut -d/ -f 1,2 )
    if [[ "${includelevel##*.}" != "${includelevel}" ]]
    then
        # Use the "toplevel" label as a workaround for `sort`
        # inconsistencies between different utility versions
        includelevel="$( dirname ${includelevel} )/toplevel"
    fi
    includelevel=$( echo ${includelevel} | tr '/' '.' )
    # Only record cross-module dependencies; skip intra-module includes.
    if [[ "$level" != "$includelevel" ]]
    then
        echo $level $includelevel | tee -a paths.txt
    fi
done
echo Sort and dedup paths
# uniq -c prefixes each unique "level include" pair with its frequency.
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
mv sortedpaths.txt paths.txt
exec 3>&- #close fd 3
# Restore the default field separator for the rest of the script.
IFS=${oldifs}
unset oldifs

echo Split into flat-file database
# Fan the counted pairs out into two directories of per-module files:
#   includes/<level>      : what <level> includes, with frequency
#   includedby/<include>  : who includes <include>, with frequency
exec 4<paths.txt # open paths.txt for input
while read -r -u 4 count level include
do
    echo ${include} ${count} | tee -a includes/${level}
    echo ${level} ${count} | tee -a includedby/${include}
done
exec 4>&- #close fd 4

loops="$( pwd )/loops.txt"
ordering="$( pwd )/ordering.txt"
pushd includes
echo Search for loops
# Redirect stdout to a file
# (fd 4 saves the original stdout so it can be restored afterwards).
exec 4>&1
exec 1>"${loops}"
for source in *
do
    if [[ -f "$source" ]]
    then
        exec 5<"${source}" # open for input
        while read -r -u 5 include includefreq
        do
            # A loop exists if the included module's file also lists
            # $source among its own includes.
            if [[ -f $include ]]
            then
                if grep -q -w $source $include
                then
                    # Skip pairs already reported in the other direction.
                    if grep -q -w "Loop: $include $source" "${loops}"
                    then
                        continue
                    fi
                    sourcefreq=$( grep -w $source $include | cut -d\ -f2 )
                    echo "Loop: $source $include"
                    # If the counts are close, indicate that the two modules are
                    # on the same level, though they shouldn't be
                    if [[ $(( $includefreq - $sourcefreq )) -gt 3 ]]
                    then
                        echo -e " $source > $include\n"
                    elif [[ $(( $sourcefreq - $includefreq )) -gt 3 ]]
                    then
                        echo -e " $include > $source\n"
                    elif [[ $sourcefreq -eq $includefreq ]]
                    then
                        echo -e " $include == $source\n"
                    else
                        echo -e " $include ~= $source\n"
                    fi
                else
                    # No reverse dependency: record a simple ordering edge.
                    echo "$source > $include" >> "${ordering}"
                fi
            fi
        done
        exec 5>&- #close fd 5
    fi
done
exec 1>&4 #close fd 1
exec 4>&- #close fd 4
# Print both reports to the restored stdout.
cat "${ordering}"
cat "${loops}"
popd
popd
popd

View File

@@ -104,7 +104,6 @@ test.overlay > xrpl.basics
test.overlay > xrpld.app test.overlay > xrpld.app
test.overlay > xrpld.overlay test.overlay > xrpld.overlay
test.overlay > xrpld.peerfinder test.overlay > xrpld.peerfinder
test.overlay > xrpl.nodestore
test.overlay > xrpl.protocol test.overlay > xrpl.protocol
test.overlay > xrpl.shamap test.overlay > xrpl.shamap
test.peerfinder > test.beast test.peerfinder > test.beast
@@ -153,7 +152,6 @@ tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.net tests.libxrpl > xrpl.net
xrpl.core > xrpl.basics xrpl.core > xrpl.basics
xrpl.core > xrpl.json xrpl.core > xrpl.json
xrpl.core > xrpl.ledger
xrpl.json > xrpl.basics xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics xrpl.ledger > xrpl.basics
xrpl.ledger > xrpl.protocol xrpl.ledger > xrpl.protocol

View File

@@ -19,7 +19,7 @@ run from the repository root.
1. `.github/scripts/rename/definitions.sh`: This script will rename all 1. `.github/scripts/rename/definitions.sh`: This script will rename all
definitions, such as include guards, from `RIPPLE_XXX` and `RIPPLED_XXX` to definitions, such as include guards, from `RIPPLE_XXX` and `RIPPLED_XXX` to
`XRPL_XXX`. `XRPL_XXX`.
2. `.github/scripts/rename/copyright.sh`: This script will remove superfluous 2. `.github/scripts/rename/copyright.sh`: This script will remove superfluous
copyright notices. copyright notices.
3. `.github/scripts/rename/cmake.sh`: This script will rename all CMake files 3. `.github/scripts/rename/cmake.sh`: This script will rename all CMake files
from `RippleXXX.cmake` or `RippledXXX.cmake` to `XrplXXX.cmake`, and any from `RippleXXX.cmake` or `RippledXXX.cmake` to `XrplXXX.cmake`, and any

View File

@@ -56,7 +56,7 @@ for DIRECTORY in "${DIRECTORIES[@]}"; do
done done
${SED_COMMAND} -i 's/rippled/xrpld/g' cfg/xrpld-example.cfg ${SED_COMMAND} -i 's/rippled/xrpld/g' cfg/xrpld-example.cfg
${SED_COMMAND} -i 's/rippled/xrpld/g' src/test/core/Config_test.cpp ${SED_COMMAND} -i 's/rippled/xrpld/g' src/test/core/Config_test.cpp
${SED_COMMAND} -i 's/ripplevalidators/xrplvalidators/g' src/test/core/Config_test.cpp # cspell: disable-line ${SED_COMMAND} -i 's/ripplevalidators/xrplvalidators/g' src/test/core/Config_test.cpp
${SED_COMMAND} -i 's/rippleConfig/xrpldConfig/g' src/test/core/Config_test.cpp ${SED_COMMAND} -i 's/rippleConfig/xrpldConfig/g' src/test/core/Config_test.cpp
${SED_COMMAND} -i 's@ripple/@xrpld/@g' src/test/core/Config_test.cpp ${SED_COMMAND} -i 's@ripple/@xrpld/@g' src/test/core/Config_test.cpp
${SED_COMMAND} -i 's/Rippled/File/g' src/test/core/Config_test.cpp ${SED_COMMAND} -i 's/Rippled/File/g' src/test/core/Config_test.cpp

View File

@@ -50,11 +50,11 @@ for DIRECTORY in "${DIRECTORIES[@]}"; do
# Handle the cases where the copyright notice is enclosed in /* ... */ # Handle the cases where the copyright notice is enclosed in /* ... */
# and usually surrounded by //---- and //======. # and usually surrounded by //---- and //======.
${SED_COMMAND} -z -i -E 's@^//-------+\n+@@' "${FILE}" ${SED_COMMAND} -z -i -E 's@^//-------+\n+@@' "${FILE}"
${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}" # cspell: ignore Bougalis Falco Hinnant Ritchford ${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}"
${SED_COMMAND} -z -i -E 's@^//=======+\n+@@' "${FILE}" ${SED_COMMAND} -z -i -E 's@^//=======+\n+@@' "${FILE}"
# Handle the cases where the copyright notice is commented out with //. # Handle the cases where the copyright notice is commented out with //.
${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}" # cspell: ignore Vinnie Falco ${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}"
done done
done done
@@ -83,16 +83,16 @@ if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/ValidatorInfo.cpp; then
echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/ValidatorInfo.cpp)" > src/xrpld/rpc/handlers/ValidatorInfo.cpp echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/ValidatorInfo.cpp)" > src/xrpld/rpc/handlers/ValidatorInfo.cpp
fi fi
if ! grep -q 'Bougalis' include/xrpl/basics/SlabAllocator.h; then if ! grep -q 'Bougalis' include/xrpl/basics/SlabAllocator.h; then
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h # cspell: ignore Nikolaos Bougalis nikb echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h
fi fi
if ! grep -q 'Bougalis' include/xrpl/basics/spinlock.h; then if ! grep -q 'Bougalis' include/xrpl/basics/spinlock.h; then
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h # cspell: ignore Nikolaos Bougalis nikb echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h
fi fi
if ! grep -q 'Bougalis' include/xrpl/basics/tagged_integer.h; then if ! grep -q 'Bougalis' include/xrpl/basics/tagged_integer.h; then
echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h # cspell: ignore Nikolaos Bougalis nikb echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h
fi fi
if ! grep -q 'Ritchford' include/xrpl/beast/utility/Zero.h; then if ! grep -q 'Ritchford' include/xrpl/beast/utility/Zero.h; then
echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h # cspell: ignore Ritchford echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h
fi fi
# Restore newlines and tabs in string literals in the affected file. # Restore newlines and tabs in string literals in the affected file.

View File

@@ -20,8 +20,8 @@ class Config:
Generate a strategy matrix for GitHub Actions CI. Generate a strategy matrix for GitHub Actions CI.
On each PR commit we will build a selection of Debian, RHEL, Ubuntu, MacOS, and On each PR commit we will build a selection of Debian, RHEL, Ubuntu, MacOS, and
Windows configurations, while upon merge into the develop or release branches, Windows configurations, while upon merge into the develop, release, or master
we will build all configurations, and test most of them. branches, we will build all configurations, and test most of them.
We will further set additional CMake arguments as follows: We will further set additional CMake arguments as follows:
- All builds will have the `tests`, `werr`, and `xrpld` options. - All builds will have the `tests`, `werr`, and `xrpld` options.
@@ -51,20 +51,22 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# Only generate a subset of configurations in PRs. # Only generate a subset of configurations in PRs.
if not all: if not all:
# Debian: # Debian:
# - Bookworm using GCC 13: Release on linux/amd64, set the reference # - Bookworm using GCC 13: Release and Unity on linux/amd64, set
# fee to 500. # the reference fee to 500.
# - Bookworm using GCC 15: Debug on linux/amd64, enable code # - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
# coverage (which will be done below). # code coverage (which will be done below).
# - Bookworm using Clang 16: Debug on linux/arm64, enable voidstar. # - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
# - Bookworm using Clang 17: Release on linux/amd64, set the # enable voidstar.
# reference fee to 1000. # - Bookworm using Clang 17: Release and no Unity on linux/amd64,
# - Bookworm using Clang 20: Debug on linux/amd64. # set the reference fee to 1000.
# - Bookworm using Clang 20: Debug and Unity on linux/amd64.
if os["distro_name"] == "debian": if os["distro_name"] == "debian":
skip = True skip = True
if os["distro_version"] == "bookworm": if os["distro_version"] == "bookworm":
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13"
and build_type == "Release" and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}" cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}"
@@ -72,12 +74,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/arm64" and architecture["platform"] == "linux/arm64"
): ):
cmake_args = f"-Dvoidstar=ON {cmake_args}" cmake_args = f"-Dvoidstar=ON {cmake_args}"
@@ -85,6 +89,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17"
and build_type == "Release" and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}" cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}"
@@ -92,6 +97,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
@@ -99,14 +105,15 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue continue
# RHEL: # RHEL:
# - 9 using GCC 12: Debug on linux/amd64. # - 9 using GCC 12: Debug and Unity on linux/amd64.
# - 10 using Clang: Release on linux/amd64. # - 10 using Clang: Release and no Unity on linux/amd64.
if os["distro_name"] == "rhel": if os["distro_name"] == "rhel":
skip = True skip = True
if os["distro_version"] == "9": if os["distro_version"] == "9":
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
@@ -114,6 +121,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any"
and build_type == "Release" and build_type == "Release"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
@@ -121,16 +129,17 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue continue
# Ubuntu: # Ubuntu:
# - Jammy using GCC 12: Debug on linux/arm64. # - Jammy using GCC 12: Debug and no Unity on linux/arm64.
# - Noble using GCC 14: Release on linux/amd64. # - Noble using GCC 14: Release and Unity on linux/amd64.
# - Noble using Clang 18: Debug on linux/amd64. # - Noble using Clang 18: Debug and no Unity on linux/amd64.
# - Noble using Clang 19: Release on linux/arm64. # - Noble using Clang 19: Release and Unity on linux/arm64.
if os["distro_name"] == "ubuntu": if os["distro_name"] == "ubuntu":
skip = True skip = True
if os["distro_version"] == "jammy": if os["distro_version"] == "jammy":
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/arm64" and architecture["platform"] == "linux/arm64"
): ):
skip = False skip = False
@@ -138,18 +147,21 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14"
and build_type == "Release" and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
skip = False skip = False
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19" f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19"
and build_type == "Release" and build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "linux/arm64" and architecture["platform"] == "linux/arm64"
): ):
skip = False skip = False
@@ -157,16 +169,20 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue continue
# MacOS: # MacOS:
# - Debug on macos/arm64. # - Debug and no Unity on macos/arm64.
if os["distro_name"] == "macos" and not ( if os["distro_name"] == "macos" and not (
build_type == "Debug" and architecture["platform"] == "macos/arm64" build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "macos/arm64"
): ):
continue continue
# Windows: # Windows:
# - Release on windows/amd64. # - Release and Unity on windows/amd64.
if os["distro_name"] == "windows" and not ( if os["distro_name"] == "windows" and not (
build_type == "Release" and architecture["platform"] == "windows/amd64" build_type == "Release"
and "-Dunity=ON" in cmake_args
and architecture["platform"] == "windows/amd64"
): ):
continue continue
@@ -193,17 +209,18 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
): ):
continue continue
# Enable code coverage for Debian Bookworm using GCC 15 in Debug on # Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
# linux/amd64 # Unity on linux/amd64
if ( if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15" f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
and build_type == "Debug" and build_type == "Debug"
and "-Dunity=OFF" in cmake_args
and architecture["platform"] == "linux/amd64" and architecture["platform"] == "linux/amd64"
): ):
cmake_args = f"-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}" cmake_args = f"-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}"
# Generate a unique name for the configuration, e.g. macos-arm64-debug # Generate a unique name for the configuration, e.g. macos-arm64-debug
# or debian-bookworm-gcc-12-amd64-release. # or debian-bookworm-gcc-12-amd64-release-unity.
config_name = os["distro_name"] config_name = os["distro_name"]
if (n := os["distro_version"]) != "": if (n := os["distro_version"]) != "":
config_name += f"-{n}" config_name += f"-{n}"
@@ -212,62 +229,28 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (n := os["compiler_version"]) != "": if (n := os["compiler_version"]) != "":
config_name += f"-{n}" config_name += f"-{n}"
config_name += ( config_name += (
f"-{architecture['platform'][architecture['platform'].find('/')+1:]}" f"-{architecture['platform'][architecture['platform'].find('/') + 1 :]}"
) )
config_name += f"-{build_type.lower()}" config_name += f"-{build_type.lower()}"
if "-Dcoverage=ON" in cmake_args: if "-Dcoverage=ON" in cmake_args:
config_name += "-coverage" config_name += "-coverage"
if "-Dunity=ON" in cmake_args:
config_name += "-unity"
# Add the configuration to the list, with the most unique fields first, # Add the configuration to the list, with the most unique fields first,
# so that they are easier to identify in the GitHub Actions UI, as long # so that they are easier to identify in the GitHub Actions UI, as long
# names get truncated. # names get truncated.
# Add Address and Thread (both coupled with UB) sanitizers for specific bookworm distros. configurations.append(
# GCC-Asan rippled-embedded tests are failing because of https://github.com/google/sanitizers/issues/856 {
if ( "config_name": config_name,
os["distro_version"] == "bookworm" "cmake_args": cmake_args,
and f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20" "cmake_target": cmake_target,
): "build_only": build_only,
# Add ASAN + UBSAN configuration. "build_type": build_type,
configurations.append( "os": os,
{ "architecture": architecture,
"config_name": config_name + "-asan-ubsan", }
"cmake_args": cmake_args, )
"cmake_target": cmake_target,
"build_only": build_only,
"build_type": build_type,
"os": os,
"architecture": architecture,
"sanitizers": "address,undefinedbehavior",
}
)
# TSAN is deactivated due to seg faults with latest compilers.
activate_tsan = False
if activate_tsan:
configurations.append(
{
"config_name": config_name + "-tsan-ubsan",
"cmake_args": cmake_args,
"cmake_target": cmake_target,
"build_only": build_only,
"build_type": build_type,
"os": os,
"architecture": architecture,
"sanitizers": "thread,undefinedbehavior",
}
)
else:
configurations.append(
{
"config_name": config_name,
"cmake_args": cmake_args,
"cmake_target": cmake_target,
"build_only": build_only,
"build_type": build_type,
"os": os,
"architecture": architecture,
"sanitizers": "",
}
)
return configurations return configurations

View File

@@ -15,198 +15,198 @@
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "12", "compiler_version": "12",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "13", "compiler_version": "13",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "14", "compiler_version": "14",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "15", "compiler_version": "15",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "16", "compiler_version": "16",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "17", "compiler_version": "17",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "18", "compiler_version": "18",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "19", "compiler_version": "19",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "bookworm", "distro_version": "bookworm",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "20", "compiler_version": "20",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "trixie", "distro_version": "trixie",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "14", "compiler_version": "14",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "trixie", "distro_version": "trixie",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "15", "compiler_version": "15",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "trixie", "distro_version": "trixie",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "20", "compiler_version": "20",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "debian", "distro_name": "debian",
"distro_version": "trixie", "distro_version": "trixie",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "21", "compiler_version": "21",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "rhel", "distro_name": "rhel",
"distro_version": "8", "distro_version": "8",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "14", "compiler_version": "14",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "rhel", "distro_name": "rhel",
"distro_version": "8", "distro_version": "8",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "any", "compiler_version": "any",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "rhel", "distro_name": "rhel",
"distro_version": "9", "distro_version": "9",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "12", "compiler_version": "12",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "rhel", "distro_name": "rhel",
"distro_version": "9", "distro_version": "9",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "13", "compiler_version": "13",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "rhel", "distro_name": "rhel",
"distro_version": "9", "distro_version": "9",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "14", "compiler_version": "14",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "rhel", "distro_name": "rhel",
"distro_version": "9", "distro_version": "9",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "any", "compiler_version": "any",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "rhel", "distro_name": "rhel",
"distro_version": "10", "distro_version": "10",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "14", "compiler_version": "14",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "rhel", "distro_name": "rhel",
"distro_version": "10", "distro_version": "10",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "any", "compiler_version": "any",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "ubuntu", "distro_name": "ubuntu",
"distro_version": "jammy", "distro_version": "jammy",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "12", "compiler_version": "12",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "ubuntu", "distro_name": "ubuntu",
"distro_version": "noble", "distro_version": "noble",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "13", "compiler_version": "13",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "ubuntu", "distro_name": "ubuntu",
"distro_version": "noble", "distro_version": "noble",
"compiler_name": "gcc", "compiler_name": "gcc",
"compiler_version": "14", "compiler_version": "14",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "ubuntu", "distro_name": "ubuntu",
"distro_version": "noble", "distro_version": "noble",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "16", "compiler_version": "16",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "ubuntu", "distro_name": "ubuntu",
"distro_version": "noble", "distro_version": "noble",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "17", "compiler_version": "17",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "ubuntu", "distro_name": "ubuntu",
"distro_version": "noble", "distro_version": "noble",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "18", "compiler_version": "18",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
}, },
{ {
"distro_name": "ubuntu", "distro_name": "ubuntu",
"distro_version": "noble", "distro_version": "noble",
"compiler_name": "clang", "compiler_name": "clang",
"compiler_version": "19", "compiler_version": "19",
"image_sha": "ab4d1f0" "image_sha": "cc09fd3"
} }
], ],
"build_type": ["Debug", "Release"], "build_type": ["Debug", "Release"],
"cmake_args": [""] "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
} }

View File

@@ -2,7 +2,7 @@
"architecture": [ "architecture": [
{ {
"platform": "macos/arm64", "platform": "macos/arm64",
"runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"] "runner": ["self-hosted", "macOS", "ARM64"]
} }
], ],
"os": [ "os": [
@@ -15,5 +15,8 @@
} }
], ],
"build_type": ["Debug", "Release"], "build_type": ["Debug", "Release"],
"cmake_args": ["-DCMAKE_POLICY_VERSION_MINIMUM=3.5"] "cmake_args": [
"-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
"-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
]
} }

View File

@@ -15,5 +15,5 @@
} }
], ],
"build_type": ["Debug", "Release"], "build_type": ["Debug", "Release"],
"cmake_args": [""] "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
} }

View File

@@ -1,8 +1,7 @@
# This workflow runs all workflows to check, build and test the project on # This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as on MacOS and Windows, on every push to a # various Linux flavors, as well as on MacOS and Windows, on every push to a
# user branch. However, it will not run if the pull request is a draft unless it # user branch. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label. For commits to PRs that target a release branch, # has the 'DraftRunCI' label.
# it also uploads the libxrpl recipe to the Conan remote.
name: PR name: PR
on: on:
@@ -54,12 +53,12 @@ jobs:
.github/scripts/rename/** .github/scripts/rename/**
.github/workflows/reusable-check-levelization.yml .github/workflows/reusable-check-levelization.yml
.github/workflows/reusable-check-rename.yml .github/workflows/reusable-check-rename.yml
.github/workflows/reusable-notify-clio.yml
.github/workflows/on-pr.yml .github/workflows/on-pr.yml
# Keep the paths below in sync with those in `on-trigger.yml`. # Keep the paths below in sync with those in `on-trigger.yml`.
.github/actions/build-deps/** .github/actions/build-deps/**
.github/actions/build-test/** .github/actions/build-test/**
.github/actions/generate-version/**
.github/actions/setup-conan/** .github/actions/setup-conan/**
.github/scripts/strategy-matrix/** .github/scripts/strategy-matrix/**
.github/workflows/reusable-build.yml .github/workflows/reusable-build.yml
@@ -67,7 +66,6 @@ jobs:
.github/workflows/reusable-build-test.yml .github/workflows/reusable-build-test.yml
.github/workflows/reusable-strategy-matrix.yml .github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml .github/workflows/reusable-test.yml
.github/workflows/reusable-upload-recipe.yml
.codecov.yml .codecov.yml
cmake/** cmake/**
conan/** conan/**
@@ -123,42 +121,22 @@ jobs:
secrets: secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
upload-recipe: notify-clio:
needs: needs:
- should-run - should-run
- build-test - build-test
# Only run when committing to a PR that targets a release branch in the if: ${{ needs.should-run.outputs.go == 'true' && (startsWith(github.base_ref, 'release') || github.base_ref == 'master') }}
# XRPLF repository. uses: ./.github/workflows/reusable-notify-clio.yml
if: ${{ github.repository_owner == 'XRPLF' && needs.should-run.outputs.go == 'true' && startsWith(github.ref, 'refs/heads/release') }}
uses: ./.github/workflows/reusable-upload-recipe.yml
secrets: secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }} clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }} conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
notify-clio:
needs: upload-recipe
runs-on: ubuntu-latest
steps:
# Notify the Clio repository about the newly proposed release version, so
# it can be checked for compatibility before the release is actually made.
- name: Notify Clio
env:
GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[ref]=${{ needs.upload-recipe.outputs.recipe_ref }}" \
-F "client_payload[pr_url]=${PR_URL}"
passed: passed:
if: failure() || cancelled() if: failure() || cancelled()
needs: needs:
- check-levelization
- check-rename
- build-test - build-test
- upload-recipe - check-levelization
- notify-clio
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Fail - name: Fail

View File

@@ -1,25 +0,0 @@
# This workflow uploads the libxrpl recipe to the Conan remote when a versioned
# tag is pushed.
name: Tag
on:
push:
tags:
- "v*"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
upload-recipe:
# Only run when a tag is pushed to the XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' }}
uses: ./.github/workflows/reusable-upload-recipe.yml
secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

View File

@@ -1,7 +1,9 @@
# This workflow runs all workflows to build and test the code on various Linux # This workflow runs all workflows to build the dependencies required for the
# flavors, as well as on MacOS and Windows, on a scheduled basis, on merge into # project on various Linux flavors, as well as on MacOS and Windows, on a
# the 'develop' or 'release*' branches, or when requested manually. Upon pushes # scheduled basis, on merge into the 'develop', 'release', or 'master' branches,
# to the develop branch it also uploads the libxrpl recipe to the Conan remote. # or manually. The missing commits check is only run when the code is merged
# into the 'develop' or 'release' branches, and the documentation is built when
# the code is merged into the 'develop' branch.
name: Trigger name: Trigger
on: on:
@@ -9,6 +11,7 @@ on:
branches: branches:
- "develop" - "develop"
- "release*" - "release*"
- "master"
paths: paths:
# These paths are unique to `on-trigger.yml`. # These paths are unique to `on-trigger.yml`.
- ".github/workflows/on-trigger.yml" - ".github/workflows/on-trigger.yml"
@@ -16,7 +19,6 @@ on:
# Keep the paths below in sync with those in `on-pr.yml`. # Keep the paths below in sync with those in `on-pr.yml`.
- ".github/actions/build-deps/**" - ".github/actions/build-deps/**"
- ".github/actions/build-test/**" - ".github/actions/build-test/**"
- ".github/actions/generate-version/**"
- ".github/actions/setup-conan/**" - ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**" - ".github/scripts/strategy-matrix/**"
- ".github/workflows/reusable-build.yml" - ".github/workflows/reusable-build.yml"
@@ -24,7 +26,6 @@ on:
- ".github/workflows/reusable-build-test.yml" - ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-strategy-matrix.yml" - ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml" - ".github/workflows/reusable-test.yml"
- ".github/workflows/reusable-upload-recipe.yml"
- ".codecov.yml" - ".codecov.yml"
- "cmake/**" - "cmake/**"
- "conan/**" - "conan/**"
@@ -69,20 +70,11 @@ jobs:
with: with:
# Enable ccache only for events targeting the XRPLF repository, since # Enable ccache only for events targeting the XRPLF repository, since
# other accounts will not have access to our remote cache storage. # other accounts will not have access to our remote cache storage.
# However, we do not enable ccache for events targeting a release branch, # However, we do not enable ccache for events targeting the master or a
# to protect against the rare case that the output produced by ccache is # release branch, to protect against the rare case that the output
# not identical to a regular compilation. # produced by ccache is not identical to a regular compilation.
ccache_enabled: ${{ github.repository_owner == 'XRPLF' && !startsWith(github.ref, 'refs/heads/release') }} ccache_enabled: ${{ github.repository_owner == 'XRPLF' && !(github.base_ref == 'master' || startsWith(github.base_ref, 'release')) }}
os: ${{ matrix.os }} os: ${{ matrix.os }}
strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }} strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
secrets: secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
upload-recipe:
needs: build-test
# Only run when pushing to the develop branch in the XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' && github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
uses: ./.github/workflows/reusable-upload-recipe.yml
secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

View File

@@ -3,15 +3,13 @@ name: Run pre-commit hooks
on: on:
pull_request: pull_request:
push: push:
branches: branches: [develop, release, master]
- "develop"
- "release*"
workflow_dispatch: workflow_dispatch:
jobs: jobs:
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks. # Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
run-hooks: run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@320be44621ca2a080f05aeb15817c44b84518108 uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
with: with:
runs_on: ubuntu-latest runs_on: ubuntu-latest
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-ab4d1f0" }' container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'

View File

@@ -36,7 +36,7 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get number of processors - name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc id: nproc
with: with:
subtract: ${{ env.NPROC_SUBTRACT }} subtract: ${{ env.NPROC_SUBTRACT }}

View File

@@ -51,12 +51,6 @@ on:
type: number type: number
default: 2 default: 2
sanitizers:
description: "The sanitizers to enable."
required: false
type: string
default: ""
secrets: secrets:
CODECOV_TOKEN: CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports." description: "The Codecov token to use for uploading coverage reports."
@@ -97,19 +91,18 @@ jobs:
# Determine if coverage and voidstar should be enabled. # Determine if coverage and voidstar should be enabled.
COVERAGE_ENABLED: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }} COVERAGE_ENABLED: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }}
VOIDSTAR_ENABLED: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }} VOIDSTAR_ENABLED: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
SANITIZERS_ENABLED: ${{ inputs.sanitizers != '' }}
steps: steps:
- name: Cleanup workspace (macOS and Windows) - name: Cleanup workspace (macOS and Windows)
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }} if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner - name: Prepare runner
uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d uses: XRPLF/actions/prepare-runner@2ece4ec6ab7de266859a6f053571425b2bd684b6
with: with:
enable_ccache: ${{ inputs.ccache_enabled }} disable_ccache: ${{ !inputs.ccache_enabled }}
- name: Set ccache log file - name: Set ccache log file
if: ${{ inputs.ccache_enabled && runner.debug == '1' }} if: ${{ inputs.ccache_enabled && runner.debug == '1' }}
@@ -119,14 +112,12 @@ jobs:
uses: ./.github/actions/print-env uses: ./.github/actions/print-env
- name: Get number of processors - name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc id: nproc
with: with:
subtract: ${{ inputs.nproc_subtract }} subtract: ${{ inputs.nproc_subtract }}
- name: Setup Conan - name: Setup Conan
env:
SANITIZERS: ${{ inputs.sanitizers }}
uses: ./.github/actions/setup-conan uses: ./.github/actions/setup-conan
- name: Build dependencies - name: Build dependencies
@@ -137,13 +128,11 @@ jobs:
# Set the verbosity to "quiet" for Windows to avoid an excessive # Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful. # amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }} log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
sanitizers: ${{ inputs.sanitizers }}
- name: Configure CMake - name: Configure CMake
working-directory: ${{ env.BUILD_DIR }} working-directory: ${{ env.BUILD_DIR }}
env: env:
BUILD_TYPE: ${{ inputs.build_type }} BUILD_TYPE: ${{ inputs.build_type }}
SANITIZERS: ${{ inputs.sanitizers }}
CMAKE_ARGS: ${{ inputs.cmake_args }} CMAKE_ARGS: ${{ inputs.cmake_args }}
run: | run: |
cmake \ cmake \
@@ -185,7 +174,7 @@ jobs:
if-no-files-found: error if-no-files-found: error
- name: Check linking (Linux) - name: Check linking (Linux)
if: ${{ runner.os == 'Linux' && env.SANITIZERS_ENABLED == 'false' }} if: ${{ runner.os == 'Linux' }}
working-directory: ${{ env.BUILD_DIR }} working-directory: ${{ env.BUILD_DIR }}
run: | run: |
ldd ./xrpld ldd ./xrpld
@@ -202,14 +191,6 @@ jobs:
run: | run: |
./xrpld --version | grep libvoidstar ./xrpld --version | grep libvoidstar
- name: Set sanitizer options
if: ${{ !inputs.build_only && env.SANITIZERS_ENABLED == 'true' }}
run: |
echo "ASAN_OPTIONS=print_stacktrace=1:detect_container_overflow=0:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/asan.supp" >> ${GITHUB_ENV}
echo "TSAN_OPTIONS=second_deadlock_stack=1:halt_on_error=0:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/tsan.supp" >> ${GITHUB_ENV}
echo "UBSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/ubsan.supp" >> ${GITHUB_ENV}
echo "LSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/lsan.supp" >> ${GITHUB_ENV}
- name: Run the separate tests - name: Run the separate tests
if: ${{ !inputs.build_only }} if: ${{ !inputs.build_only }}
working-directory: ${{ env.BUILD_DIR }} working-directory: ${{ env.BUILD_DIR }}

View File

@@ -57,6 +57,5 @@ jobs:
runs_on: ${{ toJSON(matrix.architecture.runner) }} runs_on: ${{ toJSON(matrix.architecture.runner) }}
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }} image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
config_name: ${{ matrix.config_name }} config_name: ${{ matrix.config_name }}
sanitizers: ${{ matrix.sanitizers }}
secrets: secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -20,7 +20,7 @@ jobs:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Check levelization - name: Check levelization
run: python .github/scripts/levelization/generate.py run: .github/scripts/levelization/generate.sh
- name: Check for differences - name: Check for differences
env: env:
MESSAGE: | MESSAGE: |
@@ -32,7 +32,7 @@ jobs:
removed from loops.txt, it's probably an improvement, while if removed from loops.txt, it's probably an improvement, while if
something was added, it's probably a regression. something was added, it's probably a regression.
Run '.github/scripts/levelization/generate.py' in your repo, commit Run '.github/scripts/levelization/generate.sh' in your repo, commit
and push the changes. See .github/scripts/levelization/README.md for and push the changes. See .github/scripts/levelization/README.md for
more info. more info.
run: | run: |

View File

@@ -0,0 +1,91 @@
# This workflow exports the built libxrpl package to the Conan remote on a
# a channel named after the pull request, and notifies the Clio repository about
# the new version so it can check for compatibility.
name: Notify Clio
# This workflow can only be triggered by other workflows.
on:
workflow_call:
inputs:
conan_remote_name:
description: "The name of the Conan remote to use."
required: false
type: string
default: xrplf
conan_remote_url:
description: "The URL of the Conan endpoint to use."
required: false
type: string
default: https://conan.ripplex.io
secrets:
clio_notify_token:
description: "The GitHub token to notify Clio about new versions."
required: true
conan_remote_username:
description: "The username for logging into the Conan remote."
required: true
conan_remote_password:
description: "The password for logging into the Conan remote."
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-clio
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
upload:
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Generate outputs
id: generate
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
echo 'Generating user and channel.'
echo "user=clio" >> "${GITHUB_OUTPUT}"
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
echo 'Extracting version.'
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
- name: Calculate conan reference
id: conan_ref
run: |
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
- name: Set up Conan
uses: ./.github/actions/setup-conan
with:
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
- name: Log into Conan remote
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
- name: Upload package
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: |
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
outputs:
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}
notify:
needs: upload
runs-on: ubuntu-latest
steps:
- name: Notify Clio
env:
GH_TOKEN: ${{ secrets.clio_notify_token }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
-F "client_payload[pr_url]=${PR_URL}"

View File

@@ -1,97 +0,0 @@
# This workflow exports the built libxrpl package to the Conan remote.
name: Upload Conan recipe
# This workflow can only be triggered by other workflows.
on:
workflow_call:
inputs:
remote_name:
description: "The name of the Conan remote to use."
required: false
type: string
default: xrplf
remote_url:
description: "The URL of the Conan endpoint to use."
required: false
type: string
default: https://conan.ripplex.io
secrets:
remote_username:
description: "The username for logging into the Conan remote."
required: true
remote_password:
description: "The password for logging into the Conan remote."
required: true
outputs:
recipe_ref:
description: "The Conan recipe reference ('name/version') that was uploaded."
value: ${{ jobs.upload.outputs.ref }}
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-upload-recipe
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
upload:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Generate build version number
id: version
uses: ./.github/actions/generate-version
- name: Set up Conan
uses: ./.github/actions/setup-conan
with:
remote_name: ${{ inputs.remote_name }}
remote_url: ${{ inputs.remote_url }}
- name: Log into Conan remote
env:
REMOTE_NAME: ${{ inputs.remote_name }}
REMOTE_USERNAME: ${{ secrets.remote_username }}
REMOTE_PASSWORD: ${{ secrets.remote_password }}
run: conan remote login "${REMOTE_NAME}" "${REMOTE_USERNAME}" --password "${REMOTE_PASSWORD}"
- name: Upload Conan recipe (version)
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=${{ steps.version.outputs.version }}
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/${{ steps.version.outputs.version }}
- name: Upload Conan recipe (develop)
if: ${{ github.ref == 'refs/heads/develop' }}
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=develop
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/develop
- name: Upload Conan recipe (rc)
if: ${{ startsWith(github.ref, 'refs/heads/release') }}
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=rc
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/rc
- name: Upload Conan recipe (release)
if: ${{ github.event_name == 'tag' }}
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=release
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/release
outputs:
ref: xrpl/${{ steps.version.outputs.version }}

View File

@@ -64,32 +64,30 @@ jobs:
steps: steps:
- name: Cleanup workspace (macOS and Windows) - name: Cleanup workspace (macOS and Windows)
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }} if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner - name: Prepare runner
uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d uses: XRPLF/actions/prepare-runner@2ece4ec6ab7de266859a6f053571425b2bd684b6
with: with:
enable_ccache: false disable_ccache: true
- name: Print build environment - name: Print build environment
uses: ./.github/actions/print-env uses: ./.github/actions/print-env
- name: Get number of processors - name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc id: nproc
with: with:
subtract: ${{ env.NPROC_SUBTRACT }} subtract: ${{ env.NPROC_SUBTRACT }}
- name: Setup Conan - name: Setup Conan
env:
SANITIZERS: ${{ matrix.sanitizers }}
uses: ./.github/actions/setup-conan uses: ./.github/actions/setup-conan
with: with:
remote_name: ${{ env.CONAN_REMOTE_NAME }} conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
remote_url: ${{ env.CONAN_REMOTE_URL }} conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
- name: Build dependencies - name: Build dependencies
uses: ./.github/actions/build-deps uses: ./.github/actions/build-deps
@@ -100,7 +98,6 @@ jobs:
# Set the verbosity to "quiet" for Windows to avoid an excessive # Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful. # amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }} log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
sanitizers: ${{ matrix.sanitizers }}
- name: Log into Conan remote - name: Log into Conan remote
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }} if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}

7
.gitignore vendored
View File

@@ -1,5 +1,4 @@
# .gitignore # .gitignore
# cspell: disable
# Macintosh Desktop Services Store files. # Macintosh Desktop Services Store files.
.DS_Store .DS_Store
@@ -64,13 +63,7 @@ DerivedData
/.vs/ /.vs/
/.vscode/ /.vscode/
# zed IDE.
/.zed/
# AI tools. # AI tools.
/.augment /.augment
/.claude /.claude
/CLAUDE.md /CLAUDE.md
# Python
__pycache__

View File

@@ -26,36 +26,30 @@ repos:
args: [--style=file] args: [--style=file]
"types_or": [c++, c, proto] "types_or": [c++, c, proto]
- repo: https://github.com/cheshirekow/cmake-format-precommit
rev: e2c2116d86a80e72e7146a06e68b7c228afc6319 # frozen: v0.6.13
hooks:
- id: cmake-format
additional_dependencies: [PyYAML]
- repo: https://github.com/rbubley/mirrors-prettier - repo: https://github.com/rbubley/mirrors-prettier
rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2 rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
hooks: hooks:
- id: prettier - id: prettier
- repo: https://github.com/psf/black-pre-commit-mirror - repo: https://github.com/psf/black-pre-commit-mirror
rev: 831207fd435b47aeffdf6af853097e64322b4d44 # frozen: v25.12.0 rev: 25.11.0
hooks: hooks:
- id: black - id: black
- repo: https://github.com/streetsidesoftware/cspell-cli # - repo: https://github.com/streetsidesoftware/cspell-cli
rev: 1cfa010f078c354f3ffb8413616280cc28f5ba21 # frozen: v9.4.0 # rev: v9.2.0
hooks: # hooks:
- id: cspell # Spell check changed files # - id: cspell # Spell check changed files
exclude: .config/cspell.config.yaml # - id: cspell # Spell check the commit message
- id: cspell # Spell check the commit message # name: check commit message spelling
name: check commit message spelling # args:
args: # - --no-must-find-files
- --no-must-find-files # - --no-progress
- --no-progress # - --no-summary
- --no-summary # - --files
- --files # - .git/COMMIT_EDITMSG
- .git/COMMIT_EDITMSG # stages: [commit-msg]
stages: [commit-msg] # always_run: true # This might not be necessary.
exclude: | exclude: |
(?x)^( (?x)^(

View File

@@ -6,85 +6,90 @@ For info about how [API versioning](https://xrpl.org/request-formatting.html#api
The API version controls the API behavior you see. This includes what properties you see in responses, what parameters you're permitted to send in requests, and so on. You specify the API version in each of your requests. When a breaking change is introduced to the `rippled` API, a new version is released. To avoid breaking your code, you should set (or increase) your version when you're ready to upgrade. The API version controls the API behavior you see. This includes what properties you see in responses, what parameters you're permitted to send in requests, and so on. You specify the API version in each of your requests. When a breaking change is introduced to the `rippled` API, a new version is released. To avoid breaking your code, you should set (or increase) your version when you're ready to upgrade.
The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release. For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release.
## API Version 3 (Beta)
API version 3 is currently a beta API. It requires enabling `[beta_rpc_api]` in the rippled configuration to use. See [API-VERSION-3.md](API-VERSION-3.md) for the full list of changes in API version 3.
## API Version 2 ## API Version 2
API version 2 is available in `rippled` version 2.0.0 and later. See [API-VERSION-2.md](API-VERSION-2.md) for the full list of changes in API version 2. API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.
#### Removed methods
In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)
- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.
#### Modifications to JSON transaction element in V2
In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)
- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`
Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:
- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`
This change affects the following methods:
- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
#### Modification to `Payment` transaction JSON schema
When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733)
- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733)
- On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
- On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
- On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
- On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
#### Modifications to account_info response
- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) (https://github.com/XRPLF/rippled/pull/3770)
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
- (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)
#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response
- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571)
- The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
- returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response
- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
## API Version 1
This version is supported by all `rippled` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified.
## XRP Ledger server version 3.1.0 The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
[Version 3.1.0](https://github.com/XRPLF/rippled/releases/tag/3.1.0) was released on Jan 27, 2026. ### Inconsistency: server_info - network_id
### Additions in 3.1.0 The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead.
- `vault_info`: New RPC method to retrieve information about a specific vault (part of XLS-66 Lending Protocol). ([#6156](https://github.com/XRPLF/rippled/pull/6156))
## XRP Ledger server version 3.0.0
[Version 3.0.0](https://github.com/XRPLF/rippled/releases/tag/3.0.0) was released on Dec 9, 2025.
### Additions in 3.0.0
- `ledger_entry`: Supports all ledger entry types with dedicated parsers. ([#5237](https://github.com/XRPLF/rippled/pull/5237))
- `ledger_entry`: New error codes `entryNotFound` and `unexpectedLedgerType` for more specific error handling. ([#5237](https://github.com/XRPLF/rippled/pull/5237))
- `ledger_entry`: Improved error messages with more context (e.g., specifying which field is invalid or missing). ([#5237](https://github.com/XRPLF/rippled/pull/5237))
- `ledger_entry`: Assorted bug fixes in RPC processing. ([#5237](https://github.com/XRPLF/rippled/pull/5237))
- `simulate`: Supports additional metadata in the response. ([#5754](https://github.com/XRPLF/rippled/pull/5754))
## XRP Ledger server version 2.6.2
[Version 2.6.2](https://github.com/XRPLF/rippled/releases/tag/2.6.2) was released on Nov 19, 2025.
This release contains bug fixes only and no API changes.
## XRP Ledger server version 2.6.1
[Version 2.6.1](https://github.com/XRPLF/rippled/releases/tag/2.6.1) was released on Sep 30, 2025.
This release contains bug fixes only and no API changes.
## XRP Ledger server version 2.6.0
[Version 2.6.0](https://github.com/XRPLF/rippled/releases/tag/2.6.0) was released on Aug 27, 2025.
### Additions in 2.6.0
- `account_info`: Added `allowTrustLineLocking` flag in response. ([#5525](https://github.com/XRPLF/rippled/pull/5525))
- `ledger`: Removed the type filter from the RPC command. ([#4934](https://github.com/XRPLF/rippled/pull/4934))
- `subscribe` (`validations` stream): `network_id` is now included. ([#5579](https://github.com/XRPLF/rippled/pull/5579))
- `subscribe` (`transactions` stream): `nftoken_id`, `nftoken_ids`, and `offer_id` are now included in transaction metadata. ([#5230](https://github.com/XRPLF/rippled/pull/5230))
## XRP Ledger server version 2.5.1
[Version 2.5.1](https://github.com/XRPLF/rippled/releases/tag/2.5.1) was released on Sep 17, 2025.
This release contains bug fixes only and no API changes.
## XRP Ledger server version 2.5.0
[Version 2.5.0](https://github.com/XRPLF/rippled/releases/tag/2.5.0) was released on Jun 24, 2025.
### Additions and bugfixes in 2.5.0
- `tx`: Added `ctid` field to the response and improved error handling. ([#4738](https://github.com/XRPLF/rippled/pull/4738))
- `ledger_entry`: Improved error messages in `permissioned_domain`. ([#5344](https://github.com/XRPLF/rippled/pull/5344))
- `simulate`: Improved multi-sign usage. ([#5479](https://github.com/XRPLF/rippled/pull/5479))
- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled. ([#5385](https://github.com/XRPLF/rippled/pull/5385))
- `subscribe` (admin): Removed webhook queue limit to prevent dropping notifications; reduced HTTP timeout from 10 minutes to 30 seconds. ([#5163](https://github.com/XRPLF/rippled/pull/5163))
- `ledger_data` (gRPC): Fixed crashing issue with some invalid markers. ([#5137](https://github.com/XRPLF/rippled/pull/5137))
- `account_lines`: Fixed error with `no_ripple` and `no_ripple_peer` sometimes showing up incorrectly. ([#5345](https://github.com/XRPLF/rippled/pull/5345))
- `account_tx`: Fixed issue with incorrect CTIDs. ([#5408](https://github.com/XRPLF/rippled/pull/5408))
## XRP Ledger server version 2.4.0
@@ -92,19 +97,11 @@ This release contains bug fixes only and no API changes.
### Additions and bugfixes in 2.4.0
- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate). ([#5069](https://github.com/XRPLF/rippled/pull/5069))
- Signing methods (`sign`, `sign_for`, `submit`): Autofill fees better, properly handle transactions without a base fee, and autofill the `NetworkID` field. ([#5069](https://github.com/XRPLF/rippled/pull/5069))
- `ledger_entry`: `state` is added as an alias for `ripple_state`. ([#5199](https://github.com/XRPLF/rippled/pull/5199))
- `ledger`, `ledger_data`, `account_objects`: Support filtering ledger entry types by their canonical names (case-insensitive). ([#5271](https://github.com/XRPLF/rippled/pull/5271))
- `validators`: Added new field `validator_list_threshold` in response. ([#5112](https://github.com/XRPLF/rippled/pull/5112))
- `server_info`: Added git commit hash info on admin connection. ([#5225](https://github.com/XRPLF/rippled/pull/5225))
- `server_definitions`: Changed larger `UInt` serialized types to `Hash`. ([#5231](https://github.com/XRPLF/rippled/pull/5231))
## XRP Ledger server version 2.3.1
[Version 2.3.1](https://github.com/XRPLF/rippled/releases/tag/2.3.1) was released on Jan 29, 2025.
This release contains bug fixes only and no API changes.
## XRP Ledger server version 2.3.0
@@ -112,30 +109,19 @@ This release contains bug fixes only and no API changes.
### Breaking changes in 2.3.0
- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs). Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC.
### Additions and bugfixes in 2.3.0
- `book_changes`: Returns a `validated` field in its response. ([#5096](https://github.com/XRPLF/rippled/pull/5096))
- `book_changes`: Accepts shortcut strings (`current`, `closed`, `validated`) for the `ledger_index` parameter. ([#5096](https://github.com/XRPLF/rippled/pull/5096))
- `server_definitions`: Include `index` in response. ([#5190](https://github.com/XRPLF/rippled/pull/5190))
- `account_nfts`: Fix issue where unassociated marker would return incorrect results. ([#5045](https://github.com/XRPLF/rippled/pull/5045))
- `account_objects`: Fix issue where invalid marker would not return an error. ([#5046](https://github.com/XRPLF/rippled/pull/5046))
- `account_objects`: Disallow filtering by ledger entry types that an account cannot hold. ([#5056](https://github.com/XRPLF/rippled/pull/5056))
- `tx`: Allow lowercase CTID. ([#5049](https://github.com/XRPLF/rippled/pull/5049))
- `feature`: Better error handling for invalid values of `feature`. ([#5063](https://github.com/XRPLF/rippled/pull/5063))
## XRP Ledger server version 2.2.0
[Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive):
- `feature`: Add a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
## XRP Ledger server version 2.0.1
[Version 2.0.1](https://github.com/XRPLF/rippled/releases/tag/2.0.1) was released on Jan 29, 2024. The following additions are non-breaking:
- `path_find`: Fixes unbounded memory growth. ([#4822](https://github.com/XRPLF/rippled/pull/4822))
## XRP Ledger server version 2.0.0
@@ -143,18 +129,24 @@ This release contains bug fixes only and no API changes.
- `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries.
- In `Payment` transactions, `DeliverMax` has been added. This is a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking.
- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting). The full list of changes is in [API-VERSION-2.md](API-VERSION-2.md). - API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting).
## XRP Ledger server version 2.2.0
The following is a non-breaking addition to the API.
- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
## XRP Ledger server version 1.12.0
[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive):
- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. ([#4427](https://github.com/XRPLF/rippled/pull/4427))
- `ports` contains objects, each containing a `port` for the listening port (a number string), and a `protocol` array listing the supported protocols on that port.
- This allows crawlers to build a more detailed topology without needing to port-scan nodes.
- (For peers and other non-admin clients, the info about admin ports is excluded.)
- Clawback: The following additions are gated by the Clawback amendment (`featureClawback`). ([#4553](https://github.com/XRPLF/rippled/pull/4553))
- Adds an [AccountRoot flag](https://xrpl.org/accountroot.html#accountroot-flags) called `lsfAllowTrustLineClawback`. ([#4617](https://github.com/XRPLF/rippled/pull/4617))
- Adds the corresponding `asfAllowTrustLineClawback` [AccountSet Flag](https://xrpl.org/accountset.html#accountset-flags) as well.
- Clawback is disabled by default, so if an issuer desires the ability to claw back funds, they must use an `AccountSet` transaction to set the AllowTrustLineClawback flag. They must do this before creating any trust lines, offers, escrows, payment channels, or checks.
- Adds the [Clawback transaction type](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-39d-clawback/README.md#331-clawback-transaction), containing these fields:
@@ -189,16 +181,16 @@ This release contains bug fixes only and no API changes.
### Breaking changes in 1.11
- Added the ability to mark amendments as obsolete. For the `feature` admin API, there is a new possible value for the `vetoed` field. ([#4291](https://github.com/XRPLF/rippled/pull/4291))
- The value of `vetoed` can now be `true`, `false`, or `"Obsolete"`.
- Removed the acceptance of seeds or public keys in place of account addresses. ([#4404](https://github.com/XRPLF/rippled/pull/4404))
- This simplifies the API and encourages better security practices (i.e. seeds should never be sent over the network).
- For the `ledger_data` method, when all entries are filtered out, the `state` field of the response is now an empty list (in other words, an empty array, `[]`). (Previously, it would return `null`.) While this is technically a breaking change, the new behavior is consistent with the documentation, so this is considered only a bug fix. ([#4398](https://github.com/XRPLF/rippled/pull/4398))
- If and when the `fixNFTokenRemint` amendment activates, there will be a new AccountRoot field, `FirstNFTSequence`. This field is set to the current account sequence when the account issues their first NFT. If an account has not issued any NFTs, then the field is not set. ([#4406](https://github.com/XRPLF/rippled/pull/4406))
- There is a new account deletion restriction: an account can only be deleted if `FirstNFTSequence` + `MintedNFTokens` + `256` is less than the current ledger sequence.
- This is potentially a breaking change if clients have logic for determining whether an account can be deleted.
- NetworkID
- For sidechains and networks with a network ID greater than 1024, there is a new [transaction common field](https://xrpl.org/transaction-common-fields.html), `NetworkID`. ([#4370](https://github.com/XRPLF/rippled/pull/4370))
- This field helps to prevent replay attacks and is now required for chains whose network ID is 1025 or higher.
- The field must be omitted for Mainnet, so there is no change for Mainnet users.
- There are three new local error codes:
@@ -208,10 +200,10 @@ This release contains bug fixes only and no API changes.
### Additions and bug fixes in 1.11
- Added `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `tx` and `account_tx` responses. ([#4447](https://github.com/XRPLF/rippled/pull/4447))
- Added an `account_flags` object to the `account_info` method response. ([#4459](https://github.com/XRPLF/rippled/pull/4459))
- Added `NFTokenPages` to the `account_objects` RPC. ([#4352](https://github.com/XRPLF/rippled/pull/4352))
- Fixed: `marker` returned from the `account_lines` command would not work on subsequent commands. ([#4361](https://github.com/XRPLF/rippled/pull/4361))
## XRP Ledger server version 1.10.0

View File

@@ -1,66 +0,0 @@
# API Version 2
API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.
For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior.
## Removed methods
In API version 2, the following deprecated methods are no longer available: ([#4759](https://github.com/XRPLF/rippled/pull/4759))
- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.
## Modifications to JSON transaction element in API version 2
In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. ([#4775](https://github.com/XRPLF/rippled/pull/4775))
This helps to unify the JSON serialization format of transactions. ([clio#722](https://github.com/XRPLF/clio/issues/722), [#4727](https://github.com/XRPLF/rippled/issues/4727))
- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`
Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:
- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`
This change affects the following methods:
- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
## Modifications to `Payment` transaction JSON schema
When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. ([#4733](https://github.com/XRPLF/rippled/pull/4733))
- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: ([#4733](https://github.com/XRPLF/rippled/pull/4733))
- On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
- On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
- On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
- On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
## Modifications to account_info response
- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) ([#3770](https://github.com/XRPLF/rippled/pull/3770))
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. ([#4585](https://github.com/XRPLF/rippled/pull/4585))
- (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)
## Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response
- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. ([#4571](https://github.com/XRPLF/rippled/pull/4571))
- The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. ([#4545](https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579))
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
- returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. ([#4288](https://github.com/XRPLF/rippled/issues/4288))
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. ([#4620](https://github.com/XRPLF/rippled/pull/4620))
## Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response
- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. ([#4620](https://github.com/XRPLF/rippled/pull/4620))

View File

@@ -1,27 +0,0 @@
# API Version 3
API version 3 is currently a **beta API**. It requires enabling `[beta_rpc_api]` in the rippled configuration to use. To use this API, clients specify `"api_version" : 3` in each request.
For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior.
## Breaking Changes
### Modifications to `amm_info`
The order of error checks has been changed to provide more specific error messages. ([#4924](https://github.com/XRPLF/rippled/pull/4924))
- **Before (API v2)**: When sending an invalid account or asset to `amm_info` while other parameters are not set as expected, the method returns a generic `rpcINVALID_PARAMS` error.
- **After (API v3)**: The same scenario returns a more specific error: `rpcISSUE_MALFORMED` for malformed assets or `rpcACT_MALFORMED` for malformed accounts.
### Modifications to `ledger_entry`
Added support for string shortcuts to look up fixed-location ledger entries using the `"index"` parameter. ([#5644](https://github.com/XRPLF/rippled/pull/5644))
In API version 3, the following string values can be used with the `"index"` parameter:
- `"index": "amendments"` - Returns the `Amendments` ledger entry
- `"index": "fee"` - Returns the `FeeSettings` ledger entry
- `"index": "nunl"` - Returns the `NegativeUNL` ledger entry
- `"index": "hashes"` - Returns the "short" `LedgerHashes` ledger entry (recent ledger hashes)
These shortcuts are only available in API version 3 and later. In API versions 1 and 2, these string values would result in an error.

View File

@@ -1,5 +1,5 @@
| :warning: **WARNING** :warning: | | :warning: **WARNING** :warning:
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | |---|
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). | | These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |
> These instructions also assume a basic familiarity with Conan and CMake. > These instructions also assume a basic familiarity with Conan and CMake.
@@ -148,8 +148,7 @@ function extract_version {
} }
# Define which recipes to export. # Define which recipes to export.
recipes=('ed25519' 'grpc' 'nudb' 'openssl' 'secp256k1' 'snappy' 'soci') recipes=(ed25519 grpc secp256k1 snappy soci)
folders=('all' 'all' 'all' '3.x.x' 'all' 'all' 'all')
# Selectively check out the recipes from our CCI fork. # Selectively check out the recipes from our CCI fork.
cd external cd external
@@ -158,24 +157,20 @@ cd conan-center-index
git init git init
git remote add origin git@github.com:XRPLF/conan-center-index.git git remote add origin git@github.com:XRPLF/conan-center-index.git
git sparse-checkout init git sparse-checkout init
for ((index = 1; index <= ${#recipes[@]}; index++)); do for recipe in ${recipes[@]}; do
recipe=${recipes[index]} echo "Checking out ${recipe}..."
folder=${folders[index]} git sparse-checkout add recipes/${recipe}/all
echo "Checking out recipe '${recipe}' from folder '${folder}'..."
git sparse-checkout add recipes/${recipe}/${folder}
done done
git fetch origin master git fetch origin master
git checkout master git checkout master
cd ../.. cd ../..
# Export the recipes into the local cache. # Export the recipes into the local cache.
for ((index = 1; index <= ${#recipes[@]}; index++)); do for recipe in ${recipes[@]}; do
recipe=${recipes[index]}
folder=${folders[index]}
version=$(extract_version ${recipe}) version=$(extract_version ${recipe})
echo "Exporting '${recipe}/${version}' from '${recipe}/${folder}'..." echo "Exporting ${recipe}/${version}..."
conan export --version $(extract_version ${recipe}) \ conan export --version $(extract_version ${recipe}) \
external/conan-center-index/recipes/${recipe}/${folder} external/conan-center-index/recipes/${recipe}/all
done done
``` ```
@@ -368,36 +363,6 @@ The workaround for this error is to add two lines to your profile:
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS'] tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
``` ```
### Set Up Ccache
To speed up repeated compilations, we recommend that you install
[ccache](https://ccache.dev), a tool that wraps your compiler so that it can
cache build objects locally.
#### Linux
You can install it using the package manager, e.g. `sudo apt install ccache`
(Ubuntu) or `sudo dnf install ccache` (RHEL).
#### macOS
You can install it using Homebrew, i.e. `brew install ccache`.
#### Windows
You can install it using Chocolatey, i.e. `choco install ccache`. If you already
have Ccache installed, then `choco upgrade ccache` will update it to the latest
version. However, if you see an error such as:
```
terminate called after throwing an instance of 'std::bad_alloc'
what(): std::bad_alloc
C:\Program Files\Microsoft Visual Studio\2022\Community\MSBuild\Microsoft\VC\v170\Microsoft.CppCommon.targets(617,5): error MSB6006: "cl.exe" exited with code 3.
```
then please install a specific version of Ccache that we know works, via: `choco
install ccache --version 4.11.3 --allow-downgrade`.
### Build and Test ### Build and Test
1. Create a build directory and move into it. 1. Create a build directory and move into it.
@@ -553,31 +518,23 @@ stored inside the build directory, as either of:
- file named `coverage.`_extension_, with a suitable extension for the report format, or - file named `coverage.`_extension_, with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats. - directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
## Sanitizers
To build dependencies and xrpld with sanitizer instrumentation, set the
`SANITIZERS` environment variable (only once before running conan and cmake) and use the `sanitizers` profile in conan:
```bash
export SANITIZERS=address,undefinedbehavior
conan install .. --output-folder . --profile:all sanitizers --build missing --settings build_type=Debug
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Debug -Dxrpld=ON -Dtests=ON ..
```
See [Sanitizers docs](./docs/build/sanitizers.md) for more details.
## Options ## Options
| Option | Default Value | Description | | Option | Default Value | Description |
| ---------- | ------------- | -------------------------------------------------------------- | | ---------- | ------------- | ------------------------------------------------------------------ |
| `assert` | OFF | Enable assertions. | | `assert` | OFF | Enable assertions. |
| `coverage` | OFF | Prepare the coverage report. | | `coverage` | OFF | Prepare the coverage report. |
| `tests` | OFF | Build tests. | | `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
| `xrpld` | OFF | Build the xrpld application, and not just the libxrpl library. | | `tests` | OFF | Build tests. |
| `werr` | OFF | Treat compilation warnings as errors | | `unity` | OFF | Configure a unity build. |
| `wextra` | OFF | Enable additional compilation warnings | | `xrpld` | OFF | Build the xrpld application, and not just the libxrpl library. |
| `werr` | OFF | Treat compilation warnings as errors |
| `wextra` | OFF | Enable additional compilation warnings |
[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.
## Troubleshooting ## Troubleshooting
@@ -645,6 +602,7 @@ If you want to experiment with a new package, follow these steps:
[1]: https://github.com/conan-io/conan-center-index/issues/13168 [1]: https://github.com/conan-io/conan-center-index/issues/13168
[2]: https://en.cppreference.com/w/cpp/compiler_support/20 [2]: https://en.cppreference.com/w/cpp/compiler_support/20
[3]: https://docs.conan.io/en/latest/getting_started.html [3]: https://docs.conan.io/en/latest/getting_started.html
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648 [6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661 [7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html [gcovr]: https://gcovr.com/en/stable/getting-started.html

View File

@@ -1,16 +1,14 @@
cmake_minimum_required(VERSION 3.16) cmake_minimum_required(VERSION 3.16)
if (POLICY CMP0074) if(POLICY CMP0074)
cmake_policy(SET CMP0074 NEW) cmake_policy(SET CMP0074 NEW)
endif () endif()
if (POLICY CMP0077) if(POLICY CMP0077)
cmake_policy(SET CMP0077 NEW) cmake_policy(SET CMP0077 NEW)
endif () endif()
# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows. # Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
if (DEFINED CMAKE_MODULE_PATH) file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
endif ()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake") list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
project(xrpl) project(xrpl)
@@ -18,130 +16,138 @@ set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20) set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_STANDARD_REQUIRED ON)
include(CompilationEnv) if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
if (is_gcc)
# GCC-specific fixes # GCC-specific fixes
add_compile_options(-Wno-unknown-pragmas -Wno-subobject-linkage) add_compile_options(-Wno-unknown-pragmas -Wno-subobject-linkage)
# -Wno-subobject-linkage can be removed when we upgrade GCC version to at least 13.3 # -Wno-subobject-linkage can be removed when we upgrade GCC version to at least 13.3
elseif (is_clang) elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# Clang-specific fixes # Clang-specific fixes
add_compile_options(-Wno-unknown-warning-option) # Ignore unknown warning options add_compile_options(-Wno-unknown-warning-option) # Ignore unknown warning options
elseif (is_msvc) elseif(MSVC)
# MSVC-specific fixes # MSVC-specific fixes
add_compile_options(/wd4068) # Ignore unknown pragmas add_compile_options(/wd4068) # Ignore unknown pragmas
endif () endif()
# Enable ccache to speed up builds. # Enable ccache to speed up builds.
include(Ccache) include(Ccache)
# make GIT_COMMIT_HASH define available to all sources # make GIT_COMMIT_HASH define available to all sources
find_package(Git) find_package(Git)
if (Git_FOUND) if(Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch) OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if (gch) if(gch)
set(GIT_COMMIT_HASH "${gch}") set(GIT_COMMIT_HASH "${gch}")
message(STATUS gch: ${GIT_COMMIT_HASH}) message(STATUS gch: ${GIT_COMMIT_HASH})
add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}") add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif () endif()
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb) OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if (gb) if(gb)
set(GIT_BRANCH "${gb}") set(GIT_BRANCH "${gb}")
message(STATUS gb: ${GIT_BRANCH}) message(STATUS gb: ${GIT_BRANCH})
add_definitions(-DGIT_BRANCH="${GIT_BRANCH}") add_definitions(-DGIT_BRANCH="${GIT_BRANCH}")
endif () endif()
endif () # git endif() #git
if (thread_safety_analysis) if(thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS)
-DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS) add_compile_options("-stdlib=libc++")
add_compile_options("-stdlib=libc++") add_link_options("-stdlib=libc++")
add_link_options("-stdlib=libc++") endif()
endif ()
include(CheckCXXCompilerFlag) include (CheckCXXCompilerFlag)
include(FetchContent) include (FetchContent)
include(ExternalProject) include (ExternalProject)
include(CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
if (target) if (target)
message(FATAL_ERROR "The target option has been removed - use native cmake options to control build") message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif () endif ()
include(XrplSanity) include(XrplSanity)
include(XrplVersion) include(XrplVersion)
include(XrplSettings) include(XrplSettings)
# this check has to remain in the top-level cmake because of the early return statement # this check has to remain in the top-level cmake
# because of the early return statement
if (packages_only) if (packages_only)
if (NOT TARGET rpm) if (NOT TARGET rpm)
message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?") message (FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?")
endif () endif()
return() return ()
endif () endif ()
include(XrplCompiler) include(XrplCompiler)
include(XrplSanitizers)
include(XrplInterface) include(XrplInterface)
option(only_docs "Include only the docs target?" FALSE) option(only_docs "Include only the docs target?" FALSE)
include(XrplDocs) include(XrplDocs)
if (only_docs) if(only_docs)
return() return()
endif () endif()
###
include(deps/Boost) include(deps/Boost)
find_package(OpenSSL 1.1.1 REQUIRED)
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
add_subdirectory(external/antithesis-sdk) add_subdirectory(external/antithesis-sdk)
find_package(date REQUIRED)
find_package(ed25519 REQUIRED)
find_package(gRPC REQUIRED) find_package(gRPC REQUIRED)
find_package(LibArchive REQUIRED)
find_package(lz4 REQUIRED) find_package(lz4 REQUIRED)
find_package(nudb REQUIRED) # Target names with :: are not allowed in a generator expression.
find_package(OpenSSL REQUIRED) # We need to pull the include directories and imported location properties
find_package(secp256k1 REQUIRED) # from separate targets.
find_package(LibArchive REQUIRED)
find_package(SOCI REQUIRED) find_package(SOCI REQUIRED)
find_package(SQLite3 REQUIRED) find_package(SQLite3 REQUIRED)
find_package(xxHash REQUIRED)
target_link_libraries(
xrpl_libs
INTERFACE ed25519::ed25519
lz4::lz4
OpenSSL::Crypto
OpenSSL::SSL
secp256k1::secp256k1
soci::soci
SQLite::SQLite3)
option(rocksdb "Enable RocksDB" ON) option(rocksdb "Enable RocksDB" ON)
if (rocksdb) if(rocksdb)
find_package(RocksDB REQUIRED) find_package(RocksDB REQUIRED)
set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1) set_target_properties(RocksDB::rocksdb PROPERTIES
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb) INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
endif () )
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
endif()
find_package(date REQUIRED)
find_package(ed25519 REQUIRED)
find_package(nudb REQUIRED)
find_package(secp256k1 REQUIRED)
find_package(xxHash REQUIRED)
target_link_libraries(xrpl_libs INTERFACE
ed25519::ed25519
lz4::lz4
OpenSSL::Crypto
OpenSSL::SSL
secp256k1::secp256k1
soci::soci
SQLite::SQLite3
)
# Work around changes to Conan recipe for now. # Work around changes to Conan recipe for now.
if (TARGET nudb::core) if(TARGET nudb::core)
set(nudb nudb::core) set(nudb nudb::core)
elseif (TARGET NuDB::nudb) elseif(TARGET NuDB::nudb)
set(nudb NuDB::nudb) set(nudb NuDB::nudb)
else () else()
message(FATAL_ERROR "unknown nudb target") message(FATAL_ERROR "unknown nudb target")
endif () endif()
target_link_libraries(xrpl_libs INTERFACE ${nudb}) target_link_libraries(xrpl_libs INTERFACE ${nudb})
if (coverage) if(coverage)
include(XrplCov) include(XrplCov)
endif () endif()
set(PROJECT_EXPORT_SET XrplExports) set(PROJECT_EXPORT_SET XrplExports)
include(XrplCore) include(XrplCore)
include(XrplInstall) include(XrplInstall)
include(XrplValidatorKeys) include(XrplValidatorKeys)
if (tests) if(tests)
include(CTest) include(CTest)
add_subdirectory(src/tests/libxrpl) add_subdirectory(src/tests/libxrpl)
endif () endif()

View File

@@ -872,8 +872,7 @@ git push --delete upstream-push master-next
11. [Create a new release on 11. [Create a new release on
Github](https://github.com/XRPLF/rippled/releases). Be sure that Github](https://github.com/XRPLF/rippled/releases). Be sure that
"Set as the latest release" is checked. "Set as the latest release" is checked.
12. Open a PR to update the [API-CHANGELOG](API-CHANGELOG.md) and `API-VERSION-[n].md` with the changes for this release (if any are missing). 12. Finally [reverse merge the release into `develop`](#follow-up-reverse-merge).
13. Finally, [reverse merge the release into `develop`](#follow-up-reverse-merge).
#### Special cases: point releases, hotfixes, etc. #### Special cases: point releases, hotfixes, etc.

View File

@@ -78,61 +78,72 @@ To report a qualifying bug, please send a detailed report to:
| Email Address | bugs@ripple.com | | Email Address | bugs@ripple.com |
| :-----------: | :-------------------------------------------------- | | :-----------: | :-------------------------------------------------- |
| Short Key ID | `0xA9F514E0` | | Short Key ID | `0xC57929BE` |
| Long Key ID | `0xD900855AA9F514E0` | | Long Key ID | `0xCD49A0AFC57929BE` |
| Fingerprint | `B72C 0654 2F2A E250 2763 A268 D900 855A A9F5 14E0` | | Fingerprint | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE` |
The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is: The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
``` ```
-----BEGIN PGP PUBLIC KEY BLOCK----- -----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBGkSZAQBEACprU199OhgdsOsygNjiQV4msuN3vDOUooehL+NwfsGfW79Tbqq mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt
Q2u7uQ3NZjW+M2T4nsDwuhkr7pe7xSReR5W8ssaczvtUyxkvbMClilcgZ2OSCAuC kCpUYEDal0ygkKobu8SzOoATcDl18iCrScX39VpTm96vISFZMhmOryYCIp4QLJNN
N9tzJsqOqkwBvXoNXkn//T2jnPz0ZU2wSF+NrEibq5FeuyGdoX3yXXBxq9pW9HzK 4HKc2ZdBj6W4igNi6vj5Qo6JMyGpLY2mz4CZskbt0TNuUxWrGood+UrCzpY8x7/N
HkQll63QSl6BzVSGRQq+B6lGgaZGLwf3mzmIND9Z5VGLNK2jKynyz9z091whNG/M a93fcvNw+prgCr0rCH3hAPmAFfsOBbtGzNnmq7xf3jg5r4Z4sDiNIF1X1y53DAfV
kV+E7/r/bujHk7WIVId07G5/COTXmSr7kFnNEkd2Umw42dkgfiNKvlmJ9M7c1wLK rWDx49IKsuCEJfPMp1MnBSvDvLaQ2hKXs+cOpx1BCZgHn3skouEUxxgqbtTzBLt1
KbL9Eb4ADuW6rRc5k4s1e6GT8R4/VPliWbCl9SE32hXH8uTkqVIFZP2eyM5WRRHs xXpmuijsaltWngPnGO7mOAzbpZSdBm82/Emrk9bPMuD0QaLQjWr7HkTSUs6ZsKt4
aKzitkQG9UK9gcb0kdgUkxOvvgPHAe5IuZlcHFzU4y0dBbU1VEFWVpiLU0q+IuNw 7CLPdWqxyY/QVw9UaxeHEtWGQGMIQGgVJGh1fjtUr5O1sC9z9jXcQ0HuIHnRCTls
5BRemeHc59YNsngkmAZ+/9zouoShRusZmC8Wzotv75C2qVBcjijPvmjWAUz0Zunm GP7hklJmfH5V4SyAJQ06/hLuEhUJ7dn+BlqCsT0tLmYTgZYNzNcLHcqBFMEZHvHw
Lsr+O71vqHE73pERjD07wuD/ISjiYRYYE/bVrXtXLZijC7qAH4RE3nID+2ojcZyO 9GENMx/tDXgajKql4bJnzuTK0iGU/YepanANLd1JHECJ4jzTtmKOus9SOGlB2/l1
/2jMQvt7un56RsGH4UBHi3aBHi9bUoDGCXKiQY981cEuNaOxpou7Mh3x/ONzzSvk 0t0ADDYAS3eqOdOcUvo9ElSLCI5vSVHhShSte/n2FMWU+kMUboTUisEG8CgQnrng
sTV6nl1LOZHykN1JyKwaNbTSAiuyoN+7lOBqbV04DNYAHL88PrT21P83aQARAQAB g2CvvQvqDkeOtZeqMcC7HdiZS0q3LJUWtwA/ViwxrVlBDCxiTUXCotyBWwARAQAB
tB1SaXBwbGUgTGFicyA8YnVnc0ByaXBwbGUuY29tPokCTgQTAQgAOBYhBLcsBlQv tDBSaXBwbGUgTGFicyBCdWcgQm91bnR5IFByb2dyYW0gPGJ1Z3NAcmlwcGxlLmNv
KuJQJ2OiaNkAhVqp9RTgBQJpEmQEAhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheA bT6JAjcEEwEKACEFAlUwGHYCGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQ
AAoJENkAhVqp9RTgBzgP/i7y+aDWl1maig1XMdyb+o0UGusumFSW4Hmj278wlKVv zUmgr8V5Kb6R0g//SwY/mVJY59k87iL26/KayauSoOcz7xjcST26l4ZHVVX85gOY
usgLPihYgHE0PKrv6WRyKOMC1tQEcYYN93M+OeQ1vFhS2YyURq6RCMmh4zq/awXG HYZl8k0+m8X3zxeYm9a3QAoAml8sfoaFRFQP8ynnefRrLUPaZ2MjbJ0SACMwZNef
uZbG36OURB5NH8lGBOHiN/7O+nY0CgenBT2JWm+GW3nEOAVOVm4+r5GlpPlv+Dp1 T6o7Mi8LBAaiNZdYVyIfX1oM6YXtqYkuJdav6ZCyvVYqc9OvMJPY2ZzJYuI/ZtvQ
NPBThcKXFMnH73++NpSQoDzTfRYHPxhDAX3jkLi/moXfSanOLlR6l94XNNN0jBHW /lTndxCeg9ALNX/iezOLGdfMpf4HuIFVwcPPlwGi+HDlB9/bggDEHC8z434SXVFc
Quao0rzf4WSXq9g6AS224xhAA5JyIcFl8TX7hzj5HaFn3VWo3COoDu4U7H+BM0fl aQatXAPcDkjMUweU7y0CZtYEj00HITd4pSX6MqGiHrxlDZTqinCOPs1Ieqp7qufs
85yqiMQypp7EhN2gxpMMWaHY5TFM85U/bFXFYfEgihZ4/gt4uoIzsNI9jlX7mYvG MzlM6irLGucxj1+wa16ieyYvEtGaPIsksUKkywx0O7cf8N2qKg+eIkUk6O0Uc6eO
KFdDij+oTlRsuOxdIy60B3dKcwOH9nZZCz0SPsN/zlRWgKzK4gDKdGhFkU9OlvPu CszizmiXIXy4O6OiLlVHGKkXHMSW9Nwe9GE95O8G9WR8OZCEuDv+mHPAutO+IjdP
94ZqscanoiWKDoZkF96+sjgfjkuHsDK7Lwc1Xi+T4drHG/3aVpkYabXox+lrKB/S PDAAUvy+3XnkceO+HGWRpVvJZfFP2YH4A33InFL5yqlJmSoR/yVingGLxk55bZDM
yxZjeqOIQzWPhnLgCaLyvsKo5hxKzL0w3eURu8F3IS7RgOOlljv4M+Me9sEVcdNV +HYGR3VeMb8Xj1rf/02qERsZyccMCFdAvKDbTwmvglyHdVLu5sPmktxbBYiemfyJ
aN3/tQwbaomSX1X5D5YXqhBwC3rU3wXwamsscRTGEpkV+JCX6KUqGP7nWmxCpAly qxMxmYXCc9S0hWrWZW7edktBa9NpE58z1mx+hRIrDNbS2sDHrib9PULYCySyVYcF
FL05XuOd5SVHJjXLeuje0JqLUpN514uL+bThWwDbDTdAdwW3oK/2WbXz7IfJRLBj P+PWEe1CAS5jqkR2ker5td2/pHNnJIycynBEs7l6zbc9fu+nktFJz0q2B+GJAhwE
uQINBGkSZAQBEADdI3SL2F72qkrgFqXWE6HSRBu9bsAvTE5QrRPWk7ux6at537r4 EAEKAAYFAlUwGaQACgkQ+tiY1qQ2QkjMFw//f2hNY3BPNe+1qbhzumMDCnbTnGif
S4sIw2dOwLvbyIrDgKNq3LQ5wCK88NO/NeCOFm4AiCJSl3pJHXYnTDoUxTrrxx+o kLuAGl9OKt81VHG1f6RnaGiLpR696+6Ja45KzH15cQ5JJl5Bgs1YkR/noTGX8IAD
vSRI4I3fHEql/MqzgiAb0YUezjgFdh3vYheMPp/309PFbOLhiFqEcx80Mx5h06UH c70eNwiFu8JXTaaeeJrsmFkF9Tueufb364risYkvPP8tNUD3InBFEZT3WN7JKwix
gDzu1qNj3Ec+31NLic5zwkrAkvFvD54d6bqYR3SEgMau6aYEewpGHbWBi2pLqSi2 coD4/BwekUwOZVDd/uCFEyhlhZsROxdKNisNo3VtAq2s+3tIBAmTrriFUl0K+ZC5
lQcAeOFixqGpTwDmAnYR8YtjBYepy0MojEAdTHcQQlOYSDk4q4elG+io2N8vECfU zgavcpnPN57zMtW9aK+VO3wXqAKYLYmtgxkVzSLUZt2M7JuwOaAdyuYWAneKZPCu
rD6ORecN48GXdZINYWTAdslrUeanmBdgQrYkSpce8TSghgT9P01SNaXxmyaehVUO 1AXkmyo+d84sd5mZaKOr5xArAFiNMWPUcZL4rkS1Fq4dKtGAqzzR7a7hWtA5o27T
lqI4pcg5G2oojAE8ncNS3TwDtt7daTaTC3bAdr4PXDVAzNAiewjMNZPB7xidkDGQ 6vynuxZ1n0PPh0er2O/zF4znIjm5RhTlfjp/VmhZdQfpulFEQ/dMxxGkQ9z5IYbX
Y4W1LxTMXyJVWxehYOH7tsbBRKninlfRnLgYzmtIbNRAAvNcsxU6ihv3AV0WFknN mTlSDbCSb+FMsanRBJ7Drp5EmBIudVGY6SHI5Re1RQiEh7GoDfUMUwZO+TVDII5R
YbSzotEv1Xq/5wk309x8zCDe+sP0cQicvbXafXmUzPAZzeqFg+VLFn7F9MP1WGlW Ra7WyuimYleJgDo/+7HyfuIyGDaUCVj6pwVtYtYIdOI3tTw1R1Mr0V8yaNVnJghL
B1u7VIvBF1Mp9Nd3EAGBAoLRdRu+0dVWIjPTQuPIuD9cCatJA0wVaKUrjYbBMl88 CHcEJQL+YHSmiMM3ySil3O6tm1By6lFz8bVe/rgG/5uklQrnjMR37jYboi1orCC4
a12LixNVGeSFS9N7ADHx0/o7GNT6l88YbaLP6zggUHpUD/bR+cDN7vllIQARAQAB yeIoQeV0ItlxeTyBwYIV/o1DBNxDevTZvJabC93WiGLw2XFjpZ0q/9+zI2rJUZJh
iQI2BBgBCAAgFiEEtywGVC8q4lAnY6Jo2QCFWqn1FOAFAmkSZAQCGwwACgkQ2QCF qxmKP+D4e27lCI65Ag0EVTAYdgEQAMvttYNqeRNBRpSX8fk45WVIV8Fb21fWdwk6
Wqn1FOAfAA/8CYq4p0p4bobY20CKEMsZrkBTFJyPDqzFwMeTjgpzqbD7Y3Qq5QCK 2SkZnJURbiC0LxQnOi7wrtii7DeFZtwM2kFHihS1VHekBnIKKZQSgGoKuFAQMGyu
OBbvY02GWdiIsNOzKdBxiuam2xYP9WHZj4y7/uWEvT0qlPVmDFu+HXjoJ43oxwFd a426H4ZsSmA9Ufd7kRbvdtEcp7/RTAanhrSL4lkBhaKJrXlxBJ27o3nd7/rh7r3a
CUp2gMuQ4cSL3X94VRJ3BkVL+tgBm8CNY0vnTLLOO3kum/R69VsGJS1JSGUWjNM+ OszbPY6DJ5bWClX3KooPTDl/RF2lHn+fweFk58UvuunHIyo4BWJUdilSXIjLun+P
4qwS3mz+73xJu1HmERyN2RZF/DGIZI2PyONQQ6aH85G1Dd2ohu2/DBAkQAMBrPbj Qaik4ZAsZVwNhdNz05d+vtai4AwbYoO7adboMLRkYaXSQwGytkm+fM6r7OpXHYuS
FrbDaBLyFhODxU3kTWqnfLlaElSm2EGdIU2yx7n4BggEa//NZRMm5kyeo4vzhtlQ cR4zB/OK5hxCVEpWfiwN71N2NMvnEMaWd/9uhqxJzyvYgkVUXV9274TUe16pzXnW
YIVUMLAOLZvnEqDnsLKp+22FzNR/O+htBQC4lPywl53oYSALdhz1IQlcAC1ru5KR ZLfmitjwc91e7mJBBfKNenDdhaLEIlDRwKTLj7k58f9srpMnyZFacntu5pUMNblB
XPzhIXV6IIzkcx9xNkEclZxmsuy5ERXyKEmLbIHAlzFmnrldlt2ZgXDtzaorLmxj cjXwWxz5ZaQikLnKYhIvrIEwtWPyjqOzNXNvYfZamve/LJ8HmWGCKao3QHoAIDvB
klKibxd5tF50qOpOivz+oPtFo7n+HmFa1nlVAMxlDCUdM0pEVeYDKI5zfVwalyhZ 9XBxrDyTJDpxbog6Qu4SY8AdgVlan6c/PsLDc7EUegeYiNTzsOK+eq3G5/E92eIu
NnjpakdZSXMwgc7NP/hH9buF35hKDp7EckT2y3JNYwHsDdy1icXN2q40XZw5tSIn TsUXlciypFcRm1q8vLRr+HYYe2mJDo4GetB1zLkAFBcYJm/x9iJQbu0hn5NxJvZO
zkPWdu3OUY8PISohN6Pw4h0RH4ZmoX97E8sEfmdKaT58U4Hf2aAv5r9IWCSrAVqY R0Y5nOJQdyi+muJzKYwhkuzaOlswzqVXkq/7+QCjg7QsycdcwDjiQh3OrsgXHrwl
u5jvac29CzQR9Kal0A+8phHAXHNFD83SwzIC0syaT9ficAguwGH8X6Q= M7gyafL9ABEBAAGJAh8EGAEKAAkFAlUwGHYCGwwACgkQzUmgr8V5Kb50BxAAhj9T
=nGuD TwmNrgRldTHszj+Qc+v8RWqV6j+R+zc0cn5XlUa6XFaXI1OFFg71H4dhCPEiYeN0
IrnocyMNvCol+eKIlPKbPTmoixjQ4udPTR1DC1Bx1MyW5FqOrsgBl5t0e1VwEViM
NspSStxu5Hsr6oWz2GD48lXZWJOgoL1RLs+uxjcyjySD/em2fOKASwchYmI+ezRv
plfhAFIMKTSCN2pgVTEOaaz13M0U+MoprThqF1LWzkGkkC7n/1V1f5tn83BWiagG
2N2Q4tHLfyouzMUKnX28kQ9sXfxwmYb2sA9FNIgxy+TdKU2ofLxivoWT8zS189z/
Yj9fErmiMjns2FzEDX+bipAw55X4D/RsaFgC+2x2PDbxeQh6JalRA2Wjq32Ouubx
u+I4QhEDJIcVwt9x6LPDuos1F+M5QW0AiUhKrZJ17UrxOtaquh/nPUL9T3l2qPUn
1ChrZEEEhHO6vA8+jn0+cV9n5xEz30Str9iHnDQ5QyR5LyV4UBPgTdWyQzNVKA69
KsSr9lbHEtQFRzGuBKwt6UlSFv9vPWWJkJit5XDKAlcKuGXj0J8OlltToocGElkF
+gEBZfoOWi/IBjRLrFW2cT3p36DTR5O1Ud/1DLnWRqgWNBLrbs2/KMKE6EnHttyD
7Tz8SQkuxltX/yBXMV3Ddy0t6nWV2SZEfuxJAQI=
=spg4
-----END PGP PUBLIC KEY BLOCK----- -----END PGP PUBLIC KEY BLOCK-----
``` ```

View File

@@ -1,29 +1,30 @@
macro (exclude_from_default target_) macro (exclude_from_default target_)
set_target_properties(${target_} PROPERTIES EXCLUDE_FROM_ALL ON) set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties(${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON) set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
endmacro () endmacro ()
macro (exclude_if_included target_) macro (exclude_if_included target_)
get_directory_property(has_parent PARENT_DIRECTORY) get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent) if (has_parent)
exclude_from_default(${target_}) exclude_from_default (${target_})
endif () endif ()
endmacro () endmacro ()
find_package(Git) find_package(Git)
function (git_branch branch_val) function (git_branch branch_val)
if (NOT GIT_FOUND) if (NOT GIT_FOUND)
return() return ()
endif () endif ()
set(_branch "") set (_branch "")
execute_process(COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD" execute_process (COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE _git_exit_code RESULT_VARIABLE _git_exit_code
OUTPUT_VARIABLE _temp_branch OUTPUT_VARIABLE _temp_branch
OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET) OUTPUT_STRIP_TRAILING_WHITESPACE
if (_git_exit_code EQUAL 0) ERROR_QUIET)
set(_branch ${_temp_branch}) if (_git_exit_code EQUAL 0)
endif () set (_branch ${_temp_branch})
set(${branch_val} "${_branch}" PARENT_SCOPE) endif ()
set (${branch_val} "${_branch}" PARENT_SCOPE)
endfunction () endfunction ()

View File

@@ -15,17 +15,18 @@ endif ()
# https://github.com/ccache/ccache/wiki/MS-Visual-Studio#usage-with-cmake. # https://github.com/ccache/ccache/wiki/MS-Visual-Studio#usage-with-cmake.
if ("${CCACHE_PATH}" MATCHES "chocolatey") if ("${CCACHE_PATH}" MATCHES "chocolatey")
message(DEBUG "Ccache path: ${CCACHE_PATH}") message(DEBUG "Ccache path: ${CCACHE_PATH}")
# Chocolatey uses a shim executable that we cannot use directly, in which case we have to find the executable it # Chocolatey uses a shim executable that we cannot use directly, in which
# points to. If we cannot find the target executable then we cannot use ccache. # case we have to find the executable it points to. If we cannot find the
# target executable then we cannot use ccache.
find_program(BASH_PATH "bash") find_program(BASH_PATH "bash")
if (NOT BASH_PATH) if (NOT BASH_PATH)
message(WARNING "Could not find bash.") message(WARNING "Could not find bash.")
return() return()
endif () endif ()
execute_process(COMMAND bash -c execute_process(
"export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1" COMMAND bash -c "export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1"
OUTPUT_VARIABLE CCACHE_PATH) OUTPUT_VARIABLE CCACHE_PATH)
if (NOT CCACHE_PATH) if (NOT CCACHE_PATH)
message(WARNING "Could not find ccache target.") message(WARNING "Could not find ccache target.")
@@ -36,14 +37,15 @@ endif ()
message(STATUS "Found ccache: ${CCACHE_PATH}") message(STATUS "Found ccache: ${CCACHE_PATH}")
# Tell cmake to use ccache for compiling with Visual Studio. # Tell cmake to use ccache for compiling with Visual Studio.
file(COPY_FILE ${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe ONLY_IF_DIFFERENT) file(COPY_FILE
set(CMAKE_VS_GLOBALS "CLToolExe=cl.exe" "CLToolPath=${CMAKE_BINARY_DIR}" "TrackFileAccess=false" ${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe
"UseMultiToolTask=true") ONLY_IF_DIFFERENT)
set(CMAKE_VS_GLOBALS
"CLToolExe=cl.exe"
"CLToolPath=${CMAKE_BINARY_DIR}"
"TrackFileAccess=false"
"UseMultiToolTask=true")
# By default Visual Studio generators will use /Zi to capture debug information, which is not compatible with ccache, so # By default Visual Studio generators will use /Zi, which is not compatible with
# tell it to use /Z7 instead. # ccache, so tell it to use /Z7 instead.
if (MSVC) set(CMAKE_MSVC_DEBUG_INFORMATION_FORMAT "$<$<CONFIG:Debug,RelWithDebInfo>:Embedded>")
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
string(REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
endforeach ()
endif ()

View File

@@ -172,47 +172,51 @@ include(CMakeParseArguments)
option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE) option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)
# Check prereqs # Check prereqs
find_program(GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test) find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)
if (DEFINED CODE_COVERAGE_GCOV_TOOL) if(DEFINED CODE_COVERAGE_GCOV_TOOL)
set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}") set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif (DEFINED ENV{CODE_COVERAGE_GCOV_TOOL}) elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}") set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if (APPLE) if(APPLE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH OUTPUT_STRIP_TRAILING_WHITESPACE) execute_process( COMMAND xcrun -f llvm-cov
else () OUTPUT_VARIABLE LLVMCOV_PATH
find_program(LLVMCOV_PATH llvm-cov) OUTPUT_STRIP_TRAILING_WHITESPACE
endif () )
if (LLVMCOV_PATH) else()
set(GCOV_TOOL "${LLVMCOV_PATH} gcov") find_program( LLVMCOV_PATH llvm-cov )
endif () endif()
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU") if(LLVMCOV_PATH)
find_program(GCOV_PATH gcov) set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
set(GCOV_TOOL "${GCOV_PATH}") endif()
endif () elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
find_program( GCOV_PATH gcov )
set(GCOV_TOOL "${GCOV_PATH}")
endif()
# Check supported compiler (Clang, GNU and Flang) # Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach (LANG ${LANGUAGES}) foreach(LANG ${LANGUAGES})
if ("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if ("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3) if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...") message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif () endif()
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
"(LLVM)?[Ff]lang") AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...") message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif () endif()
endforeach () endforeach()
set(COVERAGE_COMPILER_FLAGS "-g --coverage" CACHE INTERNAL "") set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")
set(COVERAGE_CXX_COMPILER_FLAGS "") set(COVERAGE_CXX_COMPILER_FLAGS "")
set(COVERAGE_C_COMPILER_FLAGS "") set(COVERAGE_C_COMPILER_FLAGS "")
set(COVERAGE_CXX_LINKER_FLAGS "") set(COVERAGE_CXX_LINKER_FLAGS "")
set(COVERAGE_C_LINKER_FLAGS "") set(COVERAGE_C_LINKER_FLAGS "")
if (CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)") if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag) include(CheckCXXCompilerFlag)
include(CheckCCompilerFlag) include(CheckCCompilerFlag)
include(CheckLinkerFlag) include(CheckLinkerFlag)
@@ -223,51 +227,51 @@ if (CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS}) set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path) check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if (HAVE_cxx_fprofile_abs_path) if(HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path") set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path")
endif () endif()
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path) check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if (HAVE_c_fprofile_abs_path) if(HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path") set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path")
endif () endif()
check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path) check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path)
if (HAVE_cxx_linker_fprofile_abs_path) if(HAVE_cxx_linker_fprofile_abs_path)
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path") set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path")
endif () endif()
check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path) check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path)
if (HAVE_c_linker_fprofile_abs_path) if(HAVE_c_linker_fprofile_abs_path)
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path") set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path")
endif () endif()
check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update) check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
if (HAVE_cxx_fprofile_update) if(HAVE_cxx_fprofile_update)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic") set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic")
endif () endif()
check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update) check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
if (HAVE_c_fprofile_update) if(HAVE_c_fprofile_update)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic") set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic")
endif () endif()
check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update) check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update)
if (HAVE_cxx_linker_fprofile_update) if(HAVE_cxx_linker_fprofile_update)
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic") set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic")
endif () endif()
check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update) check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update)
if (HAVE_c_linker_fprofile_update) if(HAVE_c_linker_fprofile_update)
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic") set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic")
endif () endif()
endif () endif()
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if (NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)) if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading") message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif () # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG) endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
# Defines a target for running and collection code coverage information # Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports. # Builds dependencies, runs the given executable and outputs reports.
@@ -291,186 +295,193 @@ endif () # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
# ) # )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the # The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# GCVOR command. # GCVOR command.
function (setup_target_for_coverage_gcovr) function(setup_target_for_coverage_gcovr)
set(options NONE) set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME FORMAT) set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES) set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if (NOT GCOV_TOOL) if(NOT GCOV_TOOL)
message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...") message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
endif () endif()
if (NOT GCOVR_PATH) if(NOT GCOVR_PATH)
message(FATAL_ERROR "Could not find gcovr tool! Aborting...") message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
endif () endif()
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if (DEFINED Coverage_BASE_DIRECTORY) if(DEFINED Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else () else()
set(BASEDIR ${PROJECT_SOURCE_DIR}) set(BASEDIR ${PROJECT_SOURCE_DIR})
endif () endif()
if (NOT DEFINED Coverage_FORMAT) if(NOT DEFINED Coverage_FORMAT)
set(Coverage_FORMAT xml) set(Coverage_FORMAT xml)
endif () endif()
if (NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS) if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS)
message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set") message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set")
endif () endif()
if ("--output" IN_LIST GCOVR_ADDITIONAL_ARGS) if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...") message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else () else()
if ((Coverage_FORMAT STREQUAL "html-details") OR (Coverage_FORMAT STREQUAL "html-nested")) if((Coverage_FORMAT STREQUAL "html-details")
OR (Coverage_FORMAT STREQUAL "html-nested"))
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html) set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME}) set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif (Coverage_FORMAT STREQUAL "html-single") elseif(Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL "json-details") elseif((Coverage_FORMAT STREQUAL "json-summary")
OR (Coverage_FORMAT STREQUAL "coveralls")) OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls"))
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif (Coverage_FORMAT STREQUAL "txt") elseif(Coverage_FORMAT STREQUAL "txt")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
elseif (Coverage_FORMAT STREQUAL "csv") elseif(Coverage_FORMAT STREQUAL "csv")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
elseif (Coverage_FORMAT STREQUAL "lcov") elseif(Coverage_FORMAT STREQUAL "lcov")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.lcov) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.lcov)
else () else()
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml) set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
endif () endif()
endif () endif()
if ((Coverage_FORMAT STREQUAL "cobertura") OR (Coverage_FORMAT STREQUAL "xml")) if((Coverage_FORMAT STREQUAL "cobertura")
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}") OR (Coverage_FORMAT STREQUAL "xml"))
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
set(Coverage_FORMAT cobertura) # overwrite xml set(Coverage_FORMAT cobertura) # overwrite xml
elseif (Coverage_FORMAT STREQUAL "sonarqube") elseif(Coverage_FORMAT STREQUAL "sonarqube")
list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
elseif (Coverage_FORMAT STREQUAL "jacoco") elseif(Coverage_FORMAT STREQUAL "jacoco")
list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty )
elseif (Coverage_FORMAT STREQUAL "clover") elseif(Coverage_FORMAT STREQUAL "clover")
list(APPEND GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty )
elseif (Coverage_FORMAT STREQUAL "lcov") elseif(Coverage_FORMAT STREQUAL "lcov")
list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}" )
elseif (Coverage_FORMAT STREQUAL "json-summary") elseif(Coverage_FORMAT STREQUAL "json-summary")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
elseif (Coverage_FORMAT STREQUAL "json-details") elseif(Coverage_FORMAT STREQUAL "json-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
elseif (Coverage_FORMAT STREQUAL "coveralls") elseif(Coverage_FORMAT STREQUAL "coveralls")
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty) list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
elseif (Coverage_FORMAT STREQUAL "csv") elseif(Coverage_FORMAT STREQUAL "csv")
list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
elseif (Coverage_FORMAT STREQUAL "txt") elseif(Coverage_FORMAT STREQUAL "txt")
list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
elseif (Coverage_FORMAT STREQUAL "html-single") elseif(Coverage_FORMAT STREQUAL "html-single")
list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained) list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
elseif (Coverage_FORMAT STREQUAL "html-nested") elseif(Coverage_FORMAT STREQUAL "html-nested")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
elseif (Coverage_FORMAT STREQUAL "html-details") elseif(Coverage_FORMAT STREQUAL "html-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}") list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
else () else()
message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...") message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
endif () endif()
# Collect excludes (CMake 3.4+: Also compute absolute paths) # Collect excludes (CMake 3.4+: Also compute absolute paths)
set(GCOVR_EXCLUDES "") set(GCOVR_EXCLUDES "")
foreach (EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES}) foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
if (CMAKE_VERSION VERSION_GREATER 3.4) if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
endif () endif()
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}") list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
endforeach () endforeach()
list(REMOVE_DUPLICATES GCOVR_EXCLUDES) list(REMOVE_DUPLICATES GCOVR_EXCLUDES)
# Combine excludes to several -e arguments # Combine excludes to several -e arguments
set(GCOVR_EXCLUDE_ARGS "") set(GCOVR_EXCLUDE_ARGS "")
foreach (EXCLUDE ${GCOVR_EXCLUDES}) foreach(EXCLUDE ${GCOVR_EXCLUDES})
list(APPEND GCOVR_EXCLUDE_ARGS "-e") list(APPEND GCOVR_EXCLUDE_ARGS "-e")
list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}") list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
endforeach () endforeach()
# Set up commands which will be run to generate coverage data # Set up commands which will be run to generate coverage data
# If EXECUTABLE is not set, the user is expected to run the tests manually # If EXECUTABLE is not set, the user is expected to run the tests manually
# before running the coverage target NAME # before running the coverage target NAME
if (DEFINED Coverage_EXECUTABLE) if(DEFINED Coverage_EXECUTABLE)
set(GCOVR_EXEC_TESTS_CMD ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}) set(GCOVR_EXEC_TESTS_CMD
endif () ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
endif()
# Create folder # Create folder
if (DEFINED GCOVR_CREATE_FOLDER) if(DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER}) set(GCOVR_FOLDER_CMD
endif () ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
endif()
# Running gcovr # Running gcovr
set(GCOVR_CMD set(GCOVR_CMD
${GCOVR_PATH} ${GCOVR_PATH}
--gcov-executable --gcov-executable ${GCOV_TOOL}
${GCOV_TOOL}
--gcov-ignore-parse-errors=negative_hits.warn_once_per_file --gcov-ignore-parse-errors=negative_hits.warn_once_per_file
-r -r ${BASEDIR}
${BASEDIR}
${GCOVR_ADDITIONAL_ARGS} ${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS} ${GCOVR_EXCLUDE_ARGS}
--object-directory=${PROJECT_BINARY_DIR}) --object-directory=${PROJECT_BINARY_DIR}
)
if (CODE_COVERAGE_VERBOSE) if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report") message(STATUS "Executed command report")
if (NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "") if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "")
message(STATUS "Command to run tests: ") message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}") string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}") message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
endif () endif()
if (NOT "${GCOVR_FOLDER_CMD}" STREQUAL "") if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "")
message(STATUS "Command to create a folder: ") message(STATUS "Command to create a folder: ")
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}") string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}") message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
endif () endif()
message(STATUS "Command to generate gcovr coverage data: ") message(STATUS "Command to generate gcovr coverage data: ")
string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}") string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
message(STATUS "${GCOVR_CMD_SPACED}") message(STATUS "${GCOVR_CMD_SPACED}")
endif () endif()
add_custom_target(${Coverage_NAME} add_custom_target(${Coverage_NAME}
COMMAND ${GCOVR_EXEC_TESTS_CMD} COMMAND ${GCOVR_EXEC_TESTS_CMD}
COMMAND ${GCOVR_FOLDER_CMD} COMMAND ${GCOVR_FOLDER_CMD}
COMMAND ${GCOVR_CMD} COMMAND ${GCOVR_CMD}
BYPRODUCTS ${GCOVR_OUTPUT_FILE}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR} BYPRODUCTS ${GCOVR_OUTPUT_FILE}
DEPENDS ${Coverage_DEPENDENCIES} WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
VERBATIM # Protect arguments to commands DEPENDS ${Coverage_DEPENDENCIES}
COMMENT "Running gcovr to produce code coverage report.") VERBATIM # Protect arguments to commands
COMMENT "Running gcovr to produce code coverage report."
)
# Show info where to find the report # Show info where to find the report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD COMMAND echo add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}") COMMAND echo
endfunction () # setup_target_for_coverage_gcovr COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
endfunction() # setup_target_for_coverage_gcovr
function (add_code_coverage_to_target name scope) function(add_code_coverage_to_target name scope)
separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}") separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}")
separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}") separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}")
separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}") separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}")
separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}") separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")
# Add compiler options to the target # Add compiler options to the target
target_compile_options(${name} ${scope} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}> target_compile_options(${name} ${scope}
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>) $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
target_link_libraries( target_link_libraries (${name} ${scope}
${name} $<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} gcov>
${scope} $<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS} gcov>
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} )
gcov> endfunction() # add_code_coverage_to_target
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS}
gcov>)
endfunction () # add_code_coverage_to_target

View File

@@ -1,58 +0,0 @@
# Shared detection of compiler, operating system, and architecture.
#
# This module centralizes environment detection so that other CMake modules can use the same variables instead of
# repeating checks on CMAKE_* and built-in platform variables.
# Only run once per configure step.
include_guard(GLOBAL)
# --------------------------------------------------------------------
# Compiler detection (C++)
# --------------------------------------------------------------------
set(is_clang FALSE)
set(is_gcc FALSE)
set(is_msvc FALSE)
set(is_xcode FALSE)
if (CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") # Clang or AppleClang
set(is_clang TRUE)
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set(is_gcc TRUE)
elseif (MSVC)
set(is_msvc TRUE)
else ()
message(FATAL_ERROR "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}")
endif ()
# Xcode generator detection
if (CMAKE_GENERATOR STREQUAL "Xcode")
set(is_xcode TRUE)
endif ()
# --------------------------------------------------------------------
# Operating system detection
# --------------------------------------------------------------------
set(is_linux FALSE)
set(is_windows FALSE)
set(is_macos FALSE)
if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
set(is_linux TRUE)
elseif (CMAKE_SYSTEM_NAME STREQUAL "Windows")
set(is_windows TRUE)
elseif (CMAKE_SYSTEM_NAME STREQUAL "Darwin")
set(is_macos TRUE)
endif ()
# --------------------------------------------------------------------
# Architecture
# --------------------------------------------------------------------
set(is_amd64 FALSE)
set(is_arm64 FALSE)
if (CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64")
set(is_amd64 TRUE)
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64")
set(is_arm64 TRUE)
else ()
message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif ()

View File

@@ -1,13 +1,25 @@
include(isolate_headers) include(isolate_headers)
function (xrpl_add_test name) function(xrpl_add_test name)
set(target ${PROJECT_NAME}.test.${name}) set(target ${PROJECT_NAME}.test.${name})
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp" file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp") "${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
add_executable(${target} ${ARGN} ${sources}) "${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
)
add_executable(${target} ${ARGN} ${sources})
isolate_headers(${target} "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests/${name}" PRIVATE) isolate_headers(
${target}
"${CMAKE_SOURCE_DIR}"
"${CMAKE_SOURCE_DIR}/tests/${name}"
PRIVATE
)
add_test(NAME ${target} COMMAND ${target}) # Make sure the test isn't optimized away in unity builds
endfunction () set_target_properties(${target} PROPERTIES
UNITY_BUILD_MODE GROUP
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
add_test(NAME ${target} COMMAND ${target})
endfunction()

View File

@@ -2,164 +2,154 @@
setup project-wide compiler settings setup project-wide compiler settings
#]===================================================================] #]===================================================================]
include(CompilationEnv)
#[=========================================================[ #[=========================================================[
TODO some/most of these common settings belong in a TODO some/most of these common settings belong in a
toolchain file, especially the ABI-impacting ones toolchain file, especially the ABI-impacting ones
#]=========================================================] #]=========================================================]
add_library(common INTERFACE) add_library (common INTERFACE)
add_library(Xrpl::common ALIAS common) add_library (Xrpl::common ALIAS common)
include(XrplSanitizers)
# add a single global dependency on this interface lib # add a single global dependency on this interface lib
link_libraries(Xrpl::common) link_libraries (Xrpl::common)
# Respect CMAKE_POSITION_INDEPENDENT_CODE setting (may be set by Conan toolchain) set_target_properties (common
if (NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE) PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ON)
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
endif ()
set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE})
set(CMAKE_CXX_EXTENSIONS OFF) set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions( target_compile_definitions (common
common INTERFACE
INTERFACE $<$<CONFIG:Debug>:DEBUG $<$<CONFIG:Debug>:DEBUG _DEBUG>
_DEBUG> #[===[
#[===[
NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
explicitly except for the special case of (profile ON) and (assert OFF). explicitly except for the special case of (profile ON) and (assert OFF).
Presumably this is because we don't want profile builds asserting unless Presumably this is because we don't want profile builds asserting unless
asserts were specifically requested. asserts were specifically requested.
]===] ]===]
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG> $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x. # TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
OPENSSL_SUPPRESS_DEPRECATED) OPENSSL_SUPPRESS_DEPRECATED
)
if (MSVC) if (MSVC)
# remove existing exception flag since we set it to -EHa # remove existing exception flag since we set it to -EHa
string(REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") string (REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE) foreach (var_
CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
# also remove dynamic runtime # also remove dynamic runtime
string(REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}") string (REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")
# /ZI (Edit & Continue debugging information) is incompatible with Gy- # /ZI (Edit & Continue debugging information) is incompatible with Gy-
string(REPLACE "/ZI" "/Zi" ${var_} "${${var_}}") string (REPLACE "/ZI" "/Zi" ${var_} "${${var_}}")
# omit debug info completely under CI (not needed) # omit debug info completely under CI (not needed)
if (is_ci) if (is_ci)
string(REPLACE "/Zi" " " ${var_} "${${var_}}") string (REPLACE "/Zi" " " ${var_} "${${var_}}")
string(REPLACE "/Z7" " " ${var_} "${${var_}}") endif ()
endif () endforeach ()
endforeach ()
target_compile_options( target_compile_options (common
common INTERFACE
INTERFACE # Increase object file max size -bigobj # Increase object file max size
-bigobj -fp:precise # Floating point behavior
# Floating point behavior -Gd # __cdecl calling convention
-fp:precise -Gm- # Minimal rebuild: disabled
# __cdecl calling convention -Gy- # Function level linking: disabled
-Gd -MP # Multiprocessor compilation
# Minimal rebuild: disabled -openmp- # pragma omp: disabled
-Gm- -errorReport:none # No error reporting to Internet
# Function level linking: disabled -nologo # Suppress login banner
-Gy- -wd4018 # Disable signed/unsigned comparison warnings
# Multiprocessor compilation -wd4244 # Disable float to int possible loss of data warnings
-MP -wd4267 # Disable size_t to T possible loss of data warnings
# pragma omp: disabled -wd4800 # Disable C4800(int to bool performance)
-openmp- -wd4503 # Decorated name length exceeded, name was truncated
# No error reporting to Internet $<$<COMPILE_LANGUAGE:CXX>:
-errorReport:none -EHa
# Suppress login banner -GR
-nologo >
# Disable signed/unsigned comparison warnings $<$<CONFIG:Release>:-Ox>
-wd4018 $<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:
# Disable float to int possible loss of data warnings -GS
-wd4244 -Zc:forScope
# Disable size_t to T possible loss of data warnings >
-wd4267 # static runtime
# Disable C4800(int to bool performance) $<$<CONFIG:Debug>:-MTd>
-wd4800 $<$<NOT:$<CONFIG:Debug>>:-MT>
# Decorated name length exceeded, name was truncated $<$<BOOL:${werr}>:-WX>
-wd4503 )
$<$<COMPILE_LANGUAGE:CXX>: target_compile_definitions (common
-EHa INTERFACE
-GR _WIN32_WINNT=0x6000
> _SCL_SECURE_NO_WARNINGS
$<$<CONFIG:Release>:-Ox> _CRT_SECURE_NO_WARNINGS
$<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>: WIN32_CONSOLE
-GS WIN32_LEAN_AND_MEAN
-Zc:forScope NOMINMAX
> # TODO: Resolve these warnings, don't just silence them
# static runtime _SILENCE_ALL_CXX17_DEPRECATION_WARNINGS
$<$<CONFIG:Debug>:-MTd> $<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:_CRTDBG_MAP_ALLOC>)
$<$<NOT:$<CONFIG:Debug>>:-MT> target_link_libraries (common
$<$<BOOL:${werr}>:-WX>) INTERFACE
target_compile_definitions( -errorreport:none
common -machine:X64)
INTERFACE _WIN32_WINNT=0x6000
_SCL_SECURE_NO_WARNINGS
_CRT_SECURE_NO_WARNINGS
WIN32_CONSOLE
WIN32_LEAN_AND_MEAN
NOMINMAX
# TODO: Resolve these warnings, don't just silence them
_SILENCE_ALL_CXX17_DEPRECATION_WARNINGS
$<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:_CRTDBG_MAP_ALLOC>)
target_link_libraries(common INTERFACE -errorreport:none -machine:X64)
else () else ()
target_compile_options( target_compile_options (common
common INTERFACE
INTERFACE -Wall -Wall
-Wdeprecated -Wdeprecated
$<$<BOOL:${is_clang}>:-Wno-deprecated-declarations> $<$<BOOL:${is_clang}>:-Wno-deprecated-declarations>
$<$<BOOL:${wextra}>:-Wextra $<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
-Wno-unused-parameter> $<$<BOOL:${werr}>:-Werror>
$<$<BOOL:${werr}>:-Werror> -fstack-protector
-fstack-protector -Wno-sign-compare
-Wno-sign-compare -Wno-unused-but-set-variable
-Wno-unused-but-set-variable $<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing> # tweak gcc optimization for debug
# tweak gcc optimization for debug $<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0> # Add debug symbols to release config
# Add debug symbols to release config $<$<CONFIG:Release>:-g>)
$<$<CONFIG:Release>:-g>) target_link_libraries (common
target_link_libraries( INTERFACE
common -rdynamic
INTERFACE -rdynamic $<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id>
$<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id> # link to static libc/c++ iff:
# link to static libc/c++ iff: * static option set and * NOT APPLE (AppleClang does not support static # * static option set and
# libc/c++) and * NOT SANITIZERS (sanitizers typically don't work with static libc/c++) # * NOT APPLE (AppleClang does not support static libc/c++) and
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${SANITIZERS_ENABLED}>>>: # * NOT san (sanitizers typically don't work with static libc/c++)
-static-libstdc++ $<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${san}>>>:
-static-libgcc -static-libstdc++
>) -static-libgcc
>)
endif () endif ()
# Antithesis instrumentation will only be built and deployed using machines running Linux. # Antithesis instrumentation will only be built and deployed using machines running Linux.
if (voidstar) if (voidstar)
if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug") if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...") message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...")
elseif (NOT is_linux) elseif (NOT is_linux)
message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...") message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0)) elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...") message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
endif () endif ()
endif () endif ()
if (use_mold) if (use_mold)
# use mold linker if available # use mold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) execute_process (
if ("${LD_VERSION}" MATCHES "mold") COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version
target_link_libraries(common INTERFACE -fuse-ld=mold) ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
endif () if ("${LD_VERSION}" MATCHES "mold")
unset(LD_VERSION) target_link_libraries (common INTERFACE -fuse-ld=mold)
endif ()
unset (LD_VERSION)
elseif (use_gold AND is_gcc) elseif (use_gold AND is_gcc)
# use gold linker if available # use gold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
#[=========================================================[ #[=========================================================[
NOTE: THE gold linker inserts -rpath as DT_RUNPATH by NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
default instead of DT_RPATH, so you might have slightly default intead of DT_RPATH, so you might have slightly
unexpected runtime ld behavior if you were expecting unexpected runtime ld behavior if you were expecting
DT_RPATH. Specify --disable-new-dtags to gold if you do DT_RPATH. Specify --disable-new-dtags to gold if you do
not want the default DT_RUNPATH behavior. This rpath not want the default DT_RUNPATH behavior. This rpath
@@ -170,31 +160,34 @@ elseif (use_gold AND is_gcc)
disabling would be to figure out all the settings disabling would be to figure out all the settings
required to make gold play nicely with jemalloc. required to make gold play nicely with jemalloc.
#]=========================================================] #]=========================================================]
if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc)) if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc))
target_link_libraries( target_link_libraries (common
common INTERFACE
INTERFACE -fuse-ld=gold -fuse-ld=gold
-Wl,--no-as-needed -Wl,--no-as-needed
#[=========================================================[ #[=========================================================[
see https://bugs.launchpad.net/ubuntu/+source/eglibc/+bug/1253638/comments/5 see https://bugs.launchpad.net/ubuntu/+source/eglibc/+bug/1253638/comments/5
DT_RUNPATH does not work great for transitive DT_RUNPATH does not work great for transitive
dependencies (of which boost has a few) - so just dependencies (of which boost has a few) - so just
switch to DT_RPATH if doing dynamic linking with gold switch to DT_RPATH if doing dynamic linking with gold
#]=========================================================] #]=========================================================]
$<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>) $<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>)
endif () endif ()
unset(LD_VERSION) unset (LD_VERSION)
elseif (use_lld) elseif (use_lld)
# use lld linker if available # use lld linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION) execute_process (
if ("${LD_VERSION}" MATCHES "LLD") COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
target_link_libraries(common INTERFACE -fuse-ld=lld) ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
endif () if ("${LD_VERSION}" MATCHES "LLD")
unset(LD_VERSION) target_link_libraries (common INTERFACE -fuse-ld=lld)
endif () endif ()
unset (LD_VERSION)
endif()
if (assert) if (assert)
foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE) foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE)
string(REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}") STRING (REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}")
endforeach () endforeach ()
endif () endif ()

View File

@@ -1,52 +1,54 @@
include(CMakeFindDependencyMacro) include (CMakeFindDependencyMacro)
# need to represent system dependencies of the lib here # need to represent system dependencies of the lib here
#[=========================================================[ #[=========================================================[
Boost Boost
#]=========================================================] #]=========================================================]
if (static OR APPLE OR MSVC) if (static OR APPLE OR MSVC)
set(Boost_USE_STATIC_LIBS ON) set (Boost_USE_STATIC_LIBS ON)
endif () endif ()
set(Boost_USE_MULTITHREADED ON) set (Boost_USE_MULTITHREADED ON)
if (static OR MSVC) if (static OR MSVC)
set(Boost_USE_STATIC_RUNTIME ON) set (Boost_USE_STATIC_RUNTIME ON)
else () else ()
set(Boost_USE_STATIC_RUNTIME OFF) set (Boost_USE_STATIC_RUNTIME OFF)
endif () endif ()
find_dependency(Boost find_dependency (Boost
COMPONENTS COMPONENTS
chrono chrono
container container
context context
coroutine coroutine
date_time date_time
filesystem filesystem
program_options program_options
regex regex
system system
thread) thread)
#[=========================================================[ #[=========================================================[
OpenSSL OpenSSL
#]=========================================================] #]=========================================================]
if (NOT DEFINED OPENSSL_ROOT_DIR) if (NOT DEFINED OPENSSL_ROOT_DIR)
if (DEFINED ENV{OPENSSL_ROOT}) if (DEFINED ENV{OPENSSL_ROOT})
set(OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT}) set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
elseif (APPLE) elseif (APPLE)
find_program(homebrew brew) find_program (homebrew brew)
if (homebrew) if (homebrew)
execute_process(COMMAND ${homebrew} --prefix openssl OUTPUT_VARIABLE OPENSSL_ROOT_DIR execute_process (COMMAND ${homebrew} --prefix openssl
OUTPUT_STRIP_TRAILING_WHITESPACE) OUTPUT_VARIABLE OPENSSL_ROOT_DIR
endif () OUTPUT_STRIP_TRAILING_WHITESPACE)
endif () endif ()
file(TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR) endif ()
file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
endif () endif ()
if (static OR APPLE OR MSVC) if (static OR APPLE OR MSVC)
set(OPENSSL_USE_STATIC_LIBS ON) set (OPENSSL_USE_STATIC_LIBS ON)
endif () endif ()
set(OPENSSL_MSVC_STATIC_RT ON) set (OPENSSL_MSVC_STATIC_RT ON)
find_dependency(OpenSSL REQUIRED) find_dependency (OpenSSL REQUIRED)
find_dependency(ZLIB) find_dependency (ZLIB)
find_dependency(date) find_dependency (date)
if (TARGET ZLIB::ZLIB) if (TARGET ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES INTERFACE_LINK_LIBRARIES ZLIB::ZLIB) set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif () endif ()

View File

@@ -4,45 +4,69 @@
include(target_protobuf_sources) include(target_protobuf_sources)
# Protocol buffers cannot participate in a unity build,
# because all the generated sources
# define a bunch of `static const` variables with the same names,
# so we just build them as a separate library.
add_library(xrpl.libpb) add_library(xrpl.libpb)
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
PROTOS include/xrpl/proto/xrpl.proto) target_protobuf_sources(xrpl.libpb xrpl/proto
LANGUAGE cpp
IMPORT_DIRS include/xrpl/proto
PROTOS include/xrpl/proto/xrpl.proto
)
file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto") file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto PROTOS "${protos}") target_protobuf_sources(xrpl.libpb xrpl/proto
target_protobuf_sources( LANGUAGE cpp
xrpl.libpb xrpl/proto IMPORT_DIRS include/xrpl/proto
LANGUAGE grpc PROTOS "${protos}"
IMPORT_DIRS include/xrpl/proto )
PROTOS "${protos}" target_protobuf_sources(xrpl.libpb xrpl/proto
PLUGIN protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin> LANGUAGE grpc
GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc) IMPORT_DIRS include/xrpl/proto
PROTOS "${protos}"
PLUGIN protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc
)
target_compile_options( target_compile_options(xrpl.libpb
xrpl.libpb PUBLIC $<$<BOOL:${is_msvc}>:-wd4996> $<$<BOOL:${is_xcode}>: --system-header-prefix="google/protobuf" PUBLIC
-Wno-deprecated-dynamic-exception-spec > $<$<BOOL:${MSVC}>:-wd4996>
PRIVATE $<$<BOOL:${is_msvc}>:-wd4065> $<$<NOT:$<BOOL:${is_msvc}>>:-Wno-deprecated-declarations>) $<$<BOOL:${XCODE}>:
--system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec
>
PRIVATE
$<$<BOOL:${MSVC}>:-wd4065>
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
)
target_link_libraries(xrpl.libpb PUBLIC protobuf::libprotobuf gRPC::grpc++) target_link_libraries(xrpl.libpb
PUBLIC
protobuf::libprotobuf
gRPC::grpc++
)
# TODO: Clean up the number of library targets later. # TODO: Clean up the number of library targets later.
add_library(xrpl.imports.main INTERFACE) add_library(xrpl.imports.main INTERFACE)
target_link_libraries( target_link_libraries(xrpl.imports.main
xrpl.imports.main INTERFACE
INTERFACE absl::random_random absl::random_random
date::date date::date
ed25519::ed25519 ed25519::ed25519
LibArchive::LibArchive LibArchive::LibArchive
OpenSSL::Crypto OpenSSL::Crypto
Xrpl::boost Xrpl::boost
Xrpl::libs Xrpl::libs
Xrpl::opts Xrpl::opts
Xrpl::syslibs Xrpl::syslibs
secp256k1::secp256k1 secp256k1::secp256k1
xrpl.libpb xrpl.libpb
xxHash::xxhash xxHash::xxhash
$<$<BOOL:${voidstar}>:antithesis-sdk-cpp>) $<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
)
include(add_module) include(add_module)
include(target_link_modules) include(target_link_modules)
@@ -64,11 +88,18 @@ target_link_libraries(xrpl.libxrpl.crypto PUBLIC xrpl.libxrpl.basics)
# Level 04 # Level 04
add_module(xrpl protocol) add_module(xrpl protocol)
target_link_libraries(xrpl.libxrpl.protocol PUBLIC xrpl.libxrpl.crypto xrpl.libxrpl.json) target_link_libraries(xrpl.libxrpl.protocol PUBLIC
xrpl.libxrpl.crypto
xrpl.libxrpl.json
)
# Level 05 # Level 05
add_module(xrpl core) add_module(xrpl core)
target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol) target_link_libraries(xrpl.libxrpl.core PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)
# Level 06 # Level 06
add_module(xrpl resource) add_module(xrpl resource)
@@ -76,45 +107,62 @@ target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)
# Level 07 # Level 07
add_module(xrpl net) add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol target_link_libraries(xrpl.libxrpl.net PUBLIC
xrpl.libxrpl.resource) xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
)
add_module(xrpl server) add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol) target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
add_module(xrpl nodestore) add_module(xrpl nodestore)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol) target_link_libraries(xrpl.libxrpl.nodestore PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)
add_module(xrpl shamap) add_module(xrpl shamap)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto xrpl.libxrpl.protocol target_link_libraries(xrpl.libxrpl.shamap PUBLIC
xrpl.libxrpl.nodestore) xrpl.libxrpl.basics
xrpl.libxrpl.crypto
xrpl.libxrpl.protocol
xrpl.libxrpl.nodestore
)
add_module(xrpl ledger) add_module(xrpl ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol) target_link_libraries(xrpl.libxrpl.ledger PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)
add_library(xrpl.libxrpl) add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl) set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
add_library(xrpl::libxrpl ALIAS xrpl.libxrpl) add_library(xrpl::libxrpl ALIAS xrpl.libxrpl)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp") file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp"
)
target_sources(xrpl.libxrpl PRIVATE ${sources}) target_sources(xrpl.libxrpl PRIVATE ${sources})
target_link_modules( target_link_modules(xrpl PUBLIC
xrpl basics
PUBLIC beast
basics core
beast crypto
core json
crypto protocol
json resource
protocol server
resource nodestore
server shamap
nodestore net
shamap ledger
net )
ledger)
# All headers in libxrpl are in modules. # All headers in libxrpl are in modules.
# Uncomment this stanza if you have not yet moved new headers into a module. # Uncomment this stanza if you have not yet moved new headers into a module.
@@ -125,34 +173,63 @@ target_link_modules(
# $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include> # $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
# $<INSTALL_INTERFACE:include>) # $<INSTALL_INTERFACE:include>)
if (xrpld) if(xrpld)
add_executable(xrpld) add_executable(xrpld)
if (tests) if(tests)
target_compile_definitions(xrpld PUBLIC ENABLE_TESTS) target_compile_definitions(xrpld PUBLIC ENABLE_TESTS)
target_compile_definitions(xrpld PRIVATE UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}) target_compile_definitions(xrpld PRIVATE
endif () UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
target_include_directories(xrpld PRIVATE $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>) )
endif()
target_include_directories(xrpld
PRIVATE
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp") file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
)
target_sources(xrpld PRIVATE ${sources})
if(tests)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
)
target_sources(xrpld PRIVATE ${sources}) target_sources(xrpld PRIVATE ${sources})
endif()
if (tests) target_link_libraries(xrpld
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp") Xrpl::boost
target_sources(xrpld PRIVATE ${sources}) Xrpl::opts
endif () Xrpl::libs
xrpl.libxrpl
)
exclude_if_included(xrpld)
# define a macro for tests that might need to
# be excluded or run differently in CI environment
if(is_ci)
target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI)
endif ()
target_link_libraries(xrpld Xrpl::boost Xrpl::opts Xrpl::libs xrpl.libxrpl) if(voidstar)
exclude_if_included(xrpld) target_compile_options(xrpld
# define a macro for tests that might need to PRIVATE
# be excluded or run differently in CI environment -fsanitize-coverage=trace-pc-guard
if (is_ci) )
target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI) # xrpld requires access to antithesis-sdk-cpp implementation file
endif () # antithesis_instrumentation.h, which is not exported as INTERFACE
target_include_directories(xrpld
PRIVATE
${CMAKE_SOURCE_DIR}/external/antithesis-sdk
)
endif()
if (voidstar) # any files that don't play well with unity should be added here
target_compile_options(xrpld PRIVATE -fsanitize-coverage=trace-pc-guard) if(tests)
# xrpld requires access to antithesis-sdk-cpp implementation file set_source_files_properties(
# antithesis_instrumentation.h, which is not exported as INTERFACE # these two seem to produce conflicts in beast teardown template methods
target_include_directories(xrpld PRIVATE ${CMAKE_SOURCE_DIR}/external/antithesis-sdk) src/test/rpc/ValidatorRPC_test.cpp
endif () src/test/ledger/Invariants_test.cpp
endif () PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
endif()
endif()

View File

@@ -2,51 +2,40 @@
coverage report target coverage report target
#]===================================================================] #]===================================================================]
if (NOT coverage) if(NOT coverage)
message(FATAL_ERROR "Code coverage not enabled! Aborting ...") message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
endif () endif()
if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC") if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag") message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
return() return()
endif () endif()
include(ProcessorCount) include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT) ProcessorCount(PROCESSOR_COUNT)
include(CodeCoverage) include(CodeCoverage)
# The instructions for these commands come from the `CodeCoverage` module, which was copied from # The instructions for these commands come from the `CodeCoverage` module,
# https://github.com/bilke/cmake-modules, commit fb7d2a3, then locally changed (see CHANGES: section in # which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
# `CodeCoverage.cmake`) # then locally changed (see CHANGES: section in `CodeCoverage.cmake`)
set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args}) set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
if (NOT GCOVR_ADDITIONAL_ARGS STREQUAL "") if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
separate_arguments(GCOVR_ADDITIONAL_ARGS) separate_arguments(GCOVR_ADDITIONAL_ARGS)
endif () endif()
list(APPEND list(APPEND GCOVR_ADDITIONAL_ARGS
GCOVR_ADDITIONAL_ARGS --exclude-throw-branches
--exclude-throw-branches --exclude-noncode-lines
--exclude-noncode-lines --exclude-unreachable-branches -s
--exclude-unreachable-branches -j ${PROCESSOR_COUNT})
-s
-j
${PROCESSOR_COUNT})
setup_target_for_coverage_gcovr( setup_target_for_coverage_gcovr(
NAME NAME coverage
coverage FORMAT ${coverage_format}
FORMAT EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
${coverage_format} DEPENDENCIES xrpld xrpl.tests
EXCLUDE )
"src/test"
"src/tests"
"include/xrpl/beast/test"
"include/xrpl/beast/unit_test"
"${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
DEPENDENCIES
xrpld
xrpl.tests)
add_code_coverage_to_target(opts INTERFACE) add_code_coverage_to_target(opts INTERFACE)

View File

@@ -2,44 +2,45 @@
docs target (optional) docs target (optional)
#]===================================================================] #]===================================================================]
if (NOT only_docs) if(NOT only_docs)
return() return()
endif () endif()
find_package(Doxygen) find_package(Doxygen)
if (NOT TARGET Doxygen::doxygen) if(NOT TARGET Doxygen::doxygen)
message(STATUS "doxygen executable not found -- skipping docs target") message(STATUS "doxygen executable not found -- skipping docs target")
return() return()
endif () endif()
set(doxygen_output_directory "${CMAKE_BINARY_DIR}/docs") set(doxygen_output_directory "${CMAKE_BINARY_DIR}/docs")
set(doxygen_include_path "${CMAKE_CURRENT_SOURCE_DIR}/src") set(doxygen_include_path "${CMAKE_CURRENT_SOURCE_DIR}/src")
set(doxygen_index_file "${doxygen_output_directory}/html/index.html") set(doxygen_index_file "${doxygen_output_directory}/html/index.html")
set(doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile") set(doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile")
file(GLOB_RECURSE file(GLOB_RECURSE doxygen_input
doxygen_input docs/*.md
docs/*.md include/*.h
include/*.h include/*.cpp
include/*.cpp include/*.md
include/*.md src/*.h
src/*.h src/*.cpp
src/*.cpp src/*.md
src/*.md Builds/*.md
Builds/*.md *.md)
*.md) list(APPEND doxygen_input
list(APPEND doxygen_input external/README.md) external/README.md
)
set(dependencies "${doxygen_input}" "${doxyfile}") set(dependencies "${doxygen_input}" "${doxyfile}")
function (verbose_find_path variable name) function(verbose_find_path variable name)
# find_path sets a CACHE variable, so don't try using a "local" variable. # find_path sets a CACHE variable, so don't try using a "local" variable.
find_path(${variable} "${name}" ${ARGN}) find_path(${variable} "${name}" ${ARGN})
if (NOT ${variable}) if(NOT ${variable})
message(NOTICE "could not find ${name}") message(NOTICE "could not find ${name}")
else () else()
message(STATUS "found ${name}: ${${variable}}/${name}") message(STATUS "found ${name}: ${${variable}}/${name}")
endif () endif()
endfunction () endfunction()
verbose_find_path(doxygen_plantuml_jar_path plantuml.jar PATH_SUFFIXES share/plantuml) verbose_find_path(doxygen_plantuml_jar_path plantuml.jar PATH_SUFFIXES share/plantuml)
verbose_find_path(doxygen_dot_path dot) verbose_find_path(doxygen_dot_path dot)
@@ -47,26 +48,36 @@ verbose_find_path(doxygen_dot_path dot)
# https://en.cppreference.com/w/Cppreference:Archives # https://en.cppreference.com/w/Cppreference:Archives
# https://stackoverflow.com/questions/60822559/how-to-move-a-file-download-from-configure-step-to-build-step # https://stackoverflow.com/questions/60822559/how-to-move-a-file-download-from-configure-step-to-build-step
set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake") set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake")
file(WRITE "${download_script}" file(WRITE
"file(DOWNLOAD \ "${download_script}"
"file(DOWNLOAD \
https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \ https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \
${CMAKE_BINARY_DIR}/docs/cppreference.zip \ ${CMAKE_BINARY_DIR}/docs/cppreference.zip \
EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \ EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \
)\n \ )\n \
execute_process( \ execute_process( \
COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \ COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \
)\n") )\n"
)
set(tagfile "${CMAKE_BINARY_DIR}/docs/cppreference-doxygen-web.tag.xml") set(tagfile "${CMAKE_BINARY_DIR}/docs/cppreference-doxygen-web.tag.xml")
add_custom_command(OUTPUT "${tagfile}" COMMAND "${CMAKE_COMMAND}" -P "${download_script}" add_custom_command(
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs") OUTPUT "${tagfile}"
COMMAND "${CMAKE_COMMAND}" -P "${download_script}"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs"
)
set(doxygen_tagfiles "${tagfile}=http://en.cppreference.com/w/") set(doxygen_tagfiles "${tagfile}=http://en.cppreference.com/w/")
add_custom_command( add_custom_command(
OUTPUT "${doxygen_index_file}" OUTPUT "${doxygen_index_file}"
COMMAND "${CMAKE_COMMAND}" -E env "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}" COMMAND "${CMAKE_COMMAND}" -E env
"DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" "DOXYGEN_TAGFILES=${doxygen_tagfiles}" "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" "DOXYGEN_DOT_PATH=${doxygen_dot_path}" "DOXYGEN_INCLUDE_PATH=${doxygen_include_path}"
"${DOXYGEN_EXECUTABLE}" "${doxyfile}" "DOXYGEN_TAGFILES=${doxygen_tagfiles}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" "DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}"
DEPENDS "${dependencies}" "${tagfile}") "DOXYGEN_DOT_PATH=${doxygen_dot_path}"
add_custom_target(docs DEPENDS "${doxygen_index_file}" SOURCES "${dependencies}") "${DOXYGEN_EXECUTABLE}" "${doxyfile}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
DEPENDS "${dependencies}" "${tagfile}")
add_custom_target(docs
DEPENDS "${doxygen_index_file}"
SOURCES "${dependencies}")

View File

@@ -4,53 +4,57 @@
include(create_symbolic_link) include(create_symbolic_link)
# If no suffix is defined for executables (e.g. Windows uses .exe but Linux install (
# and macOS use none), then explicitly set it to the empty string. TARGETS
if (NOT DEFINED suffix) common
set(suffix "") opts
endif () xrpl_boost
xrpl_libs
xrpl_syslibs
xrpl.imports.main
xrpl.libpb
xrpl.libxrpl
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.core
xrpl.libxrpl.crypto
xrpl.libxrpl.json
xrpl.libxrpl.ledger
xrpl.libxrpl.net
xrpl.libxrpl.nodestore
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl.shamap
antithesis-sdk-cpp
EXPORT XrplExports
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES DESTINATION include)
install(TARGETS common install(
opts DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
xrpl_boost DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
xrpl_libs )
xrpl_syslibs
xrpl.imports.main
xrpl.libpb
xrpl.libxrpl
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.core
xrpl.libxrpl.crypto
xrpl.libxrpl.json
xrpl.libxrpl.ledger
xrpl.libxrpl.net
xrpl.libxrpl.nodestore
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl.shamap
antithesis-sdk-cpp
EXPORT XrplExports
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES
DESTINATION include)
install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") install (EXPORT XrplExports
FILE XrplTargets.cmake
install(EXPORT XrplExports FILE XrplTargets.cmake NAMESPACE Xrpl:: DESTINATION lib/cmake/xrpl) NAMESPACE Xrpl::
include(CMakePackageConfigHelpers) DESTINATION lib/cmake/xrpl)
write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version} COMPATIBILITY SameMajorVersion) include (CMakePackageConfigHelpers)
write_basic_package_version_file (
XrplConfigVersion.cmake
VERSION ${xrpld_version}
COMPATIBILITY SameMajorVersion)
if (is_root_project AND TARGET xrpld) if (is_root_project AND TARGET xrpld)
install(TARGETS xrpld RUNTIME DESTINATION bin) install (TARGETS xrpld RUNTIME DESTINATION bin)
set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON) set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
# sample configs should not overwrite existing files # sample configs should not overwrite existing files
# install if-not-exists workaround as suggested by # install if-not-exists workaround as suggested by
# https://cmake.org/Bug/view.php?id=12646 # https://cmake.org/Bug/view.php?id=12646
install(CODE " install(CODE "
macro (copy_if_not_exists SRC DEST NEWNAME) macro (copy_if_not_exists SRC DEST NEWNAME)
if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\") if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
file (INSTALL FILE_PERMISSIONS OWNER_READ OWNER_WRITE DESTINATION \"\${CMAKE_INSTALL_PREFIX}/\${DEST}\" FILES \"\${SRC}\" RENAME \"\${NEWNAME}\") file (INSTALL FILE_PERMISSIONS OWNER_READ OWNER_WRITE DESTINATION \"\${CMAKE_INSTALL_PREFIX}/\${DEST}\" FILES \"\${SRC}\" RENAME \"\${NEWNAME}\")
@@ -61,7 +65,7 @@ if (is_root_project AND TARGET xrpld)
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg\" etc xrpld.cfg) copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg\" etc xrpld.cfg)
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt) copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
") ")
install(CODE " install(CODE "
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\") set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link) include(create_symbolic_link)
create_symbolic_link(xrpld${suffix} \ create_symbolic_link(xrpld${suffix} \
@@ -69,5 +73,8 @@ if (is_root_project AND TARGET xrpld)
") ")
endif () endif ()
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake ${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake install (
DESTINATION lib/cmake/xrpl) FILES
${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
DESTINATION lib/cmake/xrpl)

View File

@@ -2,82 +2,96 @@
xrpld compile options/settings via an interface library xrpld compile options/settings via an interface library
#]===================================================================] #]===================================================================]
include(CompilationEnv) add_library (opts INTERFACE)
add_library (Xrpl::opts ALIAS opts)
target_compile_definitions (opts
INTERFACE
BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT
BOOST_CONTAINER_FWD_BAD_DEQUE
HAS_UNCAUGHT_EXCEPTIONS=1
$<$<BOOL:${boost_show_deprecated}>:
BOOST_ASIO_NO_DEPRECATED
BOOST_FILESYSTEM_NO_DEPRECATED
>
$<$<NOT:$<BOOL:${boost_show_deprecated}>>:
BOOST_COROUTINES_NO_DEPRECATION_WARNING
BOOST_BEAST_ALLOW_DEPRECATED
BOOST_FILESYSTEM_DEPRECATED
>
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
$<$<BOOL:${single_io_service_thread}>:XRPL_SINGLE_IO_SERVICE_THREAD=1>
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
target_compile_options (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
# Set defaults for optional variables to avoid uninitialized variable warnings target_link_libraries (opts
if (NOT DEFINED voidstar) INTERFACE
set(voidstar OFF) $<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
if(jemalloc)
find_package(jemalloc REQUIRED)
target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC)
target_link_libraries(opts INTERFACE jemalloc::jemalloc)
endif () endif ()
add_library(opts INTERFACE) if (san)
add_library(Xrpl::opts ALIAS opts) target_compile_options (opts
target_compile_definitions( INTERFACE
opts # sanitizers recommend minimum of -O1 for reasonable performance
INTERFACE BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS $<$<CONFIG:Debug>:-O1>
BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT ${SAN_FLAG}
BOOST_CONTAINER_FWD_BAD_DEQUE -fno-omit-frame-pointer)
HAS_UNCAUGHT_EXCEPTIONS=1 target_compile_definitions (opts
$<$<BOOL:${boost_show_deprecated}>: INTERFACE
BOOST_ASIO_NO_DEPRECATED $<$<STREQUAL:${san},address>:SANITIZER=ASAN>
BOOST_FILESYSTEM_NO_DEPRECATED $<$<STREQUAL:${san},thread>:SANITIZER=TSAN>
> $<$<STREQUAL:${san},memory>:SANITIZER=MSAN>
$<$<NOT:$<BOOL:${boost_show_deprecated}>>: $<$<STREQUAL:${san},undefined>:SANITIZER=UBSAN>)
BOOST_COROUTINES_NO_DEPRECATION_WARNING target_link_libraries (opts INTERFACE ${SAN_FLAG} ${SAN_LIB})
BOOST_BEAST_ALLOW_DEPRECATED
BOOST_FILESYSTEM_DEPRECATED
>
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
$<$<BOOL:${single_io_service_thread}>:XRPL_SINGLE_IO_SERVICE_THREAD=1>
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
target_compile_options(
opts
INTERFACE $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized> $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries(opts INTERFACE $<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
if (jemalloc)
find_package(jemalloc REQUIRED)
target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC)
target_link_libraries(opts INTERFACE jemalloc::jemalloc)
endif () endif ()
#[===================================================================[ #[===================================================================[
xrpld transitive library deps via an interface library xrpld transitive library deps via an interface library
#]===================================================================] #]===================================================================]
add_library(xrpl_syslibs INTERFACE) add_library (xrpl_syslibs INTERFACE)
add_library(Xrpl::syslibs ALIAS xrpl_syslibs) add_library (Xrpl::syslibs ALIAS xrpl_syslibs)
target_link_libraries( target_link_libraries (xrpl_syslibs
xrpl_syslibs INTERFACE
INTERFACE $<$<BOOL:${is_msvc}>: $<$<BOOL:${MSVC}>:
legacy_stdio_definitions.lib legacy_stdio_definitions.lib
Shlwapi Shlwapi
kernel32 kernel32
user32 user32
gdi32 gdi32
winspool winspool
comdlg32 comdlg32
advapi32 advapi32
shell32 shell32
ole32 ole32
oleaut32 oleaut32
uuid uuid
odbc32 odbc32
odbccp32 odbccp32
crypt32 crypt32
> >
$<$<NOT:$<BOOL:${is_msvc}>>:dl> $<$<NOT:$<BOOL:${MSVC}>>:dl>
$<$<NOT:$<OR:$<BOOL:${is_msvc}>,$<BOOL:${is_macos}>>>:rt>) $<$<NOT:$<OR:$<BOOL:${MSVC}>,$<BOOL:${APPLE}>>>:rt>)
if (NOT is_msvc) if (NOT MSVC)
set(THREADS_PREFER_PTHREAD_FLAG ON) set (THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads) find_package (Threads)
target_link_libraries(xrpl_syslibs INTERFACE Threads::Threads) target_link_libraries (xrpl_syslibs INTERFACE Threads::Threads)
endif () endif ()
add_library(xrpl_libs INTERFACE) add_library (xrpl_libs INTERFACE)
add_library(Xrpl::libs ALIAS xrpl_libs) add_library (Xrpl::libs ALIAS xrpl_libs)
target_link_libraries(xrpl_libs INTERFACE Xrpl::syslibs) target_link_libraries (xrpl_libs INTERFACE Xrpl::syslibs)

View File

@@ -1,197 +0,0 @@
#[===================================================================[
Configure sanitizers based on environment variables.
This module reads the following environment variables:
- SANITIZERS: The sanitizers to enable. Possible values:
- "address"
- "address,undefinedbehavior"
- "thread"
- "thread,undefinedbehavior"
- "undefinedbehavior"
The compiler type and platform are detected in CompilationEnv.cmake.
The sanitizer compile options are applied to the 'common' interface library
which is linked to all targets in the project.
Internal flag variables set by this module:
- SANITIZER_TYPES: List of sanitizer types to enable (e.g., "address",
"thread", "undefined"). And two more flags for undefined behavior sanitizer (e.g., "float-divide-by-zero", "unsigned-integer-overflow").
This list is joined with commas and passed to -fsanitize=<list>.
- SANITIZERS_COMPILE_FLAGS: Compiler flags for sanitizer instrumentation.
Includes:
* -fno-omit-frame-pointer: Preserves frame pointers for stack traces
* -O1: Minimum optimization for reasonable performance
* -fsanitize=<types>: Enables sanitizer instrumentation
* -fsanitize-ignorelist=<path>: (Clang only) Compile-time ignorelist
* -mcmodel=large/medium: (GCC only) Code model for large binaries
* -Wno-stringop-overflow: (GCC only) Suppresses false positive warnings
* -Wno-tsan: (For GCC TSAN combination only) Suppresses atomic_thread_fence warnings
- SANITIZERS_LINK_FLAGS: Linker flags for sanitizer runtime libraries.
Includes:
* -fsanitize=<types>: Links sanitizer runtime libraries
* -mcmodel=large/medium: (GCC only) Matches compile-time code model
- SANITIZERS_RELOCATION_FLAGS: (GCC only) Code model flags for linking.
Used to handle large instrumented binaries on x86_64:
* -mcmodel=large: For AddressSanitizer (prevents relocation errors)
* -mcmodel=medium: For ThreadSanitizer (large model is incompatible)
#]===================================================================]
include(CompilationEnv)
# Read environment variable
set(SANITIZERS "")
if (DEFINED ENV{SANITIZERS})
set(SANITIZERS "$ENV{SANITIZERS}")
endif ()
# Set SANITIZERS_ENABLED flag for use in other modules
if (SANITIZERS MATCHES "address|thread|undefinedbehavior")
set(SANITIZERS_ENABLED TRUE)
else ()
set(SANITIZERS_ENABLED FALSE)
return()
endif ()
# Sanitizers are not supported on Windows/MSVC
if (is_msvc)
message(FATAL_ERROR "Sanitizers are not supported on Windows/MSVC. "
"Please unset the SANITIZERS environment variable.")
endif ()
message(STATUS "Configuring sanitizers: ${SANITIZERS}")
# Parse SANITIZERS value to determine which sanitizers to enable
set(enable_asan FALSE)
set(enable_tsan FALSE)
set(enable_ubsan FALSE)
# Normalize SANITIZERS into a list
set(san_list "${SANITIZERS}")
string(REPLACE "," ";" san_list "${san_list}")
separate_arguments(san_list)
foreach (san IN LISTS san_list)
if (san STREQUAL "address")
set(enable_asan TRUE)
elseif (san STREQUAL "thread")
set(enable_tsan TRUE)
elseif (san STREQUAL "undefinedbehavior")
set(enable_ubsan TRUE)
else ()
message(FATAL_ERROR "Unsupported sanitizer type: ${san}"
"Supported: address, thread, undefinedbehavior and their combinations.")
endif ()
endforeach ()
# Validate sanitizer compatibility
if (enable_asan AND enable_tsan)
message(FATAL_ERROR "AddressSanitizer and ThreadSanitizer are incompatible and cannot be enabled simultaneously. "
"Use 'address' or 'thread', optionally with 'undefinedbehavior'.")
endif ()
# Frame pointer is required for meaningful stack traces. Sanitizers recommend minimum of -O1 for reasonable performance
set(SANITIZERS_COMPILE_FLAGS "-fno-omit-frame-pointer" "-O1")
# Build the sanitizer flags list
set(SANITIZER_TYPES)
if (enable_asan)
list(APPEND SANITIZER_TYPES "address")
elseif (enable_tsan)
list(APPEND SANITIZER_TYPES "thread")
endif ()
if (enable_ubsan)
# UB sanitizer flags
list(APPEND SANITIZER_TYPES "undefined" "float-divide-by-zero")
if (is_clang)
# Clang supports additional UB checks. More info here
# https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html
list(APPEND SANITIZER_TYPES "unsigned-integer-overflow")
endif ()
endif ()
# Configure code model for GCC on amd64 Use large code model for ASAN to avoid relocation errors Use medium code model
# for TSAN (large is not compatible with TSAN)
set(SANITIZERS_RELOCATION_FLAGS)
# Compiler-specific configuration
if (is_gcc)
# Disable mold, gold and lld linkers for GCC with sanitizers Use default linker (bfd/ld) which is more lenient with
# mixed code models This is needed since the size of instrumented binary exceeds the limits set by mold, lld and
# gold linkers
set(use_mold OFF CACHE BOOL "Use mold linker" FORCE)
set(use_gold OFF CACHE BOOL "Use gold linker" FORCE)
set(use_lld OFF CACHE BOOL "Use lld linker" FORCE)
message(STATUS " Disabled mold, gold, and lld linkers for GCC with sanitizers")
# Suppress false positive warnings in GCC with stringop-overflow
list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-stringop-overflow")
if (is_amd64 AND enable_asan)
message(STATUS " Using large code model (-mcmodel=large)")
list(APPEND SANITIZERS_COMPILE_FLAGS "-mcmodel=large")
list(APPEND SANITIZERS_RELOCATION_FLAGS "-mcmodel=large")
elseif (enable_tsan)
# GCC doesn't support atomic_thread_fence with tsan. Suppress warnings.
list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-tsan")
message(STATUS " Using medium code model (-mcmodel=medium)")
list(APPEND SANITIZERS_COMPILE_FLAGS "-mcmodel=medium")
list(APPEND SANITIZERS_RELOCATION_FLAGS "-mcmodel=medium")
endif ()
# Join sanitizer flags with commas for -fsanitize option
list(JOIN SANITIZER_TYPES "," SANITIZER_TYPES_STR)
# Add sanitizer to compile and link flags
list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
set(SANITIZERS_LINK_FLAGS "${SANITIZERS_RELOCATION_FLAGS}" "-fsanitize=${SANITIZER_TYPES_STR}")
elseif (is_clang)
# Add ignorelist for Clang (GCC doesn't support this) Use CMAKE_SOURCE_DIR to get the path to the ignorelist
set(IGNORELIST_PATH "${CMAKE_SOURCE_DIR}/sanitizers/suppressions/sanitizer-ignorelist.txt")
if (NOT EXISTS "${IGNORELIST_PATH}")
message(FATAL_ERROR "Sanitizer ignorelist not found: ${IGNORELIST_PATH}")
endif ()
list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize-ignorelist=${IGNORELIST_PATH}")
message(STATUS " Using sanitizer ignorelist: ${IGNORELIST_PATH}")
# Join sanitizer flags with commas for -fsanitize option
list(JOIN SANITIZER_TYPES "," SANITIZER_TYPES_STR)
# Add sanitizer to compile and link flags
list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
set(SANITIZERS_LINK_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
endif ()
message(STATUS " Compile flags: ${SANITIZERS_COMPILE_FLAGS}")
message(STATUS " Link flags: ${SANITIZERS_LINK_FLAGS}")
# Apply the sanitizer flags to the 'common' interface library This is the same library used by XrplCompiler.cmake
target_compile_options(common INTERFACE $<$<COMPILE_LANGUAGE:CXX>:${SANITIZERS_COMPILE_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${SANITIZERS_COMPILE_FLAGS}>)
# Apply linker flags
target_link_options(common INTERFACE ${SANITIZERS_LINK_FLAGS})
# Define SANITIZERS macro for BuildInfo.cpp
set(sanitizers_list)
if (enable_asan)
list(APPEND sanitizers_list "ASAN")
endif ()
if (enable_tsan)
list(APPEND sanitizers_list "TSAN")
endif ()
if (enable_ubsan)
list(APPEND sanitizers_list "UBSAN")
endif ()
if (sanitizers_list)
list(JOIN sanitizers_list "." sanitizers_str)
target_compile_definitions(common INTERFACE SANITIZERS=${sanitizers_str})
endif ()

View File

@@ -2,43 +2,49 @@
sanity checks sanity checks
#]===================================================================] #]===================================================================]
include(CompilationEnv)
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
set(CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE) set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)
if (NOT is_multiconfig) if (NOT is_multiconfig)
if (NOT CMAKE_BUILD_TYPE) if (NOT CMAKE_BUILD_TYPE)
message(STATUS "Build type not specified - defaulting to Release") message (STATUS "Build type not specified - defaulting to Release")
set(CMAKE_BUILD_TYPE Release CACHE STRING "build type" FORCE) set (CMAKE_BUILD_TYPE Release CACHE STRING "build type" FORCE)
elseif (NOT (CMAKE_BUILD_TYPE STREQUAL Debug OR CMAKE_BUILD_TYPE STREQUAL Release)) elseif (NOT (CMAKE_BUILD_TYPE STREQUAL Debug OR CMAKE_BUILD_TYPE STREQUAL Release))
# for simplicity, these are the only two config types we care about. Limiting the build types simplifies dealing # for simplicity, these are the only two config types we care about. Limiting
# with external project builds especially # the build types simplifies dealing with external project builds especially
message(FATAL_ERROR " *** Only Debug or Release build types are currently supported ***") message (FATAL_ERROR " *** Only Debug or Release build types are currently supported ***")
endif () endif ()
endif () endif ()
if (is_clang) # both Clang and AppleClang if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0) set (is_clang TRUE)
message(FATAL_ERROR "This project requires clang 16 or later") if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
endif () CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
elseif (is_gcc) message (FATAL_ERROR "This project requires clang 16 or later")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0) endif ()
message(FATAL_ERROR "This project requires GCC 12 or later") elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
endif () set (is_gcc TRUE)
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
message (FATAL_ERROR "This project requires GCC 12 or later")
endif ()
endif () endif ()
# check for in-source build and fail # check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}") if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message(FATAL_ERROR "Builds (in-source) are not allowed in " message (FATAL_ERROR "Builds (in-source) are not allowed in "
"${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles " "${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles "
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.") "directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.")
endif () endif ()
if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32") if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
message(FATAL_ERROR "Visual Studio 32-bit build is not supported.") message (FATAL_ERROR "Visual Studio 32-bit build is not supported.")
endif ()
if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
message (FATAL_ERROR "Xrpld requires a 64 bit target architecture.\n"
"The most likely cause of this warning is trying to build xrpld with a 32-bit OS.")
endif () endif ()
if (APPLE AND NOT HOMEBREW) if (APPLE AND NOT HOMEBREW)
find_program(HOMEBREW brew) find_program (HOMEBREW brew)
endif () endif ()

View File

@@ -2,105 +2,143 @@
declare options and variables declare options and variables
#]===================================================================] #]===================================================================]
include(CompilationEnv) if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
set (is_linux TRUE)
else()
set(is_linux FALSE)
endif()
set(is_ci FALSE) if("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
if (DEFINED ENV{CI}) set(is_ci TRUE)
if ("$ENV{CI}" STREQUAL "true") else()
set(is_ci TRUE) set(is_ci FALSE)
endif () endif()
endif ()
get_directory_property(has_parent PARENT_DIRECTORY) get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent) if(has_parent)
set(is_root_project OFF) set(is_root_project OFF)
else () else()
set(is_root_project ON) set(is_root_project ON)
endif () endif()
option(assert "Enables asserts, even in release builds" OFF) option(assert "Enables asserts, even in release builds" OFF)
option(xrpld "Build xrpld" ON) option(xrpld "Build xrpld" ON)
option(tests "Build tests" ON) option(tests "Build tests" ON)
if (tests) if(tests)
# This setting allows making a separate workflow to test fees other than default 10 # This setting allows making a separate workflow to test fees other than default 10
if (NOT UNIT_TEST_REFERENCE_FEE) if(NOT UNIT_TEST_REFERENCE_FEE)
set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "") set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "")
endif () endif()
endif () endif()
if (is_clang AND is_linux) option(unity "Creates a build using UNITY support in cmake." OFF)
option(voidstar "Enable Antithesis instrumentation." OFF) if(unity)
endif () if(NOT is_ci)
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
endif()
set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif()
if (is_gcc OR is_clang) if(is_clang AND is_linux)
include(ProcessorCount) option(voidstar "Enable Antithesis instrumentation." OFF)
ProcessorCount(PROCESSOR_COUNT) endif()
option(coverage "Generates coverage info." OFF) if(is_gcc OR is_clang)
option(profile "Add profiling flags" OFF) include(ProcessorCount)
set(coverage_format "html-details" CACHE STRING "Output format of the coverage report.") ProcessorCount(PROCESSOR_COUNT)
set(coverage_extra_args "" CACHE STRING "Additional arguments to pass to gcovr.")
option(wextra "compile with extra gcc/clang warnings enabled" ON)
else ()
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif ()
if (is_linux AND NOT SANITIZER) option(coverage "Generates coverage info." OFF)
option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF) option(profile "Add profiling flags" OFF)
option(static "link protobuf, openssl, libc++, and boost statically" ON) set(coverage_format "html-details" CACHE STRING
option(perf "Enables flags that assist with perf recording" OFF) "Output format of the coverage report.")
option(use_gold "enables detection of gold (binutils) linker" ON) set(coverage_extra_args "" CACHE STRING
option(use_mold "enables detection of mold (binutils) linker" ON) "Additional arguments to pass to gcovr.")
# Set a default value for the log flag based on the build type. This provides a sensible default (on for debug, off option(wextra "compile with extra gcc/clang warnings enabled" ON)
# for release) while still allowing the user to override it for any build. else()
if (CMAKE_BUILD_TYPE STREQUAL "Debug") set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
set(TRUNCATED_LOGS_DEFAULT ON) set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
else () set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
set(TRUNCATED_LOGS_DEFAULT OFF) endif()
endif ()
option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux." ${TRUNCATED_LOGS_DEFAULT})
if (TRUNCATED_THREAD_NAME_LOGS)
add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS)
endif ()
else ()
# we are not ready to allow shared-libs on windows because it would require export declarations. On macos it's more
# feasible, but static openssl produces odd linker errors, thus we disable shared lib builds for now.
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE)
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif ()
if (is_clang) if(is_linux)
option(use_lld "enables detection of lld linker" ON) option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
else () option(static "link protobuf, openssl, libc++, and boost statically" ON)
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE) option(perf "Enables flags that assist with perf recording" OFF)
endif () option(use_gold "enables detection of gold (binutils) linker" ON)
option(use_mold "enables detection of mold (binutils) linker" ON)
else()
# we are not ready to allow shared-libs on windows because it would require
# export declarations. On macos it's more feasible, but static openssl
# produces odd linker errors, thus we disable shared lib builds for now.
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE)
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()
if(is_clang)
option(use_lld "enables detection of lld linker" ON)
else()
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()
option(jemalloc "Enables jemalloc for heap profiling" OFF) option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF) option(werr "treat warnings as errors" OFF)
option(local_protobuf "Force a local build of protobuf instead of looking for an installed version." OFF) option(local_protobuf
option(local_grpc "Force a local build of gRPC instead of looking for an installed version." OFF) "Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc
"Force a local build of gRPC instead of looking for an installed version." OFF)
# this one is a string and therefore can't be an option
set(san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if(san)
string(TOLOWER ${san} san)
set(SAN_FLAG "-fsanitize=${san}")
set(SAN_LIB "")
if(is_gcc)
if(san STREQUAL "address")
set(SAN_LIB "asan")
elseif(san STREQUAL "thread")
set(SAN_LIB "tsan")
elseif(san STREQUAL "memory")
set(SAN_LIB "msan")
elseif(san STREQUAL "undefined")
set(SAN_LIB "ubsan")
endif()
endif()
set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if(NOT COMPILER_SUPPORTS_SAN)
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif()
endif()
# the remaining options are obscure and rarely used # the remaining options are obscure and rarely used
option(beast_no_unit_test_inline "Prevents unit test definitions from being inserted into global table" OFF) option(beast_no_unit_test_inline
option(single_io_service_thread "Restricts the number of threads calling io_context::run to one. \ "Prevents unit test definitions from being inserted into global table"
This can be useful when debugging." OFF) OFF)
option(boost_show_deprecated "Allow boost to fail on deprecated usage. Only useful if you're trying\ option(single_io_service_thread
to find deprecated calls." OFF) "Restricts the number of threads calling io_context::run to one. \
This can be useful when debugging."
OFF)
option(boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
OFF)
if (WIN32) if(WIN32)
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF) option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else () else()
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE) set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif () endif()
if (coverage) if(coverage)
message(STATUS "coverage build requested - forcing Debug build") message(STATUS "coverage build requested - forcing Debug build")
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE) set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif () endif()

View File

@@ -1,17 +1,20 @@
option(validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF) option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
if (validator_keys) if (validator_keys)
git_branch(current_branch) git_branch (current_branch)
# default to tracking VK master branch unless we are on release # default to tracking VK master branch unless we are on release
if (NOT (current_branch STREQUAL "release")) if (NOT (current_branch STREQUAL "release"))
set(current_branch "master") set (current_branch "master")
endif () endif ()
message(STATUS "Tracking ValidatorKeys branch: ${current_branch}") message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")
FetchContent_Declare(validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git FetchContent_Declare (
GIT_TAG "${current_branch}") validator_keys
FetchContent_MakeAvailable(validator_keys) GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}") GIT_TAG "${current_branch}"
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) )
FetchContent_MakeAvailable(validator_keys)
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
endif () endif ()

View File

@@ -3,13 +3,13 @@
#]===================================================================] #]===================================================================]
file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO) file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
foreach (line_ ${BUILD_INFO}) foreach(line_ ${BUILD_INFO})
if (line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"") if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
set(xrpld_version ${CMAKE_MATCH_1}) set(xrpld_version ${CMAKE_MATCH_1})
endif () endif()
endforeach () endforeach()
if (xrpld_version) if(xrpld_version)
message(STATUS "xrpld version: ${xrpld_version}") message(STATUS "xrpld version: ${xrpld_version}")
else () else()
message(FATAL_ERROR "unable to determine xrpld version") message(FATAL_ERROR "unable to determine xrpld version")
endif () endif()

View File

@@ -12,14 +12,26 @@ include(isolate_headers)
# add_module(parent a) # add_module(parent a)
# add_module(parent b) # add_module(parent b)
# target_link_libraries(project.libparent.b PUBLIC project.libparent.a) # target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
function (add_module parent name) function(add_module parent name)
set(target ${PROJECT_NAME}.lib${parent}.${name}) set(target ${PROJECT_NAME}.lib${parent}.${name})
add_library(${target} OBJECT) add_library(${target} OBJECT)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp") file(GLOB_RECURSE sources CONFIGURE_DEPENDS
target_sources(${target} PRIVATE ${sources}) "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp"
target_include_directories(${target} PUBLIC "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>") )
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/include" target_sources(${target} PRIVATE ${sources})
"${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}" PUBLIC) target_include_directories(${target} PUBLIC
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src" "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}" "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>"
PRIVATE) )
endfunction () isolate_headers(
${target}
"${CMAKE_CURRENT_SOURCE_DIR}/include"
"${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}"
PUBLIC
)
isolate_headers(
${target}
"${CMAKE_CURRENT_SOURCE_DIR}/src"
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
PRIVATE
)
endfunction()

View File

@@ -1,19 +1,20 @@
# file(CREATE_SYMLINK) only works on Windows with administrator privileges. https://stackoverflow.com/a/61244115/618906 # file(CREATE_SYMLINK) only works on Windows with administrator privileges.
function (create_symbolic_link target link) # https://stackoverflow.com/a/61244115/618906
if (WIN32) function(create_symbolic_link target link)
if (NOT IS_SYMLINK "${link}") if(WIN32)
if (NOT IS_ABSOLUTE "${target}") if(NOT IS_SYMLINK "${link}")
# Relative links work do not work on Windows. if(NOT IS_ABSOLUTE "${target}")
set(target "${link}/../${target}") # Relative links work do not work on Windows.
endif () set(target "${link}/../${target}")
file(TO_NATIVE_PATH "${target}" target) endif()
file(TO_NATIVE_PATH "${link}" link) file(TO_NATIVE_PATH "${target}" target)
execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}") file(TO_NATIVE_PATH "${link}" link)
endif () execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}")
else () endif()
file(CREATE_LINK "${target}" "${link}" SYMBOLIC) else()
endif () file(CREATE_LINK "${target}" "${link}" SYMBOLIC)
if (NOT IS_SYMLINK "${link}") endif()
message(ERROR "failed to create symlink: <${link}>") if(NOT IS_SYMLINK "${link}")
endif () message(ERROR "failed to create symlink: <${link}>")
endfunction () endif()
endfunction()

View File

@@ -1,44 +1,47 @@
include(CompilationEnv) find_package(Boost 1.82 REQUIRED
include(XrplSanitizers) COMPONENTS
chrono
find_package(Boost REQUIRED container
COMPONENTS chrono coroutine
container date_time
coroutine filesystem
date_time json
filesystem program_options
json regex
program_options system
regex thread
system )
thread)
add_library(xrpl_boost INTERFACE) add_library(xrpl_boost INTERFACE)
add_library(Xrpl::boost ALIAS xrpl_boost) add_library(Xrpl::boost ALIAS xrpl_boost)
target_link_libraries( target_link_libraries(xrpl_boost
xrpl_boost INTERFACE
INTERFACE Boost::headers Boost::headers
Boost::chrono Boost::chrono
Boost::container Boost::container
Boost::coroutine Boost::coroutine
Boost::date_time Boost::date_time
Boost::filesystem Boost::filesystem
Boost::json Boost::json
Boost::process Boost::process
Boost::program_options Boost::program_options
Boost::regex Boost::regex
Boost::thread) Boost::system
if (Boost_COMPILER) Boost::thread)
target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking) if(Boost_COMPILER)
endif () target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
if (SANITIZERS_ENABLED AND is_clang) endif()
# TODO: gcc does not support -fsanitize-blacklist...can we do something else for gcc ? if(san AND is_clang)
if (NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers) # TODO: gcc does not support -fsanitize-blacklist...can we do something else
get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES) # for gcc ?
endif () if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist") get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*") endif()
target_compile_options(opts INTERFACE # ignore boost headers for sanitizing message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt) file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
endif () target_compile_options(opts
INTERFACE
# ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif()

View File

@@ -37,12 +37,12 @@ include(create_symbolic_link)
# `${CMAKE_CURRENT_BINARY_DIR}/include/${target}`. # `${CMAKE_CURRENT_BINARY_DIR}/include/${target}`.
# #
# isolate_headers(target A B scope) # isolate_headers(target A B scope)
function (isolate_headers target A B scope) function(isolate_headers target A B scope)
file(RELATIVE_PATH C "${A}" "${B}") file(RELATIVE_PATH C "${A}" "${B}")
set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}") set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}")
set(Y "${X}/${C}") set(Y "${X}/${C}")
cmake_path(GET Y PARENT_PATH parent) cmake_path(GET Y PARENT_PATH parent)
file(MAKE_DIRECTORY "${parent}") file(MAKE_DIRECTORY "${parent}")
create_symbolic_link("${B}" "${Y}") create_symbolic_link("${B}" "${Y}")
target_include_directories(${target} ${scope} "$<BUILD_INTERFACE:${X}>") target_include_directories(${target} ${scope} "$<BUILD_INTERFACE:${X}>")
endfunction () endfunction()

View File

@@ -6,19 +6,19 @@
# target_link_libraries(project.libparent.b PUBLIC project.libparent.a) # target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
# add_library(project.libparent) # add_library(project.libparent)
# target_link_modules(parent PUBLIC a b) # target_link_modules(parent PUBLIC a b)
function (target_link_modules parent scope) function(target_link_modules parent scope)
set(library ${PROJECT_NAME}.lib${parent}) set(library ${PROJECT_NAME}.lib${parent})
foreach (name ${ARGN}) foreach(name ${ARGN})
set(module ${library}.${name}) set(module ${library}.${name})
get_target_property(sources ${library} SOURCES) get_target_property(sources ${library} SOURCES)
list(LENGTH sources before) list(LENGTH sources before)
get_target_property(dupes ${module} SOURCES) get_target_property(dupes ${module} SOURCES)
list(LENGTH dupes expected) list(LENGTH dupes expected)
list(REMOVE_ITEM sources ${dupes}) list(REMOVE_ITEM sources ${dupes})
list(LENGTH sources after) list(LENGTH sources after)
math(EXPR actual "${before} - ${after}") math(EXPR actual "${before} - ${after}")
message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}") message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}")
set_target_properties(${library} PROPERTIES SOURCES "${sources}") set_target_properties(${library} PROPERTIES SOURCES "${sources}")
target_link_libraries(${library} ${scope} ${module}) target_link_libraries(${library} ${scope} ${module})
endforeach () endforeach()
endfunction () endfunction()

View File

@@ -35,20 +35,28 @@ find_package(Protobuf REQUIRED)
# This prefix should appear at the start of all your consumer includes. # This prefix should appear at the start of all your consumer includes.
# ARGN: # ARGN:
# A list of .proto files. # A list of .proto files.
function (target_protobuf_sources target prefix) function(target_protobuf_sources target prefix)
set(dir "${CMAKE_CURRENT_BINARY_DIR}/pb-${target}") set(dir "${CMAKE_CURRENT_BINARY_DIR}/pb-${target}")
file(MAKE_DIRECTORY "${dir}/${prefix}") file(MAKE_DIRECTORY "${dir}/${prefix}")
protobuf_generate(TARGET ${target} PROTOC_OUT_DIR "${dir}/${prefix}" "${ARGN}") protobuf_generate(
target_include_directories( TARGET ${target}
${target} SYSTEM PROTOC_OUT_DIR "${dir}/${prefix}"
PUBLIC # Allows #include <package/path/to/file.proto> used by consumer files. "${ARGN}"
$<BUILD_INTERFACE:${dir}> )
# Allows #include "path/to/file.proto" used by generated files. target_include_directories(${target} SYSTEM PUBLIC
$<BUILD_INTERFACE:${dir}/${prefix}> # Allows #include <package/path/to/file.proto> used by consumer files.
# Allows #include <package/path/to/file.proto> used by consumer files. $<BUILD_INTERFACE:${dir}>
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}> # Allows #include "path/to/file.proto" used by generated files.
# Allows #include "path/to/file.proto" used by generated files. $<BUILD_INTERFACE:${dir}/${prefix}>
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/${prefix}>) # Allows #include <package/path/to/file.proto> used by consumer files.
install(DIRECTORY ${dir}/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} FILES_MATCHING PATTERN "*.h") $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
endfunction () # Allows #include "path/to/file.proto" used by generated files.
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/${prefix}>
)
install(
DIRECTORY ${dir}/
DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING PATTERN "*.h"
)
endfunction()

View File

@@ -1,63 +1,59 @@
{ {
"version": "0.5", "version": "0.5",
"requires": [ "requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1765850150.075", "zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1765850149.987", "xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1765850149.926", "sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1765850149.46", "soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1765850147.878", "snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
"secp256k1/0.7.0#9c4ab67bdc3860c16ea5b36aed8f74ea%1765850147.928", "secp256k1/0.7.0#9c4ab67bdc3860c16ea5b36aed8f74ea%1765202256.763",
"rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1765850186.86", "rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1762797952.535",
"re2/20230301#ca3b241baec15bd31ea9187150e0b333%1765850148.103", "re2/20230301#ca3b241baec15bd31ea9187150e0b333%1764175362.029",
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1765850161.038", "protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1764863245.83",
"openssl/3.5.4#1b986e61b38fdfda3b40bebc1b234393%1768312656.257", "openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1760106486.594",
"nudb/2.0.9#0432758a24204da08fee953ec9ea03cb%1769436073.32", "nudb/2.0.9#fb8dfd1a5557f5e0528114c2da17721e%1763150366.909",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1765850143.914", "lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1765842973.492", "libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1765842973.03", "libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
"libarchive/3.8.1#ffee18995c706e02bf96e7a2f7042e0d%1765850144.736", "libarchive/3.8.1#ffee18995c706e02bf96e7a2f7042e0d%1764175360.142",
"jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244", "jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244",
"gtest/1.17.0#5224b3b3ff3b4ce1133cbdd27d53ee7d%1768312129.152", "grpc/1.72.0#f244a57bff01e708c55a1100b12e1589%1763158050.628",
"grpc/1.72.0#f244a57bff01e708c55a1100b12e1589%1765850193.734", "ed25519/2015.03#ae761bdc52730a843f0809bdf6c1b1f6%1764270189.893",
"ed25519/2015.03#ae761bdc52730a843f0809bdf6c1b1f6%1765850143.772", "doctest/2.4.12#eb9fb352fb2fdfc8abb17ec270945165%1762797941.757",
"date/3.0.4#862e11e80030356b53c2c38599ceb32b%1765850143.772", "date/3.0.4#862e11e80030356b53c2c38599ceb32b%1763584497.32",
"c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1765850144.336", "c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1764175359.429",
"bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1765850143.837", "bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1764175359.429",
"boost/1.90.0#d5e8defe7355494953be18524a7f135b%1765955095.179", "boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1756223752.326",
"abseil/20250127.0#99262a368bd01c0ccca8790dfced9719%1766517936.993" "abseil/20250127.0#9e8e8cfc89a1324139fc0ee3bd4d8c8c%1753819045.301"
], ],
"build_requires": [ "build_requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1765850150.075", "zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1765850165.196", "strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1756234281.733",
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1765850161.038", "protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1764863245.83",
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1765850144.707", "nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1756234232.901",
"msys2/cci.latest#1996656c3c98e5765b25b60ff5cf77b4%1764840888.758", "msys2/cci.latest#1996656c3c98e5765b25b60ff5cf77b4%1764840888.758",
"m4/1.4.19#70dc8bbb33e981d119d2acc0175cf381%1763158052.846", "m4/1.4.19#70dc8bbb33e981d119d2acc0175cf381%1763158052.846",
"cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1765850153.937", "cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1764175359.44",
"cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1765850153.479", "cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1764175359.429",
"b2/5.3.3#107c15377719889654eb9a162a673975%1765850144.355", "b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28",
"automake/1.16.5#b91b7c384c3deaa9d535be02da14d04f%1755524470.56", "automake/1.16.5#b91b7c384c3deaa9d535be02da14d04f%1755524470.56",
"autoconf/2.71#51077f068e61700d65bb05541ea1e4b0%1731054366.86", "autoconf/2.71#51077f068e61700d65bb05541ea1e4b0%1731054366.86",
"abseil/20250127.0#99262a368bd01c0ccca8790dfced9719%1766517936.993" "abseil/20250127.0#9e8e8cfc89a1324139fc0ee3bd4d8c8c%1753819045.301"
], ],
"python_requires": [], "python_requires": [],
"overrides": { "overrides": {
"boost/1.90.0#d5e8defe7355494953be18524a7f135b": [
null,
"boost/1.90.0"
],
"protobuf/5.27.0": [ "protobuf/5.27.0": [
"protobuf/6.32.1" "protobuf/6.32.1"
], ],
"lz4/1.9.4": [ "lz4/1.9.4": [
"lz4/1.10.0" "lz4/1.10.0"
], ],
"boost/1.83.0": [
"boost/1.88.0"
],
"sqlite3/3.44.2": [ "sqlite3/3.44.2": [
"sqlite3/3.49.1" "sqlite3/3.49.1"
], ],
"boost/1.83.0": [
"boost/1.90.0"
],
"lz4/[>=1.9.4 <2]": [ "lz4/[>=1.9.4 <2]": [
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504" "lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504"
] ]

View File

@@ -1 +0,0 @@
include(sanitizers)

View File

@@ -1,59 +0,0 @@
include(default)
{% set compiler, version, compiler_exe = detect_api.detect_default_compiler() %}
{% set sanitizers = os.getenv("SANITIZERS") %}
[conf]
{% if sanitizers %}
{% if compiler == "gcc" %}
{% if "address" in sanitizers or "thread" in sanitizers or "undefinedbehavior" in sanitizers %}
{% set sanitizer_list = [] %}
{% set model_code = "" %}
{% set extra_cxxflags = ["-fno-omit-frame-pointer", "-O1", "-Wno-stringop-overflow"] %}
{% if "address" in sanitizers %}
{% set _ = sanitizer_list.append("address") %}
{% set model_code = "-mcmodel=large" %}
{% elif "thread" in sanitizers %}
{% set _ = sanitizer_list.append("thread") %}
{% set model_code = "-mcmodel=medium" %}
{% set _ = extra_cxxflags.append("-Wno-tsan") %}
{% endif %}
{% if "undefinedbehavior" in sanitizers %}
{% set _ = sanitizer_list.append("undefined") %}
{% set _ = sanitizer_list.append("float-divide-by-zero") %}
{% endif %}
{% set sanitizer_flags = "-fsanitize=" ~ ",".join(sanitizer_list) ~ " " ~ model_code %}
tools.build:cxxflags+=['{{sanitizer_flags}} {{" ".join(extra_cxxflags)}}']
tools.build:sharedlinkflags+=['{{sanitizer_flags}}']
tools.build:exelinkflags+=['{{sanitizer_flags}}']
{% endif %}
{% elif compiler == "apple-clang" or compiler == "clang" %}
{% if "address" in sanitizers or "thread" in sanitizers or "undefinedbehavior" in sanitizers %}
{% set sanitizer_list = [] %}
{% set extra_cxxflags = ["-fno-omit-frame-pointer", "-O1"] %}
{% if "address" in sanitizers %}
{% set _ = sanitizer_list.append("address") %}
{% elif "thread" in sanitizers %}
{% set _ = sanitizer_list.append("thread") %}
{% endif %}
{% if "undefinedbehavior" in sanitizers %}
{% set _ = sanitizer_list.append("undefined") %}
{% set _ = sanitizer_list.append("float-divide-by-zero") %}
{% set _ = sanitizer_list.append("unsigned-integer-overflow") %}
{% endif %}
{% set sanitizer_flags = "-fsanitize=" ~ ",".join(sanitizer_list) %}
tools.build:cxxflags+=['{{sanitizer_flags}} {{" ".join(extra_cxxflags)}}']
tools.build:sharedlinkflags+=['{{sanitizer_flags}}']
tools.build:exelinkflags+=['{{sanitizer_flags}}']
{% endif %}
{% endif %}
{% endif %}
tools.info.package_id:confs+=["tools.build:cxxflags", "tools.build:exelinkflags", "tools.build:sharedlinkflags"]

View File

@@ -23,6 +23,7 @@ class Xrpl(ConanFile):
"shared": [True, False], "shared": [True, False],
"static": [True, False], "static": [True, False],
"tests": [True, False], "tests": [True, False],
"unity": [True, False],
"xrpld": [True, False], "xrpld": [True, False],
} }
@@ -38,7 +39,7 @@ class Xrpl(ConanFile):
] ]
test_requires = [ test_requires = [
"gtest/1.17.0", "doctest/2.4.12",
] ]
tool_requires = [ tool_requires = [
@@ -54,6 +55,7 @@ class Xrpl(ConanFile):
"shared": False, "shared": False,
"static": True, "static": True,
"tests": False, "tests": False,
"unity": False,
"xrpld": False, "xrpld": False,
"date/*:header_only": True, "date/*:header_only": True,
"ed25519/*:shared": False, "ed25519/*:shared": False,
@@ -85,13 +87,7 @@ class Xrpl(ConanFile):
"libarchive/*:with_xattr": False, "libarchive/*:with_xattr": False,
"libarchive/*:with_zlib": False, "libarchive/*:with_zlib": False,
"lz4/*:shared": False, "lz4/*:shared": False,
"openssl/*:no_dtls": True,
"openssl/*:no_ssl": True,
"openssl/*:no_ssl3": True,
"openssl/*:no_tls1": True,
"openssl/*:no_tls1_1": True,
"openssl/*:shared": False, "openssl/*:shared": False,
"openssl/*:tls_security_level": 2,
"protobuf/*:shared": False, "protobuf/*:shared": False,
"protobuf/*:with_zlib": True, "protobuf/*:with_zlib": True,
"rocksdb/*:enable_sse": False, "rocksdb/*:enable_sse": False,
@@ -129,7 +125,7 @@ class Xrpl(ConanFile):
transitive_headers_opt = ( transitive_headers_opt = (
{"transitive_headers": True} if conan_version.split(".")[0] == "2" else {} {"transitive_headers": True} if conan_version.split(".")[0] == "2" else {}
) )
self.requires("boost/1.90.0", force=True, **transitive_headers_opt) self.requires("boost/1.88.0", force=True, **transitive_headers_opt)
self.requires("date/3.0.4", **transitive_headers_opt) self.requires("date/3.0.4", **transitive_headers_opt)
self.requires("lz4/1.10.0", force=True) self.requires("lz4/1.10.0", force=True)
self.requires("protobuf/6.32.1", force=True) self.requires("protobuf/6.32.1", force=True)
@@ -166,6 +162,7 @@ class Xrpl(ConanFile):
tc.variables["rocksdb"] = self.options.rocksdb tc.variables["rocksdb"] = self.options.rocksdb
tc.variables["BUILD_SHARED_LIBS"] = self.options.shared tc.variables["BUILD_SHARED_LIBS"] = self.options.shared
tc.variables["static"] = self.options.static tc.variables["static"] = self.options.static
tc.variables["unity"] = self.options.unity
tc.variables["xrpld"] = self.options.xrpld tc.variables["xrpld"] = self.options.xrpld
tc.generate() tc.generate()
@@ -200,6 +197,7 @@ class Xrpl(ConanFile):
"boost::program_options", "boost::program_options",
"boost::process", "boost::process",
"boost::regex", "boost::regex",
"boost::system",
"boost::thread", "boost::thread",
"date::date", "date::date",
"ed25519::ed25519", "ed25519::ed25519",

View File

@@ -1,207 +0,0 @@
# Sanitizer Configuration for Rippled
This document explains how to properly configure and run sanitizers (AddressSanitizer, UndefinedBehaviorSanitizer, ThreadSanitizer) with the xrpld project.
Corresponding suppression files are located in the `sanitizers/suppressions` directory.
- [Sanitizer Configuration for Rippled](#sanitizer-configuration-for-rippled)
- [Building with Sanitizers](#building-with-sanitizers)
- [Summary](#summary)
- [Build steps:](#build-steps)
- [Install dependencies](#install-dependencies)
- [Call CMake](#call-cmake)
- [Build](#build)
- [Running Tests with Sanitizers](#running-tests-with-sanitizers)
- [AddressSanitizer (ASAN)](#addresssanitizer-asan)
- [ThreadSanitizer (TSan)](#threadsanitizer-tsan)
- [LeakSanitizer (LSan)](#leaksanitizer-lsan)
- [UndefinedBehaviorSanitizer (UBSan)](#undefinedbehaviorsanitizer-ubsan)
- [Suppression Files](#suppression-files)
- [`asan.supp`](#asansupp)
- [`lsan.supp`](#lsansupp)
- [`ubsan.supp`](#ubsansupp)
- [`tsan.supp`](#tsansupp)
- [`sanitizer-ignorelist.txt`](#sanitizer-ignorelisttxt)
- [Troubleshooting](#troubleshooting)
- ["ASAN is ignoring requested \_\_asan_handle_no_return" warnings](#asan-is-ignoring-requested-__asan_handle_no_return-warnings)
- [Sanitizer Mismatch Errors](#sanitizer-mismatch-errors)
- [References](#references)
## Building with Sanitizers
### Summary
Follow the same instructions as mentioned in [BUILD.md](../../BUILD.md) but with the following changes:
1. Make sure you have a clean build directory.
2. Set the `SANITIZERS` environment variable before calling conan install and cmake. Only set it once. Make sure both conan and cmake read the same values.
Example: `export SANITIZERS=address,undefinedbehavior`
3. Optionally use `--profile:all sanitizers` with Conan to build dependencies with sanitizer instrumentation. [!NOTE] Building with sanitizer-instrumented dependencies is slower but produces fewer false positives.
4. Set `ASAN_OPTIONS`, `LSAN_OPTIONS`, `UBSAN_OPTIONS` and `TSAN_OPTIONS` environment variables to configure sanitizer behavior when running executables. [More details below](#running-tests-with-sanitizers).
---
### Build steps:
```bash
cd /path/to/rippled
rm -rf .build
mkdir .build
cd .build
```
#### Install dependencies
The `SANITIZERS` environment variable is used by both Conan and CMake.
```bash
export SANITIZERS=address,undefinedbehavior
# Standard build (without instrumenting dependencies)
conan install .. --output-folder . --build missing --settings build_type=Debug
# Or with sanitizer-instrumented dependencies (takes longer but fewer false positives)
conan install .. --output-folder . --profile:all sanitizers --build missing --settings build_type=Debug
```
[!CAUTION]
Do not mix Address and Thread sanitizers - they are incompatible.
Since you already set the `SANITIZERS` environment variable when running Conan, same values will be read for the next part.
#### Call CMake
```bash
cmake .. -G Ninja \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=Debug \
-Dtests=ON -Dxrpld=ON
```
#### Build
```bash
cmake --build . --parallel 4
```
## Running Tests with Sanitizers
### AddressSanitizer (ASAN)
**IMPORTANT**: ASAN with Boost produces many false positives. Use these options:
```bash
export ASAN_OPTIONS="print_stacktrace=1:detect_container_overflow=0:suppressions=path/to/asan.supp:halt_on_error=0:log_path=asan.log"
export LSAN_OPTIONS="suppressions=path/to/lsan.supp:halt_on_error=0:log_path=lsan.log"
# Run tests
./xrpld --unittest --unittest-jobs=5
```
**Why `detect_container_overflow=0`?**
- Boost intrusive containers (used in `aged_unordered_container`) trigger false positives
- Boost context switching (used in `Workers.cpp`) confuses ASAN's stack tracking
- Since we usually don't build Boost (because we don't want to instrument Boost and detect issues in Boost code) with ASAN but use Boost containers in ASAN instrumented rippled code, it generates false positives.
- Building dependencies with ASAN instrumentation reduces false positives. But we don't want to instrument dependencies like Boost with ASAN because it is slow (to compile as well as run tests) and not necessary.
- See: https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow
- More such flags are detailed [here](https://github.com/google/sanitizers/wiki/AddressSanitizerFlags)
### ThreadSanitizer (TSan)
```bash
export TSAN_OPTIONS="suppressions=path/to/tsan.supp halt_on_error=0 log_path=tsan.log"
# Run tests
./xrpld --unittest --unittest-jobs=5
```
More details [here](https://github.com/google/sanitizers/wiki/ThreadSanitizerCppManual).
### LeakSanitizer (LSan)
LSan is automatically enabled with ASAN. To disable it:
```bash
export ASAN_OPTIONS="detect_leaks=0"
```
More details [here](https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer).
### UndefinedBehaviorSanitizer (UBSan)
```bash
export UBSAN_OPTIONS="suppressions=path/to/ubsan.supp:print_stacktrace=1:halt_on_error=0:log_path=ubsan.log"
# Run tests
./xrpld --unittest --unittest-jobs=5
```
More details [here](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html).
## Suppression Files
[!NOTE] Attached files contain more details.
### [`asan.supp`](../../sanitizers/suppressions/asan.supp)
- **Purpose**: Suppress AddressSanitizer (ASAN) errors only
- **Format**: `interceptor_name:<pattern>` where pattern matches file names. Supported suppression types are:
- interceptor_name
- interceptor_via_fun
- interceptor_via_lib
- odr_violation
- **More info**: [AddressSanitizer](https://github.com/google/sanitizers/wiki/AddressSanitizer)
- **Note**: Cannot suppress stack-buffer-overflow, container-overflow, etc.
### [`lsan.supp`](../../sanitizers/suppressions/lsan.supp)
- **Purpose**: Suppress LeakSanitizer (LSan) errors only
- **Format**: `leak:<pattern>` where pattern matches function/file names
- **More info**: [LeakSanitizer](https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer)
### [`ubsan.supp`](../../sanitizers/suppressions/ubsan.supp)
- **Purpose**: Suppress UndefinedBehaviorSanitizer errors
- **Format**: `<error_type>:<pattern>` (e.g., `unsigned-integer-overflow:protobuf`)
- **Covers**: Intentional overflows in third-party libraries (protobuf, gRPC, stdlib)
- More info [UBSan suppressions](https://clang.llvm.org/docs/SanitizerSpecialCaseList.html).
### [`tsan.supp`](../../sanitizers/suppressions/tsan.supp)
- **Purpose**: Suppress ThreadSanitizer data race warnings
- **Format**: `race:<pattern>` where pattern matches function/file names
- **More info**: [ThreadSanitizer suppressions](https://github.com/google/sanitizers/wiki/ThreadSanitizerSuppressions)
### [`sanitizer-ignorelist.txt`](../../sanitizers/suppressions/sanitizer-ignorelist.txt)
- **Purpose**: Compile-time ignorelist for all sanitizers
- **Usage**: Passed via `-fsanitize-ignorelist=absolute/path/to/sanitizer-ignorelist.txt`
- **Format**: `<level>:<pattern>` (e.g., `src:Workers.cpp`)
## Troubleshooting
### "ASAN is ignoring requested \_\_asan_handle_no_return" warnings
These warnings appear when using Boost context switching and are harmless. They indicate potential false positives.
### Sanitizer Mismatch Errors
If you see undefined symbols like `___tsan_atomic_load` when building with ASAN:
**Problem**: Dependencies were built with a different sanitizer than the main project.
**Solution**: Rebuild everything with the same sanitizer:
```bash
rm -rf .build
# Then follow the build instructions above
```
Then review the log files: `asan.log.*`, `ubsan.log.*`, `tsan.log.*`
## References
- [AddressSanitizer Wiki](https://github.com/google/sanitizers/wiki/AddressSanitizer)
- [AddressSanitizer Flags](https://github.com/google/sanitizers/wiki/AddressSanitizerFlags)
- [Container Overflow Detection](https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow)
- [UndefinedBehavior Sanitizer](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html)
- [ThreadSanitizer](https://github.com/google/sanitizers/wiki/ThreadSanitizerCppManual)

View File

@@ -13,7 +13,9 @@ namespace xrpl {
@throws runtime_error @throws runtime_error
*/ */
void void
extractTarLz4(boost::filesystem::path const& src, boost::filesystem::path const& dst); extractTarLz4(
boost::filesystem::path const& src,
boost::filesystem::path const& dst);
} // namespace xrpl } // namespace xrpl

View File

@@ -14,7 +14,8 @@
namespace xrpl { namespace xrpl {
using IniFileSections = std::unordered_map<std::string, std::vector<std::string>>; using IniFileSections =
std::unordered_map<std::string, std::vector<std::string>>;
//------------------------------------------------------------------------------ //------------------------------------------------------------------------------
@@ -85,7 +86,8 @@ public:
if (lines_.empty()) if (lines_.empty())
return ""; return "";
if (lines_.size() > 1) if (lines_.size() > 1)
Throw<std::runtime_error>("A legacy value must have exactly one line. Section: " + name_); Throw<std::runtime_error>(
"A legacy value must have exactly one line. Section: " + name_);
return lines_[0]; return lines_[0];
} }
@@ -231,7 +233,10 @@ public:
The previous value, if any, is overwritten. The previous value, if any, is overwritten.
*/ */
void void
overwrite(std::string const& section, std::string const& key, std::string const& value); overwrite(
std::string const& section,
std::string const& key,
std::string const& value);
/** Remove all the key/value pairs from the section. /** Remove all the key/value pairs from the section.
*/ */
@@ -269,7 +274,9 @@ public:
bool bool
had_trailing_comments() const had_trailing_comments() const
{ {
return std::any_of(map_.cbegin(), map_.cend(), [](auto s) { return s.second.had_trailing_comments(); }); return std::any_of(map_.cbegin(), map_.cend(), [](auto s) {
return s.second.had_trailing_comments();
});
} }
protected: protected:
@@ -308,7 +315,10 @@ set(T& target, std::string const& name, Section const& section)
*/ */
template <class T> template <class T>
bool bool
set(T& target, T const& defaultValue, std::string const& name, Section const& section) set(T& target,
T const& defaultValue,
std::string const& name,
Section const& section)
{ {
bool found_and_valid = set<T>(target, name, section); bool found_and_valid = set<T>(target, name, section);
if (!found_and_valid) if (!found_and_valid)
@@ -323,7 +333,9 @@ set(T& target, T const& defaultValue, std::string const& name, Section const& se
// NOTE This routine might be more clumsy than the previous two // NOTE This routine might be more clumsy than the previous two
template <class T = std::string> template <class T = std::string>
T T
get(Section const& section, std::string const& name, T const& defaultValue = T{}) get(Section const& section,
std::string const& name,
T const& defaultValue = T{})
{ {
try try
{ {

View File

@@ -25,7 +25,8 @@ public:
Buffer() = default; Buffer() = default;
/** Create an uninitialized buffer with the given size. */ /** Create an uninitialized buffer with the given size. */
explicit Buffer(std::size_t size) : p_(size ? new std::uint8_t[size] : nullptr), size_(size) explicit Buffer(std::size_t size)
: p_(size ? new std::uint8_t[size] : nullptr), size_(size)
{ {
} }
@@ -61,7 +62,8 @@ public:
/** Move-construct. /** Move-construct.
The other buffer is reset. The other buffer is reset.
*/ */
Buffer(Buffer&& other) noexcept : p_(std::move(other.p_)), size_(other.size_) Buffer(Buffer&& other) noexcept
: p_(std::move(other.p_)), size_(other.size_)
{ {
other.size_ = 0; other.size_ = 0;
} }
@@ -92,7 +94,8 @@ public:
{ {
// Ensure the slice isn't a subset of the buffer. // Ensure the slice isn't a subset of the buffer.
XRPL_ASSERT( XRPL_ASSERT(
s.size() == 0 || size_ == 0 || s.data() < p_.get() || s.data() >= p_.get() + size_, s.size() == 0 || size_ == 0 || s.data() < p_.get() ||
s.data() >= p_.get() + size_,
"xrpl::Buffer::operator=(Slice) : input not a subset"); "xrpl::Buffer::operator=(Slice) : input not a subset");
if (auto p = alloc(s.size())) if (auto p = alloc(s.size()))

View File

@@ -36,7 +36,10 @@ lz4Compress(void const* in, std::size_t inSize, BufferFactory&& bf)
auto compressed = bf(outCapacity); auto compressed = bf(outCapacity);
auto compressedSize = LZ4_compress_default( auto compressedSize = LZ4_compress_default(
reinterpret_cast<char const*>(in), reinterpret_cast<char*>(compressed), inSize, outCapacity); reinterpret_cast<char const*>(in),
reinterpret_cast<char*>(compressed),
inSize,
outCapacity);
if (compressedSize == 0) if (compressedSize == 0)
Throw<std::runtime_error>("lz4 compress: failed"); Throw<std::runtime_error>("lz4 compress: failed");
@@ -67,8 +70,10 @@ lz4Decompress(
Throw<std::runtime_error>("lz4Decompress: integer overflow (output)"); Throw<std::runtime_error>("lz4Decompress: integer overflow (output)");
if (LZ4_decompress_safe( if (LZ4_decompress_safe(
reinterpret_cast<char const*>(in), reinterpret_cast<char*>(decompressed), inSize, decompressedSize) != reinterpret_cast<char const*>(in),
decompressedSize) reinterpret_cast<char*>(decompressed),
inSize,
decompressedSize) != decompressedSize)
Throw<std::runtime_error>("lz4Decompress: failed"); Throw<std::runtime_error>("lz4Decompress: failed");
return decompressedSize; return decompressedSize;
@@ -84,7 +89,11 @@ lz4Decompress(
*/ */
template <typename InputStream> template <typename InputStream>
std::size_t std::size_t
lz4Decompress(InputStream& in, std::size_t inSize, std::uint8_t* decompressed, std::size_t decompressedSize) lz4Decompress(
InputStream& in,
std::size_t inSize,
std::uint8_t* decompressed,
std::size_t decompressedSize)
{ {
std::vector<std::uint8_t> compressed; std::vector<std::uint8_t> compressed;
std::uint8_t const* chunk = nullptr; std::uint8_t const* chunk = nullptr;
@@ -107,7 +116,9 @@ lz4Decompress(InputStream& in, std::size_t inSize, std::uint8_t* decompressed, s
compressed.resize(inSize); compressed.resize(inSize);
} }
chunkSize = chunkSize < (inSize - copiedInSize) ? chunkSize : (inSize - copiedInSize); chunkSize = chunkSize < (inSize - copiedInSize)
? chunkSize
: (inSize - copiedInSize);
std::copy(chunk, chunk + chunkSize, compressed.data() + copiedInSize); std::copy(chunk, chunk + chunkSize, compressed.data() + copiedInSize);
@@ -124,7 +135,8 @@ lz4Decompress(InputStream& in, std::size_t inSize, std::uint8_t* decompressed, s
if (in.ByteCount() > (currentBytes + copiedInSize)) if (in.ByteCount() > (currentBytes + copiedInSize))
in.BackUp(in.ByteCount() - currentBytes - copiedInSize); in.BackUp(in.ByteCount() - currentBytes - copiedInSize);
if ((copiedInSize == 0 && chunkSize < inSize) || (copiedInSize > 0 && copiedInSize != inSize)) if ((copiedInSize == 0 && chunkSize < inSize) ||
(copiedInSize > 0 && copiedInSize != inSize))
Throw<std::runtime_error>("lz4 decompress: insufficient input size"); Throw<std::runtime_error>("lz4 decompress: insufficient input size");
return lz4Decompress(chunk, inSize, decompressed, decompressedSize); return lz4Decompress(chunk, inSize, decompressed, decompressedSize);

View File

@@ -56,7 +56,9 @@ private:
if (m_value != value_type()) if (m_value != value_type())
{ {
std::size_t elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - m_when).count(); std::size_t elapsed =
std::chrono::duration_cast<std::chrono::seconds>(now - m_when)
.count();
// A span larger than four times the window decays the // A span larger than four times the window decays the
// value to an insignificant amount so just reset it. // value to an insignificant amount so just reset it.

View File

@@ -108,20 +108,23 @@ Unexpected(E (&)[N]) -> Unexpected<E const*>;
// Definition of Expected. All of the machinery comes from boost::result. // Definition of Expected. All of the machinery comes from boost::result.
template <class T, class E> template <class T, class E>
class [[nodiscard]] Expected : private boost::outcome_v2::result<T, E, detail::throw_policy> class [[nodiscard]] Expected
: private boost::outcome_v2::result<T, E, detail::throw_policy>
{ {
using Base = boost::outcome_v2::result<T, E, detail::throw_policy>; using Base = boost::outcome_v2::result<T, E, detail::throw_policy>;
public: public:
template <typename U> template <typename U>
requires std::convertible_to<U, T> requires std::convertible_to<U, T>
constexpr Expected(U&& r) : Base(boost::outcome_v2::in_place_type_t<T>{}, std::forward<U>(r)) constexpr Expected(U&& r)
: Base(boost::outcome_v2::in_place_type_t<T>{}, std::forward<U>(r))
{ {
} }
template <typename U> template <typename U>
requires std::convertible_to<U, E> && (!std::is_reference_v<U>) requires std::convertible_to<U, E> && (!std::is_reference_v<U>)
constexpr Expected(Unexpected<U> e) : Base(boost::outcome_v2::in_place_type_t<E>{}, std::move(e.value())) constexpr Expected(Unexpected<U> e)
: Base(boost::outcome_v2::in_place_type_t<E>{}, std::move(e.value()))
{ {
} }
@@ -192,7 +195,8 @@ public:
// Specialization of Expected<void, E>. Allows returning either success // Specialization of Expected<void, E>. Allows returning either success
// (without a value) or the reason for the failure. // (without a value) or the reason for the failure.
template <class E> template <class E>
class [[nodiscard]] Expected<void, E> : private boost::outcome_v2::result<void, E, detail::throw_policy> class [[nodiscard]] Expected<void, E>
: private boost::outcome_v2::result<void, E, detail::throw_policy>
{ {
using Base = boost::outcome_v2::result<void, E, detail::throw_policy>; using Base = boost::outcome_v2::result<void, E, detail::throw_policy>;

View File

@@ -15,7 +15,10 @@ getFileContents(
std::optional<std::size_t> maxSize = std::nullopt); std::optional<std::size_t> maxSize = std::nullopt);
void void
writeFileContents(boost::system::error_code& ec, boost::filesystem::path const& destPath, std::string const& contents); writeFileContents(
boost::system::error_code& ec,
boost::filesystem::path const& destPath,
std::string const& contents);
} // namespace xrpl } // namespace xrpl

View File

@@ -45,8 +45,8 @@ struct SharedIntrusiveAdoptNoIncrementTag
// //
template <class T> template <class T>
concept CAdoptTag = concept CAdoptTag = std::is_same_v<T, SharedIntrusiveAdoptIncrementStrongTag> ||
std::is_same_v<T, SharedIntrusiveAdoptIncrementStrongTag> || std::is_same_v<T, SharedIntrusiveAdoptNoIncrementTag>; std::is_same_v<T, SharedIntrusiveAdoptNoIncrementTag>;
//------------------------------------------------------------------------------ //------------------------------------------------------------------------------
@@ -58,7 +58,7 @@ concept CAdoptTag =
When the strong pointer count goes to zero, the "partialDestructor" is When the strong pointer count goes to zero, the "partialDestructor" is
called. This can be used to destroy as much of the object as possible while called. This can be used to destroy as much of the object as possible while
still retaining the reference counts. For example, for SHAMapInnerNodes the still retaining the reference counts. For example, for SHAMapInnerNodes the
children may be reset in that function. Note that std::shared_pointer WILL children may be reset in that function. Note that std::shared_poiner WILL
run the destructor when the strong count reaches zero, but may not free the run the destructor when the strong count reaches zero, but may not free the
memory used by the object until the weak count reaches zero. In rippled, we memory used by the object until the weak count reaches zero. In rippled, we
typically allocate shared pointers with the `make_shared` function. When typically allocate shared pointers with the `make_shared` function. When
@@ -122,7 +122,9 @@ public:
controlled by the rhs param. controlled by the rhs param.
*/ */
template <class TT> template <class TT>
SharedIntrusive(StaticCastTagSharedIntrusive, SharedIntrusive<TT> const& rhs); SharedIntrusive(
StaticCastTagSharedIntrusive,
SharedIntrusive<TT> const& rhs);
/** Create a new SharedIntrusive by statically casting the pointer /** Create a new SharedIntrusive by statically casting the pointer
controlled by the rhs param. controlled by the rhs param.
@@ -134,7 +136,9 @@ public:
controlled by the rhs param. controlled by the rhs param.
*/ */
template <class TT> template <class TT>
SharedIntrusive(DynamicCastTagSharedIntrusive, SharedIntrusive<TT> const& rhs); SharedIntrusive(
DynamicCastTagSharedIntrusive,
SharedIntrusive<TT> const& rhs);
/** Create a new SharedIntrusive by dynamically casting the pointer /** Create a new SharedIntrusive by dynamically casting the pointer
controlled by the rhs param. controlled by the rhs param.
@@ -300,7 +304,9 @@ class SharedWeakUnion
// Tagged pointer. Low bit determines if this is a strong or a weak // Tagged pointer. Low bit determines if this is a strong or a weak
// pointer. The low bit must be masked to zero when converting back to a // pointer. The low bit must be masked to zero when converting back to a
// pointer. If the low bit is '1', this is a weak pointer. // pointer. If the low bit is '1', this is a weak pointer.
static_assert(alignof(T) >= 2, "Bad alignment: Combo pointer requires low bit to be zero"); static_assert(
alignof(T) >= 2,
"Bad alignment: Combo pointer requires low bit to be zero");
public: public:
SharedWeakUnion() = default; SharedWeakUnion() = default;
@@ -444,7 +450,9 @@ make_SharedIntrusive(Args&&... args)
auto p = new TT(std::forward<Args>(args)...); auto p = new TT(std::forward<Args>(args)...);
static_assert( static_assert(
noexcept(SharedIntrusive<TT>(std::declval<TT*>(), std::declval<SharedIntrusiveAdoptNoIncrementTag>())), noexcept(SharedIntrusive<TT>(
std::declval<TT*>(),
std::declval<SharedIntrusiveAdoptNoIncrementTag>())),
"SharedIntrusive constructor should not throw or this can leak " "SharedIntrusive constructor should not throw or this can leak "
"memory"); "memory");

View File

@@ -12,7 +12,9 @@ template <class T>
template <CAdoptTag TAdoptTag> template <CAdoptTag TAdoptTag>
SharedIntrusive<T>::SharedIntrusive(T* p, TAdoptTag) noexcept : ptr_{p} SharedIntrusive<T>::SharedIntrusive(T* p, TAdoptTag) noexcept : ptr_{p}
{ {
if constexpr (std::is_same_v<TAdoptTag, SharedIntrusiveAdoptIncrementStrongTag>) if constexpr (std::is_same_v<
TAdoptTag,
SharedIntrusiveAdoptIncrementStrongTag>)
{ {
if (p) if (p)
p->addStrongRef(); p->addStrongRef();
@@ -44,14 +46,16 @@ SharedIntrusive<T>::SharedIntrusive(SharedIntrusive<TT> const& rhs)
} }
template <class T> template <class T>
SharedIntrusive<T>::SharedIntrusive(SharedIntrusive&& rhs) : ptr_{rhs.unsafeExchange(nullptr)} SharedIntrusive<T>::SharedIntrusive(SharedIntrusive&& rhs)
: ptr_{rhs.unsafeExchange(nullptr)}
{ {
} }
template <class T> template <class T>
template <class TT> template <class TT>
requires std::convertible_to<TT*, T*> requires std::convertible_to<TT*, T*>
SharedIntrusive<T>::SharedIntrusive(SharedIntrusive<TT>&& rhs) : ptr_{rhs.unsafeExchange(nullptr)} SharedIntrusive<T>::SharedIntrusive(SharedIntrusive<TT>&& rhs)
: ptr_{rhs.unsafeExchange(nullptr)}
{ {
} }
template <class T> template <class T>
@@ -108,7 +112,9 @@ requires std::convertible_to<TT*, T*>
SharedIntrusive<T>& SharedIntrusive<T>&
SharedIntrusive<T>::operator=(SharedIntrusive<TT>&& rhs) SharedIntrusive<T>::operator=(SharedIntrusive<TT>&& rhs)
{ {
static_assert(!std::is_same_v<T, TT>, "This overload should not be instantiated for T == TT"); static_assert(
!std::is_same_v<T, TT>,
"This overload should not be instantiated for T == TT");
unsafeReleaseAndStore(rhs.unsafeExchange(nullptr)); unsafeReleaseAndStore(rhs.unsafeExchange(nullptr));
return *this; return *this;
@@ -133,7 +139,9 @@ template <CAdoptTag TAdoptTag>
void void
SharedIntrusive<T>::adopt(T* p) SharedIntrusive<T>::adopt(T* p)
{ {
if constexpr (std::is_same_v<TAdoptTag, SharedIntrusiveAdoptIncrementStrongTag>) if constexpr (std::is_same_v<
TAdoptTag,
SharedIntrusiveAdoptIncrementStrongTag>)
{ {
if (p) if (p)
p->addStrongRef(); p->addStrongRef();
@@ -149,7 +157,9 @@ SharedIntrusive<T>::~SharedIntrusive()
template <class T> template <class T>
template <class TT> template <class TT>
SharedIntrusive<T>::SharedIntrusive(StaticCastTagSharedIntrusive, SharedIntrusive<TT> const& rhs) SharedIntrusive<T>::SharedIntrusive(
StaticCastTagSharedIntrusive,
SharedIntrusive<TT> const& rhs)
: ptr_{[&] { : ptr_{[&] {
auto p = static_cast<T*>(rhs.unsafeGetRawPtr()); auto p = static_cast<T*>(rhs.unsafeGetRawPtr());
if (p) if (p)
@@ -161,14 +171,18 @@ SharedIntrusive<T>::SharedIntrusive(StaticCastTagSharedIntrusive, SharedIntrusiv
template <class T> template <class T>
template <class TT> template <class TT>
SharedIntrusive<T>::SharedIntrusive(StaticCastTagSharedIntrusive, SharedIntrusive<TT>&& rhs) SharedIntrusive<T>::SharedIntrusive(
StaticCastTagSharedIntrusive,
SharedIntrusive<TT>&& rhs)
: ptr_{static_cast<T*>(rhs.unsafeExchange(nullptr))} : ptr_{static_cast<T*>(rhs.unsafeExchange(nullptr))}
{ {
} }
template <class T> template <class T>
template <class TT> template <class TT>
SharedIntrusive<T>::SharedIntrusive(DynamicCastTagSharedIntrusive, SharedIntrusive<TT> const& rhs) SharedIntrusive<T>::SharedIntrusive(
DynamicCastTagSharedIntrusive,
SharedIntrusive<TT> const& rhs)
: ptr_{[&] { : ptr_{[&] {
auto p = dynamic_cast<T*>(rhs.unsafeGetRawPtr()); auto p = dynamic_cast<T*>(rhs.unsafeGetRawPtr());
if (p) if (p)
@@ -180,7 +194,9 @@ SharedIntrusive<T>::SharedIntrusive(DynamicCastTagSharedIntrusive, SharedIntrusi
template <class T> template <class T>
template <class TT> template <class TT>
SharedIntrusive<T>::SharedIntrusive(DynamicCastTagSharedIntrusive, SharedIntrusive<TT>&& rhs) SharedIntrusive<T>::SharedIntrusive(
DynamicCastTagSharedIntrusive,
SharedIntrusive<TT>&& rhs)
{ {
// This can be simplified without the `exchange`, but the `exchange` is kept // This can be simplified without the `exchange`, but the `exchange` is kept
// in anticipation of supporting atomic operations. // in anticipation of supporting atomic operations.
@@ -299,7 +315,8 @@ WeakIntrusive<T>::WeakIntrusive(WeakIntrusive&& rhs) : ptr_{rhs.ptr_}
} }
template <class T> template <class T>
WeakIntrusive<T>::WeakIntrusive(SharedIntrusive<T> const& rhs) : ptr_{rhs.unsafeGetRawPtr()} WeakIntrusive<T>::WeakIntrusive(SharedIntrusive<T> const& rhs)
: ptr_{rhs.unsafeGetRawPtr()}
{ {
if (ptr_) if (ptr_)
ptr_->addWeakRef(); ptr_->addWeakRef();

View File

@@ -160,19 +160,22 @@ private:
See description of the `refCounts` field for a fuller description of See description of the `refCounts` field for a fuller description of
this field. this field.
*/ */
static constexpr FieldType partialDestroyStartedMask = (one << (FieldTypeBits - 1)); static constexpr FieldType partialDestroyStartedMask =
(one << (FieldTypeBits - 1));
/** Flag that is set when the partialDestroy function has finished running /** Flag that is set when the partialDestroy function has finished running
See description of the `refCounts` field for a fuller description of See description of the `refCounts` field for a fuller description of
this field. this field.
*/ */
static constexpr FieldType partialDestroyFinishedMask = (one << (FieldTypeBits - 2)); static constexpr FieldType partialDestroyFinishedMask =
(one << (FieldTypeBits - 2));
/** Mask that will zero out all the `count` bits and leave the tag bits /** Mask that will zero out all the `count` bits and leave the tag bits
unchanged. unchanged.
*/ */
static constexpr FieldType tagMask = partialDestroyStartedMask | partialDestroyFinishedMask; static constexpr FieldType tagMask =
partialDestroyStartedMask | partialDestroyFinishedMask;
/** Mask that will zero out the `tag` bits and leave the count bits /** Mask that will zero out the `tag` bits and leave the count bits
unchanged. unchanged.
@@ -181,11 +184,13 @@ private:
/** Mask that will zero out everything except the strong count. /** Mask that will zero out everything except the strong count.
*/ */
static constexpr FieldType strongMask = ((one << StrongCountNumBits) - 1) & valueMask; static constexpr FieldType strongMask =
((one << StrongCountNumBits) - 1) & valueMask;
/** Mask that will zero out everything except the weak count. /** Mask that will zero out everything except the weak count.
*/ */
static constexpr FieldType weakMask = (((one << WeakCountNumBits) - 1) << StrongCountNumBits) & valueMask; static constexpr FieldType weakMask =
(((one << WeakCountNumBits) - 1) << StrongCountNumBits) & valueMask;
/** Unpack the count and tag fields from the packed atomic integer form. */ /** Unpack the count and tag fields from the packed atomic integer form. */
struct RefCountPair struct RefCountPair
@@ -210,8 +215,10 @@ private:
FieldType FieldType
combinedValue() const noexcept; combinedValue() const noexcept;
static constexpr CountType maxStrongValue = static_cast<CountType>((one << StrongCountNumBits) - 1); static constexpr CountType maxStrongValue =
static constexpr CountType maxWeakValue = static_cast<CountType>((one << WeakCountNumBits) - 1); static_cast<CountType>((one << StrongCountNumBits) - 1);
static constexpr CountType maxWeakValue =
static_cast<CountType>((one << WeakCountNumBits) - 1);
/** Put an extra margin to detect when running up against limits. /** Put an extra margin to detect when running up against limits.
This is only used in debug code, and is useful if we reduce the This is only used in debug code, and is useful if we reduce the
number of bits in the strong and weak counts (to 16 and 14 bits). number of bits in the strong and weak counts (to 16 and 14 bits).
@@ -267,7 +274,8 @@ IntrusiveRefCounts::releaseStrongRef() const
} }
} }
if (refCounts.compare_exchange_weak(prevIntVal, nextIntVal, std::memory_order_acq_rel)) if (refCounts.compare_exchange_weak(
prevIntVal, nextIntVal, std::memory_order_acq_rel))
{ {
// Can't be in partial destroy because only decrementing the strong // Can't be in partial destroy because only decrementing the strong
// count to zero can start a partial destroy, and that can't happen // count to zero can start a partial destroy, and that can't happen
@@ -323,7 +331,8 @@ IntrusiveRefCounts::addWeakReleaseStrongRef() const
action = partialDestroy; action = partialDestroy;
} }
} }
if (refCounts.compare_exchange_weak(prevIntVal, nextIntVal, std::memory_order_acq_rel)) if (refCounts.compare_exchange_weak(
prevIntVal, nextIntVal, std::memory_order_acq_rel))
{ {
XRPL_ASSERT( XRPL_ASSERT(
(!(prevIntVal & partialDestroyStartedMask)), (!(prevIntVal & partialDestroyStartedMask)),
@@ -367,7 +376,8 @@ IntrusiveRefCounts::checkoutStrongRefFromWeak() const noexcept
auto curValue = RefCountPair{1, 1}.combinedValue(); auto curValue = RefCountPair{1, 1}.combinedValue();
auto desiredValue = RefCountPair{2, 1}.combinedValue(); auto desiredValue = RefCountPair{2, 1}.combinedValue();
while (!refCounts.compare_exchange_weak(curValue, desiredValue, std::memory_order_acq_rel)) while (!refCounts.compare_exchange_weak(
curValue, desiredValue, std::memory_order_acq_rel))
{ {
RefCountPair const prev{curValue}; RefCountPair const prev{curValue};
if (!prev.strong) if (!prev.strong)
@@ -396,15 +406,20 @@ inline IntrusiveRefCounts::~IntrusiveRefCounts() noexcept
{ {
#ifndef NDEBUG #ifndef NDEBUG
auto v = refCounts.load(std::memory_order_acquire); auto v = refCounts.load(std::memory_order_acquire);
XRPL_ASSERT((!(v & valueMask)), "xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : count must be zero"); XRPL_ASSERT(
(!(v & valueMask)),
"xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : count must be zero");
auto t = v & tagMask; auto t = v & tagMask;
XRPL_ASSERT((!t || t == tagMask), "xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : valid tag"); XRPL_ASSERT(
(!t || t == tagMask),
"xrpl::IntrusiveRefCounts::~IntrusiveRefCounts : valid tag");
#endif #endif
} }
//------------------------------------------------------------------------------ //------------------------------------------------------------------------------
inline IntrusiveRefCounts::RefCountPair::RefCountPair(IntrusiveRefCounts::FieldType v) noexcept inline IntrusiveRefCounts::RefCountPair::RefCountPair(
IntrusiveRefCounts::FieldType v) noexcept
: strong{static_cast<CountType>(v & strongMask)} : strong{static_cast<CountType>(v & strongMask)}
, weak{static_cast<CountType>((v & weakMask) >> StrongCountNumBits)} , weak{static_cast<CountType>((v & weakMask) >> StrongCountNumBits)}
, partialDestroyStartedBit{v & partialDestroyStartedMask} , partialDestroyStartedBit{v & partialDestroyStartedMask}
@@ -434,8 +449,10 @@ IntrusiveRefCounts::RefCountPair::combinedValue() const noexcept
(strong < checkStrongMaxValue && weak < checkWeakMaxValue), (strong < checkStrongMaxValue && weak < checkWeakMaxValue),
"xrpl::IntrusiveRefCounts::RefCountPair::combinedValue : inputs " "xrpl::IntrusiveRefCounts::RefCountPair::combinedValue : inputs "
"inside range"); "inside range");
return (static_cast<IntrusiveRefCounts::FieldType>(weak) << IntrusiveRefCounts::StrongCountNumBits) | return (static_cast<IntrusiveRefCounts::FieldType>(weak)
static_cast<IntrusiveRefCounts::FieldType>(strong) | partialDestroyStartedBit | partialDestroyFinishedBit; << IntrusiveRefCounts::StrongCountNumBits) |
static_cast<IntrusiveRefCounts::FieldType>(strong) |
partialDestroyStartedBit | partialDestroyFinishedBit;
} }
template <class T> template <class T>
@@ -443,9 +460,11 @@ inline void
partialDestructorFinished(T** o) partialDestructorFinished(T** o)
{ {
T& self = **o; T& self = **o;
IntrusiveRefCounts::RefCountPair p = self.refCounts.fetch_or(IntrusiveRefCounts::partialDestroyFinishedMask); IntrusiveRefCounts::RefCountPair p =
self.refCounts.fetch_or(IntrusiveRefCounts::partialDestroyFinishedMask);
XRPL_ASSERT( XRPL_ASSERT(
(!p.partialDestroyFinishedBit && p.partialDestroyStartedBit && !p.strong), (!p.partialDestroyFinishedBit && p.partialDestroyStartedBit &&
!p.strong),
"xrpl::partialDestructorFinished : not a weak ref"); "xrpl::partialDestructorFinished : not a weak ref");
if (!p.weak) if (!p.weak)
{ {

View File

@@ -55,7 +55,8 @@ template <class = void>
boost::thread_specific_ptr<detail::LocalValues>& boost::thread_specific_ptr<detail::LocalValues>&
getLocalValues() getLocalValues()
{ {
static boost::thread_specific_ptr<detail::LocalValues> tsp(&detail::LocalValues::cleanup); static boost::thread_specific_ptr<detail::LocalValues> tsp(
&detail::LocalValues::cleanup);
return tsp; return tsp;
} }
@@ -104,7 +105,9 @@ LocalValue<T>::operator*()
} }
return *reinterpret_cast<T*>( return *reinterpret_cast<T*>(
lvs->values.emplace(this, std::make_unique<detail::LocalValues::Value<T>>(t_)).first->second->get()); lvs->values
.emplace(this, std::make_unique<detail::LocalValues::Value<T>>(t_))
.first->second->get());
} }
} // namespace xrpl } // namespace xrpl

View File

@@ -39,17 +39,22 @@ private:
std::string partition_; std::string partition_;
public: public:
Sink(std::string const& partition, beast::severities::Severity thresh, Logs& logs); Sink(
std::string const& partition,
beast::severities::Severity thresh,
Logs& logs);
Sink(Sink const&) = delete; Sink(Sink const&) = delete;
Sink& Sink&
operator=(Sink const&) = delete; operator=(Sink const&) = delete;
void void
write(beast::severities::Severity level, std::string const& text) override; write(beast::severities::Severity level, std::string const& text)
override;
void void
writeAlways(beast::severities::Severity level, std::string const& text) override; writeAlways(beast::severities::Severity level, std::string const& text)
override;
}; };
/** Manages a system file containing logged output. /** Manages a system file containing logged output.
@@ -135,7 +140,11 @@ private:
}; };
std::mutex mutable mutex_; std::mutex mutable mutex_;
std::map<std::string, std::unique_ptr<beast::Journal::Sink>, boost::beast::iless> sinks_; std::map<
std::string,
std::unique_ptr<beast::Journal::Sink>,
boost::beast::iless>
sinks_;
beast::severities::Severity thresh_; beast::severities::Severity thresh_;
File file_; File file_;
bool silent_ = false; bool silent_ = false;
@@ -171,7 +180,11 @@ public:
partition_severities() const; partition_severities() const;
void void
write(beast::severities::Severity level, std::string const& partition, std::string const& text, bool console); write(
beast::severities::Severity level,
std::string const& partition,
std::string const& text,
bool console);
std::string std::string
rotate(); rotate();
@@ -188,7 +201,9 @@ public:
} }
virtual std::unique_ptr<beast::Journal::Sink> virtual std::unique_ptr<beast::Journal::Sink>
makeSink(std::string const& partition, beast::severities::Severity startingLevel); makeSink(
std::string const& partition,
beast::severities::Severity startingLevel);
public: public:
static LogSeverity static LogSeverity

View File

@@ -1,12 +1,8 @@
#ifndef XRPL_BASICS_NUMBER_H_INCLUDED #ifndef XRPL_BASICS_NUMBER_H_INCLUDED
#define XRPL_BASICS_NUMBER_H_INCLUDED #define XRPL_BASICS_NUMBER_H_INCLUDED
#include <xrpl/beast/utility/instrumentation.h>
#include <cstdint> #include <cstdint>
#include <functional>
#include <limits> #include <limits>
#include <optional>
#include <ostream> #include <ostream>
#include <string> #include <string>
@@ -17,237 +13,42 @@ class Number;
std::string std::string
to_string(Number const& amount); to_string(Number const& amount);
template <typename T>
constexpr std::optional<int>
logTen(T value)
{
int log = 0;
while (value >= 10 && value % 10 == 0)
{
value /= 10;
++log;
}
if (value == 1)
return log;
return std::nullopt;
}
template <typename T> template <typename T>
constexpr bool constexpr bool
isPowerOfTen(T value) isPowerOfTen(T value)
{ {
return logTen(value).has_value(); while (value >= 10 && value % 10 == 0)
value /= 10;
return value == 1;
} }
/** MantissaRange defines a range for the mantissa of a normalized Number.
*
* The mantissa is in the range [min, max], where
* * min is a power of 10, and
* * max = min * 10 - 1.
*
* The mantissa_scale enum indicates whether the range is "small" or "large".
* This intentionally restricts the number of MantissaRanges that can be
* instantiated to two: one for each scale.
*
* The "small" scale is based on the behavior of STAmount for IOUs. It has a min
* value of 10^15, and a max value of 10^16-1. This was sufficient for
* uses before Lending Protocol was implemented, mostly related to AMM.
*
* However, it does not have sufficient precision to represent the full integer
* range of int64_t values (-2^63 to 2^63-1), which are needed for XRP and MPT
* values. The implementation of SingleAssetVault, and LendingProtocol need to
* represent those integer values accurately and precisely, both for the
* STNumber field type, and for internal calculations. That necessitated the
* "large" scale.
*
* The "large" scale is intended to represent all values that can be represented
* by an STAmount - IOUs, XRP, and MPTs. It has a min value of 10^18, and a max
* value of 10^19-1.
*
* Note that if the mentioned amendments are eventually retired, this class
* should be left in place, but the "small" scale option should be removed. This
* will allow for future expansion beyond 64-bits if it is ever needed.
*/
struct MantissaRange
{
using rep = std::uint64_t;
enum mantissa_scale { small, large };
explicit constexpr MantissaRange(mantissa_scale scale_)
: min(getMin(scale_)), max(min * 10 - 1), log(logTen(min).value_or(-1)), scale(scale_)
{
}
rep min;
rep max;
int log;
mantissa_scale scale;
private:
static constexpr rep
getMin(mantissa_scale scale_)
{
switch (scale_)
{
case small:
return 1'000'000'000'000'000ULL;
case large:
return 1'000'000'000'000'000'000ULL;
default:
// Since this can never be called outside a non-constexpr
// context, this throw assures that the build fails if an
// invalid scale is used.
throw std::runtime_error("Unknown mantissa scale");
}
}
};
// Like std::integral, but only 64-bit integral types.
template <class T>
concept Integral64 = std::is_same_v<T, std::int64_t> || std::is_same_v<T, std::uint64_t>;
/** Number is a floating point type that can represent a wide range of values.
*
* It can represent all values that can be represented by an STAmount -
* regardless of asset type - XRPAmount, MPTAmount, and IOUAmount, with at least
* as much precision as those types require.
*
* ---- Internal Representation ----
*
* Internally, Number is represented with three values:
* 1. a bool sign flag,
* 2. a std::uint64_t mantissa,
* 3. an int exponent.
*
* The internal mantissa is an unsigned integer in the range defined by the
* current MantissaRange. The exponent is an integer in the range
* [minExponent, maxExponent].
*
* See the description of MantissaRange for more details on the ranges.
*
* A non-zero mantissa is (almost) always normalized, meaning it and the
* exponent are grown or shrunk until the mantissa is in the range
* [MantissaRange.min, MantissaRange.max].
*
* Note:
* 1. Normalization can be disabled by using the "unchecked" ctor tag. This
* should only be used at specific conversion points, some constexpr
* values, and in unit tests.
* 2. The max of the "large" range, 10^19-1, is the largest 10^X-1 value that
* fits in an unsigned 64-bit number. (10^19-1 < 2^64-1 and
* 10^20-1 > 2^64-1). This avoids under- and overflows.
*
* ---- External Interface ----
*
* The external interface of Number consists of a std::int64_t mantissa, which
* is restricted to 63-bits, and an int exponent, which must be in the range
* [minExponent, maxExponent]. The range of the mantissa depends on which
* MantissaRange is currently active. For the "short" range, the mantissa will
* be between 10^15 and 10^16-1. For the "large" range, the mantissa will be
* between -(2^63-1) and 2^63-1. As noted above, the "large" range is needed to
* represent the full range of valid XRP and MPT integer values accurately.
*
* Note:
* 1. 2^63-1 is between 10^18 and 10^19-1, which are the limits of the "large"
* mantissa range.
* 2. The functions mantissa() and exponent() return the external view of the
* Number value, specifically using a signed 63-bit mantissa. This may
* require altering the internal representation to fit into that range
* before the value is returned. The interface guarantees consistency of
* the two values.
* 3. Number cannot represent -2^63 (std::numeric_limits<std::int64_t>::min())
* as an exact integer, but it doesn't need to, because all asset values
* on-ledger are non-negative. This is due to implementation details of
* several operations which use unsigned arithmetic internally. This is
* sufficient to represent all valid XRP values (where the absolute value
* can not exceed INITIAL_XRP: 10^17), and MPT values (where the absolute
* value can not exceed maxMPTokenAmount: 2^63-1).
*
* ---- Mantissa Range Switching ----
*
* The mantissa range may be changed at runtime via setMantissaScale(). The
* default mantissa range is "large". The range is updated whenever transaction
* processing begins, based on whether SingleAssetVault or LendingProtocol are
* enabled. If either is enabled, the mantissa range is set to "large". If not,
* it is set to "small", preserving backward compatibility and correct
* "amendment-gating".
*
* It is extremely unlikely that any more calls to setMantissaScale() will be
* needed outside of unit tests.
*
* ---- Usage With Different Ranges ----
*
* Outside of unit tests, and existing checks, code that uses Number should not
* know or care which mantissa range is active.
*
* The results of computations using Numbers with a small mantissa may differ
* from computations using Numbers with a large mantissa, specifically as it
* effects the results after rounding. That is why the large mantissa range is
* amendment gated in transaction processing.
*
* It is extremely unlikely that any more calls to getMantissaScale() will be
* needed outside of unit tests.
*
* Code that uses Number should not assume or check anything about the
* mantissa() or exponent() except that they fit into the "large" range
* specified in the "External Interface" section.
*
* ----- Unit Tests -----
*
* Within unit tests, it may be useful to explicitly switch between the two
* ranges, or to check which range is active when checking the results of
* computations. If the test is doing the math directly, the
* set/getMantissaScale() functions may be most appropriate. However, if the
* test has anything to do with transaction processing, it should enable or
* disable the amendments that control the mantissa range choice
* (SingleAssetVault and LendingProtocol), and/or check if either of those
* amendments are enabled to determine which result to expect.
*
*/
class Number class Number
{ {
using rep = std::int64_t; using rep = std::int64_t;
using internalrep = MantissaRange::rep; rep mantissa_{0};
bool negative_{false};
internalrep mantissa_{0};
int exponent_{std::numeric_limits<int>::lowest()}; int exponent_{std::numeric_limits<int>::lowest()};
public: public:
// The range for the mantissa when normalized
constexpr static std::int64_t minMantissa = 1'000'000'000'000'000LL;
static_assert(isPowerOfTen(minMantissa));
constexpr static std::int64_t maxMantissa = minMantissa * 10 - 1;
static_assert(maxMantissa == 9'999'999'999'999'999LL);
// The range for the exponent when normalized // The range for the exponent when normalized
constexpr static int minExponent = -32768; constexpr static int minExponent = -32768;
constexpr static int maxExponent = 32768; constexpr static int maxExponent = 32768;
constexpr static internalrep maxRep = std::numeric_limits<rep>::max();
static_assert(maxRep == 9'223'372'036'854'775'807);
static_assert(-maxRep == std::numeric_limits<rep>::min() + 1);
// May need to make unchecked private
struct unchecked struct unchecked
{ {
explicit unchecked() = default; explicit unchecked() = default;
}; };
// Like unchecked, normalized is used with the ctors that take an
// internalrep mantissa. Unlike unchecked, those ctors will normalize the
// value.
// Only unit tests are expected to use this class
struct normalized
{
explicit normalized() = default;
};
explicit constexpr Number() = default; explicit constexpr Number() = default;
Number(rep mantissa); Number(rep mantissa);
explicit Number(rep mantissa, int exponent); explicit Number(rep mantissa, int exponent);
explicit constexpr Number(bool negative, internalrep mantissa, int exponent, unchecked) noexcept; explicit constexpr Number(rep mantissa, int exponent, unchecked) noexcept;
// Assume unsigned values are... unsigned. i.e. positive
explicit constexpr Number(internalrep mantissa, int exponent, unchecked) noexcept;
// Only unit tests are expected to use this ctor
explicit Number(bool negative, internalrep mantissa, int exponent, normalized);
// Assume unsigned values are... unsigned. i.e. positive
explicit Number(internalrep mantissa, int exponent, normalized);
constexpr rep constexpr rep
mantissa() const noexcept; mantissa() const noexcept;
@@ -277,11 +78,11 @@ public:
Number& Number&
operator/=(Number const& x); operator/=(Number const& x);
static Number static constexpr Number
min() noexcept; min() noexcept;
static Number static constexpr Number
max() noexcept; max() noexcept;
static Number static constexpr Number
lowest() noexcept; lowest() noexcept;
/** Conversions to Number are implicit and conversions away from Number /** Conversions to Number are implicit and conversions away from Number
@@ -295,7 +96,7 @@ public:
friend constexpr bool friend constexpr bool
operator==(Number const& x, Number const& y) noexcept operator==(Number const& x, Number const& y) noexcept
{ {
return x.negative_ == y.negative_ && x.mantissa_ == y.mantissa_ && x.exponent_ == y.exponent_; return x.mantissa_ == y.mantissa_ && x.exponent_ == y.exponent_;
} }
friend constexpr bool friend constexpr bool
@@ -309,8 +110,8 @@ public:
{ {
// If the two amounts have different signs (zero is treated as positive) // If the two amounts have different signs (zero is treated as positive)
// then the comparison is true iff the left is negative. // then the comparison is true iff the left is negative.
bool const lneg = x.negative_; bool const lneg = x.mantissa_ < 0;
bool const rneg = y.negative_; bool const rneg = y.mantissa_ < 0;
if (lneg != rneg) if (lneg != rneg)
return lneg; return lneg;
@@ -338,7 +139,7 @@ public:
constexpr int constexpr int
signum() const noexcept signum() const noexcept
{ {
return negative_ ? -1 : (mantissa_ ? 1 : 0); return (mantissa_ < 0) ? -1 : (mantissa_ ? 1 : 0);
} }
Number Number
@@ -368,15 +169,6 @@ public:
return os << to_string(x); return os << to_string(x);
} }
friend std::string
to_string(Number const& amount);
friend Number
root(Number f, unsigned d);
friend Number
root2(Number f);
// Thread local rounding control. Default is to_nearest // Thread local rounding control. Default is to_nearest
enum rounding_mode { to_nearest, towards_zero, downward, upward }; enum rounding_mode { to_nearest, towards_zero, downward, upward };
static rounding_mode static rounding_mode
@@ -385,194 +177,44 @@ public:
static rounding_mode static rounding_mode
setround(rounding_mode mode); setround(rounding_mode mode);
/** Returns which mantissa scale is currently in use for normalization.
*
* If you think you need to call this outside of unit tests, no you don't.
*/
static MantissaRange::mantissa_scale
getMantissaScale();
/** Changes which mantissa scale is used for normalization.
*
* If you think you need to call this outside of unit tests, no you don't.
*/
static void
setMantissaScale(MantissaRange::mantissa_scale scale);
inline static internalrep
minMantissa()
{
return range_.get().min;
}
inline static internalrep
maxMantissa()
{
return range_.get().max;
}
inline static int
mantissaLog()
{
return range_.get().log;
}
/// oneSmall is needed because the ranges are private
constexpr static Number
oneSmall();
/// oneLarge is needed because the ranges are private
constexpr static Number
oneLarge();
// And one is needed because it needs to choose between oneSmall and
// oneLarge based on the current range
static Number
one();
template <Integral64 T>
[[nodiscard]]
std::pair<T, int>
normalizeToRange(T minMantissa, T maxMantissa) const;
private: private:
static thread_local rounding_mode mode_; static thread_local rounding_mode mode_;
// The available ranges for mantissa
constexpr static MantissaRange smallRange{MantissaRange::small};
static_assert(isPowerOfTen(smallRange.min));
static_assert(smallRange.min == 1'000'000'000'000'000LL);
static_assert(smallRange.max == 9'999'999'999'999'999LL);
static_assert(smallRange.log == 15);
static_assert(smallRange.min < maxRep);
static_assert(smallRange.max < maxRep);
constexpr static MantissaRange largeRange{MantissaRange::large};
static_assert(isPowerOfTen(largeRange.min));
static_assert(largeRange.min == 1'000'000'000'000'000'000ULL);
static_assert(largeRange.max == internalrep(9'999'999'999'999'999'999ULL));
static_assert(largeRange.log == 18);
static_assert(largeRange.min < maxRep);
static_assert(largeRange.max > maxRep);
// The range for the mantissa when normalized.
// Use reference_wrapper to avoid making copies, and prevent accidentally
// changing the values inside the range.
static thread_local std::reference_wrapper<MantissaRange const> range_;
void void
normalize(); normalize();
constexpr bool
/** Normalize Number components to an arbitrary range.
*
* min/maxMantissa are parameters because this function is used by both
* normalize(), which reads from range_, and by normalizeToRange,
* which is public and can accept an arbitrary range from the caller.
*/
template <class T>
static void
normalize(
bool& negative,
T& mantissa,
int& exponent,
internalrep const& minMantissa,
internalrep const& maxMantissa);
template <class T>
friend void
doNormalize(
bool& negative,
T& mantissa_,
int& exponent_,
MantissaRange::rep const& minMantissa,
MantissaRange::rep const& maxMantissa);
bool
isnormal() const noexcept; isnormal() const noexcept;
// Copy the number, but modify the exponent by "exponentDelta". Because the
// mantissa doesn't change, the result will be "mostly" normalized, but the
// exponent could go out of range, so it will be checked.
Number
shiftExponent(int exponentDelta) const;
// Safely convert rep (int64) mantissa to internalrep (uint64). If the rep
// is negative, returns the positive value. This takes a little extra work
// because converting std::numeric_limits<std::int64_t>::min() flirts with
// UB, and can vary across compilers.
static internalrep
externalToInternal(rep mantissa);
class Guard; class Guard;
}; };
inline constexpr Number::Number(bool negative, internalrep mantissa, int exponent, unchecked) noexcept
: negative_(negative), mantissa_{mantissa}, exponent_{exponent}
{
}
inline constexpr Number::Number(internalrep mantissa, int exponent, unchecked) noexcept
: Number(false, mantissa, exponent, unchecked{})
{
}
constexpr static Number numZero{}; constexpr static Number numZero{};
inline Number::Number(bool negative, internalrep mantissa, int exponent, normalized) inline constexpr Number::Number(rep mantissa, int exponent, unchecked) noexcept
: Number(negative, mantissa, exponent, unchecked{}) : mantissa_{mantissa}, exponent_{exponent}
{
normalize();
}
inline Number::Number(internalrep mantissa, int exponent, normalized) : Number(false, mantissa, exponent, normalized{})
{ {
} }
inline Number::Number(rep mantissa, int exponent) inline Number::Number(rep mantissa, int exponent)
: Number(mantissa < 0, externalToInternal(mantissa), exponent, normalized{}) : mantissa_{mantissa}, exponent_{exponent}
{ {
normalize();
} }
inline Number::Number(rep mantissa) : Number{mantissa, 0} inline Number::Number(rep mantissa) : Number{mantissa, 0}
{ {
} }
/** Returns the mantissa of the external view of the Number.
*
* Please see the "---- External Interface ----" section of the class
* documentation for an explanation of why the internal value may be modified.
*/
inline constexpr Number::rep inline constexpr Number::rep
Number::mantissa() const noexcept Number::mantissa() const noexcept
{ {
auto m = mantissa_; return mantissa_;
if (m > maxRep)
{
XRPL_ASSERT_PARTS(
!isnormal() || (m % 10 == 0 && m / 10 <= maxRep),
"xrpl::Number::mantissa",
"large normalized mantissa has no remainder");
m /= 10;
}
auto const sign = negative_ ? -1 : 1;
return sign * static_cast<Number::rep>(m);
} }
/** Returns the exponent of the external view of the Number.
*
* Please see the "---- External Interface ----" section of the class
* documentation for an explanation of why the internal value may be modified.
*/
inline constexpr int inline constexpr int
Number::exponent() const noexcept Number::exponent() const noexcept
{ {
auto e = exponent_; return exponent_;
if (mantissa_ > maxRep)
{
XRPL_ASSERT_PARTS(
!isnormal() || (mantissa_ % 10 == 0 && mantissa_ / 10 <= maxRep),
"xrpl::Number::exponent",
"large normalized mantissa has no remainder");
++e;
}
return e;
} }
inline constexpr Number inline constexpr Number
@@ -584,17 +226,15 @@ Number::operator+() const noexcept
inline constexpr Number inline constexpr Number
Number::operator-() const noexcept Number::operator-() const noexcept
{ {
if (mantissa_ == 0)
return Number{};
auto x = *this; auto x = *this;
x.negative_ = !x.negative_; x.mantissa_ = -x.mantissa_;
return x; return x;
} }
inline Number& inline Number&
Number::operator++() Number::operator++()
{ {
*this += one(); *this += Number{1000000000000000, -15, unchecked{}};
return *this; return *this;
} }
@@ -609,7 +249,7 @@ Number::operator++(int)
inline Number& inline Number&
Number::operator--() Number::operator--()
{ {
*this -= one(); *this -= Number{1000000000000000, -15, unchecked{}};
return *this; return *this;
} }
@@ -659,48 +299,30 @@ operator/(Number const& x, Number const& y)
return z; return z;
} }
inline Number inline constexpr Number
Number::min() noexcept Number::min() noexcept
{ {
return Number{false, range_.get().min, minExponent, unchecked{}}; return Number{minMantissa, minExponent, unchecked{}};
} }
inline Number inline constexpr Number
Number::max() noexcept Number::max() noexcept
{ {
return Number{false, std::min(range_.get().max, maxRep), maxExponent, unchecked{}}; return Number{maxMantissa, maxExponent, unchecked{}};
} }
inline Number inline constexpr Number
Number::lowest() noexcept Number::lowest() noexcept
{ {
return Number{true, std::min(range_.get().max, maxRep), maxExponent, unchecked{}}; return -Number{maxMantissa, maxExponent, unchecked{}};
} }
inline bool inline constexpr bool
Number::isnormal() const noexcept Number::isnormal() const noexcept
{ {
MantissaRange const& range = range_; auto const abs_m = mantissa_ < 0 ? -mantissa_ : mantissa_;
auto const abs_m = mantissa_; return minMantissa <= abs_m && abs_m <= maxMantissa &&
return *this == Number{} || minExponent <= exponent_ && exponent_ <= maxExponent;
(range.min <= abs_m && abs_m <= range.max && (abs_m <= maxRep || abs_m % 10 == 0) && minExponent <= exponent_ &&
exponent_ <= maxExponent);
}
template <Integral64 T>
std::pair<T, int>
Number::normalizeToRange(T minMantissa, T maxMantissa) const
{
bool negative = negative_;
internalrep mantissa = mantissa_;
int exponent = exponent_;
if constexpr (std::is_unsigned_v<T>)
XRPL_ASSERT_PARTS(!negative, "xrpl::Number::normalizeToRange", "Number is non-negative for unsigned range.");
Number::normalize(negative, mantissa, exponent, minMantissa, maxMantissa);
auto const sign = negative ? -1 : 1;
return std::make_pair(static_cast<T>(sign * mantissa), exponent);
} }
inline constexpr Number inline constexpr Number
@@ -742,20 +364,6 @@ squelch(Number const& x, Number const& limit) noexcept
return x; return x;
} }
inline std::string
to_string(MantissaRange::mantissa_scale const& scale)
{
switch (scale)
{
case MantissaRange::small:
return "small";
case MantissaRange::large:
return "large";
default:
throw std::runtime_error("Bad scale");
}
}
class saveNumberRoundMode class saveNumberRoundMode
{ {
Number::rounding_mode mode_; Number::rounding_mode mode_;
@@ -765,7 +373,8 @@ public:
{ {
Number::setround(mode_); Number::setround(mode_);
} }
explicit saveNumberRoundMode(Number::rounding_mode mode) noexcept : mode_{mode} explicit saveNumberRoundMode(Number::rounding_mode mode) noexcept
: mode_{mode}
{ {
} }
saveNumberRoundMode(saveNumberRoundMode const&) = delete; saveNumberRoundMode(saveNumberRoundMode const&) = delete;
@@ -782,7 +391,8 @@ class NumberRoundModeGuard
saveNumberRoundMode saved_; saveNumberRoundMode saved_;
public: public:
explicit NumberRoundModeGuard(Number::rounding_mode mode) noexcept : saved_{Number::setround(mode)} explicit NumberRoundModeGuard(Number::rounding_mode mode) noexcept
: saved_{Number::setround(mode)}
{ {
} }
@@ -792,32 +402,6 @@ public:
operator=(NumberRoundModeGuard const&) = delete; operator=(NumberRoundModeGuard const&) = delete;
}; };
/** Sets the new scale and restores the old scale when it leaves scope.
*
* If you think you need to use this class outside of unit tests, no you don't.
*
*/
class NumberMantissaScaleGuard
{
MantissaRange::mantissa_scale const saved_;
public:
explicit NumberMantissaScaleGuard(MantissaRange::mantissa_scale scale) noexcept : saved_{Number::getMantissaScale()}
{
Number::setMantissaScale(scale);
}
~NumberMantissaScaleGuard()
{
Number::setMantissaScale(saved_);
}
NumberMantissaScaleGuard(NumberMantissaScaleGuard const&) = delete;
NumberMantissaScaleGuard&
operator=(NumberMantissaScaleGuard const&) = delete;
};
} // namespace xrpl } // namespace xrpl
#endif // XRPL_BASICS_NUMBER_H_INCLUDED #endif // XRPL_BASICS_NUMBER_H_INCLUDED

View File

@@ -11,7 +11,8 @@ namespace xrpl {
class Resolver class Resolver
{ {
public: public:
using HandlerType = std::function<void(std::string, std::vector<beast::IP::Endpoint>)>; using HandlerType =
std::function<void(std::string, std::vector<beast::IP::Endpoint>)>;
virtual ~Resolver() = 0; virtual ~Resolver() = 0;
@@ -40,7 +41,9 @@ public:
} }
virtual void virtual void
resolve(std::vector<std::string> const& names, HandlerType const& handler) = 0; resolve(
std::vector<std::string> const& names,
HandlerType const& handler) = 0;
/** @} */ /** @} */
}; };

View File

@@ -5,28 +5,34 @@
namespace xrpl { namespace xrpl {
template <class T> template <class T>
SharedWeakCachePointer<T>::SharedWeakCachePointer(SharedWeakCachePointer const& rhs) = default; SharedWeakCachePointer<T>::SharedWeakCachePointer(
SharedWeakCachePointer const& rhs) = default;
template <class T> template <class T>
template <class TT> template <class TT>
requires std::convertible_to<TT*, T*> requires std::convertible_to<TT*, T*>
SharedWeakCachePointer<T>::SharedWeakCachePointer(std::shared_ptr<TT> const& rhs) : combo_{rhs} SharedWeakCachePointer<T>::SharedWeakCachePointer(
std::shared_ptr<TT> const& rhs)
: combo_{rhs}
{ {
} }
template <class T> template <class T>
SharedWeakCachePointer<T>::SharedWeakCachePointer(SharedWeakCachePointer&& rhs) = default; SharedWeakCachePointer<T>::SharedWeakCachePointer(
SharedWeakCachePointer&& rhs) = default;
template <class T> template <class T>
template <class TT> template <class TT>
requires std::convertible_to<TT*, T*> requires std::convertible_to<TT*, T*>
SharedWeakCachePointer<T>::SharedWeakCachePointer(std::shared_ptr<TT>&& rhs) : combo_{std::move(rhs)} SharedWeakCachePointer<T>::SharedWeakCachePointer(std::shared_ptr<TT>&& rhs)
: combo_{std::move(rhs)}
{ {
} }
template <class T> template <class T>
SharedWeakCachePointer<T>& SharedWeakCachePointer<T>&
SharedWeakCachePointer<T>::operator=(SharedWeakCachePointer const& rhs) = default; SharedWeakCachePointer<T>::operator=(SharedWeakCachePointer const& rhs) =
default;
template <class T> template <class T>
template <class TT> template <class TT>

View File

@@ -51,7 +51,11 @@ class SlabAllocator
// The extent of the underlying memory block: // The extent of the underlying memory block:
std::size_t const size_; std::size_t const size_;
SlabBlock(SlabBlock* next, std::uint8_t* data, std::size_t size, std::size_t item) SlabBlock(
SlabBlock* next,
std::uint8_t* data,
std::size_t size,
std::size_t item)
: next_(next), p_(data), size_(size) : next_(next), p_(data), size_(size)
{ {
// We don't need to grab the mutex here, since we're the only // We don't need to grab the mutex here, since we're the only
@@ -122,7 +126,9 @@ class SlabAllocator
void void
deallocate(std::uint8_t* ptr) noexcept deallocate(std::uint8_t* ptr) noexcept
{ {
XRPL_ASSERT(own(ptr), "xrpl::SlabAllocator::SlabBlock::deallocate : own input"); XRPL_ASSERT(
own(ptr),
"xrpl::SlabAllocator::SlabBlock::deallocate : own input");
std::lock_guard l(m_); std::lock_guard l(m_);
@@ -156,13 +162,18 @@ public:
contexts (e.g. when minimal memory usage is needed) and contexts (e.g. when minimal memory usage is needed) and
allows for graceful failure. allows for graceful failure.
*/ */
constexpr explicit SlabAllocator(std::size_t extra, std::size_t alloc = 0, std::size_t align = 0) constexpr explicit SlabAllocator(
std::size_t extra,
std::size_t alloc = 0,
std::size_t align = 0)
: itemAlignment_(align ? align : alignof(Type)) : itemAlignment_(align ? align : alignof(Type))
, itemSize_(boost::alignment::align_up(sizeof(Type) + extra, itemAlignment_)) , itemSize_(
boost::alignment::align_up(sizeof(Type) + extra, itemAlignment_))
, slabSize_(alloc) , slabSize_(alloc)
{ {
XRPL_ASSERT( XRPL_ASSERT(
(itemAlignment_ & (itemAlignment_ - 1)) == 0, "xrpl::SlabAllocator::SlabAllocator : valid alignment"); (itemAlignment_ & (itemAlignment_ - 1)) == 0,
"xrpl::SlabAllocator::SlabAllocator : valid alignment");
} }
SlabAllocator(SlabAllocator const& other) = delete; SlabAllocator(SlabAllocator const& other) = delete;
@@ -211,7 +222,8 @@ public:
// We want to allocate the memory at a 2 MiB boundary, to make it // We want to allocate the memory at a 2 MiB boundary, to make it
// possible to use hugepage mappings on Linux: // possible to use hugepage mappings on Linux:
auto buf = boost::alignment::aligned_alloc(megabytes(std::size_t(2)), size); auto buf =
boost::alignment::aligned_alloc(megabytes(std::size_t(2)), size);
// clang-format off // clang-format off
if (!buf) [[unlikely]] if (!buf) [[unlikely]]
@@ -229,21 +241,31 @@ public:
// We need to carve out a bit of memory for the slab header // We need to carve out a bit of memory for the slab header
// and then align the rest appropriately: // and then align the rest appropriately:
auto slabData = reinterpret_cast<void*>(reinterpret_cast<std::uint8_t*>(buf) + sizeof(SlabBlock)); auto slabData = reinterpret_cast<void*>(
reinterpret_cast<std::uint8_t*>(buf) + sizeof(SlabBlock));
auto slabSize = size - sizeof(SlabBlock); auto slabSize = size - sizeof(SlabBlock);
// This operation is essentially guaranteed not to fail but // This operation is essentially guaranteed not to fail but
// let's be careful anyways. // let's be careful anyways.
if (!boost::alignment::align(itemAlignment_, itemSize_, slabData, slabSize)) if (!boost::alignment::align(
itemAlignment_, itemSize_, slabData, slabSize))
{ {
boost::alignment::aligned_free(buf); boost::alignment::aligned_free(buf);
return nullptr; return nullptr;
} }
slab = new (buf) SlabBlock(slabs_.load(), reinterpret_cast<std::uint8_t*>(slabData), slabSize, itemSize_); slab = new (buf) SlabBlock(
slabs_.load(),
reinterpret_cast<std::uint8_t*>(slabData),
slabSize,
itemSize_);
// Link the new slab // Link the new slab
while (!slabs_.compare_exchange_weak(slab->next_, slab, std::memory_order_release, std::memory_order_relaxed)) while (!slabs_.compare_exchange_weak(
slab->next_,
slab,
std::memory_order_release,
std::memory_order_relaxed))
{ {
; // Nothing to do ; // Nothing to do
} }
@@ -300,7 +322,10 @@ public:
std::size_t align; std::size_t align;
public: public:
constexpr SlabConfig(std::size_t extra_, std::size_t alloc_ = 0, std::size_t align_ = alignof(Type)) constexpr SlabConfig(
std::size_t extra_,
std::size_t alloc_ = 0,
std::size_t align_ = alignof(Type))
: extra(extra_), alloc(alloc_), align(align_) : extra(extra_), alloc(alloc_), align(align_)
{ {
} }
@@ -311,14 +336,23 @@ public:
// Ensure that the specified allocators are sorted from smallest to // Ensure that the specified allocators are sorted from smallest to
// largest by size: // largest by size:
std::sort( std::sort(
std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) { return a.extra < b.extra; }); std::begin(cfg),
std::end(cfg),
[](SlabConfig const& a, SlabConfig const& b) {
return a.extra < b.extra;
});
// We should never have two slabs of the same size // We should never have two slabs of the same size
if (std::adjacent_find(std::begin(cfg), std::end(cfg), [](SlabConfig const& a, SlabConfig const& b) { if (std::adjacent_find(
return a.extra == b.extra; std::begin(cfg),
}) != cfg.end()) std::end(cfg),
[](SlabConfig const& a, SlabConfig const& b) {
return a.extra == b.extra;
}) != cfg.end())
{ {
throw std::runtime_error("SlabAllocatorSet<" + beast::type_name<Type>() + ">: duplicate slab size"); throw std::runtime_error(
"SlabAllocatorSet<" + beast::type_name<Type>() +
">: duplicate slab size");
} }
for (auto const& c : cfg) for (auto const& c : cfg)

View File

@@ -41,7 +41,8 @@ public:
operator=(Slice const&) noexcept = default; operator=(Slice const&) noexcept = default;
/** Create a slice pointing to existing memory. */ /** Create a slice pointing to existing memory. */
Slice(void const* data, std::size_t size) noexcept : data_(reinterpret_cast<std::uint8_t const*>(data)), size_(size) Slice(void const* data, std::size_t size) noexcept
: data_(reinterpret_cast<std::uint8_t const*>(data)), size_(size)
{ {
} }
@@ -84,7 +85,9 @@ public:
std::uint8_t std::uint8_t
operator[](std::size_t i) const noexcept operator[](std::size_t i) const noexcept
{ {
XRPL_ASSERT(i < size_, "xrpl::Slice::operator[](std::size_t) const : valid input"); XRPL_ASSERT(
i < size_,
"xrpl::Slice::operator[](std::size_t) const : valid input");
return data_[i]; return data_[i];
} }
@@ -149,8 +152,8 @@ public:
/** Return a "sub slice" of given length starting at the given position /** Return a "sub slice" of given length starting at the given position
Note that the subslice encompasses the range [pos, pos + rCount), Note that the subslice encompasses the range [pos, pos + rcount),
where rCount is the smaller of count and size() - pos. where rcount is the smaller of count and size() - pos.
@param pos position of the first character @param pos position of the first character
@count requested length @count requested length
@@ -159,7 +162,9 @@ public:
@throws std::out_of_range if pos > size() @throws std::out_of_range if pos > size()
*/ */
Slice Slice
substr(std::size_t pos, std::size_t count = std::numeric_limits<std::size_t>::max()) const substr(
std::size_t pos,
std::size_t count = std::numeric_limits<std::size_t>::max()) const
{ {
if (pos > size()) if (pos > size())
throw std::out_of_range("Requested sub-slice is out of bounds"); throw std::out_of_range("Requested sub-slice is out of bounds");
@@ -198,7 +203,11 @@ operator!=(Slice const& lhs, Slice const& rhs) noexcept
inline bool inline bool
operator<(Slice const& lhs, Slice const& rhs) noexcept operator<(Slice const& lhs, Slice const& rhs) noexcept
{ {
return std::lexicographical_compare(lhs.data(), lhs.data() + lhs.size(), rhs.data(), rhs.data() + rhs.size()); return std::lexicographical_compare(
lhs.data(),
lhs.data() + lhs.size(),
rhs.data(),
rhs.data() + rhs.size());
} }
template <class Stream> template <class Stream>
@@ -210,14 +219,18 @@ operator<<(Stream& s, Slice const& v)
} }
template <class T, std::size_t N> template <class T, std::size_t N>
std::enable_if_t<std::is_same<T, char>::value || std::is_same<T, unsigned char>::value, Slice> std::enable_if_t<
std::is_same<T, char>::value || std::is_same<T, unsigned char>::value,
Slice>
makeSlice(std::array<T, N> const& a) makeSlice(std::array<T, N> const& a)
{ {
return Slice(a.data(), a.size()); return Slice(a.data(), a.size());
} }
template <class T, class Alloc> template <class T, class Alloc>
std::enable_if_t<std::is_same<T, char>::value || std::is_same<T, unsigned char>::value, Slice> std::enable_if_t<
std::is_same<T, char>::value || std::is_same<T, unsigned char>::value,
Slice>
makeSlice(std::vector<T, Alloc> const& v) makeSlice(std::vector<T, Alloc> const& v)
{ {
return Slice(v.data(), v.size()); return Slice(v.data(), v.size());

View File

@@ -31,7 +31,7 @@ template <class Iterator>
std::optional<Blob> std::optional<Blob>
strUnHex(std::size_t strSize, Iterator begin, Iterator end) strUnHex(std::size_t strSize, Iterator begin, Iterator end)
{ {
static constexpr std::array<int, 256> const digitLookupTable = []() { static constexpr std::array<int, 256> const unxtab = []() {
std::array<int, 256> t{}; std::array<int, 256> t{};
for (auto& x : t) for (auto& x : t)
@@ -57,7 +57,7 @@ strUnHex(std::size_t strSize, Iterator begin, Iterator end)
if (strSize & 1) if (strSize & 1)
{ {
int c = digitLookupTable[*iter++]; int c = unxtab[*iter++];
if (c < 0) if (c < 0)
return {}; return {};
@@ -67,12 +67,12 @@ strUnHex(std::size_t strSize, Iterator begin, Iterator end)
while (iter != end) while (iter != end)
{ {
int cHigh = digitLookupTable[*iter++]; int cHigh = unxtab[*iter++];
if (cHigh < 0) if (cHigh < 0)
return {}; return {};
int cLow = digitLookupTable[*iter++]; int cLow = unxtab[*iter++];
if (cLow < 0) if (cLow < 0)
return {}; return {};
@@ -109,7 +109,8 @@ struct parsedURL
bool bool
operator==(parsedURL const& other) const operator==(parsedURL const& other) const
{ {
return scheme == other.scheme && domain == other.domain && port == other.port && path == other.path; return scheme == other.scheme && domain == other.domain &&
port == other.port && path == other.path;
} }
}; };

View File

@@ -56,7 +56,8 @@ public:
clock_type::duration expiration, clock_type::duration expiration,
clock_type& clock, clock_type& clock,
beast::Journal journal, beast::Journal journal,
beast::insight::Collector::ptr const& collector = beast::insight::NullCollector::New()); beast::insight::Collector::ptr const& collector =
beast::insight::NullCollector::New());
public: public:
/** Return the clock associated with the cache. */ /** Return the clock associated with the cache. */
@@ -113,10 +114,15 @@ public:
*/ */
template <class R> template <class R>
bool bool
canonicalize(key_type const& key, SharedPointerType& data, R&& replaceCallback); canonicalize(
key_type const& key,
SharedPointerType& data,
R&& replaceCallback);
bool bool
canonicalize_replace_cache(key_type const& key, SharedPointerType const& data); canonicalize_replace_cache(
key_type const& key,
SharedPointerType const& data);
bool bool
canonicalize_replace_client(key_type const& key, SharedPointerType& data); canonicalize_replace_client(key_type const& key, SharedPointerType& data);
@@ -130,7 +136,8 @@ public:
*/ */
template <class ReturnType = bool> template <class ReturnType = bool>
auto auto
insert(key_type const& key, T const& value) -> std::enable_if_t<!IsKeyCache, ReturnType>; insert(key_type const& key, T const& value)
-> std::enable_if_t<!IsKeyCache, ReturnType>;
template <class ReturnType = bool> template <class ReturnType = bool>
auto auto
@@ -176,7 +183,10 @@ private:
struct Stats struct Stats
{ {
template <class Handler> template <class Handler>
Stats(std::string const& prefix, Handler const& handler, beast::insight::Collector::ptr const& collector) Stats(
std::string const& prefix,
Handler const& handler,
beast::insight::Collector::ptr const& collector)
: hook(collector->make_hook(handler)) : hook(collector->make_hook(handler))
, size(collector->make_gauge(prefix, "size")) , size(collector->make_gauge(prefix, "size"))
, hit_rate(collector->make_gauge(prefix, "hit_rate")) , hit_rate(collector->make_gauge(prefix, "hit_rate"))
@@ -198,7 +208,8 @@ private:
public: public:
clock_type::time_point last_access; clock_type::time_point last_access;
explicit KeyOnlyEntry(clock_type::time_point const& last_access_) : last_access(last_access_) explicit KeyOnlyEntry(clock_type::time_point const& last_access_)
: last_access(last_access_)
{ {
} }
@@ -215,7 +226,9 @@ private:
shared_weak_combo_pointer_type ptr; shared_weak_combo_pointer_type ptr;
clock_type::time_point last_access; clock_type::time_point last_access;
ValueEntry(clock_type::time_point const& last_access_, shared_pointer_type const& ptr_) ValueEntry(
clock_type::time_point const& last_access_,
shared_pointer_type const& ptr_)
: ptr(ptr_), last_access(last_access_) : ptr(ptr_), last_access(last_access_)
{ {
} }
@@ -249,13 +262,18 @@ private:
} }
}; };
typedef typename std::conditional<IsKeyCache, KeyOnlyEntry, ValueEntry>::type Entry; typedef
typename std::conditional<IsKeyCache, KeyOnlyEntry, ValueEntry>::type
Entry;
using KeyOnlyCacheType = hardened_partitioned_hash_map<key_type, KeyOnlyEntry, Hash, KeyEqual>; using KeyOnlyCacheType =
hardened_partitioned_hash_map<key_type, KeyOnlyEntry, Hash, KeyEqual>;
using KeyValueCacheType = hardened_partitioned_hash_map<key_type, ValueEntry, Hash, KeyEqual>; using KeyValueCacheType =
hardened_partitioned_hash_map<key_type, ValueEntry, Hash, KeyEqual>;
using cache_type = hardened_partitioned_hash_map<key_type, Entry, Hash, KeyEqual>; using cache_type =
hardened_partitioned_hash_map<key_type, Entry, Hash, KeyEqual>;
[[nodiscard]] std::thread [[nodiscard]] std::thread
sweepHelper( sweepHelper(

View File

@@ -15,13 +15,22 @@ template <
class Hash, class Hash,
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::TaggedCache( inline TaggedCache<
std::string const& name, Key,
int size, T,
clock_type::duration expiration, IsKeyCache,
clock_type& clock, SharedWeakUnionPointer,
beast::Journal journal, SharedPointerType,
beast::insight::Collector::ptr const& collector) Hash,
KeyEqual,
Mutex>::
TaggedCache(
std::string const& name,
int size,
clock_type::duration expiration,
clock_type& clock,
beast::Journal journal,
beast::insight::Collector::ptr const& collector)
: m_journal(journal) : m_journal(journal)
, m_clock(clock) , m_clock(clock)
, m_stats(name, std::bind(&TaggedCache::collect_metrics, this), collector) , m_stats(name, std::bind(&TaggedCache::collect_metrics, this), collector)
@@ -44,8 +53,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline auto inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::clock() TaggedCache<
-> clock_type& Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::clock() -> clock_type&
{ {
return m_clock; return m_clock;
} }
@@ -60,7 +76,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline std::size_t inline std::size_t
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::size() const TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::size() const
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
return m_cache.size(); return m_cache.size();
@@ -76,7 +100,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline int inline int
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getCacheSize() const TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::getCacheSize() const
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
return m_cache_count; return m_cache_count;
@@ -92,7 +124,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline int inline int
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getTrackSize() const TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::getTrackSize() const
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
return m_cache.size(); return m_cache.size();
@@ -108,7 +148,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline float inline float
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getHitRate() TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::getHitRate()
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
auto const total = static_cast<float>(m_hits + m_misses); auto const total = static_cast<float>(m_hits + m_misses);
@@ -125,7 +173,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline void inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::clear() TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::clear()
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
m_cache.clear(); m_cache.clear();
@@ -142,7 +198,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline void inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::reset() TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::reset()
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
m_cache.clear(); m_cache.clear();
@@ -162,8 +226,15 @@ template <
class Mutex> class Mutex>
template <class KeyComparable> template <class KeyComparable>
inline bool inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::touch_if_exists( TaggedCache<
KeyComparable const& key) Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::touch_if_exists(KeyComparable const& key)
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
auto const iter(m_cache.find(key)); auto const iter(m_cache.find(key));
@@ -187,7 +258,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline void inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweep() TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::sweep()
{ {
// Keep references to all the stuff we sweep // Keep references to all the stuff we sweep
// For performance, each worker thread should exit before the swept data // For performance, each worker thread should exit before the swept data
@@ -201,7 +280,8 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
if (m_target_size == 0 || (static_cast<int>(m_cache.size()) <= m_target_size)) if (m_target_size == 0 ||
(static_cast<int>(m_cache.size()) <= m_target_size))
{ {
when_expire = now - m_target_age; when_expire = now - m_target_age;
} }
@@ -213,8 +293,10 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (when_expire > (now - minimumAge)) if (when_expire > (now - minimumAge))
when_expire = now - minimumAge; when_expire = now - minimumAge;
JLOG(m_journal.trace()) << m_name << " is growing fast " << m_cache.size() << " of " << m_target_size JLOG(m_journal.trace())
<< " aging at " << (now - when_expire).count() << " of " << m_target_age.count(); << m_name << " is growing fast " << m_cache.size() << " of "
<< m_target_size << " aging at " << (now - when_expire).count()
<< " of " << m_target_age.count();
} }
std::vector<std::thread> workers; std::vector<std::thread> workers;
@@ -223,7 +305,13 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
for (std::size_t p = 0; p < m_cache.partitions(); ++p) for (std::size_t p = 0; p < m_cache.partitions(); ++p)
{ {
workers.push_back(sweepHelper(when_expire, now, m_cache.map()[p], allStuffToSweep[p], allRemovals, lock)); workers.push_back(sweepHelper(
when_expire,
now,
m_cache.map()[p],
allStuffToSweep[p],
allRemovals,
lock));
} }
for (std::thread& worker : workers) for (std::thread& worker : workers)
worker.join(); worker.join();
@@ -234,7 +322,9 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
// and decrement the reference count on each strong pointer. // and decrement the reference count on each strong pointer.
JLOG(m_journal.debug()) JLOG(m_journal.debug())
<< m_name << " TaggedCache sweep lock duration " << m_name << " TaggedCache sweep lock duration "
<< std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count() << std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::steady_clock::now() - start)
.count()
<< "ms"; << "ms";
} }
@@ -248,9 +338,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline bool inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::del( TaggedCache<
key_type const& key, Key,
bool valid) T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::del(key_type const& key, bool valid)
{ {
// Remove from cache, if !valid, remove from map too. Returns true if // Remove from cache, if !valid, remove from map too. Returns true if
// removed from cache // removed from cache
@@ -289,10 +385,19 @@ template <
class Mutex> class Mutex>
template <class R> template <class R>
inline bool inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::canonicalize( TaggedCache<
key_type const& key, Key,
SharedPointerType& data, T,
R&& replaceCallback) IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::
canonicalize(
key_type const& key,
SharedPointerType& data,
R&& replaceCallback)
{ {
// Return canonical value, store if needed, refresh in cache // Return canonical value, store if needed, refresh in cache
// Return values: true=we had the data already // Return values: true=we had the data already
@@ -303,7 +408,9 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (cit == m_cache.end()) if (cit == m_cache.end())
{ {
m_cache.emplace( m_cache.emplace(
std::piecewise_construct, std::forward_as_tuple(key), std::forward_as_tuple(m_clock.now(), data)); std::piecewise_construct,
std::forward_as_tuple(key),
std::forward_as_tuple(m_clock.now(), data));
++m_cache_count; ++m_cache_count;
return false; return false;
} }
@@ -373,10 +480,21 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline bool inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>:: TaggedCache<
canonicalize_replace_cache(key_type const& key, SharedPointerType const& data) Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::
canonicalize_replace_cache(
key_type const& key,
SharedPointerType const& data)
{ {
return canonicalize(key, const_cast<SharedPointerType&>(data), []() { return true; }); return canonicalize(
key, const_cast<SharedPointerType&>(data), []() { return true; });
} }
template < template <
@@ -389,7 +507,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline bool inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>:: TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::
canonicalize_replace_client(key_type const& key, SharedPointerType& data) canonicalize_replace_client(key_type const& key, SharedPointerType& data)
{ {
return canonicalize(key, data, []() { return false; }); return canonicalize(key, data, []() { return false; });
@@ -405,8 +531,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline SharedPointerType inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::fetch( TaggedCache<
key_type const& key) Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::fetch(key_type const& key)
{ {
std::lock_guard<mutex_type> l(m_mutex); std::lock_guard<mutex_type> l(m_mutex);
auto ret = initialFetch(key, l); auto ret = initialFetch(key, l);
@@ -426,9 +559,16 @@ template <
class Mutex> class Mutex>
template <class ReturnType> template <class ReturnType>
inline auto inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::insert( TaggedCache<
key_type const& key, Key,
T const& value) -> std::enable_if_t<!IsKeyCache, ReturnType> T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::insert(key_type const& key, T const& value)
-> std::enable_if_t<!IsKeyCache, ReturnType>
{ {
static_assert( static_assert(
std::is_same_v<std::shared_ptr<T>, SharedPointerType> || std::is_same_v<std::shared_ptr<T>, SharedPointerType> ||
@@ -457,13 +597,23 @@ template <
class Mutex> class Mutex>
template <class ReturnType> template <class ReturnType>
inline auto inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::insert( TaggedCache<
key_type const& key) -> std::enable_if_t<IsKeyCache, ReturnType> Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::insert(key_type const& key)
-> std::enable_if_t<IsKeyCache, ReturnType>
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
clock_type::time_point const now(m_clock.now()); clock_type::time_point const now(m_clock.now());
auto [it, inserted] = auto [it, inserted] = m_cache.emplace(
m_cache.emplace(std::piecewise_construct, std::forward_as_tuple(key), std::forward_as_tuple(now)); std::piecewise_construct,
std::forward_as_tuple(key),
std::forward_as_tuple(now));
if (!inserted) if (!inserted)
it->second.last_access = now; it->second.last_access = now;
return inserted; return inserted;
@@ -479,9 +629,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline bool inline bool
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::retrieve( TaggedCache<
key_type const& key, Key,
T& data) T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::retrieve(key_type const& key, T& data)
{ {
// retrieve the value of the stored data // retrieve the value of the stored data
auto entry = fetch(key); auto entry = fetch(key);
@@ -503,8 +659,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline auto inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::peekMutex() TaggedCache<
-> mutex_type& Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::peekMutex() -> mutex_type&
{ {
return m_mutex; return m_mutex;
} }
@@ -519,8 +682,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline auto inline auto
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::getKeys() const TaggedCache<
-> std::vector<key_type> Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::getKeys() const -> std::vector<key_type>
{ {
std::vector<key_type> v; std::vector<key_type> v;
@@ -544,7 +714,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline double inline double
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::rate() const TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::rate() const
{ {
std::lock_guard lock(m_mutex); std::lock_guard lock(m_mutex);
auto const tot = m_hits + m_misses; auto const tot = m_hits + m_misses;
@@ -564,9 +742,15 @@ template <
class Mutex> class Mutex>
template <class Handler> template <class Handler>
inline SharedPointerType inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::fetch( TaggedCache<
key_type const& digest, Key,
Handler const& h) T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::fetch(key_type const& digest, Handler const& h)
{ {
{ {
std::lock_guard l(m_mutex); std::lock_guard l(m_mutex);
@@ -580,7 +764,8 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
std::lock_guard l(m_mutex); std::lock_guard l(m_mutex);
++m_misses; ++m_misses;
auto const [it, inserted] = m_cache.emplace(digest, Entry(m_clock.now(), std::move(sle))); auto const [it, inserted] =
m_cache.emplace(digest, Entry(m_clock.now(), std::move(sle)));
if (!inserted) if (!inserted)
it->second.touch(m_clock.now()); it->second.touch(m_clock.now());
return it->second.ptr.getStrong(); return it->second.ptr.getStrong();
@@ -597,9 +782,16 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline SharedPointerType inline SharedPointerType
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::initialFetch( TaggedCache<
key_type const& key, Key,
std::lock_guard<mutex_type> const& l) T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::
initialFetch(key_type const& key, std::lock_guard<mutex_type> const& l)
{ {
auto cit = m_cache.find(key); auto cit = m_cache.find(key);
if (cit == m_cache.end()) if (cit == m_cache.end())
@@ -635,7 +827,15 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline void inline void
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::collect_metrics() TaggedCache<
Key,
T,
IsKeyCache,
SharedWeakUnionPointer,
SharedPointerType,
Hash,
KeyEqual,
Mutex>::collect_metrics()
{ {
m_stats.size.set(getCacheSize()); m_stats.size.set(getCacheSize());
@@ -661,13 +861,22 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline std::thread inline std::thread
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweepHelper( TaggedCache<
clock_type::time_point const& when_expire, Key,
[[maybe_unused]] clock_type::time_point const& now, T,
typename KeyValueCacheType::map_type& partition, IsKeyCache,
SweptPointersVector& stuffToSweep, SharedWeakUnionPointer,
std::atomic<int>& allRemovals, SharedPointerType,
std::lock_guard<std::recursive_mutex> const&) Hash,
KeyEqual,
Mutex>::
sweepHelper(
clock_type::time_point const& when_expire,
[[maybe_unused]] clock_type::time_point const& now,
typename KeyValueCacheType::map_type& partition,
SweptPointersVector& stuffToSweep,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
{ {
return std::thread([&, this]() { return std::thread([&, this]() {
int cacheRemovals = 0; int cacheRemovals = 0;
@@ -721,8 +930,10 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (mapRemovals || cacheRemovals) if (mapRemovals || cacheRemovals)
{ {
JLOG(m_journal.debug()) << "TaggedCache partition sweep " << m_name << ": cache = " << partition.size() JLOG(m_journal.debug())
<< "-" << cacheRemovals << ", map-=" << mapRemovals; << "TaggedCache partition sweep " << m_name
<< ": cache = " << partition.size() << "-" << cacheRemovals
<< ", map-=" << mapRemovals;
} }
allRemovals += cacheRemovals; allRemovals += cacheRemovals;
@@ -739,13 +950,22 @@ template <
class KeyEqual, class KeyEqual,
class Mutex> class Mutex>
inline std::thread inline std::thread
TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash, KeyEqual, Mutex>::sweepHelper( TaggedCache<
clock_type::time_point const& when_expire, Key,
clock_type::time_point const& now, T,
typename KeyOnlyCacheType::map_type& partition, IsKeyCache,
SweptPointersVector&, SharedWeakUnionPointer,
std::atomic<int>& allRemovals, SharedPointerType,
std::lock_guard<std::recursive_mutex> const&) Hash,
KeyEqual,
Mutex>::
sweepHelper(
clock_type::time_point const& when_expire,
clock_type::time_point const& now,
typename KeyOnlyCacheType::map_type& partition,
SweptPointersVector&,
std::atomic<int>& allRemovals,
std::lock_guard<std::recursive_mutex> const&)
{ {
return std::thread([&, this]() { return std::thread([&, this]() {
int cacheRemovals = 0; int cacheRemovals = 0;
@@ -775,8 +995,10 @@ TaggedCache<Key, T, IsKeyCache, SharedWeakUnionPointer, SharedPointerType, Hash,
if (mapRemovals || cacheRemovals) if (mapRemovals || cacheRemovals)
{ {
JLOG(m_journal.debug()) << "TaggedCache partition sweep " << m_name << ": cache = " << partition.size() JLOG(m_journal.debug())
<< "-" << cacheRemovals << ", map-=" << mapRemovals; << "TaggedCache partition sweep " << m_name
<< ": cache = " << partition.size() << "-" << cacheRemovals
<< ", map-=" << mapRemovals;
} }
allRemovals += cacheRemovals; allRemovals += cacheRemovals;

View File

@@ -40,7 +40,8 @@ template <
class Hash = beast::uhash<>, class Hash = beast::uhash<>,
class Pred = std::equal_to<Key>, class Pred = std::equal_to<Key>,
class Allocator = std::allocator<std::pair<Key const, Value>>> class Allocator = std::allocator<std::pair<Key const, Value>>>
using hash_multimap = std::unordered_multimap<Key, Value, Hash, Pred, Allocator>; using hash_multimap =
std::unordered_multimap<Key, Value, Hash, Pred, Allocator>;
template < template <
class Value, class Value,
@@ -74,7 +75,8 @@ template <
class Hash = hardened_hash<strong_hash>, class Hash = hardened_hash<strong_hash>,
class Pred = std::equal_to<Key>, class Pred = std::equal_to<Key>,
class Allocator = std::allocator<std::pair<Key const, Value>>> class Allocator = std::allocator<std::pair<Key const, Value>>>
using hardened_partitioned_hash_map = partitioned_unordered_map<Key, Value, Hash, Pred, Allocator>; using hardened_partitioned_hash_map =
partitioned_unordered_map<Key, Value, Hash, Pred, Allocator>;
template < template <
class Key, class Key,
@@ -82,7 +84,8 @@ template <
class Hash = hardened_hash<strong_hash>, class Hash = hardened_hash<strong_hash>,
class Pred = std::equal_to<Key>, class Pred = std::equal_to<Key>,
class Allocator = std::allocator<std::pair<Key const, Value>>> class Allocator = std::allocator<std::pair<Key const, Value>>>
using hardened_hash_multimap = std::unordered_multimap<Key, Value, Hash, Pred, Allocator>; using hardened_hash_multimap =
std::unordered_multimap<Key, Value, Hash, Pred, Allocator>;
template < template <
class Value, class Value,
@@ -96,7 +99,8 @@ template <
class Hash = hardened_hash<strong_hash>, class Hash = hardened_hash<strong_hash>,
class Pred = std::equal_to<Value>, class Pred = std::equal_to<Value>,
class Allocator = std::allocator<Value>> class Allocator = std::allocator<Value>>
using hardened_hash_multiset = std::unordered_multiset<Value, Hash, Pred, Allocator>; using hardened_hash_multiset =
std::unordered_multiset<Value, Hash, Pred, Allocator>;
} // namespace xrpl } // namespace xrpl

View File

@@ -52,7 +52,13 @@ generalized_set_intersection(
// std::set_intersection. // std::set_intersection.
template <class FwdIter1, class InputIter2, class Pred, class Comp> template <class FwdIter1, class InputIter2, class Pred, class Comp>
FwdIter1 FwdIter1
remove_if_intersect_or_match(FwdIter1 first1, FwdIter1 last1, InputIter2 first2, InputIter2 last2, Pred pred, Comp comp) remove_if_intersect_or_match(
FwdIter1 first1,
FwdIter1 last1,
InputIter2 first2,
InputIter2 last2,
Pred pred,
Comp comp)
{ {
// [original-first1, current-first1) is the set of elements to be preserved. // [original-first1, current-first1) is the set of elements to be preserved.
// [current-first1, i) is the set of elements that have been removed. // [current-first1, i) is the set of elements that have been removed.

View File

@@ -46,7 +46,8 @@ base64_encode(std::uint8_t const* data, std::size_t len);
inline std::string inline std::string
base64_encode(std::string const& s) base64_encode(std::string const& s)
{ {
return base64_encode(reinterpret_cast<std::uint8_t const*>(s.data()), s.size()); return base64_encode(
reinterpret_cast<std::uint8_t const*>(s.data()), s.size());
} }
std::string std::string

View File

@@ -65,9 +65,13 @@ struct is_contiguous_container<Slice> : std::true_type
template <std::size_t Bits, class Tag = void> template <std::size_t Bits, class Tag = void>
class base_uint class base_uint
{ {
static_assert((Bits % 32) == 0, "The length of a base_uint in bits must be a multiple of 32."); static_assert(
(Bits % 32) == 0,
"The length of a base_uint in bits must be a multiple of 32.");
static_assert(Bits >= 64, "The length of a base_uint in bits must be at least 64."); static_assert(
Bits >= 64,
"The length of a base_uint in bits must be at least 64.");
static constexpr std::size_t WIDTH = Bits / 32; static constexpr std::size_t WIDTH = Bits / 32;
@@ -178,7 +182,9 @@ private:
{ {
// Local lambda that converts a single hex char to four bits and // Local lambda that converts a single hex char to four bits and
// ORs those bits into a uint32_t. // ORs those bits into a uint32_t.
auto hexCharToUInt = [](char c, std::uint32_t shift, std::uint32_t& accum) -> ParseResult { auto hexCharToUInt = [](char c,
std::uint32_t shift,
std::uint32_t& accum) -> ParseResult {
std::uint32_t nibble = 0xFFu; std::uint32_t nibble = 0xFFu;
if (c < '0' || c > 'f') if (c < '0' || c > 'f')
return ParseResult::badChar; return ParseResult::badChar;
@@ -215,7 +221,8 @@ private:
std::uint32_t accum = {}; std::uint32_t accum = {};
for (std::uint32_t shift : {4u, 0u, 12u, 8u, 20u, 16u, 28u, 24u}) for (std::uint32_t shift : {4u, 0u, 12u, 8u, 20u, 16u, 28u, 24u})
{ {
if (auto const result = hexCharToUInt(*in++, shift, accum); result != ParseResult::okay) if (auto const result = hexCharToUInt(*in++, shift, accum);
result != ParseResult::okay)
return Unexpected(result); return Unexpected(result);
} }
ret[i++] = accum; ret[i++] = accum;
@@ -254,7 +261,8 @@ public:
// This constructor is intended to be used at compile time since it might // This constructor is intended to be used at compile time since it might
// throw at runtime. Consider declaring this constructor consteval once // throw at runtime. Consider declaring this constructor consteval once
// we get to C++23. // we get to C++23.
explicit constexpr base_uint(std::string_view sv) noexcept(false) : data_(parseFromStringViewThrows(sv)) explicit constexpr base_uint(std::string_view sv) noexcept(false)
: data_(parseFromStringViewThrows(sv))
{ {
} }
@@ -379,7 +387,8 @@ public:
// prefix operator // prefix operator
for (int i = WIDTH - 1; i >= 0; --i) for (int i = WIDTH - 1; i >= 0; --i)
{ {
data_[i] = boost::endian::native_to_big(boost::endian::big_to_native(data_[i]) + 1); data_[i] = boost::endian::native_to_big(
boost::endian::big_to_native(data_[i]) + 1);
if (data_[i] != 0) if (data_[i] != 0)
break; break;
} }
@@ -403,7 +412,8 @@ public:
for (int i = WIDTH - 1; i >= 0; --i) for (int i = WIDTH - 1; i >= 0; --i)
{ {
auto prev = data_[i]; auto prev = data_[i];
data_[i] = boost::endian::native_to_big(boost::endian::big_to_native(data_[i]) - 1); data_[i] = boost::endian::native_to_big(
boost::endian::big_to_native(data_[i]) - 1);
if (prev != 0) if (prev != 0)
break; break;
@@ -443,9 +453,11 @@ public:
for (int i = WIDTH; i--;) for (int i = WIDTH; i--;)
{ {
std::uint64_t n = carry + boost::endian::big_to_native(data_[i]) + boost::endian::big_to_native(b.data_[i]); std::uint64_t n = carry + boost::endian::big_to_native(data_[i]) +
boost::endian::big_to_native(b.data_[i]);
data_[i] = boost::endian::native_to_big(static_cast<std::uint32_t>(n)); data_[i] =
boost::endian::native_to_big(static_cast<std::uint32_t>(n));
carry = n >> 32; carry = n >> 32;
} }
@@ -545,7 +557,8 @@ operator<=>(base_uint<Bits, Tag> const& lhs, base_uint<Bits, Tag> const& rhs)
if (ret.first == lhs.cend()) if (ret.first == lhs.cend())
return std::strong_ordering::equivalent; return std::strong_ordering::equivalent;
return (*ret.first > *ret.second) ? std::strong_ordering::greater : std::strong_ordering::less; return (*ret.first > *ret.second) ? std::strong_ordering::greater
: std::strong_ordering::less;
} }
template <std::size_t Bits, typename Tag> template <std::size_t Bits, typename Tag>
@@ -604,7 +617,9 @@ template <std::size_t Bits, class Tag>
inline std::string inline std::string
to_short_string(base_uint<Bits, Tag> const& a) to_short_string(base_uint<Bits, Tag> const& a)
{ {
static_assert(base_uint<Bits, Tag>::bytes > 4, "For 4 bytes or less, use a native type"); static_assert(
base_uint<Bits, Tag>::bytes > 4,
"For 4 bytes or less, use a native type");
return strHex(a.cbegin(), a.cbegin() + 4) + "..."; return strHex(a.cbegin(), a.cbegin() + 4) + "...";
} }
@@ -638,7 +653,8 @@ static_assert(sizeof(uint256) == 256 / 8, "There should be no padding bytes");
namespace beast { namespace beast {
template <std::size_t Bits, class Tag> template <std::size_t Bits, class Tag>
struct is_uniquely_represented<xrpl::base_uint<Bits, Tag>> : public std::true_type struct is_uniquely_represented<xrpl::base_uint<Bits, Tag>>
: public std::true_type
{ {
explicit is_uniquely_represented() = default; explicit is_uniquely_represented() = default;
}; };

View File

@@ -16,9 +16,12 @@ namespace xrpl {
// A few handy aliases // A few handy aliases
using days = std::chrono::duration<int, std::ratio_multiply<std::chrono::hours::period, std::ratio<24>>>; using days = std::chrono::duration<
int,
std::ratio_multiply<std::chrono::hours::period, std::ratio<24>>>;
using weeks = std::chrono::duration<int, std::ratio_multiply<days::period, std::ratio<7>>>; using weeks = std::chrono::
duration<int, std::ratio_multiply<days::period, std::ratio<7>>>;
/** Clock for measuring the network time. /** Clock for measuring the network time.
@@ -31,7 +34,8 @@ using weeks = std::chrono::duration<int, std::ratio_multiply<days::period, std::
*/ */
constexpr static std::chrono::seconds epoch_offset = constexpr static std::chrono::seconds epoch_offset =
date::sys_days{date::year{2000} / 1 / 1} - date::sys_days{date::year{1970} / 1 / 1}; date::sys_days{date::year{2000} / 1 / 1} -
date::sys_days{date::year{1970} / 1 / 1};
static_assert(epoch_offset.count() == 946684800); static_assert(epoch_offset.count() == 946684800);
@@ -60,7 +64,8 @@ to_string(NetClock::time_point tp)
{ {
// 2000-01-01 00:00:00 UTC is 946684800s from 1970-01-01 00:00:00 UTC // 2000-01-01 00:00:00 UTC is 946684800s from 1970-01-01 00:00:00 UTC
using namespace std::chrono; using namespace std::chrono;
return to_string(system_clock::time_point{tp.time_since_epoch() + epoch_offset}); return to_string(
system_clock::time_point{tp.time_since_epoch() + epoch_offset});
} }
template <class Duration> template <class Duration>
@@ -77,7 +82,8 @@ to_string_iso(NetClock::time_point tp)
// 2000-01-01 00:00:00 UTC is 946684800s from 1970-01-01 00:00:00 UTC // 2000-01-01 00:00:00 UTC is 946684800s from 1970-01-01 00:00:00 UTC
// Note, NetClock::duration is seconds, as checked by static_assert // Note, NetClock::duration is seconds, as checked by static_assert
static_assert(std::is_same_v<NetClock::duration::period, std::ratio<1>>); static_assert(std::is_same_v<NetClock::duration::period, std::ratio<1>>);
return to_string_iso(date::sys_time<NetClock::duration>{tp.time_since_epoch() + epoch_offset}); return to_string_iso(date::sys_time<NetClock::duration>{
tp.time_since_epoch() + epoch_offset});
} }
/** A clock for measuring elapsed time. /** A clock for measuring elapsed time.

View File

@@ -36,10 +36,15 @@ template <class E, class... Args>
[[noreturn]] inline void [[noreturn]] inline void
Throw(Args&&... args) Throw(Args&&... args)
{ {
static_assert(std::is_convertible<E*, std::exception*>::value, "Exception must derive from std::exception."); static_assert(
std::is_convertible<E*, std::exception*>::value,
"Exception must derive from std::exception.");
E e(std::forward<Args>(args)...); E e(std::forward<Args>(args)...);
LogThrow(std::string("Throwing exception of type " + beast::type_name<E>() + ": ") + e.what()); LogThrow(
std::string(
"Throwing exception of type " + beast::type_name<E>() + ": ") +
e.what());
throw e; throw e;
} }

View File

@@ -24,7 +24,8 @@ public:
Collection const& collection; Collection const& collection;
std::string const delimiter; std::string const delimiter;
explicit CollectionAndDelimiter(Collection const& c, std::string delim) : collection(c), delimiter(std::move(delim)) explicit CollectionAndDelimiter(Collection const& c, std::string delim)
: collection(c), delimiter(std::move(delim))
{ {
} }
@@ -32,7 +33,11 @@ public:
friend Stream& friend Stream&
operator<<(Stream& s, CollectionAndDelimiter const& cd) operator<<(Stream& s, CollectionAndDelimiter const& cd)
{ {
return join(s, std::begin(cd.collection), std::end(cd.collection), cd.delimiter); return join(
s,
std::begin(cd.collection),
std::end(cd.collection),
cd.delimiter);
} }
}; };
@@ -64,7 +69,8 @@ public:
char const* collection; char const* collection;
std::string const delimiter; std::string const delimiter;
explicit CollectionAndDelimiter(char const c[N], std::string delim) : collection(c), delimiter(std::move(delim)) explicit CollectionAndDelimiter(char const c[N], std::string delim)
: collection(c), delimiter(std::move(delim))
{ {
} }

View File

@@ -51,7 +51,8 @@ public:
using const_reference = value_type const&; using const_reference = value_type const&;
using pointer = value_type*; using pointer = value_type*;
using const_pointer = value_type const*; using const_pointer = value_type const*;
using map_type = std::unordered_map<key_type, mapped_type, hasher, key_equal, allocator_type>; using map_type = std::
unordered_map<key_type, mapped_type, hasher, key_equal, allocator_type>;
using partition_map_type = std::vector<map_type>; using partition_map_type = std::vector<map_type>;
struct iterator struct iterator
@@ -112,7 +113,8 @@ public:
friend bool friend bool
operator==(iterator const& lhs, iterator const& rhs) operator==(iterator const& lhs, iterator const& rhs)
{ {
return lhs.map_ == rhs.map_ && lhs.ait_ == rhs.ait_ && lhs.mit_ == rhs.mit_; return lhs.map_ == rhs.map_ && lhs.ait_ == rhs.ait_ &&
lhs.mit_ == rhs.mit_;
} }
friend bool friend bool
@@ -188,7 +190,8 @@ public:
friend bool friend bool
operator==(const_iterator const& lhs, const_iterator const& rhs) operator==(const_iterator const& lhs, const_iterator const& rhs)
{ {
return lhs.map_ == rhs.map_ && lhs.ait_ == rhs.ait_ && lhs.mit_ == rhs.mit_; return lhs.map_ == rhs.map_ && lhs.ait_ == rhs.ait_ &&
lhs.mit_ == rhs.mit_;
} }
friend bool friend bool
@@ -228,11 +231,14 @@ private:
} }
public: public:
partitioned_unordered_map(std::optional<std::size_t> partitions = std::nullopt) partitioned_unordered_map(
std::optional<std::size_t> partitions = std::nullopt)
{ {
// Set partitions to the number of hardware threads if the parameter // Set partitions to the number of hardware threads if the parameter
// is either empty or set to 0. // is either empty or set to 0.
partitions_ = partitions && *partitions ? *partitions : std::thread::hardware_concurrency(); partitions_ = partitions && *partitions
? *partitions
: std::thread::hardware_concurrency();
map_.resize(partitions_); map_.resize(partitions_);
XRPL_ASSERT( XRPL_ASSERT(
partitions_, partitions_,
@@ -331,8 +337,10 @@ public:
auto const& key = std::get<0>(keyTuple); auto const& key = std::get<0>(keyTuple);
iterator it(&map_); iterator it(&map_);
it.ait_ = it.map_->begin() + partitioner(key); it.ait_ = it.map_->begin() + partitioner(key);
auto [eit, inserted] = auto [eit, inserted] = it.ait_->emplace(
it.ait_->emplace(std::piecewise_construct, std::forward<T>(keyTuple), std::forward<U>(valueTuple)); std::piecewise_construct,
std::forward<T>(keyTuple),
std::forward<U>(valueTuple));
it.mit_ = eit; it.mit_ = eit;
return {it, inserted}; return {it, inserted};
} }
@@ -343,7 +351,8 @@ public:
{ {
iterator it(&map_); iterator it(&map_);
it.ait_ = it.map_->begin() + partitioner(key); it.ait_ = it.map_->begin() + partitioner(key);
auto [eit, inserted] = it.ait_->emplace(std::forward<T>(key), std::forward<U>(val)); auto [eit, inserted] =
it.ait_->emplace(std::forward<T>(key), std::forward<U>(val));
it.mit_ = eit; it.mit_ = eit;
return {it, inserted}; return {it, inserted};
} }

View File

@@ -20,7 +20,8 @@ static_assert(
"The Ripple default PRNG engine must return an unsigned integral type."); "The Ripple default PRNG engine must return an unsigned integral type.");
static_assert( static_assert(
std::numeric_limits<beast::xor_shift_engine::result_type>::max() >= std::numeric_limits<std::uint64_t>::max(), std::numeric_limits<beast::xor_shift_engine::result_type>::max() >=
std::numeric_limits<std::uint64_t>::max(),
"The Ripple default PRNG engine return must be at least 64 bits wide."); "The Ripple default PRNG engine return must be at least 64 bits wide.");
#endif #endif
@@ -89,7 +90,9 @@ default_prng()
*/ */
/** @{ */ /** @{ */
template <class Engine, class Integral> template <class Engine, class Integral>
std::enable_if_t<std::is_integral<Integral>::value && detail::is_engine<Engine>::value, Integral> std::enable_if_t<
std::is_integral<Integral>::value && detail::is_engine<Engine>::value,
Integral>
rand_int(Engine& engine, Integral min, Integral max) rand_int(Engine& engine, Integral min, Integral max)
{ {
XRPL_ASSERT(max > min, "xrpl::rand_int : max over min inputs"); XRPL_ASSERT(max > min, "xrpl::rand_int : max over min inputs");
@@ -108,7 +111,9 @@ rand_int(Integral min, Integral max)
} }
template <class Engine, class Integral> template <class Engine, class Integral>
std::enable_if_t<std::is_integral<Integral>::value && detail::is_engine<Engine>::value, Integral> std::enable_if_t<
std::is_integral<Integral>::value && detail::is_engine<Engine>::value,
Integral>
rand_int(Engine& engine, Integral max) rand_int(Engine& engine, Integral max)
{ {
return rand_int(engine, Integral(0), max); return rand_int(engine, Integral(0), max);
@@ -122,7 +127,9 @@ rand_int(Integral max)
} }
template <class Integral, class Engine> template <class Integral, class Engine>
std::enable_if_t<std::is_integral<Integral>::value && detail::is_engine<Engine>::value, Integral> std::enable_if_t<
std::is_integral<Integral>::value && detail::is_engine<Engine>::value,
Integral>
rand_int(Engine& engine) rand_int(Engine& engine)
{ {
return rand_int(engine, std::numeric_limits<Integral>::max()); return rand_int(engine, std::numeric_limits<Integral>::max());
@@ -140,17 +147,23 @@ rand_int()
/** @{ */ /** @{ */
template <class Byte, class Engine> template <class Byte, class Engine>
std::enable_if_t< std::enable_if_t<
(std::is_same<Byte, unsigned char>::value || std::is_same<Byte, std::uint8_t>::value) && (std::is_same<Byte, unsigned char>::value ||
std::is_same<Byte, std::uint8_t>::value) &&
detail::is_engine<Engine>::value, detail::is_engine<Engine>::value,
Byte> Byte>
rand_byte(Engine& engine) rand_byte(Engine& engine)
{ {
return static_cast<Byte>( return static_cast<Byte>(rand_int<Engine, std::uint32_t>(
rand_int<Engine, std::uint32_t>(engine, std::numeric_limits<Byte>::min(), std::numeric_limits<Byte>::max())); engine,
std::numeric_limits<Byte>::min(),
std::numeric_limits<Byte>::max()));
} }
template <class Byte = std::uint8_t> template <class Byte = std::uint8_t>
std::enable_if_t<(std::is_same<Byte, unsigned char>::value || std::is_same<Byte, std::uint8_t>::value), Byte> std::enable_if_t<
(std::is_same<Byte, unsigned char>::value ||
std::is_same<Byte, std::uint8_t>::value),
Byte>
rand_byte() rand_byte()
{ {
return rand_byte<Byte>(default_prng()); return rand_byte<Byte>(default_prng());

View File

@@ -1,5 +1,5 @@
#ifndef XRPL_BASICS_ROCKSDB_H_INCLUDED #ifndef XRPL_UNITY_ROCKSDB_H_INCLUDED
#define XRPL_BASICS_ROCKSDB_H_INCLUDED #define XRPL_UNITY_ROCKSDB_H_INCLUDED
#if XRPL_ROCKSDB_AVAILABLE #if XRPL_ROCKSDB_AVAILABLE
// #include <rocksdb2/port/port_posix.h> // #include <rocksdb2/port/port_posix.h>

View File

@@ -12,29 +12,38 @@ namespace xrpl {
template <class Src, class Dest> template <class Src, class Dest>
concept SafeToCast = (std::is_integral_v<Src> && std::is_integral_v<Dest>) && concept SafeToCast = (std::is_integral_v<Src> && std::is_integral_v<Dest>) &&
(std::is_signed<Src>::value || std::is_unsigned<Dest>::value) && (std::is_signed<Src>::value || std::is_unsigned<Dest>::value) &&
(std::is_signed<Src>::value != std::is_signed<Dest>::value ? sizeof(Dest) > sizeof(Src) (std::is_signed<Src>::value != std::is_signed<Dest>::value
: sizeof(Dest) >= sizeof(Src)); ? sizeof(Dest) > sizeof(Src)
: sizeof(Dest) >= sizeof(Src));
template <class Dest, class Src> template <class Dest, class Src>
inline constexpr std::enable_if_t<std::is_integral_v<Dest> && std::is_integral_v<Src>, Dest> inline constexpr std::
safe_cast(Src s) noexcept enable_if_t<std::is_integral_v<Dest> && std::is_integral_v<Src>, Dest>
safe_cast(Src s) noexcept
{ {
static_assert(std::is_signed_v<Dest> || std::is_unsigned_v<Src>, "Cannot cast signed to unsigned"); static_assert(
constexpr unsigned not_same = std::is_signed_v<Dest> != std::is_signed_v<Src>; std::is_signed_v<Dest> || std::is_unsigned_v<Src>,
static_assert(sizeof(Dest) >= sizeof(Src) + not_same, "Destination is too small to hold all values of source"); "Cannot cast signed to unsigned");
constexpr unsigned not_same =
std::is_signed_v<Dest> != std::is_signed_v<Src>;
static_assert(
sizeof(Dest) >= sizeof(Src) + not_same,
"Destination is too small to hold all values of source");
return static_cast<Dest>(s); return static_cast<Dest>(s);
} }
template <class Dest, class Src> template <class Dest, class Src>
inline constexpr std::enable_if_t<std::is_enum_v<Dest> && std::is_integral_v<Src>, Dest> inline constexpr std::
safe_cast(Src s) noexcept enable_if_t<std::is_enum_v<Dest> && std::is_integral_v<Src>, Dest>
safe_cast(Src s) noexcept
{ {
return static_cast<Dest>(safe_cast<std::underlying_type_t<Dest>>(s)); return static_cast<Dest>(safe_cast<std::underlying_type_t<Dest>>(s));
} }
template <class Dest, class Src> template <class Dest, class Src>
inline constexpr std::enable_if_t<std::is_integral_v<Dest> && std::is_enum_v<Src>, Dest> inline constexpr std::
safe_cast(Src s) noexcept enable_if_t<std::is_integral_v<Dest> && std::is_enum_v<Src>, Dest>
safe_cast(Src s) noexcept
{ {
return safe_cast<Dest>(static_cast<std::underlying_type_t<Src>>(s)); return safe_cast<Dest>(static_cast<std::underlying_type_t<Src>>(s));
} }
@@ -44,8 +53,9 @@ safe_cast(Src s) noexcept
// underlying types become safe, it can be converted to a safe_cast. // underlying types become safe, it can be converted to a safe_cast.
template <class Dest, class Src> template <class Dest, class Src>
inline constexpr std::enable_if_t<std::is_integral_v<Dest> && std::is_integral_v<Src>, Dest> inline constexpr std::
unsafe_cast(Src s) noexcept enable_if_t<std::is_integral_v<Dest> && std::is_integral_v<Src>, Dest>
unsafe_cast(Src s) noexcept
{ {
static_assert( static_assert(
!SafeToCast<Src, Dest>, !SafeToCast<Src, Dest>,
@@ -55,15 +65,17 @@ unsafe_cast(Src s) noexcept
} }
template <class Dest, class Src> template <class Dest, class Src>
inline constexpr std::enable_if_t<std::is_enum_v<Dest> && std::is_integral_v<Src>, Dest> inline constexpr std::
unsafe_cast(Src s) noexcept enable_if_t<std::is_enum_v<Dest> && std::is_integral_v<Src>, Dest>
unsafe_cast(Src s) noexcept
{ {
return static_cast<Dest>(unsafe_cast<std::underlying_type_t<Dest>>(s)); return static_cast<Dest>(unsafe_cast<std::underlying_type_t<Dest>>(s));
} }
template <class Dest, class Src> template <class Dest, class Src>
inline constexpr std::enable_if_t<std::is_integral_v<Dest> && std::is_enum_v<Src>, Dest> inline constexpr std::
unsafe_cast(Src s) noexcept enable_if_t<std::is_integral_v<Dest> && std::is_enum_v<Src>, Dest>
unsafe_cast(Src s) noexcept
{ {
return unsafe_cast<Dest>(static_cast<std::underlying_type_t<Src>>(s)); return unsafe_cast<Dest>(static_cast<std::underlying_type_t<Src>>(s));
} }

View File

@@ -36,8 +36,10 @@ public:
} }
scope_exit(scope_exit&& rhs) noexcept( scope_exit(scope_exit&& rhs) noexcept(
std::is_nothrow_move_constructible_v<EF> || std::is_nothrow_copy_constructible_v<EF>) std::is_nothrow_move_constructible_v<EF> ||
: exit_function_{std::forward<EF>(rhs.exit_function_)}, execute_on_destruction_{rhs.execute_on_destruction_} std::is_nothrow_copy_constructible_v<EF>)
: exit_function_{std::forward<EF>(rhs.exit_function_)}
, execute_on_destruction_{rhs.execute_on_destruction_}
{ {
rhs.release(); rhs.release();
} }
@@ -48,11 +50,14 @@ public:
template <class EFP> template <class EFP>
explicit scope_exit( explicit scope_exit(
EFP&& f, EFP&& f,
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_exit> && std::is_constructible_v<EF, EFP>>* = std::enable_if_t<
0) noexcept !std::is_same_v<std::remove_cv_t<EFP>, scope_exit> &&
std::is_constructible_v<EF, EFP>>* = 0) noexcept
: exit_function_{std::forward<EFP>(f)} : exit_function_{std::forward<EFP>(f)}
{ {
static_assert(std::is_nothrow_constructible_v<EF, decltype(std::forward<EFP>(f))>); static_assert(
std::
is_nothrow_constructible_v<EF, decltype(std::forward<EFP>(f))>);
} }
void void
@@ -75,12 +80,14 @@ class scope_fail
public: public:
~scope_fail() ~scope_fail()
{ {
if (execute_on_destruction_ && std::uncaught_exceptions() > uncaught_on_creation_) if (execute_on_destruction_ &&
std::uncaught_exceptions() > uncaught_on_creation_)
exit_function_(); exit_function_();
} }
scope_fail(scope_fail&& rhs) noexcept( scope_fail(scope_fail&& rhs) noexcept(
std::is_nothrow_move_constructible_v<EF> || std::is_nothrow_copy_constructible_v<EF>) std::is_nothrow_move_constructible_v<EF> ||
std::is_nothrow_copy_constructible_v<EF>)
: exit_function_{std::forward<EF>(rhs.exit_function_)} : exit_function_{std::forward<EF>(rhs.exit_function_)}
, execute_on_destruction_{rhs.execute_on_destruction_} , execute_on_destruction_{rhs.execute_on_destruction_}
, uncaught_on_creation_{rhs.uncaught_on_creation_} , uncaught_on_creation_{rhs.uncaught_on_creation_}
@@ -94,11 +101,14 @@ public:
template <class EFP> template <class EFP>
explicit scope_fail( explicit scope_fail(
EFP&& f, EFP&& f,
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_fail> && std::is_constructible_v<EF, EFP>>* = std::enable_if_t<
0) noexcept !std::is_same_v<std::remove_cv_t<EFP>, scope_fail> &&
std::is_constructible_v<EF, EFP>>* = 0) noexcept
: exit_function_{std::forward<EFP>(f)} : exit_function_{std::forward<EFP>(f)}
{ {
static_assert(std::is_nothrow_constructible_v<EF, decltype(std::forward<EFP>(f))>); static_assert(
std::
is_nothrow_constructible_v<EF, decltype(std::forward<EFP>(f))>);
} }
void void
@@ -121,12 +131,14 @@ class scope_success
public: public:
~scope_success() noexcept(noexcept(exit_function_())) ~scope_success() noexcept(noexcept(exit_function_()))
{ {
if (execute_on_destruction_ && std::uncaught_exceptions() <= uncaught_on_creation_) if (execute_on_destruction_ &&
std::uncaught_exceptions() <= uncaught_on_creation_)
exit_function_(); exit_function_();
} }
scope_success(scope_success&& rhs) noexcept( scope_success(scope_success&& rhs) noexcept(
std::is_nothrow_move_constructible_v<EF> || std::is_nothrow_copy_constructible_v<EF>) std::is_nothrow_move_constructible_v<EF> ||
std::is_nothrow_copy_constructible_v<EF>)
: exit_function_{std::forward<EF>(rhs.exit_function_)} : exit_function_{std::forward<EF>(rhs.exit_function_)}
, execute_on_destruction_{rhs.execute_on_destruction_} , execute_on_destruction_{rhs.execute_on_destruction_}
, uncaught_on_creation_{rhs.uncaught_on_creation_} , uncaught_on_creation_{rhs.uncaught_on_creation_}
@@ -140,7 +152,9 @@ public:
template <class EFP> template <class EFP>
explicit scope_success( explicit scope_success(
EFP&& f, EFP&& f,
std::enable_if_t<!std::is_same_v<std::remove_cv_t<EFP>, scope_success> && std::is_constructible_v<EF, EFP>>* = std::enable_if_t<
!std::is_same_v<std::remove_cv_t<EFP>, scope_success> &&
std::is_constructible_v<EF, EFP>>* =
0) noexcept(std::is_nothrow_constructible_v<EF, EFP> || std::is_nothrow_constructible_v<EF, EFP&>) 0) noexcept(std::is_nothrow_constructible_v<EF, EFP> || std::is_nothrow_constructible_v<EF, EFP&>)
: exit_function_{std::forward<EFP>(f)} : exit_function_{std::forward<EFP>(f)}
{ {
@@ -199,9 +213,12 @@ class scope_unlock
std::unique_lock<Mutex>* plock; std::unique_lock<Mutex>* plock;
public: public:
explicit scope_unlock(std::unique_lock<Mutex>& lock) noexcept(true) : plock(&lock) explicit scope_unlock(std::unique_lock<Mutex>& lock) noexcept(true)
: plock(&lock)
{ {
XRPL_ASSERT(plock->owns_lock(), "xrpl::scope_unlock::scope_unlock : mutex must be locked"); XRPL_ASSERT(
plock->owns_lock(),
"xrpl::scope_unlock::scope_unlock : mutex must be locked");
plock->unlock(); plock->unlock();
} }

View File

@@ -100,9 +100,12 @@ public:
@note For performance reasons, you should strive to have `lock` be @note For performance reasons, you should strive to have `lock` be
on a cacheline by itself. on a cacheline by itself.
*/ */
packed_spinlock(std::atomic<T>& lock, int index) : bits_(lock), mask_(static_cast<T>(1) << index) packed_spinlock(std::atomic<T>& lock, int index)
: bits_(lock), mask_(static_cast<T>(1) << index)
{ {
XRPL_ASSERT(index >= 0 && (mask_ != 0), "xrpl::packed_spinlock::packed_spinlock : valid index and mask"); XRPL_ASSERT(
index >= 0 && (mask_ != 0),
"xrpl::packed_spinlock::packed_spinlock : valid index and mask");
} }
[[nodiscard]] bool [[nodiscard]] bool
@@ -175,7 +178,10 @@ public:
T expected = 0; T expected = 0;
return lock_.compare_exchange_weak( return lock_.compare_exchange_weak(
expected, std::numeric_limits<T>::max(), std::memory_order_acquire, std::memory_order_relaxed); expected,
std::numeric_limits<T>::max(),
std::memory_order_acquire,
std::memory_order_relaxed);
} }
void void

View File

@@ -11,7 +11,9 @@ std::string
strHex(FwdIt begin, FwdIt end) strHex(FwdIt begin, FwdIt end)
{ {
static_assert( static_assert(
std::is_convertible<typename std::iterator_traits<FwdIt>::iterator_category, std::forward_iterator_tag>::value, std::is_convertible<
typename std::iterator_traits<FwdIt>::iterator_category,
std::forward_iterator_tag>::value,
"FwdIt must be a forward iterator"); "FwdIt must be a forward iterator");
std::string result; std::string result;
result.reserve(2 * std::distance(begin, end)); result.reserve(2 * std::distance(begin, end));

Some files were not shown because too many files have changed in this diff Show More