mirror of
https://github.com/XRPLF/rippled.git
synced 2026-01-15 20:25:26 +00:00
Compare commits
8 Commits
bthomee/up
...
bthomee/ma
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9de4bcac05 | ||
|
|
90234d1fd0 | ||
|
|
2543f2eb58 | ||
|
|
5c3eaa5101 | ||
|
|
6dfaeb11bc | ||
|
|
01d57c9aa3 | ||
|
|
c115b77970 | ||
|
|
05a76895ad |
@@ -32,7 +32,9 @@ parsers:
|
||||
slack_app: false
|
||||
|
||||
ignore:
|
||||
- "src/test/"
|
||||
- "src/tests/"
|
||||
- ".github/scripts/"
|
||||
- "include/xrpl/beast/test/"
|
||||
- "include/xrpl/beast/unit_test/"
|
||||
- "src/test/"
|
||||
- "src/tests/"
|
||||
- "tests/"
|
||||
|
||||
@@ -1,286 +0,0 @@
|
||||
ignorePaths:
|
||||
- build/**
|
||||
- src/libxrpl/crypto
|
||||
- src/test/** # Will be removed in the future
|
||||
- CMakeUserPresets.json
|
||||
- Doxyfile
|
||||
- docs/**/*.puml
|
||||
- cmake/**
|
||||
- LICENSE.md
|
||||
language: en
|
||||
allowCompoundWords: true
|
||||
ignoreRandomStrings: true
|
||||
minWordLength: 5
|
||||
dictionaries:
|
||||
- cpp
|
||||
- en_US
|
||||
- en_GB
|
||||
ignoreRegExpList:
|
||||
- /[rs][1-9A-HJ-NP-Za-km-z]{25,34}/g # addresses and seeds
|
||||
- /(XRPL|BEAST)_[A-Z_0-9]+_H_INCLUDED+/g # include guards
|
||||
- /(XRPL|BEAST)_[A-Z_0-9]+_H+/g # include guards
|
||||
- /::[a-z:_]+/g # things from other namespaces
|
||||
- /lib[a-z]+/g # libraries
|
||||
- /[0-9]{4}-[0-9]{2}-[0-9]{2}[,:][A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright dates
|
||||
- /[0-9]{4}[,:]?\s*[A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright years
|
||||
- /\[[A-Za-z0-9-]+\]\(https:\/\/github.com\/[A-Za-z0-9-]+\)/g # Github usernames
|
||||
- /-[DWw][a-zA-Z0-9_-]+=/g # compile flags
|
||||
- /[\['"`]-[DWw][a-zA-Z0-9_-]+['"`\]]/g # compile flags
|
||||
suggestWords:
|
||||
- xprl->xrpl
|
||||
- xprld->xrpld
|
||||
- unsynched->unsynced
|
||||
- synched->synced
|
||||
- synch->sync
|
||||
words:
|
||||
- abempty
|
||||
- AMMID
|
||||
- amt
|
||||
- amts
|
||||
- asnode
|
||||
- asynchrony
|
||||
- attestation
|
||||
- authorises
|
||||
- autobridge
|
||||
- autobridged
|
||||
- autobridging
|
||||
- bimap
|
||||
- bindir
|
||||
- bookdir
|
||||
- Bougalis
|
||||
- Britto
|
||||
- Btrfs
|
||||
- canonicality
|
||||
- checkme
|
||||
- choco
|
||||
- chrono
|
||||
- citardauq
|
||||
- clawback
|
||||
- clawbacks
|
||||
- coeffs
|
||||
- coldwallet
|
||||
- compr
|
||||
- conanfile
|
||||
- conanrun
|
||||
- confs
|
||||
- connectability
|
||||
- coro
|
||||
- coros
|
||||
- cowid
|
||||
- cryptocondition
|
||||
- cryptoconditional
|
||||
- cryptoconditions
|
||||
- csprng
|
||||
- ctest
|
||||
- ctid
|
||||
- currenttxhash
|
||||
- daria
|
||||
- dcmake
|
||||
- dearmor
|
||||
- deleteme
|
||||
- demultiplexer
|
||||
- deserializaton
|
||||
- desync
|
||||
- desynced
|
||||
- determ
|
||||
- distro
|
||||
- doxyfile
|
||||
- dxrpl
|
||||
- endmacro
|
||||
- exceptioned
|
||||
- Falco
|
||||
- finalizers
|
||||
- firewalled
|
||||
- fmtdur
|
||||
- fsanitize
|
||||
- funclets
|
||||
- gcov
|
||||
- gcovr
|
||||
- ghead
|
||||
- Gnutella
|
||||
- gpgcheck
|
||||
- gpgkey
|
||||
- hotwallet
|
||||
- ifndef
|
||||
- inequation
|
||||
- insuf
|
||||
- insuff
|
||||
- iou
|
||||
- ious
|
||||
- isrdc
|
||||
- itype
|
||||
- jemalloc
|
||||
- jlog
|
||||
- keylet
|
||||
- keylets
|
||||
- keyvadb
|
||||
- ledgerentry
|
||||
- ledgerhash
|
||||
- ledgerindex
|
||||
- leftw
|
||||
- legleux
|
||||
- levelization
|
||||
- levelized
|
||||
- libpb
|
||||
- libxrpl
|
||||
- llection
|
||||
- LOCALGOOD
|
||||
- logwstream
|
||||
- lseq
|
||||
- lsmf
|
||||
- ltype
|
||||
- mcmodel
|
||||
- MEMORYSTATUSEX
|
||||
- Merkle
|
||||
- Metafuncton
|
||||
- misprediction
|
||||
- mptbalance
|
||||
- mptflags
|
||||
- mptid
|
||||
- mptissuance
|
||||
- mptissuanceid
|
||||
- mptoken
|
||||
- mptokenid
|
||||
- mptokenissuance
|
||||
- mptokens
|
||||
- mpts
|
||||
- multisig
|
||||
- multisign
|
||||
- multisigned
|
||||
- Nakamoto
|
||||
- nftid
|
||||
- nftoffer
|
||||
- nftoken
|
||||
- nftokenid
|
||||
- nftokenpages
|
||||
- nftokens
|
||||
- nftpage
|
||||
- nikb
|
||||
- nonxrp
|
||||
- noripple
|
||||
- nudb
|
||||
- nullptr
|
||||
- nunl
|
||||
- Nyffenegger
|
||||
- ostr
|
||||
- partitioner
|
||||
- paychan
|
||||
- paychans
|
||||
- permdex
|
||||
- perminute
|
||||
- permissioned
|
||||
- pointee
|
||||
- preauth
|
||||
- preauthorization
|
||||
- preauthorize
|
||||
- preauthorizes
|
||||
- preclaim
|
||||
- protobuf
|
||||
- protos
|
||||
- ptrs
|
||||
- pyenv
|
||||
- qalloc
|
||||
- queuable
|
||||
- Raphson
|
||||
- replayer
|
||||
- rerere
|
||||
- retriable
|
||||
- RIPD
|
||||
- ripdtop
|
||||
- rippleci
|
||||
- rippled
|
||||
- ripplerpc
|
||||
- rippletest
|
||||
- RLUSD
|
||||
- rngfill
|
||||
- rocksdb
|
||||
- Rohrs
|
||||
- roundings
|
||||
- sahyadri
|
||||
- Satoshi
|
||||
- scons
|
||||
- secp
|
||||
- sendq
|
||||
- seqit
|
||||
- sf
|
||||
- SFIELD
|
||||
- shamap
|
||||
- shamapitem
|
||||
- sidechain
|
||||
- SIGGOOD
|
||||
- sle
|
||||
- sles
|
||||
- soci
|
||||
- socidb
|
||||
- sslws
|
||||
- statsd
|
||||
- STATSDCOLLECTOR
|
||||
- stissue
|
||||
- stnum
|
||||
- stobj
|
||||
- stobject
|
||||
- stpath
|
||||
- stpathset
|
||||
- sttx
|
||||
- stvar
|
||||
- stvector
|
||||
- stxchainattestations
|
||||
- superpeer
|
||||
- superpeers
|
||||
- takergets
|
||||
- takerpays
|
||||
- ters
|
||||
- TMEndpointv2
|
||||
- trixie
|
||||
- tx
|
||||
- txid
|
||||
- txids
|
||||
- txjson
|
||||
- txn
|
||||
- txns
|
||||
- txs
|
||||
- UBSAN
|
||||
- ubsan
|
||||
- umant
|
||||
- unacquired
|
||||
- unambiguity
|
||||
- unauthorizes
|
||||
- unauthorizing
|
||||
- unergonomic
|
||||
- unfetched
|
||||
- unflatten
|
||||
- unfund
|
||||
- unimpair
|
||||
- unroutable
|
||||
- unscalable
|
||||
- unserviced
|
||||
- unshareable
|
||||
- unshares
|
||||
- unsquelch
|
||||
- unsquelched
|
||||
- unsquelching
|
||||
- unvalidated
|
||||
- unveto
|
||||
- unvetoed
|
||||
- upvotes
|
||||
- USDB
|
||||
- variadics
|
||||
- venv
|
||||
- vfalco
|
||||
- vinnie
|
||||
- wextra
|
||||
- wptr
|
||||
- writeme
|
||||
- wsrch
|
||||
- wthread
|
||||
- xbridge
|
||||
- xchain
|
||||
- ximinez
|
||||
- EXPECT_STREQ
|
||||
- XMACRO
|
||||
- xrpkuwait
|
||||
- xrpl
|
||||
- xrpld
|
||||
- xrplf
|
||||
- xxhash
|
||||
- xxhasher
|
||||
1
.gitattributes
vendored
1
.gitattributes
vendored
@@ -1,6 +1,5 @@
|
||||
# Set default behaviour, in case users don't have core.autocrlf set.
|
||||
#* text=auto
|
||||
# cspell: disable
|
||||
|
||||
# Visual Studio
|
||||
*.sln text eol=crlf
|
||||
|
||||
6
.github/actions/build-deps/action.yml
vendored
6
.github/actions/build-deps/action.yml
vendored
@@ -18,10 +18,6 @@ inputs:
|
||||
description: "The logging verbosity."
|
||||
required: false
|
||||
default: "verbose"
|
||||
sanitizers:
|
||||
description: "The sanitizers to enable."
|
||||
required: false
|
||||
default: ""
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
@@ -33,11 +29,9 @@ runs:
|
||||
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
LOG_VERBOSITY: ${{ inputs.log_verbosity }}
|
||||
SANITIZERS: ${{ inputs.sanitizers }}
|
||||
run: |
|
||||
echo 'Installing dependencies.'
|
||||
conan install \
|
||||
--profile ci \
|
||||
--build="${BUILD_OPTION}" \
|
||||
--options:host='&:tests=True' \
|
||||
--options:host='&:xrpld=True' \
|
||||
|
||||
21
.github/actions/extract-version/action.yml
vendored
21
.github/actions/extract-version/action.yml
vendored
@@ -1,21 +0,0 @@
|
||||
name: Extract version
|
||||
description: "Extract version from BuildInfo.cpp"
|
||||
|
||||
outputs:
|
||||
version:
|
||||
description: "The version extracted from BuildInfo.cpp."
|
||||
value: ${{ steps.version.outputs.version }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Extract version
|
||||
id: version
|
||||
shell: bash
|
||||
run: |
|
||||
VERSION="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')"
|
||||
if [[ -z "${VERSION}" ]]; then
|
||||
echo 'Unable to extract version from BuildInfo.cpp.'
|
||||
exit 1
|
||||
fi
|
||||
echo "version=${VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
24
.github/actions/print-env/action.yml
vendored
24
.github/actions/print-env/action.yml
vendored
@@ -11,6 +11,12 @@ runs:
|
||||
echo 'Checking environment variables.'
|
||||
set
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking Conan version.'
|
||||
conan --version
|
||||
|
||||
- name: Check configuration (Linux and macOS)
|
||||
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
|
||||
shell: bash
|
||||
@@ -21,23 +27,17 @@ runs:
|
||||
echo 'Checking environment variables.'
|
||||
env | sort
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking compiler version.'
|
||||
${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version
|
||||
|
||||
echo 'Checking Conan version.'
|
||||
conan --version
|
||||
|
||||
echo 'Checking Ninja version.'
|
||||
ninja --version
|
||||
|
||||
echo 'Checking nproc version.'
|
||||
nproc --version
|
||||
|
||||
- name: Check configuration (all)
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Checking Ccache version.'
|
||||
ccache --version
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking Conan version.'
|
||||
conan --version
|
||||
|
||||
2
.github/actions/setup-conan/action.yml
vendored
2
.github/actions/setup-conan/action.yml
vendored
@@ -28,7 +28,7 @@ runs:
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Installing profile.'
|
||||
conan config install conan/profiles/ -tf $(conan config home)/profiles/
|
||||
conan config install conan/profiles/default -tf $(conan config home)/profiles/
|
||||
|
||||
echo 'Conan profile:'
|
||||
conan profile show
|
||||
|
||||
72
.github/actions/upload-recipe/action.yml
vendored
72
.github/actions/upload-recipe/action.yml
vendored
@@ -1,72 +0,0 @@
|
||||
name: Upload Conan recipe
|
||||
description: "Upload recipe to a Conan remote."
|
||||
|
||||
inputs:
|
||||
conan_recipe_name:
|
||||
description: "The name of the recipe to use."
|
||||
required: false
|
||||
default: xrpl
|
||||
conan_recipe_version:
|
||||
description: "The version of the recipe to use."
|
||||
required: true
|
||||
conan_recipe_channel:
|
||||
description: "The optional Conan channel to use."
|
||||
required: false
|
||||
conan_recipe_user:
|
||||
description: "The optional Conan user to use."
|
||||
required: false
|
||||
conan_remote_name:
|
||||
description: "The name of the Conan remote to use."
|
||||
required: true
|
||||
conan_remote_url:
|
||||
description: "The URL of the Conan endpoint to use."
|
||||
required: true
|
||||
conan_remote_username:
|
||||
description: "The username for logging into the Conan remote."
|
||||
required: true
|
||||
conan_remote_password:
|
||||
description: "The password for logging into the Conan remote."
|
||||
required: true
|
||||
|
||||
outputs:
|
||||
conan_ref: ${{ steps.ref.outputs.ref }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
|
||||
steps:
|
||||
- name: Calculate Conan reference
|
||||
id: ref
|
||||
shell: bash
|
||||
env:
|
||||
CONAN_RECIPE_NAME: ${{ inputs.conan_recipe_name }}
|
||||
CONAN_RECIPE_VERSION: ${{ inputs.conan_recipe_version }}
|
||||
CONAN_RECIPE_CHANNEL: ${{ inputs.conan_recipe_channel }}
|
||||
CONAN_RECIPE_USER: ${{ inputs.conan_recipe_user }}
|
||||
run: |
|
||||
if [[ -n "${CONAN_RECIPE_USER}" && -n "${CONAN_RECIPE_CHANNEL}" ]]; then
|
||||
echo "ref=${CONAN_RECIPE_NAME}/${CONAN_RECIPE_VERSION}@${CONAN_RECIPE_USER}/${CONAN_RECIPE_CHANNEL}" >> "${GITHUB_OUTPUT}"
|
||||
else
|
||||
echo "ref=${CONAN_RECIPE_NAME}/${CONAN_RECIPE_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
- name: Set up Conan
|
||||
uses: ./.github/actions/setup-conan
|
||||
with:
|
||||
conan_remote_name: ${{ inputs.conan_remote_name }}
|
||||
conan_remote_url: ${{ inputs.conan_remote_url }}
|
||||
- name: Log into Conan remote
|
||||
shell: bash
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
CONAN_REMOTE_USERNAME: ${{ inputs.conan_remote_username }}
|
||||
CONAN_REMOTE_PASSWORD: ${{ inputs.conan_remote_password }}
|
||||
run: conan remote login "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_USERNAME}" --password "${CONAN_REMOTE_PASSWORD}"
|
||||
- name: Upload package
|
||||
shell: bash
|
||||
env:
|
||||
CONAN_RECIPE_CHANNEL: ${{ inputs.conan_recipe_channel }}
|
||||
CONAN_RECIPE_USER: ${{ inputs.conan_recipe_user }}
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
run: |
|
||||
conan export --channel="${CONAN_RECIPE_CHANNEL}" --user="${CONAN_RECIPE_USER}" .
|
||||
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" ${{ steps.ref.outputs.ref }}
|
||||
4
.github/scripts/levelization/README.md
vendored
4
.github/scripts/levelization/README.md
vendored
@@ -81,10 +81,10 @@ It generates many files of [results](results):
|
||||
|
||||
- `rawincludes.txt`: The raw dump of the `#includes`
|
||||
- `paths.txt`: A second dump grouping the source module
|
||||
to the destination module, de-duped, and with frequency counts.
|
||||
to the destination module, deduped, and with frequency counts.
|
||||
- `includes/`: A directory where each file represents a module and
|
||||
contains a list of modules and counts that the module _includes_.
|
||||
- `included_by/`: Similar to `includes/`, but the other way around. Each
|
||||
- `includedby/`: Similar to `includes/`, but the other way around. Each
|
||||
file represents a module and contains a list of modules and counts
|
||||
that _include_ the module.
|
||||
- [`loops.txt`](results/loops.txt): A list of direct loops detected
|
||||
|
||||
6
.github/scripts/levelization/generate.sh
vendored
6
.github/scripts/levelization/generate.sh
vendored
@@ -29,7 +29,7 @@ pushd results
|
||||
oldifs=${IFS}
|
||||
IFS=:
|
||||
mkdir includes
|
||||
mkdir included_by
|
||||
mkdir includedby
|
||||
echo Build levelization paths
|
||||
exec 3< ${includes} # open rawincludes.txt for input
|
||||
while read -r -u 3 file include
|
||||
@@ -59,7 +59,7 @@ do
|
||||
echo $level $includelevel | tee -a paths.txt
|
||||
fi
|
||||
done
|
||||
echo Sort and deduplicate paths
|
||||
echo Sort and dedup paths
|
||||
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
|
||||
mv sortedpaths.txt paths.txt
|
||||
exec 3>&- #close fd 3
|
||||
@@ -71,7 +71,7 @@ exec 4<paths.txt # open paths.txt for input
|
||||
while read -r -u 4 count level include
|
||||
do
|
||||
echo ${include} ${count} | tee -a includes/${level}
|
||||
echo ${level} ${count} | tee -a included_by/${include}
|
||||
echo ${level} ${count} | tee -a includedby/${include}
|
||||
done
|
||||
exec 4>&- #close fd 4
|
||||
|
||||
|
||||
6
.github/scripts/rename/README.md
vendored
6
.github/scripts/rename/README.md
vendored
@@ -19,7 +19,7 @@ run from the repository root.
|
||||
1. `.github/scripts/rename/definitions.sh`: This script will rename all
|
||||
definitions, such as include guards, from `RIPPLE_XXX` and `RIPPLED_XXX` to
|
||||
`XRPL_XXX`.
|
||||
2. `.github/scripts/rename/copyright.sh`: This script will remove superfluous
|
||||
2. `.github/scripts/rename/copyright.sh`: This script will remove superflous
|
||||
copyright notices.
|
||||
3. `.github/scripts/rename/cmake.sh`: This script will rename all CMake files
|
||||
from `RippleXXX.cmake` or `RippledXXX.cmake` to `XrplXXX.cmake`, and any
|
||||
@@ -31,9 +31,6 @@ run from the repository root.
|
||||
the `xrpld` binary.
|
||||
5. `.github/scripts/rename/namespace.sh`: This script will rename the C++
|
||||
namespaces from `ripple` to `xrpl`.
|
||||
6. `.github/scripts/rename/config.sh`: This script will rename the config from
|
||||
`rippled.cfg` to `xrpld.cfg`, and updating the code accordingly. The old
|
||||
filename will still be accepted.
|
||||
|
||||
You can run all these scripts from the repository root as follows:
|
||||
|
||||
@@ -43,5 +40,4 @@ You can run all these scripts from the repository root as follows:
|
||||
./.github/scripts/rename/cmake.sh .
|
||||
./.github/scripts/rename/binary.sh .
|
||||
./.github/scripts/rename/namespace.sh .
|
||||
./.github/scripts/rename/config.sh .
|
||||
```
|
||||
|
||||
72
.github/scripts/rename/config.sh
vendored
72
.github/scripts/rename/config.sh
vendored
@@ -1,72 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Exit the script as soon as an error occurs.
|
||||
set -e
|
||||
|
||||
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
|
||||
SED_COMMAND=sed
|
||||
if [[ "${OSTYPE}" == 'darwin'* ]]; then
|
||||
if ! command -v gsed &> /dev/null; then
|
||||
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
|
||||
exit 1
|
||||
fi
|
||||
SED_COMMAND=gsed
|
||||
fi
|
||||
|
||||
# This script renames the config from `rippled.cfg` to `xrpld.cfg`, and updates
|
||||
# the code accordingly. The old filename will still be accepted.
|
||||
# Usage: .github/scripts/rename/config.sh <repository directory>
|
||||
|
||||
if [ "$#" -ne 1 ]; then
|
||||
echo "Usage: $0 <repository directory>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DIRECTORY=$1
|
||||
echo "Processing directory: ${DIRECTORY}"
|
||||
if [ ! -d "${DIRECTORY}" ]; then
|
||||
echo "Error: Directory '${DIRECTORY}' does not exist."
|
||||
exit 1
|
||||
fi
|
||||
pushd ${DIRECTORY}
|
||||
|
||||
# Add the xrpld.cfg to the .gitignore.
|
||||
if ! grep -q 'xrpld.cfg' .gitignore; then
|
||||
${SED_COMMAND} -i '/rippled.cfg/a\
|
||||
/xrpld.cfg' .gitignore
|
||||
fi
|
||||
|
||||
# Rename the files.
|
||||
if [ -e rippled.cfg ]; then
|
||||
mv rippled.cfg xrpld.cfg
|
||||
fi
|
||||
if [ -e cfg/rippled-example.cfg ]; then
|
||||
mv cfg/rippled-example.cfg cfg/xrpld-example.cfg
|
||||
fi
|
||||
|
||||
# Rename inside the files.
|
||||
DIRECTORIES=("cfg" "cmake" "include" "src")
|
||||
for DIRECTORY in "${DIRECTORIES[@]}"; do
|
||||
echo "Processing directory: ${DIRECTORY}"
|
||||
|
||||
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.cmake" -o -name "*.txt" -o -name "*.cfg" -o -name "*.md" \) | while read -r FILE; do
|
||||
echo "Processing file: ${FILE}"
|
||||
${SED_COMMAND} -i -E 's/rippled(-example)?[ .]cfg/xrpld\1.cfg/g' "${FILE}"
|
||||
done
|
||||
done
|
||||
${SED_COMMAND} -i 's/rippled/xrpld/g' cfg/xrpld-example.cfg
|
||||
${SED_COMMAND} -i 's/rippled/xrpld/g' src/test/core/Config_test.cpp
|
||||
${SED_COMMAND} -i 's/ripplevalidators/xrplvalidators/g' src/test/core/Config_test.cpp # cspell: disable-line
|
||||
${SED_COMMAND} -i 's/rippleConfig/xrpldConfig/g' src/test/core/Config_test.cpp
|
||||
${SED_COMMAND} -i 's@ripple/@xrpld/@g' src/test/core/Config_test.cpp
|
||||
${SED_COMMAND} -i 's/Rippled/File/g' src/test/core/Config_test.cpp
|
||||
|
||||
|
||||
# Restore the old config file name in the code that maintains support for now.
|
||||
${SED_COMMAND} -i 's/configLegacyName = "xrpld.cfg"/configLegacyName = "rippled.cfg"/g' src/xrpld/core/detail/Config.cpp
|
||||
|
||||
# Restore an URL.
|
||||
${SED_COMMAND} -i 's/connect-your-xrpld-to-the-xrp-test-net.html/connect-your-rippled-to-the-xrp-test-net.html/g' cfg/xrpld-example.cfg
|
||||
|
||||
popd
|
||||
echo "Renaming complete."
|
||||
12
.github/scripts/rename/copyright.sh
vendored
12
.github/scripts/rename/copyright.sh
vendored
@@ -50,11 +50,11 @@ for DIRECTORY in "${DIRECTORIES[@]}"; do
|
||||
# Handle the cases where the copyright notice is enclosed in /* ... */
|
||||
# and usually surrounded by //---- and //======.
|
||||
${SED_COMMAND} -z -i -E 's@^//-------+\n+@@' "${FILE}"
|
||||
${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}" # cspell: ignore Bougalis Falco Hinnant Ritchford
|
||||
${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}"
|
||||
${SED_COMMAND} -z -i -E 's@^//=======+\n+@@' "${FILE}"
|
||||
|
||||
# Handle the cases where the copyright notice is commented out with //.
|
||||
${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}" # cspell: ignore Vinnie Falco
|
||||
${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}"
|
||||
done
|
||||
done
|
||||
|
||||
@@ -83,16 +83,16 @@ if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/ValidatorInfo.cpp; then
|
||||
echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/ValidatorInfo.cpp)" > src/xrpld/rpc/handlers/ValidatorInfo.cpp
|
||||
fi
|
||||
if ! grep -q 'Bougalis' include/xrpl/basics/SlabAllocator.h; then
|
||||
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h # cspell: ignore Nikolaos Bougalis nikb
|
||||
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h
|
||||
fi
|
||||
if ! grep -q 'Bougalis' include/xrpl/basics/spinlock.h; then
|
||||
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h # cspell: ignore Nikolaos Bougalis nikb
|
||||
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h
|
||||
fi
|
||||
if ! grep -q 'Bougalis' include/xrpl/basics/tagged_integer.h; then
|
||||
echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h # cspell: ignore Nikolaos Bougalis nikb
|
||||
echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h
|
||||
fi
|
||||
if ! grep -q 'Ritchford' include/xrpl/beast/utility/Zero.h; then
|
||||
echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h # cspell: ignore Ritchford
|
||||
echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h
|
||||
fi
|
||||
|
||||
# Restore newlines and tabs in string literals in the affected file.
|
||||
|
||||
118
.github/scripts/strategy-matrix/README.md
vendored
Normal file
118
.github/scripts/strategy-matrix/README.md
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
# Strategy Matrix
|
||||
|
||||
The scripts in this directory will generate a strategy matrix for GitHub Actions
|
||||
CI, depending on the trigger that caused the workflow to run and the platform
|
||||
specified.
|
||||
|
||||
There are several build, test, and publish settings that can be enabled for each
|
||||
configuration. The settings are combined in a Cartesian product to generate the
|
||||
full matrix, while filtering out any combinations not applicable to the trigger.
|
||||
|
||||
## Platforms
|
||||
|
||||
We support three platforms: Linux, macOS, and Windows.
|
||||
|
||||
### Linux
|
||||
|
||||
We support a variety of distributions (Debian, RHEL, and Ubuntu) and compilers
|
||||
(GCC and Clang) on Linux. As there are so many combinations, we don't run them
|
||||
all. Instead, we focus on a few key ones for PR commits and merges, while we run
|
||||
most of them on a scheduled or ad hoc basis.
|
||||
|
||||
Some noteworthy configurations are:
|
||||
|
||||
- The official release build is GCC 14 on Debian Bullseye.
|
||||
- Although we generally enable assertions in release builds, we disable them
|
||||
for the official release build.
|
||||
- We publish .deb and .rpm packages for this build, as well as a Docker image.
|
||||
- For PR commits we also publish packages and images for testing purposes.
|
||||
- Antithesis instrumentation is only supported on Clang 16+ on AMD64.
|
||||
- We publish a Docker image for this build, but no packages.
|
||||
- Coverage reports are generated on Bullseye with GCC 15.
|
||||
- It must be enabled for both commits (to show PR coverage) and merges (to
|
||||
show default branch coverage).
|
||||
|
||||
Note that we try to run pipelines equally across both AMD64 and ARM64, but in
|
||||
some cases we cannot build on ARM64:
|
||||
|
||||
- All Clang 20+ builds on ARM64 are currently skipped due to a Boost build
|
||||
error.
|
||||
- All RHEL builds on AMD64 are currently skipped due to a build failure that
|
||||
needs further investigation.
|
||||
|
||||
Also note that to create a Docker image we ideally build on both AMD64 and
|
||||
ARM64 to create a multi-arch image. Both configs should therefore be triggered
|
||||
by the same event. However, as the script outputs individual configs, the
|
||||
workflow must be able to run both builds separately and then merge the
|
||||
single-arch images afterward into a multi-arch image.
|
||||
|
||||
### MacOS
|
||||
|
||||
We support building on macOS, which uses the Apple Clang compiler and the ARM64
|
||||
architecture. We use default settings for all builds, and don't publish any
|
||||
packages or images.
|
||||
|
||||
### Windows
|
||||
|
||||
We also support building on Windows, which uses the MSVC compiler and the AMD64
|
||||
architecture. While we could build on ARM64, we have not yet found a suitable
|
||||
cloud machine to use as a GitHub runner. We use default settings for all builds,
|
||||
and don't publish any packages or images.
|
||||
|
||||
## Triggers
|
||||
|
||||
We have four triggers that can cause the workflow to run:
|
||||
|
||||
- `commit`: A commit is pushed to a branch for which a pull request is open.
|
||||
- `merge`: A pull request is merged.
|
||||
- `label`: A label is added to a pull request.
|
||||
- `schedule`: The workflow is run on a scheduled basis.
|
||||
|
||||
The `label` trigger is currently not used, but it is reserved for future use.
|
||||
|
||||
The `schedule` trigger is used to run the workflow each weekday, and is also
|
||||
used for ad hoc testing via the `workflow_dispatch` event.
|
||||
|
||||
### Dependencies
|
||||
|
||||
The pipeline that is run for the `schedule` trigger will recompile and upload
|
||||
all Conan packages to the remote for each configuration that is enabled. In
|
||||
case any dependencies were added or updated in a recently merged PR, they will
|
||||
then be available in the remote for the following pipeline runs. It is therefore
|
||||
important that all configurations that are enabled for the `commit`, `merge`,
|
||||
and `label` triggers are also enabled for the `schedule` trigger. We run
|
||||
additional configurations in the `schedule` trigger that are not run for the
|
||||
other triggers, to get extra confidence that the codebase can compile and run on
|
||||
all supported platforms.
|
||||
|
||||
#### Caveats
|
||||
|
||||
There is some nuance here in that certain options affect the compilation of the
|
||||
dependencies, while others do not. This means that that same options need to be
|
||||
enabled for the `schedule` trigger as for the other triggers to ensure any
|
||||
dependency changes get cached in the Conan remote.
|
||||
|
||||
- Build mode (`unity`): Does not affect the dependencies.
|
||||
- Build option (`coverage`, `voidstar`): Does not affect the dependencies.
|
||||
- Build option (`sanitizer asan`, `sanitizer tsan`): Affects the dependencies.
|
||||
- Build type (`debug`, `release`): Affects the dependencies.
|
||||
- Build type (`publish`): Same effect as `release` on the dependencies.
|
||||
- Test option (`reference fee`): Does not affect the dependencies.
|
||||
- Publish option (`package`, `image`): Does not affect the dependencies.
|
||||
|
||||
## Usage
|
||||
|
||||
Our GitHub CI pipeline uses the `generate.py` script to generate the matrix for
|
||||
the current workflow invocation. Naturally, the script can be run locally to
|
||||
generate the matrix for testing purposes, e.g.:
|
||||
|
||||
```bash
|
||||
python3 generate.py --platform=linux --trigger=commit
|
||||
```
|
||||
|
||||
If you want to pretty-print the output, you can pipe it to `jq` after stripping
|
||||
off the `matrix=` prefix, e.g.:
|
||||
|
||||
```bash
|
||||
python3 generate.py --platform=linux --trigger=commit | cut -d= -f2- | jq
|
||||
```
|
||||
0
.github/scripts/strategy-matrix/__init__.py
vendored
Normal file
0
.github/scripts/strategy-matrix/__init__.py
vendored
Normal file
490
.github/scripts/strategy-matrix/generate.py
vendored
490
.github/scripts/strategy-matrix/generate.py
vendored
@@ -1,339 +1,211 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import dataclasses
|
||||
import itertools
|
||||
import json
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from collections.abc import Iterator
|
||||
|
||||
THIS_DIR = Path(__file__).parent.resolve()
|
||||
import linux
|
||||
import macos
|
||||
import windows
|
||||
from helpers.defs import *
|
||||
from helpers.enums import *
|
||||
from helpers.funcs import *
|
||||
from helpers.unique import *
|
||||
|
||||
# The GitHub runner tags to use for the different architectures.
|
||||
RUNNER_TAGS = {
|
||||
Arch.LINUX_AMD64: ["self-hosted", "Linux", "X64", "heavy"],
|
||||
Arch.LINUX_ARM64: ["self-hosted", "Linux", "ARM64", "heavy-arm64"],
|
||||
Arch.MACOS_ARM64: ["self-hosted", "macOS", "ARM64", "mac-runner-m1"],
|
||||
Arch.WINDOWS_AMD64: ["self-hosted", "Windows", "devbox"],
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class Config:
|
||||
architecture: list[dict]
|
||||
os: list[dict]
|
||||
build_type: list[str]
|
||||
cmake_args: list[str]
|
||||
def generate_configs(distros: list[Distro], trigger: Trigger) -> list[Config]:
|
||||
"""Generate a strategy matrix for GitHub Actions CI.
|
||||
|
||||
Args:
|
||||
distros: The distros to generate the matrix for.
|
||||
trigger: The trigger that caused the workflow to run.
|
||||
|
||||
Returns:
|
||||
list[Config]: The generated configurations.
|
||||
|
||||
Raises:
|
||||
ValueError: If any of the required fields are empty or invalid.
|
||||
TypeError: If any of the required fields are of the wrong type.
|
||||
|
||||
"""
|
||||
|
||||
configs = []
|
||||
for distro in distros:
|
||||
for config in generate_config_for_distro(distro, trigger):
|
||||
configs.append(config)
|
||||
|
||||
if not is_unique(configs):
|
||||
raise ValueError("configs must be a list of unique Config")
|
||||
|
||||
return configs
|
||||
|
||||
|
||||
"""
|
||||
Generate a strategy matrix for GitHub Actions CI.
|
||||
def generate_config_for_distro(distro: Distro, trigger: Trigger) -> Iterator[Config]:
|
||||
"""Generate a strategy matrix for a specific distro.
|
||||
|
||||
On each PR commit we will build a selection of Debian, RHEL, Ubuntu, MacOS, and
|
||||
Windows configurations, while upon merge into the develop, release, or master
|
||||
branches, we will build all configurations, and test most of them.
|
||||
Args:
|
||||
distro: The distro to generate the matrix for.
|
||||
trigger: The trigger that caused the workflow to run.
|
||||
|
||||
We will further set additional CMake arguments as follows:
|
||||
- All builds will have the `tests`, `werr`, and `xrpld` options.
|
||||
- All builds will have the `wextra` option except for GCC 12 and Clang 16.
|
||||
- All release builds will have the `assert` option.
|
||||
- Certain Debian Bookworm configurations will change the reference fee, enable
|
||||
codecov, and enable voidstar in PRs.
|
||||
"""
|
||||
Yields:
|
||||
Config: The next configuration to build.
|
||||
|
||||
Raises:
|
||||
ValueError: If any of the required fields are empty or invalid.
|
||||
TypeError: If any of the required fields are of the wrong type.
|
||||
|
||||
def generate_strategy_matrix(all: bool, config: Config) -> list:
|
||||
configurations = []
|
||||
for architecture, os, build_type, cmake_args in itertools.product(
|
||||
config.architecture, config.os, config.build_type, config.cmake_args
|
||||
):
|
||||
# The default CMake target is 'all' for Linux and MacOS and 'install'
|
||||
# for Windows, but it can get overridden for certain configurations.
|
||||
cmake_target = "install" if os["distro_name"] == "windows" else "all"
|
||||
|
||||
# We build and test all configurations by default, except for Windows in
|
||||
# Debug, because it is too slow, as well as when code coverage is
|
||||
# enabled as that mode already runs the tests.
|
||||
build_only = False
|
||||
if os["distro_name"] == "windows" and build_type == "Debug":
|
||||
build_only = True
|
||||
|
||||
# Only generate a subset of configurations in PRs.
|
||||
if not all:
|
||||
# Debian:
|
||||
# - Bookworm using GCC 13: Release and Unity on linux/amd64, set
|
||||
# the reference fee to 500.
|
||||
# - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
|
||||
# code coverage (which will be done below).
|
||||
# - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
|
||||
# enable voidstar.
|
||||
# - Bookworm using Clang 17: Release and no Unity on linux/amd64,
|
||||
# set the reference fee to 1000.
|
||||
# - Bookworm using Clang 20: Debug and Unity on linux/amd64.
|
||||
if os["distro_name"] == "debian":
|
||||
skip = True
|
||||
if os["distro_version"] == "bookworm":
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13"
|
||||
and build_type == "Release"
|
||||
and "-Dunity=ON" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}"
|
||||
skip = False
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
|
||||
and build_type == "Debug"
|
||||
and "-Dunity=OFF" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
skip = False
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16"
|
||||
and build_type == "Debug"
|
||||
and "-Dunity=OFF" in cmake_args
|
||||
and architecture["platform"] == "linux/arm64"
|
||||
):
|
||||
cmake_args = f"-Dvoidstar=ON {cmake_args}"
|
||||
skip = False
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17"
|
||||
and build_type == "Release"
|
||||
and "-Dunity=ON" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}"
|
||||
skip = False
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
|
||||
and build_type == "Debug"
|
||||
and "-Dunity=ON" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
skip = False
|
||||
if skip:
|
||||
continue
|
||||
|
||||
# RHEL:
|
||||
# - 9 using GCC 12: Debug and Unity on linux/amd64.
|
||||
# - 10 using Clang: Release and no Unity on linux/amd64.
|
||||
if os["distro_name"] == "rhel":
|
||||
skip = True
|
||||
if os["distro_version"] == "9":
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
|
||||
and build_type == "Debug"
|
||||
and "-Dunity=ON" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
skip = False
|
||||
elif os["distro_version"] == "10":
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any"
|
||||
and build_type == "Release"
|
||||
and "-Dunity=OFF" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
skip = False
|
||||
if skip:
|
||||
continue
|
||||
|
||||
# Ubuntu:
|
||||
# - Jammy using GCC 12: Debug and no Unity on linux/arm64.
|
||||
# - Noble using GCC 14: Release and Unity on linux/amd64.
|
||||
# - Noble using Clang 18: Debug and no Unity on linux/amd64.
|
||||
# - Noble using Clang 19: Release and Unity on linux/arm64.
|
||||
if os["distro_name"] == "ubuntu":
|
||||
skip = True
|
||||
if os["distro_version"] == "jammy":
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
|
||||
and build_type == "Debug"
|
||||
and "-Dunity=OFF" in cmake_args
|
||||
and architecture["platform"] == "linux/arm64"
|
||||
):
|
||||
skip = False
|
||||
elif os["distro_version"] == "noble":
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14"
|
||||
and build_type == "Release"
|
||||
and "-Dunity=ON" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
skip = False
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18"
|
||||
and build_type == "Debug"
|
||||
and "-Dunity=OFF" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
skip = False
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19"
|
||||
and build_type == "Release"
|
||||
and "-Dunity=ON" in cmake_args
|
||||
and architecture["platform"] == "linux/arm64"
|
||||
):
|
||||
skip = False
|
||||
if skip:
|
||||
continue
|
||||
|
||||
# MacOS:
|
||||
# - Debug and no Unity on macos/arm64.
|
||||
if os["distro_name"] == "macos" and not (
|
||||
build_type == "Debug"
|
||||
and "-Dunity=OFF" in cmake_args
|
||||
and architecture["platform"] == "macos/arm64"
|
||||
):
|
||||
continue
|
||||
|
||||
# Windows:
|
||||
# - Release and Unity on windows/amd64.
|
||||
if os["distro_name"] == "windows" and not (
|
||||
build_type == "Release"
|
||||
and "-Dunity=ON" in cmake_args
|
||||
and architecture["platform"] == "windows/amd64"
|
||||
):
|
||||
continue
|
||||
|
||||
# Additional CMake arguments.
|
||||
cmake_args = f"{cmake_args} -Dtests=ON -Dwerr=ON -Dxrpld=ON"
|
||||
if not f"{os['compiler_name']}-{os['compiler_version']}" in [
|
||||
"gcc-12",
|
||||
"clang-16",
|
||||
]:
|
||||
cmake_args = f"{cmake_args} -Dwextra=ON"
|
||||
if build_type == "Release":
|
||||
cmake_args = f"{cmake_args} -Dassert=ON"
|
||||
|
||||
# We skip all RHEL on arm64 due to a build failure that needs further
|
||||
# investigation.
|
||||
if os["distro_name"] == "rhel" and architecture["platform"] == "linux/arm64":
|
||||
"""
|
||||
for spec in distro.specs:
|
||||
if trigger not in spec.triggers:
|
||||
continue
|
||||
|
||||
# We skip all clang 20+ on arm64 due to Boost build error.
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}"
|
||||
in ["clang-20", "clang-21"]
|
||||
and architecture["platform"] == "linux/arm64"
|
||||
):
|
||||
continue
|
||||
|
||||
# Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
|
||||
# Unity on linux/amd64
|
||||
if (
|
||||
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
|
||||
and build_type == "Debug"
|
||||
and "-Dunity=OFF" in cmake_args
|
||||
and architecture["platform"] == "linux/amd64"
|
||||
):
|
||||
cmake_args = f"-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}"
|
||||
|
||||
# Generate a unique name for the configuration, e.g. macos-arm64-debug
|
||||
# or debian-bookworm-gcc-12-amd64-release-unity.
|
||||
config_name = os["distro_name"]
|
||||
if (n := os["distro_version"]) != "":
|
||||
config_name += f"-{n}"
|
||||
if (n := os["compiler_name"]) != "":
|
||||
config_name += f"-{n}"
|
||||
if (n := os["compiler_version"]) != "":
|
||||
config_name += f"-{n}"
|
||||
config_name += (
|
||||
f"-{architecture['platform'][architecture['platform'].find('/')+1:]}"
|
||||
os_name = distro.os_name
|
||||
os_version = distro.os_version
|
||||
compiler_name = distro.compiler_name
|
||||
compiler_version = distro.compiler_version
|
||||
image_sha = distro.image_sha
|
||||
yield from generate_config_for_distro_spec(
|
||||
os_name,
|
||||
os_version,
|
||||
compiler_name,
|
||||
compiler_version,
|
||||
image_sha,
|
||||
spec,
|
||||
trigger,
|
||||
)
|
||||
config_name += f"-{build_type.lower()}"
|
||||
if "-Dcoverage=ON" in cmake_args:
|
||||
config_name += "-coverage"
|
||||
if "-Dunity=ON" in cmake_args:
|
||||
config_name += "-unity"
|
||||
|
||||
# Add the configuration to the list, with the most unique fields first,
|
||||
# so that they are easier to identify in the GitHub Actions UI, as long
|
||||
# names get truncated.
|
||||
# Add Address and Thread (both coupled with UB) sanitizers for specific bookworm distros.
|
||||
# GCC-Asan rippled-embedded tests are failing because of https://github.com/google/sanitizers/issues/856
|
||||
if (
|
||||
os["distro_version"] == "bookworm"
|
||||
and f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
|
||||
):
|
||||
# Add ASAN + UBSAN configuration.
|
||||
configurations.append(
|
||||
{
|
||||
"config_name": config_name + "-asan-ubsan",
|
||||
"cmake_args": cmake_args,
|
||||
"cmake_target": cmake_target,
|
||||
"build_only": build_only,
|
||||
"build_type": build_type,
|
||||
"os": os,
|
||||
"architecture": architecture,
|
||||
"sanitizers": "address,undefinedbehavior",
|
||||
}
|
||||
)
|
||||
# TSAN is deactivated due to seg faults with latest compilers.
|
||||
activate_tsan = False
|
||||
if activate_tsan:
|
||||
configurations.append(
|
||||
{
|
||||
"config_name": config_name + "-tsan-ubsan",
|
||||
"cmake_args": cmake_args,
|
||||
"cmake_target": cmake_target,
|
||||
"build_only": build_only,
|
||||
"build_type": build_type,
|
||||
"os": os,
|
||||
"architecture": architecture,
|
||||
"sanitizers": "thread,undefinedbehavior",
|
||||
}
|
||||
)
|
||||
else:
|
||||
configurations.append(
|
||||
{
|
||||
"config_name": config_name,
|
||||
"cmake_args": cmake_args,
|
||||
"cmake_target": cmake_target,
|
||||
"build_only": build_only,
|
||||
"build_type": build_type,
|
||||
"os": os,
|
||||
"architecture": architecture,
|
||||
"sanitizers": "",
|
||||
}
|
||||
)
|
||||
|
||||
return configurations
|
||||
|
||||
|
||||
def read_config(file: Path) -> Config:
|
||||
config = json.loads(file.read_text())
|
||||
if (
|
||||
config["architecture"] is None
|
||||
or config["os"] is None
|
||||
or config["build_type"] is None
|
||||
or config["cmake_args"] is None
|
||||
def generate_config_for_distro_spec(
|
||||
os_name: str,
|
||||
os_version: str,
|
||||
compiler_name: str,
|
||||
compiler_version: str,
|
||||
image_sha: str,
|
||||
spec: Spec,
|
||||
trigger: Trigger,
|
||||
) -> Iterator[Config]:
|
||||
"""Generate a strategy matrix for a specific distro and spec.
|
||||
|
||||
Args:
|
||||
os_name: The OS name.
|
||||
os_version: The OS version.
|
||||
compiler_name: The compiler name.
|
||||
compiler_version: The compiler version.
|
||||
image_sha: The image SHA.
|
||||
spec: The spec to generate the matrix for.
|
||||
trigger: The trigger that caused the workflow to run.
|
||||
|
||||
Yields:
|
||||
Config: The next configuration to build.
|
||||
|
||||
"""
|
||||
|
||||
for trigger_, arch, build_mode, build_type in itertools.product(
|
||||
spec.triggers, spec.archs, spec.build_modes, spec.build_types
|
||||
):
|
||||
raise Exception("Invalid configuration file.")
|
||||
if trigger_ != trigger:
|
||||
continue
|
||||
|
||||
return Config(**config)
|
||||
build_option = spec.build_option
|
||||
test_option = spec.test_option
|
||||
publish_option = spec.publish_option
|
||||
|
||||
# Determine the configuration name.
|
||||
config_name = generate_config_name(
|
||||
os_name,
|
||||
os_version,
|
||||
compiler_name,
|
||||
compiler_version,
|
||||
arch,
|
||||
build_type,
|
||||
build_mode,
|
||||
build_option,
|
||||
)
|
||||
|
||||
# Determine the CMake arguments.
|
||||
cmake_args = generate_cmake_args(
|
||||
compiler_name,
|
||||
compiler_version,
|
||||
build_type,
|
||||
build_mode,
|
||||
build_option,
|
||||
test_option,
|
||||
)
|
||||
|
||||
# Determine the CMake target.
|
||||
cmake_target = generate_cmake_target(os_name, build_type)
|
||||
|
||||
# Determine whether to enable running tests, and to create a package
|
||||
# and/or image.
|
||||
enable_tests, enable_package, enable_image = generate_enable_options(
|
||||
os_name, build_type, publish_option
|
||||
)
|
||||
|
||||
# Determine the image to run in, if applicable.
|
||||
image = generate_image_name(
|
||||
os_name,
|
||||
os_version,
|
||||
compiler_name,
|
||||
compiler_version,
|
||||
image_sha,
|
||||
)
|
||||
|
||||
# Generate the configuration.
|
||||
yield Config(
|
||||
config_name=config_name,
|
||||
cmake_args=cmake_args,
|
||||
cmake_target=cmake_target,
|
||||
build_type=("Debug" if build_type == BuildType.DEBUG else "Release"),
|
||||
enable_tests=enable_tests,
|
||||
enable_package=enable_package,
|
||||
enable_image=enable_image,
|
||||
runs_on=RUNNER_TAGS[arch],
|
||||
image=image,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
"-a",
|
||||
"--all",
|
||||
help="Set to generate all configurations (generally used when merging a PR) or leave unset to generate a subset of configurations (generally used when committing to a PR).",
|
||||
action="store_true",
|
||||
"--platform",
|
||||
"-p",
|
||||
required=False,
|
||||
type=Platform,
|
||||
choices=list(Platform),
|
||||
help="The platform to run on.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-c",
|
||||
"--config",
|
||||
help="Path to the JSON file containing the strategy matrix configurations.",
|
||||
required=False,
|
||||
type=Path,
|
||||
"--trigger",
|
||||
"-t",
|
||||
required=True,
|
||||
type=Trigger,
|
||||
choices=list(Trigger),
|
||||
help="The trigger that caused the workflow to run.",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
matrix = []
|
||||
if args.config is None or args.config == "":
|
||||
matrix += generate_strategy_matrix(
|
||||
args.all, read_config(THIS_DIR / "linux.json")
|
||||
)
|
||||
matrix += generate_strategy_matrix(
|
||||
args.all, read_config(THIS_DIR / "macos.json")
|
||||
)
|
||||
matrix += generate_strategy_matrix(
|
||||
args.all, read_config(THIS_DIR / "windows.json")
|
||||
)
|
||||
else:
|
||||
matrix += generate_strategy_matrix(args.all, read_config(args.config))
|
||||
# Collect the distros to generate configs for.
|
||||
distros = []
|
||||
if args.platform in [None, Platform.LINUX]:
|
||||
distros += linux.DEBIAN_DISTROS + linux.RHEL_DISTROS + linux.UBUNTU_DISTROS
|
||||
if args.platform in [None, Platform.MACOS]:
|
||||
distros += macos.DISTROS
|
||||
if args.platform in [None, Platform.WINDOWS]:
|
||||
distros += windows.DISTROS
|
||||
|
||||
# Generate the strategy matrix.
|
||||
print(f"matrix={json.dumps({'include': matrix})}")
|
||||
# Generate the configs.
|
||||
configs = generate_configs(distros, args.trigger)
|
||||
|
||||
# Convert the configs into the format expected by GitHub Actions.
|
||||
include = []
|
||||
for config in configs:
|
||||
include.append(dataclasses.asdict(config))
|
||||
print(f"matrix={json.dumps({'include': include})}")
|
||||
|
||||
466
.github/scripts/strategy-matrix/generate_test.py
vendored
Normal file
466
.github/scripts/strategy-matrix/generate_test.py
vendored
Normal file
@@ -0,0 +1,466 @@
|
||||
import pytest
|
||||
|
||||
from generate import *
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def macos_distro():
|
||||
return Distro(
|
||||
os_name="macos",
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.MACOS_ARM64],
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_option=BuildOption.COVERAGE,
|
||||
build_types=[BuildType.RELEASE],
|
||||
publish_option=PublishOption.NONE,
|
||||
test_option=TestOption.NONE,
|
||||
triggers=[Trigger.COMMIT],
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def windows_distro():
|
||||
return Distro(
|
||||
os_name="windows",
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.WINDOWS_AMD64],
|
||||
build_modes=[BuildMode.UNITY_ON],
|
||||
build_option=BuildOption.SANITIZE_ASAN,
|
||||
build_types=[BuildType.DEBUG],
|
||||
publish_option=PublishOption.IMAGE_ONLY,
|
||||
test_option=TestOption.REFERENCE_FEE_500,
|
||||
triggers=[Trigger.COMMIT, Trigger.SCHEDULE],
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def linux_distro():
|
||||
return Distro(
|
||||
os_name="debian",
|
||||
os_version="bookworm",
|
||||
compiler_name="clang",
|
||||
compiler_version="16",
|
||||
image_sha="a1b2c3d4",
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_option=BuildOption.SANITIZE_TSAN,
|
||||
build_types=[BuildType.DEBUG],
|
||||
publish_option=PublishOption.NONE,
|
||||
test_option=TestOption.NONE,
|
||||
triggers=[Trigger.LABEL],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64, Arch.LINUX_ARM64],
|
||||
build_modes=[BuildMode.UNITY_OFF, BuildMode.UNITY_ON],
|
||||
build_option=BuildOption.VOIDSTAR,
|
||||
build_types=[BuildType.PUBLISH],
|
||||
publish_option=PublishOption.PACKAGE_AND_IMAGE,
|
||||
test_option=TestOption.NONE,
|
||||
triggers=[Trigger.COMMIT, Trigger.LABEL],
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def test_macos_generate_config_for_distro_spec_matches_trigger(macos_distro):
|
||||
trigger = Trigger.COMMIT
|
||||
|
||||
distro = macos_distro
|
||||
result = list(
|
||||
generate_config_for_distro_spec(
|
||||
distro.os_name,
|
||||
distro.os_version,
|
||||
distro.compiler_name,
|
||||
distro.compiler_version,
|
||||
distro.image_sha,
|
||||
distro.specs[0],
|
||||
trigger,
|
||||
)
|
||||
)
|
||||
assert result == [
|
||||
Config(
|
||||
config_name="macos-coverage-release-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0",
|
||||
cmake_target="all",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=False,
|
||||
enable_image=False,
|
||||
runs_on=["self-hosted", "macOS", "ARM64", "mac-runner-m1"],
|
||||
image=None,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def test_macos_generate_config_for_distro_spec_no_match_trigger(macos_distro):
|
||||
trigger = Trigger.MERGE
|
||||
|
||||
distro = macos_distro
|
||||
result = list(
|
||||
generate_config_for_distro_spec(
|
||||
distro.os_name,
|
||||
distro.os_version,
|
||||
distro.compiler_name,
|
||||
distro.compiler_version,
|
||||
distro.image_sha,
|
||||
distro.specs[0],
|
||||
trigger,
|
||||
)
|
||||
)
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_macos_generate_config_for_distro_matches_trigger(macos_distro):
|
||||
trigger = Trigger.COMMIT
|
||||
|
||||
distro = macos_distro
|
||||
result = list(generate_config_for_distro(distro, trigger))
|
||||
assert result == [
|
||||
Config(
|
||||
config_name="macos-coverage-release-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0",
|
||||
cmake_target="all",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=False,
|
||||
enable_image=False,
|
||||
runs_on=["self-hosted", "macOS", "ARM64", "mac-runner-m1"],
|
||||
image=None,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def test_macos_generate_config_for_distro_no_match_trigger(macos_distro):
|
||||
trigger = Trigger.MERGE
|
||||
|
||||
distro = macos_distro
|
||||
result = list(generate_config_for_distro(distro, trigger))
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_windows_generate_config_for_distro_spec_matches_trigger(
|
||||
windows_distro,
|
||||
):
|
||||
trigger = Trigger.COMMIT
|
||||
|
||||
distro = windows_distro
|
||||
result = list(
|
||||
generate_config_for_distro_spec(
|
||||
distro.os_name,
|
||||
distro.os_version,
|
||||
distro.compiler_name,
|
||||
distro.compiler_version,
|
||||
distro.image_sha,
|
||||
distro.specs[0],
|
||||
trigger,
|
||||
)
|
||||
)
|
||||
assert result == [
|
||||
Config(
|
||||
config_name="windows-asan-debug-unity-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dunity=ON -DUNIT_TEST_REFERENCE_FEE=500",
|
||||
cmake_target="install",
|
||||
build_type="Debug",
|
||||
enable_tests=False,
|
||||
enable_package=False,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Windows", "devbox"],
|
||||
image=None,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def test_windows_generate_config_for_distro_spec_no_match_trigger(
|
||||
windows_distro,
|
||||
):
|
||||
trigger = Trigger.MERGE
|
||||
|
||||
distro = windows_distro
|
||||
result = list(
|
||||
generate_config_for_distro_spec(
|
||||
distro.os_name,
|
||||
distro.os_version,
|
||||
distro.compiler_name,
|
||||
distro.compiler_version,
|
||||
distro.image_sha,
|
||||
distro.specs[0],
|
||||
trigger,
|
||||
)
|
||||
)
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_windows_generate_config_for_distro_matches_trigger(
|
||||
windows_distro,
|
||||
):
|
||||
trigger = Trigger.COMMIT
|
||||
|
||||
distro = windows_distro
|
||||
result = list(generate_config_for_distro(distro, trigger))
|
||||
assert result == [
|
||||
Config(
|
||||
config_name="windows-asan-debug-unity-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dunity=ON -DUNIT_TEST_REFERENCE_FEE=500",
|
||||
cmake_target="install",
|
||||
build_type="Debug",
|
||||
enable_tests=False,
|
||||
enable_package=False,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Windows", "devbox"],
|
||||
image=None,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def test_windows_generate_config_for_distro_no_match_trigger(
|
||||
windows_distro,
|
||||
):
|
||||
trigger = Trigger.MERGE
|
||||
|
||||
distro = windows_distro
|
||||
result = list(generate_config_for_distro(distro, trigger))
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_linux_generate_config_for_distro_spec_matches_trigger(linux_distro):
|
||||
trigger = Trigger.LABEL
|
||||
|
||||
distro = linux_distro
|
||||
result = list(
|
||||
generate_config_for_distro_spec(
|
||||
distro.os_name,
|
||||
distro.os_version,
|
||||
distro.compiler_name,
|
||||
distro.compiler_version,
|
||||
distro.image_sha,
|
||||
distro.specs[1],
|
||||
trigger,
|
||||
)
|
||||
)
|
||||
assert result == [
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "X64", "heavy"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-unity-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "X64", "heavy"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-unity-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_linux_generate_config_for_distro_spec_no_match_trigger(linux_distro):
|
||||
trigger = Trigger.MERGE
|
||||
|
||||
distro = linux_distro
|
||||
result = list(
|
||||
generate_config_for_distro_spec(
|
||||
distro.os_name,
|
||||
distro.os_version,
|
||||
distro.compiler_name,
|
||||
distro.compiler_version,
|
||||
distro.image_sha,
|
||||
distro.specs[1],
|
||||
trigger,
|
||||
)
|
||||
)
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_linux_generate_config_for_distro_matches_trigger(linux_distro):
|
||||
trigger = Trigger.LABEL
|
||||
|
||||
distro = linux_distro
|
||||
result = list(generate_config_for_distro(distro, trigger))
|
||||
assert result == [
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-tsan-debug-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON",
|
||||
cmake_target="all",
|
||||
build_type="Debug",
|
||||
enable_tests=True,
|
||||
enable_package=False,
|
||||
enable_image=False,
|
||||
runs_on=["self-hosted", "Linux", "X64", "heavy"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "X64", "heavy"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-unity-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "X64", "heavy"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-unity-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_linux_generate_config_for_distro_no_match_trigger(linux_distro):
|
||||
trigger = Trigger.MERGE
|
||||
|
||||
distro = linux_distro
|
||||
result = list(generate_config_for_distro(distro, trigger))
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_generate_configs(macos_distro, windows_distro, linux_distro):
|
||||
trigger = Trigger.COMMIT
|
||||
|
||||
distros = [macos_distro, windows_distro, linux_distro]
|
||||
result = generate_configs(distros, trigger)
|
||||
assert result == [
|
||||
Config(
|
||||
config_name="macos-coverage-release-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0",
|
||||
cmake_target="all",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=False,
|
||||
enable_image=False,
|
||||
runs_on=["self-hosted", "macOS", "ARM64", "mac-runner-m1"],
|
||||
image=None,
|
||||
),
|
||||
Config(
|
||||
config_name="windows-asan-debug-unity-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dunity=ON -DUNIT_TEST_REFERENCE_FEE=500",
|
||||
cmake_target="install",
|
||||
build_type="Debug",
|
||||
enable_tests=False,
|
||||
enable_package=False,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Windows", "devbox"],
|
||||
image=None,
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "X64", "heavy"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-unity-amd64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "X64", "heavy"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
Config(
|
||||
config_name="debian-bookworm-clang-16-voidstar-publish-unity-arm64",
|
||||
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
|
||||
cmake_target="install",
|
||||
build_type="Release",
|
||||
enable_tests=True,
|
||||
enable_package=True,
|
||||
enable_image=True,
|
||||
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
|
||||
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_generate_configs_raises_on_duplicate_configs(macos_distro):
|
||||
trigger = Trigger.COMMIT
|
||||
|
||||
distros = [macos_distro, macos_distro]
|
||||
with pytest.raises(ValueError):
|
||||
generate_configs(distros, trigger)
|
||||
0
.github/scripts/strategy-matrix/helpers/__init__.py
vendored
Normal file
0
.github/scripts/strategy-matrix/helpers/__init__.py
vendored
Normal file
190
.github/scripts/strategy-matrix/helpers/defs.py
vendored
Executable file
190
.github/scripts/strategy-matrix/helpers/defs.py
vendored
Executable file
@@ -0,0 +1,190 @@
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from helpers.enums import *
|
||||
from helpers.unique import *
|
||||
|
||||
|
||||
@dataclass
|
||||
class Config:
|
||||
"""Represents a configuration to include in the strategy matrix.
|
||||
|
||||
Raises:
|
||||
ValueError: If any of the required fields are empty or invalid.
|
||||
TypeError: If any of the required fields are of the wrong type.
|
||||
"""
|
||||
|
||||
config_name: str
|
||||
cmake_args: str
|
||||
cmake_target: str
|
||||
build_type: str
|
||||
enable_tests: bool
|
||||
enable_package: bool
|
||||
enable_image: bool
|
||||
runs_on: list[str]
|
||||
image: str | None = None
|
||||
|
||||
def __post_init__(self):
|
||||
if not self.config_name:
|
||||
raise ValueError("config_name cannot be empty")
|
||||
if not isinstance(self.config_name, str):
|
||||
raise TypeError("config_name must be a string")
|
||||
|
||||
if not self.cmake_args:
|
||||
raise ValueError("cmake_args cannot be empty")
|
||||
if not isinstance(self.cmake_args, str):
|
||||
raise TypeError("cmake_args must be a string")
|
||||
|
||||
if not self.cmake_target:
|
||||
raise ValueError("cmake_target cannot be empty")
|
||||
if not isinstance(self.cmake_target, str):
|
||||
raise TypeError("cmake_target must be a string")
|
||||
if self.cmake_target not in ["all", "install"]:
|
||||
raise ValueError("cmake_target must be 'all' or 'install'")
|
||||
|
||||
if not self.build_type:
|
||||
raise ValueError("build_type cannot be empty")
|
||||
if not isinstance(self.build_type, str):
|
||||
raise TypeError("build_type must be a string")
|
||||
if self.build_type not in ["Debug", "Release"]:
|
||||
raise ValueError("build_type must be 'Debug' or 'Release'")
|
||||
|
||||
if not isinstance(self.enable_tests, bool):
|
||||
raise TypeError("enable_tests must be a boolean")
|
||||
if not isinstance(self.enable_package, bool):
|
||||
raise TypeError("enable_package must be a boolean")
|
||||
if not isinstance(self.enable_image, bool):
|
||||
raise TypeError("enable_image must be a boolean")
|
||||
|
||||
if not self.runs_on:
|
||||
raise ValueError("runs_on cannot be empty")
|
||||
if not isinstance(self.runs_on, list):
|
||||
raise TypeError("runs_on must be a list")
|
||||
if not all(isinstance(runner, str) for runner in self.runs_on):
|
||||
raise TypeError("runs_on must be a list of strings")
|
||||
if not all(self.runs_on):
|
||||
raise ValueError("runs_on must be a list of non-empty strings")
|
||||
if len(self.runs_on) != len(set(self.runs_on)):
|
||||
raise ValueError("runs_on must be a list of unique strings")
|
||||
|
||||
if self.image and not isinstance(self.image, str):
|
||||
raise TypeError("image must be a string")
|
||||
|
||||
|
||||
@dataclass
class Spec:
    """Represents a specification used by a configuration.

    Raises:
        ValueError: If any of the required fields are empty.
        TypeError: If any of the required fields are of the wrong type.
    """

    # Architectures to build for; defaults cover both Linux targets.
    archs: list[Arch] = field(
        default_factory=lambda: [Arch.LINUX_AMD64, Arch.LINUX_ARM64]
    )
    # Extra build option (coverage, sanitizers, voidstar); NONE by default.
    build_option: BuildOption = BuildOption.NONE
    # Unity and non-unity builds are both produced by default.
    build_modes: list[BuildMode] = field(
        default_factory=lambda: [BuildMode.UNITY_OFF, BuildMode.UNITY_ON]
    )
    build_types: list[BuildType] = field(
        default_factory=lambda: [BuildType.DEBUG, BuildType.RELEASE]
    )
    publish_option: PublishOption = PublishOption.NONE
    test_option: TestOption = TestOption.NONE
    # Workflow triggers for which this spec applies.
    triggers: list[Trigger] = field(
        default_factory=lambda: [Trigger.COMMIT, Trigger.MERGE, Trigger.SCHEDULE]
    )

    def __post_init__(self):
        if not self.archs:
            raise ValueError("archs cannot be empty")
        if not isinstance(self.archs, list):
            raise TypeError("archs must be a list")
        # BUG FIX: this previously checked isinstance(arch, str), which any
        # plain string satisfies because Arch is a StrEnum; check the enum
        # type itself, consistent with the sibling list validations below
        # and with the error message.
        if not all(isinstance(arch, Arch) for arch in self.archs):
            raise TypeError("archs must be a list of Arch")
        if len(self.archs) != len(set(self.archs)):
            raise ValueError("archs must be a list of unique Arch")

        if not isinstance(self.build_option, BuildOption):
            raise TypeError("build_option must be a BuildOption")

        if not self.build_modes:
            raise ValueError("build_modes cannot be empty")
        if not isinstance(self.build_modes, list):
            raise TypeError("build_modes must be a list")
        if not all(
            isinstance(build_mode, BuildMode) for build_mode in self.build_modes
        ):
            raise TypeError("build_modes must be a list of BuildMode")
        if len(self.build_modes) != len(set(self.build_modes)):
            raise ValueError("build_modes must be a list of unique BuildMode")

        if not self.build_types:
            raise ValueError("build_types cannot be empty")
        if not isinstance(self.build_types, list):
            raise TypeError("build_types must be a list")
        if not all(
            isinstance(build_type, BuildType) for build_type in self.build_types
        ):
            raise TypeError("build_types must be a list of BuildType")
        if len(self.build_types) != len(set(self.build_types)):
            raise ValueError("build_types must be a list of unique BuildType")

        if not isinstance(self.publish_option, PublishOption):
            raise TypeError("publish_option must be a PublishOption")

        if not isinstance(self.test_option, TestOption):
            raise TypeError("test_option must be a TestOption")

        if not self.triggers:
            raise ValueError("triggers cannot be empty")
        if not isinstance(self.triggers, list):
            raise TypeError("triggers must be a list")
        if not all(isinstance(trigger, Trigger) for trigger in self.triggers):
            raise TypeError("triggers must be a list of Trigger")
        if len(self.triggers) != len(set(self.triggers)):
            raise ValueError("triggers must be a list of unique Trigger")
|
||||
|
||||
|
||||
@dataclass
class Distro:
    """Represents a Linux, Windows or macOS distribution with specifications.

    Raises:
        ValueError: If any of the required fields are empty.
        TypeError: If any of the required fields are of the wrong type.
    """

    os_name: str
    os_version: str = ""
    compiler_name: str = ""
    compiler_version: str = ""
    image_sha: str = ""
    specs: list[Spec] = field(default_factory=list)

    def __post_init__(self):
        # Only the OS name is mandatory.
        if not self.os_name:
            raise ValueError("os_name cannot be empty")
        if not isinstance(self.os_name, str):
            raise TypeError("os_name must be a string")

        # The remaining descriptors are optional; their type is only
        # validated when a non-empty value was supplied.
        for attr in ("os_version", "compiler_name", "compiler_version", "image_sha"):
            value = getattr(self, attr)
            if value and not isinstance(value, str):
                raise TypeError(f"{attr} must be a string")

        if not self.specs:
            raise ValueError("specs cannot be empty")
        if not isinstance(self.specs, list):
            raise TypeError("specs must be a list")
        if any(not isinstance(spec, Spec) for spec in self.specs):
            raise TypeError("specs must be a list of Spec")
        if not is_unique(self.specs):
            raise ValueError("specs must be a list of unique Spec")
|
||||
743
.github/scripts/strategy-matrix/helpers/defs_test.py
vendored
Normal file
743
.github/scripts/strategy-matrix/helpers/defs_test.py
vendored
Normal file
@@ -0,0 +1,743 @@
|
||||
import pytest
|
||||
|
||||
from helpers.defs import *
|
||||
from helpers.enums import *
|
||||
from helpers.funcs import *
|
||||
|
||||
|
||||
# Baseline keyword-argument factories for valid objects; each test overrides
# only the field under scrutiny, which removes the copy-paste duplication of
# repeating every constructor argument in every test.


def _config_kwargs(**overrides):
    """Return kwargs for a valid Config, with overrides applied on top."""
    kwargs = dict(
        config_name="config",
        cmake_args="-Doption=ON",
        cmake_target="all",
        build_type="Debug",
        enable_tests=True,
        enable_package=False,
        enable_image=False,
        runs_on=["label"],
        image="image",
    )
    kwargs.update(overrides)
    return kwargs


def _spec_kwargs(**overrides):
    """Return kwargs for a valid Spec, with overrides applied on top."""
    kwargs = dict(
        archs=[Arch.LINUX_AMD64],
        build_option=BuildOption.NONE,
        build_modes=[BuildMode.UNITY_OFF],
        build_types=[BuildType.DEBUG],
        publish_option=PublishOption.NONE,
        test_option=TestOption.NONE,
        triggers=[Trigger.COMMIT],
    )
    kwargs.update(overrides)
    return kwargs


def _distro_kwargs(**overrides):
    """Return kwargs for a valid Distro, with overrides applied on top."""
    kwargs = dict(
        os_name="os_name",
        os_version="os_version",
        compiler_name="compiler_name",
        compiler_version="compiler_version",
        image_sha="image_sha",
        specs=[Spec()],  # Valid thanks to Spec's defaults.
    )
    kwargs.update(overrides)
    return kwargs


def test_config_valid_none_image():
    assert Config(**_config_kwargs(image=None))


def test_config_valid_empty_image():
    assert Config(
        **_config_kwargs(
            cmake_target="install", enable_tests=False, enable_package=True, image=""
        )
    )


def test_config_valid_with_image():
    assert Config(
        **_config_kwargs(
            cmake_target="install",
            build_type="Release",
            enable_tests=False,
            enable_package=True,
            enable_image=True,
        )
    )


def test_config_raises_on_empty_config_name():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(config_name=""))


def test_config_raises_on_wrong_config_name():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(config_name=123))


def test_config_raises_on_empty_cmake_args():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(cmake_args=""))


def test_config_raises_on_wrong_cmake_args():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(cmake_args=123))


def test_config_raises_on_empty_cmake_target():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(cmake_target=""))


def test_config_raises_on_invalid_cmake_target():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(cmake_target="invalid"))


def test_config_raises_on_wrong_cmake_target():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(cmake_target=123))


def test_config_raises_on_empty_build_type():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(build_type=""))


def test_config_raises_on_invalid_build_type():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(build_type="invalid"))


def test_config_raises_on_wrong_build_type():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(build_type=123))


def test_config_raises_on_wrong_enable_tests():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(enable_tests=123))


def test_config_raises_on_wrong_enable_package():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(enable_package=123))


def test_config_raises_on_wrong_enable_image():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(enable_package=True, enable_image=123))


def test_config_raises_on_none_runs_on():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(runs_on=None))


def test_config_raises_on_empty_runs_on():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(runs_on=[]))


def test_config_raises_on_invalid_runs_on():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(runs_on=[""]))


def test_config_raises_on_wrong_runs_on():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(runs_on=[123]))


def test_config_raises_on_duplicate_runs_on():
    with pytest.raises(ValueError):
        Config(**_config_kwargs(runs_on=["label", "label"]))


def test_config_raises_on_wrong_image():
    with pytest.raises(TypeError):
        Config(**_config_kwargs(image=123))


def test_spec_valid():
    assert Spec(**_spec_kwargs())


def test_spec_raises_on_none_archs():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(archs=None))


def test_spec_raises_on_empty_archs():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(archs=[]))


def test_spec_raises_on_wrong_archs():
    with pytest.raises(TypeError):
        Spec(**_spec_kwargs(archs=[123]))


def test_spec_raises_on_duplicate_archs():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(archs=[Arch.LINUX_AMD64, Arch.LINUX_AMD64]))


def test_spec_raises_on_wrong_build_option():
    with pytest.raises(TypeError):
        Spec(**_spec_kwargs(build_option=123))


def test_spec_raises_on_none_build_modes():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(build_modes=None))


def test_spec_raises_on_empty_build_modes():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(build_modes=[]))


def test_spec_raises_on_wrong_build_modes():
    with pytest.raises(TypeError):
        Spec(**_spec_kwargs(build_modes=[123]))


def test_spec_raises_on_none_build_types():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(build_types=None))


def test_spec_raises_on_empty_build_types():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(build_types=[]))


def test_spec_raises_on_wrong_build_types():
    with pytest.raises(TypeError):
        Spec(**_spec_kwargs(build_types=[123]))


def test_spec_raises_on_duplicate_build_types():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(build_types=[BuildType.DEBUG, BuildType.DEBUG]))


def test_spec_raises_on_wrong_publish_option():
    with pytest.raises(TypeError):
        Spec(**_spec_kwargs(publish_option=123))


def test_spec_raises_on_wrong_test_option():
    with pytest.raises(TypeError):
        Spec(**_spec_kwargs(test_option=123))


def test_spec_raises_on_none_triggers():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(triggers=None))


def test_spec_raises_on_empty_triggers():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(triggers=[]))


def test_spec_raises_on_wrong_triggers():
    with pytest.raises(TypeError):
        Spec(**_spec_kwargs(triggers=[123]))


def test_spec_raises_on_duplicate_triggers():
    with pytest.raises(ValueError):
        Spec(**_spec_kwargs(triggers=[Trigger.COMMIT, Trigger.COMMIT]))


def test_distro_valid_none_image_sha():
    assert Distro(**_distro_kwargs(image_sha=None))


def test_distro_valid_empty_os_compiler_image_sha():
    assert Distro(
        **_distro_kwargs(
            os_version="", compiler_name="", compiler_version="", image_sha=""
        )
    )


def test_distro_valid_with_image():
    assert Distro(**_distro_kwargs())


def test_distro_raises_on_empty_os_name():
    with pytest.raises(ValueError):
        Distro(**_distro_kwargs(os_name=""))


def test_distro_raises_on_wrong_os_name():
    with pytest.raises(TypeError):
        Distro(**_distro_kwargs(os_name=123))


def test_distro_raises_on_wrong_os_version():
    with pytest.raises(TypeError):
        Distro(**_distro_kwargs(os_version=123))


def test_distro_raises_on_wrong_compiler_name():
    with pytest.raises(TypeError):
        Distro(**_distro_kwargs(compiler_name=123))


def test_distro_raises_on_wrong_compiler_version():
    with pytest.raises(TypeError):
        Distro(**_distro_kwargs(compiler_version=123))


def test_distro_raises_on_wrong_image_sha():
    with pytest.raises(TypeError):
        Distro(**_distro_kwargs(image_sha=123))


def test_distro_raises_on_none_specs():
    with pytest.raises(ValueError):
        Distro(**_distro_kwargs(specs=None))


def test_distro_raises_on_empty_specs():
    with pytest.raises(ValueError):
        Distro(**_distro_kwargs(specs=[]))


def test_distro_raises_on_invalid_specs():
    with pytest.raises(ValueError):
        Distro(**_distro_kwargs(specs=[Spec(triggers=[])]))


def test_distro_raises_on_duplicate_specs():
    with pytest.raises(ValueError):
        Distro(**_distro_kwargs(specs=[Spec(), Spec()]))


def test_distro_raises_on_wrong_specs():
    with pytest.raises(TypeError):
        Distro(**_distro_kwargs(specs=[123]))
|
||||
75
.github/scripts/strategy-matrix/helpers/enums.py
vendored
Executable file
75
.github/scripts/strategy-matrix/helpers/enums.py
vendored
Executable file
@@ -0,0 +1,75 @@
|
||||
from enum import StrEnum, auto
|
||||
|
||||
|
||||
class Arch(StrEnum):
|
||||
"""Represents architectures to build for."""
|
||||
|
||||
LINUX_AMD64 = "linux/amd64"
|
||||
LINUX_ARM64 = "linux/arm64"
|
||||
MACOS_ARM64 = "macos/arm64"
|
||||
WINDOWS_AMD64 = "windows/amd64"
|
||||
|
||||
|
||||
class BuildMode(StrEnum):
|
||||
"""Represents whether to perform a unity or non-unity build."""
|
||||
|
||||
UNITY_OFF = auto()
|
||||
UNITY_ON = auto()
|
||||
|
||||
|
||||
class BuildOption(StrEnum):
|
||||
"""Represents build options to enable."""
|
||||
|
||||
NONE = auto()
|
||||
COVERAGE = auto()
|
||||
SANITIZE_ASAN = (
|
||||
auto()
|
||||
) # Address Sanitizer, also includes Undefined Behavior Sanitizer.
|
||||
SANITIZE_TSAN = (
|
||||
auto()
|
||||
) # Thread Sanitizer, also includes Undefined Behavior Sanitizer.
|
||||
VOIDSTAR = auto()
|
||||
|
||||
|
||||
class BuildType(StrEnum):
|
||||
"""Represents the build type to use."""
|
||||
|
||||
DEBUG = auto()
|
||||
RELEASE = auto()
|
||||
PUBLISH = auto() # Release build without assertions.
|
||||
|
||||
|
||||
class PublishOption(StrEnum):
|
||||
"""Represents whether to publish a package, an image, or both."""
|
||||
|
||||
NONE = auto()
|
||||
PACKAGE_ONLY = auto()
|
||||
IMAGE_ONLY = auto()
|
||||
PACKAGE_AND_IMAGE = auto()
|
||||
|
||||
|
||||
class TestOption(StrEnum):
|
||||
"""Represents test options to enable, specifically the reference fee to use."""
|
||||
|
||||
__test__ = False # Tell pytest to not consider this as a test class.
|
||||
|
||||
NONE = "" # Use the default reference fee of 10.
|
||||
REFERENCE_FEE_500 = "500"
|
||||
REFERENCE_FEE_1000 = "1000"
|
||||
|
||||
|
||||
class Platform(StrEnum):
|
||||
"""Represents the platform to use."""
|
||||
|
||||
LINUX = "linux"
|
||||
MACOS = "macos"
|
||||
WINDOWS = "windows"
|
||||
|
||||
|
||||
class Trigger(StrEnum):
|
||||
"""Represents the trigger that caused the workflow to run."""
|
||||
|
||||
COMMIT = "commit"
|
||||
LABEL = "label"
|
||||
MERGE = "merge"
|
||||
SCHEDULE = "schedule"
|
||||
235
.github/scripts/strategy-matrix/helpers/funcs.py
vendored
Executable file
235
.github/scripts/strategy-matrix/helpers/funcs.py
vendored
Executable file
@@ -0,0 +1,235 @@
|
||||
from helpers.defs import *
|
||||
from helpers.enums import *
|
||||
|
||||
|
||||
def generate_config_name(
    os_name: str,
    os_version: str | None,
    compiler_name: str | None,
    compiler_version: str | None,
    arch: Arch,
    build_type: BuildType,
    build_mode: BuildMode,
    build_option: BuildOption,
) -> str:
    """Create a configuration name based on the distro details and build
    attributes.

    The configuration name is used as the display name in the GitHub Actions
    UI, and since GitHub truncates long names we have to make sure the most
    important information is at the beginning of the name.

    Args:
        os_name (str): The OS name.
        os_version (str): The OS version.
        compiler_name (str): The compiler name.
        compiler_version (str): The compiler version.
        arch (Arch): The architecture.
        build_type (BuildType): The build type.
        build_mode (BuildMode): The build mode.
        build_option (BuildOption): The build option.

    Returns:
        str: The configuration name.

    Raises:
        ValueError: If the OS name is empty.
    """

    if not os_name:
        raise ValueError("os_name cannot be empty")

    # Collect the hyphen-separated name components in order of importance.
    parts = [os_name]
    for detail in (os_version, compiler_name, compiler_version):
        if detail:
            parts.append(f"{detail}")

    if build_option == BuildOption.COVERAGE:
        parts.append("coverage")
    elif build_option == BuildOption.VOIDSTAR:
        parts.append("voidstar")
    elif build_option == BuildOption.SANITIZE_ASAN:
        parts.append("asan")
    elif build_option == BuildOption.SANITIZE_TSAN:
        parts.append("tsan")

    if build_type == BuildType.DEBUG:
        parts.append("debug")
    elif build_type == BuildType.RELEASE:
        parts.append("release")
    elif build_type == BuildType.PUBLISH:
        parts.append("publish")

    if build_mode == BuildMode.UNITY_ON:
        parts.append("unity")

    # The architecture value looks like "linux/amd64"; keep only the CPU part.
    parts.append(arch.value.split("/")[1])

    return "-".join(parts)
|
||||
|
||||
|
||||
def generate_cmake_args(
    compiler_name: str | None,
    compiler_version: str | None,
    build_type: BuildType,
    build_mode: BuildMode,
    build_option: BuildOption,
    test_option: TestOption,
) -> str:
    """Create the CMake arguments based on the build type and enabled build
    options.

    - All builds will have the `tests`, `werr`, and `xrpld` options.
    - All builds will have the `wextra` option except for GCC 12 and Clang 16.
    - All release builds will have the `assert` option.
    - Set the unity option if specified.
    - Set the coverage option if specified.
    - Set the voidstar option if specified.
    - Set the reference fee if specified.

    Args:
        compiler_name (str): The compiler name.
        compiler_version (str): The compiler version.
        build_type (BuildType): The build type.
        build_mode (BuildMode): The build mode.
        build_option (BuildOption): The build option.
        test_option (TestOption): The test option.

    Returns:
        str: The CMake arguments.
    """

    cmake_args = "-Dtests=ON -Dwerr=ON -Dxrpld=ON"

    # The `wextra` option is enabled everywhere except on GCC 12 and Clang 16.
    # Use `not in` rather than `not f"..." in [...]` for readability.
    if f"{compiler_name}-{compiler_version}" not in ("gcc-12", "clang-16"):
        cmake_args += " -Dwextra=ON"

    if build_type == BuildType.RELEASE:
        cmake_args += " -Dassert=ON"

    if build_mode == BuildMode.UNITY_ON:
        cmake_args += " -Dunity=ON"

    if build_option == BuildOption.COVERAGE:
        cmake_args += " -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0"
    elif build_option == BuildOption.SANITIZE_ASAN:
        pass  # TODO: Add ASAN-UBSAN flags.
    elif build_option == BuildOption.SANITIZE_TSAN:
        pass  # TODO: Add TSAN-UBSAN flags.
    elif build_option == BuildOption.VOIDSTAR:
        cmake_args += " -Dvoidstar=ON"

    if test_option != TestOption.NONE:
        cmake_args += f" -DUNIT_TEST_REFERENCE_FEE={test_option.value}"

    return cmake_args
|
||||
|
||||
|
||||
def generate_cmake_target(os_name: str, build_type: BuildType) -> str:
    """Create the CMake target based on the build type.

    The `install` target is used for Windows and for publishing a package,
    while the `all` target is used for all other configurations.

    Args:
        os_name (str): The OS name.
        build_type (BuildType): The build type.

    Returns:
        str: The CMake target.
    """
    needs_install = os_name == "windows" or build_type == BuildType.PUBLISH
    return "install" if needs_install else "all"
|
||||
|
||||
|
||||
def generate_enable_options(
    os_name: str,
    build_type: BuildType,
    publish_option: PublishOption,
) -> tuple[bool, bool, bool]:
    """Create the enable flags based on the OS name, build option, and publish
    option.

    We build and test all configurations by default, except for Windows in
    Debug, because it is too slow.

    Args:
        os_name (str): The OS name.
        build_type (BuildType): The build type.
        publish_option (PublishOption): The publish option.

    Returns:
        tuple: A tuple containing the enable test, enable package, and enable
            image flags.
    """
    # Direct boolean expressions instead of `True if ... else False`, which is
    # an anti-pattern that obscures the condition.
    enable_tests = not (os_name == "windows" and build_type == BuildType.DEBUG)

    enable_package = publish_option in (
        PublishOption.PACKAGE_ONLY,
        PublishOption.PACKAGE_AND_IMAGE,
    )

    enable_image = publish_option in (
        PublishOption.IMAGE_ONLY,
        PublishOption.PACKAGE_AND_IMAGE,
    )

    return enable_tests, enable_package, enable_image
|
||||
|
||||
|
||||
def generate_image_name(
|
||||
os_name: str,
|
||||
os_version: str,
|
||||
compiler_name: str,
|
||||
compiler_version: str,
|
||||
image_sha: str,
|
||||
) -> str | None:
|
||||
"""Create the Docker image name based on the distro details.
|
||||
|
||||
Args:
|
||||
os_name (str): The OS name.
|
||||
os_version (str): The OS version.
|
||||
compiler_name (str): The compiler name.
|
||||
compiler_version (str): The compiler version.
|
||||
image_sha (str): The image SHA.
|
||||
|
||||
Returns:
|
||||
str: The Docker image name or None if not applicable.
|
||||
|
||||
Raises:
|
||||
ValueError: If any of the arguments is empty for Linux.
|
||||
"""
|
||||
|
||||
if os_name == "windows" or os_name == "macos":
|
||||
return None
|
||||
|
||||
if not os_name:
|
||||
raise ValueError("os_name cannot be empty")
|
||||
if not os_version:
|
||||
raise ValueError("os_version cannot be empty")
|
||||
if not compiler_name:
|
||||
raise ValueError("compiler_name cannot be empty")
|
||||
if not compiler_version:
|
||||
raise ValueError("compiler_version cannot be empty")
|
||||
if not image_sha:
|
||||
raise ValueError("image_sha cannot be empty")
|
||||
|
||||
return f"ghcr.io/xrplf/ci/{os_name}-{os_version}:{compiler_name}-{compiler_version}-{image_sha}"
|
||||
419
.github/scripts/strategy-matrix/helpers/funcs_test.py
vendored
Normal file
419
.github/scripts/strategy-matrix/helpers/funcs_test.py
vendored
Normal file
@@ -0,0 +1,419 @@
|
||||
import pytest
|
||||
|
||||
from helpers.enums import *
|
||||
from helpers.funcs import *
|
||||
|
||||
|
||||
def test_generate_config_name_a_b_c_d_debug_amd64():
|
||||
assert (
|
||||
generate_config_name(
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"d",
|
||||
Arch.LINUX_AMD64,
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
)
|
||||
== "a-b-c-d-debug-amd64"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_config_name_a_b_c_release_unity_arm64():
|
||||
assert (
|
||||
generate_config_name(
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"",
|
||||
Arch.LINUX_ARM64,
|
||||
BuildType.RELEASE,
|
||||
BuildMode.UNITY_ON,
|
||||
BuildOption.NONE,
|
||||
)
|
||||
== "a-b-c-release-unity-arm64"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_config_name_a_b_coverage_publish_amd64():
|
||||
assert (
|
||||
generate_config_name(
|
||||
"a",
|
||||
"b",
|
||||
"",
|
||||
"",
|
||||
Arch.LINUX_AMD64,
|
||||
BuildType.PUBLISH,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.COVERAGE,
|
||||
)
|
||||
== "a-b-coverage-publish-amd64"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_config_name_a_asan_debug_unity_arm64():
|
||||
assert (
|
||||
generate_config_name(
|
||||
"a",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
Arch.LINUX_ARM64,
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_ON,
|
||||
BuildOption.SANITIZE_ASAN,
|
||||
)
|
||||
== "a-asan-debug-unity-arm64"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_config_name_a_c_tsan_release_amd64():
|
||||
assert (
|
||||
generate_config_name(
|
||||
"a",
|
||||
"",
|
||||
"c",
|
||||
"",
|
||||
Arch.LINUX_AMD64,
|
||||
BuildType.RELEASE,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.SANITIZE_TSAN,
|
||||
)
|
||||
== "a-c-tsan-release-amd64"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_config_name_a_d_voidstar_debug_amd64():
|
||||
assert (
|
||||
generate_config_name(
|
||||
"a",
|
||||
"",
|
||||
"",
|
||||
"d",
|
||||
Arch.LINUX_AMD64,
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.VOIDSTAR,
|
||||
)
|
||||
== "a-d-voidstar-debug-amd64"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_config_name_raises_on_none_os_name():
|
||||
with pytest.raises(ValueError):
|
||||
generate_config_name(
|
||||
None,
|
||||
"b",
|
||||
"c",
|
||||
"d",
|
||||
Arch.LINUX_AMD64,
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
)
|
||||
|
||||
|
||||
def test_generate_config_name_raises_on_empty_os_name():
|
||||
with pytest.raises(ValueError):
|
||||
generate_config_name(
|
||||
"",
|
||||
"b",
|
||||
"c",
|
||||
"d",
|
||||
Arch.LINUX_AMD64,
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_debug():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
TestOption.NONE,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_gcc_12_no_wextra():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"gcc",
|
||||
"12",
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
TestOption.NONE,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_clang_16_no_wextra():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"clang",
|
||||
"16",
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
TestOption.NONE,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_release():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.RELEASE,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
TestOption.NONE,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_publish():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.PUBLISH,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
TestOption.NONE,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_unity():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_ON,
|
||||
BuildOption.NONE,
|
||||
TestOption.NONE,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dunity=ON"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_coverage():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.COVERAGE,
|
||||
TestOption.NONE,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_voidstar():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.VOIDSTAR,
|
||||
TestOption.NONE,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dvoidstar=ON"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_reference_fee_500():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
TestOption.REFERENCE_FEE_500,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -DUNIT_TEST_REFERENCE_FEE=500"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_reference_fee_1000():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.DEBUG,
|
||||
BuildMode.UNITY_OFF,
|
||||
BuildOption.NONE,
|
||||
TestOption.REFERENCE_FEE_1000,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -DUNIT_TEST_REFERENCE_FEE=1000"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_args_a_b_multiple():
|
||||
assert (
|
||||
generate_cmake_args(
|
||||
"a",
|
||||
"b",
|
||||
BuildType.RELEASE,
|
||||
BuildMode.UNITY_ON,
|
||||
BuildOption.VOIDSTAR,
|
||||
TestOption.REFERENCE_FEE_500,
|
||||
)
|
||||
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dunity=ON -Dvoidstar=ON -DUNIT_TEST_REFERENCE_FEE=500"
|
||||
)
|
||||
|
||||
|
||||
def test_generate_cmake_target_linux_debug():
|
||||
assert generate_cmake_target("linux", BuildType.DEBUG) == "all"
|
||||
|
||||
|
||||
def test_generate_cmake_target_linux_release():
|
||||
assert generate_cmake_target("linux", BuildType.RELEASE) == "all"
|
||||
|
||||
|
||||
def test_generate_cmake_target_linux_publish():
|
||||
assert generate_cmake_target("linux", BuildType.PUBLISH) == "install"
|
||||
|
||||
|
||||
def test_generate_cmake_target_macos_debug():
|
||||
assert generate_cmake_target("macos", BuildType.DEBUG) == "all"
|
||||
|
||||
|
||||
def test_generate_cmake_target_macos_release():
|
||||
assert generate_cmake_target("macos", BuildType.RELEASE) == "all"
|
||||
|
||||
|
||||
def test_generate_cmake_target_macos_publish():
|
||||
assert generate_cmake_target("macos", BuildType.PUBLISH) == "install"
|
||||
|
||||
|
||||
def test_generate_cmake_target_windows_debug():
|
||||
assert generate_cmake_target("windows", BuildType.DEBUG) == "install"
|
||||
|
||||
|
||||
def test_generate_cmake_target_windows_release():
    # Fixed copy-paste bug: this test previously passed BuildType.DEBUG,
    # duplicating the debug test instead of covering the release case.
    assert generate_cmake_target("windows", BuildType.RELEASE) == "install"
|
||||
|
||||
|
||||
def test_generate_cmake_target_windows_publish():
    # Fixed copy-paste bug: this test previously passed BuildType.DEBUG,
    # duplicating the debug test instead of covering the publish case.
    assert generate_cmake_target("windows", BuildType.PUBLISH) == "install"
|
||||
|
||||
|
||||
def test_generate_enable_options_linux_debug_no_publish():
|
||||
assert generate_enable_options("linux", BuildType.DEBUG, PublishOption.NONE) == (
|
||||
True,
|
||||
False,
|
||||
False,
|
||||
)
|
||||
|
||||
|
||||
def test_generate_enable_options_linux_release_package_only():
|
||||
assert generate_enable_options(
|
||||
"linux", BuildType.RELEASE, PublishOption.PACKAGE_ONLY
|
||||
) == (True, True, False)
|
||||
|
||||
|
||||
def test_generate_enable_options_linux_publish_image_only():
|
||||
assert generate_enable_options(
|
||||
"linux", BuildType.PUBLISH, PublishOption.IMAGE_ONLY
|
||||
) == (True, False, True)
|
||||
|
||||
|
||||
def test_generate_enable_options_macos_debug_package_only():
|
||||
assert generate_enable_options(
|
||||
"macos", BuildType.DEBUG, PublishOption.PACKAGE_ONLY
|
||||
) == (True, True, False)
|
||||
|
||||
|
||||
def test_generate_enable_options_macos_release_image_only():
|
||||
assert generate_enable_options(
|
||||
"macos", BuildType.RELEASE, PublishOption.IMAGE_ONLY
|
||||
) == (True, False, True)
|
||||
|
||||
|
||||
def test_generate_enable_options_macos_publish_package_and_image():
|
||||
assert generate_enable_options(
|
||||
"macos", BuildType.PUBLISH, PublishOption.PACKAGE_AND_IMAGE
|
||||
) == (True, True, True)
|
||||
|
||||
|
||||
def test_generate_enable_options_windows_debug_package_and_image():
|
||||
assert generate_enable_options(
|
||||
"windows", BuildType.DEBUG, PublishOption.PACKAGE_AND_IMAGE
|
||||
) == (False, True, True)
|
||||
|
||||
|
||||
def test_generate_enable_options_windows_release_no_publish():
|
||||
assert generate_enable_options(
|
||||
"windows", BuildType.RELEASE, PublishOption.NONE
|
||||
) == (True, False, False)
|
||||
|
||||
|
||||
def test_generate_enable_options_windows_publish_image_only():
|
||||
assert generate_enable_options(
|
||||
"windows", BuildType.PUBLISH, PublishOption.IMAGE_ONLY
|
||||
) == (True, False, True)
|
||||
|
||||
|
||||
def test_generate_image_name_linux():
|
||||
assert generate_image_name("a", "b", "c", "d", "e") == "ghcr.io/xrplf/ci/a-b:c-d-e"
|
||||
|
||||
|
||||
def test_generate_image_name_linux_raises_on_empty_os_name():
|
||||
with pytest.raises(ValueError):
|
||||
generate_image_name("", "b", "c", "d", "e")
|
||||
|
||||
|
||||
def test_generate_image_name_linux_raises_on_empty_os_version():
|
||||
with pytest.raises(ValueError):
|
||||
generate_image_name("a", "", "c", "d", "e")
|
||||
|
||||
|
||||
def test_generate_image_name_linux_raises_on_empty_compiler_name():
|
||||
with pytest.raises(ValueError):
|
||||
generate_image_name("a", "b", "", "d", "e")
|
||||
|
||||
|
||||
def test_generate_image_name_linux_raises_on_empty_compiler_version():
|
||||
with pytest.raises(ValueError):
|
||||
generate_image_name("a", "b", "c", "", "e")
|
||||
|
||||
|
||||
def test_generate_image_name_linux_raises_on_empty_image_sha():
|
||||
with pytest.raises(ValueError):
|
||||
generate_image_name("a", "b", "c", "e", "")
|
||||
|
||||
|
||||
def test_generate_image_name_macos():
|
||||
assert generate_image_name("macos", "", "", "", "") is None
|
||||
|
||||
|
||||
def test_generate_image_name_macos_extra():
|
||||
assert generate_image_name("macos", "value", "does", "not", "matter") is None
|
||||
|
||||
|
||||
def test_generate_image_name_windows():
|
||||
assert generate_image_name("windows", "", "", "", "") is None
|
||||
|
||||
|
||||
def test_generate_image_name_windows_extra():
|
||||
assert generate_image_name("windows", "value", "does", "not", "matter") is None
|
||||
30
.github/scripts/strategy-matrix/helpers/unique.py
vendored
Normal file
30
.github/scripts/strategy-matrix/helpers/unique.py
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
import json
|
||||
from dataclasses import _is_dataclass_instance, asdict
|
||||
from typing import Any
|
||||
|
||||
|
||||
def is_unique(items: list[Any]) -> bool:
|
||||
"""Check if a list of dataclass objects contains only unique items.
|
||||
|
||||
As the items may not be hashable, we convert them to JSON strings first, and
|
||||
then check if the list of strings is the same size as the set of strings.
|
||||
|
||||
Args:
|
||||
items: The list of dataclass objects to check.
|
||||
|
||||
Returns:
|
||||
True if the list contains only unique items, False otherwise.
|
||||
|
||||
Raises:
|
||||
TypeError: If any of the items is not a dataclass.
|
||||
"""
|
||||
|
||||
l = list()
|
||||
s = set()
|
||||
for item in items:
|
||||
if not _is_dataclass_instance(item):
|
||||
raise TypeError("items must be a list of dataclasses")
|
||||
j = json.dumps(asdict(item))
|
||||
l.append(j)
|
||||
s.add(j)
|
||||
return len(l) == len(s)
|
||||
40
.github/scripts/strategy-matrix/helpers/unique_test.py
vendored
Normal file
40
.github/scripts/strategy-matrix/helpers/unique_test.py
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
from dataclasses import dataclass

import pytest

from helpers.unique import *


# Minimal dataclass fixtures: one with a hashable scalar field, one with an
# unhashable list field, to exercise both paths through is_unique.
@dataclass
class ExampleInt:
    value: int


@dataclass
class ExampleList:
    values: list[int]


def test_unique_int():
    items = [ExampleInt(1), ExampleInt(2), ExampleInt(3)]
    assert is_unique(items)


def test_not_unique_int():
    items = [ExampleInt(1), ExampleInt(2), ExampleInt(1)]
    assert not is_unique(items)


def test_unique_list():
    items = [ExampleList([1, 2, 3]), ExampleList([4, 5, 6]), ExampleList([7, 8, 9])]
    assert is_unique(items)


def test_not_unique_list():
    items = [ExampleList([1, 2, 3]), ExampleList([4, 5, 6]), ExampleList([1, 2, 3])]
    assert not is_unique(items)


def test_unique_raises_on_non_dataclass():
    with pytest.raises(TypeError):
        is_unique([1, 2, 3])
|
||||
212
.github/scripts/strategy-matrix/linux.json
vendored
212
.github/scripts/strategy-matrix/linux.json
vendored
@@ -1,212 +0,0 @@
|
||||
{
|
||||
"architecture": [
|
||||
{
|
||||
"platform": "linux/amd64",
|
||||
"runner": ["self-hosted", "Linux", "X64", "heavy"]
|
||||
},
|
||||
{
|
||||
"platform": "linux/arm64",
|
||||
"runner": ["self-hosted", "Linux", "ARM64", "heavy-arm64"]
|
||||
}
|
||||
],
|
||||
"os": [
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "15",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "16",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "17",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "18",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "19",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "20",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "trixie",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "trixie",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "15",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "trixie",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "20",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "trixie",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "21",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "8",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "8",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "10",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "10",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "jammy",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "16",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "17",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "18",
|
||||
"image_sha": "ab4d1f0"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "19",
|
||||
"image_sha": "ab4d1f0"
|
||||
}
|
||||
],
|
||||
"build_type": ["Debug", "Release"],
|
||||
"cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
|
||||
}
|
||||
385
.github/scripts/strategy-matrix/linux.py
vendored
Executable file
385
.github/scripts/strategy-matrix/linux.py
vendored
Executable file
@@ -0,0 +1,385 @@
|
||||
from helpers.defs import *
|
||||
from helpers.enums import *
|
||||
|
||||
# The default CI image SHAs to use, which can be specified per distro group and
|
||||
# can be overridden for individual distros, which is useful when debugging using
|
||||
# a locally built CI image. See https://github.com/XRPLF/ci for the images.
|
||||
DEBIAN_SHA = "sha-ca4517d"
|
||||
RHEL_SHA = "sha-ca4517d"
|
||||
UBUNTU_SHA = "sha-84afd81"
|
||||
|
||||
# We only build a selection of configurations for the various triggers to reduce
|
||||
# pipeline runtime. Across all three operating systems we aim to cover all GCC
|
||||
# and Clang versions, while not duplicating configurations too much. See also
|
||||
# the README for more details.
|
||||
|
||||
# The Debian distros to build configurations for.
|
||||
#
|
||||
# We have the following distros available:
|
||||
# - Debian Bullseye: GCC 12-15
|
||||
# - Debian Bookworm: GCC 13-15, Clang 16-20
|
||||
# - Debian Trixie: GCC 14-15, Clang 20-21
|
||||
DEBIAN_DISTROS = [
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="bullseye",
|
||||
compiler_name="gcc",
|
||||
compiler_version="14",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_types=[BuildType.DEBUG],
|
||||
publish_option=PublishOption.PACKAGE_ONLY,
|
||||
triggers=[Trigger.COMMIT, Trigger.LABEL],
|
||||
),
|
||||
Spec(
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_types=[BuildType.PUBLISH],
|
||||
publish_option=PublishOption.PACKAGE_AND_IMAGE,
|
||||
triggers=[Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="bullseye",
|
||||
compiler_name="gcc",
|
||||
compiler_version="15",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
build_modes=[BuildMode.UNITY_ON],
|
||||
build_option=BuildOption.COVERAGE,
|
||||
build_types=[BuildType.DEBUG],
|
||||
triggers=[Trigger.COMMIT, Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="bookworm",
|
||||
compiler_name="gcc",
|
||||
compiler_version="15",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="bookworm",
|
||||
compiler_name="clang",
|
||||
compiler_version="16",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_option=BuildOption.VOIDSTAR,
|
||||
build_types=[BuildType.DEBUG],
|
||||
publish_option=PublishOption.IMAGE_ONLY,
|
||||
triggers=[Trigger.COMMIT],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
build_modes=[BuildMode.UNITY_ON],
|
||||
build_types=[BuildType.RELEASE],
|
||||
triggers=[Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="bookworm",
|
||||
compiler_name="clang",
|
||||
compiler_version="17",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="bookworm",
|
||||
compiler_name="clang",
|
||||
compiler_version="18",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="bookworm",
|
||||
compiler_name="clang",
|
||||
compiler_version="19",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="trixie",
|
||||
compiler_name="gcc",
|
||||
compiler_version="15",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="debian",
|
||||
os_version="trixie",
|
||||
compiler_name="clang",
|
||||
compiler_version="21",
|
||||
image_sha=DEBIAN_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_types=[BuildType.DEBUG],
|
||||
triggers=[Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
|
||||
# The RHEL distros to build configurations for.
|
||||
#
|
||||
# We have the following distros available:
|
||||
# - RHEL 8: GCC 14, Clang "any"
|
||||
# - RHEL 9: GCC 12-14, Clang "any"
|
||||
# - RHEL 10: GCC 14, Clang "any"
|
||||
RHEL_DISTROS = [
|
||||
Distro(
|
||||
os_name="rhel",
|
||||
os_version="8",
|
||||
compiler_name="gcc",
|
||||
compiler_version="14",
|
||||
image_sha=RHEL_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="rhel",
|
||||
os_version="8",
|
||||
compiler_name="clang",
|
||||
compiler_version="any",
|
||||
image_sha=RHEL_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="rhel",
|
||||
os_version="9",
|
||||
compiler_name="gcc",
|
||||
compiler_version="12",
|
||||
image_sha=RHEL_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
build_modes=[BuildMode.UNITY_ON],
|
||||
build_types=[BuildType.DEBUG],
|
||||
triggers=[Trigger.COMMIT],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
build_modes=[BuildMode.UNITY_ON],
|
||||
build_types=[BuildType.RELEASE],
|
||||
triggers=[Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="rhel",
|
||||
os_version="9",
|
||||
compiler_name="gcc",
|
||||
compiler_version="13",
|
||||
image_sha=RHEL_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="rhel",
|
||||
os_version="10",
|
||||
compiler_name="clang",
|
||||
compiler_version="any",
|
||||
image_sha=RHEL_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
|
||||
# The Ubuntu distros to build configurations for.
|
||||
#
|
||||
# We have the following distros available:
|
||||
# - Ubuntu Jammy (22.04): GCC 12
|
||||
# - Ubuntu Noble (24.04): GCC 13-14, Clang 16-20
|
||||
UBUNTU_DISTROS = [
|
||||
Distro(
|
||||
os_name="ubuntu",
|
||||
os_version="jammy",
|
||||
compiler_name="gcc",
|
||||
compiler_version="12",
|
||||
image_sha=UBUNTU_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="ubuntu",
|
||||
os_version="noble",
|
||||
compiler_name="gcc",
|
||||
compiler_version="13",
|
||||
image_sha=UBUNTU_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
build_modes=[BuildMode.UNITY_ON],
|
||||
build_types=[BuildType.RELEASE],
|
||||
triggers=[Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="ubuntu",
|
||||
os_version="noble",
|
||||
compiler_name="gcc",
|
||||
compiler_version="14",
|
||||
image_sha=UBUNTU_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="ubuntu",
|
||||
os_version="noble",
|
||||
compiler_name="clang",
|
||||
compiler_version="17",
|
||||
image_sha=UBUNTU_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_types=[BuildType.DEBUG],
|
||||
triggers=[Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="ubuntu",
|
||||
os_version="noble",
|
||||
compiler_name="clang",
|
||||
compiler_version="18",
|
||||
image_sha=UBUNTU_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="ubuntu",
|
||||
os_version="noble",
|
||||
compiler_name="clang",
|
||||
compiler_version="19",
|
||||
image_sha=UBUNTU_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_ARM64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
Distro(
|
||||
os_name="ubuntu",
|
||||
os_version="noble",
|
||||
compiler_name="clang",
|
||||
compiler_version="20",
|
||||
image_sha=UBUNTU_SHA,
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
build_modes=[BuildMode.UNITY_ON],
|
||||
build_types=[BuildType.DEBUG],
|
||||
triggers=[Trigger.COMMIT],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_types=[BuildType.RELEASE],
|
||||
triggers=[Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.LINUX_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
22
.github/scripts/strategy-matrix/macos.json
vendored
22
.github/scripts/strategy-matrix/macos.json
vendored
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"architecture": [
|
||||
{
|
||||
"platform": "macos/arm64",
|
||||
"runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"]
|
||||
}
|
||||
],
|
||||
"os": [
|
||||
{
|
||||
"distro_name": "macos",
|
||||
"distro_version": "",
|
||||
"compiler_name": "",
|
||||
"compiler_version": "",
|
||||
"image_sha": ""
|
||||
}
|
||||
],
|
||||
"build_type": ["Debug", "Release"],
|
||||
"cmake_args": [
|
||||
"-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
|
||||
"-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
|
||||
]
|
||||
}
|
||||
20
.github/scripts/strategy-matrix/macos.py
vendored
Executable file
20
.github/scripts/strategy-matrix/macos.py
vendored
Executable file
@@ -0,0 +1,20 @@
|
||||
from helpers.defs import *
|
||||
from helpers.enums import *
|
||||
|
||||
DISTROS = [
|
||||
Distro(
|
||||
os_name="macos",
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.MACOS_ARM64],
|
||||
build_modes=[BuildMode.UNITY_OFF],
|
||||
build_types=[BuildType.DEBUG],
|
||||
triggers=[Trigger.COMMIT, Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.MACOS_ARM64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
19
.github/scripts/strategy-matrix/windows.json
vendored
19
.github/scripts/strategy-matrix/windows.json
vendored
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"architecture": [
|
||||
{
|
||||
"platform": "windows/amd64",
|
||||
"runner": ["self-hosted", "Windows", "devbox"]
|
||||
}
|
||||
],
|
||||
"os": [
|
||||
{
|
||||
"distro_name": "windows",
|
||||
"distro_version": "",
|
||||
"compiler_name": "",
|
||||
"compiler_version": "",
|
||||
"image_sha": ""
|
||||
}
|
||||
],
|
||||
"build_type": ["Debug", "Release"],
|
||||
"cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
|
||||
}
|
||||
20
.github/scripts/strategy-matrix/windows.py
vendored
Executable file
20
.github/scripts/strategy-matrix/windows.py
vendored
Executable file
@@ -0,0 +1,20 @@
|
||||
from helpers.defs import *
|
||||
from helpers.enums import *
|
||||
|
||||
DISTROS = [
|
||||
Distro(
|
||||
os_name="windows",
|
||||
specs=[
|
||||
Spec(
|
||||
archs=[Arch.WINDOWS_AMD64],
|
||||
build_modes=[BuildMode.UNITY_ON],
|
||||
build_types=[BuildType.RELEASE],
|
||||
triggers=[Trigger.COMMIT, Trigger.MERGE],
|
||||
),
|
||||
Spec(
|
||||
archs=[Arch.WINDOWS_AMD64],
|
||||
triggers=[Trigger.SCHEDULE],
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
8
.github/workflows/on-pr.yml
vendored
8
.github/workflows/on-pr.yml
vendored
@@ -112,12 +112,10 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [linux, macos, windows]
|
||||
platform: [linux, macos, windows]
|
||||
with:
|
||||
# Enable ccache only for events targeting the XRPLF repository, since
|
||||
# other accounts will not have access to our remote cache storage.
|
||||
ccache_enabled: ${{ github.repository_owner == 'XRPLF' }}
|
||||
os: ${{ matrix.os }}
|
||||
platform: ${{ matrix.platform }}
|
||||
trigger: commit
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
|
||||
13
.github/workflows/on-trigger.yml
vendored
13
.github/workflows/on-trigger.yml
vendored
@@ -66,15 +66,10 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: ${{ github.event_name == 'merge_group' }}
|
||||
matrix:
|
||||
os: [linux, macos, windows]
|
||||
platform: [linux, macos, windows]
|
||||
with:
|
||||
# Enable ccache only for events targeting the XRPLF repository, since
|
||||
# other accounts will not have access to our remote cache storage.
|
||||
# However, we do not enable ccache for events targeting the master or a
|
||||
# release branch, to protect against the rare case that the output
|
||||
# produced by ccache is not identical to a regular compilation.
|
||||
ccache_enabled: ${{ github.repository_owner == 'XRPLF' && !(github.base_ref == 'master' || startsWith(github.base_ref, 'release')) }}
|
||||
os: ${{ matrix.os }}
|
||||
strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
|
||||
platform: ${{ matrix.platform }}
|
||||
# The workflow dispatch event uses the same trigger as the schedule event.
|
||||
trigger: ${{ github.event_name == 'push' && 'merge' || 'schedule' }}
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
4
.github/workflows/pre-commit.yml
vendored
4
.github/workflows/pre-commit.yml
vendored
@@ -9,7 +9,7 @@ on:
|
||||
jobs:
|
||||
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
|
||||
run-hooks:
|
||||
uses: XRPLF/actions/.github/workflows/pre-commit.yml@282890f46d6921249d5659dd38babcb0bd8aef48
|
||||
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
|
||||
with:
|
||||
runs_on: ubuntu-latest
|
||||
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-ab4d1f0" }'
|
||||
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
|
||||
|
||||
99
.github/workflows/reusable-build-test-config.yml
vendored
99
.github/workflows/reusable-build-test-config.yml
vendored
@@ -3,21 +3,10 @@ name: Build and test configuration
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
build_only:
|
||||
description: 'Whether to only build or to build and test the code ("true", "false").'
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
build_type:
|
||||
description: 'The build type to use ("Debug", "Release").'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
ccache_enabled:
|
||||
description: "Whether to enable ccache."
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
required: true
|
||||
|
||||
cmake_args:
|
||||
description: "Additional arguments to pass to CMake."
|
||||
@@ -27,8 +16,23 @@ on:
|
||||
|
||||
cmake_target:
|
||||
description: "The CMake target to build."
|
||||
required: true
|
||||
type: string
|
||||
required: true
|
||||
|
||||
enable_tests:
|
||||
description: "Whether to run the tests."
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
enable_package:
|
||||
description: "Whether to publish a package."
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
enable_image:
|
||||
description: "Whether to publish an image."
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
runs_on:
|
||||
description: Runner to run the job on as a JSON string
|
||||
@@ -51,12 +55,6 @@ on:
|
||||
type: number
|
||||
default: 2
|
||||
|
||||
sanitizers:
|
||||
description: "The sanitizers to enable."
|
||||
required: false
|
||||
type: string
|
||||
default: ""
|
||||
|
||||
secrets:
|
||||
CODECOV_TOKEN:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
@@ -78,26 +76,8 @@ jobs:
|
||||
container: ${{ inputs.image != '' && inputs.image || null }}
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
# Use a namespace to keep the objects separate for each configuration.
|
||||
CCACHE_NAMESPACE: ${{ inputs.config_name }}
|
||||
# Ccache supports both Redis and HTTP endpoints.
|
||||
# * For Redis, use the following format: redis://ip:port, see
|
||||
# https://github.com/ccache/ccache/wiki/Redis-storage. Note that TLS is
|
||||
# not directly supported by ccache, and requires use of a proxy.
|
||||
# * For HTTP use the following format: http://ip:port/cache when using
|
||||
# nginx as backend or http://ip:port|layout=bazel when using Bazel
|
||||
# Remote Cache, see https://github.com/ccache/ccache/wiki/HTTP-storage.
|
||||
# Note that HTTPS is not directly supported by ccache.
|
||||
CCACHE_REMOTE_ONLY: true
|
||||
CCACHE_REMOTE_STORAGE: http://cache.dev.ripplex.io:8080|layout=bazel
|
||||
# Ignore the creation and modification timestamps on files, since the
|
||||
# header files are copied into separate directories by CMake, which will
|
||||
# otherwise result in cache misses.
|
||||
CCACHE_SLOPPINESS: include_file_ctime,include_file_mtime
|
||||
# Determine if coverage and voidstar should be enabled.
|
||||
COVERAGE_ENABLED: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }}
|
||||
VOIDSTAR_ENABLED: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
|
||||
SANITIZERS_ENABLED: ${{ inputs.sanitizers != '' }}
|
||||
ENABLED_VOIDSTAR: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
|
||||
ENABLED_COVERAGE: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }}
|
||||
steps:
|
||||
- name: Cleanup workspace (macOS and Windows)
|
||||
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
|
||||
@@ -107,13 +87,9 @@ jobs:
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
|
||||
- name: Prepare runner
|
||||
uses: XRPLF/actions/prepare-runner@f05cab7b8541eee6473aa42beb9d2fe35608a190
|
||||
uses: XRPLF/actions/prepare-runner@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
with:
|
||||
enable_ccache: ${{ inputs.ccache_enabled }}
|
||||
|
||||
- name: Set ccache log file
|
||||
if: ${{ inputs.ccache_enabled && runner.debug == '1' }}
|
||||
run: echo "CCACHE_LOGFILE=${{ runner.temp }}/ccache.log" >> "${GITHUB_ENV}"
|
||||
disable_ccache: false
|
||||
|
||||
- name: Print build environment
|
||||
uses: ./.github/actions/print-env
|
||||
@@ -135,13 +111,11 @@ jobs:
|
||||
# Set the verbosity to "quiet" for Windows to avoid an excessive
|
||||
# amount of logs. For other OSes, the "verbose" logs are more useful.
|
||||
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
|
||||
sanitizers: ${{ inputs.sanitizers }}
|
||||
|
||||
- name: Configure CMake
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
env:
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
SANITIZERS: ${{ inputs.sanitizers }}
|
||||
CMAKE_ARGS: ${{ inputs.cmake_args }}
|
||||
run: |
|
||||
cmake \
|
||||
@@ -164,15 +138,6 @@ jobs:
|
||||
--parallel "${BUILD_NPROC}" \
|
||||
--target "${CMAKE_TARGET}"
|
||||
|
||||
- name: Show ccache statistics
|
||||
if: ${{ inputs.ccache_enabled }}
|
||||
run: |
|
||||
ccache --show-stats -vv
|
||||
if [ '${{ runner.debug }}' = '1' ]; then
|
||||
cat "${CCACHE_LOGFILE}"
|
||||
curl ${CCACHE_REMOTE_STORAGE%|*}/status || true
|
||||
fi
|
||||
|
||||
- name: Upload the binary (Linux)
|
||||
if: ${{ github.repository_owner == 'XRPLF' && runner.os == 'Linux' }}
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
@@ -183,7 +148,7 @@ jobs:
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Check linking (Linux)
|
||||
if: ${{ runner.os == 'Linux' && env.SANITIZERS_ENABLED == 'false' }}
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
run: |
|
||||
ldd ./xrpld
|
||||
@@ -195,21 +160,13 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Verify presence of instrumentation (Linux)
|
||||
if: ${{ runner.os == 'Linux' && env.VOIDSTAR_ENABLED == 'true' }}
|
||||
if: ${{ runner.os == 'Linux' && env.ENABLED_VOIDSTAR == 'true' }}
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
run: |
|
||||
./xrpld --version | grep libvoidstar
|
||||
|
||||
- name: Set sanitizer options
|
||||
if: ${{ !inputs.build_only && env.SANITIZERS_ENABLED == 'true' }}
|
||||
run: |
|
||||
echo "ASAN_OPTIONS=print_stacktrace=1:detect_container_overflow=0:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/asan.supp" >> ${GITHUB_ENV}
|
||||
echo "TSAN_OPTIONS=second_deadlock_stack=1:halt_on_error=0:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/tsan.supp" >> ${GITHUB_ENV}
|
||||
echo "UBSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/ubsan.supp" >> ${GITHUB_ENV}
|
||||
echo "LSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/lsan.supp" >> ${GITHUB_ENV}
|
||||
|
||||
- name: Run the separate tests
|
||||
if: ${{ !inputs.build_only }}
|
||||
if: ${{ inputs.enable_tests }}
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
# Windows locks some of the build files while running tests, and parallel jobs can collide
|
||||
env:
|
||||
@@ -222,7 +179,7 @@ jobs:
|
||||
-j "${PARALLELISM}"
|
||||
|
||||
- name: Run the embedded tests
|
||||
if: ${{ !inputs.build_only }}
|
||||
if: ${{ inputs.enable_tests }}
|
||||
working-directory: ${{ runner.os == 'Windows' && format('{0}/{1}', env.BUILD_DIR, inputs.build_type) || env.BUILD_DIR }}
|
||||
env:
|
||||
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||
@@ -230,7 +187,7 @@ jobs:
|
||||
./xrpld --unittest --unittest-jobs "${BUILD_NPROC}"
|
||||
|
||||
- name: Debug failure (Linux)
|
||||
if: ${{ failure() && runner.os == 'Linux' && !inputs.build_only }}
|
||||
if: ${{ (failure() || cancelled()) && runner.os == 'Linux' && inputs.enable_tests }}
|
||||
run: |
|
||||
echo "IPv4 local port range:"
|
||||
cat /proc/sys/net/ipv4/ip_local_port_range
|
||||
@@ -238,7 +195,7 @@ jobs:
|
||||
netstat -an
|
||||
|
||||
- name: Prepare coverage report
|
||||
if: ${{ !inputs.build_only && env.COVERAGE_ENABLED == 'true' }}
|
||||
if: ${{ github.repository_owner == 'XRPLF' && env.ENABLED_COVERAGE == 'true' }}
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
env:
|
||||
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||
@@ -251,7 +208,7 @@ jobs:
|
||||
--target coverage
|
||||
|
||||
- name: Upload coverage report
|
||||
if: ${{ github.repository_owner == 'XRPLF' && !inputs.build_only && env.COVERAGE_ENABLED == 'true' }}
|
||||
if: ${{ github.repository_owner == 'XRPLF' && env.ENABLED_COVERAGE == 'true' }}
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
with:
|
||||
disable_search: true
|
||||
|
||||
34
.github/workflows/reusable-build-test.yml
vendored
34
.github/workflows/reusable-build-test.yml
vendored
@@ -8,24 +8,14 @@ name: Build and test
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ccache_enabled:
|
||||
description: "Whether to enable ccache."
|
||||
platform:
|
||||
description: "The platform to generate the strategy matrix for ('linux', 'macos', 'windows'). If not provided all platforms are used."
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
os:
|
||||
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||
type: string
|
||||
trigger:
|
||||
description: "The trigger that caused the workflow to run ('commit', 'label', 'merge', 'schedule')."
|
||||
required: true
|
||||
type: string
|
||||
|
||||
strategy_matrix:
|
||||
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
|
||||
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
|
||||
required: false
|
||||
type: string
|
||||
default: "minimal"
|
||||
|
||||
secrets:
|
||||
CODECOV_TOKEN:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
@@ -36,8 +26,8 @@ jobs:
|
||||
generate-matrix:
|
||||
uses: ./.github/workflows/reusable-strategy-matrix.yml
|
||||
with:
|
||||
os: ${{ inputs.os }}
|
||||
strategy_matrix: ${{ inputs.strategy_matrix }}
|
||||
platform: ${{ inputs.platform }}
|
||||
trigger: ${{ inputs.trigger }}
|
||||
|
||||
# Build and test the binary for each configuration.
|
||||
build-test-config:
|
||||
@@ -49,14 +39,14 @@ jobs:
|
||||
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
||||
max-parallel: 10
|
||||
with:
|
||||
build_only: ${{ matrix.build_only }}
|
||||
build_type: ${{ matrix.build_type }}
|
||||
ccache_enabled: ${{ inputs.ccache_enabled }}
|
||||
cmake_args: ${{ matrix.cmake_args }}
|
||||
cmake_target: ${{ matrix.cmake_target }}
|
||||
runs_on: ${{ toJSON(matrix.architecture.runner) }}
|
||||
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
|
||||
enable_tests: ${{ matrix.enable_tests }}
|
||||
enable_package: ${{ matrix.enable_package }}
|
||||
enable_image: ${{ matrix.enable_image }}
|
||||
runs_on: ${{ toJson(matrix.runs_on) }}
|
||||
image: ${{ matrix.image }}
|
||||
config_name: ${{ matrix.config_name }}
|
||||
sanitizers: ${{ matrix.sanitizers }}
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
2
.github/workflows/reusable-check-rename.yml
vendored
2
.github/workflows/reusable-check-rename.yml
vendored
@@ -29,8 +29,6 @@ jobs:
|
||||
run: .github/scripts/rename/binary.sh .
|
||||
- name: Check namespaces
|
||||
run: .github/scripts/rename/namespace.sh .
|
||||
- name: Check config name
|
||||
run: .github/scripts/rename/config.sh .
|
||||
- name: Check for differences
|
||||
env:
|
||||
MESSAGE: |
|
||||
|
||||
32
.github/workflows/reusable-notify-clio.yml
vendored
32
.github/workflows/reusable-notify-clio.yml
vendored
@@ -44,29 +44,37 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Extract version
|
||||
id: version
|
||||
uses: ./.github/actions/extract-version
|
||||
- name: Generate outputs
|
||||
id: generate
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
run: |
|
||||
echo 'Generating user and channel.'
|
||||
echo "user=clio" >> "${GITHUB_OUTPUT}"
|
||||
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
|
||||
- name: Upload recipe
|
||||
uses: ./.github/actions/upload-recipe
|
||||
id: upload
|
||||
echo 'Extracting version.'
|
||||
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
|
||||
- name: Calculate conan reference
|
||||
id: conan_ref
|
||||
run: |
|
||||
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
|
||||
- name: Set up Conan
|
||||
uses: ./.github/actions/setup-conan
|
||||
with:
|
||||
conan_recipe_version: ${{ steps.version.outputs.version }}
|
||||
conan_recipe_channel: ${{ steps.generate.outputs.channel }}
|
||||
conan_recipe_user: ${{ steps.generate.outputs.user }}
|
||||
conan_remote_name: ${{ inputs.conan_remote_name }}
|
||||
conan_remote_url: ${{ inputs.conan_remote_url }}
|
||||
conan_remote_username: ${{ secrets.conan_remote_username }}
|
||||
conan_remote_password: ${{ secrets.conan_remote_password }}
|
||||
- name: Log into Conan remote
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
|
||||
- name: Upload package
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
run: |
|
||||
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
|
||||
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
|
||||
outputs:
|
||||
conan_ref: ${{ steps.upload.outputs.conan_ref }}
|
||||
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}
|
||||
|
||||
notify:
|
||||
needs: upload
|
||||
|
||||
18
.github/workflows/reusable-strategy-matrix.yml
vendored
18
.github/workflows/reusable-strategy-matrix.yml
vendored
@@ -3,16 +3,14 @@ name: Generate strategy matrix
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
os:
|
||||
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||
platform:
|
||||
description: "The platform to generate the strategy matrix for ('linux', 'macos', 'windows'). If not provided all platforms are used."
|
||||
required: false
|
||||
type: string
|
||||
strategy_matrix:
|
||||
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
|
||||
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
|
||||
required: false
|
||||
trigger:
|
||||
description: "The trigger that caused the workflow to run ('commit', 'label', 'merge', 'schedule')."
|
||||
required: true
|
||||
type: string
|
||||
default: "minimal"
|
||||
outputs:
|
||||
matrix:
|
||||
description: "The generated strategy matrix."
|
||||
@@ -40,6 +38,6 @@ jobs:
|
||||
working-directory: .github/scripts/strategy-matrix
|
||||
id: generate
|
||||
env:
|
||||
GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
|
||||
GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
|
||||
run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
|
||||
PLATFORM: ${{ inputs.platform != '' && format('--platform={0}', inputs.platform) || '' }}
|
||||
TRIGGER: ${{ format('--trigger={0}', inputs.trigger) }}
|
||||
run: ./generate.py ${PLATFORM} ${TRIGGER} >> "${GITHUB_OUTPUT}"
|
||||
|
||||
25
.github/workflows/upload-conan-deps.yml
vendored
25
.github/workflows/upload-conan-deps.yml
vendored
@@ -19,17 +19,17 @@ on:
|
||||
branches: [develop]
|
||||
paths:
|
||||
# This allows testing changes to the upload workflow in a PR
|
||||
- .github/workflows/upload-conan-deps.yml
|
||||
- ".github/workflows/upload-conan-deps.yml"
|
||||
push:
|
||||
branches: [develop]
|
||||
paths:
|
||||
- .github/workflows/upload-conan-deps.yml
|
||||
- .github/workflows/reusable-strategy-matrix.yml
|
||||
- .github/actions/build-deps/action.yml
|
||||
- .github/actions/setup-conan/action.yml
|
||||
- ".github/workflows/upload-conan-deps.yml"
|
||||
- ".github/workflows/reusable-strategy-matrix.yml"
|
||||
- ".github/actions/build-deps/action.yml"
|
||||
- ".github/actions/setup-conan/action.yml"
|
||||
- ".github/scripts/strategy-matrix/**"
|
||||
- conanfile.py
|
||||
- conan.lock
|
||||
- "conanfile.py"
|
||||
- "conan.lock"
|
||||
|
||||
env:
|
||||
CONAN_REMOTE_NAME: xrplf
|
||||
@@ -49,7 +49,8 @@ jobs:
|
||||
generate-matrix:
|
||||
uses: ./.github/workflows/reusable-strategy-matrix.yml
|
||||
with:
|
||||
strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}
|
||||
# The workflow dispatch event uses the same trigger as the schedule event.
|
||||
trigger: ${{ github.event_name == 'pull_request' && 'commit' || (github.event_name == 'push' && 'merge' || 'schedule') }}
|
||||
|
||||
# Build and upload the dependencies for each configuration.
|
||||
run-upload-conan-deps:
|
||||
@@ -59,8 +60,8 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
||||
max-parallel: 10
|
||||
runs-on: ${{ matrix.architecture.runner }}
|
||||
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
|
||||
runs-on: ${{ matrix.runs_on }}
|
||||
container: ${{ matrix.image }}
|
||||
steps:
|
||||
- name: Cleanup workspace (macOS and Windows)
|
||||
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
|
||||
@@ -70,9 +71,9 @@ jobs:
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
|
||||
- name: Prepare runner
|
||||
uses: XRPLF/actions/prepare-runner@f05cab7b8541eee6473aa42beb9d2fe35608a190
|
||||
uses: XRPLF/actions/prepare-runner@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
with:
|
||||
enable_ccache: false
|
||||
disable_ccache: false
|
||||
|
||||
- name: Print build environment
|
||||
uses: ./.github/actions/print-env
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,5 +1,4 @@
|
||||
# .gitignore
|
||||
# cspell: disable
|
||||
|
||||
# Macintosh Desktop Services Store files.
|
||||
.DS_Store
|
||||
@@ -20,6 +19,7 @@ Release/
|
||||
/tmp/
|
||||
CMakeSettings.json
|
||||
CMakeUserPresets.json
|
||||
__pycache__
|
||||
|
||||
# Coverage files.
|
||||
*.gcno
|
||||
@@ -36,7 +36,6 @@ gmon.out
|
||||
|
||||
# Customized configs.
|
||||
/rippled.cfg
|
||||
/xrpld.cfg
|
||||
/validators.txt
|
||||
|
||||
# Locally patched Conan recipes
|
||||
|
||||
@@ -32,25 +32,10 @@ repos:
|
||||
- id: prettier
|
||||
|
||||
- repo: https://github.com/psf/black-pre-commit-mirror
|
||||
rev: 831207fd435b47aeffdf6af853097e64322b4d44 # frozen: v25.12.0
|
||||
rev: 25.11.0
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/streetsidesoftware/cspell-cli
|
||||
rev: 1cfa010f078c354f3ffb8413616280cc28f5ba21 # frozen: v9.4.0
|
||||
hooks:
|
||||
- id: cspell # Spell check changed files
|
||||
exclude: .config/cspell.config.yaml
|
||||
- id: cspell # Spell check the commit message
|
||||
name: check commit message spelling
|
||||
args:
|
||||
- --no-must-find-files
|
||||
- --no-progress
|
||||
- --no-summary
|
||||
- --files
|
||||
- .git/COMMIT_EDITMSG
|
||||
stages: [commit-msg]
|
||||
|
||||
exclude: |
|
||||
(?x)^(
|
||||
external/.*|
|
||||
|
||||
57
BUILD.md
57
BUILD.md
@@ -1,5 +1,5 @@
|
||||
| :warning: **WARNING** :warning: |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| :warning: **WARNING** :warning:
|
||||
|---|
|
||||
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |
|
||||
|
||||
> These instructions also assume a basic familiarity with Conan and CMake.
|
||||
@@ -148,8 +148,7 @@ function extract_version {
|
||||
}
|
||||
|
||||
# Define which recipes to export.
|
||||
recipes=('ed25519' 'grpc' 'openssl' 'secp256k1' 'snappy' 'soci')
|
||||
folders=('all' 'all' '3.x.x' 'all' 'all' 'all')
|
||||
recipes=(ed25519 grpc secp256k1 snappy soci)
|
||||
|
||||
# Selectively check out the recipes from our CCI fork.
|
||||
cd external
|
||||
@@ -158,24 +157,20 @@ cd conan-center-index
|
||||
git init
|
||||
git remote add origin git@github.com:XRPLF/conan-center-index.git
|
||||
git sparse-checkout init
|
||||
for ((index = 1; index <= ${#recipes[@]}; index++)); do
|
||||
recipe=${recipes[index]}
|
||||
folder=${folders[index]}
|
||||
echo "Checking out recipe '${recipe}' from folder '${folder}'..."
|
||||
git sparse-checkout add recipes/${recipe}/${folder}
|
||||
for recipe in ${recipes[@]}; do
|
||||
echo "Checking out ${recipe}..."
|
||||
git sparse-checkout add recipes/${recipe}/all
|
||||
done
|
||||
git fetch origin master
|
||||
git checkout master
|
||||
cd ../..
|
||||
|
||||
# Export the recipes into the local cache.
|
||||
for ((index = 1; index <= ${#recipes[@]}; index++)); do
|
||||
recipe=${recipes[index]}
|
||||
folder=${folders[index]}
|
||||
for recipe in ${recipes[@]}; do
|
||||
version=$(extract_version ${recipe})
|
||||
echo "Exporting '${recipe}/${version}' from '${recipe}/${folder}'..."
|
||||
echo "Exporting ${recipe}/${version}..."
|
||||
conan export --version $(extract_version ${recipe}) \
|
||||
external/conan-center-index/recipes/${recipe}/${folder}
|
||||
external/conan-center-index/recipes/${recipe}/all
|
||||
done
|
||||
```
|
||||
|
||||
@@ -523,32 +518,18 @@ stored inside the build directory, as either of:
|
||||
- file named `coverage.`_extension_, with a suitable extension for the report format, or
|
||||
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
|
||||
|
||||
## Sanitizers
|
||||
|
||||
To build dependencies and xrpld with sanitizer instrumentation, set the
|
||||
`SANITIZERS` environment variable (only once before running conan and cmake) and use the `sanitizers` profile in conan:
|
||||
|
||||
```bash
|
||||
export SANITIZERS=address,undefinedbehavior
|
||||
|
||||
conan install .. --output-folder . --profile:all sanitizers --build missing --settings build_type=Debug
|
||||
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Debug -Dxrpld=ON -Dtests=ON ..
|
||||
```
|
||||
|
||||
See [Sanitizers docs](./docs/build/sanitizers.md) for more details.
|
||||
|
||||
## Options
|
||||
|
||||
| Option | Default Value | Description |
|
||||
| ---------- | ------------- | -------------------------------------------------------------- |
|
||||
| `assert` | OFF | Enable assertions. |
|
||||
| `coverage` | OFF | Prepare the coverage report. |
|
||||
| `tests` | OFF | Build tests. |
|
||||
| `unity` | OFF | Configure a unity build. |
|
||||
| `xrpld` | OFF | Build the xrpld application, and not just the libxrpl library. |
|
||||
| `werr` | OFF | Treat compilation warnings as errors |
|
||||
| `wextra` | OFF | Enable additional compilation warnings |
|
||||
| Option | Default Value | Description |
|
||||
| ---------- | ------------- | ------------------------------------------------------------------ |
|
||||
| `assert` | OFF | Enable assertions. |
|
||||
| `coverage` | OFF | Prepare the coverage report. |
|
||||
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
|
||||
| `tests` | OFF | Build tests. |
|
||||
| `unity` | OFF | Configure a unity build. |
|
||||
| `xrpld` | OFF | Build the xrpld application, and not just the libxrpl library. |
|
||||
| `werr` | OFF | Treat compilation warnings as errors |
|
||||
| `wextra` | OFF | Enable additional compilation warnings |
|
||||
|
||||
[Unity builds][5] may be faster for the first build
|
||||
(at the cost of much more memory) since they concatenate sources into fewer
|
||||
|
||||
@@ -16,23 +16,18 @@ set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
set(CMAKE_CXX_STANDARD 20)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
|
||||
include(CompilationEnv)
|
||||
|
||||
if(is_gcc)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
|
||||
# GCC-specific fixes
|
||||
add_compile_options(-Wno-unknown-pragmas -Wno-subobject-linkage)
|
||||
# -Wno-subobject-linkage can be removed when we upgrade GCC version to at least 13.3
|
||||
elseif(is_clang)
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
# Clang-specific fixes
|
||||
add_compile_options(-Wno-unknown-warning-option) # Ignore unknown warning options
|
||||
elseif(is_msvc)
|
||||
elseif(MSVC)
|
||||
# MSVC-specific fixes
|
||||
add_compile_options(/wd4068) # Ignore unknown pragmas
|
||||
endif()
|
||||
|
||||
# Enable ccache to speed up builds.
|
||||
include(Ccache)
|
||||
|
||||
# make GIT_COMMIT_HASH define available to all sources
|
||||
find_package(Git)
|
||||
if(Git_FOUND)
|
||||
@@ -79,7 +74,6 @@ if (packages_only)
|
||||
return ()
|
||||
endif ()
|
||||
include(XrplCompiler)
|
||||
include(XrplSanitizers)
|
||||
include(XrplInterface)
|
||||
|
||||
option(only_docs "Include only the docs target?" FALSE)
|
||||
@@ -91,18 +85,34 @@ endif()
|
||||
###
|
||||
|
||||
include(deps/Boost)
|
||||
find_package(OpenSSL 1.1.1 REQUIRED)
|
||||
set_target_properties(OpenSSL::SSL PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
|
||||
)
|
||||
|
||||
add_subdirectory(external/antithesis-sdk)
|
||||
find_package(date REQUIRED)
|
||||
find_package(ed25519 REQUIRED)
|
||||
find_package(gRPC REQUIRED)
|
||||
find_package(LibArchive REQUIRED)
|
||||
find_package(lz4 REQUIRED)
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(OpenSSL REQUIRED)
|
||||
find_package(secp256k1 REQUIRED)
|
||||
# Target names with :: are not allowed in a generator expression.
|
||||
# We need to pull the include directories and imported location properties
|
||||
# from separate targets.
|
||||
find_package(LibArchive REQUIRED)
|
||||
find_package(SOCI REQUIRED)
|
||||
find_package(SQLite3 REQUIRED)
|
||||
|
||||
option(rocksdb "Enable RocksDB" ON)
|
||||
if(rocksdb)
|
||||
find_package(RocksDB REQUIRED)
|
||||
set_target_properties(RocksDB::rocksdb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
|
||||
)
|
||||
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
|
||||
endif()
|
||||
|
||||
find_package(date REQUIRED)
|
||||
find_package(ed25519 REQUIRED)
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(secp256k1 REQUIRED)
|
||||
find_package(xxHash REQUIRED)
|
||||
|
||||
target_link_libraries(xrpl_libs INTERFACE
|
||||
@@ -115,15 +125,6 @@ target_link_libraries(xrpl_libs INTERFACE
|
||||
SQLite::SQLite3
|
||||
)
|
||||
|
||||
option(rocksdb "Enable RocksDB" ON)
|
||||
if(rocksdb)
|
||||
find_package(RocksDB REQUIRED)
|
||||
set_target_properties(RocksDB::rocksdb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
|
||||
)
|
||||
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
|
||||
endif()
|
||||
|
||||
# Work around changes to Conan recipe for now.
|
||||
if(TARGET nudb::core)
|
||||
set(nudb nudb::core)
|
||||
|
||||
@@ -555,16 +555,16 @@ Rippled uses a linear workflow model that can be summarized as:
|
||||
git fetch --multiple upstreams user1 user2 user3 [...]
|
||||
git checkout -B release-next --no-track upstream/develop
|
||||
|
||||
# Only do an ff-only merge if pr-branch1 is either already
|
||||
# Only do an ff-only merge if prbranch1 is either already
|
||||
# squashed, or needs to be merged with separate commits,
|
||||
# and has no merge commits.
|
||||
# Use -S on the ff-only merge if pr-branch1 isn't signed.
|
||||
git merge [-S] --ff-only user1/pr-branch1
|
||||
# Use -S on the ff-only merge if prbranch1 isn't signed.
|
||||
git merge [-S] --ff-only user1/prbranch1
|
||||
|
||||
git merge --squash user2/pr-branch2
|
||||
git merge --squash user2/prbranch2
|
||||
git commit -S # Use the commit message provided on the PR
|
||||
|
||||
git merge --squash user3/pr-branch3
|
||||
git merge --squash user3/prbranch3
|
||||
git commit -S # Use the commit message provided on the PR
|
||||
|
||||
[...]
|
||||
@@ -876,7 +876,7 @@ git push --delete upstream-push master-next
|
||||
|
||||
#### Special cases: point releases, hotfixes, etc.
|
||||
|
||||
On occasion, a bug or issue is discovered in a version that already
|
||||
On occassion, a bug or issue is discovered in a version that already
|
||||
had a final release. Most of the time, development will have started
|
||||
on the next version, and will usually have changes in `develop`
|
||||
and often in `release`.
|
||||
|
||||
@@ -42,7 +42,7 @@ For more information on responsible disclosure, please read this [Wikipedia arti
|
||||
|
||||
## Report Handling Process
|
||||
|
||||
Please report the bug directly to us and limit further disclosure. If you want to prove that you knew the bug as of a given time, consider using a cryptographic pre-commitment: hash the content of your report and publish the hash on a medium of your choice (e.g. on Twitter or as a memo in a transaction) as "proof" that you had written the text at a given point in time.
|
||||
Please report the bug directly to us and limit further disclosure. If you want to prove that you knew the bug as of a given time, consider using a cryptographic precommitment: hash the content of your report and publish the hash on a medium of your choice (e.g. on Twitter or as a memo in a transaction) as "proof" that you had written the text at a given point in time.
|
||||
|
||||
Once we receive a report, we:
|
||||
|
||||
|
||||
@@ -29,18 +29,18 @@
|
||||
#
|
||||
# Purpose
|
||||
#
|
||||
# This file documents and provides examples of all xrpld server process
|
||||
# configuration options. When the xrpld server instance is launched, it
|
||||
# This file documents and provides examples of all rippled server process
|
||||
# configuration options. When the rippled server instance is launched, it
|
||||
# looks for a file with the following name:
|
||||
#
|
||||
# xrpld.cfg
|
||||
# rippled.cfg
|
||||
#
|
||||
# For more information on where the xrpld server instance searches for the
|
||||
# For more information on where the rippled server instance searches for the
|
||||
# file, visit:
|
||||
#
|
||||
# https://xrpl.org/commandline-usage.html#generic-options
|
||||
#
|
||||
# This file should be named xrpld.cfg. This file is UTF-8 with DOS, UNIX,
|
||||
# This file should be named rippled.cfg. This file is UTF-8 with DOS, UNIX,
|
||||
# or Mac style end of lines. Blank lines and lines beginning with '#' are
|
||||
# ignored. Undefined sections are reserved. No escapes are currently defined.
|
||||
#
|
||||
@@ -89,8 +89,8 @@
|
||||
#
|
||||
#
|
||||
#
|
||||
# xrpld offers various server protocols to clients making inbound
|
||||
# connections. The listening ports xrpld uses are "universal" ports
|
||||
# rippled offers various server protocols to clients making inbound
|
||||
# connections. The listening ports rippled uses are "universal" ports
|
||||
# which may be configured to handshake in one or more of the available
|
||||
# supported protocols. These universal ports simplify administration:
|
||||
# A single open port can be used for multiple protocols.
|
||||
@@ -103,7 +103,7 @@
|
||||
#
|
||||
# A list of port names and key/value pairs. A port name must start with a
|
||||
# letter and contain only letters and numbers. The name is not case-sensitive.
|
||||
# For each name in this list, xrpld will look for a configuration file
|
||||
# For each name in this list, rippled will look for a configuration file
|
||||
# section with the same name and use it to create a listening port. The
|
||||
# name is informational only; the choice of name does not affect the function
|
||||
# of the listening port.
|
||||
@@ -134,7 +134,7 @@
|
||||
# ip = 127.0.0.1
|
||||
# protocol = http
|
||||
#
|
||||
# When xrpld is used as a command line client (for example, issuing a
|
||||
# When rippled is used as a command line client (for example, issuing a
|
||||
# server stop command), the first port advertising the http or https
|
||||
# protocol will be used to make the connection.
|
||||
#
|
||||
@@ -175,7 +175,7 @@
|
||||
# same time. It is possible have both Websockets and Secure Websockets
|
||||
# together in one port.
|
||||
#
|
||||
# NOTE If no ports support the peer protocol, xrpld cannot
|
||||
# NOTE If no ports support the peer protocol, rippled cannot
|
||||
# receive incoming peer connections or become a superpeer.
|
||||
#
|
||||
# limit = <number>
|
||||
@@ -194,7 +194,7 @@
|
||||
# required. IP address restrictions, if any, will be checked in addition
|
||||
# to the credentials specified here.
|
||||
#
|
||||
# When acting in the client role, xrpld will supply these credentials
|
||||
# When acting in the client role, rippled will supply these credentials
|
||||
# using HTTP's Basic Authentication headers when making outbound HTTP/S
|
||||
# requests.
|
||||
#
|
||||
@@ -218,7 +218,7 @@
|
||||
# administrative commands.
|
||||
#
|
||||
# NOTE A common configuration value for the admin field is "localhost".
|
||||
# If you are listening on all IPv4/IPv6 addresses by specifying
|
||||
# If you are listening on all IPv4/IPv6 addresses by specifing
|
||||
# ip = :: then you can use admin = ::ffff:127.0.0.1,::1 to allow
|
||||
# administrative access from both IPv4 and IPv6 localhost
|
||||
# connections.
|
||||
@@ -237,7 +237,7 @@
|
||||
# WS, or WSS protocol interfaces. If administrative commands are
|
||||
# disabled for a port, these credentials have no effect.
|
||||
#
|
||||
# When acting in the client role, xrpld will supply these credentials
|
||||
# When acting in the client role, rippled will supply these credentials
|
||||
# in the submitted JSON for any administrative command requests when
|
||||
# invoking JSON-RPC commands on remote servers.
|
||||
#
|
||||
@@ -258,7 +258,7 @@
|
||||
# resource controls will default to those for non-administrative users.
|
||||
#
|
||||
# The secure_gateway IP addresses are intended to represent
|
||||
# proxies. Since xrpld trusts these hosts, they must be
|
||||
# proxies. Since rippled trusts these hosts, they must be
|
||||
# responsible for properly authenticating the remote user.
|
||||
#
|
||||
# If some IP addresses are included for both "admin" and
|
||||
@@ -272,7 +272,7 @@
|
||||
# Use the specified files when configuring SSL on the port.
|
||||
#
|
||||
# NOTE If no files are specified and secure protocols are selected,
|
||||
# xrpld will generate an internal self-signed certificate.
|
||||
# rippled will generate an internal self-signed certificate.
|
||||
#
|
||||
# The files have these meanings:
|
||||
#
|
||||
@@ -297,12 +297,12 @@
|
||||
# Control the ciphers which the server will support over SSL on the port,
|
||||
# specified using the OpenSSL "cipher list format".
|
||||
#
|
||||
# NOTE If unspecified, xrpld will automatically configure a modern
|
||||
# NOTE If unspecified, rippled will automatically configure a modern
|
||||
# cipher suite. This default suite should be widely supported.
|
||||
#
|
||||
# You should not modify this string unless you have a specific
|
||||
# reason and cryptographic expertise. Incorrect modification may
|
||||
# keep xrpld from connecting to other instances of xrpld or
|
||||
# keep rippled from connecting to other instances of rippled or
|
||||
# prevent RPC and WebSocket clients from connecting.
|
||||
#
|
||||
# send_queue_limit = [1..65535]
|
||||
@@ -382,7 +382,7 @@
|
||||
#-----------------
|
||||
#
|
||||
# These settings control security and access attributes of the Peer to Peer
|
||||
# server section of the xrpld process. Peer Protocol implements the
|
||||
# server section of the rippled process. Peer Protocol implements the
|
||||
# Ripple Payment protocol. It is over peer connections that transactions
|
||||
# and validations are passed from to machine to machine, to determine the
|
||||
# contents of validated ledgers.
|
||||
@@ -396,7 +396,7 @@
|
||||
# true - enables compression
|
||||
# false - disables compression [default].
|
||||
#
|
||||
# The xrpld server can save bandwidth by compressing its peer-to-peer communications,
|
||||
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
|
||||
# at a cost of greater CPU usage. If you enable link compression,
|
||||
# the server automatically compresses communications with peer servers
|
||||
# that also have link compression enabled.
|
||||
@@ -432,7 +432,7 @@
|
||||
#
|
||||
# [ips_fixed]
|
||||
#
|
||||
# List of IP addresses or hostnames to which xrpld should always attempt to
|
||||
# List of IP addresses or hostnames to which rippled should always attempt to
|
||||
# maintain peer connections with. This is useful for manually forming private
|
||||
# networks, for example to configure a validation server that connects to the
|
||||
# Ripple network through a public-facing server, or for building a set
|
||||
@@ -573,7 +573,7 @@
|
||||
#
|
||||
# minimum_txn_in_ledger_standalone = <number>
|
||||
#
|
||||
# Like minimum_txn_in_ledger when xrpld is running in standalone
|
||||
# Like minimum_txn_in_ledger when rippled is running in standalone
|
||||
# mode. Default: 1000.
|
||||
#
|
||||
# target_txn_in_ledger = <number>
|
||||
@@ -710,7 +710,7 @@
|
||||
#
|
||||
# [validator_token]
|
||||
#
|
||||
# This is an alternative to [validation_seed] that allows xrpld to perform
|
||||
# This is an alternative to [validation_seed] that allows rippled to perform
|
||||
# validation without having to store the validator keys on the network
|
||||
# connected server. The field should contain a single token in the form of a
|
||||
# base64-encoded blob.
|
||||
@@ -745,7 +745,7 @@
|
||||
#
|
||||
# Specify the file by its name or path.
|
||||
# Unless an absolute path is specified, it will be considered relative to
|
||||
# the folder in which the xrpld.cfg file is located.
|
||||
# the folder in which the rippled.cfg file is located.
|
||||
#
|
||||
# Examples:
|
||||
# /home/ripple/validators.txt
|
||||
@@ -840,7 +840,7 @@
|
||||
#
|
||||
# 0: Disable the ledger replay feature [default]
|
||||
# 1: Enable the ledger replay feature. With this feature enabled, when
|
||||
# acquiring a ledger from the network, a xrpld node only downloads
|
||||
# acquiring a ledger from the network, a rippled node only downloads
|
||||
# the ledger header and the transactions instead of the whole ledger.
|
||||
# And the ledger is built by applying the transactions to the parent
|
||||
# ledger.
|
||||
@@ -851,7 +851,7 @@
|
||||
#
|
||||
#----------------
|
||||
#
|
||||
# The xrpld server instance uses HTTPS GET requests in a variety of
|
||||
# The rippled server instance uses HTTPS GET requests in a variety of
|
||||
# circumstances, including but not limited to contacting trusted domains to
|
||||
# fetch information such as mapping an email address to a Ripple Payment
|
||||
# Network address.
|
||||
@@ -891,7 +891,7 @@
|
||||
#
|
||||
#------------
|
||||
#
|
||||
# xrpld creates 4 SQLite database to hold bookkeeping information
|
||||
# rippled creates 4 SQLite database to hold bookkeeping information
|
||||
# about transactions, local credentials, and various other things.
|
||||
# It also creates the NodeDB, which holds all the objects that
|
||||
# make up the current and historical ledgers.
|
||||
@@ -902,7 +902,7 @@
|
||||
# the performance of the server.
|
||||
#
|
||||
# Partial pathnames will be considered relative to the location of
|
||||
# the xrpld.cfg file.
|
||||
# the rippled.cfg file.
|
||||
#
|
||||
# [node_db] Settings for the Node Database (required)
|
||||
#
|
||||
@@ -920,11 +920,11 @@
|
||||
# type = NuDB
|
||||
#
|
||||
# NuDB is a high-performance database written by Ripple Labs and optimized
|
||||
# for xrpld and solid-state drives.
|
||||
# for rippled and solid-state drives.
|
||||
#
|
||||
# NuDB maintains its high speed regardless of the amount of history
|
||||
# stored. Online delete may be selected, but is not required. NuDB is
|
||||
# available on all platforms that xrpld runs on.
|
||||
# available on all platforms that rippled runs on.
|
||||
#
|
||||
# type = RocksDB
|
||||
#
|
||||
@@ -1049,7 +1049,7 @@
|
||||
#
|
||||
# recovery_wait_seconds
|
||||
# The online delete process checks periodically
|
||||
# that xrpld is still in sync with the network,
|
||||
# that rippled is still in sync with the network,
|
||||
# and that the validated ledger is less than
|
||||
# 'age_threshold_seconds' old. If not, then continue
|
||||
# sleeping for this number of seconds and
|
||||
@@ -1069,8 +1069,8 @@
|
||||
# The server creates and maintains 4 to 5 bookkeeping SQLite databases in
|
||||
# the 'database_path' location. If you omit this configuration setting,
|
||||
# the server creates a directory called "db" located in the same place as
|
||||
# your xrpld.cfg file.
|
||||
# Partial pathnames are relative to the location of the xrpld executable.
|
||||
# your rippled.cfg file.
|
||||
# Partial pathnames are relative to the location of the rippled executable.
|
||||
#
|
||||
# [sqlite] Tuning settings for the SQLite databases (optional)
|
||||
#
|
||||
@@ -1120,7 +1120,7 @@
|
||||
# The default is "wal", which uses a write-ahead
|
||||
# log to implement database transactions.
|
||||
# Alternately, "memory" saves disk I/O, but if
|
||||
# xrpld crashes during a transaction, the
|
||||
# rippled crashes during a transaction, the
|
||||
# database is likely to be corrupted.
|
||||
# See https://www.sqlite.org/pragma.html#pragma_journal_mode
|
||||
# for more details about the available options.
|
||||
@@ -1130,7 +1130,7 @@
|
||||
# synchronous Valid values: off, normal, full, extra
|
||||
# The default is "normal", which works well with
|
||||
# the "wal" journal mode. Alternatively, "off"
|
||||
# allows xrpld to continue as soon as data is
|
||||
# allows rippled to continue as soon as data is
|
||||
# passed to the OS, which can significantly
|
||||
# increase speed, but risks data corruption if
|
||||
# the host computer crashes before writing that
|
||||
@@ -1144,7 +1144,7 @@
|
||||
# The default is "file", which will use files
|
||||
# for temporary database tables and indices.
|
||||
# Alternatively, "memory" may save I/O, but
|
||||
# xrpld does not currently use many, if any,
|
||||
# rippled does not currently use many, if any,
|
||||
# of these temporary objects.
|
||||
# See https://www.sqlite.org/pragma.html#pragma_temp_store
|
||||
# for more details about the available options.
|
||||
@@ -1173,7 +1173,7 @@
|
||||
#
|
||||
# These settings are designed to help server administrators diagnose
|
||||
# problems, and obtain detailed information about the activities being
|
||||
# performed by the xrpld process.
|
||||
# performed by the rippled process.
|
||||
#
|
||||
#
|
||||
#
|
||||
@@ -1190,7 +1190,7 @@
|
||||
#
|
||||
# Configuration parameters for the Beast. Insight stats collection module.
|
||||
#
|
||||
# Insight is a module that collects information from the areas of xrpld
|
||||
# Insight is a module that collects information from the areas of rippled
|
||||
# that have instrumentation. The configuration parameters control where the
|
||||
# collection metrics are sent. The parameters are expressed as key = value
|
||||
# pairs with no white space. The main parameter is the choice of server:
|
||||
@@ -1199,7 +1199,7 @@
|
||||
#
|
||||
# Choice of server to send metrics to. Currently the only choice is
|
||||
# "statsd" which sends UDP packets to a StatsD daemon, which must be
|
||||
# running while xrpld is running. More information on StatsD is
|
||||
# running while rippled is running. More information on StatsD is
|
||||
# available here:
|
||||
# https://github.com/b/statsd_spec
|
||||
#
|
||||
@@ -1209,7 +1209,7 @@
|
||||
# in the format, n.n.n.n:port.
|
||||
#
|
||||
# "prefix" A string prepended to each collected metric. This is used
|
||||
# to distinguish between different running instances of xrpld.
|
||||
# to distinguish between different running instances of rippled.
|
||||
#
|
||||
# If this section is missing, or the server type is unspecified or unknown,
|
||||
# statistics are not collected or reported.
|
||||
@@ -1236,7 +1236,7 @@
|
||||
#
|
||||
# Example:
|
||||
# [perf]
|
||||
# perf_log=/var/log/xrpld/perf.log
|
||||
# perf_log=/var/log/rippled/perf.log
|
||||
# log_interval=2
|
||||
#
|
||||
#-------------------------------------------------------------------------------
|
||||
@@ -1246,7 +1246,7 @@
|
||||
#----------
|
||||
#
|
||||
# The vote settings configure settings for the entire Ripple network.
|
||||
# While a single instance of xrpld cannot unilaterally enforce network-wide
|
||||
# While a single instance of rippled cannot unilaterally enforce network-wide
|
||||
# settings, these choices become part of the instance's vote during the
|
||||
# consensus process for each voting ledger.
|
||||
#
|
||||
@@ -1260,7 +1260,7 @@
|
||||
# The reference transaction is the simplest form of transaction.
|
||||
# It represents an XRP payment between two parties.
|
||||
#
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# If this parameter is unspecified, rippled will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
@@ -1272,7 +1272,7 @@
|
||||
# account's XRP balance that is at or below the reserve may only be
|
||||
# spent on transaction fees, and not transferred out of the account.
|
||||
#
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# If this parameter is unspecified, rippled will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
@@ -1284,7 +1284,7 @@
|
||||
# each ledger item owned by the account. Ledger items an account may
|
||||
# own include trust lines, open orders, and tickets.
|
||||
#
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# If this parameter is unspecified, rippled will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
@@ -1326,7 +1326,7 @@
|
||||
# tool instead.
|
||||
#
|
||||
# This flag has no effect on the "sign" and "sign_for" command line options
|
||||
# that xrpld makes available.
|
||||
# that rippled makes available.
|
||||
#
|
||||
# The default value of this field is "false"
|
||||
#
|
||||
@@ -1405,7 +1405,7 @@
|
||||
#--------------------
|
||||
#
|
||||
# Administrators can use these values as a starting point for configuring
|
||||
# their instance of xrpld, but each value should be checked to make sure
|
||||
# their instance of rippled, but each value should be checked to make sure
|
||||
# it meets the business requirements for the organization.
|
||||
#
|
||||
# Server
|
||||
@@ -1415,7 +1415,7 @@
|
||||
# "peer"
|
||||
#
|
||||
# Peer protocol open to everyone. This is required to accept
|
||||
# incoming xrpld connections. This does not affect automatic
|
||||
# incoming rippled connections. This does not affect automatic
|
||||
# or manual outgoing Peer protocol connections.
|
||||
#
|
||||
# "rpc"
|
||||
@@ -1432,7 +1432,7 @@
|
||||
#
|
||||
# ETL commands for Clio. We recommend setting secure_gateway
|
||||
# in this section to a comma-separated list of the addresses
|
||||
# of your Clio servers, in order to bypass xrpld's rate limiting.
|
||||
# of your Clio servers, in order to bypass rippled's rate limiting.
|
||||
#
|
||||
# This port is commented out but can be enabled by removing
|
||||
# the '#' from each corresponding line including the entry under [server]
|
||||
@@ -1449,8 +1449,8 @@
|
||||
# NOTE
|
||||
#
|
||||
# To accept connections on well known ports such as 80 (HTTP) or
|
||||
# 443 (HTTPS), most operating systems will require xrpld to
|
||||
# run with administrator privileges, or else xrpld will not start.
|
||||
# 443 (HTTPS), most operating systems will require rippled to
|
||||
# run with administrator privileges, or else rippled will not start.
|
||||
|
||||
[server]
|
||||
port_rpc_admin_local
|
||||
@@ -1496,7 +1496,7 @@ secure_gateway = 127.0.0.1
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# This is primary persistent datastore for xrpld. This includes transaction
|
||||
# This is primary persistent datastore for rippled. This includes transaction
|
||||
# metadata, account states, and ledger headers. Helpful information can be
|
||||
# found at https://xrpl.org/capacity-planning.html#node-db-type
|
||||
# type=NuDB is recommended for non-validators with fast SSDs. Validators or
|
||||
@@ -1511,19 +1511,19 @@ secure_gateway = 127.0.0.1
|
||||
# deletion.
|
||||
[node_db]
|
||||
type=NuDB
|
||||
path=/var/lib/xrpld/db/nudb
|
||||
path=/var/lib/rippled/db/nudb
|
||||
nudb_block_size=4096
|
||||
online_delete=512
|
||||
advisory_delete=0
|
||||
|
||||
[database_path]
|
||||
/var/lib/xrpld/db
|
||||
/var/lib/rippled/db
|
||||
|
||||
|
||||
# This needs to be an absolute directory reference, not a relative one.
|
||||
# Modify this value as required.
|
||||
[debug_logfile]
|
||||
/var/log/xrpld/debug.log
|
||||
/var/log/rippled/debug.log
|
||||
|
||||
# To use the XRP test network
|
||||
# (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
|
||||
@@ -1533,7 +1533,7 @@ advisory_delete=0
|
||||
|
||||
# File containing trusted validator keys or validator list publishers.
|
||||
# Unless an absolute path is specified, it will be considered relative to the
|
||||
# folder in which the xrpld.cfg file is located.
|
||||
# folder in which the rippled.cfg file is located.
|
||||
[validators_file]
|
||||
validators.txt
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# Default validators.txt
|
||||
#
|
||||
# This file is located in the same folder as your xrpld.cfg file
|
||||
# This file is located in the same folder as your rippled.cfg file
|
||||
# and defines which validators your server trusts not to collude.
|
||||
#
|
||||
# This file is UTF-8 with DOS, UNIX, or Mac style line endings.
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
find_program(CCACHE_PATH "ccache")
|
||||
if (NOT CCACHE_PATH)
|
||||
return()
|
||||
endif ()
|
||||
|
||||
# For Linux and macOS we can use the ccache binary directly.
|
||||
if (NOT MSVC)
|
||||
set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PATH}")
|
||||
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PATH}")
|
||||
message(STATUS "Found ccache: ${CCACHE_PATH}")
|
||||
return()
|
||||
endif ()
|
||||
|
||||
# For Windows more effort is required. The code below is a modified version of
|
||||
# https://github.com/ccache/ccache/wiki/MS-Visual-Studio#usage-with-cmake.
|
||||
if ("${CCACHE_PATH}" MATCHES "chocolatey")
|
||||
message(DEBUG "Ccache path: ${CCACHE_PATH}")
|
||||
# Chocolatey uses a shim executable that we cannot use directly, in which
|
||||
# case we have to find the executable it points to. If we cannot find the
|
||||
# target executable then we cannot use ccache.
|
||||
find_program(BASH_PATH "bash")
|
||||
if (NOT BASH_PATH)
|
||||
message(WARNING "Could not find bash.")
|
||||
return()
|
||||
endif ()
|
||||
|
||||
execute_process(
|
||||
COMMAND bash -c "export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1"
|
||||
OUTPUT_VARIABLE CCACHE_PATH)
|
||||
|
||||
if (NOT CCACHE_PATH)
|
||||
message(WARNING "Could not find ccache target.")
|
||||
return()
|
||||
endif ()
|
||||
file(TO_CMAKE_PATH "${CCACHE_PATH}" CCACHE_PATH)
|
||||
endif ()
|
||||
message(STATUS "Found ccache: ${CCACHE_PATH}")
|
||||
|
||||
# Tell cmake to use ccache for compiling with Visual Studio.
|
||||
file(COPY_FILE
|
||||
${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe
|
||||
ONLY_IF_DIFFERENT)
|
||||
set(CMAKE_VS_GLOBALS
|
||||
"CLToolExe=cl.exe"
|
||||
"CLToolPath=${CMAKE_BINARY_DIR}"
|
||||
"TrackFileAccess=false"
|
||||
"UseMultiToolTask=true")
|
||||
|
||||
# By default Visual Studio generators will use /Zi to capture debug information,
|
||||
# which is not compatible with ccache, so tell it to use /Z7 instead.
|
||||
if (MSVC)
|
||||
foreach (var_
|
||||
CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
|
||||
CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
|
||||
string (REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
|
||||
endforeach ()
|
||||
endif ()
|
||||
@@ -1,54 +0,0 @@
|
||||
# Shared detection of compiler, operating system, and architecture.
|
||||
#
|
||||
# This module centralizes environment detection so that other
|
||||
# CMake modules can use the same variables instead of repeating
|
||||
# checks on CMAKE_* and built-in platform variables.
|
||||
|
||||
# Only run once per configure step.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Compiler detection (C++)
|
||||
# --------------------------------------------------------------------
|
||||
set(is_clang FALSE)
|
||||
set(is_gcc FALSE)
|
||||
set(is_msvc FALSE)
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") # Clang or AppleClang
|
||||
set(is_clang TRUE)
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set(is_gcc TRUE)
|
||||
elseif(MSVC)
|
||||
set(is_msvc TRUE)
|
||||
else()
|
||||
message(FATAL_ERROR "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}")
|
||||
endif()
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Operating system detection
|
||||
# --------------------------------------------------------------------
|
||||
set(is_linux FALSE)
|
||||
set(is_windows FALSE)
|
||||
set(is_macos FALSE)
|
||||
|
||||
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
|
||||
set(is_linux TRUE)
|
||||
elseif(CMAKE_SYSTEM_NAME STREQUAL "Windows")
|
||||
set(is_windows TRUE)
|
||||
elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
|
||||
set(is_macos TRUE)
|
||||
endif()
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Architecture
|
||||
# --------------------------------------------------------------------
|
||||
set(is_amd64 FALSE)
|
||||
set(is_arm64 FALSE)
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|AMD64")
|
||||
set(is_amd64 TRUE)
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64")
|
||||
set(is_arm64 TRUE)
|
||||
else()
|
||||
message(FATAL_ERROR "Unknown architecture: ${CMAKE_SYSTEM_PROCESSOR}")
|
||||
endif()
|
||||
@@ -2,23 +2,16 @@
|
||||
setup project-wide compiler settings
|
||||
#]===================================================================]
|
||||
|
||||
include(CompilationEnv)
|
||||
|
||||
#[=========================================================[
|
||||
TODO some/most of these common settings belong in a
|
||||
toolchain file, especially the ABI-impacting ones
|
||||
#]=========================================================]
|
||||
add_library (common INTERFACE)
|
||||
add_library (Xrpl::common ALIAS common)
|
||||
include(XrplSanitizers)
|
||||
# add a single global dependency on this interface lib
|
||||
link_libraries (Xrpl::common)
|
||||
# Respect CMAKE_POSITION_INDEPENDENT_CODE setting (may be set by Conan toolchain)
|
||||
if(NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE)
|
||||
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
|
||||
endif()
|
||||
set_target_properties (common
|
||||
PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE})
|
||||
PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ON)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
target_compile_definitions (common
|
||||
INTERFACE
|
||||
@@ -51,7 +44,6 @@ if (MSVC)
|
||||
# omit debug info completely under CI (not needed)
|
||||
if (is_ci)
|
||||
string (REPLACE "/Zi" " " ${var_} "${${var_}}")
|
||||
string (REPLACE "/Z7" " " ${var_} "${${var_}}")
|
||||
endif ()
|
||||
endforeach ()
|
||||
|
||||
@@ -123,8 +115,8 @@ else ()
|
||||
# link to static libc/c++ iff:
|
||||
# * static option set and
|
||||
# * NOT APPLE (AppleClang does not support static libc/c++) and
|
||||
# * NOT SANITIZERS (sanitizers typically don't work with static libc/c++)
|
||||
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${SANITIZERS_ENABLED}>>>:
|
||||
# * NOT san (sanitizers typically don't work with static libc/c++)
|
||||
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${san}>>>:
|
||||
-static-libstdc++
|
||||
-static-libgcc
|
||||
>)
|
||||
@@ -157,7 +149,7 @@ elseif (use_gold AND is_gcc)
|
||||
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
|
||||
#[=========================================================[
|
||||
NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
|
||||
default instead of DT_RPATH, so you might have slightly
|
||||
default intead of DT_RPATH, so you might have slightly
|
||||
unexpected runtime ld behavior if you were expecting
|
||||
DT_RPATH. Specify --disable-new-dtags to gold if you do
|
||||
not want the default DT_RUNPATH behavior. This rpath
|
||||
|
||||
@@ -206,7 +206,7 @@ if(xrpld)
|
||||
)
|
||||
exclude_if_included(xrpld)
|
||||
# define a macro for tests that might need to
|
||||
# be excluded or run differently in CI environment
|
||||
# be exluded or run differently in CI environment
|
||||
if(is_ci)
|
||||
target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI)
|
||||
endif ()
|
||||
|
||||
@@ -62,7 +62,7 @@ if (is_root_project AND TARGET xrpld)
|
||||
message (\"-- Skipping : \$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
|
||||
endif ()
|
||||
endmacro()
|
||||
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg\" etc xrpld.cfg)
|
||||
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/rippled-example.cfg\" etc rippled.cfg)
|
||||
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
|
||||
")
|
||||
install(CODE "
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
xrpld compile options/settings via an interface library
|
||||
#]===================================================================]
|
||||
|
||||
include(CompilationEnv)
|
||||
|
||||
add_library (opts INTERFACE)
|
||||
add_library (Xrpl::opts ALIAS opts)
|
||||
target_compile_definitions (opts
|
||||
@@ -44,6 +42,22 @@ if(jemalloc)
|
||||
target_link_libraries(opts INTERFACE jemalloc::jemalloc)
|
||||
endif ()
|
||||
|
||||
if (san)
|
||||
target_compile_options (opts
|
||||
INTERFACE
|
||||
# sanitizers recommend minimum of -O1 for reasonable performance
|
||||
$<$<CONFIG:Debug>:-O1>
|
||||
${SAN_FLAG}
|
||||
-fno-omit-frame-pointer)
|
||||
target_compile_definitions (opts
|
||||
INTERFACE
|
||||
$<$<STREQUAL:${san},address>:SANITIZER=ASAN>
|
||||
$<$<STREQUAL:${san},thread>:SANITIZER=TSAN>
|
||||
$<$<STREQUAL:${san},memory>:SANITIZER=MSAN>
|
||||
$<$<STREQUAL:${san},undefined>:SANITIZER=UBSAN>)
|
||||
target_link_libraries (opts INTERFACE ${SAN_FLAG} ${SAN_LIB})
|
||||
endif ()
|
||||
|
||||
#[===================================================================[
|
||||
xrpld transitive library deps via an interface library
|
||||
#]===================================================================]
|
||||
|
||||
@@ -1,198 +0,0 @@
|
||||
#[===================================================================[
|
||||
Configure sanitizers based on environment variables.
|
||||
|
||||
This module reads the following environment variables:
|
||||
- SANITIZERS: The sanitizers to enable. Possible values:
|
||||
- "address"
|
||||
- "address,undefinedbehavior"
|
||||
- "thread"
|
||||
- "thread,undefinedbehavior"
|
||||
- "undefinedbehavior"
|
||||
|
||||
The compiler type and platform are detected in CompilationEnv.cmake.
|
||||
The sanitizer compile options are applied to the 'common' interface library
|
||||
which is linked to all targets in the project.
|
||||
|
||||
Internal flag variables set by this module:
|
||||
|
||||
- SANITIZER_TYPES: List of sanitizer types to enable (e.g., "address",
|
||||
"thread", "undefined"). And two more flags for undefined behavior sanitizer (e.g., "float-divide-by-zero", "unsigned-integer-overflow").
|
||||
This list is joined with commas and passed to -fsanitize=<list>.
|
||||
|
||||
- SANITIZERS_COMPILE_FLAGS: Compiler flags for sanitizer instrumentation.
|
||||
Includes:
|
||||
* -fno-omit-frame-pointer: Preserves frame pointers for stack traces
|
||||
* -O1: Minimum optimization for reasonable performance
|
||||
* -fsanitize=<types>: Enables sanitizer instrumentation
|
||||
* -fsanitize-ignorelist=<path>: (Clang only) Compile-time ignorelist
|
||||
* -mcmodel=large/medium: (GCC only) Code model for large binaries
|
||||
* -Wno-stringop-overflow: (GCC only) Suppresses false positive warnings
|
||||
* -Wno-tsan: (For GCC TSAN combination only) Suppresses atomic_thread_fence warnings
|
||||
|
||||
- SANITIZERS_LINK_FLAGS: Linker flags for sanitizer runtime libraries.
|
||||
Includes:
|
||||
* -fsanitize=<types>: Links sanitizer runtime libraries
|
||||
* -mcmodel=large/medium: (GCC only) Matches compile-time code model
|
||||
|
||||
- SANITIZERS_RELOCATION_FLAGS: (GCC only) Code model flags for linking.
|
||||
Used to handle large instrumented binaries on x86_64:
|
||||
* -mcmodel=large: For AddressSanitizer (prevents relocation errors)
|
||||
* -mcmodel=medium: For ThreadSanitizer (large model is incompatible)
|
||||
#]===================================================================]
|
||||
|
||||
include(CompilationEnv)
|
||||
|
||||
# Read environment variable
|
||||
set(SANITIZERS $ENV{SANITIZERS})
|
||||
|
||||
# Set SANITIZERS_ENABLED flag for use in other modules
|
||||
if(SANITIZERS MATCHES "address|thread|undefinedbehavior")
|
||||
set(SANITIZERS_ENABLED TRUE)
|
||||
else()
|
||||
set(SANITIZERS_ENABLED FALSE)
|
||||
return()
|
||||
endif()
|
||||
|
||||
# Sanitizers are not supported on Windows/MSVC
|
||||
if(is_msvc)
|
||||
message(FATAL_ERROR "Sanitizers are not supported on Windows/MSVC. "
|
||||
"Please unset the SANITIZERS environment variable.")
|
||||
endif()
|
||||
|
||||
message(STATUS "Configuring sanitizers: ${SANITIZERS}")
|
||||
|
||||
# Parse SANITIZERS value to determine which sanitizers to enable
|
||||
set(enable_asan FALSE)
|
||||
set(enable_tsan FALSE)
|
||||
set(enable_ubsan FALSE)
|
||||
|
||||
# Normalize SANITIZERS into a list
|
||||
set(san_list "${SANITIZERS}")
|
||||
string(REPLACE "," ";" san_list "${san_list}")
|
||||
separate_arguments(san_list)
|
||||
|
||||
foreach(san IN LISTS san_list)
|
||||
if(san STREQUAL "address")
|
||||
set(enable_asan TRUE)
|
||||
elseif(san STREQUAL "thread")
|
||||
set(enable_tsan TRUE)
|
||||
elseif(san STREQUAL "undefinedbehavior")
|
||||
set(enable_ubsan TRUE)
|
||||
else()
|
||||
message(FATAL_ERROR "Unsupported sanitizer type: ${san}"
|
||||
"Supported: address, thread, undefinedbehavior and their combinations.")
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
# Validate sanitizer compatibility
|
||||
if(enable_asan AND enable_tsan)
|
||||
message(FATAL_ERROR "AddressSanitizer and ThreadSanitizer are incompatible and cannot be enabled simultaneously. "
|
||||
"Use 'address' or 'thread', optionally with 'undefinedbehavior'.")
|
||||
endif()
|
||||
|
||||
# Frame pointer is required for meaningful stack traces. Sanitizers recommend minimum of -O1 for reasonable performance
|
||||
set(SANITIZERS_COMPILE_FLAGS "-fno-omit-frame-pointer" "-O1")
|
||||
|
||||
# Build the sanitizer flags list
|
||||
set(SANITIZER_TYPES)
|
||||
|
||||
if(enable_asan)
|
||||
list(APPEND SANITIZER_TYPES "address")
|
||||
elseif(enable_tsan)
|
||||
list(APPEND SANITIZER_TYPES "thread")
|
||||
endif()
|
||||
|
||||
if(enable_ubsan)
|
||||
# UB sanitizer flags
|
||||
list(APPEND SANITIZER_TYPES "undefined" "float-divide-by-zero")
|
||||
if(is_clang)
|
||||
# Clang supports additional UB checks. More info here https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html
|
||||
list(APPEND SANITIZER_TYPES "unsigned-integer-overflow")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Configure code model for GCC on amd64
|
||||
# Use large code model for ASAN to avoid relocation errors
|
||||
# Use medium code model for TSAN (large is not compatible with TSAN)
|
||||
set(SANITIZERS_RELOCATION_FLAGS)
|
||||
|
||||
# Compiler-specific configuration
|
||||
if(is_gcc)
|
||||
# Disable mold, gold and lld linkers for GCC with sanitizers
|
||||
# Use default linker (bfd/ld) which is more lenient with mixed code models
|
||||
# This is needed since the size of instrumented binary exceeds the limits set by mold, lld and gold linkers
|
||||
set(use_mold OFF CACHE BOOL "Use mold linker" FORCE)
|
||||
set(use_gold OFF CACHE BOOL "Use gold linker" FORCE)
|
||||
set(use_lld OFF CACHE BOOL "Use lld linker" FORCE)
|
||||
message(STATUS " Disabled mold, gold, and lld linkers for GCC with sanitizers")
|
||||
|
||||
# Suppress false positive warnings in GCC with stringop-overflow
|
||||
list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-stringop-overflow")
|
||||
|
||||
if(is_amd64 AND enable_asan)
|
||||
message(STATUS " Using large code model (-mcmodel=large)")
|
||||
list(APPEND SANITIZERS_COMPILE_FLAGS "-mcmodel=large")
|
||||
list(APPEND SANITIZERS_RELOCATION_FLAGS "-mcmodel=large")
|
||||
elseif(enable_tsan)
|
||||
# GCC doesn't support atomic_thread_fence with tsan. Suppress warnings.
|
||||
list(APPEND SANITIZERS_COMPILE_FLAGS "-Wno-tsan")
|
||||
message(STATUS " Using medium code model (-mcmodel=medium)")
|
||||
list(APPEND SANITIZERS_COMPILE_FLAGS "-mcmodel=medium")
|
||||
list(APPEND SANITIZERS_RELOCATION_FLAGS "-mcmodel=medium")
|
||||
endif()
|
||||
|
||||
# Join sanitizer flags with commas for -fsanitize option
|
||||
list(JOIN SANITIZER_TYPES "," SANITIZER_TYPES_STR)
|
||||
|
||||
# Add sanitizer to compile and link flags
|
||||
list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
|
||||
set(SANITIZERS_LINK_FLAGS "${SANITIZERS_RELOCATION_FLAGS}" "-fsanitize=${SANITIZER_TYPES_STR}")
|
||||
|
||||
elseif(is_clang)
|
||||
# Add ignorelist for Clang (GCC doesn't support this)
|
||||
# Use CMAKE_SOURCE_DIR to get the path to the ignorelist
|
||||
set(IGNORELIST_PATH "${CMAKE_SOURCE_DIR}/sanitizers/suppressions/sanitizer-ignorelist.txt")
|
||||
if(NOT EXISTS "${IGNORELIST_PATH}")
|
||||
message(FATAL_ERROR "Sanitizer ignorelist not found: ${IGNORELIST_PATH}")
|
||||
endif()
|
||||
|
||||
list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize-ignorelist=${IGNORELIST_PATH}")
|
||||
message(STATUS " Using sanitizer ignorelist: ${IGNORELIST_PATH}")
|
||||
|
||||
# Join sanitizer flags with commas for -fsanitize option
|
||||
list(JOIN SANITIZER_TYPES "," SANITIZER_TYPES_STR)
|
||||
|
||||
# Add sanitizer to compile and link flags
|
||||
list(APPEND SANITIZERS_COMPILE_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
|
||||
set(SANITIZERS_LINK_FLAGS "-fsanitize=${SANITIZER_TYPES_STR}")
|
||||
endif()
|
||||
|
||||
message(STATUS " Compile flags: ${SANITIZERS_COMPILE_FLAGS}")
|
||||
message(STATUS " Link flags: ${SANITIZERS_LINK_FLAGS}")
|
||||
|
||||
# Apply the sanitizer flags to the 'common' interface library
|
||||
# This is the same library used by XrplCompiler.cmake
|
||||
target_compile_options(common INTERFACE
|
||||
$<$<COMPILE_LANGUAGE:CXX>:${SANITIZERS_COMPILE_FLAGS}>
|
||||
$<$<COMPILE_LANGUAGE:C>:${SANITIZERS_COMPILE_FLAGS}>
|
||||
)
|
||||
|
||||
# Apply linker flags
|
||||
target_link_options(common INTERFACE ${SANITIZERS_LINK_FLAGS})
|
||||
|
||||
# Define SANITIZERS macro for BuildInfo.cpp
|
||||
set(sanitizers_list)
|
||||
if(enable_asan)
|
||||
list(APPEND sanitizers_list "ASAN")
|
||||
endif()
|
||||
if(enable_tsan)
|
||||
list(APPEND sanitizers_list "TSAN")
|
||||
endif()
|
||||
if(enable_ubsan)
|
||||
list(APPEND sanitizers_list "UBSAN")
|
||||
endif()
|
||||
|
||||
if(sanitizers_list)
|
||||
list(JOIN sanitizers_list "." sanitizers_str)
|
||||
target_compile_definitions(common INTERFACE SANITIZERS=${sanitizers_str})
|
||||
endif()
|
||||
@@ -2,8 +2,6 @@
|
||||
sanity checks
|
||||
#]===================================================================]
|
||||
|
||||
include(CompilationEnv)
|
||||
|
||||
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
||||
|
||||
set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)
|
||||
@@ -18,12 +16,14 @@ if (NOT is_multiconfig)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if (is_clang) # both Clang and AppleClang
|
||||
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
|
||||
set (is_clang TRUE)
|
||||
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
|
||||
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
|
||||
message (FATAL_ERROR "This project requires clang 16 or later")
|
||||
endif ()
|
||||
elseif (is_gcc)
|
||||
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||
set (is_gcc TRUE)
|
||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
|
||||
message (FATAL_ERROR "This project requires GCC 12 or later")
|
||||
endif ()
|
||||
@@ -40,6 +40,11 @@ if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
|
||||
message (FATAL_ERROR "Visual Studio 32-bit build is not supported.")
|
||||
endif ()
|
||||
|
||||
if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
message (FATAL_ERROR "Xrpld requires a 64 bit target architecture.\n"
|
||||
"The most likely cause of this warning is trying to build xrpld with a 32-bit OS.")
|
||||
endif ()
|
||||
|
||||
if (APPLE AND NOT HOMEBREW)
|
||||
find_program (HOMEBREW brew)
|
||||
endif ()
|
||||
|
||||
@@ -2,7 +2,11 @@
|
||||
declare options and variables
|
||||
#]===================================================================]
|
||||
|
||||
include(CompilationEnv)
|
||||
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
|
||||
set (is_linux TRUE)
|
||||
else()
|
||||
set(is_linux FALSE)
|
||||
endif()
|
||||
|
||||
if("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
|
||||
set(is_ci TRUE)
|
||||
@@ -58,27 +62,12 @@ else()
|
||||
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
|
||||
if(is_linux AND NOT SANITIZER)
|
||||
if(is_linux)
|
||||
option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
|
||||
option(static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||
option(perf "Enables flags that assist with perf recording" OFF)
|
||||
option(use_gold "enables detection of gold (binutils) linker" ON)
|
||||
option(use_mold "enables detection of mold (binutils) linker" ON)
|
||||
# Set a default value for the log flag based on the build type.
|
||||
# This provides a sensible default (on for debug, off for release)
|
||||
# while still allowing the user to override it for any build.
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
set(TRUNCATED_LOGS_DEFAULT ON)
|
||||
else()
|
||||
set(TRUNCATED_LOGS_DEFAULT OFF)
|
||||
endif()
|
||||
option(TRUNCATED_THREAD_NAME_LOGS
|
||||
"Show warnings about truncated thread names on Linux."
|
||||
${TRUNCATED_LOGS_DEFAULT}
|
||||
)
|
||||
if(TRUNCATED_THREAD_NAME_LOGS)
|
||||
add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS)
|
||||
endif()
|
||||
else()
|
||||
# we are not ready to allow shared-libs on windows because it would require
|
||||
# export declarations. On macos it's more feasible, but static openssl
|
||||
@@ -103,6 +92,33 @@ option(local_protobuf
|
||||
option(local_grpc
|
||||
"Force a local build of gRPC instead of looking for an installed version." OFF)
|
||||
|
||||
# this one is a string and therefore can't be an option
|
||||
set(san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
|
||||
set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
|
||||
if(san)
|
||||
string(TOLOWER ${san} san)
|
||||
set(SAN_FLAG "-fsanitize=${san}")
|
||||
set(SAN_LIB "")
|
||||
if(is_gcc)
|
||||
if(san STREQUAL "address")
|
||||
set(SAN_LIB "asan")
|
||||
elseif(san STREQUAL "thread")
|
||||
set(SAN_LIB "tsan")
|
||||
elseif(san STREQUAL "memory")
|
||||
set(SAN_LIB "msan")
|
||||
elseif(san STREQUAL "undefined")
|
||||
set(SAN_LIB "ubsan")
|
||||
endif()
|
||||
endif()
|
||||
set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
|
||||
set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
|
||||
check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
|
||||
set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
|
||||
if(NOT COMPILER_SUPPORTS_SAN)
|
||||
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# the remaining options are obscure and rarely used
|
||||
option(beast_no_unit_test_inline
|
||||
"Prevents unit test definitions from being inserted into global table"
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
include(CompilationEnv)
|
||||
include(XrplSanitizers)
|
||||
|
||||
find_package(Boost REQUIRED
|
||||
find_package(Boost 1.82 REQUIRED
|
||||
COMPONENTS
|
||||
chrono
|
||||
container
|
||||
@@ -35,7 +32,7 @@ target_link_libraries(xrpl_boost
|
||||
if(Boost_COMPILER)
|
||||
target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
|
||||
endif()
|
||||
if(SANITIZERS_ENABLED AND is_clang)
|
||||
if(san AND is_clang)
|
||||
# TODO: gcc does not support -fsanitize-blacklist...can we do something else
|
||||
# for gcc ?
|
||||
if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
|
||||
|
||||
62
conan.lock
62
conan.lock
@@ -1,44 +1,44 @@
|
||||
{
|
||||
"version": "0.5",
|
||||
"requires": [
|
||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1765850150.075",
|
||||
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1765850149.987",
|
||||
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1765850149.926",
|
||||
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1765850149.46",
|
||||
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1765850147.878",
|
||||
"secp256k1/0.7.0#9c4ab67bdc3860c16ea5b36aed8f74ea%1765850147.928",
|
||||
"rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1765850186.86",
|
||||
"re2/20230301#ca3b241baec15bd31ea9187150e0b333%1765850148.103",
|
||||
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1765850161.038",
|
||||
"openssl/3.5.4#1b986e61b38fdfda3b40bebc1b234393%1768312656.257",
|
||||
"nudb/2.0.9#fb8dfd1a5557f5e0528114c2da17721e%1765850143.957",
|
||||
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1765850143.914",
|
||||
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1765842973.492",
|
||||
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1765842973.03",
|
||||
"libarchive/3.8.1#ffee18995c706e02bf96e7a2f7042e0d%1765850144.736",
|
||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
||||
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
|
||||
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
|
||||
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
|
||||
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
|
||||
"secp256k1/0.7.0#9c4ab67bdc3860c16ea5b36aed8f74ea%1765202256.763",
|
||||
"rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1762797952.535",
|
||||
"re2/20230301#ca3b241baec15bd31ea9187150e0b333%1764175362.029",
|
||||
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1764863245.83",
|
||||
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1760106486.594",
|
||||
"nudb/2.0.9#fb8dfd1a5557f5e0528114c2da17721e%1763150366.909",
|
||||
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
|
||||
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
|
||||
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
|
||||
"libarchive/3.8.1#ffee18995c706e02bf96e7a2f7042e0d%1764175360.142",
|
||||
"jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244",
|
||||
"gtest/1.17.0#5224b3b3ff3b4ce1133cbdd27d53ee7d%1768312129.152",
|
||||
"grpc/1.72.0#f244a57bff01e708c55a1100b12e1589%1765850193.734",
|
||||
"ed25519/2015.03#ae761bdc52730a843f0809bdf6c1b1f6%1765850143.772",
|
||||
"date/3.0.4#862e11e80030356b53c2c38599ceb32b%1765850143.772",
|
||||
"c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1765850144.336",
|
||||
"bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1765850143.837",
|
||||
"boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1765850172.862",
|
||||
"abseil/20250127.0#99262a368bd01c0ccca8790dfced9719%1766517936.993"
|
||||
"grpc/1.72.0#f244a57bff01e708c55a1100b12e1589%1763158050.628",
|
||||
"ed25519/2015.03#ae761bdc52730a843f0809bdf6c1b1f6%1764270189.893",
|
||||
"doctest/2.4.12#eb9fb352fb2fdfc8abb17ec270945165%1762797941.757",
|
||||
"date/3.0.4#862e11e80030356b53c2c38599ceb32b%1763584497.32",
|
||||
"c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1764175359.429",
|
||||
"bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1764175359.429",
|
||||
"boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1756223752.326",
|
||||
"abseil/20250127.0#9e8e8cfc89a1324139fc0ee3bd4d8c8c%1753819045.301"
|
||||
],
|
||||
"build_requires": [
|
||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1765850150.075",
|
||||
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1765850165.196",
|
||||
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1765850161.038",
|
||||
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1765850144.707",
|
||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
||||
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1756234281.733",
|
||||
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1764863245.83",
|
||||
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1756234232.901",
|
||||
"msys2/cci.latest#1996656c3c98e5765b25b60ff5cf77b4%1764840888.758",
|
||||
"m4/1.4.19#70dc8bbb33e981d119d2acc0175cf381%1763158052.846",
|
||||
"cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1765850153.937",
|
||||
"cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1765850153.479",
|
||||
"b2/5.3.3#107c15377719889654eb9a162a673975%1765850144.355",
|
||||
"cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1764175359.44",
|
||||
"cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1764175359.429",
|
||||
"b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28",
|
||||
"automake/1.16.5#b91b7c384c3deaa9d535be02da14d04f%1755524470.56",
|
||||
"autoconf/2.71#51077f068e61700d65bb05541ea1e4b0%1731054366.86",
|
||||
"abseil/20250127.0#99262a368bd01c0ccca8790dfced9719%1766517936.993"
|
||||
"abseil/20250127.0#9e8e8cfc89a1324139fc0ee3bd4d8c8c%1753819045.301"
|
||||
],
|
||||
"python_requires": [],
|
||||
"overrides": {
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
include(sanitizers)
|
||||
@@ -1,59 +0,0 @@
|
||||
include(default)
|
||||
{% set compiler, version, compiler_exe = detect_api.detect_default_compiler() %}
|
||||
{% set sanitizers = os.getenv("SANITIZERS") %}
|
||||
|
||||
[conf]
|
||||
{% if sanitizers %}
|
||||
{% if compiler == "gcc" %}
|
||||
{% if "address" in sanitizers or "thread" in sanitizers or "undefinedbehavior" in sanitizers %}
|
||||
{% set sanitizer_list = [] %}
|
||||
{% set model_code = "" %}
|
||||
{% set extra_cxxflags = ["-fno-omit-frame-pointer", "-O1", "-Wno-stringop-overflow"] %}
|
||||
|
||||
{% if "address" in sanitizers %}
|
||||
{% set _ = sanitizer_list.append("address") %}
|
||||
{% set model_code = "-mcmodel=large" %}
|
||||
{% elif "thread" in sanitizers %}
|
||||
{% set _ = sanitizer_list.append("thread") %}
|
||||
{% set model_code = "-mcmodel=medium" %}
|
||||
{% set _ = extra_cxxflags.append("-Wno-tsan") %}
|
||||
{% endif %}
|
||||
|
||||
{% if "undefinedbehavior" in sanitizers %}
|
||||
{% set _ = sanitizer_list.append("undefined") %}
|
||||
{% set _ = sanitizer_list.append("float-divide-by-zero") %}
|
||||
{% endif %}
|
||||
|
||||
{% set sanitizer_flags = "-fsanitize=" ~ ",".join(sanitizer_list) ~ " " ~ model_code %}
|
||||
|
||||
tools.build:cxxflags+=['{{sanitizer_flags}} {{" ".join(extra_cxxflags)}}']
|
||||
tools.build:sharedlinkflags+=['{{sanitizer_flags}}']
|
||||
tools.build:exelinkflags+=['{{sanitizer_flags}}']
|
||||
{% endif %}
|
||||
{% elif compiler == "apple-clang" or compiler == "clang" %}
|
||||
{% if "address" in sanitizers or "thread" in sanitizers or "undefinedbehavior" in sanitizers %}
|
||||
{% set sanitizer_list = [] %}
|
||||
{% set extra_cxxflags = ["-fno-omit-frame-pointer", "-O1"] %}
|
||||
|
||||
{% if "address" in sanitizers %}
|
||||
{% set _ = sanitizer_list.append("address") %}
|
||||
{% elif "thread" in sanitizers %}
|
||||
{% set _ = sanitizer_list.append("thread") %}
|
||||
{% endif %}
|
||||
|
||||
{% if "undefinedbehavior" in sanitizers %}
|
||||
{% set _ = sanitizer_list.append("undefined") %}
|
||||
{% set _ = sanitizer_list.append("float-divide-by-zero") %}
|
||||
{% set _ = sanitizer_list.append("unsigned-integer-overflow") %}
|
||||
{% endif %}
|
||||
|
||||
{% set sanitizer_flags = "-fsanitize=" ~ ",".join(sanitizer_list) %}
|
||||
|
||||
tools.build:cxxflags+=['{{sanitizer_flags}} {{" ".join(extra_cxxflags)}}']
|
||||
tools.build:sharedlinkflags+=['{{sanitizer_flags}}']
|
||||
tools.build:exelinkflags+=['{{sanitizer_flags}}']
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
tools.info.package_id:confs+=["tools.build:cxxflags", "tools.build:exelinkflags", "tools.build:sharedlinkflags"]
|
||||
14
conanfile.py
14
conanfile.py
@@ -39,7 +39,7 @@ class Xrpl(ConanFile):
|
||||
]
|
||||
|
||||
test_requires = [
|
||||
"gtest/1.17.0",
|
||||
"doctest/2.4.12",
|
||||
]
|
||||
|
||||
tool_requires = [
|
||||
@@ -87,13 +87,7 @@ class Xrpl(ConanFile):
|
||||
"libarchive/*:with_xattr": False,
|
||||
"libarchive/*:with_zlib": False,
|
||||
"lz4/*:shared": False,
|
||||
"openssl/*:no_dtls": True,
|
||||
"openssl/*:no_ssl": True,
|
||||
"openssl/*:no_ssl3": True,
|
||||
"openssl/*:no_tls1": True,
|
||||
"openssl/*:no_tls1_1": True,
|
||||
"openssl/*:shared": False,
|
||||
"openssl/*:tls_security_level": 2,
|
||||
"protobuf/*:shared": False,
|
||||
"protobuf/*:with_zlib": True,
|
||||
"rocksdb/*:enable_sse": False,
|
||||
@@ -188,10 +182,12 @@ class Xrpl(ConanFile):
|
||||
libxrpl.libs = [
|
||||
"xrpl",
|
||||
"xrpl.libpb",
|
||||
"ed25519",
|
||||
"secp256k1",
|
||||
]
|
||||
# TODO: Fix the protobufs to include each other relative to
|
||||
# `include/`, not `include/xrpl/proto/`.
|
||||
libxrpl.includedirs = ["include", "include/xrpl/proto"]
|
||||
# `include/`, not `include/ripple/proto/`.
|
||||
libxrpl.includedirs = ["include", "include/ripple/proto"]
|
||||
libxrpl.requires = [
|
||||
"boost::headers",
|
||||
"boost::chrono",
|
||||
|
||||
@@ -134,7 +134,7 @@ validation messages (_PAV_) received from each validator on the node's UNL. Note
|
||||
that the node will only count the validation messages that agree with its own
|
||||
validations.
|
||||
|
||||
We define the **PAV** as the Percentage of Agreed Validation
|
||||
We define the **PAV** as the **P**ercentage of **A**greed **V**alidation
|
||||
messages received for the last N ledgers, where N = 256 by default.
|
||||
|
||||
When the PAV drops below the **_low-water mark_**, the validator is considered
|
||||
|
||||
@@ -43,14 +43,14 @@ alt phase == OPEN
|
||||
alt sqn%256==0
|
||||
CA -[#green]> RM: <font color=green>getValidations
|
||||
CA -[#green]> CA: <font color=green>create UNLModify Tx
|
||||
hnote over CA#lightgreen: use validations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
|
||||
hnote over CA#lightgreen: use validatations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
|
||||
end
|
||||
CA -> GC
|
||||
GC -> CA: propose
|
||||
deactivate CA
|
||||
end
|
||||
else phase == ESTABLISH
|
||||
hnote over GC: receive peer positions
|
||||
hnote over GC: receive peer postions
|
||||
GC -> GC : update our position
|
||||
GC -> CA : propose \n(if position changed)
|
||||
GC -> GC : check if have consensus
|
||||
|
||||
207
docs/build/sanitizers.md
vendored
207
docs/build/sanitizers.md
vendored
@@ -1,207 +0,0 @@
|
||||
# Sanitizer Configuration for Rippled
|
||||
|
||||
This document explains how to properly configure and run sanitizers (AddressSanitizer, undefinedbehaviorSanitizer, ThreadSanitizer) with the xrpld project.
|
||||
Corresponding suppression files are located in the `sanitizers/suppressions` directory.
|
||||
|
||||
- [Sanitizer Configuration for Rippled](#sanitizer-configuration-for-rippled)
|
||||
- [Building with Sanitizers](#building-with-sanitizers)
|
||||
- [Summary](#summary)
|
||||
- [Build steps:](#build-steps)
|
||||
- [Install dependencies](#install-dependencies)
|
||||
- [Call CMake](#call-cmake)
|
||||
- [Build](#build)
|
||||
- [Running Tests with Sanitizers](#running-tests-with-sanitizers)
|
||||
- [AddressSanitizer (ASAN)](#addresssanitizer-asan)
|
||||
- [ThreadSanitizer (TSan)](#threadsanitizer-tsan)
|
||||
- [LeakSanitizer (LSan)](#leaksanitizer-lsan)
|
||||
- [UndefinedBehaviorSanitizer (UBSan)](#undefinedbehaviorsanitizer-ubsan)
|
||||
- [Suppression Files](#suppression-files)
|
||||
- [`asan.supp`](#asansupp)
|
||||
- [`lsan.supp`](#lsansupp)
|
||||
- [`ubsan.supp`](#ubsansupp)
|
||||
- [`tsan.supp`](#tsansupp)
|
||||
- [`sanitizer-ignorelist.txt`](#sanitizer-ignorelisttxt)
|
||||
- [Troubleshooting](#troubleshooting)
|
||||
- ["ASAN is ignoring requested \_\_asan_handle_no_return" warnings](#asan-is-ignoring-requested-__asan_handle_no_return-warnings)
|
||||
- [Sanitizer Mismatch Errors](#sanitizer-mismatch-errors)
|
||||
- [References](#references)
|
||||
|
||||
## Building with Sanitizers
|
||||
|
||||
### Summary
|
||||
|
||||
Follow the same instructions as mentioned in [BUILD.md](../../BUILD.md) but with the following changes:
|
||||
|
||||
1. Make sure you have a clean build directory.
|
||||
2. Set the `SANITIZERS` environment variable before calling conan install and cmake. Only set it once. Make sure both conan and cmake read the same values.
|
||||
Example: `export SANITIZERS=address,undefinedbehavior`
|
||||
3. Optionally use `--profile:all sanitizers` with Conan to build dependencies with sanitizer instrumentation. [!NOTE]Building with sanitizer-instrumented dependencies is slower but produces fewer false positives.
|
||||
4. Set `ASAN_OPTIONS`, `LSAN_OPTIONS`, `UBSAN_OPTIONS` and `TSAN_OPTIONS` environment variables to configure sanitizer behavior when running executables. [More details below](#running-tests-with-sanitizers).
|
||||
|
||||
---
|
||||
|
||||
### Build steps:
|
||||
|
||||
```bash
|
||||
cd /path/to/rippled
|
||||
rm -rf .build
|
||||
mkdir .build
|
||||
cd .build
|
||||
```
|
||||
|
||||
#### Install dependencies
|
||||
|
||||
The `SANITIZERS` environment variable is used by both Conan and CMake.
|
||||
|
||||
```bash
|
||||
export SANITIZERS=address,undefinedbehavior
|
||||
# Standard build (without instrumenting dependencies)
|
||||
conan install .. --output-folder . --build missing --settings build_type=Debug
|
||||
|
||||
# Or with sanitizer-instrumented dependencies (takes longer but fewer false positives)
|
||||
conan install .. --output-folder . --profile:all sanitizers --build missing --settings build_type=Debug
|
||||
```
|
||||
|
||||
[!CAUTION]
|
||||
Do not mix Address and Thread sanitizers - they are incompatible.
|
||||
|
||||
Since you already set the `SANITIZERS` environment variable when running Conan, same values will be read for the next part.
|
||||
|
||||
#### Call CMake
|
||||
|
||||
```bash
|
||||
cmake .. -G Ninja \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||
-DCMAKE_BUILD_TYPE=Debug \
|
||||
-Dtests=ON -Dxrpld=ON
|
||||
```
|
||||
|
||||
#### Build
|
||||
|
||||
```bash
|
||||
cmake --build . --parallel 4
|
||||
```
|
||||
|
||||
## Running Tests with Sanitizers
|
||||
|
||||
### AddressSanitizer (ASAN)
|
||||
|
||||
**IMPORTANT**: ASAN with Boost produces many false positives. Use these options:
|
||||
|
||||
```bash
|
||||
export ASAN_OPTIONS="print_stacktrace=1:detect_container_overflow=0:suppressions=path/to/asan.supp:halt_on_error=0:log_path=asan.log"
|
||||
export LSAN_OPTIONS="suppressions=path/to/lsan.supp:halt_on_error=0:log_path=lsan.log"
|
||||
|
||||
# Run tests
|
||||
./xrpld --unittest --unittest-jobs=5
|
||||
```
|
||||
|
||||
**Why `detect_container_overflow=0`?**
|
||||
|
||||
- Boost intrusive containers (used in `aged_unordered_container`) trigger false positives
|
||||
- Boost context switching (used in `Workers.cpp`) confuses ASAN's stack tracking
|
||||
- Since we usually don't build Boost (because we don't want to instrument Boost and detect issues in Boost code) with ASAN but use Boost containers in ASAN instrumented rippled code, it generates false positives.
|
||||
- Building dependencies with ASAN instrumentation reduces false positives. But we don't want to instrument dependencies like Boost with ASAN because it is slow (to compile as well as run tests) and not necessary.
|
||||
- See: https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow
|
||||
- More such flags are detailed [here](https://github.com/google/sanitizers/wiki/AddressSanitizerFlags)
|
||||
|
||||
### ThreadSanitizer (TSan)
|
||||
|
||||
```bash
|
||||
export TSAN_OPTIONS="suppressions=path/to/tsan.supp halt_on_error=0 log_path=tsan.log"
|
||||
|
||||
# Run tests
|
||||
./xrpld --unittest --unittest-jobs=5
|
||||
```
|
||||
|
||||
More details [here](https://github.com/google/sanitizers/wiki/ThreadSanitizerCppManual).
|
||||
|
||||
### LeakSanitizer (LSan)
|
||||
|
||||
LSan is automatically enabled with ASAN. To disable it:
|
||||
|
||||
```bash
|
||||
export ASAN_OPTIONS="detect_leaks=0"
|
||||
```
|
||||
|
||||
More details [here](https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer).
|
||||
|
||||
### UndefinedBehaviorSanitizer (UBSan)
|
||||
|
||||
```bash
|
||||
export UBSAN_OPTIONS="suppressions=path/to/ubsan.supp:print_stacktrace=1:halt_on_error=0:log_path=ubsan.log"
|
||||
|
||||
# Run tests
|
||||
./xrpld --unittest --unittest-jobs=5
|
||||
```
|
||||
|
||||
More details [here](https://clang.llvm.org/docs/undefinedbehaviorSanitizer.html).
|
||||
|
||||
## Suppression Files
|
||||
|
||||
[!NOTE] Attached files contain more details.
|
||||
|
||||
### [`asan.supp`](../../sanitizers/suppressions/asan.supp)
|
||||
|
||||
- **Purpose**: Suppress AddressSanitizer (ASAN) errors only
|
||||
- **Format**: `interceptor_name:<pattern>` where pattern matches file names. Supported suppression types are:
|
||||
- interceptor_name
|
||||
- interceptor_via_fun
|
||||
- interceptor_via_lib
|
||||
- odr_violation
|
||||
- **More info**: [AddressSanitizer](https://github.com/google/sanitizers/wiki/AddressSanitizer)
|
||||
- **Note**: Cannot suppress stack-buffer-overflow, container-overflow, etc.
|
||||
|
||||
### [`lsan.supp`](../../sanitizers/suppressions/lsan.supp)
|
||||
|
||||
- **Purpose**: Suppress LeakSanitizer (LSan) errors only
|
||||
- **Format**: `leak:<pattern>` where pattern matches function/file names
|
||||
- **More info**: [LeakSanitizer](https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer)
|
||||
|
||||
### [`ubsan.supp`](../../sanitizers/suppressions/ubsan.supp)
|
||||
|
||||
- **Purpose**: Suppress undefinedbehaviorSanitizer errors
|
||||
- **Format**: `<error_type>:<pattern>` (e.g., `unsigned-integer-overflow:protobuf`)
|
||||
- **Covers**: Intentional overflows in sanitizers/suppressions libraries (protobuf, gRPC, stdlib)
|
||||
- More info [UBSan suppressions](https://clang.llvm.org/docs/SanitizerSpecialCaseList.html).
|
||||
|
||||
### [`tsan.supp`](../../sanitizers/suppressions/tsan.supp)
|
||||
|
||||
- **Purpose**: Suppress ThreadSanitizer data race warnings
|
||||
- **Format**: `race:<pattern>` where pattern matches function/file names
|
||||
- **More info**: [ThreadSanitizer suppressions](https://github.com/google/sanitizers/wiki/ThreadSanitizerSuppressions)
|
||||
|
||||
### [`sanitizer-ignorelist.txt`](../../sanitizers/suppressions/sanitizer-ignorelist.txt)
|
||||
|
||||
- **Purpose**: Compile-time ignorelist for all sanitizers
|
||||
- **Usage**: Passed via `-fsanitize-ignorelist=absolute/path/to/sanitizer-ignorelist.txt`
|
||||
- **Format**: `<level>:<pattern>` (e.g., `src:Workers.cpp`)
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "ASAN is ignoring requested \_\_asan_handle_no_return" warnings
|
||||
|
||||
These warnings appear when using Boost context switching and are harmless. They indicate potential false positives.
|
||||
|
||||
### Sanitizer Mismatch Errors
|
||||
|
||||
If you see undefined symbols like `___tsan_atomic_load` when building with ASAN:
|
||||
|
||||
**Problem**: Dependencies were built with a different sanitizer than the main project.
|
||||
|
||||
**Solution**: Rebuild everything with the same sanitizer:
|
||||
|
||||
```bash
|
||||
rm -rf .build
|
||||
# Then follow the build instructions above
|
||||
```
|
||||
|
||||
Then review the log files: `asan.log.*`, `ubsan.log.*`, `tsan.log.*`
|
||||
|
||||
## References
|
||||
|
||||
- [AddressSanitizer Wiki](https://github.com/google/sanitizers/wiki/AddressSanitizer)
|
||||
- [AddressSanitizer Flags](https://github.com/google/sanitizers/wiki/AddressSanitizerFlags)
|
||||
- [Container Overflow Detection](https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow)
|
||||
- [UndefinedBehavior Sanitizer](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html)
|
||||
- [ThreadSanitizer](https://github.com/google/sanitizers/wiki/ThreadSanitizerCppManual)
|
||||
@@ -189,7 +189,7 @@ validations. It checks this on every call to `timerEntry`.
|
||||
- _Wrong Ledger_ indicates the node is not working on the correct prior ledger
|
||||
and does not have it available. It requests that ledger from the network, but
|
||||
continues to work towards consensus this round while waiting. If it had been
|
||||
_proposing_, it will send a special "bow-out" proposal to its peers to indicate
|
||||
_proposing_, it will send a special "bowout" proposal to its peers to indicate
|
||||
its change in mode for the rest of this round. For the duration of the round,
|
||||
it defers to peer positions for determining the consensus outcome as if it
|
||||
were just _observing_.
|
||||
@@ -515,7 +515,7 @@ are excerpts of the generic consensus implementation and of helper types that wi
|
||||
interact with the concrete implementing class.
|
||||
|
||||
```{.cpp}
|
||||
// Represents a transaction under dispute this round
|
||||
// Represents a transction under dispute this round
|
||||
template <class Tx_t, class NodeID_t> class DisputedTx;
|
||||
|
||||
// Represents how the node participates in Consensus this round
|
||||
|
||||
@@ -58,7 +58,7 @@ concept CAdoptTag = std::is_same_v<T, SharedIntrusiveAdoptIncrementStrongTag> ||
|
||||
When the strong pointer count goes to zero, the "partialDestructor" is
|
||||
called. This can be used to destroy as much of the object as possible while
|
||||
still retaining the reference counts. For example, for SHAMapInnerNodes the
|
||||
children may be reset in that function. Note that std::shared_pointer WILL
|
||||
children may be reset in that function. Note that std::shared_poiner WILL
|
||||
run the destructor when the strong count reaches zero, but may not free the
|
||||
memory used by the object until the weak count reaches zero. In rippled, we
|
||||
typically allocate shared pointers with the `make_shared` function. When
|
||||
|
||||
@@ -301,7 +301,7 @@ IntrusiveRefCounts::addWeakReleaseStrongRef() const
|
||||
// change the counts and flags (the count could be atomically changed, but
|
||||
// the flags depend on the current value of the counts).
|
||||
//
|
||||
// Note: If this becomes a perf bottleneck, the `partialDestroyStartedMask`
|
||||
// Note: If this becomes a perf bottleneck, the `partialDestoryStartedMask`
|
||||
// may be able to be set non-atomically. But it is easier to reason about
|
||||
// the code if the flag is set atomically.
|
||||
while (1)
|
||||
|
||||
@@ -221,8 +221,7 @@ public:
|
||||
private:
|
||||
enum {
|
||||
// Maximum line length for log messages.
|
||||
// If the message exceeds this length it will be truncated with
|
||||
// ellipses.
|
||||
// If the message exceeds this length it will be truncated with elipses.
|
||||
maximumMessageCharacters = 12 * 1024
|
||||
};
|
||||
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
#ifndef XRPL_BASICS_NUMBER_H_INCLUDED
|
||||
#define XRPL_BASICS_NUMBER_H_INCLUDED
|
||||
|
||||
#include <xrpl/beast/utility/instrumentation.h>
|
||||
|
||||
#include <cstdint>
|
||||
#include <limits>
|
||||
#include <optional>
|
||||
#include <ostream>
|
||||
#include <string>
|
||||
|
||||
@@ -16,252 +13,42 @@ class Number;
|
||||
std::string
|
||||
to_string(Number const& amount);
|
||||
|
||||
template <typename T>
|
||||
constexpr std::optional<int>
|
||||
logTen(T value)
|
||||
{
|
||||
int log = 0;
|
||||
while (value >= 10 && value % 10 == 0)
|
||||
{
|
||||
value /= 10;
|
||||
++log;
|
||||
}
|
||||
if (value == 1)
|
||||
return log;
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
constexpr bool
|
||||
isPowerOfTen(T value)
|
||||
{
|
||||
return logTen(value).has_value();
|
||||
while (value >= 10 && value % 10 == 0)
|
||||
value /= 10;
|
||||
return value == 1;
|
||||
}
|
||||
|
||||
/** MantissaRange defines a range for the mantissa of a normalized Number.
|
||||
*
|
||||
* The mantissa is in the range [min, max], where
|
||||
* * min is a power of 10, and
|
||||
* * max = min * 10 - 1.
|
||||
*
|
||||
* The mantissa_scale enum indicates whether the range is "small" or "large".
|
||||
* This intentionally restricts the number of MantissaRanges that can be
|
||||
* instantiated to two: one for each scale.
|
||||
*
|
||||
* The "small" scale is based on the behavior of STAmount for IOUs. It has a min
|
||||
* value of 10^15, and a max value of 10^16-1. This was sufficient for
|
||||
* uses before Lending Protocol was implemented, mostly related to AMM.
|
||||
*
|
||||
* However, it does not have sufficient precision to represent the full integer
|
||||
* range of int64_t values (-2^63 to 2^63-1), which are needed for XRP and MPT
|
||||
* values. The implementation of SingleAssetVault, and LendingProtocol need to
|
||||
* represent those integer values accurately and precisely, both for the
|
||||
* STNumber field type, and for internal calculations. That necessitated the
|
||||
* "large" scale.
|
||||
*
|
||||
* The "large" scale is intended to represent all values that can be represented
|
||||
* by an STAmount - IOUs, XRP, and MPTs. It has a min value of 10^18, and a max
|
||||
* value of 10^19-1.
|
||||
*
|
||||
* Note that if the mentioned amendments are eventually retired, this class
|
||||
* should be left in place, but the "small" scale option should be removed. This
|
||||
* will allow for future expansion beyond 64-bits if it is ever needed.
|
||||
*/
|
||||
struct MantissaRange
|
||||
{
|
||||
using rep = std::uint64_t;
|
||||
enum mantissa_scale { small, large };
|
||||
|
||||
explicit constexpr MantissaRange(mantissa_scale scale_)
|
||||
: min(getMin(scale_))
|
||||
, max(min * 10 - 1)
|
||||
, log(logTen(min).value_or(-1))
|
||||
, scale(scale_)
|
||||
{
|
||||
}
|
||||
|
||||
rep min;
|
||||
rep max;
|
||||
int log;
|
||||
mantissa_scale scale;
|
||||
|
||||
private:
|
||||
static constexpr rep
|
||||
getMin(mantissa_scale scale_)
|
||||
{
|
||||
switch (scale_)
|
||||
{
|
||||
case small:
|
||||
return 1'000'000'000'000'000ULL;
|
||||
case large:
|
||||
return 1'000'000'000'000'000'000ULL;
|
||||
default:
|
||||
// Since this can never be called outside a non-constexpr
|
||||
// context, this throw assures that the build fails if an
|
||||
// invalid scale is used.
|
||||
throw std::runtime_error("Unknown mantissa scale");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Like std::integral, but only 64-bit integral types.
|
||||
template <class T>
|
||||
concept Integral64 =
|
||||
std::is_same_v<T, std::int64_t> || std::is_same_v<T, std::uint64_t>;
|
||||
|
||||
/** Number is a floating point type that can represent a wide range of values.
|
||||
*
|
||||
* It can represent all values that can be represented by an STAmount -
|
||||
* regardless of asset type - XRPAmount, MPTAmount, and IOUAmount, with at least
|
||||
* as much precision as those types require.
|
||||
*
|
||||
* ---- Internal Representation ----
|
||||
*
|
||||
* Internally, Number is represented with three values:
|
||||
* 1. a bool sign flag,
|
||||
* 2. a std::uint64_t mantissa,
|
||||
* 3. an int exponent.
|
||||
*
|
||||
* The internal mantissa is an unsigned integer in the range defined by the
|
||||
* current MantissaRange. The exponent is an integer in the range
|
||||
* [minExponent, maxExponent].
|
||||
*
|
||||
* See the description of MantissaRange for more details on the ranges.
|
||||
*
|
||||
* A non-zero mantissa is (almost) always normalized, meaning it and the
|
||||
* exponent are grown or shrunk until the mantissa is in the range
|
||||
* [MantissaRange.min, MantissaRange.max].
|
||||
*
|
||||
* Note:
|
||||
* 1. Normalization can be disabled by using the "unchecked" ctor tag. This
|
||||
* should only be used at specific conversion points, some constexpr
|
||||
* values, and in unit tests.
|
||||
* 2. The max of the "large" range, 10^19-1, is the largest 10^X-1 value that
|
||||
* fits in an unsigned 64-bit number. (10^19-1 < 2^64-1 and
|
||||
* 10^20-1 > 2^64-1). This avoids under- and overflows.
|
||||
*
|
||||
* ---- External Interface ----
|
||||
*
|
||||
* The external interface of Number consists of a std::int64_t mantissa, which
|
||||
* is restricted to 63-bits, and an int exponent, which must be in the range
|
||||
* [minExponent, maxExponent]. The range of the mantissa depends on which
|
||||
* MantissaRange is currently active. For the "short" range, the mantissa will
|
||||
* be between 10^15 and 10^16-1. For the "large" range, the mantissa will be
|
||||
* between -(2^63-1) and 2^63-1. As noted above, the "large" range is needed to
|
||||
* represent the full range of valid XRP and MPT integer values accurately.
|
||||
*
|
||||
* Note:
|
||||
* 1. 2^63-1 is between 10^18 and 10^19-1, which are the limits of the "large"
|
||||
* mantissa range.
|
||||
* 2. The functions mantissa() and exponent() return the external view of the
|
||||
* Number value, specifically using a signed 63-bit mantissa. This may
|
||||
* require altering the internal representation to fit into that range
|
||||
* before the value is returned. The interface guarantees consistency of
|
||||
* the two values.
|
||||
* 3. Number cannot represent -2^63 (std::numeric_limits<std::int64_t>::min())
|
||||
* as an exact integer, but it doesn't need to, because all asset values
|
||||
* on-ledger are non-negative. This is due to implementation details of
|
||||
* several operations which use unsigned arithmetic internally. This is
|
||||
* sufficient to represent all valid XRP values (where the absolute value
|
||||
* can not exceed INITIAL_XRP: 10^17), and MPT values (where the absolute
|
||||
* value can not exceed maxMPTokenAmount: 2^63-1).
|
||||
*
|
||||
* ---- Mantissa Range Switching ----
|
||||
*
|
||||
* The mantissa range may be changed at runtime via setMantissaScale(). The
|
||||
* default mantissa range is "large". The range is updated whenever transaction
|
||||
* processing begins, based on whether SingleAssetVault or LendingProtocol are
|
||||
* enabled. If either is enabled, the mantissa range is set to "large". If not,
|
||||
* it is set to "small", preserving backward compatibility and correct
|
||||
* "amendment-gating".
|
||||
*
|
||||
* It is extremely unlikely that any more calls to setMantissaScale() will be
|
||||
* needed outside of unit tests.
|
||||
*
|
||||
* ---- Usage With Different Ranges ----
|
||||
*
|
||||
* Outside of unit tests, and existing checks, code that uses Number should not
|
||||
* know or care which mantissa range is active.
|
||||
*
|
||||
* The results of computations using Numbers with a small mantissa may differ
|
||||
* from computations using Numbers with a large mantissa, specifically as it
|
||||
* effects the results after rounding. That is why the large mantissa range is
|
||||
* amendment gated in transaction processing.
|
||||
*
|
||||
* It is extremely unlikely that any more calls to getMantissaScale() will be
|
||||
* needed outside of unit tests.
|
||||
*
|
||||
* Code that uses Number should not assume or check anything about the
|
||||
* mantissa() or exponent() except that they fit into the "large" range
|
||||
* specified in the "External Interface" section.
|
||||
*
|
||||
* ----- Unit Tests -----
|
||||
*
|
||||
* Within unit tests, it may be useful to explicitly switch between the two
|
||||
* ranges, or to check which range is active when checking the results of
|
||||
* computations. If the test is doing the math directly, the
|
||||
* set/getMantissaScale() functions may be most appropriate. However, if the
|
||||
* test has anything to do with transaction processing, it should enable or
|
||||
* disable the amendments that control the mantissa range choice
|
||||
* (SingleAssetVault and LendingProtocol), and/or check if either of those
|
||||
* amendments are enabled to determine which result to expect.
|
||||
*
|
||||
*/
|
||||
class Number
|
||||
{
|
||||
using rep = std::int64_t;
|
||||
using internalrep = MantissaRange::rep;
|
||||
|
||||
bool negative_{false};
|
||||
internalrep mantissa_{0};
|
||||
rep mantissa_{0};
|
||||
int exponent_{std::numeric_limits<int>::lowest()};
|
||||
|
||||
public:
|
||||
// The range for the mantissa when normalized
|
||||
constexpr static std::int64_t minMantissa = 1'000'000'000'000'000LL;
|
||||
static_assert(isPowerOfTen(minMantissa));
|
||||
constexpr static std::int64_t maxMantissa = minMantissa * 10 - 1;
|
||||
static_assert(maxMantissa == 9'999'999'999'999'999LL);
|
||||
|
||||
// The range for the exponent when normalized
|
||||
constexpr static int minExponent = -32768;
|
||||
constexpr static int maxExponent = 32768;
|
||||
|
||||
constexpr static internalrep maxRep = std::numeric_limits<rep>::max();
|
||||
static_assert(maxRep == 9'223'372'036'854'775'807);
|
||||
static_assert(-maxRep == std::numeric_limits<rep>::min() + 1);
|
||||
|
||||
// May need to make unchecked private
|
||||
struct unchecked
|
||||
{
|
||||
explicit unchecked() = default;
|
||||
};
|
||||
|
||||
// Like unchecked, normalized is used with the ctors that take an
|
||||
// internalrep mantissa. Unlike unchecked, those ctors will normalize the
|
||||
// value.
|
||||
// Only unit tests are expected to use this class
|
||||
struct normalized
|
||||
{
|
||||
explicit normalized() = default;
|
||||
};
|
||||
|
||||
explicit constexpr Number() = default;
|
||||
|
||||
Number(rep mantissa);
|
||||
explicit Number(rep mantissa, int exponent);
|
||||
explicit constexpr Number(
|
||||
bool negative,
|
||||
internalrep mantissa,
|
||||
int exponent,
|
||||
unchecked) noexcept;
|
||||
// Assume unsigned values are... unsigned. i.e. positive
|
||||
explicit constexpr Number(
|
||||
internalrep mantissa,
|
||||
int exponent,
|
||||
unchecked) noexcept;
|
||||
// Only unit tests are expected to use this ctor
|
||||
explicit Number(
|
||||
bool negative,
|
||||
internalrep mantissa,
|
||||
int exponent,
|
||||
normalized);
|
||||
// Assume unsigned values are... unsigned. i.e. positive
|
||||
explicit Number(internalrep mantissa, int exponent, normalized);
|
||||
explicit constexpr Number(rep mantissa, int exponent, unchecked) noexcept;
|
||||
|
||||
constexpr rep
|
||||
mantissa() const noexcept;
|
||||
@@ -291,11 +78,11 @@ public:
|
||||
Number&
|
||||
operator/=(Number const& x);
|
||||
|
||||
static Number
|
||||
static constexpr Number
|
||||
min() noexcept;
|
||||
static Number
|
||||
static constexpr Number
|
||||
max() noexcept;
|
||||
static Number
|
||||
static constexpr Number
|
||||
lowest() noexcept;
|
||||
|
||||
/** Conversions to Number are implicit and conversions away from Number
|
||||
@@ -309,8 +96,7 @@ public:
|
||||
friend constexpr bool
|
||||
operator==(Number const& x, Number const& y) noexcept
|
||||
{
|
||||
return x.negative_ == y.negative_ && x.mantissa_ == y.mantissa_ &&
|
||||
x.exponent_ == y.exponent_;
|
||||
return x.mantissa_ == y.mantissa_ && x.exponent_ == y.exponent_;
|
||||
}
|
||||
|
||||
friend constexpr bool
|
||||
@@ -324,8 +110,8 @@ public:
|
||||
{
|
||||
// If the two amounts have different signs (zero is treated as positive)
|
||||
// then the comparison is true iff the left is negative.
|
||||
bool const lneg = x.negative_;
|
||||
bool const rneg = y.negative_;
|
||||
bool const lneg = x.mantissa_ < 0;
|
||||
bool const rneg = y.mantissa_ < 0;
|
||||
|
||||
if (lneg != rneg)
|
||||
return lneg;
|
||||
@@ -353,7 +139,7 @@ public:
|
||||
constexpr int
|
||||
signum() const noexcept
|
||||
{
|
||||
return negative_ ? -1 : (mantissa_ ? 1 : 0);
|
||||
return (mantissa_ < 0) ? -1 : (mantissa_ ? 1 : 0);
|
||||
}
|
||||
|
||||
Number
|
||||
@@ -383,15 +169,6 @@ public:
|
||||
return os << to_string(x);
|
||||
}
|
||||
|
||||
friend std::string
|
||||
to_string(Number const& amount);
|
||||
|
||||
friend Number
|
||||
root(Number f, unsigned d);
|
||||
|
||||
friend Number
|
||||
root2(Number f);
|
||||
|
||||
// Thread local rounding control. Default is to_nearest
|
||||
enum rounding_mode { to_nearest, towards_zero, downward, upward };
|
||||
static rounding_mode
|
||||
@@ -400,206 +177,44 @@ public:
|
||||
static rounding_mode
|
||||
setround(rounding_mode mode);
|
||||
|
||||
/** Returns which mantissa scale is currently in use for normalization.
|
||||
*
|
||||
* If you think you need to call this outside of unit tests, no you don't.
|
||||
*/
|
||||
static MantissaRange::mantissa_scale
|
||||
getMantissaScale();
|
||||
/** Changes which mantissa scale is used for normalization.
|
||||
*
|
||||
* If you think you need to call this outside of unit tests, no you don't.
|
||||
*/
|
||||
static void
|
||||
setMantissaScale(MantissaRange::mantissa_scale scale);
|
||||
|
||||
inline static internalrep
|
||||
minMantissa()
|
||||
{
|
||||
return range_.get().min;
|
||||
}
|
||||
|
||||
inline static internalrep
|
||||
maxMantissa()
|
||||
{
|
||||
return range_.get().max;
|
||||
}
|
||||
|
||||
inline static int
|
||||
mantissaLog()
|
||||
{
|
||||
return range_.get().log;
|
||||
}
|
||||
|
||||
/// oneSmall is needed because the ranges are private
|
||||
constexpr static Number
|
||||
oneSmall();
|
||||
/// oneLarge is needed because the ranges are private
|
||||
constexpr static Number
|
||||
oneLarge();
|
||||
|
||||
// And one is needed because it needs to choose between oneSmall and
|
||||
// oneLarge based on the current range
|
||||
static Number
|
||||
one();
|
||||
|
||||
template <Integral64 T>
|
||||
[[nodiscard]]
|
||||
std::pair<T, int>
|
||||
normalizeToRange(T minMantissa, T maxMantissa) const;
|
||||
|
||||
private:
|
||||
static thread_local rounding_mode mode_;
|
||||
// The available ranges for mantissa
|
||||
|
||||
constexpr static MantissaRange smallRange{MantissaRange::small};
|
||||
static_assert(isPowerOfTen(smallRange.min));
|
||||
static_assert(smallRange.min == 1'000'000'000'000'000LL);
|
||||
static_assert(smallRange.max == 9'999'999'999'999'999LL);
|
||||
static_assert(smallRange.log == 15);
|
||||
static_assert(smallRange.min < maxRep);
|
||||
static_assert(smallRange.max < maxRep);
|
||||
constexpr static MantissaRange largeRange{MantissaRange::large};
|
||||
static_assert(isPowerOfTen(largeRange.min));
|
||||
static_assert(largeRange.min == 1'000'000'000'000'000'000ULL);
|
||||
static_assert(largeRange.max == internalrep(9'999'999'999'999'999'999ULL));
|
||||
static_assert(largeRange.log == 18);
|
||||
static_assert(largeRange.min < maxRep);
|
||||
static_assert(largeRange.max > maxRep);
|
||||
|
||||
// The range for the mantissa when normalized.
|
||||
// Use reference_wrapper to avoid making copies, and prevent accidentally
|
||||
// changing the values inside the range.
|
||||
static thread_local std::reference_wrapper<MantissaRange const> range_;
|
||||
|
||||
void
|
||||
normalize();
|
||||
|
||||
/** Normalize Number components to an arbitrary range.
|
||||
*
|
||||
* min/maxMantissa are parameters because this function is used by both
|
||||
* normalize(), which reads from range_, and by normalizeToRange,
|
||||
* which is public and can accept an arbitrary range from the caller.
|
||||
*/
|
||||
template <class T>
|
||||
static void
|
||||
normalize(
|
||||
bool& negative,
|
||||
T& mantissa,
|
||||
int& exponent,
|
||||
internalrep const& minMantissa,
|
||||
internalrep const& maxMantissa);
|
||||
|
||||
template <class T>
|
||||
friend void
|
||||
doNormalize(
|
||||
bool& negative,
|
||||
T& mantissa_,
|
||||
int& exponent_,
|
||||
MantissaRange::rep const& minMantissa,
|
||||
MantissaRange::rep const& maxMantissa);
|
||||
|
||||
bool
|
||||
constexpr bool
|
||||
isnormal() const noexcept;
|
||||
|
||||
// Copy the number, but modify the exponent by "exponentDelta". Because the
|
||||
// mantissa doesn't change, the result will be "mostly" normalized, but the
|
||||
// exponent could go out of range, so it will be checked.
|
||||
Number
|
||||
shiftExponent(int exponentDelta) const;
|
||||
|
||||
// Safely convert rep (int64) mantissa to internalrep (uint64). If the rep
|
||||
// is negative, returns the positive value. This takes a little extra work
|
||||
// because converting std::numeric_limits<std::int64_t>::min() flirts with
|
||||
// UB, and can vary across compilers.
|
||||
static internalrep
|
||||
externalToInternal(rep mantissa);
|
||||
|
||||
class Guard;
|
||||
};
|
||||
|
||||
inline constexpr Number::Number(
|
||||
bool negative,
|
||||
internalrep mantissa,
|
||||
int exponent,
|
||||
unchecked) noexcept
|
||||
: negative_(negative), mantissa_{mantissa}, exponent_{exponent}
|
||||
{
|
||||
}
|
||||
|
||||
inline constexpr Number::Number(
|
||||
internalrep mantissa,
|
||||
int exponent,
|
||||
unchecked) noexcept
|
||||
: Number(false, mantissa, exponent, unchecked{})
|
||||
{
|
||||
}
|
||||
|
||||
constexpr static Number numZero{};
|
||||
|
||||
inline Number::Number(
|
||||
bool negative,
|
||||
internalrep mantissa,
|
||||
int exponent,
|
||||
normalized)
|
||||
: Number(negative, mantissa, exponent, unchecked{})
|
||||
{
|
||||
normalize();
|
||||
}
|
||||
|
||||
inline Number::Number(internalrep mantissa, int exponent, normalized)
|
||||
: Number(false, mantissa, exponent, normalized{})
|
||||
inline constexpr Number::Number(rep mantissa, int exponent, unchecked) noexcept
|
||||
: mantissa_{mantissa}, exponent_{exponent}
|
||||
{
|
||||
}
|
||||
|
||||
inline Number::Number(rep mantissa, int exponent)
|
||||
: Number(mantissa < 0, externalToInternal(mantissa), exponent, normalized{})
|
||||
: mantissa_{mantissa}, exponent_{exponent}
|
||||
{
|
||||
normalize();
|
||||
}
|
||||
|
||||
inline Number::Number(rep mantissa) : Number{mantissa, 0}
|
||||
{
|
||||
}
|
||||
|
||||
/** Returns the mantissa of the external view of the Number.
|
||||
*
|
||||
* Please see the "---- External Interface ----" section of the class
|
||||
* documentation for an explanation of why the internal value may be modified.
|
||||
*/
|
||||
inline constexpr Number::rep
|
||||
Number::mantissa() const noexcept
|
||||
{
|
||||
auto m = mantissa_;
|
||||
if (m > maxRep)
|
||||
{
|
||||
XRPL_ASSERT_PARTS(
|
||||
!isnormal() || (m % 10 == 0 && m / 10 <= maxRep),
|
||||
"xrpl::Number::mantissa",
|
||||
"large normalized mantissa has no remainder");
|
||||
m /= 10;
|
||||
}
|
||||
auto const sign = negative_ ? -1 : 1;
|
||||
return sign * static_cast<Number::rep>(m);
|
||||
return mantissa_;
|
||||
}
|
||||
|
||||
/** Returns the exponent of the external view of the Number.
|
||||
*
|
||||
* Please see the "---- External Interface ----" section of the class
|
||||
* documentation for an explanation of why the internal value may be modified.
|
||||
*/
|
||||
inline constexpr int
|
||||
Number::exponent() const noexcept
|
||||
{
|
||||
auto e = exponent_;
|
||||
if (mantissa_ > maxRep)
|
||||
{
|
||||
XRPL_ASSERT_PARTS(
|
||||
!isnormal() || (mantissa_ % 10 == 0 && mantissa_ / 10 <= maxRep),
|
||||
"xrpl::Number::exponent",
|
||||
"large normalized mantissa has no remainder");
|
||||
++e;
|
||||
}
|
||||
return e;
|
||||
return exponent_;
|
||||
}
|
||||
|
||||
inline constexpr Number
|
||||
@@ -611,17 +226,15 @@ Number::operator+() const noexcept
|
||||
inline constexpr Number
|
||||
Number::operator-() const noexcept
|
||||
{
|
||||
if (mantissa_ == 0)
|
||||
return Number{};
|
||||
auto x = *this;
|
||||
x.negative_ = !x.negative_;
|
||||
x.mantissa_ = -x.mantissa_;
|
||||
return x;
|
||||
}
|
||||
|
||||
inline Number&
|
||||
Number::operator++()
|
||||
{
|
||||
*this += one();
|
||||
*this += Number{1000000000000000, -15, unchecked{}};
|
||||
return *this;
|
||||
}
|
||||
|
||||
@@ -636,7 +249,7 @@ Number::operator++(int)
|
||||
inline Number&
|
||||
Number::operator--()
|
||||
{
|
||||
*this -= one();
|
||||
*this -= Number{1000000000000000, -15, unchecked{}};
|
||||
return *this;
|
||||
}
|
||||
|
||||
@@ -686,54 +299,30 @@ operator/(Number const& x, Number const& y)
|
||||
return z;
|
||||
}
|
||||
|
||||
inline Number
|
||||
inline constexpr Number
|
||||
Number::min() noexcept
|
||||
{
|
||||
return Number{false, range_.get().min, minExponent, unchecked{}};
|
||||
return Number{minMantissa, minExponent, unchecked{}};
|
||||
}
|
||||
|
||||
inline Number
|
||||
inline constexpr Number
|
||||
Number::max() noexcept
|
||||
{
|
||||
return Number{
|
||||
false, std::min(range_.get().max, maxRep), maxExponent, unchecked{}};
|
||||
return Number{maxMantissa, maxExponent, unchecked{}};
|
||||
}
|
||||
|
||||
inline Number
|
||||
inline constexpr Number
|
||||
Number::lowest() noexcept
|
||||
{
|
||||
return Number{
|
||||
true, std::min(range_.get().max, maxRep), maxExponent, unchecked{}};
|
||||
return -Number{maxMantissa, maxExponent, unchecked{}};
|
||||
}
|
||||
|
||||
inline bool
|
||||
inline constexpr bool
|
||||
Number::isnormal() const noexcept
|
||||
{
|
||||
MantissaRange const& range = range_;
|
||||
auto const abs_m = mantissa_;
|
||||
return *this == Number{} ||
|
||||
(range.min <= abs_m && abs_m <= range.max &&
|
||||
(abs_m <= maxRep || abs_m % 10 == 0) && minExponent <= exponent_ &&
|
||||
exponent_ <= maxExponent);
|
||||
}
|
||||
|
||||
template <Integral64 T>
|
||||
std::pair<T, int>
|
||||
Number::normalizeToRange(T minMantissa, T maxMantissa) const
|
||||
{
|
||||
bool negative = negative_;
|
||||
internalrep mantissa = mantissa_;
|
||||
int exponent = exponent_;
|
||||
|
||||
if constexpr (std::is_unsigned_v<T>)
|
||||
XRPL_ASSERT_PARTS(
|
||||
!negative,
|
||||
"xrpl::Number::normalizeToRange",
|
||||
"Number is non-negative for unsigned range.");
|
||||
Number::normalize(negative, mantissa, exponent, minMantissa, maxMantissa);
|
||||
|
||||
auto const sign = negative ? -1 : 1;
|
||||
return std::make_pair(static_cast<T>(sign * mantissa), exponent);
|
||||
auto const abs_m = mantissa_ < 0 ? -mantissa_ : mantissa_;
|
||||
return minMantissa <= abs_m && abs_m <= maxMantissa &&
|
||||
minExponent <= exponent_ && exponent_ <= maxExponent;
|
||||
}
|
||||
|
||||
inline constexpr Number
|
||||
@@ -775,20 +364,6 @@ squelch(Number const& x, Number const& limit) noexcept
|
||||
return x;
|
||||
}
|
||||
|
||||
inline std::string
|
||||
to_string(MantissaRange::mantissa_scale const& scale)
|
||||
{
|
||||
switch (scale)
|
||||
{
|
||||
case MantissaRange::small:
|
||||
return "small";
|
||||
case MantissaRange::large:
|
||||
return "large";
|
||||
default:
|
||||
throw std::runtime_error("Bad scale");
|
||||
}
|
||||
}
|
||||
|
||||
class saveNumberRoundMode
|
||||
{
|
||||
Number::rounding_mode mode_;
|
||||
@@ -827,34 +402,6 @@ public:
|
||||
operator=(NumberRoundModeGuard const&) = delete;
|
||||
};
|
||||
|
||||
/** Sets the new scale and restores the old scale when it leaves scope.
|
||||
*
|
||||
* If you think you need to use this class outside of unit tests, no you don't.
|
||||
*
|
||||
*/
|
||||
class NumberMantissaScaleGuard
|
||||
{
|
||||
MantissaRange::mantissa_scale const saved_;
|
||||
|
||||
public:
|
||||
explicit NumberMantissaScaleGuard(
|
||||
MantissaRange::mantissa_scale scale) noexcept
|
||||
: saved_{Number::getMantissaScale()}
|
||||
{
|
||||
Number::setMantissaScale(scale);
|
||||
}
|
||||
|
||||
~NumberMantissaScaleGuard()
|
||||
{
|
||||
Number::setMantissaScale(saved_);
|
||||
}
|
||||
|
||||
NumberMantissaScaleGuard(NumberMantissaScaleGuard const&) = delete;
|
||||
|
||||
NumberMantissaScaleGuard&
|
||||
operator=(NumberMantissaScaleGuard const&) = delete;
|
||||
};
|
||||
|
||||
} // namespace xrpl
|
||||
|
||||
#endif // XRPL_BASICS_NUMBER_H_INCLUDED
|
||||
|
||||
@@ -152,8 +152,8 @@ public:
|
||||
|
||||
/** Return a "sub slice" of given length starting at the given position
|
||||
|
||||
Note that the subslice encompasses the range [pos, pos + rCount),
|
||||
where rCount is the smaller of count and size() - pos.
|
||||
Note that the subslice encompasses the range [pos, pos + rcount),
|
||||
where rcount is the smaller of count and size() - pos.
|
||||
|
||||
@param pos position of the first character
|
||||
@count requested length
|
||||
|
||||
@@ -31,7 +31,7 @@ template <class Iterator>
|
||||
std::optional<Blob>
|
||||
strUnHex(std::size_t strSize, Iterator begin, Iterator end)
|
||||
{
|
||||
static constexpr std::array<int, 256> const digitLookupTable = []() {
|
||||
static constexpr std::array<int, 256> const unxtab = []() {
|
||||
std::array<int, 256> t{};
|
||||
|
||||
for (auto& x : t)
|
||||
@@ -57,7 +57,7 @@ strUnHex(std::size_t strSize, Iterator begin, Iterator end)
|
||||
|
||||
if (strSize & 1)
|
||||
{
|
||||
int c = digitLookupTable[*iter++];
|
||||
int c = unxtab[*iter++];
|
||||
|
||||
if (c < 0)
|
||||
return {};
|
||||
@@ -67,12 +67,12 @@ strUnHex(std::size_t strSize, Iterator begin, Iterator end)
|
||||
|
||||
while (iter != end)
|
||||
{
|
||||
int cHigh = digitLookupTable[*iter++];
|
||||
int cHigh = unxtab[*iter++];
|
||||
|
||||
if (cHigh < 0)
|
||||
return {};
|
||||
|
||||
int cLow = digitLookupTable[*iter++];
|
||||
int cLow = unxtab[*iter++];
|
||||
|
||||
if (cLow < 0)
|
||||
return {};
|
||||
|
||||
@@ -3189,12 +3189,11 @@ operator==(aged_unordered_container<
|
||||
{
|
||||
if (size() != other.size())
|
||||
return false;
|
||||
for (auto iter(cbegin()), last(cend()), otherLast(other.cend());
|
||||
iter != last;
|
||||
for (auto iter(cbegin()), last(cend()), olast(other.cend()); iter != last;
|
||||
++iter)
|
||||
{
|
||||
auto otherIter(other.find(extract(*iter)));
|
||||
if (otherIter == otherLast)
|
||||
auto oiter(other.find(extract(*iter)));
|
||||
if (oiter == olast)
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
|
||||
@@ -5,8 +5,6 @@
|
||||
#ifndef BEAST_CORE_CURRENT_THREAD_NAME_H_INCLUDED
|
||||
#define BEAST_CORE_CURRENT_THREAD_NAME_H_INCLUDED
|
||||
|
||||
#include <boost/predef.h>
|
||||
|
||||
#include <string>
|
||||
#include <string_view>
|
||||
|
||||
@@ -18,31 +16,6 @@ namespace beast {
|
||||
void
|
||||
setCurrentThreadName(std::string_view newThreadName);
|
||||
|
||||
#if BOOST_OS_LINUX
|
||||
|
||||
// On Linux, thread names are limited to 16 bytes including the null terminator.
|
||||
// Maximum number of characters is therefore 15.
|
||||
constexpr std::size_t maxThreadNameLength = 15;
|
||||
|
||||
/** Sets the name of the caller thread with compile-time size checking.
|
||||
@tparam N The size of the string literal including null terminator
|
||||
@param newThreadName A string literal to set as the thread name
|
||||
|
||||
This template overload enforces that thread names are at most 16 characters
|
||||
(including null terminator) at compile time, matching Linux's limit.
|
||||
*/
|
||||
template <std::size_t N>
|
||||
void
|
||||
setCurrentThreadName(char const (&newThreadName)[N])
|
||||
{
|
||||
static_assert(
|
||||
N <= maxThreadNameLength + 1,
|
||||
"Thread name cannot exceed 15 characters");
|
||||
|
||||
setCurrentThreadName(std::string_view(newThreadName, N - 1));
|
||||
}
|
||||
#endif
|
||||
|
||||
/** Returns the name of the caller thread.
|
||||
|
||||
The name returned is the name as set by a call to setCurrentThreadName().
|
||||
|
||||
@@ -18,7 +18,7 @@ namespace beast {
|
||||
|
||||
namespace detail {
|
||||
|
||||
// These specializations get called by the non-member functions to do the work
|
||||
// These specializatons get called by the non-member functions to do the work
|
||||
template <class Out, class In>
|
||||
struct LexicalCast;
|
||||
|
||||
|
||||
@@ -203,7 +203,7 @@ struct is_contiguously_hashable<T[N], HashAlgorithm>
|
||||
Throws:
|
||||
Never
|
||||
Effect:
|
||||
Returns the resulting hash of all the input data.
|
||||
Returns the reslting hash of all the input data.
|
||||
*/
|
||||
/** @{ */
|
||||
|
||||
|
||||
@@ -376,7 +376,7 @@ public:
|
||||
print statement examples
|
||||
"parent.child" prints child and all of its children
|
||||
"parent.child." start at the parent and print down to child
|
||||
"parent.grandchild" prints nothing- grandchild not direct descendent
|
||||
"parent.grandchild" prints nothing- grandchild not direct discendent
|
||||
"parent.grandchild." starts at the parent and prints down to grandchild
|
||||
"parent.grandchild.*" starts at parent, print through grandchild
|
||||
children
|
||||
|
||||
@@ -40,7 +40,7 @@ public:
|
||||
using microseconds = std::chrono::microseconds;
|
||||
|
||||
/**
|
||||
* Configuration from [perf] section of xrpld.cfg.
|
||||
* Configuration from [perf] section of rippled.cfg.
|
||||
*/
|
||||
struct Setup
|
||||
{
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
#ifndef XRPL_JSON_JSON_READER_H_INCLUDED
|
||||
#define XRPL_JSON_JSON_READER_H_INCLUDED
|
||||
|
||||
#define CPPTL_JSON_READER_H_INCLUDED
|
||||
|
||||
#include <xrpl/json/json_forwards.h>
|
||||
#include <xrpl/json/json_value.h>
|
||||
|
||||
@@ -66,7 +68,7 @@ public:
|
||||
* error occurred during parsing.
|
||||
*/
|
||||
std::string
|
||||
getFormattedErrorMessages() const;
|
||||
getFormatedErrorMessages() const;
|
||||
|
||||
static constexpr unsigned nest_limit{25};
|
||||
|
||||
@@ -229,4 +231,4 @@ operator>>(std::istream&, Value&);
|
||||
|
||||
} // namespace Json
|
||||
|
||||
#endif // XRPL_JSON_JSON_READER_H_INCLUDED
|
||||
#endif // CPPTL_JSON_READER_H_INCLUDED
|
||||
|
||||
@@ -44,7 +44,7 @@ enum ValueType {
|
||||
class StaticString
|
||||
{
|
||||
public:
|
||||
constexpr explicit StaticString(char const* czString) : str_(czString)
|
||||
constexpr explicit StaticString(char const* czstring) : str_(czstring)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -682,4 +682,4 @@ public:
|
||||
|
||||
} // namespace Json
|
||||
|
||||
#endif // XRPL_JSON_JSON_VALUE_H_INCLUDED
|
||||
#endif // CPPTL_JSON_H_INCLUDED
|
||||
|
||||
@@ -90,7 +90,7 @@ private:
|
||||
void
|
||||
writeArrayValue(Value const& value);
|
||||
bool
|
||||
isMultilineArray(Value const& value);
|
||||
isMultineArray(Value const& value);
|
||||
void
|
||||
pushValue(std::string const& value);
|
||||
void
|
||||
@@ -157,7 +157,7 @@ private:
|
||||
void
|
||||
writeArrayValue(Value const& value);
|
||||
bool
|
||||
isMultilineArray(Value const& value);
|
||||
isMultineArray(Value const& value);
|
||||
void
|
||||
pushValue(std::string const& value);
|
||||
void
|
||||
|
||||
@@ -15,7 +15,7 @@ namespace xrpl {
|
||||
namespace credentials {
|
||||
|
||||
// These function will be used by the code that use DepositPreauth / Credentials
|
||||
// (and any future pre-authorization modes) as part of authorization (all the
|
||||
// (and any future preauthorization modes) as part of authorization (all the
|
||||
// transfer funds transactions)
|
||||
|
||||
// Check if credential sfExpiration field has passed ledger's parentCloseTime
|
||||
@@ -41,8 +41,7 @@ checkFields(STTx const& tx, beast::Journal j);
|
||||
|
||||
// Accessing the ledger to check if provided credentials are valid. Do not use
|
||||
// in doApply (only in preclaim) since it does not remove expired credentials.
|
||||
// If you call it in preclaim, you also must call verifyDepositPreauth in
|
||||
// doApply
|
||||
// If you call it in prelaim, you also must call verifyDepositPreauth in doApply
|
||||
TER
|
||||
valid(
|
||||
STTx const& tx,
|
||||
|
||||
@@ -61,9 +61,6 @@ enum FreezeHandling { fhIGNORE_FREEZE, fhZERO_IF_FROZEN };
|
||||
/** Controls the treatment of unauthorized MPT balances */
|
||||
enum AuthHandling { ahIGNORE_AUTH, ahZERO_IF_UNAUTHORIZED };
|
||||
|
||||
/** Controls whether to include the account's full spendable balance */
|
||||
enum SpendableHandling { shSIMPLE_BALANCE, shFULL_BALANCE };
|
||||
|
||||
[[nodiscard]] bool
|
||||
isGlobalFrozen(ReadView const& view, AccountID const& issuer);
|
||||
|
||||
@@ -308,17 +305,7 @@ isLPTokenFrozen(
|
||||
Issue const& asset,
|
||||
Issue const& asset2);
|
||||
|
||||
// Returns the amount an account can spend.
|
||||
//
|
||||
// If shSIMPLE_BALANCE is specified, this is the amount the account can spend
|
||||
// without going into debt.
|
||||
//
|
||||
// If shFULL_BALANCE is specified, this is the amount the account can spend
|
||||
// total. Specifically:
|
||||
// * The account can go into debt if using a trust line, and the other side has
|
||||
// a non-zero limit.
|
||||
// * If the account is the asset issuer the limit is defined by the asset /
|
||||
// issuance.
|
||||
// Returns the amount an account can spend without going into debt.
|
||||
//
|
||||
// <-- saAmount: amount of currency held by account. May be negative.
|
||||
[[nodiscard]] STAmount
|
||||
@@ -328,8 +315,7 @@ accountHolds(
|
||||
Currency const& currency,
|
||||
AccountID const& issuer,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
beast::Journal j,
|
||||
SpendableHandling includeFullBalance = shSIMPLE_BALANCE);
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountHolds(
|
||||
@@ -337,8 +323,7 @@ accountHolds(
|
||||
AccountID const& account,
|
||||
Issue const& issue,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
beast::Journal j,
|
||||
SpendableHandling includeFullBalance = shSIMPLE_BALANCE);
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountHolds(
|
||||
@@ -347,8 +332,7 @@ accountHolds(
|
||||
MPTIssue const& mptIssue,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
AuthHandling zeroIfUnauthorized,
|
||||
beast::Journal j,
|
||||
SpendableHandling includeFullBalance = shSIMPLE_BALANCE);
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountHolds(
|
||||
@@ -357,8 +341,50 @@ accountHolds(
|
||||
Asset const& asset,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
AuthHandling zeroIfUnauthorized,
|
||||
beast::Journal j,
|
||||
SpendableHandling includeFullBalance = shSIMPLE_BALANCE);
|
||||
beast::Journal j);
|
||||
|
||||
// Returns the amount an account can spend total.
|
||||
//
|
||||
// These functions use accountHolds, but unlike accountHolds:
|
||||
// * The account can go into debt.
|
||||
// * If the account is the asset issuer the only limit is defined by the asset /
|
||||
// issuance.
|
||||
//
|
||||
// <-- saAmount: amount of currency held by account. May be negative.
|
||||
[[nodiscard]] STAmount
|
||||
accountSpendable(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Currency const& currency,
|
||||
AccountID const& issuer,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountSpendable(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Issue const& issue,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountSpendable(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
MPTIssue const& mptIssue,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
AuthHandling zeroIfUnauthorized,
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountSpendable(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Asset const& asset,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
AuthHandling zeroIfUnauthorized,
|
||||
beast::Journal j);
|
||||
|
||||
// Returns the amount an account can spend of the currency type saDefault, or
|
||||
// returns saDefault if this account is the issuer of the currency in
|
||||
@@ -629,7 +655,7 @@ createPseudoAccount(
|
||||
uint256 const& pseudoOwnerKey,
|
||||
SField const& ownerField);
|
||||
|
||||
// Returns true if and only if sleAcct is a pseudo-account or specific
|
||||
// Returns true iff sleAcct is a pseudo-account or specific
|
||||
// pseudo-accounts in pseudoFieldFilter.
|
||||
//
|
||||
// Returns false if sleAcct is
|
||||
@@ -684,16 +710,13 @@ checkDestinationAndTag(SLE::const_ref toSle, bool hasDestinationTag);
|
||||
* - If withdrawing to self, succeed.
|
||||
* - If not, checks if the receiver requires deposit authorization, and if
|
||||
* the sender has it.
|
||||
* - Checks that the receiver will not exceed the limit (IOU trustline limit
|
||||
* or MPT MaximumAmount).
|
||||
*/
|
||||
[[nodiscard]] TER
|
||||
canWithdraw(
|
||||
ReadView const& view,
|
||||
AccountID const& from,
|
||||
ReadView const& view,
|
||||
AccountID const& to,
|
||||
SLE::const_ref toSle,
|
||||
STAmount const& amount,
|
||||
bool hasDestinationTag);
|
||||
|
||||
/** Checks that can withdraw funds from an object to itself or a destination.
|
||||
@@ -707,15 +730,12 @@ canWithdraw(
|
||||
* - If withdrawing to self, succeed.
|
||||
* - If not, checks if the receiver requires deposit authorization, and if
|
||||
* the sender has it.
|
||||
* - Checks that the receiver will not exceed the limit (IOU trustline limit
|
||||
* or MPT MaximumAmount).
|
||||
*/
|
||||
[[nodiscard]] TER
|
||||
canWithdraw(
|
||||
ReadView const& view,
|
||||
AccountID const& from,
|
||||
ReadView const& view,
|
||||
AccountID const& to,
|
||||
STAmount const& amount,
|
||||
bool hasDestinationTag);
|
||||
|
||||
/** Checks that can withdraw funds from an object to itself or a destination.
|
||||
@@ -729,8 +749,6 @@ canWithdraw(
|
||||
* - If withdrawing to self, succeed.
|
||||
* - If not, checks if the receiver requires deposit authorization, and if
|
||||
* the sender has it.
|
||||
* - Checks that the receiver will not exceed the limit (IOU trustline limit
|
||||
* or MPT MaximumAmount).
|
||||
*/
|
||||
[[nodiscard]] TER
|
||||
canWithdraw(ReadView const& view, STTx const& tx);
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
namespace xrpl {
|
||||
namespace NodeStore {
|
||||
|
||||
/** Simple NodeStore Scheduler that just performs the tasks synchronously. */
|
||||
/** Simple NodeStore Scheduler that just peforms the tasks synchronously. */
|
||||
class DummyScheduler : public Scheduler
|
||||
{
|
||||
public:
|
||||
|
||||
@@ -55,7 +55,7 @@ public:
|
||||
HyperLevelDB, LevelDBFactory, SQLite, MDB
|
||||
|
||||
If the fastBackendParameter is omitted or empty, no ephemeral database
|
||||
is used. If the scheduler parameter is omitted or unspecified, a
|
||||
is used. If the scheduler parameter is omited or unspecified, a
|
||||
synchronous scheduler is used which performs all tasks immediately on
|
||||
the caller's thread.
|
||||
|
||||
|
||||
@@ -96,7 +96,7 @@ Facebook's RocksDB database, builds on LevelDB.
|
||||
|
||||
Use SQLite.
|
||||
|
||||
'path' specifies where the backend will store its data files.
|
||||
'path' speficies where the backend will store its data files.
|
||||
|
||||
Choices for 'compression'
|
||||
|
||||
@@ -130,7 +130,7 @@ newer versions of RocksDB (TBD).
|
||||
## Discussion
|
||||
|
||||
RocksDBQuickFactory is intended to provide a testbed for comparing potential
|
||||
rocksdb performance with the existing recommended configuration in xrpld.cfg.
|
||||
rocksdb performance with the existing recommended configuration in rippled.cfg.
|
||||
Through various executions and profiling some conclusions are presented below.
|
||||
|
||||
- If the write ahead log is enabled, insert speed soon clogs up under load. The
|
||||
@@ -161,7 +161,7 @@ Through various executions and profiling some conclusions are presented below.
|
||||
|
||||
- Multiple runs of the benchmarks can yield surprisingly different results. This
|
||||
can perhaps be attributed to the asynchronous nature of rocksdb's compaction
|
||||
process. The benchmarks are artificial and create highly unlikely write load to
|
||||
process. The benchmarks are artifical and create highly unlikely write load to
|
||||
create the dataset to measure different read access patterns. Therefore multiple
|
||||
runs of the benchmarks are required to get a feel for the effectiveness of the
|
||||
changes. This contrasts sharply with the keyvadb benchmarking were highly
|
||||
|
||||
@@ -9,7 +9,7 @@ import "org/xrpl/rpc/v1/get_ledger_entry.proto";
|
||||
import "org/xrpl/rpc/v1/get_ledger_data.proto";
|
||||
import "org/xrpl/rpc/v1/get_ledger_diff.proto";
|
||||
|
||||
// These methods are binary only methods for retrieving arbitrary ledger state
|
||||
// These methods are binary only methods for retrieiving arbitrary ledger state
|
||||
// via gRPC. These methods are used by clio, but can also be
|
||||
// used by any client that wants to extract ledger state in an efficient manner.
|
||||
// They do not directly mimic the JSON equivalent methods.
|
||||
|
||||
@@ -17,9 +17,9 @@ enum MessageType {
|
||||
mtHAVE_SET = 35;
|
||||
mtVALIDATION = 41;
|
||||
mtGET_OBJECTS = 42;
|
||||
mtVALIDATOR_LIST = 54;
|
||||
mtVALIDATORLIST = 54;
|
||||
mtSQUELCH = 55;
|
||||
mtVALIDATOR_LIST_COLLECTION = 56;
|
||||
mtVALIDATORLISTCOLLECTION = 56;
|
||||
mtPROOF_PATH_REQ = 57;
|
||||
mtPROOF_PATH_RESPONSE = 58;
|
||||
mtREPLAY_DELTA_REQ = 59;
|
||||
@@ -308,7 +308,7 @@ message TMSquelch {
|
||||
}
|
||||
|
||||
enum TMLedgerMapType {
|
||||
lmTRANSACTION = 1; // transaction map
|
||||
lmTRANASCTION = 1; // transaction map
|
||||
lmACCOUNT_STATE = 2; // account state map
|
||||
}
|
||||
|
||||
|
||||
@@ -121,7 +121,7 @@ toAmount(
|
||||
{
|
||||
if (isXRP(issue))
|
||||
return STAmount(issue, static_cast<std::int64_t>(n));
|
||||
return STAmount(issue, n);
|
||||
return STAmount(issue, n.mantissa(), n.exponent());
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
@@ -67,6 +67,9 @@ enum class HashPrefix : std::uint32_t {
|
||||
/** Payment Channel Claim */
|
||||
paymentChannelClaim = detail::make_hash_prefix('C', 'L', 'M'),
|
||||
|
||||
/** Credentials signature */
|
||||
credential = detail::make_hash_prefix('C', 'R', 'D'),
|
||||
|
||||
/** Batch */
|
||||
batch = detail::make_hash_prefix('B', 'C', 'H'),
|
||||
};
|
||||
|
||||
@@ -20,16 +20,14 @@ namespace xrpl {
|
||||
|
||||
Arithmetic operations can throw std::overflow_error during normalization
|
||||
if the amount exceeds the largest representable amount, but underflows
|
||||
will silently truncate to zero.
|
||||
will silently trunctate to zero.
|
||||
*/
|
||||
class IOUAmount : private boost::totally_ordered<IOUAmount>,
|
||||
private boost::additive<IOUAmount>
|
||||
{
|
||||
private:
|
||||
using mantissa_type = std::int64_t;
|
||||
using exponent_type = int;
|
||||
mantissa_type mantissa_;
|
||||
exponent_type exponent_;
|
||||
std::int64_t mantissa_;
|
||||
int exponent_;
|
||||
|
||||
/** Adjusts the mantissa and exponent to the proper range.
|
||||
|
||||
@@ -40,14 +38,11 @@ private:
|
||||
void
|
||||
normalize();
|
||||
|
||||
static IOUAmount
|
||||
fromNumber(Number const& number);
|
||||
|
||||
public:
|
||||
IOUAmount() = default;
|
||||
explicit IOUAmount(Number const& other);
|
||||
IOUAmount(beast::Zero);
|
||||
IOUAmount(mantissa_type mantissa, exponent_type exponent);
|
||||
IOUAmount(std::int64_t mantissa, int exponent);
|
||||
|
||||
IOUAmount& operator=(beast::Zero);
|
||||
|
||||
@@ -76,10 +71,10 @@ public:
|
||||
int
|
||||
signum() const noexcept;
|
||||
|
||||
exponent_type
|
||||
int
|
||||
exponent() const noexcept;
|
||||
|
||||
mantissa_type
|
||||
std::int64_t
|
||||
mantissa() const noexcept;
|
||||
|
||||
static IOUAmount
|
||||
@@ -97,7 +92,7 @@ inline IOUAmount::IOUAmount(beast::Zero)
|
||||
*this = beast::zero;
|
||||
}
|
||||
|
||||
inline IOUAmount::IOUAmount(mantissa_type mantissa, exponent_type exponent)
|
||||
inline IOUAmount::IOUAmount(std::int64_t mantissa, int exponent)
|
||||
: mantissa_(mantissa), exponent_(exponent)
|
||||
{
|
||||
normalize();
|
||||
@@ -154,13 +149,13 @@ IOUAmount::signum() const noexcept
|
||||
return (mantissa_ < 0) ? -1 : (mantissa_ ? 1 : 0);
|
||||
}
|
||||
|
||||
inline IOUAmount::exponent_type
|
||||
inline int
|
||||
IOUAmount::exponent() const noexcept
|
||||
{
|
||||
return exponent_;
|
||||
}
|
||||
|
||||
inline IOUAmount::mantissa_type
|
||||
inline std::int64_t
|
||||
IOUAmount::mantissa() const noexcept
|
||||
{
|
||||
return mantissa_;
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
namespace xrpl {
|
||||
|
||||
class SeqProxy;
|
||||
/** Keylet computation functions.
|
||||
/** Keylet computation funclets.
|
||||
|
||||
Entries in the ledger are located using 256-bit locators. The locators are
|
||||
calculated using a wide range of parameters specific to the entry whose
|
||||
|
||||
@@ -37,9 +37,6 @@ public:
|
||||
bool
|
||||
native() const;
|
||||
|
||||
bool
|
||||
integral() const;
|
||||
|
||||
friend constexpr std::weak_ordering
|
||||
operator<=>(Issue const& lhs, Issue const& rhs);
|
||||
};
|
||||
|
||||
@@ -46,12 +46,6 @@ public:
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
bool
|
||||
integral() const
|
||||
{
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
constexpr bool
|
||||
|
||||
@@ -29,7 +29,7 @@ enum GranularPermissionType : std::uint32_t {
|
||||
#pragma pop_macro("PERMISSION")
|
||||
};
|
||||
|
||||
enum Delegation { delegable, notDelegable };
|
||||
enum Delegation { delegatable, notDelegatable };
|
||||
|
||||
class Permission
|
||||
{
|
||||
@@ -38,7 +38,7 @@ private:
|
||||
|
||||
std::unordered_map<std::uint16_t, uint256> txFeatureMap_;
|
||||
|
||||
std::unordered_map<std::uint16_t, Delegation> delegableTx_;
|
||||
std::unordered_map<std::uint16_t, Delegation> delegatableTx_;
|
||||
|
||||
std::unordered_map<std::string, GranularPermissionType>
|
||||
granularPermissionMap_;
|
||||
@@ -71,7 +71,8 @@ public:
|
||||
getTxFeature(TxType txType) const;
|
||||
|
||||
bool
|
||||
isDelegable(std::uint32_t const& permissionValue, Rules const& rules) const;
|
||||
isDelegatable(std::uint32_t const& permissionValue, Rules const& rules)
|
||||
const;
|
||||
|
||||
// for tx level permission, permission value is equal to tx type plus one
|
||||
uint32_t
|
||||
|
||||
@@ -179,7 +179,7 @@ static constexpr int loanPaymentsPerFeeIncrement = 5;
|
||||
*
|
||||
* This limit is enforced during the loan payment process, and thus is not
|
||||
* estimated. If the limit is hit, no further payments or overpayments will be
|
||||
* processed, no matter how much of the transaction Amount is left, but the
|
||||
* processed, no matter how much of the transation Amount is left, but the
|
||||
* transaction will succeed with the payments that have been processed up to
|
||||
* that point.
|
||||
*
|
||||
@@ -233,7 +233,6 @@ std::size_t constexpr maxMPTokenMetadataLength = 1024;
|
||||
|
||||
/** The maximum amount of MPTokenIssuance */
|
||||
std::uint64_t constexpr maxMPTokenAmount = 0x7FFF'FFFF'FFFF'FFFFull;
|
||||
static_assert(Number::maxRep >= maxMPTokenAmount);
|
||||
|
||||
/** The maximum length of Data payload */
|
||||
std::size_t constexpr maxDataPayloadLength = 256;
|
||||
|
||||
@@ -210,7 +210,7 @@ public:
|
||||
|
||||
private:
|
||||
// The ceil_in and ceil_out methods that deal in TAmount all convert
|
||||
// their arguments to STAmount and convert the result back to TAmount.
|
||||
// their arguments to STAoumout and convert the result back to TAmount.
|
||||
// This helper function takes care of all the conversion operations.
|
||||
template <
|
||||
class In,
|
||||
|
||||
@@ -135,10 +135,7 @@ public:
|
||||
sMD_Always = 0x10, // value when node containing it is affected at all
|
||||
sMD_BaseTen = 0x20, // value is treated as base 10, overriding behavior
|
||||
sMD_PseudoAccount = 0x40, // if this field is set in an ACCOUNT_ROOT
|
||||
// _only_, then it is a pseudo-account
|
||||
sMD_NeedsAsset = 0x80, // This field needs to be associated with an
|
||||
// asset before it is serialized as a ledger
|
||||
// object. Intended for STNumber.
|
||||
// _only_, then it is a pseudo-account
|
||||
sMD_Default =
|
||||
sMD_ChangeOrig | sMD_ChangeNew | sMD_DeleteFinal | sMD_Create
|
||||
};
|
||||
|
||||
@@ -138,7 +138,7 @@ public:
|
||||
|
||||
template <AssetType A>
|
||||
STAmount(A const& asset, Number const& number)
|
||||
: STAmount(fromNumber(asset, number))
|
||||
: STAmount(asset, number.mantissa(), number.exponent())
|
||||
{
|
||||
}
|
||||
|
||||
@@ -282,10 +282,6 @@ public:
|
||||
mpt() const;
|
||||
|
||||
private:
|
||||
template <AssetType A>
|
||||
static STAmount
|
||||
fromNumber(A const& asset, Number const& number);
|
||||
|
||||
static std::unique_ptr<STAmount>
|
||||
construct(SerialIter&, SField const& name);
|
||||
|
||||
@@ -349,19 +345,10 @@ STAmount::STAmount(
|
||||
, mIsNegative(negative)
|
||||
{
|
||||
// mValue is uint64, but needs to fit in the range of int64
|
||||
if (Number::getMantissaScale() == MantissaRange::small)
|
||||
{
|
||||
XRPL_ASSERT(
|
||||
mValue <= std::numeric_limits<std::int64_t>::max(),
|
||||
"xrpl::STAmount::STAmount(SField, A, std::uint64_t, int, bool) : "
|
||||
"maximum mantissa input");
|
||||
}
|
||||
else
|
||||
{
|
||||
if (integral() && mValue > std::numeric_limits<std::int64_t>::max())
|
||||
throw std::overflow_error(
|
||||
"STAmount mantissa is too large " + std::to_string(mantissa));
|
||||
}
|
||||
XRPL_ASSERT(
|
||||
mValue <= std::numeric_limits<std::int64_t>::max(),
|
||||
"xrpl::STAmount::STAmount(SField, A, std::uint64_t, int, bool) : "
|
||||
"maximum mantissa input");
|
||||
canonicalize();
|
||||
}
|
||||
|
||||
@@ -555,23 +542,14 @@ STAmount::operator=(XRPAmount const& amount)
|
||||
return *this;
|
||||
}
|
||||
|
||||
template <AssetType A>
|
||||
inline STAmount
|
||||
STAmount::fromNumber(A const& a, Number const& number)
|
||||
inline STAmount&
|
||||
STAmount::operator=(Number const& number)
|
||||
{
|
||||
bool const negative = number.mantissa() < 0;
|
||||
Number const working{negative ? -number : number};
|
||||
Asset asset{a};
|
||||
if (asset.integral())
|
||||
{
|
||||
std::uint64_t const intValue = static_cast<std::int64_t>(working);
|
||||
return STAmount{asset, intValue, 0, negative};
|
||||
}
|
||||
|
||||
auto const [mantissa, exponent] =
|
||||
working.normalizeToRange(cMinValue, cMaxValue);
|
||||
|
||||
return STAmount{asset, mantissa, exponent, negative};
|
||||
mIsNegative = number.mantissa() < 0;
|
||||
mValue = mIsNegative ? -number.mantissa() : number.mantissa();
|
||||
mOffset = number.exponent();
|
||||
canonicalize();
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline void
|
||||
@@ -721,32 +699,17 @@ getRate(STAmount const& offerOut, STAmount const& offerIn);
|
||||
* @param rounding Optional Number rounding mode
|
||||
*
|
||||
*/
|
||||
[[nodiscard]] STAmount
|
||||
STAmount
|
||||
roundToScale(
|
||||
STAmount const& value,
|
||||
std::int32_t scale,
|
||||
Number::rounding_mode rounding = Number::getround());
|
||||
|
||||
/** Round an arbitrary precision Number IN PLACE to the precision of a given
|
||||
* Asset.
|
||||
*
|
||||
* This is used to ensure that calculations do not collect dust for IOUs, or
|
||||
* fractional amounts for the integral types XRP and MPT.
|
||||
*
|
||||
* @param asset The relevant asset
|
||||
* @param value The lvalue to be rounded
|
||||
*/
|
||||
template <AssetType A>
|
||||
void
|
||||
roundToAsset(A const& asset, Number& value)
|
||||
{
|
||||
value = STAmount{asset, value};
|
||||
}
|
||||
|
||||
/** Round an arbitrary precision Number to the precision of a given Asset.
|
||||
*
|
||||
* This is used to ensure that calculations do not collect dust beyond specified
|
||||
* scale for IOUs, or fractional amounts for the integral types XRP and MPT.
|
||||
* This is used to ensure that calculations do not collect dust beyond the
|
||||
* precision of the reference value for IOUs, or fractional amounts for the
|
||||
* integral types XRP and MPT.
|
||||
*
|
||||
* @param asset The relevant asset
|
||||
* @param value The value to be rounded
|
||||
@@ -755,7 +718,7 @@ roundToAsset(A const& asset, Number& value)
|
||||
* @param rounding Optional Number rounding mode
|
||||
*/
|
||||
template <AssetType A>
|
||||
[[nodiscard]] Number
|
||||
Number
|
||||
roundToAsset(
|
||||
A const& asset,
|
||||
Number const& value,
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
#include <xrpl/basics/CountedObject.h>
|
||||
#include <xrpl/basics/Number.h>
|
||||
#include <xrpl/protocol/STBase.h>
|
||||
#include <xrpl/protocol/STTakesAsset.h>
|
||||
|
||||
#include <ostream>
|
||||
|
||||
@@ -20,19 +19,8 @@ namespace xrpl {
|
||||
* it can represent a value of any token type (XRP, IOU, or MPT)
|
||||
* without paying the storage cost of duplicating asset information
|
||||
* that may be deduced from the context.
|
||||
*
|
||||
* STNumber derives from STTakesAsset, so that it can be associated with the
|
||||
* related Asset during transaction processing. Which asset is relevant depends
|
||||
* on the object and transaction. As of this writing, only Vault, LoanBroker,
|
||||
* and Loan objects use STNumber fields. All of those fields represent amounts
|
||||
* of the Vault's Asset, so they should be associated with the Vault's Asset.
|
||||
*
|
||||
* e.g.
|
||||
* associateAsset(*loanSle, asset);
|
||||
* associateAsset(*brokerSle, asset);
|
||||
* associateAsset(*vaultSle, asset);
|
||||
*/
|
||||
class STNumber : public STTakesAsset, public CountedObject<STNumber>
|
||||
class STNumber : public STBase, public CountedObject<STNumber>
|
||||
{
|
||||
private:
|
||||
Number value_;
|
||||
@@ -68,9 +56,6 @@ public:
|
||||
bool
|
||||
isDefault() const override;
|
||||
|
||||
void
|
||||
associateAsset(Asset const& a) override;
|
||||
|
||||
operator Number() const
|
||||
{
|
||||
return value_;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user