Compare commits

24 Commits

Author SHA1 Message Date
JCW 52becffa48 Fix issues 2025-08-28 23:04:56 +01:00
JCW b5c4fd4c51 Fix issues 2025-08-28 22:41:41 +01:00
JCW ffa323808d Fix issues 2025-08-28 21:55:14 +01:00
JCW e7e800197e Performance improvement 2025-08-28 20:36:46 +01:00
JCW 6e35bb91ec Hardcode the log style as json 2025-08-27 16:04:14 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 276c02197f Fix PR comments 2025-08-27 16:04:14 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
Jingchen fb228860c8 Update include/xrpl/basics/Log.h 2025-08-27 16:04:14 +01:00
    Co-authored-by: Vito Tumas <5780819+Tapanito@users.noreply.github.com>
JCW b6c2b5cec5 Fix formatting 2025-08-27 16:04:14 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 516271e8fc Improve coverage 2025-08-27 16:04:13 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 0d87dfbdb4 Remove unneeded file 2025-08-27 16:04:13 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW f7b00a929b Fix errors 2025-08-27 16:04:13 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 9b3dd2c3b2 Fix errors 2025-08-27 16:04:13 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 1a159e040e Fix errors 2025-08-27 16:04:13 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 56964984a5 Fix errors 2025-08-27 16:04:13 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 0b31d52896 Fix formatting 2025-08-27 16:04:13 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 5cf589af16 Fix errors 2025-08-27 16:04:12 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 2754c6343b Fix formatting 2025-08-27 16:04:12 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 98bc036d1f Fix to_string error 2025-08-27 16:04:12 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 429617e1ca Fix errors 2025-08-27 16:04:12 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW a513f95fb5 Fix formatting 2025-08-27 16:04:12 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 3740308b61 Support structured logs 2025-08-27 16:04:12 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW f1625c9802 Support structured logs 2025-08-27 16:04:12 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
JCW 73bc28bf4f Support structured logs 2025-08-27 16:04:11 +01:00
    Signed-off-by: JCW <a1q123456@users.noreply.github.com>
Bart 1240bae12b Update Conan dependencies: OpenSSL (#5617) 2025-08-27 16:04:11 +01:00
    This change updates OpenSSL from 1.1.1w to 3.5.2. The code works as-is, but many functions have been marked as deprecated and thus will need to be rewritten. For now we explicitly add the `-DOPENSSL_SUPPRESS_DEPRECATED` flag to give us time to do so, while providing us with the benefits of the updated version.
1723 changed files with 136394 additions and 58755 deletions
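The OpenSSL commit above mentions adding `-DOPENSSL_SUPPRESS_DEPRECATED`. As a hedged sketch (the exact build-file change is not visible in this compare view), the define could be passed at configure time like so:

```bash
# Hypothetical configure invocation: defining OPENSSL_SUPPRESS_DEPRECATED
# silences the deprecation warnings emitted by OpenSSL 3.x headers until
# the deprecated calls are rewritten.
cmake -DCMAKE_CXX_FLAGS="-DOPENSSL_SUPPRESS_DEPRECATED" ..
```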

@@ -1,20 +1,4 @@
---
BreakBeforeBraces: Custom
BraceWrapping:
AfterClass: true
AfterControlStatement: true
AfterEnum: false
AfterFunction: true
AfterNamespace: false
AfterObjCDeclaration: true
AfterStruct: true
AfterUnion: true
BeforeCatch: true
BeforeElse: true
IndentBraces: false
KeepEmptyLinesAtTheStartOfBlocks: false
MaxEmptyLinesToKeep: 1
---
Language: Cpp
AccessModifierOffset: -4
AlignAfterOpenBracket: AlwaysBreak
@@ -34,7 +18,20 @@ AlwaysBreakBeforeMultilineStrings: true
AlwaysBreakTemplateDeclarations: true
BinPackArguments: false
BinPackParameters: false
BraceWrapping:
AfterClass: true
AfterControlStatement: true
AfterEnum: false
AfterFunction: true
AfterNamespace: false
AfterObjCDeclaration: true
AfterStruct: true
AfterUnion: true
BeforeCatch: true
BeforeElse: true
IndentBraces: false
BreakBeforeBinaryOperators: false
BreakBeforeBraces: Custom
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true
ColumnLimit: 80
@@ -69,6 +66,8 @@ IndentFunctionDeclarationAfterType: false
IndentRequiresClause: true
IndentWidth: 4
IndentWrappedFunctionNames: false
KeepEmptyLinesAtTheStartOfBlocks: false
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: false
@@ -97,7 +96,7 @@ TabWidth: 8
UseTab: Never
QualifierAlignment: Right
---
Language: Proto
BasedOnStyle: Google
ColumnLimit: 0
Language: JavaScript
---
Language: Json
IndentWidth: 2

@@ -32,9 +32,6 @@ parsers:
slack_app: false
ignore:
- ".github/scripts/"
- "src/test/"
- "include/xrpl/beast/test/"
- "include/xrpl/beast/unit_test/"
- "src/test/"
- "src/tests/"
- "tests/"

@@ -12,5 +12,3 @@ fe9a5365b8a52d4acc42eb27369247e6f238a4f9
9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
552377c76f55b403a1c876df873a23d780fcc81c
97f0747e103f13e26e45b731731059b32f7679ac
b13370ac0d207217354f1fc1c29aef87769fb8a1
896b8c3b54a22b0497cb0d1ce95e1095f9a227ce

.gitattributes

@@ -1,6 +1,10 @@
# Set default behaviour, in case users don't have core.autocrlf set.
#* text=auto
# These annoying files
rippled.1 binary
LICENSE binary
# Visual Studio
*.sln text eol=crlf
*.vcproj text eol=crlf

@@ -1,7 +1,7 @@
---
name: Bug Report
about: Create a report to help us improve xrpld
title: "[Title with short description] (Version: [xrpld version])"
about: Create a report to help us improve rippled
title: "[Title with short description] (Version: [rippled version])"
labels: ""
assignees: ""
---
@@ -27,7 +27,7 @@ assignees: ""
## Environment
<!--Please describe your environment setup (such as Ubuntu 18.04 with Boost 1.70).-->
<!-- If you are using a formal release, please use the version returned by './xrpld --version' as the version number-->
<!-- If you are using a formal release, please use the version returned by './rippled --version' as the version number-->
<!-- If you are working off of develop, please add the git hash via 'git rev-parse HEAD'-->
## Supporting Files

@@ -1,42 +0,0 @@
name: Build Conan dependencies
description: "Install Conan dependencies, optionally forcing a rebuild of all dependencies."
# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
build_nproc:
description: "The number of processors to use for building."
required: true
force_build:
description: 'Force building of all dependencies ("true", "false").'
required: false
default: "false"
log_verbosity:
description: "The logging verbosity."
required: false
default: "verbose"
runs:
using: composite
steps:
- name: Install Conan dependencies
shell: bash
env:
BUILD_NPROC: ${{ inputs.build_nproc }}
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
BUILD_TYPE: ${{ inputs.build_type }}
LOG_VERBOSITY: ${{ inputs.log_verbosity }}
run: |
echo 'Installing dependencies.'
conan install \
--build="${BUILD_OPTION}" \
--options:host='&:tests=True' \
--options:host='&:xrpld=True' \
--settings:all build_type="${BUILD_TYPE}" \
--conf:all tools.build:jobs=${BUILD_NPROC} \
--conf:all tools.build:verbosity="${LOG_VERBOSITY}" \
--conf:all tools.compilation:verbosity="${LOG_VERBOSITY}" \
.

.github/actions/build/action.yml (new file)

@@ -0,0 +1,34 @@
name: build
inputs:
generator:
default: null
configuration:
required: true
cmake-args:
default: null
cmake-target:
default: all
# An implicit input is the environment variable `build_dir`.
runs:
using: composite
steps:
- name: configure
shell: bash
run: |
cd ${build_dir}
cmake \
${{ inputs.generator && format('-G "{0}"', inputs.generator) || '' }} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
-Dtests=TRUE \
-Dxrpld=TRUE \
${{ inputs.cmake-args }} \
..
- name: build
shell: bash
run: |
cmake \
--build ${build_dir} \
--config ${{ inputs.configuration }} \
--parallel ${NUM_PROCESSORS:-$(nproc)} \
--target ${{ inputs.cmake-target }}

.github/actions/dependencies/action.yml (new file)

@@ -0,0 +1,38 @@
name: dependencies
inputs:
configuration:
required: true
# Implicit inputs are the environment variables `build_dir`, CONAN_REMOTE_URL,
# CONAN_REMOTE_USERNAME, and CONAN_REMOTE_PASSWORD. The latter two are only
# used to upload newly built dependencies to the Conan remote.
runs:
using: composite
steps:
- name: add Conan remote
if: ${{ env.CONAN_REMOTE_URL != '' }}
shell: bash
run: |
echo "Adding Conan remote 'xrplf' at ${{ env.CONAN_REMOTE_URL }}."
conan remote add --index 0 --force xrplf ${{ env.CONAN_REMOTE_URL }}
echo "Listing Conan remotes."
conan remote list
- name: install dependencies
shell: bash
run: |
mkdir -p ${{ env.build_dir }}
cd ${{ env.build_dir }}
conan install \
--output-folder . \
--build missing \
--options:host "&:tests=True" \
--options:host "&:xrpld=True" \
--settings:all build_type=${{ inputs.configuration }} \
..
- name: upload dependencies
if: ${{ env.CONAN_REMOTE_URL != '' && env.CONAN_REMOTE_USERNAME != '' && env.CONAN_REMOTE_PASSWORD != '' && github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
shell: bash
run: |
echo "Logging into Conan remote 'xrplf' at ${{ env.CONAN_REMOTE_URL }}."
conan remote login xrplf "${{ env.CONAN_REMOTE_USERNAME }}" --password "${{ env.CONAN_REMOTE_PASSWORD }}"
echo "Uploading dependencies."
conan upload '*' --confirm --check --remote xrplf

@@ -1,43 +0,0 @@
name: Print build environment
description: "Print environment and some tooling versions"
runs:
using: composite
steps:
- name: Check configuration (Windows)
if: ${{ runner.os == 'Windows' }}
shell: bash
run: |
echo 'Checking environment variables.'
set
echo 'Checking CMake version.'
cmake --version
echo 'Checking Conan version.'
conan --version
- name: Check configuration (Linux and macOS)
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
shell: bash
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'
echo 'Checking environment variables.'
env | sort
echo 'Checking CMake version.'
cmake --version
echo 'Checking compiler version.'
${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version
echo 'Checking Conan version.'
conan --version
echo 'Checking Ninja version.'
ninja --version
echo 'Checking nproc version.'
nproc --version

@@ -1,46 +0,0 @@
name: Setup Conan
description: "Set up Conan configuration, profile, and remote."
inputs:
conan_remote_name:
description: "The name of the Conan remote to use."
required: false
default: xrplf
conan_remote_url:
description: "The URL of the Conan endpoint to use."
required: false
default: https://conan.ripplex.io
runs:
using: composite
steps:
- name: Set up Conan configuration
shell: bash
run: |
echo 'Installing configuration.'
cat conan/global.conf ${{ runner.os == 'Linux' && '>>' || '>' }} $(conan config home)/global.conf
echo 'Conan configuration:'
conan config show '*'
- name: Set up Conan profile
shell: bash
run: |
echo 'Installing profile.'
conan config install conan/profiles/default -tf $(conan config home)/profiles/
echo 'Conan profile:'
conan profile show
- name: Set up Conan remote
shell: bash
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
run: |
echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"
echo 'Listing Conan remotes.'
conan remote list

@@ -1,43 +0,0 @@
## Renaming ripple(d) to xrpl(d)
In the initial phases of development of the XRPL, the open-source codebase was
called "rippled", a name it retains to this day. Today, over 1000
nodes run the application, and code contributions have been submitted by
developers located around the world. The XRPL community is larger than ever.
In light of the decentralized and diversified nature of XRPL, we will rename any
references to `ripple` and `rippled` to `xrpl` and `xrpld`, when appropriate.
See [here](https://xls.xrpl.org/xls/XLS-0095-rename-rippled-to-xrpld.html) for
more information.
### Scripts
To facilitate this transition, there will be multiple scripts that developers
can run on their own PRs and forks to minimize conflicts. Each script should be
run from the repository root.
1. `.github/scripts/rename/definitions.sh`: This script will rename all
definitions, such as include guards, from `RIPPLE_XXX` and `RIPPLED_XXX` to
`XRPL_XXX`.
2. `.github/scripts/rename/copyright.sh`: This script will remove superfluous
copyright notices.
3. `.github/scripts/rename/cmake.sh`: This script will rename all CMake files
from `RippleXXX.cmake` or `RippledXXX.cmake` to `XrplXXX.cmake`, and any
references to `ripple` and `rippled` (with or without capital letters) to
`xrpl` and `xrpld`, respectively. The name of the binary will remain as-is,
and will only be renamed to `xrpld` by a later script.
4. `.github/scripts/rename/binary.sh`: This script will rename the binary from
`rippled` to `xrpld`, and reverses the symlink so that `rippled` points to
the `xrpld` binary.
5. `.github/scripts/rename/namespace.sh`: This script will rename the C++
namespaces from `ripple` to `xrpl`.
You can run all these scripts from the repository root as follows:
```shell
./.github/scripts/rename/definitions.sh .
./.github/scripts/rename/copyright.sh .
./.github/scripts/rename/cmake.sh .
./.github/scripts/rename/binary.sh .
./.github/scripts/rename/namespace.sh .
```

@@ -1,54 +0,0 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1
fi
SED_COMMAND=gsed
fi
# This script changes the binary name from `rippled` to `xrpld`, and reverses
# the symlink that currently points from `xrpld` to `rippled` so that it points
# from `rippled` to `xrpld` instead.
# Usage: .github/scripts/rename/binary.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
pushd ${DIRECTORY}
# Remove the binary name override added by the cmake.sh script.
${SED_COMMAND} -z -i -E 's@\s+# For the time being.+"rippled"\)@@' cmake/XrplCore.cmake
# Reverse the symlink.
${SED_COMMAND} -i -E 's@create_symbolic_link\(rippled@create_symbolic_link(xrpld@' cmake/XrplInstall.cmake
${SED_COMMAND} -i -E 's@/xrpld\$\{suffix\}@/rippled${suffix}@' cmake/XrplInstall.cmake
# Rename references to the binary.
${SED_COMMAND} -i -E 's@rippled@xrpld@g' BUILD.md
${SED_COMMAND} -i -E 's@rippled@xrpld@g' CONTRIBUTING.md
${SED_COMMAND} -i -E 's@rippled@xrpld@g' .github/ISSUE_TEMPLATE/bug_report.md
# Restore and/or fix certain renames. The pre-commit hook will update the
# formatting upon saving/committing.
${SED_COMMAND} -i -E 's@ripple/xrpld@XRPLF/rippled@g' BUILD.md
${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' BUILD.md
${SED_COMMAND} -i -E 's@xrpld \(`xrpld`\)@xrpld@g' BUILD.md
${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' CONTRIBUTING.md
popd
echo "Processing complete."

@@ -1,92 +0,0 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# On MacOS, ensure that GNU sed and head are installed and available as `gsed`
# and `ghead`, respectively.
SED_COMMAND=sed
HEAD_COMMAND=head
if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1
fi
SED_COMMAND=gsed
if ! command -v ghead &> /dev/null; then
echo "Error: ghead is not installed. Please install it using 'brew install coreutils'."
exit 1
fi
HEAD_COMMAND=ghead
fi
# This script renames CMake files from `RippleXXX.cmake` or `RippledXXX.cmake`
# to `XrplXXX.cmake`, and any references to `ripple` and `rippled` (with or
# without capital letters) to `xrpl` and `xrpld`, respectively. The name of the
# binary will remain as-is, and will only be renamed to `xrpld` in a different
# script, but the proto file will be renamed.
# Usage: .github/scripts/rename/cmake.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
pushd ${DIRECTORY}
# Rename the files.
find cmake -type f -name 'Rippled*.cmake' -exec bash -c 'mv "${1}" "${1/Rippled/Xrpl}"' - {} \;
find cmake -type f -name 'Ripple*.cmake' -exec bash -c 'mv "${1}" "${1/Ripple/Xrpl}"' - {} \;
if [ -e cmake/xrpl_add_test.cmake ]; then
mv cmake/xrpl_add_test.cmake cmake/XrplAddTest.cmake
fi
if [ -e include/xrpl/proto/ripple.proto ]; then
mv include/xrpl/proto/ripple.proto include/xrpl/proto/xrpl.proto
fi
# Rename inside the files.
find cmake -type f -name '*.cmake' | while read -r FILE; do
echo "Processing file: ${FILE}"
${SED_COMMAND} -i 's/Rippled/Xrpld/g' "${FILE}"
${SED_COMMAND} -i 's/Ripple/Xrpl/g' "${FILE}"
${SED_COMMAND} -i 's/rippled/xrpld/g' "${FILE}"
${SED_COMMAND} -i 's/ripple/xrpl/g' "${FILE}"
done
${SED_COMMAND} -i -E 's/Rippled?/Xrpl/g' CMakeLists.txt
${SED_COMMAND} -i 's/ripple/xrpl/g' CMakeLists.txt
${SED_COMMAND} -i 's/include(xrpl_add_test)/include(XrplAddTest)/' src/tests/libxrpl/CMakeLists.txt
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' include/xrpl/protocol/messages.h
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' BUILD.md
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' BUILD.md
# Restore the name of the validator keys repository.
${SED_COMMAND} -i 's@xrpl/validator-keys-tool@ripple/validator-keys-tool@' cmake/XrplValidatorKeys.cmake
# Ensure the name of the binary and config remain 'rippled' for now.
${SED_COMMAND} -i -E 's/xrpld(-example)?\.cfg/rippled\1.cfg/g' cmake/XrplInstall.cmake
if grep -q '"xrpld"' cmake/XrplCore.cmake; then
# The script has been rerun, so just restore the name of the binary.
${SED_COMMAND} -i 's/"xrpld"/"rippled"/' cmake/XrplCore.cmake
elif ! grep -q '"rippled"' cmake/XrplCore.cmake; then
${HEAD_COMMAND} -n -1 cmake/XrplCore.cmake > cmake.tmp
echo ' # For the time being, we will keep the name of the binary as it was.' >> cmake.tmp
echo ' set_target_properties(xrpld PROPERTIES OUTPUT_NAME "rippled")' >> cmake.tmp
tail -1 cmake/XrplCore.cmake >> cmake.tmp
mv cmake.tmp cmake/XrplCore.cmake
fi
# Restore the symlink from 'xrpld' to 'rippled'.
${SED_COMMAND} -i -E 's@create_symbolic_link\(xrpld@create_symbolic_link(rippled@' cmake/XrplInstall.cmake
# Remove the symlink that previously pointed from 'ripple' to 'xrpl' but now is
# no longer needed.
${SED_COMMAND} -z -i -E 's@install\(CODE.+CMAKE_INSTALL_INCLUDEDIR}/xrpl\)\n"\)\n+@@' cmake/XrplInstall.cmake
popd
echo "Renaming complete."

@@ -1,103 +0,0 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1
fi
SED_COMMAND=gsed
fi
# This script removes superfluous copyright notices in source and header files
# in this project. Specifically, it removes all notices referencing Ripple,
# XRPLF, and certain individual contributors upon mutual agreement, so the one
# in the LICENSE.md file applies throughout. Copyright notices referencing
# external contributions, e.g. from Bitcoin, remain as-is.
# Usage: .github/scripts/rename/copyright.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
pushd ${DIRECTORY}
# Prevent sed and echo from removing newlines and tabs in string literals by
# temporarily replacing them with placeholders. This only affects one file.
PLACEHOLDER_NEWLINE="__NEWLINE__"
PLACEHOLDER_TAB="__TAB__"
${SED_COMMAND} -i -E "s@\\\n@${PLACEHOLDER_NEWLINE}@g" src/test/rpc/ValidatorInfo_test.cpp
${SED_COMMAND} -i -E "s@\\\t@${PLACEHOLDER_TAB}@g" src/test/rpc/ValidatorInfo_test.cpp
# Process the include/ and src/ directories.
DIRECTORIES=("include" "src")
for DIRECTORY in "${DIRECTORIES[@]}"; do
echo "Processing directory: ${DIRECTORY}"
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.macro" \) | while read -r FILE; do
echo "Processing file: ${FILE}"
# Handle the cases where the copyright notice is enclosed in /* ... */
# and usually surrounded by //---- and //======.
${SED_COMMAND} -z -i -E 's@^//-------+\n+@@' "${FILE}"
${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}"
${SED_COMMAND} -z -i -E 's@^//=======+\n+@@' "${FILE}"
# Handle the cases where the copyright notice is commented out with //.
${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}"
done
done
# Restore copyright notices that were removed from specific files, without
# restoring the verbiage that is already present in LICENSE.md. Ensure that if
# the script is run multiple times, duplicate notices are not added.
if ! grep -q 'Raw Material Software' include/xrpl/beast/core/CurrentThreadName.h; then
echo -e "// Portions of this file are from JUCE (http://www.juce.com).\n// Copyright (c) 2013 - Raw Material Software Ltd.\n// Please visit http://www.juce.com\n\n$(cat include/xrpl/beast/core/CurrentThreadName.h)" > include/xrpl/beast/core/CurrentThreadName.h
fi
if ! grep -q 'Dev Null' src/test/app/NetworkID_test.cpp; then
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/NetworkID_test.cpp)" > src/test/app/NetworkID_test.cpp
fi
if ! grep -q 'Dev Null' src/test/app/tx/apply_test.cpp; then
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/tx/apply_test.cpp)" > src/test/app/tx/apply_test.cpp
fi
if ! grep -q 'Dev Null' src/test/rpc/ManifestRPC_test.cpp; then
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ManifestRPC_test.cpp)" > src/test/rpc/ManifestRPC_test.cpp
fi
if ! grep -q 'Dev Null' src/test/rpc/ValidatorInfo_test.cpp; then
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ValidatorInfo_test.cpp)" > src/test/rpc/ValidatorInfo_test.cpp
fi
if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/DoManifest.cpp; then
echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/DoManifest.cpp)" > src/xrpld/rpc/handlers/DoManifest.cpp
fi
if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/ValidatorInfo.cpp; then
echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/ValidatorInfo.cpp)" > src/xrpld/rpc/handlers/ValidatorInfo.cpp
fi
if ! grep -q 'Bougalis' include/xrpl/basics/SlabAllocator.h; then
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h
fi
if ! grep -q 'Bougalis' include/xrpl/basics/spinlock.h; then
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h
fi
if ! grep -q 'Bougalis' include/xrpl/basics/tagged_integer.h; then
echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h
fi
if ! grep -q 'Ritchford' include/xrpl/beast/utility/Zero.h; then
echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h
fi
# Restore newlines and tabs in string literals in the affected file.
${SED_COMMAND} -i -E "s@${PLACEHOLDER_NEWLINE}@\\\n@g" src/test/rpc/ValidatorInfo_test.cpp
${SED_COMMAND} -i -E "s@${PLACEHOLDER_TAB}@\\\t@g" src/test/rpc/ValidatorInfo_test.cpp
popd
echo "Removal complete."

@@ -1,42 +0,0 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1
fi
SED_COMMAND=gsed
fi
# This script renames definitions, such as include guards, in this project.
# Specifically, it renames "RIPPLED_XXX" and "RIPPLE_XXX" to "XRPL_XXX" by
# scanning all cmake, header, and source files in the specified directory and
# its subdirectories.
# Usage: .github/scripts/rename/definitions.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" \) | while read -r FILE; do
echo "Processing file: ${FILE}"
${SED_COMMAND} -i -E 's@#(define|endif|if|ifdef|ifndef)(.*)(RIPPLED_|RIPPLE_)([A-Z0-9_]+)@#\1\2XRPL_\4@g' "${FILE}"
done
find "${DIRECTORY}" -type f \( -name "*.cmake" -o -name "*.txt" \) | while read -r FILE; do
echo "Processing file: ${FILE}"
${SED_COMMAND} -i -E 's@(RIPPLED_|RIPPLE_)([A-Z0-9_]+)@XRPL_\2@g' "${FILE}"
done
echo "Renaming complete."

@@ -1,58 +0,0 @@
#!/bin/bash
# Exit the script as soon as an error occurs.
set -e
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then
if ! command -v gsed &> /dev/null; then
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
exit 1
fi
SED_COMMAND=gsed
fi
# This script renames the `ripple` namespace to `xrpl` in this project.
# Specifically, it renames all occurrences of `namespace ripple` and `ripple::`
# to `namespace xrpl` and `xrpl::`, respectively, by scanning all header and
# source files in the specified directory and its subdirectories, as well as any
# occurrences in the documentation. It also renames them in the test suites.
# Usage: .github/scripts/rename/namespace.sh <repository directory>
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <repository directory>"
exit 1
fi
DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
echo "Error: Directory '${DIRECTORY}' does not exist."
exit 1
fi
pushd ${DIRECTORY}
DIRECTORIES=("include" "src" "tests")
for DIRECTORY in "${DIRECTORIES[@]}"; do
echo "Processing directory: ${DIRECTORY}"
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" \) | while read -r FILE; do
echo "Processing file: ${FILE}"
${SED_COMMAND} -i 's/namespace ripple/namespace xrpl/g' "${FILE}"
${SED_COMMAND} -i 's/ripple::/xrpl::/g' "${FILE}"
${SED_COMMAND} -i -E 's/(BEAST_DEFINE_TESTSUITE.+)ripple(.+)/\1xrpl\2/g' "${FILE}"
done
done
# Special case for NuDBFactory that has ripple twice in the test suite name.
${SED_COMMAND} -i -E 's/(BEAST_DEFINE_TESTSUITE.+)ripple(.+)/\1xrpl\2/g' src/test/nodestore/NuDBFactory_test.cpp
DIRECTORY=$1
find "${DIRECTORY}" -type f -name "*.md" | while read -r FILE; do
echo "Processing file: ${FILE}"
${SED_COMMAND} -i 's/ripple::/xrpl::/g' "${FILE}"
done
popd
echo "Renaming complete."

@@ -1,118 +0,0 @@
# Strategy Matrix
The scripts in this directory will generate a strategy matrix for GitHub Actions
CI, depending on the trigger that caused the workflow to run and the platform
specified.
There are several build, test, and publish settings that can be enabled for each
configuration. The settings are combined in a Cartesian product to generate the
full matrix, while filtering out any combinations not applicable to the trigger.
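As a minimal sketch of that expansion, with illustrative settings rather than
the real ones (the actual logic lives in `generate.py`, shown below):

```bash
# Illustrative only: expand example build modes and types into configs,
# keeping just the combinations that apply to the current trigger.
TRIGGER="${1:-commit}"
for mode in unity-off unity-on; do
  for type in debug release; do
    for trig in commit merge schedule; do
      # Filter out combinations not applicable to the current trigger.
      [[ "${trig}" == "${TRIGGER}" ]] || continue
      echo "config: ${mode}-${type} (trigger: ${trig})"
    done
  done
done
```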
## Platforms
We support three platforms: Linux, macOS, and Windows.
### Linux
We support a variety of distributions (Debian, RHEL, and Ubuntu) and compilers
(GCC and Clang) on Linux. As there are so many combinations, we don't run them
all. Instead, we focus on a few key ones for PR commits and merges, while we run
most of them on a scheduled or ad hoc basis.
Some noteworthy configurations are:
- The official release build is GCC 14 on Debian Bullseye.
- Although we generally enable assertions in release builds, we disable them
for the official release build.
- We publish .deb and .rpm packages for this build, as well as a Docker image.
- For PR commits we also publish packages and images for testing purposes.
- Antithesis instrumentation is only supported on Clang 16+ on AMD64.
- We publish a Docker image for this build, but no packages.
- Coverage reports are generated on Bullseye with GCC 15.
- It must be enabled for both commits (to show PR coverage) and merges (to
show default branch coverage).
Note that we try to run pipelines equally across both AMD64 and ARM64, but in
some cases we cannot build on ARM64:
- All Clang 20+ builds on ARM64 are currently skipped due to a Boost build
error.
- All RHEL builds on AMD64 are currently skipped due to a build failure that
needs further investigation.
Also note that to create a multi-arch Docker image we ideally build on both
AMD64 and ARM64. Both configs should therefore be triggered
by the same event. However, as the script outputs individual configs, the
workflow must be able to run both builds separately and then merge the
single-arch images afterward into a multi-arch image.
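A hedged sketch of that final merge step, assuming `docker buildx` is
available; the image names are placeholders, not the ones used by the
workflow:

```bash
# Combine two single-arch images into one multi-arch manifest list.
docker buildx imagetools create \
  --tag ghcr.io/example/app:latest \
  ghcr.io/example/app:latest-amd64 \
  ghcr.io/example/app:latest-arm64
```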
### macOS
We support building on macOS, which uses the Apple Clang compiler and the ARM64
architecture. We use default settings for all builds, and don't publish any
packages or images.
### Windows
We also support building on Windows, which uses the MSVC compiler and the AMD64
architecture. While we could build on ARM64, we have not yet found a suitable
cloud machine to use as a GitHub runner. We use default settings for all builds,
and don't publish any packages or images.
## Triggers
We have four triggers that can cause the workflow to run:
- `commit`: A commit is pushed to a branch for which a pull request is open.
- `merge`: A pull request is merged.
- `label`: A label is added to a pull request.
- `schedule`: The workflow is run on a scheduled basis.
The `label` trigger is currently not used, but it is reserved for future use.
The `schedule` trigger is used to run the workflow each weekday, and is also
used for ad hoc testing via the `workflow_dispatch` event.
### Dependencies
The pipeline that is run for the `schedule` trigger will recompile and upload
all Conan packages to the remote for each configuration that is enabled. In
case any dependencies were added or updated in a recently merged PR, they will
then be available in the remote for the following pipeline runs. It is therefore
important that all configurations that are enabled for the `commit`, `merge`,
and `label` triggers are also enabled for the `schedule` trigger. We run
additional configurations in the `schedule` trigger that are not run for the
other triggers, to get extra confidence that the codebase can compile and run on
all supported platforms.
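In terms of the commands shown in the actions above, a scheduled run is
roughly equivalent to the following for each enabled configuration:

```bash
# Sketch of the scheduled dependency refresh: force-build all Conan
# packages for one configuration, then upload them to the 'xrplf' remote.
conan install . --build='*' --settings:all build_type=Release
conan upload '*' --confirm --check --remote xrplf
```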
#### Caveats
There is some nuance here in that certain options affect the compilation of the
dependencies, while others do not. This means that the same options need to be
enabled for the `schedule` trigger as for the other triggers to ensure any
dependency changes get cached in the Conan remote.
- Build mode (`unity`): Does not affect the dependencies.
- Build option (`coverage`, `voidstar`): Does not affect the dependencies.
- Build option (`sanitizer asan`, `sanitizer tsan`): Affects the dependencies.
- Build type (`debug`, `release`): Affects the dependencies.
- Build type (`publish`): Same effect as `release` on the dependencies.
- Test option (`reference fee`): Does not affect the dependencies.
- Publish option (`package`, `image`): Does not affect the dependencies.
## Usage
Our GitHub CI pipeline uses the `generate.py` script to generate the matrix for
the current workflow invocation. Naturally, the script can be run locally to
generate the matrix for testing purposes, e.g.:
```bash
python3 generate.py --platform=linux --trigger=commit
```
If you want to pretty-print the output, you can pipe it to `jq` after stripping
off the `matrix=` prefix, e.g.:
```bash
python3 generate.py --platform=linux --trigger=commit | cut -d= -f2- | jq
```

@@ -1,211 +0,0 @@
#!/usr/bin/env python3
import argparse
import dataclasses
import itertools
from collections.abc import Iterator
import linux
import macos
import windows
from helpers.defs import *
from helpers.enums import *
from helpers.funcs import *
from helpers.unique import *
# The GitHub runner tags to use for the different architectures.
RUNNER_TAGS = {
Arch.LINUX_AMD64: ["self-hosted", "Linux", "X64", "heavy"],
Arch.LINUX_ARM64: ["self-hosted", "Linux", "ARM64", "heavy-arm64"],
Arch.MACOS_ARM64: ["self-hosted", "macOS", "ARM64", "mac-runner-m1"],
Arch.WINDOWS_AMD64: ["self-hosted", "Windows", "devbox"],
}
def generate_configs(distros: list[Distro], trigger: Trigger) -> list[Config]:
"""Generate a strategy matrix for GitHub Actions CI.
Args:
distros: The distros to generate the matrix for.
trigger: The trigger that caused the workflow to run.
Returns:
list[Config]: The generated configurations.
Raises:
ValueError: If any of the required fields are empty or invalid.
TypeError: If any of the required fields are of the wrong type.
"""
configs = []
for distro in distros:
for config in generate_config_for_distro(distro, trigger):
configs.append(config)
if not is_unique(configs):
raise ValueError("configs must be a list of unique Config")
return configs
def generate_config_for_distro(distro: Distro, trigger: Trigger) -> Iterator[Config]:
"""Generate a strategy matrix for a specific distro.
Args:
distro: The distro to generate the matrix for.
trigger: The trigger that caused the workflow to run.
Yields:
Config: The next configuration to build.
Raises:
ValueError: If any of the required fields are empty or invalid.
TypeError: If any of the required fields are of the wrong type.
"""
for spec in distro.specs:
if trigger not in spec.triggers:
continue
os_name = distro.os_name
os_version = distro.os_version
compiler_name = distro.compiler_name
compiler_version = distro.compiler_version
image_sha = distro.image_sha
yield from generate_config_for_distro_spec(
os_name,
os_version,
compiler_name,
compiler_version,
image_sha,
spec,
trigger,
)
def generate_config_for_distro_spec(
os_name: str,
os_version: str,
compiler_name: str,
compiler_version: str,
image_sha: str,
spec: Spec,
trigger: Trigger,
) -> Iterator[Config]:
"""Generate a strategy matrix for a specific distro and spec.
Args:
os_name: The OS name.
os_version: The OS version.
compiler_name: The compiler name.
compiler_version: The compiler version.
image_sha: The image SHA.
spec: The spec to generate the matrix for.
trigger: The trigger that caused the workflow to run.
Yields:
Config: The next configuration to build.
"""
for trigger_, arch, build_mode, build_type in itertools.product(
spec.triggers, spec.archs, spec.build_modes, spec.build_types
):
if trigger_ != trigger:
continue
build_option = spec.build_option
test_option = spec.test_option
publish_option = spec.publish_option
# Determine the configuration name.
config_name = generate_config_name(
os_name,
os_version,
compiler_name,
compiler_version,
arch,
build_type,
build_mode,
build_option,
)
# Determine the CMake arguments.
cmake_args = generate_cmake_args(
compiler_name,
compiler_version,
build_type,
build_mode,
build_option,
test_option,
)
# Determine the CMake target.
cmake_target = generate_cmake_target(os_name, build_type)
# Determine whether to enable running tests, and to create a package
# and/or image.
enable_tests, enable_package, enable_image = generate_enable_options(
os_name, build_type, publish_option
)
# Determine the image to run in, if applicable.
image = generate_image_name(
os_name,
os_version,
compiler_name,
compiler_version,
image_sha,
)
# Generate the configuration.
yield Config(
config_name=config_name,
cmake_args=cmake_args,
cmake_target=cmake_target,
build_type=("Debug" if build_type == BuildType.DEBUG else "Release"),
enable_tests=enable_tests,
enable_package=enable_package,
enable_image=enable_image,
runs_on=RUNNER_TAGS[arch],
image=image,
)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--platform",
"-p",
required=False,
type=Platform,
choices=list(Platform),
help="The platform to run on.",
)
parser.add_argument(
"--trigger",
"-t",
required=True,
type=Trigger,
choices=list(Trigger),
help="The trigger that caused the workflow to run.",
)
args = parser.parse_args()
# Collect the distros to generate configs for.
distros = []
if args.platform in [None, Platform.LINUX]:
distros += linux.DEBIAN_DISTROS + linux.RHEL_DISTROS + linux.UBUNTU_DISTROS
if args.platform in [None, Platform.MACOS]:
distros += macos.DISTROS
if args.platform in [None, Platform.WINDOWS]:
distros += windows.DISTROS
# Generate the configs.
configs = generate_configs(distros, args.trigger)
# Convert the configs into the format expected by GitHub Actions.
include = []
for config in configs:
include.append(dataclasses.asdict(config))
print(f"matrix={json.dumps({'include': include})}")

@@ -1,466 +0,0 @@
import pytest
from generate import *
@pytest.fixture
def macos_distro():
return Distro(
os_name="macos",
specs=[
Spec(
archs=[Arch.MACOS_ARM64],
build_modes=[BuildMode.UNITY_OFF],
build_option=BuildOption.COVERAGE,
build_types=[BuildType.RELEASE],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
],
)
@pytest.fixture
def windows_distro():
return Distro(
os_name="windows",
specs=[
Spec(
archs=[Arch.WINDOWS_AMD64],
build_modes=[BuildMode.UNITY_ON],
build_option=BuildOption.SANITIZE_ASAN,
build_types=[BuildType.DEBUG],
publish_option=PublishOption.IMAGE_ONLY,
test_option=TestOption.REFERENCE_FEE_500,
triggers=[Trigger.COMMIT, Trigger.SCHEDULE],
)
],
)
@pytest.fixture
def linux_distro():
return Distro(
os_name="debian",
os_version="bookworm",
compiler_name="clang",
compiler_version="16",
image_sha="a1b2c3d4",
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
build_modes=[BuildMode.UNITY_OFF],
build_option=BuildOption.SANITIZE_TSAN,
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.LABEL],
),
Spec(
archs=[Arch.LINUX_AMD64, Arch.LINUX_ARM64],
build_modes=[BuildMode.UNITY_OFF, BuildMode.UNITY_ON],
build_option=BuildOption.VOIDSTAR,
build_types=[BuildType.PUBLISH],
publish_option=PublishOption.PACKAGE_AND_IMAGE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT, Trigger.LABEL],
),
],
)
def test_macos_generate_config_for_distro_spec_matches_trigger(macos_distro):
trigger = Trigger.COMMIT
distro = macos_distro
result = list(
generate_config_for_distro_spec(
distro.os_name,
distro.os_version,
distro.compiler_name,
distro.compiler_version,
distro.image_sha,
distro.specs[0],
trigger,
)
)
assert result == [
Config(
config_name="macos-coverage-release-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0",
cmake_target="all",
build_type="Release",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["self-hosted", "macOS", "ARM64", "mac-runner-m1"],
image=None,
)
]
def test_macos_generate_config_for_distro_spec_no_match_trigger(macos_distro):
trigger = Trigger.MERGE
distro = macos_distro
result = list(
generate_config_for_distro_spec(
distro.os_name,
distro.os_version,
distro.compiler_name,
distro.compiler_version,
distro.image_sha,
distro.specs[0],
trigger,
)
)
assert result == []
def test_macos_generate_config_for_distro_matches_trigger(macos_distro):
trigger = Trigger.COMMIT
distro = macos_distro
result = list(generate_config_for_distro(distro, trigger))
assert result == [
Config(
config_name="macos-coverage-release-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0",
cmake_target="all",
build_type="Release",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["self-hosted", "macOS", "ARM64", "mac-runner-m1"],
image=None,
)
]
def test_macos_generate_config_for_distro_no_match_trigger(macos_distro):
trigger = Trigger.MERGE
distro = macos_distro
result = list(generate_config_for_distro(distro, trigger))
assert result == []
def test_windows_generate_config_for_distro_spec_matches_trigger(
windows_distro,
):
trigger = Trigger.COMMIT
distro = windows_distro
result = list(
generate_config_for_distro_spec(
distro.os_name,
distro.os_version,
distro.compiler_name,
distro.compiler_version,
distro.image_sha,
distro.specs[0],
trigger,
)
)
assert result == [
Config(
config_name="windows-asan-debug-unity-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dunity=ON -DUNIT_TEST_REFERENCE_FEE=500",
cmake_target="install",
build_type="Debug",
enable_tests=False,
enable_package=False,
enable_image=True,
runs_on=["self-hosted", "Windows", "devbox"],
image=None,
)
]
def test_windows_generate_config_for_distro_spec_no_match_trigger(
windows_distro,
):
trigger = Trigger.MERGE
distro = windows_distro
result = list(
generate_config_for_distro_spec(
distro.os_name,
distro.os_version,
distro.compiler_name,
distro.compiler_version,
distro.image_sha,
distro.specs[0],
trigger,
)
)
assert result == []
def test_windows_generate_config_for_distro_matches_trigger(
windows_distro,
):
trigger = Trigger.COMMIT
distro = windows_distro
result = list(generate_config_for_distro(distro, trigger))
assert result == [
Config(
config_name="windows-asan-debug-unity-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dunity=ON -DUNIT_TEST_REFERENCE_FEE=500",
cmake_target="install",
build_type="Debug",
enable_tests=False,
enable_package=False,
enable_image=True,
runs_on=["self-hosted", "Windows", "devbox"],
image=None,
)
]
def test_windows_generate_config_for_distro_no_match_trigger(
windows_distro,
):
trigger = Trigger.MERGE
distro = windows_distro
result = list(generate_config_for_distro(distro, trigger))
assert result == []
def test_linux_generate_config_for_distro_spec_matches_trigger(linux_distro):
trigger = Trigger.LABEL
distro = linux_distro
result = list(
generate_config_for_distro_spec(
distro.os_name,
distro.os_version,
distro.compiler_name,
distro.compiler_version,
distro.image_sha,
distro.specs[1],
trigger,
)
)
assert result == [
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "X64", "heavy"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-unity-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "X64", "heavy"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-unity-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
]
def test_linux_generate_config_for_distro_spec_no_match_trigger(linux_distro):
trigger = Trigger.MERGE
distro = linux_distro
result = list(
generate_config_for_distro_spec(
distro.os_name,
distro.os_version,
distro.compiler_name,
distro.compiler_version,
distro.image_sha,
distro.specs[1],
trigger,
)
)
assert result == []
def test_linux_generate_config_for_distro_matches_trigger(linux_distro):
trigger = Trigger.LABEL
distro = linux_distro
result = list(generate_config_for_distro(distro, trigger))
assert result == [
Config(
config_name="debian-bookworm-clang-16-tsan-debug-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["self-hosted", "Linux", "X64", "heavy"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "X64", "heavy"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-unity-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "X64", "heavy"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-unity-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
]
def test_linux_generate_config_for_distro_no_match_trigger(linux_distro):
trigger = Trigger.MERGE
distro = linux_distro
result = list(generate_config_for_distro(distro, trigger))
assert result == []
def test_generate_configs(macos_distro, windows_distro, linux_distro):
trigger = Trigger.COMMIT
distros = [macos_distro, windows_distro, linux_distro]
result = generate_configs(distros, trigger)
assert result == [
Config(
config_name="macos-coverage-release-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0",
cmake_target="all",
build_type="Release",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["self-hosted", "macOS", "ARM64", "mac-runner-m1"],
image=None,
),
Config(
config_name="windows-asan-debug-unity-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dunity=ON -DUNIT_TEST_REFERENCE_FEE=500",
cmake_target="install",
build_type="Debug",
enable_tests=False,
enable_package=False,
enable_image=True,
runs_on=["self-hosted", "Windows", "devbox"],
image=None,
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "X64", "heavy"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-unity-amd64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "X64", "heavy"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
Config(
config_name="debian-bookworm-clang-16-voidstar-publish-unity-arm64",
cmake_args="-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dunity=ON -Dvoidstar=ON",
cmake_target="install",
build_type="Release",
enable_tests=True,
enable_package=True,
enable_image=True,
runs_on=["self-hosted", "Linux", "ARM64", "heavy-arm64"],
image="ghcr.io/xrplf/ci/debian-bookworm:clang-16-a1b2c3d4",
),
]
def test_generate_configs_raises_on_duplicate_configs(macos_distro):
trigger = Trigger.COMMIT
distros = [macos_distro, macos_distro]
with pytest.raises(ValueError):
generate_configs(distros, trigger)

@@ -1,190 +0,0 @@
from dataclasses import dataclass, field
from helpers.enums import *
from helpers.unique import *
@dataclass
class Config:
"""Represents a configuration to include in the strategy matrix.
Raises:
ValueError: If any of the required fields are empty or invalid.
TypeError: If any of the required fields are of the wrong type.
"""
config_name: str
cmake_args: str
cmake_target: str
build_type: str
enable_tests: bool
enable_package: bool
enable_image: bool
runs_on: list[str]
image: str | None = None
def __post_init__(self):
if not self.config_name:
raise ValueError("config_name cannot be empty")
if not isinstance(self.config_name, str):
raise TypeError("config_name must be a string")
if not self.cmake_args:
raise ValueError("cmake_args cannot be empty")
if not isinstance(self.cmake_args, str):
raise TypeError("cmake_args must be a string")
if not self.cmake_target:
raise ValueError("cmake_target cannot be empty")
if not isinstance(self.cmake_target, str):
raise TypeError("cmake_target must be a string")
if self.cmake_target not in ["all", "install"]:
raise ValueError("cmake_target must be 'all' or 'install'")
if not self.build_type:
raise ValueError("build_type cannot be empty")
if not isinstance(self.build_type, str):
raise TypeError("build_type must be a string")
if self.build_type not in ["Debug", "Release"]:
raise ValueError("build_type must be 'Debug' or 'Release'")
if not isinstance(self.enable_tests, bool):
raise TypeError("enable_tests must be a boolean")
if not isinstance(self.enable_package, bool):
raise TypeError("enable_package must be a boolean")
if not isinstance(self.enable_image, bool):
raise TypeError("enable_image must be a boolean")
if not self.runs_on:
raise ValueError("runs_on cannot be empty")
if not isinstance(self.runs_on, list):
raise TypeError("runs_on must be a list")
if not all(isinstance(runner, str) for runner in self.runs_on):
raise TypeError("runs_on must be a list of strings")
if not all(self.runs_on):
raise ValueError("runs_on must be a list of non-empty strings")
if len(self.runs_on) != len(set(self.runs_on)):
raise ValueError("runs_on must be a list of unique strings")
if self.image and not isinstance(self.image, str):
raise TypeError("image must be a string")
@dataclass
class Spec:
"""Represents a specification used by a configuration.
Raises:
ValueError: If any of the required fields are empty.
TypeError: If any of the required fields are of the wrong type.
"""
archs: list[Arch] = field(
default_factory=lambda: [Arch.LINUX_AMD64, Arch.LINUX_ARM64]
)
build_option: BuildOption = BuildOption.NONE
build_modes: list[BuildMode] = field(
default_factory=lambda: [BuildMode.UNITY_OFF, BuildMode.UNITY_ON]
)
build_types: list[BuildType] = field(
default_factory=lambda: [BuildType.DEBUG, BuildType.RELEASE]
)
publish_option: PublishOption = PublishOption.NONE
test_option: TestOption = TestOption.NONE
triggers: list[Trigger] = field(
default_factory=lambda: [Trigger.COMMIT, Trigger.MERGE, Trigger.SCHEDULE]
)
def __post_init__(self):
if not self.archs:
raise ValueError("archs cannot be empty")
if not isinstance(self.archs, list):
raise TypeError("archs must be a list")
if not all(isinstance(arch, str) for arch in self.archs):
raise TypeError("archs must be a list of Arch")
if len(self.archs) != len(set(self.archs)):
raise ValueError("archs must be a list of unique Arch")
if not isinstance(self.build_option, BuildOption):
raise TypeError("build_option must be a BuildOption")
if not self.build_modes:
raise ValueError("build_modes cannot be empty")
if not isinstance(self.build_modes, list):
raise TypeError("build_modes must be a list")
if not all(
isinstance(build_mode, BuildMode) for build_mode in self.build_modes
):
raise TypeError("build_modes must be a list of BuildMode")
if len(self.build_modes) != len(set(self.build_modes)):
raise ValueError("build_modes must be a list of unique BuildMode")
if not self.build_types:
raise ValueError("build_types cannot be empty")
if not isinstance(self.build_types, list):
raise TypeError("build_types must be a list")
if not all(
isinstance(build_type, BuildType) for build_type in self.build_types
):
raise TypeError("build_types must be a list of BuildType")
if len(self.build_types) != len(set(self.build_types)):
raise ValueError("build_types must be a list of unique BuildType")
if not isinstance(self.publish_option, PublishOption):
raise TypeError("publish_option must be a PublishOption")
if not isinstance(self.test_option, TestOption):
raise TypeError("test_option must be a TestOption")
if not self.triggers:
raise ValueError("triggers cannot be empty")
if not isinstance(self.triggers, list):
raise TypeError("triggers must be a list")
if not all(isinstance(trigger, Trigger) for trigger in self.triggers):
raise TypeError("triggers must be a list of Trigger")
if len(self.triggers) != len(set(self.triggers)):
raise ValueError("triggers must be a list of unique Trigger")
@dataclass
class Distro:
"""Represents a Linux, Windows or macOS distribution with specifications.
Raises:
ValueError: If any of the required fields are empty.
TypeError: If any of the required fields are of the wrong type.
"""
os_name: str
os_version: str = ""
compiler_name: str = ""
compiler_version: str = ""
image_sha: str = ""
specs: list[Spec] = field(default_factory=list)
def __post_init__(self):
if not self.os_name:
raise ValueError("os_name cannot be empty")
if not isinstance(self.os_name, str):
raise TypeError("os_name must be a string")
if self.os_version and not isinstance(self.os_version, str):
raise TypeError("os_version must be a string")
if self.compiler_name and not isinstance(self.compiler_name, str):
raise TypeError("compiler_name must be a string")
if self.compiler_version and not isinstance(self.compiler_version, str):
raise TypeError("compiler_version must be a string")
if self.image_sha and not isinstance(self.image_sha, str):
raise TypeError("image_sha must be a string")
if not self.specs:
raise ValueError("specs cannot be empty")
if not isinstance(self.specs, list):
raise TypeError("specs must be a list")
if not all(isinstance(spec, Spec) for spec in self.specs):
raise TypeError("specs must be a list of Spec")
if not is_unique(self.specs):
raise ValueError("specs must be a list of unique Spec")

@@ -1,743 +0,0 @@
import pytest
from helpers.defs import *
from helpers.enums import *
from helpers.funcs import *
def test_config_valid_none_image():
assert Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image=None,
)
def test_config_valid_empty_image():
assert Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="install",
build_type="Debug",
enable_tests=False,
enable_package=True,
enable_image=False,
runs_on=["label"],
image="",
)
def test_config_valid_with_image():
assert Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="install",
build_type="Release",
enable_tests=False,
enable_package=True,
enable_image=True,
runs_on=["label"],
image="image",
)
def test_config_raises_on_empty_config_name():
with pytest.raises(ValueError):
Config(
config_name="",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_wrong_config_name():
with pytest.raises(TypeError):
Config(
config_name=123,
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_empty_cmake_args():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_wrong_cmake_args():
with pytest.raises(TypeError):
Config(
config_name="config",
cmake_args=123,
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_empty_cmake_target():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_invalid_cmake_target():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="invalid",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_wrong_cmake_target():
with pytest.raises(TypeError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target=123,
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_empty_build_type():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_invalid_build_type():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="invalid",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_wrong_build_type():
with pytest.raises(TypeError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type=123,
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_wrong_enable_tests():
with pytest.raises(TypeError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=123,
enable_package=False,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_wrong_enable_package():
with pytest.raises(TypeError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=123,
enable_image=False,
runs_on=["label"],
image="image",
)
def test_config_raises_on_wrong_enable_image():
with pytest.raises(TypeError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=True,
enable_image=123,
runs_on=["label"],
image="image",
)
def test_config_raises_on_none_runs_on():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=None,
image="image",
)
def test_config_raises_on_empty_runs_on():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=[],
image="image",
)
def test_config_raises_on_invalid_runs_on():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=[""],
image="image",
)
def test_config_raises_on_wrong_runs_on():
with pytest.raises(TypeError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=[123],
image="image",
)
def test_config_raises_on_duplicate_runs_on():
with pytest.raises(ValueError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label", "label"],
image="image",
)
def test_config_raises_on_wrong_image():
with pytest.raises(TypeError):
Config(
config_name="config",
cmake_args="-Doption=ON",
cmake_target="all",
build_type="Debug",
enable_tests=True,
enable_package=False,
enable_image=False,
runs_on=["label"],
image=123,
)
def test_spec_valid():
assert Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_none_archs():
with pytest.raises(ValueError):
Spec(
archs=None,
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_empty_archs():
with pytest.raises(ValueError):
Spec(
archs=[],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_wrong_archs():
with pytest.raises(TypeError):
Spec(
archs=[123],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_duplicate_archs():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64, Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_wrong_build_option():
with pytest.raises(TypeError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=123,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_none_build_modes():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=None,
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_empty_build_modes():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_wrong_build_modes():
with pytest.raises(TypeError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[123],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_none_build_types():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=None,
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_empty_build_types():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_wrong_build_types():
with pytest.raises(TypeError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[123],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_duplicate_build_types():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG, BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_wrong_publish_option():
with pytest.raises(TypeError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=123,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_wrong_test_option():
with pytest.raises(TypeError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=123,
triggers=[Trigger.COMMIT],
)
def test_spec_raises_on_none_triggers():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=None,
)
def test_spec_raises_on_empty_triggers():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[],
)
def test_spec_raises_on_wrong_triggers():
with pytest.raises(TypeError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[123],
)
def test_spec_raises_on_duplicate_triggers():
with pytest.raises(ValueError):
Spec(
archs=[Arch.LINUX_AMD64],
build_option=BuildOption.NONE,
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.NONE,
test_option=TestOption.NONE,
triggers=[Trigger.COMMIT, Trigger.COMMIT],
)
def test_distro_valid_none_image_sha():
assert Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha=None,
specs=[Spec()], # This is valid due to the default values.
)
def test_distro_valid_empty_os_compiler_image_sha():
assert Distro(
os_name="os_name",
os_version="",
compiler_name="",
compiler_version="",
image_sha="",
specs=[Spec()],
)
def test_distro_valid_with_image():
assert Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=[Spec()],
)
def test_distro_raises_on_empty_os_name():
with pytest.raises(ValueError):
Distro(
os_name="",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=[Spec()],
)
def test_distro_raises_on_wrong_os_name():
with pytest.raises(TypeError):
Distro(
os_name=123,
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=[Spec()],
)
def test_distro_raises_on_wrong_os_version():
with pytest.raises(TypeError):
Distro(
os_name="os_name",
os_version=123,
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=[Spec()],
)
def test_distro_raises_on_wrong_compiler_name():
with pytest.raises(TypeError):
Distro(
os_name="os_name",
os_version="os_version",
compiler_name=123,
compiler_version="compiler_version",
image_sha="image_sha",
specs=[Spec()],
)
def test_distro_raises_on_wrong_compiler_version():
with pytest.raises(TypeError):
Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version=123,
image_sha="image_sha",
specs=[Spec()],
)
def test_distro_raises_on_wrong_image_sha():
with pytest.raises(TypeError):
Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha=123,
specs=[Spec()],
)
def test_distro_raises_on_none_specs():
with pytest.raises(ValueError):
Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=None,
)
def test_distro_raises_on_empty_specs():
with pytest.raises(ValueError):
Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=[],
)
def test_distro_raises_on_invalid_specs():
with pytest.raises(ValueError):
Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=[Spec(triggers=[])],
)
def test_distro_raises_on_duplicate_specs():
with pytest.raises(ValueError):
Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=[Spec(), Spec()],
)
def test_distro_raises_on_wrong_specs():
with pytest.raises(TypeError):
Distro(
os_name="os_name",
os_version="os_version",
compiler_name="compiler_name",
compiler_version="compiler_version",
image_sha="image_sha",
specs=[123],
)


@@ -1,75 +0,0 @@
from enum import StrEnum, auto
class Arch(StrEnum):
"""Represents architectures to build for."""
LINUX_AMD64 = "linux/amd64"
LINUX_ARM64 = "linux/arm64"
MACOS_ARM64 = "macos/arm64"
WINDOWS_AMD64 = "windows/amd64"
class BuildMode(StrEnum):
"""Represents whether to perform a unity or non-unity build."""
UNITY_OFF = auto()
UNITY_ON = auto()
class BuildOption(StrEnum):
"""Represents build options to enable."""
NONE = auto()
COVERAGE = auto()
# Address Sanitizer, also includes Undefined Behavior Sanitizer.
SANITIZE_ASAN = auto()
# Thread Sanitizer, also includes Undefined Behavior Sanitizer.
SANITIZE_TSAN = auto()
VOIDSTAR = auto()
class BuildType(StrEnum):
"""Represents the build type to use."""
DEBUG = auto()
RELEASE = auto()
PUBLISH = auto() # Release build without assertions.
class PublishOption(StrEnum):
"""Represents whether to publish a package, an image, or both."""
NONE = auto()
PACKAGE_ONLY = auto()
IMAGE_ONLY = auto()
PACKAGE_AND_IMAGE = auto()
class TestOption(StrEnum):
"""Represents test options to enable, specifically the reference fee to use."""
__test__ = False # Tell pytest to not consider this as a test class.
NONE = "" # Use the default reference fee of 10.
REFERENCE_FEE_500 = "500"
REFERENCE_FEE_1000 = "1000"
class Platform(StrEnum):
"""Represents the platform to use."""
LINUX = "linux"
MACOS = "macos"
WINDOWS = "windows"
class Trigger(StrEnum):
"""Represents the trigger that caused the workflow to run."""
COMMIT = "commit"
LABEL = "label"
MERGE = "merge"
SCHEDULE = "schedule"


@@ -1,235 +0,0 @@
from helpers.defs import *
from helpers.enums import *
def generate_config_name(
os_name: str,
os_version: str | None,
compiler_name: str | None,
compiler_version: str | None,
arch: Arch,
build_type: BuildType,
build_mode: BuildMode,
build_option: BuildOption,
) -> str:
"""Create a configuration name based on the distro details and build
attributes.
The configuration name is used as the display name in the GitHub Actions
UI, and since GitHub truncates long names we have to make sure the most
important information is at the beginning of the name.
Args:
os_name (str): The OS name.
os_version (str): The OS version.
compiler_name (str): The compiler name.
compiler_version (str): The compiler version.
arch (Arch): The architecture.
build_type (BuildType): The build type.
build_mode (BuildMode): The build mode.
build_option (BuildOption): The build option.
Returns:
str: The configuration name.
Raises:
ValueError: If the OS name is empty.
"""
if not os_name:
raise ValueError("os_name cannot be empty")
config_name = os_name
if os_version:
config_name += f"-{os_version}"
if compiler_name:
config_name += f"-{compiler_name}"
if compiler_version:
config_name += f"-{compiler_version}"
if build_option == BuildOption.COVERAGE:
config_name += "-coverage"
elif build_option == BuildOption.VOIDSTAR:
config_name += "-voidstar"
elif build_option == BuildOption.SANITIZE_ASAN:
config_name += "-asan"
elif build_option == BuildOption.SANITIZE_TSAN:
config_name += "-tsan"
if build_type == BuildType.DEBUG:
config_name += "-debug"
elif build_type == BuildType.RELEASE:
config_name += "-release"
elif build_type == BuildType.PUBLISH:
config_name += "-publish"
if build_mode == BuildMode.UNITY_ON:
config_name += "-unity"
config_name += f"-{arch.value.split('/')[1]}"
return config_name
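# Illustrative example (an editorial sketch; the values mirror the unit tests
# in this change): the distro details lead the name and optional parts are
# simply skipped, so GitHub's truncation drops the least important suffixes.
assert (
    generate_config_name(
        "debian",
        "bookworm",
        "clang",
        "16",
        Arch.LINUX_AMD64,
        BuildType.DEBUG,
        BuildMode.UNITY_OFF,
        BuildOption.NONE,
    )
    == "debian-bookworm-clang-16-debug-amd64"
)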
def generate_cmake_args(
compiler_name: str | None,
compiler_version: str | None,
build_type: BuildType,
build_mode: BuildMode,
build_option: BuildOption,
test_option: TestOption,
) -> str:
"""Create the CMake arguments based on the build type and enabled build
options.
- All builds will have the `tests`, `werr`, and `xrpld` options.
- All builds will have the `wextra` option except for GCC 12 and Clang 16.
- All release builds will have the `assert` option.
- Set the unity option if specified.
- Set the coverage option if specified.
- Set the voidstar option if specified.
- Set the reference fee if specified.
Args:
compiler_name (str): The compiler name.
compiler_version (str): The compiler version.
build_type (BuildType): The build type.
build_mode (BuildMode): The build mode.
build_option (BuildOption): The build option.
test_option (TestOption): The test option.
Returns:
str: The CMake arguments.
"""
cmake_args = "-Dtests=ON -Dwerr=ON -Dxrpld=ON"
if not f"{compiler_name}-{compiler_version}" in [
"gcc-12",
"clang-16",
]:
cmake_args += " -Dwextra=ON"
if build_type == BuildType.RELEASE:
cmake_args += " -Dassert=ON"
if build_mode == BuildMode.UNITY_ON:
cmake_args += " -Dunity=ON"
if build_option == BuildOption.COVERAGE:
cmake_args += " -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0"
elif build_option == BuildOption.SANITIZE_ASAN:
pass # TODO: Add ASAN-UBSAN flags.
elif build_option == BuildOption.SANITIZE_TSAN:
pass # TODO: Add TSAN-UBSAN flags.
elif build_option == BuildOption.VOIDSTAR:
cmake_args += " -Dvoidstar=ON"
if test_option != TestOption.NONE:
cmake_args += f" -DUNIT_TEST_REFERENCE_FEE={test_option.value}"
return cmake_args
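# Illustrative example (an editorial sketch; the expected string mirrors the
# unit tests in this change): options accumulate onto the base flags in the
# order they are checked above.
assert (
    generate_cmake_args(
        "a",
        "b",
        BuildType.RELEASE,
        BuildMode.UNITY_ON,
        BuildOption.VOIDSTAR,
        TestOption.REFERENCE_FEE_500,
    )
    == "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dunity=ON -Dvoidstar=ON -DUNIT_TEST_REFERENCE_FEE=500"
)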
def generate_cmake_target(os_name: str, build_type: BuildType) -> str:
"""Create the CMake target based on the build type.
The `install` target is used for Windows and for publishing a package, while
the `all` target is used for all other configurations.
Args:
os_name (str): The OS name.
build_type (BuildType): The build type.
Returns:
str: The CMake target.
"""
if os_name == "windows" or build_type == BuildType.PUBLISH:
return "install"
return "all"
def generate_enable_options(
os_name: str,
build_type: BuildType,
publish_option: PublishOption,
) -> tuple[bool, bool, bool]:
"""Create the enable flags based on the OS name, build option, and publish
option.
We build and test all configurations by default, except for Windows in
Debug, because it is too slow.
Args:
os_name (str): The OS name.
build_type (BuildType): The build type.
publish_option (PublishOption): The publish option.
Returns:
tuple: A tuple containing the enable test, enable package, and enable image flags.
"""
enable_tests = not (os_name == "windows" and build_type == BuildType.DEBUG)
enable_package = publish_option in [
    PublishOption.PACKAGE_ONLY,
    PublishOption.PACKAGE_AND_IMAGE,
]
enable_image = publish_option in [
    PublishOption.IMAGE_ONLY,
    PublishOption.PACKAGE_AND_IMAGE,
]
return enable_tests, enable_package, enable_image
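# Illustrative example (an editorial sketch; mirrors the unit tests in this
# change): only Windows Debug skips tests, while the publish option drives
# the package and image flags independently of the build type.
assert generate_enable_options(
    "windows", BuildType.DEBUG, PublishOption.PACKAGE_AND_IMAGE
) == (False, True, True)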
def generate_image_name(
os_name: str,
os_version: str,
compiler_name: str,
compiler_version: str,
image_sha: str,
) -> str | None:
"""Create the Docker image name based on the distro details.
Args:
os_name (str): The OS name.
os_version (str): The OS version.
compiler_name (str): The compiler name.
compiler_version (str): The compiler version.
image_sha (str): The image SHA.
Returns:
str: The Docker image name or None if not applicable.
Raises:
ValueError: If any of the arguments is empty for Linux.
"""
if os_name == "windows" or os_name == "macos":
return None
if not os_name:
raise ValueError("os_name cannot be empty")
if not os_version:
raise ValueError("os_version cannot be empty")
if not compiler_name:
raise ValueError("compiler_name cannot be empty")
if not compiler_version:
raise ValueError("compiler_version cannot be empty")
if not image_sha:
raise ValueError("image_sha cannot be empty")
return f"ghcr.io/xrplf/ci/{os_name}-{os_version}:{compiler_name}-{compiler_version}-{image_sha}"


@@ -1,419 +0,0 @@
import pytest
from helpers.enums import *
from helpers.funcs import *
def test_generate_config_name_a_b_c_d_debug_amd64():
assert (
generate_config_name(
"a",
"b",
"c",
"d",
Arch.LINUX_AMD64,
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.NONE,
)
== "a-b-c-d-debug-amd64"
)
def test_generate_config_name_a_b_c_release_unity_arm64():
assert (
generate_config_name(
"a",
"b",
"c",
"",
Arch.LINUX_ARM64,
BuildType.RELEASE,
BuildMode.UNITY_ON,
BuildOption.NONE,
)
== "a-b-c-release-unity-arm64"
)
def test_generate_config_name_a_b_coverage_publish_amd64():
assert (
generate_config_name(
"a",
"b",
"",
"",
Arch.LINUX_AMD64,
BuildType.PUBLISH,
BuildMode.UNITY_OFF,
BuildOption.COVERAGE,
)
== "a-b-coverage-publish-amd64"
)
def test_generate_config_name_a_asan_debug_unity_arm64():
assert (
generate_config_name(
"a",
"",
"",
"",
Arch.LINUX_ARM64,
BuildType.DEBUG,
BuildMode.UNITY_ON,
BuildOption.SANITIZE_ASAN,
)
== "a-asan-debug-unity-arm64"
)
def test_generate_config_name_a_c_tsan_release_amd64():
assert (
generate_config_name(
"a",
"",
"c",
"",
Arch.LINUX_AMD64,
BuildType.RELEASE,
BuildMode.UNITY_OFF,
BuildOption.SANITIZE_TSAN,
)
== "a-c-tsan-release-amd64"
)
def test_generate_config_name_a_d_voidstar_debug_amd64():
assert (
generate_config_name(
"a",
"",
"",
"d",
Arch.LINUX_AMD64,
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.VOIDSTAR,
)
== "a-d-voidstar-debug-amd64"
)
def test_generate_config_name_raises_on_none_os_name():
with pytest.raises(ValueError):
generate_config_name(
None,
"b",
"c",
"d",
Arch.LINUX_AMD64,
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.NONE,
)
def test_generate_config_name_raises_on_empty_os_name():
with pytest.raises(ValueError):
generate_config_name(
"",
"b",
"c",
"d",
Arch.LINUX_AMD64,
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.NONE,
)
def test_generate_cmake_args_a_b_debug():
assert (
generate_cmake_args(
"a",
"b",
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.NONE,
TestOption.NONE,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON"
)
def test_generate_cmake_args_gcc_12_no_wextra():
assert (
generate_cmake_args(
"gcc",
"12",
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.NONE,
TestOption.NONE,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON"
)
def test_generate_cmake_args_clang_16_no_wextra():
assert (
generate_cmake_args(
"clang",
"16",
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.NONE,
TestOption.NONE,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON"
)
def test_generate_cmake_args_a_b_release():
assert (
generate_cmake_args(
"a",
"b",
BuildType.RELEASE,
BuildMode.UNITY_OFF,
BuildOption.NONE,
TestOption.NONE,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON"
)
def test_generate_cmake_args_a_b_publish():
assert (
generate_cmake_args(
"a",
"b",
BuildType.PUBLISH,
BuildMode.UNITY_OFF,
BuildOption.NONE,
TestOption.NONE,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON"
)
def test_generate_cmake_args_a_b_unity():
assert (
generate_cmake_args(
"a",
"b",
BuildType.DEBUG,
BuildMode.UNITY_ON,
BuildOption.NONE,
TestOption.NONE,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dunity=ON"
)
def test_generate_cmake_args_a_b_coverage():
assert (
generate_cmake_args(
"a",
"b",
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.COVERAGE,
TestOption.NONE,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0"
)
def test_generate_cmake_args_a_b_voidstar():
assert (
generate_cmake_args(
"a",
"b",
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.VOIDSTAR,
TestOption.NONE,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dvoidstar=ON"
)
def test_generate_cmake_args_a_b_reference_fee_500():
assert (
generate_cmake_args(
"a",
"b",
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.NONE,
TestOption.REFERENCE_FEE_500,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -DUNIT_TEST_REFERENCE_FEE=500"
)
def test_generate_cmake_args_a_b_reference_fee_1000():
assert (
generate_cmake_args(
"a",
"b",
BuildType.DEBUG,
BuildMode.UNITY_OFF,
BuildOption.NONE,
TestOption.REFERENCE_FEE_1000,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -DUNIT_TEST_REFERENCE_FEE=1000"
)
def test_generate_cmake_args_a_b_multiple():
assert (
generate_cmake_args(
"a",
"b",
BuildType.RELEASE,
BuildMode.UNITY_ON,
BuildOption.VOIDSTAR,
TestOption.REFERENCE_FEE_500,
)
== "-Dtests=ON -Dwerr=ON -Dxrpld=ON -Dwextra=ON -Dassert=ON -Dunity=ON -Dvoidstar=ON -DUNIT_TEST_REFERENCE_FEE=500"
)
def test_generate_cmake_target_linux_debug():
assert generate_cmake_target("linux", BuildType.DEBUG) == "all"
def test_generate_cmake_target_linux_release():
assert generate_cmake_target("linux", BuildType.RELEASE) == "all"
def test_generate_cmake_target_linux_publish():
assert generate_cmake_target("linux", BuildType.PUBLISH) == "install"
def test_generate_cmake_target_macos_debug():
assert generate_cmake_target("macos", BuildType.DEBUG) == "all"
def test_generate_cmake_target_macos_release():
assert generate_cmake_target("macos", BuildType.RELEASE) == "all"
def test_generate_cmake_target_macos_publish():
assert generate_cmake_target("macos", BuildType.PUBLISH) == "install"
def test_generate_cmake_target_windows_debug():
assert generate_cmake_target("windows", BuildType.DEBUG) == "install"
def test_generate_cmake_target_windows_release():
assert generate_cmake_target("windows", BuildType.DEBUG) == "install"
def test_generate_cmake_target_windows_publish():
assert generate_cmake_target("windows", BuildType.DEBUG) == "install"
def test_generate_enable_options_linux_debug_no_publish():
assert generate_enable_options("linux", BuildType.DEBUG, PublishOption.NONE) == (
True,
False,
False,
)
def test_generate_enable_options_linux_release_package_only():
assert generate_enable_options(
"linux", BuildType.RELEASE, PublishOption.PACKAGE_ONLY
) == (True, True, False)
def test_generate_enable_options_linux_publish_image_only():
assert generate_enable_options(
"linux", BuildType.PUBLISH, PublishOption.IMAGE_ONLY
) == (True, False, True)
def test_generate_enable_options_macos_debug_package_only():
assert generate_enable_options(
"macos", BuildType.DEBUG, PublishOption.PACKAGE_ONLY
) == (True, True, False)
def test_generate_enable_options_macos_release_image_only():
assert generate_enable_options(
"macos", BuildType.RELEASE, PublishOption.IMAGE_ONLY
) == (True, False, True)
def test_generate_enable_options_macos_publish_package_and_image():
assert generate_enable_options(
"macos", BuildType.PUBLISH, PublishOption.PACKAGE_AND_IMAGE
) == (True, True, True)
def test_generate_enable_options_windows_debug_package_and_image():
assert generate_enable_options(
"windows", BuildType.DEBUG, PublishOption.PACKAGE_AND_IMAGE
) == (False, True, True)
def test_generate_enable_options_windows_release_no_publish():
assert generate_enable_options(
"windows", BuildType.RELEASE, PublishOption.NONE
) == (True, False, False)
def test_generate_enable_options_windows_publish_image_only():
assert generate_enable_options(
"windows", BuildType.PUBLISH, PublishOption.IMAGE_ONLY
) == (True, False, True)
def test_generate_image_name_linux():
assert generate_image_name("a", "b", "c", "d", "e") == "ghcr.io/xrplf/ci/a-b:c-d-e"
def test_generate_image_name_linux_raises_on_empty_os_name():
with pytest.raises(ValueError):
generate_image_name("", "b", "c", "d", "e")
def test_generate_image_name_linux_raises_on_empty_os_version():
with pytest.raises(ValueError):
generate_image_name("a", "", "c", "d", "e")
def test_generate_image_name_linux_raises_on_empty_compiler_name():
with pytest.raises(ValueError):
generate_image_name("a", "b", "", "d", "e")
def test_generate_image_name_linux_raises_on_empty_compiler_version():
with pytest.raises(ValueError):
generate_image_name("a", "b", "c", "", "e")
def test_generate_image_name_linux_raises_on_empty_image_sha():
with pytest.raises(ValueError):
generate_image_name("a", "b", "c", "e", "")
def test_generate_image_name_macos():
assert generate_image_name("macos", "", "", "", "") is None
def test_generate_image_name_macos_extra():
assert generate_image_name("macos", "value", "does", "not", "matter") is None
def test_generate_image_name_windows():
assert generate_image_name("windows", "", "", "", "") is None
def test_generate_image_name_windows_extra():
assert generate_image_name("windows", "value", "does", "not", "matter") is None


@@ -1,30 +0,0 @@
import json
from dataclasses import asdict, is_dataclass
from typing import Any
def is_unique(items: list[Any]) -> bool:
"""Check if a list of dataclass objects contains only unique items.
As the items may not be hashable, we convert them to JSON strings first, and
then check if the list of strings is the same size as the set of strings.
Args:
items: The list of dataclass objects to check.
Returns:
True if the list contains only unique items, False otherwise.
Raises:
TypeError: If any of the items is not a dataclass.
"""
serialized = []
for item in items:
    if not (is_dataclass(item) and not isinstance(item, type)):
        raise TypeError("items must be a list of dataclasses")
    serialized.append(json.dumps(asdict(item)))
return len(serialized) == len(set(serialized))
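# Illustrative example (an editorial sketch, not part of the original file):
# uniqueness is decided on the JSON form of each item, so dataclasses with
# unhashable fields such as lists still compare correctly.
from dataclasses import dataclass

@dataclass
class _Example:
    values: list[int]

assert is_unique([_Example([1, 2]), _Example([3, 4])])
assert not is_unique([_Example([1, 2]), _Example([1, 2])])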


@@ -1,40 +0,0 @@
from dataclasses import dataclass
import pytest
from helpers.unique import *
@dataclass
class ExampleInt:
value: int
@dataclass
class ExampleList:
values: list[int]
def test_unique_int():
assert is_unique([ExampleInt(1), ExampleInt(2), ExampleInt(3)])
def test_not_unique_int():
assert not is_unique([ExampleInt(1), ExampleInt(2), ExampleInt(1)])
def test_unique_list():
assert is_unique(
[ExampleList([1, 2, 3]), ExampleList([4, 5, 6]), ExampleList([7, 8, 9])]
)
def test_not_unique_list():
assert not is_unique(
[ExampleList([1, 2, 3]), ExampleList([4, 5, 6]), ExampleList([1, 2, 3])]
)
def test_unique_raises_on_non_dataclass():
with pytest.raises(TypeError):
is_unique([1, 2, 3])


@@ -1,385 +0,0 @@
from helpers.defs import *
from helpers.enums import *
# The default CI image SHAs to use, which can be specified per distro group and
# can be overridden for individual distros, which is useful when debugging using
# a locally built CI image. See https://github.com/XRPLF/ci for the images.
DEBIAN_SHA = "sha-ca4517d"
RHEL_SHA = "sha-ca4517d"
UBUNTU_SHA = "sha-84afd81"
# We only build a selection of configurations for the various triggers to reduce
# pipeline runtime. Across all three operating systems we aim to cover all GCC
# and Clang versions, while not duplicating configurations too much. See also
# the README for more details.
# The Debian distros to build configurations for.
#
# We have the following distros available:
# - Debian Bullseye: GCC 12-15
# - Debian Bookworm: GCC 13-15, Clang 16-20
# - Debian Trixie: GCC 14-15, Clang 20-21
DEBIAN_DISTROS = [
Distro(
os_name="debian",
os_version="bullseye",
compiler_name="gcc",
compiler_version="14",
image_sha=DEBIAN_SHA,
specs=[
Spec(
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
publish_option=PublishOption.PACKAGE_ONLY,
triggers=[Trigger.COMMIT, Trigger.LABEL],
),
Spec(
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.PUBLISH],
publish_option=PublishOption.PACKAGE_AND_IMAGE,
triggers=[Trigger.MERGE],
),
Spec(
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="debian",
os_version="bullseye",
compiler_name="gcc",
compiler_version="15",
image_sha=DEBIAN_SHA,
specs=[
Spec(
archs=[Arch.LINUX_ARM64],
build_modes=[BuildMode.UNITY_ON],
build_option=BuildOption.COVERAGE,
build_types=[BuildType.DEBUG],
triggers=[Trigger.COMMIT, Trigger.MERGE],
),
Spec(
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="debian",
os_version="bookworm",
compiler_name="gcc",
compiler_version="15",
image_sha=DEBIAN_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="debian",
os_version="bookworm",
compiler_name="clang",
compiler_version="16",
image_sha=DEBIAN_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
build_modes=[BuildMode.UNITY_OFF],
build_option=BuildOption.VOIDSTAR,
build_types=[BuildType.DEBUG],
publish_option=PublishOption.IMAGE_ONLY,
triggers=[Trigger.COMMIT],
),
Spec(
archs=[Arch.LINUX_ARM64],
build_modes=[BuildMode.UNITY_ON],
build_types=[BuildType.RELEASE],
triggers=[Trigger.MERGE],
),
Spec(
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="debian",
os_version="bookworm",
compiler_name="clang",
compiler_version="17",
image_sha=DEBIAN_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="debian",
os_version="bookworm",
compiler_name="clang",
compiler_version="18",
image_sha=DEBIAN_SHA,
specs=[
Spec(
archs=[Arch.LINUX_ARM64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="debian",
os_version="bookworm",
compiler_name="clang",
compiler_version="19",
image_sha=DEBIAN_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="debian",
os_version="trixie",
compiler_name="gcc",
compiler_version="15",
image_sha=DEBIAN_SHA,
specs=[
Spec(
archs=[Arch.LINUX_ARM64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="debian",
os_version="trixie",
compiler_name="clang",
compiler_version="21",
image_sha=DEBIAN_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
triggers=[Trigger.MERGE],
),
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
]
# The RHEL distros to build configurations for.
#
# We have the following distros available:
# - RHEL 8: GCC 14, Clang "any"
# - RHEL 9: GCC 12-14, Clang "any"
# - RHEL 10: GCC 14, Clang "any"
RHEL_DISTROS = [
Distro(
os_name="rhel",
os_version="8",
compiler_name="gcc",
compiler_version="14",
image_sha=RHEL_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="rhel",
os_version="8",
compiler_name="clang",
compiler_version="any",
image_sha=RHEL_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="rhel",
os_version="9",
compiler_name="gcc",
compiler_version="12",
image_sha=RHEL_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
build_modes=[BuildMode.UNITY_ON],
build_types=[BuildType.DEBUG],
triggers=[Trigger.COMMIT],
),
Spec(
archs=[Arch.LINUX_AMD64],
build_modes=[BuildMode.UNITY_ON],
build_types=[BuildType.RELEASE],
triggers=[Trigger.MERGE],
),
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="rhel",
os_version="9",
compiler_name="gcc",
compiler_version="13",
image_sha=RHEL_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="rhel",
os_version="10",
compiler_name="clang",
compiler_version="any",
image_sha=RHEL_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
]
# The Ubuntu distros to build configurations for.
#
# We have the following distros available:
# - Ubuntu Jammy (22.04): GCC 12
# - Ubuntu Noble (24.04): GCC 13-14, Clang 16-20
UBUNTU_DISTROS = [
Distro(
os_name="ubuntu",
os_version="jammy",
compiler_name="gcc",
compiler_version="12",
image_sha=UBUNTU_SHA,
specs=[
Spec(
archs=[Arch.LINUX_ARM64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="ubuntu",
os_version="noble",
compiler_name="gcc",
compiler_version="13",
image_sha=UBUNTU_SHA,
specs=[
Spec(
archs=[Arch.LINUX_ARM64],
build_modes=[BuildMode.UNITY_ON],
build_types=[BuildType.RELEASE],
triggers=[Trigger.MERGE],
),
Spec(
archs=[Arch.LINUX_ARM64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="ubuntu",
os_version="noble",
compiler_name="gcc",
compiler_version="14",
image_sha=UBUNTU_SHA,
specs=[
Spec(
archs=[Arch.LINUX_ARM64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="ubuntu",
os_version="noble",
compiler_name="clang",
compiler_version="17",
image_sha=UBUNTU_SHA,
specs=[
Spec(
archs=[Arch.LINUX_ARM64],
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
triggers=[Trigger.MERGE],
),
Spec(
archs=[Arch.LINUX_ARM64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="ubuntu",
os_version="noble",
compiler_name="clang",
compiler_version="18",
image_sha=UBUNTU_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="ubuntu",
os_version="noble",
compiler_name="clang",
compiler_version="19",
image_sha=UBUNTU_SHA,
specs=[
Spec(
archs=[Arch.LINUX_ARM64],
triggers=[Trigger.SCHEDULE],
),
],
),
Distro(
os_name="ubuntu",
os_version="noble",
compiler_name="clang",
compiler_version="20",
image_sha=UBUNTU_SHA,
specs=[
Spec(
archs=[Arch.LINUX_AMD64],
build_modes=[BuildMode.UNITY_ON],
build_types=[BuildType.DEBUG],
triggers=[Trigger.COMMIT],
),
Spec(
archs=[Arch.LINUX_AMD64],
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.RELEASE],
triggers=[Trigger.MERGE],
),
Spec(
archs=[Arch.LINUX_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
]


@@ -1,20 +0,0 @@
from helpers.defs import *
from helpers.enums import *
DISTROS = [
Distro(
os_name="macos",
specs=[
Spec(
archs=[Arch.MACOS_ARM64],
build_modes=[BuildMode.UNITY_OFF],
build_types=[BuildType.DEBUG],
triggers=[Trigger.COMMIT, Trigger.MERGE],
),
Spec(
archs=[Arch.MACOS_ARM64],
triggers=[Trigger.SCHEDULE],
),
],
),
]


@@ -1,20 +0,0 @@
from helpers.defs import *
from helpers.enums import *
DISTROS = [
Distro(
os_name="windows",
specs=[
Spec(
archs=[Arch.WINDOWS_AMD64],
build_modes=[BuildMode.UNITY_ON],
build_types=[BuildType.RELEASE],
triggers=[Trigger.COMMIT, Trigger.MERGE],
),
Spec(
archs=[Arch.WINDOWS_AMD64],
triggers=[Trigger.SCHEDULE],
),
],
),
]

.github/workflows/clang-format.yml

@@ -0,0 +1,64 @@
name: clang-format
on:
push:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
jobs:
check:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
runs-on: ubuntu-24.04
container: ghcr.io/xrplf/ci/tools-rippled-clang-format
steps:
# For jobs running in containers, $GITHUB_WORKSPACE and ${{ github.workspace }} might not be the
# same directory. The actions/checkout step is *supposed* to checkout into $GITHUB_WORKSPACE and
# then add it to safe.directory (see instructions at https://github.com/actions/checkout)
# but that's apparently not happening for some container images. We can't be sure what is actually
# happening, so let's pre-emptively add both directories to safe.directory. There's a
# GitHub issue opened in 2022 and not resolved in 2025 https://github.com/actions/runner/issues/2058 ¯\_(ツ)_/¯
- run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory ${{ github.workspace }}
- uses: actions/checkout@v4
- name: Format first-party sources
run: |
clang-format --version
find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format -i {} +
- name: Check for differences
id: assert
shell: bash
run: |
set -o pipefail
git diff --exit-code | tee "clang-format.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: clang-format.patch
if-no-files-found: ignore
path: clang-format.patch
- name: What happened?
if: failure() && steps.assert.outcome == 'failure'
env:
PREAMBLE: |
If you are reading this, you are looking at a failed GitHub Actions
job. That means you pushed one or more files that did not conform
to the formatting specified in .clang-format. That may be because
you neglected to run 'git clang-format' or 'clang-format' before
committing, or because your version of clang-format is incompatible
with the one on this machine, which is:
SUGGESTION: |
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run 'git-clang-format --extensions cpp,h,hpp,ipp develop'
in your repo, commit, and push.
run: |
echo "${PREAMBLE}"
clang-format --version
echo "${SUGGESTION}"
exit 1

.github/workflows/doxygen.yml

@@ -0,0 +1,37 @@
name: Build and publish Doxygen documentation
# To test this workflow, push your changes to your fork's `develop` branch.
on:
push:
branches:
- develop
- doxygen
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
documentation:
runs-on: ubuntu-latest
permissions:
contents: write
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
steps:
- name: checkout
uses: actions/checkout@v4
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
cmake --version
doxygen --version
env | sort
- name: build
run: |
mkdir build
cd build
cmake -Donly_docs=TRUE ..
cmake --build . --target docs --parallel $(nproc)
- name: publish
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: build/docs/html

.github/workflows/levelization.yml

@@ -0,0 +1,53 @@
name: levelization
on:
push:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
jobs:
check:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
runs-on: ubuntu-latest
env:
CLANG_VERSION: 10
steps:
- uses: actions/checkout@v4
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Check for differences
id: assert
run: |
set -o pipefail
git diff --exit-code | tee "levelization.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: levelization.patch
if-no-files-found: ignore
path: levelization.patch
- name: What happened?
if: failure() && steps.assert.outcome == 'failure'
env:
MESSAGE: |
If you are reading this, you are looking at a failed GitHub
Actions job. That means you changed the dependency relationships
between the modules in rippled. That may be an improvement or a
regression. This check doesn't judge.
A rule of thumb, though, is that if your changes caused
something to be removed from loops.txt, that's probably an
improvement. If something was added, it's probably a regression.
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run './Builds/levelization/levelization.sh' in your repo,
commit, and push.
See Builds/levelization/README.md for more info.
run: |
echo "${MESSAGE}"
exit 1

.github/workflows/libxrpl.yml

@@ -0,0 +1,91 @@
name: Check libXRPL compatibility with Clio
env:
CONAN_REMOTE_URL: https://conan.ripplex.io
CONAN_LOGIN_USERNAME_XRPLF: ${{ secrets.CONAN_REMOTE_USERNAME }}
CONAN_PASSWORD_XRPLF: ${{ secrets.CONAN_REMOTE_PASSWORD }}
on:
pull_request:
paths:
- "src/libxrpl/protocol/BuildInfo.cpp"
- ".github/workflows/libxrpl.yml"
types: [opened, reopened, synchronize, ready_for_review]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
publish:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
name: Publish libXRPL
outputs:
outcome: ${{ steps.upload.outputs.outcome }}
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.channel.outputs.channel }}
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
steps:
- name: Wait for essential checks to succeed
uses: lewagon/wait-on-check-action@v1.3.4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
running-workflow-name: wait-for-check-regexp
check-regexp: "(dependencies|test).*linux.*" # Ignore windows and mac tests but make sure linux passes
repo-token: ${{ secrets.GITHUB_TOKEN }}
wait-interval: 10
- name: Checkout
uses: actions/checkout@v4
- name: Generate channel
id: channel
shell: bash
run: |
echo channel="clio/pr_${{ github.event.pull_request.number }}" | tee ${GITHUB_OUTPUT}
- name: Export new package
shell: bash
run: |
conan export . ${{ steps.channel.outputs.channel }}
- name: Add Conan remote
shell: bash
run: |
echo "Adding Conan remote 'xrplf' at ${{ env.CONAN_REMOTE_URL }}."
conan remote add xrplf ${{ env.CONAN_REMOTE_URL }} --insert 0 --force
echo "Listing Conan remotes."
conan remote list
- name: Parse new version
id: version
shell: bash
run: |
echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
| awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
- name: Try to authenticate to Conan remote
id: remote
shell: bash
run: |
# `conan user` implicitly uses the environment variables CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
# https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
# https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
# https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
echo outcome=$(conan user --remote xrplf --password >&2 \
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
- name: Upload new package
id: upload
if: (steps.remote.outputs.outcome == 'success')
shell: bash
run: |
echo "conan upload version ${{ steps.version.outputs.version }} on channel ${{ steps.channel.outputs.channel }}"
echo outcome=$(conan upload xrpl/${{ steps.version.outputs.version }}@${{ steps.channel.outputs.channel }} --remote xrplf --confirm >&2 \
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
notify_clio:
name: Notify Clio
runs-on: ubuntu-latest
needs: publish
env:
GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
steps:
- name: Notify Clio about new version
if: (needs.publish.outputs.outcome == 'success')
shell: bash
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[version]=${{ needs.publish.outputs.version }}@${{ needs.publish.outputs.channel }}" \
-F "client_payload[pr]=${{ github.event.pull_request.number }}"

.github/workflows/macos.yml

@@ -0,0 +1,112 @@
name: macos
on:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows, instrumentation)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- "ci/**"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
CONAN_REMOTE_URL: https://conan.ripplex.io
CONAN_REMOTE_USERNAME: ${{ secrets.CONAN_REMOTE_USERNAME }}
CONAN_REMOTE_PASSWORD: ${{ secrets.CONAN_REMOTE_PASSWORD }}
# This part of the Conan configuration is specific to this workflow only; we
# do not want to pollute the 'conan/profiles' directory with settings that
# might not work for other workflows.
CONAN_GLOBAL_CONF: |
core.download:parallel={{os.cpu_count()}}
core.upload:parallel={{os.cpu_count()}}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
jobs:
test:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
strategy:
matrix:
platform:
- macos
generator:
- Ninja
configuration:
- Release
runs-on: [self-hosted, macOS, mac-runner-m1]
env:
# The `build` action requires these variables.
build_dir: .build
NUM_PROCESSORS: 12
steps:
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: install Conan
run: |
brew install conan
- name: install Ninja
if: matrix.generator == 'Ninja'
run: brew install ninja
- name: install python
run: |
if which python > /dev/null 2>&1; then
echo "Python executable exists"
else
brew install python@3.13
ln -s /opt/homebrew/bin/python3 /opt/homebrew/bin/python
fi
- name: install cmake
run: |
if which cmake > /dev/null 2>&1; then
echo "cmake executable exists"
else
brew install cmake
fi
- name: install nproc
run: |
brew install coreutils
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
python --version
conan --version
cmake --version
nproc --version
echo -n "nproc returns: "
nproc
system_profiler SPHardwareDataType
sysctl -n hw.logicalcpu
clang --version
- name: configure Conan
run: |
echo "${CONAN_GLOBAL_CONF}" > $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
- name: build dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: ${{ matrix.generator }}
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
n=$(nproc)
echo "Using $n test jobs"
cd ${build_dir}
./rippled --unittest --unittest-jobs $n
ctest -j $n --output-on-failure

.github/workflows/missing-commits.yml

@@ -0,0 +1,60 @@
name: missing-commits
on:
push:
branches:
# Only check that the branches are up to date when updating the
# relevant branches.
- develop
- release
jobs:
up_to_date:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check for missing commits
id: commits
env:
SUGGESTION: |
If you are reading this, then the commits indicated above are
missing from "develop" and/or "release". Do a reverse-merge
as soon as possible. See CONTRIBUTING.md for instructions.
run: |
set -o pipefail
# Branches ordered by how "canonical" they are. Every commit in
# one branch should be in all the branches behind it
order=( master release develop )
branches=()
for branch in "${order[@]}"
do
# Check that the branches exist so that this job will work on
# forked repos, which don't necessarily have master and
# release branches.
if git ls-remote --exit-code --heads origin \
refs/heads/${branch} > /dev/null
then
branches+=( origin/${branch} )
fi
done
prior=()
for branch in "${branches[@]}"
do
if [[ ${#prior[@]} -ne 0 ]]
then
echo "Checking ${prior[@]} for commits missing from ${branch}"
git log --oneline --no-merges "${prior[@]}" \
^$branch | tee -a "missing-commits.txt"
echo
fi
prior+=( "${branch}" )
done
if [[ $( cat missing-commits.txt | wc -l ) -ne 0 ]]
then
echo "${SUGGESTION}"
exit 1
fi

.github/workflows/nix.yml

@@ -0,0 +1,422 @@
name: nix
on:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- "ci/**"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
CONAN_REMOTE_URL: https://conan.ripplex.io
CONAN_REMOTE_USERNAME: ${{ secrets.CONAN_REMOTE_USERNAME }}
CONAN_REMOTE_PASSWORD: ${{ secrets.CONAN_REMOTE_PASSWORD }}
# This part of the Conan configuration is specific to this workflow only; we
# do not want to pollute the 'conan/profiles' directory with settings that
# might not work for other workflows.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# This workflow has multiple job matrices.
# They can be considered phases because most of the matrices ("test",
# "coverage", "conan") depend on the first ("dependencies").
#
# The first phase has a job in the matrix for each combination of
# variables that affects dependency ABI:
# platform, compiler, and configuration.
# It creates a GitHub artifact holding the Conan profile,
# and builds and caches binaries for all the dependencies.
# If an Artifactory remote is configured, they are cached there.
# If not, they are added to the GitHub artifact.
# GitHub's "cache" action has a size limit (10 GB) that is too small
# to hold the binaries if they are built locally.
# We must use the "{upload,download}-artifact" actions instead.
#
# The remaining phases have a job in the matrix for each test
# configuration. They install dependency binaries from the cache,
# whichever was used, and build and test rippled.
#
# "instrumentation" is independent, but is included here because it also
# builds on linux in the same "on:" conditions.
jobs:
dependencies:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
- clang
configuration:
- Debug
- Release
include:
- compiler: gcc
compiler_version: 12
distro: ubuntu
codename: jammy
- compiler: clang
compiler_version: 16
distro: debian
codename: bookworm
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/${{ matrix.distro }}-${{ matrix.codename }}:${{ matrix.compiler }}-${{ matrix.compiler_version }}
env:
build_dir: .build
steps:
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
lsb_release -a || true
${{ matrix.compiler }}-${{ matrix.compiler_version }} --version
conan --version
cmake --version
env | sort
- name: configure Conan
run: |
echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
- name: archive profile
# Create this archive before dependencies are added to the local cache.
run: tar -czf conan.tar.gz -C ${CONAN_HOME} .
- name: build dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: upload archive
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
path: conan.tar.gz
if-no-files-found: error
test:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
- clang
configuration:
- Debug
- Release
include:
- compiler: gcc
compiler_version: 12
distro: ubuntu
codename: jammy
- compiler: clang
compiler_version: 16
distro: debian
codename: bookworm
cmake-args:
-
- "-Dunity=ON"
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/${{ matrix.distro }}-${{ matrix.codename }}:${{ matrix.compiler }}-${{ matrix.compiler_version }}
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: check linking
run: |
cd ${build_dir}
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: test
run: |
cd ${build_dir}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
reference-fee-test:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
configuration:
- Debug
cmake-args:
- "-DUNIT_TEST_REFERENCE_FEE=200"
- "-DUNIT_TEST_REFERENCE_FEE=1000"
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
cd ${build_dir}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
coverage:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
configuration:
- Debug
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
gcovr --version
env | sort
ls ${CONAN_HOME}
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
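# Build with coverage instrumentation. -O0 avoids optimizer transformations
# that would make the captured line and branch data less accurate.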
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: >-
-Dassert=TRUE
-Dwerr=TRUE
-Dcoverage=ON
-Dcoverage_format=xml
-DCODE_COVERAGE_VERBOSE=ON
-DCMAKE_CXX_FLAGS="-O0"
-DCMAKE_C_FLAGS="-O0"
cmake-target: coverage
- name: move coverage report
shell: bash
run: |
mv "${build_dir}/coverage.xml" ./
- name: archive coverage report
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: coverage.xml
path: coverage.xml
retention-days: 30
- name: upload coverage report
uses: wandalen/wretry.action@v1.4.10
with:
action: codecov/codecov-action@v4.5.0
with: |
files: coverage.xml
fail_ci_if_error: true
disable_search: true
verbose: true
plugin: noop
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 210000 # in milliseconds
conan:
needs: dependencies
runs-on: [self-hosted, heavy]
container:
image: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
env:
build_dir: .build
platform: linux
compiler: gcc
compiler_version: 12
configuration: Release
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: ${{ env.platform }}-${{ env.compiler }}-${{ env.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ env.configuration }}
- name: export
run: |
conan export . --version head
- name: build
run: |
cd tests/conan
mkdir ${build_dir} && cd ${build_dir}
conan install .. \
--settings:all build_type=${configuration} \
--output-folder . \
--build missing
cmake .. \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=./build/${configuration}/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${configuration}
cmake --build .
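# Smoke test: the example app should print the libxrpl version (semver-like).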
./example | grep '^[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+'
instrumentation-build:
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/debian-bookworm:clang-16
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: linux-clang-Debug
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
env | sort
ls ${CONAN_HOME}
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: Debug
- name: prepare environment
run: |
mkdir -p ${build_dir}
echo "SOURCE_DIR=$(pwd)" >> $GITHUB_ENV
echo "BUILD_DIR=$(pwd)/${build_dir}" >> $GITHUB_ENV
- name: build with instrumentation
run: |
cd ${BUILD_DIR}
cmake -S ${SOURCE_DIR} -B ${BUILD_DIR} \
-Dvoidstar=ON \
-Dtests=ON \
-Dxrpld=ON \
-DCMAKE_BUILD_TYPE=Debug \
-DSECP256K1_BUILD_BENCHMARK=OFF \
-DSECP256K1_BUILD_TESTS=OFF \
-DSECP256K1_BUILD_EXHAUSTIVE_TESTS=OFF \
-DCMAKE_TOOLCHAIN_FILE=${BUILD_DIR}/build/generators/conan_toolchain.cmake
cmake --build . --parallel $(nproc)
- name: verify instrumentation enabled
run: |
cd ${BUILD_DIR}
./rippled --version | grep libvoidstar
- name: run unit tests
run: |
cd ${BUILD_DIR}
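# Instrumented runs are heavier, so use only a quarter of the available cores.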
./rippled -u --unittest-jobs $(( $(nproc)/4 ))
ctest -j $(nproc) --output-on-failure

View File

@@ -1,141 +0,0 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as on macOS and Windows, on every push to a
# user branch. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label.
name: PR
on:
merge_group:
types:
- checks_requested
pull_request:
types:
- opened
- reopened
- synchronize
- ready_for_review
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
# This job determines whether the rest of the workflow should run. It runs
# when the PR is not a draft (which should also cover merge-group) or
# has the 'DraftRunCI' label.
should-run:
if: ${{ !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Determine changed files
# This step checks whether any files have changed that should
# cause the next jobs to run. We do it this way rather than
# using `paths` in the `on:` section, because all required
# checks must pass, even for changes that do not modify anything
# that affects those checks. We would therefore like to make the
# checks required only if the job runs, but GitHub does not
# support that directly. By always executing the workflow on new
# commits and by using the changed-files action below, we ensure
# that GitHub considers any skipped jobs to have passed, and in
# turn the required checks as well.
id: changes
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
# These paths are unique to `on-pr.yml`.
.github/scripts/levelization/**
.github/scripts/rename/**
.github/workflows/reusable-check-levelization.yml
.github/workflows/reusable-check-rename.yml
.github/workflows/reusable-notify-clio.yml
.github/workflows/on-pr.yml
# Keep the paths below in sync with those in `on-trigger.yml`.
.github/actions/build-deps/**
.github/actions/build-test/**
.github/actions/setup-conan/**
.github/scripts/strategy-matrix/**
.github/workflows/reusable-build.yml
.github/workflows/reusable-build-test-config.yml
.github/workflows/reusable-build-test.yml
.github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml
.codecov.yml
cmake/**
conan/**
external/**
include/**
src/**
tests/**
CMakeLists.txt
conanfile.py
conan.lock
- name: Check whether to run
# This step determines whether the rest of the workflow should
# run. The rest of the workflow will run if this job runs AND at
# least one of:
# * Any of the files checked in the `changes` step were modified
# * The PR is NOT a draft and is labeled "Ready to merge"
# * The workflow is running from the merge queue
id: go
env:
FILES: ${{ steps.changes.outputs.any_changed }}
DRAFT: ${{ github.event.pull_request.draft }}
READY: ${{ contains(github.event.pull_request.labels.*.name, 'Ready to merge') }}
MERGE: ${{ github.event_name == 'merge_group' }}
run: |
echo "go=${{ (env.DRAFT != 'true' && env.READY == 'true') || env.FILES == 'true' || env.MERGE == 'true' }}" >> "${GITHUB_OUTPUT}"
cat "${GITHUB_OUTPUT}"
outputs:
go: ${{ steps.go.outputs.go == 'true' }}
check-levelization:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-levelization.yml
check-rename:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-rename.yml
build-test:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-build-test.yml
strategy:
fail-fast: false
matrix:
platform: [linux, macos, windows]
with:
platform: ${{ matrix.platform }}
trigger: commit
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
notify-clio:
needs:
- should-run
- build-test
if: ${{ needs.should-run.outputs.go == 'true' && (startsWith(github.base_ref, 'release') || github.base_ref == 'master') }}
uses: ./.github/workflows/reusable-notify-clio.yml
secrets:
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
passed:
if: failure() || cancelled()
needs:
- build-test
- check-levelization
runs-on: ubuntu-latest
steps:
- name: Fail
run: false

View File

@@ -1,75 +0,0 @@
# This workflow runs all workflows to build the dependencies required for the
# project on various Linux flavors, as well as on macOS and Windows, on a
# scheduled basis, on merge into the 'develop', 'release', or 'master' branches,
# or manually. The missing commits check is only run when the code is merged
# into the 'develop' or 'release' branches, and the documentation is built when
# the code is merged into the 'develop' branch.
name: Trigger
on:
push:
branches:
- "develop"
- "release*"
- "master"
paths:
# These paths are unique to `on-trigger.yml`.
- ".github/workflows/on-trigger.yml"
# Keep the paths below in sync with those in `on-pr.yml`.
- ".github/actions/build-deps/**"
- ".github/actions/build-test/**"
- ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**"
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".codecov.yml"
- "cmake/**"
- "conan/**"
- "external/**"
- "include/**"
- "src/**"
- "tests/**"
- "CMakeLists.txt"
- "conanfile.py"
- "conan.lock"
# Run at 06:32 UTC, Monday through Friday. This
# will force all dependencies to be rebuilt, which is useful to verify that
# all dependencies can be built successfully. Only the dependencies that
# are actually missing from the remote will be uploaded.
schedule:
- cron: "32 6 * * 1-5"
# Run when manually triggered via the GitHub UI or API.
workflow_dispatch:
concurrency:
# When a PR is merged into the develop branch it will be assigned a unique
# group identifier, so execution will continue even if another PR is merged
# while it is still running. In all other cases the group identifier is shared
# per branch, so that any in-progress runs are cancelled when a new commit is
# pushed.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
build-test:
uses: ./.github/workflows/reusable-build-test.yml
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
platform: [linux, macos, windows]
with:
platform: ${{ matrix.platform }}
# The workflow dispatch event uses the same trigger as the schedule event.
trigger: ${{ github.event_name == 'push' && 'merge' || 'schedule' }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,15 +0,0 @@
name: Run pre-commit hooks
on:
pull_request:
push:
branches: [develop, release, master]
workflow_dispatch:
jobs:
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
with:
runs_on: ubuntu-latest
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'

View File

@@ -1,72 +0,0 @@
# This workflow builds the documentation for the repository, and publishes it to
# GitHub Pages when changes are merged into the default branch.
name: Build and publish documentation
on:
push:
paths:
- ".github/workflows/publish-docs.yml"
- "*.md"
- "**/*.md"
- "docs/**"
- "include/**"
- "src/libxrpl/**"
- "src/xrpld/**"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
env:
BUILD_DIR: build
NPROC_SUBTRACT: 2
jobs:
publish:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
permissions:
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get number of processors
uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc
with:
subtract: ${{ env.NPROC_SUBTRACT }}
- name: Check configuration
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'
echo 'Checking environment variables.'
env | sort
echo 'Checking CMake version.'
cmake --version
echo 'Checking Doxygen version.'
doxygen --version
- name: Build documentation
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
run: |
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel ${BUILD_NPROC}
- name: Publish documentation
if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ${{ env.BUILD_DIR }}/docs/html

View File

@@ -1,220 +0,0 @@
name: Build and test configuration
on:
workflow_call:
inputs:
build_type:
description: 'The build type to use ("Debug", "Release").'
type: string
required: true
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
type: string
default: ""
cmake_target:
description: "The CMake target to build."
type: string
required: true
enable_tests:
description: "Whether to run the tests."
required: true
type: boolean
enable_package:
description: "Whether to publish a package."
required: true
type: boolean
enable_image:
description: "Whether to publish an image."
required: true
type: boolean
runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string
config_name:
description: "The configuration string (used for naming artifacts and such)."
required: true
type: string
nproc_subtract:
description: "The number of processors to subtract when calculating parallelism."
required: false
type: number
default: 2
secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true
defaults:
run:
shell: bash
env:
# Conan installs the generators in the build/generators directory, see the
# layout() method in conanfile.py. We then run CMake from the build directory.
BUILD_DIR: build
jobs:
build-and-test:
name: ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: 60
env:
ENABLED_VOIDSTAR: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
ENABLED_COVERAGE: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }}
steps:
- name: Cleanup workspace (macOS and Windows)
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/prepare-runner@2ece4ec6ab7de266859a6f053571425b2bd684b6
with:
disable_ccache: false
- name: Print build environment
uses: ./.github/actions/print-env
- name: Get number of processors
uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc
with:
subtract: ${{ inputs.nproc_subtract }}
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ inputs.build_type }}
# Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
- name: Configure CMake
working-directory: ${{ env.BUILD_DIR }}
env:
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_ARGS: ${{ inputs.cmake_args }}
run: |
cmake \
-G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
${CMAKE_ARGS} \
..
- name: Build the binary
working-directory: ${{ env.BUILD_DIR }}
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_TARGET: ${{ inputs.cmake_target }}
run: |
cmake \
--build . \
--config "${BUILD_TYPE}" \
--parallel "${BUILD_NPROC}" \
--target "${CMAKE_TARGET}"
- name: Upload the binary (Linux)
if: ${{ github.repository_owner == 'XRPLF' && runner.os == 'Linux' }}
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: xrpld-${{ inputs.config_name }}
path: ${{ env.BUILD_DIR }}/xrpld
retention-days: 3
if-no-files-found: error
- name: Check linking (Linux)
if: ${{ runner.os == 'Linux' }}
working-directory: ${{ env.BUILD_DIR }}
run: |
ldd ./xrpld
if [ "$(ldd ./xrpld | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Verify presence of instrumentation (Linux)
if: ${{ runner.os == 'Linux' && env.ENABLED_VOIDSTAR == 'true' }}
working-directory: ${{ env.BUILD_DIR }}
run: |
./xrpld --version | grep libvoidstar
- name: Run the separate tests
if: ${{ inputs.enable_tests }}
working-directory: ${{ env.BUILD_DIR }}
# Windows locks some of the build files while running tests, and parallel jobs can collide
env:
BUILD_TYPE: ${{ inputs.build_type }}
PARALLELISM: ${{ runner.os == 'Windows' && '1' || steps.nproc.outputs.nproc }}
run: |
ctest \
--output-on-failure \
-C "${BUILD_TYPE}" \
-j "${PARALLELISM}"
- name: Run the embedded tests
if: ${{ inputs.enable_tests }}
working-directory: ${{ runner.os == 'Windows' && format('{0}/{1}', env.BUILD_DIR, inputs.build_type) || env.BUILD_DIR }}
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
run: |
./xrpld --unittest --unittest-jobs "${BUILD_NPROC}"
- name: Debug failure (Linux)
if: ${{ (failure() || cancelled()) && runner.os == 'Linux' && inputs.enable_tests }}
run: |
echo "IPv4 local port range:"
cat /proc/sys/net/ipv4/ip_local_port_range
echo "Netstat:"
netstat -an
- name: Prepare coverage report
if: ${{ github.repository_owner == 'XRPLF' && env.ENABLED_COVERAGE == 'true' }}
working-directory: ${{ env.BUILD_DIR }}
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
BUILD_TYPE: ${{ inputs.build_type }}
run: |
cmake \
--build . \
--config "${BUILD_TYPE}" \
--parallel "${BUILD_NPROC}" \
--target coverage
- name: Upload coverage report
if: ${{ github.repository_owner == 'XRPLF' && env.ENABLED_COVERAGE == 'true' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true
fail_ci_if_error: true
files: ${{ env.BUILD_DIR }}/coverage.xml
plugins: noop
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

View File

@@ -1,52 +0,0 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test
# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
workflow_call:
inputs:
platform:
description: "The platform to generate the strategy matrix for ('linux', 'macos', 'windows'). If not provided all platforms are used."
required: false
type: string
trigger:
description: "The trigger that caused the workflow to run ('commit', 'label', 'merge', 'schedule')."
required: true
type: string
secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true
jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
platform: ${{ inputs.platform }}
trigger: ${{ inputs.trigger }}
# Build and test the binary for each configuration.
build-test-config:
needs:
- generate-matrix
uses: ./.github/workflows/reusable-build-test-config.yml
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
with:
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_target: ${{ matrix.cmake_target }}
enable_tests: ${{ matrix.enable_tests }}
enable_package: ${{ matrix.enable_package }}
enable_image: ${{ matrix.enable_image }}
runs_on: ${{ toJson(matrix.runs_on) }}
image: ${{ matrix.image }}
config_name: ${{ matrix.config_name }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,46 +0,0 @@
# This workflow checks if the dependencies between the modules are correctly
# indexed.
name: Check levelization
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-levelization
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
levelization:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Check levelization
run: .github/scripts/levelization/generate.sh
- name: Check for differences
env:
MESSAGE: |
The dependency relationships between the modules in xrpld have
changed, which may be an improvement or a regression.
A rule of thumb is that if your changes caused something to be
removed from loops.txt, it's probably an improvement, while if
something was added, it's probably a regression.
Run '.github/scripts/levelization/generate.sh' in your repo, commit
and push the changes. See .github/scripts/levelization/README.md for
more info.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the differences to give the contributor a hint about what to
# expect when running levelization on their own machine.
git diff
echo "${MESSAGE}"
exit 1
fi

View File

@@ -1,50 +0,0 @@
# This workflow checks if the codebase is properly renamed, see more info in
# .github/scripts/rename/README.md.
name: Check rename
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-rename
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
rename:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Check definitions
run: .github/scripts/rename/definitions.sh .
- name: Check copyright notices
run: .github/scripts/rename/copyright.sh .
- name: Check CMake configs
run: .github/scripts/rename/cmake.sh .
- name: Check binary name
run: .github/scripts/rename/binary.sh .
- name: Check namespaces
run: .github/scripts/rename/namespace.sh .
- name: Check for differences
env:
MESSAGE: |
One or more files contain changes that do not adhere to new naming
conventions.
Run the scripts in '.github/scripts/rename/' in your repo, commit
and push the changes. See .github/scripts/rename/README.md for
more info.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the differences to give the contributor a hint about what to
# expect when running the renaming scripts on their own machine.
git diff
echo "${MESSAGE}"
exit 1
fi

View File

@@ -1,91 +0,0 @@
# This workflow exports the built libxrpl package to the Conan remote on a
# channel named after the pull request, and notifies the Clio repository about
# the new version so it can check for compatibility.
name: Notify Clio
# This workflow can only be triggered by other workflows.
on:
workflow_call:
inputs:
conan_remote_name:
description: "The name of the Conan remote to use."
required: false
type: string
default: xrplf
conan_remote_url:
description: "The URL of the Conan endpoint to use."
required: false
type: string
default: https://conan.ripplex.io
secrets:
clio_notify_token:
description: "The GitHub token to notify Clio about new versions."
required: true
conan_remote_username:
description: "The username for logging into the Conan remote."
required: true
conan_remote_password:
description: "The password for logging into the Conan remote."
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-clio
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
upload:
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Generate outputs
id: generate
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
echo 'Generating user and channel.'
echo "user=clio" >> "${GITHUB_OUTPUT}"
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
echo 'Extracting version.'
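# Pull versionString out of BuildInfo.cpp: the text between the double quotes.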
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
- name: Calculate conan reference
id: conan_ref
run: |
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
- name: Set up Conan
uses: ./.github/actions/setup-conan
with:
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
- name: Log into Conan remote
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
- name: Upload package
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: |
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
outputs:
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}
notify:
needs: upload
runs-on: ubuntu-latest
steps:
- name: Notify Clio
env:
GH_TOKEN: ${{ secrets.clio_notify_token }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
-F "client_payload[pr_url]=${PR_URL}"

View File

@@ -1,43 +0,0 @@
name: Generate strategy matrix
on:
workflow_call:
inputs:
platform:
description: "The platform to generate the strategy matrix for ('linux', 'macos', 'windows'). If not provided all platforms are used."
required: false
type: string
trigger:
description: "The trigger that caused the workflow to run ('commit', 'label', 'merge', 'schedule')."
required: true
type: string
outputs:
matrix:
description: "The generated strategy matrix."
value: ${{ jobs.generate-matrix.outputs.matrix }}
defaults:
run:
shell: bash
jobs:
generate-matrix:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.generate.outputs.matrix }}
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: 3.13
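# generate.py prints 'matrix=<json>' to GITHUB_OUTPUT, which feeds the
# job-level 'matrix' output consumed by the calling workflow.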
- name: Generate strategy matrix
working-directory: .github/scripts/strategy-matrix
id: generate
env:
PLATFORM: ${{ inputs.platform != '' && format('--platform={0}', inputs.platform) || '' }}
TRIGGER: ${{ format('--trigger={0}', inputs.trigger) }}
run: ./generate.py ${PLATFORM} ${TRIGGER} >> "${GITHUB_OUTPUT}"

View File

@@ -1,111 +0,0 @@
name: Upload Conan Dependencies
on:
schedule:
- cron: "0 3 * * 2-6"
workflow_dispatch:
inputs:
force_source_build:
description: "Force source build of all dependencies"
required: false
default: false
type: boolean
force_upload:
description: "Force upload of all dependencies"
required: false
default: false
type: boolean
pull_request:
branches: [develop]
paths:
# This allows testing changes to the upload workflow in a PR
- ".github/workflows/upload-conan-deps.yml"
push:
branches: [develop]
paths:
- ".github/workflows/upload-conan-deps.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/actions/build-deps/action.yml"
- ".github/actions/setup-conan/action.yml"
- ".github/scripts/strategy-matrix/**"
- "conanfile.py"
- "conan.lock"
env:
CONAN_REMOTE_NAME: xrplf
CONAN_REMOTE_URL: https://conan.ripplex.io
NPROC_SUBTRACT: 2
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
# The workflow dispatch event uses the same trigger as the schedule event.
trigger: ${{ github.event_name == 'pull_request' && 'commit' || (github.event_name == 'push' && 'merge' || 'schedule') }}
# Build and upload the dependencies for each configuration.
run-upload-conan-deps:
needs:
- generate-matrix
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
runs-on: ${{ matrix.runs_on }}
container: ${{ matrix.image }}
steps:
- name: Cleanup workspace (macOS and Windows)
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/prepare-runner@2ece4ec6ab7de266859a6f053571425b2bd684b6
with:
disable_ccache: false
- name: Print build environment
uses: ./.github/actions/print-env
- name: Get number of processors
uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
id: nproc
with:
subtract: ${{ env.NPROC_SUBTRACT }}
- name: Setup Conan
uses: ./.github/actions/setup-conan
with:
conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ matrix.build_type }}
force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
# Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
- name: Log into Conan remote
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
- name: Upload Conan packages
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
env:
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}

.github/workflows/windows.yml
View File

@@ -0,0 +1,106 @@
name: windows
on:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows, instrumentation)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- "ci/**"
# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
CONAN_REMOTE_URL: https://conan.ripplex.io
CONAN_REMOTE_USERNAME: ${{ secrets.CONAN_REMOTE_USERNAME }}
CONAN_REMOTE_PASSWORD: ${{ secrets.CONAN_REMOTE_PASSWORD }}
# This part of the Conan configuration is specific to this workflow only; we
# do not want to pollute the 'conan/profiles' directory with settings that
# might not work for other workflows.
CONAN_GLOBAL_CONF: |
core.download:parallel={{os.cpu_count()}}
core.upload:parallel={{os.cpu_count()}}
tools.build:jobs=24
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
jobs:
test:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
strategy:
fail-fast: false
matrix:
version:
- generator: Visual Studio 17 2022
runs-on: windows-2022
configuration:
- type: Release
tests: true
- type: Debug
# Skip running unit tests on debug builds, because they
# take an unreasonable amount of time
tests: false
runtime: d
runs-on: ${{ matrix.version.runs-on }}
env:
build_dir: .build
steps:
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: choose Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: 3.13
- name: learn Python cache directory
id: pip-cache
shell: bash
run: |
python -m pip install --upgrade pip
echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
- name: restore Python cache directory
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
- name: install Conan
run: pip install wheel conan
- name: check environment
run: |
dir env:
$env:PATH -split ';'
python --version
conan --version
cmake --version
- name: configure Conan
shell: bash
run: |
echo "${CONAN_GLOBAL_CONF}" > $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
- name: build dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration.type }}
- name: build
uses: ./.github/actions/build
with:
generator: "${{ matrix.version.generator }}"
configuration: ${{ matrix.configuration.type }}
# Hard code for now. Move to the matrix if varied options are needed
cmake-args: "-Dassert=TRUE -Dwerr=TRUE -Dreporting=OFF -Dunity=ON"
cmake-target: install
- name: test
shell: bash
if: ${{ matrix.configuration.tests }}
run: |
cd ${build_dir}/${{ matrix.configuration.type }}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure

.gitignore
View File

@@ -1,47 +1,70 @@
# .gitignore
# Macintosh Desktop Services Store files.
bin/boostbook_catalog.xml
bin/config.log
bin/project-cache.jam
# Ignore vim swap files.
*.swp
# Ignore SCons support files.
.sconsign.dblite
# Ignore python compiled files.
*.pyc
# Ignore Macintosh Desktop Services Store files.
.DS_Store
# Build, intermediate, and temporary artifacts.
# Ignore backup/temps
*~
*.o
*.pdb
*.swp
/.clangd
Debug/
Release/
/.build/
/build/
/db/
/out.txt
/Testing/
/tmp/
CMakeSettings.json
CMakeUserPresets.json
__pycache__
# Coverage files.
# Ignore object files.
*.o
.nih_c
tags
TAGS
GTAGS
GRTAGS
GPATH
bin/rippled
Debug/*.*
Release/*.*
# Ignore coverage files.
*.gcno
*.gcda
*.gcov
# Profiling data.
gmon.out
# Levelization checking
Builds/levelization/results/rawincludes.txt
Builds/levelization/results/paths.txt
Builds/levelization/results/includes/
Builds/levelization/results/includedby/
# Levelization data.
.github/scripts/levelization/results/*
!.github/scripts/levelization/results/loops.txt
!.github/scripts/levelization/results/ordering.txt
# Ignore tmp directory.
tmp
# Customized configs.
/rippled.cfg
/validators.txt
# Ignore database directory.
db/
db/*.db
db/*.db-*
# Locally patched Conan recipes
external/conan-center-index/
# Ignore debug logs
debug_log.txt
# XCode IDE.
# Ignore customized configs
rippled.cfg
validators.txt
# Doxygen generated documentation output
HtmlDocumentation
docs/html_doc
# Xcode user-specific project settings
# Xcode
.DS_Store
/build/
*.pbxuser
!default.pbxuser
*.mode1v3
@@ -54,16 +77,38 @@ xcuserdata
profile
*.moved-aside
DerivedData
.idea/
*.hmap
# JetBrains IDE.
/.idea/
# Intel Parallel Studio 2013 XE
My Amplifier XE Results - RippleD
# Microsoft Visual Studio IDE.
/.vs/
/.vscode/
# Compiler intermediate output
/out.txt
# AI tools.
/.augment
/.claude
/CLAUDE.md
# Build Log
rippled-build.log
# Profiling data
gmon.out
Builds/VisualStudio2015/*.db
Builds/VisualStudio2015/*.user
Builds/VisualStudio2015/*.opendb
Builds/VisualStudio2015/*.sdf
# MSVC
*.pdb
.vs/
CMakeSettings.json
compile_commands.json
.clangd
packages
pkg_out
pkg
CMakeUserPresets.json
bld.rippled/
.vscode
# Suggested in-tree build directory
/.build/

View File

@@ -1,43 +1,6 @@
# To run pre-commit hooks, first install pre-commit:
# - `pip install pre-commit==${PRE_COMMIT_VERSION}`
#
# Then, run the following command to install the git hook scripts:
# - `pre-commit install`
# You can run all configured hooks against all files with:
# - `pre-commit run --all-files`
# To manually run a specific hook, use:
# - `pre-commit run <hook_id> --all-files`
# To run the hooks against only the staged files, use:
# - `pre-commit run`
# .pre-commit-config.yaml
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: mixed-line-ending
- id: check-merge-conflict
args: [--assume-in-merge]
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
rev: v18.1.8
hooks:
- id: clang-format
args: [--style=file]
"types_or": [c++, c, proto]
- repo: https://github.com/rbubley/mirrors-prettier
rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
hooks:
- id: prettier
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 25.11.0
hooks:
- id: black
exclude: |
(?x)^(
external/.*|
.github/scripts/levelization/results/.*\.txt
)$

View File

@@ -1 +0,0 @@
external

BUILD.md
View File

@@ -10,7 +10,7 @@
## Branches
For a stable release, choose the `master` branch or one of the [tagged
releases](https://github.com/XRPLF/rippled/releases).
releases](https://github.com/ripple/rippled/releases).
```bash
git checkout master
@@ -33,19 +33,24 @@ git checkout develop
See [System Requirements](https://xrpl.org/system-requirements.html).
Building xrpld generally requires git, Python, Conan, CMake, and a C++
Building rippled generally requires git, Python, Conan, CMake, and a C++
compiler. Some guidance on setting up such a [C++ development environment can be
found here](./docs/build/environment.md).
- [Python 3.11](https://www.python.org/downloads/), or higher
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
- [CMake 3.22](https://cmake.org/download/), or higher
- [CMake 3.22](https://cmake.org/download/)[^2], or higher
[^1]:
It is possible to build with Conan 1.60+, but the instructions are
significantly different, which is why we are not recommending it.
`xrpld` is written in the C++20 dialect and includes the `<concepts>` header.
[^2]:
CMake 4 is not yet supported by all dependencies required by this project.
If you are affected by this issue, follow [conan workaround for cmake
4](#workaround-for-cmake-4)
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:
| Compiler | Version |
@@ -66,7 +71,7 @@ Linux](./docs/build/environment.md#linux).
### Mac
Many xrpld engineers use macOS for development.
Many rippled engineers use macOS for development.
Here are [sample instructions for setting up a C++ development environment on
macOS](./docs/build/environment.md#macos).
@@ -88,13 +93,6 @@ These instructions assume a basic familiarity with Conan and CMake. If you are
unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official
[Getting Started][3] walkthrough.
#### Conan lockfile
To achieve reproducible dependencies, we use a [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html),
which has to be updated every time dependencies change.
Please see the [instructions on how to regenerate the lockfile](conan/lockfile/README.md).
#### Default profile
We recommend that you import the provided `conan/profiles/default` profile:
@@ -126,7 +124,7 @@ default profile.
### Patched recipes
The recipes in Conan Center occasionally need to be patched for compatibility
with the latest version of `xrpld`. We maintain a fork of the Conan Center
with the latest version of `rippled`. We maintain a fork of the Conan Center
[here](https://github.com/XRPLF/conan-center-index/) containing the patches.
To ensure our patched recipes are used, you must add our Conan remote at a
@@ -134,44 +132,24 @@ higher index than the default Conan Center remote, so it is consulted first. You
can do this by running:
```bash
conan remote add --index 0 xrplf https://conan.ripplex.io
conan remote add --index 0 xrplf "https://conan.ripplex.io"
```
Alternatively, you can pull the patched recipes into the repository and use them
locally:
```bash
# Extract the version number from the lockfile.
function extract_version {
version=$(cat conan.lock | sed -nE "s@.+${1}/(.+)#.+@\1@p" | head -n1)
echo ${version}
}
# Define which recipes to export.
recipes=(ed25519 grpc secp256k1 snappy soci)
# Selectively check out the recipes from our CCI fork.
cd external
mkdir -p conan-center-index
cd conan-center-index
git init
git remote add origin git@github.com:XRPLF/conan-center-index.git
git sparse-checkout init
for recipe in ${recipes[@]}; do
echo "Checking out ${recipe}..."
git sparse-checkout add recipes/${recipe}/all
done
git sparse-checkout set recipes/snappy
git sparse-checkout add recipes/soci
git fetch origin master
git checkout master
cd ../..
# Export the recipes into the local cache.
for recipe in ${recipes[@]}; do
version=$(extract_version ${recipe})
echo "Exporting ${recipe}/${version}..."
conan export --version $(extract_version ${recipe}) \
external/conan-center-index/recipes/${recipe}/all
done
conan export --version 1.1.10 recipes/snappy/all
conan export --version 4.0.3 recipes/soci/all
rm -rf .git
```
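To sanity-check the export, you can list the recipes now present in the local
Conan cache (an optional check, not part of the official steps above):

```bash
# Confirm the patched recipes were exported into the local cache.
conan list "snappy/*"
conan list "soci/*"
```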
In the case we switch to a newer version of a dependency that still requires a
@@ -180,11 +158,6 @@ updated dependencies with the newer version. However, if we switch to a newer
version that no longer requires a patch, no action is required on your part, as
the new recipe will be automatically pulled from the official Conan Center.
> [!NOTE]
> You might need to add `--lockfile=""` to your `conan install` command
> to avoid automatic use of the existing `conan.lock` file when you run
> `conan export` manually on your machine
### Conan profile tweaks
#### Missing compiler version
@@ -292,7 +265,7 @@ sed -i.bak -e 's|^compiler\.libcxx=.*$|compiler.libcxx=libstdc++11|' $(conan con
to do that is to run the shortcut "x64 Native Tools Command Prompt" for the
version of Visual Studio that you have installed.
Windows developers must also build `xrpld` and its dependencies for the x64
Windows developers must also build `rippled` and its dependencies for the x64
architecture:
```bash
@@ -305,6 +278,21 @@ sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
```
#### Workaround for CMake 4
If your system CMake is version 4 rather than 3, you may have to configure the
Conan profile to use CMake version 3 for dependencies by adding the following
two lines to your profile:
```text
[tool_requires]
!cmake/*: cmake/[>=3 <4]
```
This will force Conan to download and use a locally cached CMake 3 version, and
is needed because some of the dependencies used by this project do not support
CMake 4.
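One way to apply this, assuming you imported the provided default profile as
described above, is to append the lines directly (a sketch; adjust the path if
you keep your profile elsewhere):

```bash
profile="$(conan config home)/profiles/default"
printf '\n[tool_requires]\n!cmake/*: cmake/[>=3 <4]\n' >> "${profile}"
```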
#### Clang workaround for grpc
If your compiler is clang, version 19 or later, or apple-clang, version 17 or
@@ -401,6 +389,19 @@ tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
`--settings build_type=$BUILD_TYPE` or in the profile itself,
under the section `[settings]` with the key `build_type`.
If you are using a Microsoft Visual C++ compiler,
then you will need to ensure consistency between the `build_type` setting
and the `compiler.runtime` setting.
When `build_type` is `Release`, `compiler.runtime` should be `MT`.
When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.
```
conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
```
3. Configure CMake and pass the toolchain file generated by Conan, located at
`$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.
@@ -422,9 +423,9 @@ tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
```
**Note:** You can pass build options for `xrpld` in this step.
**Note:** You can pass build options for `rippled` in this step.
4. Build `xrpld`.
4. Build `rippled`.
For a single-configuration generator, it will build whatever configuration
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator, you
@@ -443,26 +444,26 @@ tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
cmake --build . --config Debug
```
5. Test xrpld.
5. Test rippled.
Single-config generators:
```
./xrpld --unittest --unittest-jobs N
./rippled --unittest --unittest-jobs N
```
Multi-config generators:
```
./Release/xrpld --unittest --unittest-jobs N
./Debug/xrpld --unittest --unittest-jobs N
./Release/rippled --unittest --unittest-jobs N
./Debug/rippled --unittest --unittest-jobs N
```
Replace the `--unittest-jobs` parameter N with the desired unit tests
concurrency. Recommended setting is half of the number of available CPU
cores.
The location of `xrpld` binary in your build directory depends on your
The location of `rippled` binary in your build directory depends on your
CMake generator. Pass `--help` to see the rest of the command line options.
## Coverage report
@@ -481,20 +482,20 @@ Prerequisites for the coverage report:
A coverage report is created when the following steps are completed, in order:
1. `xrpld` binary built with instrumentation data, enabled by the `coverage`
1. `rippled` binary built with instrumentation data, enabled by the `coverage`
option mentioned above
2. completed one or more run of the unit tests, which populates coverage capture data
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
to assemble both instrumentation data and the coverage capture data into a coverage report
The last step of the above is automated into a single target `coverage`. The instrumented
`xrpld` binary can also be used for regular development or testing work, at
The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture data). Since `xrpld` binary is simply a dependency of the
coverage report target, it is possible to re-run the `coverage` target without
rebuilding the `xrpld` binary. Note, running of the unit tests before the `coverage`
target is left to the developer. Each such run will append to the coverage data
collected in the build directory.
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`.
The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
@@ -503,6 +504,11 @@ to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.
Example use with some cmake variables set:
```
@@ -520,16 +526,16 @@ stored inside the build directory, as either of:
## Options
| Option | Default Value | Description |
| ---------- | ------------- | ------------------------------------------------------------------ |
| `assert` | OFF | Enable assertions. |
| `coverage` | OFF | Prepare the coverage report. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
| `tests` | OFF | Build tests. |
| `unity` | OFF | Configure a unity build. |
| `xrpld` | OFF | Build the xrpld application, and not just the libxrpl library. |
| `werr` | OFF | Treat compilation warnings as errors |
| `wextra` | OFF | Enable additional compilation warnings |
| Option | Default Value | Description |
| ---------- | ------------- | -------------------------------------------------------------------------- |
| `assert` | OFF | Enable assertions. |
| `coverage` | OFF | Prepare the coverage report. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
| `tests` | OFF | Build tests. |
| `unity` | OFF | Configure a unity build. |
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
| `werr` | OFF | Treat compilation warnings as errors |
| `wextra` | OFF | Enable additional compilation warnings |
[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
@@ -558,13 +564,7 @@ After any updates or changes to dependencies, you may need to do the following:
```
3. Re-run [conan export](#patched-recipes) if needed.
4. [Regenerate lockfile](#conan-lockfile).
5. Re-run [conan install](#build-and-test).
#### ERROR: Package not resolved
If you're seeing an error like `ERROR: Package 'snappy/1.1.10' not resolved: Unable to find 'snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246' in remotes.`,
please add `xrplf` remote or re-run `conan export` for [patched recipes](#patched-recipes).
4. Re-run [conan install](#build-and-test).
### `protobuf/port_def.inc` file not found
@@ -573,7 +573,7 @@ you might have generated CMake files for a different `build_type` than the
`CMAKE_BUILD_TYPE` you passed to Conan.
```
/xrpld/.build/pb-xrpl.libpb/xrpl/proto/xrpl.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
10 | #include <google/protobuf/port_def.inc>
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1 error generated.

View File

@@ -3,26 +3,21 @@
Levelization is the term used to describe efforts to prevent rippled from
having or creating cyclic dependencies.
rippled code is organized into directories under `src/xrpld`, `src/libxrpl` (and
rippled code is organized into directories under `src/rippled` (and
`src/test`) representing modules. The modules are intended to be
organized into "tiers" or "levels" such that a module from one level can
only include code from lower levels. Additionally, a module
in one level should never include code in an `impl` or `detail` folder of any level
in one level should never include code in an `impl` folder of any level
other than its own.
The codebase is split into two main areas:
- **libxrpl** (`src/libxrpl`, `include/xrpl`): Reusable library modules with public interfaces
- **xrpld** (`src/xrpld`): Application-specific implementation code
Unfortunately, over time, enforcement of levelization has been
inconsistent, so the current state of the code doesn't necessarily
reflect these rules. Whenever possible, developers should refactor any
levelization violations they find (by moving files or individual
classes). At the very least, don't make things worse.
The table below summarizes the _desired_ division of modules, based on the current
state of the rippled code. The levels are numbered from
The table below summarizes the _desired_ division of modules, based on the
state of the rippled code when it was created. The levels are numbered from
the bottom up with the lower level, lower numbered, more independent
modules listed first, and the higher level, higher numbered modules with
more dependencies listed later.
@@ -30,33 +25,18 @@ more dependencies listed later.
**tl;dr:** The modules listed first are more independent than the modules
listed later.
## libxrpl Modules (Reusable Libraries)
| Level / Tier | Module(s) |
| ------------ | ----------------------------------- |
| 01 | xrpl/beast |
| 02 | xrpl/basics |
| 03 | xrpl/json xrpl/crypto |
| 04 | xrpl/protocol |
| 05 | xrpl/core xrpl/resource xrpl/server |
| 06 | xrpl/ledger xrpl/nodestore xrpl/net |
| 07 | xrpl/shamap |
## xrpld Modules (Application Implementation)
| Level / Tier | Module(s) |
| ------------ | -------------------------------- |
| 05 | xrpld/conditions xrpld/consensus |
| 06 | xrpld/core xrpld/peerfinder |
| 07 | xrpld/shamap xrpld/overlay |
| 08 | xrpld/app |
| 09 | xrpld/rpc |
| 10 | xrpld/perflog |
## Test Modules
| Level / Tier | Module(s) |
| ------------ | -------------------------------------------------------------------------------------------------------- |
| 01 | ripple/beast ripple/unity |
| 02 | ripple/basics |
| 03 | ripple/json ripple/crypto |
| 04 | ripple/protocol |
| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server |
| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net |
| 07 | ripple/shamap ripple/overlay |
| 08 | ripple/app |
| 09 | ripple/rpc |
| 10 | ripple/perflog |
| 11 | test/jtx test/beast test/csf |
| 12 | test/unit_test |
| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay |
@@ -65,12 +45,12 @@ listed later.
| 16 | test/rpc test/app |
(Note that `test` levelization is _much_ less important and _much_ less
strictly enforced than `xrpl`/`xrpld` levelization, other than the requirement
that `test` code should _never_ be included in `xrpl` or `xrpld` code.)
strictly enforced than `ripple` levelization, other than the requirement
that `test` code should _never_ be included in `ripple` code.)
## Validation
The [levelization](generate.sh) script takes no parameters,
The [levelization.sh](levelization.sh) script takes no parameters,
reads no environment variables, and can be run from any directory,
as long as it is in the expected location in the rippled repo.
It can be run at any time from within a checked out repo, and will
@@ -92,15 +72,15 @@ It generates many files of [results](results):
desired as described above. In a perfect repo, this file will be
empty.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
Github workflow](../../.github/workflows/levelization.yml) to validate
that nothing changed.
- [`ordering.txt`](results/ordering.txt): A list showing relationships
between modules where there are no loops as they actually exist, as
opposed to how they are desired as described above.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
Github workflow](../../.github/workflows/levelization.yml) to validate
that nothing changed.
- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
- [`levelization.yml`](../../.github/workflows/levelization.yml)
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or
@@ -131,4 +111,4 @@ get those details locally.
1. Run `levelization.sh`
2. Grep the modules in `paths.txt`.
- For example, if a cycle is found `A ~= B`, simply `grep -w
A .github/scripts/levelization/results/paths.txt | grep -w B`
A Builds/levelization/results/paths.txt | grep -w B`

@@ -1,6 +1,6 @@
#!/bin/bash
# Usage: generate.sh
# Usage: levelization.sh
# This script takes no parameters, reads no environment variables,
# and can be run from any directory, as long as it is in the expected
# location in the repo.
@@ -19,7 +19,7 @@ export LANG=C
rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"
pushd ../../..
pushd ../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
grep -v boost | tee ${includes}

@@ -7,6 +7,9 @@ Loop: test.jtx test.unit_test
Loop: xrpld.app xrpld.core
xrpld.app > xrpld.core
Loop: xrpld.app xrpld.ledger
xrpld.app > xrpld.ledger
Loop: xrpld.app xrpld.overlay
xrpld.overlay > xrpld.app
@@ -17,8 +20,14 @@ Loop: xrpld.app xrpld.rpc
xrpld.rpc > xrpld.app
Loop: xrpld.app xrpld.shamap
xrpld.shamap ~= xrpld.app
xrpld.app > xrpld.shamap
Loop: xrpld.core xrpld.perflog
xrpld.perflog == xrpld.core
Loop: xrpld.overlay xrpld.rpc
xrpld.rpc ~= xrpld.overlay
Loop: xrpld.perflog xrpld.rpc
xrpld.rpc ~= xrpld.perflog

@@ -1,19 +1,9 @@
libxrpl.basics > xrpl.basics
libxrpl.core > xrpl.basics
libxrpl.core > xrpl.core
libxrpl.crypto > xrpl.basics
libxrpl.json > xrpl.basics
libxrpl.json > xrpl.json
libxrpl.ledger > xrpl.basics
libxrpl.ledger > xrpl.json
libxrpl.ledger > xrpl.ledger
libxrpl.ledger > xrpl.protocol
libxrpl.net > xrpl.basics
libxrpl.net > xrpl.net
libxrpl.nodestore > xrpl.basics
libxrpl.nodestore > xrpl.json
libxrpl.nodestore > xrpl.nodestore
libxrpl.nodestore > xrpl.protocol
libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol
@@ -24,28 +14,27 @@ libxrpl.server > xrpl.basics
libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.server
libxrpl.shamap > xrpl.basics
libxrpl.shamap > xrpl.protocol
libxrpl.shamap > xrpl.shamap
libxrpl.telemetry > xrpl.basics
libxrpl.telemetry > xrpl.json
libxrpl.telemetry > xrpl.telemetry
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test
test.app > xrpl.basics
test.app > xrpl.core
test.app > xrpld.app
test.app > xrpld.core
test.app > xrpld.ledger
test.app > xrpld.nodestore
test.app > xrpld.overlay
test.app > xrpld.rpc
test.app > xrpl.json
test.app > xrpl.ledger
test.app > xrpl.nodestore
test.app > xrpl.protocol
test.app > xrpl.resource
test.basics > test.jtx
test.basics > test.unit_test
test.basics > xrpl.basics
test.basics > xrpl.core
test.basics > xrpld.perflog
test.basics > xrpld.rpc
test.basics > xrpl.json
test.basics > xrpl.protocol
@@ -58,28 +47,29 @@ test.consensus > test.unit_test
test.consensus > xrpl.basics
test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpld.ledger
test.consensus > xrpl.json
test.consensus > xrpl.ledger
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
test.core > xrpl.basics
test.core > xrpl.core
test.core > xrpld.core
test.core > xrpld.perflog
test.core > xrpl.json
test.core > xrpl.server
test.csf > xrpl.basics
test.csf > xrpld.consensus
test.csf > xrpl.json
test.csf > xrpl.protocol
test.csf > xrpl.telemetry
test.json > test.jtx
test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpld.app
test.jtx > xrpld.core
test.jtx > xrpld.ledger
test.jtx > xrpld.rpc
test.jtx > xrpl.json
test.jtx > xrpl.ledger
test.jtx > xrpl.net
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
@@ -89,14 +79,15 @@ test.ledger > test.toplevel
test.ledger > xrpl.basics
test.ledger > xrpld.app
test.ledger > xrpld.core
test.ledger > xrpl.ledger
test.ledger > xrpld.ledger
test.ledger > xrpl.protocol
test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.nodestore > xrpl.basics
test.nodestore > xrpld.core
test.nodestore > xrpl.nodestore
test.nodestore > xrpld.nodestore
test.nodestore > xrpld.unity
test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test
@@ -104,8 +95,8 @@ test.overlay > xrpl.basics
test.overlay > xrpld.app
test.overlay > xrpld.overlay
test.overlay > xrpld.peerfinder
test.overlay > xrpld.shamap
test.overlay > xrpl.protocol
test.overlay > xrpl.shamap
test.peerfinder > test.beast
test.peerfinder > test.unit_test
test.peerfinder > xrpl.basics
@@ -122,7 +113,6 @@ test.resource > xrpl.resource
test.rpc > test.jtx
test.rpc > test.toplevel
test.rpc > xrpl.basics
test.rpc > xrpl.core
test.rpc > xrpld.app
test.rpc > xrpld.core
test.rpc > xrpld.overlay
@@ -141,60 +131,61 @@ test.server > xrpl.json
test.server > xrpl.server
test.shamap > test.unit_test
test.shamap > xrpl.basics
test.shamap > xrpl.nodestore
test.shamap > xrpld.nodestore
test.shamap > xrpld.shamap
test.shamap > xrpl.protocol
test.shamap > xrpl.shamap
test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.net
xrpl.core > xrpl.basics
xrpl.core > xrpl.json
tests.libxrpl > xrpl.telemetry
xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics
xrpl.ledger > xrpl.protocol
xrpl.net > xrpl.basics
xrpl.nodestore > xrpl.basics
xrpl.nodestore > xrpl.protocol
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol
xrpl.resource > xrpl.telemetry
xrpl.server > xrpl.basics
xrpl.server > xrpl.json
xrpl.server > xrpl.protocol
xrpl.shamap > xrpl.basics
xrpl.shamap > xrpl.nodestore
xrpl.shamap > xrpl.protocol
xrpl.server > xrpl.telemetry
xrpl.telemetry > xrpl.json
xrpld.app > test.unit_test
xrpld.app > xrpl.basics
xrpld.app > xrpl.core
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus
xrpld.app > xrpld.nodestore
xrpld.app > xrpld.perflog
xrpld.app > xrpl.json
xrpld.app > xrpl.ledger
xrpld.app > xrpl.net
xrpld.app > xrpl.nodestore
xrpld.app > xrpl.protocol
xrpld.app > xrpl.resource
xrpld.app > xrpl.shamap
xrpld.app > xrpl.telemetry
xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.protocol
xrpld.core > xrpl.basics
xrpld.core > xrpl.core
xrpld.core > xrpl.json
xrpld.core > xrpl.net
xrpld.core > xrpl.protocol
xrpld.core > xrpl.telemetry
xrpld.ledger > xrpl.basics
xrpld.ledger > xrpl.json
xrpld.ledger > xrpl.protocol
xrpld.nodestore > xrpl.basics
xrpld.nodestore > xrpld.core
xrpld.nodestore > xrpld.unity
xrpld.nodestore > xrpl.json
xrpld.nodestore > xrpl.protocol
xrpld.overlay > xrpl.basics
xrpld.overlay > xrpl.core
xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpld.perflog
xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.resource
@@ -203,17 +194,16 @@ xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol
xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.core
xrpld.perflog > xrpld.rpc
xrpld.perflog > xrpl.json
xrpld.rpc > xrpl.basics
xrpld.rpc > xrpl.core
xrpld.rpc > xrpld.core
xrpld.rpc > xrpld.ledger
xrpld.rpc > xrpld.nodestore
xrpld.rpc > xrpl.json
xrpld.rpc > xrpl.ledger
xrpld.rpc > xrpl.net
xrpld.rpc > xrpl.nodestore
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server
xrpld.shamap > xrpl.shamap
xrpld.shamap > xrpl.basics
xrpld.shamap > xrpld.nodestore
xrpld.shamap > xrpl.protocol

@@ -49,7 +49,7 @@ if(Git_FOUND)
endif() #git
if(thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
add_compile_options("-stdlib=libc++")
add_link_options("-stdlib=libc++")
endif()
@@ -62,9 +62,9 @@ if (target)
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()
include(XrplSanity)
include(XrplVersion)
include(XrplSettings)
include(RippledSanity)
include(RippledVersion)
include(RippledSettings)
# this check has to remain in the top-level cmake
# because of the early return statement
if (packages_only)
@@ -73,11 +73,11 @@ if (packages_only)
endif()
return ()
endif ()
include(XrplCompiler)
include(XrplInterface)
include(RippledCompiler)
include(RippledInterface)
option(only_docs "Include only the docs target?" FALSE)
include(XrplDocs)
include(RippledDocs)
if(only_docs)
return()
endif()
@@ -89,7 +89,15 @@ find_package(OpenSSL 1.1.1 REQUIRED)
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
set(SECP256K1_INSTALL TRUE)
set(SECP256K1_BUILD_BENCHMARK FALSE)
set(SECP256K1_BUILD_TESTS FALSE)
set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
set(SECP256K1_BUILD_EXAMPLES FALSE)
add_subdirectory(external/secp256k1)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(external/ed25519-donna)
add_subdirectory(external/antithesis-sdk)
find_package(gRPC REQUIRED)
find_package(lz4 REQUIRED)
@@ -104,18 +112,16 @@ option(rocksdb "Enable RocksDB" ON)
if(rocksdb)
find_package(RocksDB REQUIRED)
set_target_properties(RocksDB::rocksdb PROPERTIES
INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
)
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
endif()
find_package(date REQUIRED)
find_package(ed25519 REQUIRED)
find_package(nudb REQUIRED)
find_package(secp256k1 REQUIRED)
find_package(date REQUIRED)
find_package(xxHash REQUIRED)
target_link_libraries(xrpl_libs INTERFACE
target_link_libraries(ripple_libs INTERFACE
ed25519::ed25519
lz4::lz4
OpenSSL::Crypto
@@ -133,16 +139,16 @@ elseif(TARGET NuDB::nudb)
else()
message(FATAL_ERROR "unknown nudb target")
endif()
target_link_libraries(xrpl_libs INTERFACE ${nudb})
target_link_libraries(ripple_libs INTERFACE ${nudb})
if(coverage)
include(XrplCov)
include(RippledCov)
endif()
set(PROJECT_EXPORT_SET XrplExports)
include(XrplCore)
include(XrplInstall)
include(XrplValidatorKeys)
set(PROJECT_EXPORT_SET RippleExports)
include(RippledCore)
include(RippledInstall)
include(RippledValidatorKeys)
if(tests)
include(CTest)

@@ -24,7 +24,7 @@ your verifying key. Please set up [signature verification][signing].
In general, external contributions should be developed in your personal
[fork][forking]. Contributions from developers with write permissions
should be done in [the main repository][xrpld] in a branch with
should be done in [the main repository][rippled] in a branch with
a permitted prefix. Permitted prefixes are:
- XLS-[a-zA-Z0-9]+/.+
@@ -47,18 +47,13 @@ choose the next available standard number, and open a discussion with an
appropriate title to propose your draft standard.
When you submit a pull request, please link the corresponding XLS in the
description. An XLS still in `Draft` status is considered a
description. An XLS still in draft status is considered a
work-in-progress and open for discussion. Please allow time for
questions, suggestions, and changes to the XLS draft. It is the
responsibility of the XLS author to update the draft to match the final
implementation when its corresponding pull request is merged, unless the
author delegates that responsibility to others.
Any amendment or major RPC change requires either a new XLS or an update
to an existing XLS. Neither change will be released (in an amendment's
case, marked as `Supported::yes`) until the corresponding XLS's status
is `Final`.
## Before making a pull request
(Or marking a draft pull request as ready.)
@@ -73,7 +68,7 @@ Ensure that your code compiles according to the build instructions in
Please write tests for your code.
If your test can be run offline, in under 60 seconds, then it can be an
automatic test run by `xrpld --unittest`.
automatic test run by `rippled --unittest`.
Otherwise, it must be a manual test.
If you create new source files, they must be organized as follows:
@@ -86,7 +81,7 @@ If you create new source files, they must be organized as follows:
The source must be formatted according to the style guide below.
Header includes must be [levelized](.github/scripts/levelization).
Header includes must be [levelized](./Builds/levelization).
Changes should usually be squashed down into a single commit.
Some larger or more complicated change sets make more sense,
@@ -256,13 +251,13 @@ pre-commit install
We are using [Antithesis](https://antithesis.com/) for continuous fuzzing,
and keep a copy of [Antithesis C++ SDK](https://github.com/antithesishq/antithesis-sdk-cpp/)
in `external/antithesis-sdk`. One of the aims of fuzzing is to identify bugs
by finding external conditions which cause contract violations inside `xrpld`.
by finding external conditions which cause contract violations inside `rippled`.
The contracts are expressed as `XRPL_ASSERT` or `UNREACHABLE` (defined in
`include/xrpl/beast/utility/instrumentation.h`), which are effectively (outside
of Antithesis) wrappers for `assert(...)` with an added name. The purpose of the name
is to give contracts a stable identity that does not rely on line numbers.
When `xrpld` is built with the Antithesis instrumentation enabled
When `rippled` is built with the Antithesis instrumentation enabled
(using `voidstar` CMake option) and ran on the Antithesis platform, the
contracts become
[test properties](https://antithesis.com/docs/using_antithesis/properties.html);
@@ -304,7 +299,7 @@ For this reason:
- Example **bad** name
`"RFC1751::insert(char* s, int x, int start, int length) : length is greater than or equal zero"`
(missing namespace, unnecessary full function signature, description too verbose).
Good name: `"xrpl::RFC1751::insert : minimum length"`.
Good name: `"ripple::RFC1751::insert : minimum length"`.
- In a **few** well-justified cases a non-standard name can be used, in which case a
comment should be placed to explain the rationale (example in `contract.cpp`)
- Do **not** rename a contract without a good reason (e.g. the name no longer
@@ -318,7 +313,7 @@ For this reason:
To execute all unit tests:
`xrpld --unittest --unittest-jobs=<number of cores>`
`rippled --unittest --unittest-jobs=<number of cores>`
(Note: Using multiple cores on a Mac M1 can cause spurious test failures. The
cause is still under investigation. If you observe this problem, try specifying fewer jobs.)
@@ -326,7 +321,7 @@ cause is still under investigation. If you observe this problem, try specifying
To run a specific set of test suites:
```
xrpld --unittest TestSuiteName
rippled --unittest TestSuiteName
```
Note: In this example, all tests with prefix `TestSuiteName` will be run, so if
@@ -1075,7 +1070,7 @@ git fetch upstreams
[contrib]: https://docs.github.com/en/get-started/quickstart/contributing-to-projects
[squash]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/about-pull-request-merges#squash-and-merge-your-commits
[forking]: https://github.com/XRPLF/rippled/fork
[xrpld]: https://github.com/XRPLF/rippled
[rippled]: https://github.com/XRPLF/rippled
[signing]: https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification
[setup-upstreams]: ./bin/git/setup-upstreams.sh
[squash-branches]: ./bin/git/squash-branches.sh

@@ -1,7 +1,7 @@
ISC License
Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
Copyright (c) 2012-2025, the XRP Ledger developers.
Copyright (c) 2012-2020, the XRP Ledger developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above

@@ -6,7 +6,7 @@ The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powe
## XRP
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free crypto-asset native to the XRP Ledger, and is designed as a gas token for network services and to bridge different currencies. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
## rippled
@@ -23,26 +23,26 @@ If you are interested in running an **API Server** (including a **Full History S
- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction fees.
- **[Responsible Software Governance][]:** A team of full-time developers at Ripple & other organizations maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
- **[Modern Features][]:** Features like Escrow, Checks, and Payment Channels support financial applications atop of the XRP Ledger. This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.
[Censorship-Resistant Transaction Processing]: https://xrpl.org/transaction-censorship-detection.html#transaction-censorship-detection
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/consensus-research.html#consensus-research
[Finite XRP Supply]: https://xrpl.org/what-is-xrp.html
[Responsible Software Governance]: https://xrpl.org/contribute-code.html#contribute-code-to-the-xrp-ledger
[Secure, Adaptable Cryptography]: https://xrpl.org/cryptographic-keys.html#cryptographic-keys
[Modern Features]: https://xrpl.org/use-specialized-payment-types.html
[On-Ledger Decentralized Exchange]: https://xrpl.org/decentralized-exchange.html#decentralized-exchange
[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange
## Source Code
Here are some good places to start learning the source code:
- Read the markdown files in the source tree: `src/ripple/**/*.md`.
- Read [the levelization document](.github/scripts/levelization) to get an idea of the internal dependency graph.
- Read [the levelization document](./Builds/levelization) to get an idea of the internal dependency graph.
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.
### Repository Contents

@@ -5,7 +5,7 @@ then
name=$( basename $0 )
cat <<- USAGE
Usage: $name <username>
Where <username> is the Github username of the upstream repo. e.g. XRPLF
USAGE
exit 0
@@ -83,3 +83,4 @@ fi
_run git fetch --jobs=$(nproc) upstreams
exit 0

@@ -5,7 +5,7 @@ then
name=$( basename $0 )
cat <<- USAGE
Usage: $name workbranch base/branch user/branch [user/branch [...]]
* workbranch will be created locally from base/branch
* base/branch and user/branch may be specified as user:branch to allow
easy copying from Github PRs
@@ -66,3 +66,4 @@ git push $push HEAD:$b
git fetch $repo
-------------------------------------------------------------------
PUSH

@@ -396,8 +396,8 @@
# true - enables compression
# false - disables compression [default].
#
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
# at a cost of greater CPU usage. If you enable link compression,
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
# at a cost of greater CPU usage. If you enable link compression,
# the server automatically compresses communications with peer servers
# that also have link compression enabled.
# https://xrpl.org/enable-link-compression.html
@@ -975,47 +975,6 @@
# number of ledger records online. Must be greater
# than or equal to ledger_history.
#
# Optional keys for NuDB only:
#
# nudb_block_size EXPERIMENTAL: Block size in bytes for NuDB storage.
# Must be a power of 2 between 4096 and 32768. Default is 4096.
#
# This parameter controls the fundamental storage unit
# size for NuDB's internal data structures. The choice
# of block size can significantly impact performance
# depending on your storage hardware and filesystem:
#
# - 4096 bytes: Optimal for most standard SSDs and
# traditional filesystems (ext4, NTFS, HFS+).
# Provides good balance of performance and storage
# efficiency. Recommended for most deployments.
# Minimizes memory footprint and provides consistent
# low-latency access patterns across diverse hardware.
#
# - 8192-16384 bytes: May improve performance on
# high-end NVMe SSDs and copy-on-write filesystems
# like ZFS or Btrfs that benefit from larger block
# alignment. Can reduce metadata overhead for large
# databases. Offers better sequential throughput and
# reduced I/O operations at the cost of higher memory
# usage per operation.
#
# - 32768 bytes (32K): Maximum supported block size
# for high-performance scenarios with very fast
# storage. May increase memory usage and reduce
# efficiency for smaller databases. Best suited for
# enterprise environments with abundant RAM.
#
# Performance testing is recommended before deploying
# any non-default block size in production environments.
#
# Note: This setting cannot be changed after database
# creation without rebuilding the entire database.
# Choose carefully based on your hardware and expected
# database size.
#
# Example: nudb_block_size=4096
#
# These keys modify the behavior of online_delete, and thus are only
# relevant if online_delete is defined and non-zero:
#
@@ -1052,7 +1011,7 @@
# that rippled is still in sync with the network,
# and that the validated ledger is less than
# 'age_threshold_seconds' old. If not, then continue
# sleeping for this number of seconds and
# sleeping for this number of seconds and
# checking until healthy.
# Default is 5.
#
@@ -1154,7 +1113,7 @@
# page_size Valid values: integer (MUST be power of 2 between 512 and 65536)
# The default is 4096 bytes. This setting determines
# the size of a page in the transaction.db file.
# See https://www.sqlite.org/pragma.html#pragma_page_size
# See https://www.sqlite.org/pragma.html#pragma_page_size
# for more details about the available options.
#
# journal_size_limit Valid values: integer
@@ -1512,7 +1471,6 @@ secure_gateway = 127.0.0.1
[node_db]
type=NuDB
path=/var/lib/rippled/db/nudb
nudb_block_size=4096
online_delete=512
advisory_delete=0

@@ -1,3 +1,21 @@
macro(group_sources_in source_dir curdir)
file(GLOB children RELATIVE ${source_dir}/${curdir}
${source_dir}/${curdir}/*)
foreach (child ${children})
if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
group_sources_in(${source_dir} ${curdir}/${child})
else()
string(REPLACE "/" "\\" groupname ${curdir})
source_group(${groupname} FILES
${source_dir}/${curdir}/${child})
endif()
endforeach()
endmacro()
macro(group_sources curdir)
group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
endmacro()
macro (exclude_from_default target_)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)

@@ -101,17 +101,6 @@
# 2025-05-12, Jingchen Wu
# - add -fprofile-update=atomic to ensure atomic profile generation
#
# 2025-08-28, Bronek Kozicki
# - fix "At least one COMMAND must be given" CMake warning from policy CMP0175
#
# 2025-09-03, Jingchen Wu
# - remove the unused function append_coverage_compiler_flags and append_coverage_compiler_flags_to_target
# - add a new function add_code_coverage_to_target
# - remove some unused code
#
# 2025-11-11, Bronek Kozicki
# - make EXECUTABLE and EXECUTABLE_ARGS optional
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
@@ -120,8 +109,10 @@
# using a CMake option() to enable it just optionally):
# include(CodeCoverage)
#
# 3. Append necessary compiler flags and linker flags for all supported source files:
# add_code_coverage_to_target(<target> <PRIVATE|PUBLIC|INTERFACE>)
# 3. Append necessary compiler flags for all supported source files:
# append_coverage_compiler_flags()
# Or for specific target:
# append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
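The two sides of this hunk document different generations of the module's user-facing API. A minimal sketch of each call style, using a hypothetical target `my_lib`:

```cmake
include(CodeCoverage)

# One generation: instrument a single target, with an explicit usage scope.
add_code_coverage_to_target(my_lib PRIVATE)

# The other generation: append flags globally, or to one specific target.
append_coverage_compiler_flags()
append_coverage_compiler_flags_to_target(my_lib)
```

With either variant, a Debug build (or `-O0`/`-Og`, per step 3.a) keeps the coverage numbers from being distorted by optimization.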
@@ -210,69 +201,67 @@ endforeach()
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")
set(COVERAGE_CXX_COMPILER_FLAGS "")
set(COVERAGE_C_COMPILER_FLAGS "")
set(COVERAGE_CXX_LINKER_FLAGS "")
set(COVERAGE_C_LINKER_FLAGS "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag)
include(CheckCCompilerFlag)
include(CheckLinkerFlag)
set(COVERAGE_CXX_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
set(COVERAGE_C_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
set(COVERAGE_CXX_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if(HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path")
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if(HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path")
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path)
if(HAVE_cxx_linker_fprofile_abs_path)
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path")
endif()
check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path)
if(HAVE_c_linker_fprofile_abs_path)
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path")
endif()
check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
check_cxx_compiler_flag(-fprofile-update HAVE_cxx_fprofile_update)
if(HAVE_cxx_fprofile_update)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic")
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
endif()
check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
check_c_compiler_flag(-fprofile-update HAVE_c_fprofile_update)
if(HAVE_c_fprofile_update)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic")
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
endif()
check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update)
if(HAVE_cxx_linker_fprofile_update)
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic")
endif()
check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update)
if(HAVE_c_linker_fprofile_update)
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic")
endif()
endif()
set(CMAKE_Fortran_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the Fortran compiler during coverage builds."
FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C++ compiler during coverage builds."
FORCE )
set(CMAKE_C_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C compiler during coverage builds."
FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
""
CACHE STRING "Flags used for linking binaries during coverage builds."
FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
""
CACHE STRING "Flags used by the shared libraries linker during coverage builds."
FORCE )
mark_as_advanced(
CMAKE_Fortran_FLAGS_COVERAGE
CMAKE_CXX_FLAGS_COVERAGE
CMAKE_C_FLAGS_COVERAGE
CMAKE_EXE_LINKER_FLAGS_COVERAGE
CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
link_libraries(gcov)
endif()
# Defines a target for running and collecting code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
@@ -320,10 +309,6 @@ function(setup_target_for_coverage_gcovr)
set(Coverage_FORMAT xml)
endif()
if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS)
message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set")
endif()
if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else()
@@ -405,18 +390,17 @@ function(setup_target_for_coverage_gcovr)
endforeach()
# Set up commands which will be run to generate coverage data
# If EXECUTABLE is not set, the user is expected to run the tests manually
# before running the coverage target NAME
if(DEFINED Coverage_EXECUTABLE)
set(GCOVR_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
endif()
# Run tests
set(GCOVR_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
# Create folder
if(DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD
${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
else()
set(GCOVR_FOLDER_CMD echo) # dummy
endif()
# Running gcovr
@@ -433,13 +417,11 @@ function(setup_target_for_coverage_gcovr)
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")
if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "")
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
endif()
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "")
if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
message(STATUS "Command to create a folder: ")
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
@@ -464,24 +446,23 @@ function(setup_target_for_coverage_gcovr)
# Show info where to find the report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND echo
COMMAND ;
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
endfunction() # setup_target_for_coverage_gcovr
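For reference, a sketch of an invocation, modeled on the `RippledCov.cmake` call that appears later in this diff (the target and argument values are taken from that file, not requirements of the function):

```cmake
# 'coverage' becomes a custom target that runs the tests (when EXECUTABLE
# is given) and then collects the results with gcovr.
setup_target_for_coverage_gcovr(
    NAME coverage
    FORMAT html-details
    EXECUTABLE rippled
    EXECUTABLE_ARGS --unittest --quiet --unittest-log
    EXCLUDE "src/test" "include/xrpl/beast/test"
    DEPENDENCIES rippled
)
```

On the side of this diff where `EXECUTABLE` is optional, omitting it means the tests must be run manually before building the `coverage` target.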
function(add_code_coverage_to_target name scope)
separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}")
separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}")
separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}")
separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")
function(append_coverage_compiler_flags)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction() # append_coverage_compiler_flags
# Add compiler options to the target
target_compile_options(${name} ${scope}
$<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
target_link_libraries (${name} ${scope}
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} gcov>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS} gcov>
)
endfunction() # add_code_coverage_to_target
# Setup coverage for specific library
function(append_coverage_compiler_flags_to_target name)
separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
target_compile_options(${name} PRIVATE ${_flag_list})
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
target_link_libraries(${name} PRIVATE gcov)
endif()
endfunction()

@@ -12,7 +12,7 @@ if (static OR MSVC)
else ()
set (Boost_USE_STATIC_RUNTIME OFF)
endif ()
find_dependency (Boost
find_dependency (Boost 1.70
COMPONENTS
chrono
container
@@ -45,10 +45,12 @@ if (static OR APPLE OR MSVC)
set (OPENSSL_USE_STATIC_LIBS ON)
endif ()
set (OPENSSL_MSVC_STATIC_RT ON)
find_dependency (OpenSSL REQUIRED)
find_dependency (OpenSSL 1.1.1 REQUIRED)
find_dependency (ZLIB)
find_dependency (date)
if (TARGET ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif ()
include ("${CMAKE_CURRENT_LIST_DIR}/RippleTargets.cmake")

@@ -7,9 +7,9 @@
toolchain file, especially the ABI-impacting ones
#]=========================================================]
add_library (common INTERFACE)
add_library (Xrpl::common ALIAS common)
add_library (Ripple::common ALIAS common)
# add a single global dependency on this interface lib
link_libraries (Xrpl::common)
link_libraries (Ripple::common)
set_target_properties (common
PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ON)
set(CMAKE_CXX_EXTENSIONS OFF)

@@ -13,7 +13,7 @@ set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
target_protobuf_sources(xrpl.libpb xrpl/proto
LANGUAGE cpp
IMPORT_DIRS include/xrpl/proto
PROTOS include/xrpl/proto/xrpl.proto
PROTOS include/xrpl/proto/ripple.proto
)
file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
@@ -51,17 +51,18 @@ target_link_libraries(xrpl.libpb
# TODO: Clean up the number of library targets later.
add_library(xrpl.imports.main INTERFACE)
find_package(RapidJSON)
target_link_libraries(xrpl.imports.main
INTERFACE
LibArchive::LibArchive
OpenSSL::Crypto
Ripple::boost
Ripple::opts
Ripple::syslibs
absl::random_random
date::date
ed25519::ed25519
LibArchive::LibArchive
OpenSSL::Crypto
Xrpl::boost
Xrpl::libs
Xrpl::opts
Xrpl::syslibs
secp256k1::secp256k1
xrpl.libpb
xxHash::xxhash
@@ -73,7 +74,11 @@ include(target_link_modules)
# Level 01
add_module(xrpl beast)
target_link_libraries(xrpl.libxrpl.beast PUBLIC xrpl.imports.main)
target_link_libraries(xrpl.libxrpl.beast PUBLIC
xrpl.imports.main
xrpl.libpb
rapidjson
)
# Level 02
add_module(xrpl basics)
@@ -83,6 +88,7 @@ target_link_libraries(xrpl.libxrpl.basics PUBLIC xrpl.libxrpl.beast)
add_module(xrpl json)
target_link_libraries(xrpl.libxrpl.json PUBLIC xrpl.libxrpl.basics)
add_module(xrpl crypto)
target_link_libraries(xrpl.libxrpl.crypto PUBLIC xrpl.libxrpl.basics)
@@ -94,20 +100,12 @@ target_link_libraries(xrpl.libxrpl.protocol PUBLIC
)
# Level 05
add_module(xrpl core)
target_link_libraries(xrpl.libxrpl.core PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)
# Level 06
add_module(xrpl resource)
target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)
# Level 07
# Level 06
add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC
target_link_libraries(xrpl.libxrpl.net PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
@@ -117,27 +115,6 @@ target_link_libraries(xrpl.libxrpl.net PUBLIC
add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
add_module(xrpl nodestore)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)
add_module(xrpl shamap)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.crypto
xrpl.libxrpl.protocol
xrpl.libxrpl.nodestore
)
add_module(xrpl ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)
add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
@@ -152,16 +129,12 @@ target_sources(xrpl.libxrpl PRIVATE ${sources})
target_link_modules(xrpl PUBLIC
basics
beast
core
crypto
json
protocol
resource
server
nodestore
shamap
net
ledger
)
# All headers in libxrpl are in modules.
@@ -174,14 +147,14 @@ target_link_modules(xrpl PUBLIC
# $<INSTALL_INTERFACE:include>)
if(xrpld)
add_executable(xrpld)
add_executable(rippled)
if(tests)
target_compile_definitions(xrpld PUBLIC ENABLE_TESTS)
target_compile_definitions(xrpld PRIVATE
target_compile_definitions(rippled PUBLIC ENABLE_TESTS)
target_compile_definitions(rippled PRIVATE
UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
)
endif()
target_include_directories(xrpld
target_include_directories(rippled
PRIVATE
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
)
@@ -189,36 +162,36 @@ if(xrpld)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
)
target_sources(xrpld PRIVATE ${sources})
target_sources(rippled PRIVATE ${sources})
if(tests)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
)
target_sources(xrpld PRIVATE ${sources})
target_sources(rippled PRIVATE ${sources})
endif()
target_link_libraries(xrpld
Xrpl::boost
Xrpl::opts
Xrpl::libs
target_link_libraries(rippled
Ripple::boost
Ripple::opts
Ripple::libs
xrpl.libxrpl
)
exclude_if_included(xrpld)
exclude_if_included(rippled)
# define a macro for tests that might need to
# be excluded or run differently in a CI environment
if(is_ci)
target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI)
target_compile_definitions(rippled PRIVATE RIPPLED_RUNNING_IN_CI)
endif ()
if(voidstar)
target_compile_options(xrpld
target_compile_options(rippled
PRIVATE
-fsanitize-coverage=trace-pc-guard
)
# xrpld requires access to antithesis-sdk-cpp implementation file
# rippled requires access to antithesis-sdk-cpp implementation file
# antithesis_instrumentation.h, which is not exported as INTERFACE
target_include_directories(xrpld
target_include_directories(rippled
PRIVATE
${CMAKE_SOURCE_DIR}/external/antithesis-sdk
)

@@ -11,9 +11,6 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
return()
endif()
include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT)
include(CodeCoverage)
# The instructions for these commands come from the `CodeCoverage` module,
@@ -29,13 +26,13 @@ list(APPEND GCOVR_ADDITIONAL_ARGS
--exclude-throw-branches
--exclude-noncode-lines
--exclude-unreachable-branches -s
-j ${PROCESSOR_COUNT})
-j ${coverage_test_parallelism})
setup_target_for_coverage_gcovr(
NAME coverage
FORMAT ${coverage_format}
EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
DEPENDENCIES xrpld xrpl.tests
EXECUTABLE rippled
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
EXCLUDE "src/test" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
DEPENDENCIES rippled
)
add_code_coverage_to_target(opts INTERFACE)

@@ -2,7 +2,9 @@
docs target (optional)
#]===================================================================]
if(NOT only_docs)
option(with_docs "Include the docs target?" FALSE)
if(NOT (with_docs OR only_docs))
return()
endif()

@@ -8,26 +8,21 @@ install (
TARGETS
common
opts
xrpl_boost
xrpl_libs
xrpl_syslibs
ripple_syslibs
ripple_boost
xrpl.imports.main
xrpl.libpb
xrpl.libxrpl
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.core
xrpl.libxrpl.crypto
xrpl.libxrpl.json
xrpl.libxrpl.ledger
xrpl.libxrpl.net
xrpl.libxrpl.nodestore
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl.shamap
xrpl.libxrpl.net
xrpl.libxrpl
antithesis-sdk-cpp
EXPORT XrplExports
EXPORT RippleExports
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
@@ -38,19 +33,26 @@ install(
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)
install (EXPORT XrplExports
FILE XrplTargets.cmake
NAMESPACE Xrpl::
DESTINATION lib/cmake/xrpl)
install(CODE "
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link)
create_symbolic_link(xrpl \
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
")
install (EXPORT RippleExports
FILE RippleTargets.cmake
NAMESPACE Ripple::
DESTINATION lib/cmake/ripple)
include (CMakePackageConfigHelpers)
write_basic_package_version_file (
XrplConfigVersion.cmake
VERSION ${xrpld_version}
RippleConfigVersion.cmake
VERSION ${rippled_version}
COMPATIBILITY SameMajorVersion)
if (is_root_project AND TARGET xrpld)
install (TARGETS xrpld RUNTIME DESTINATION bin)
set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
if (is_root_project AND TARGET rippled)
install (TARGETS rippled RUNTIME DESTINATION bin)
set_target_properties(rippled PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
# sample configs should not overwrite existing files
# install if-not-exists workaround as suggested by
# https://cmake.org/Bug/view.php?id=12646
@@ -68,13 +70,13 @@ if (is_root_project AND TARGET xrpld)
install(CODE "
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link)
create_symbolic_link(xrpld${suffix} \
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/rippled${suffix})
create_symbolic_link(rippled${suffix} \
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
")
endif ()
install (
FILES
${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
DESTINATION lib/cmake/xrpl)
${CMAKE_CURRENT_SOURCE_DIR}/cmake/RippleConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/RippleConfigVersion.cmake
DESTINATION lib/cmake/ripple)

@@ -1,9 +1,9 @@
#[===================================================================[
xrpld compile options/settings via an interface library
rippled compile options/settings via an interface library
#]===================================================================]
add_library (opts INTERFACE)
add_library (Xrpl::opts ALIAS opts)
add_library (Ripple::opts ALIAS opts)
target_compile_definitions (opts
INTERFACE
BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
@@ -21,18 +21,22 @@ target_compile_definitions (opts
>
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
$<$<BOOL:${single_io_service_thread}>:XRPL_SINGLE_IO_SERVICE_THREAD=1>
$<$<BOOL:${single_io_service_thread}>:RIPPLE_SINGLE_IO_SERVICE_THREAD=1>
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
target_compile_options (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
@@ -59,12 +63,12 @@ if (san)
endif ()
#[===================================================================[
xrpld transitive library deps via an interface library
rippled transitive library deps via an interface library
#]===================================================================]
add_library (xrpl_syslibs INTERFACE)
add_library (Xrpl::syslibs ALIAS xrpl_syslibs)
target_link_libraries (xrpl_syslibs
add_library (ripple_syslibs INTERFACE)
add_library (Ripple::syslibs ALIAS ripple_syslibs)
target_link_libraries (ripple_syslibs
INTERFACE
$<$<BOOL:${MSVC}>:
legacy_stdio_definitions.lib
@@ -89,9 +93,9 @@ target_link_libraries (xrpl_syslibs
if (NOT MSVC)
set (THREADS_PREFER_PTHREAD_FLAG ON)
find_package (Threads)
target_link_libraries (xrpl_syslibs INTERFACE Threads::Threads)
target_link_libraries (ripple_syslibs INTERFACE Threads::Threads)
endif ()
add_library (xrpl_libs INTERFACE)
add_library (Xrpl::libs ALIAS xrpl_libs)
target_link_libraries (xrpl_libs INTERFACE Xrpl::syslibs)
add_library (ripple_libs INTERFACE)
add_library (Ripple::libs ALIAS ripple_libs)
target_link_libraries (ripple_libs INTERFACE Ripple::syslibs)

@@ -1,5 +1,5 @@
#[===================================================================[
sanity checks
convenience variables and sanity checks
#]===================================================================]
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
@@ -16,19 +16,39 @@ if (NOT is_multiconfig)
endif ()
endif ()
get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
set (is_root_project OFF)
else ()
set (is_root_project ON)
endif ()
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
set (is_clang TRUE)
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
message (FATAL_ERROR "This project requires clang 16 or later")
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "This project requires clang 8 or later")
endif ()
# TODO min AppleClang version check ?
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
set (is_gcc TRUE)
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
message (FATAL_ERROR "This project requires GCC 12 or later")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "This project requires GCC 8 or later")
endif ()
endif ()
if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
set (is_linux TRUE)
else ()
set (is_linux FALSE)
endif ()
if ("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
set (is_ci TRUE)
else ()
set (is_ci FALSE)
endif ()
# check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message (FATAL_ERROR "Builds (in-source) are not allowed in "
@@ -41,8 +61,8 @@ if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
endif ()
if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
message (FATAL_ERROR "Xrpld requires a 64 bit target architecture.\n"
"The most likely cause of this warning is trying to build xrpld with a 32-bit OS.")
message (FATAL_ERROR "Rippled requires a 64 bit target architecture.\n"
"The most likely cause of this warning is trying to build rippled with a 32-bit OS.")
endif ()
if (APPLE AND NOT HOMEBREW)

@@ -1,25 +1,10 @@
#[===================================================================[
declare options and variables
declare user options/settings
#]===================================================================]
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
set (is_linux TRUE)
else()
set(is_linux FALSE)
endif()
include(ProcessorCount)
if("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
set(is_ci TRUE)
else()
set(is_ci FALSE)
endif()
get_directory_property(has_parent PARENT_DIRECTORY)
if(has_parent)
set(is_root_project OFF)
else()
set(is_root_project ON)
endif()
ProcessorCount(PROCESSOR_COUNT)
option(assert "Enables asserts, even in release builds" OFF)
@@ -40,30 +25,31 @@ if(unity)
endif()
set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif()
if(is_clang AND is_linux)
option(voidstar "Enable Antithesis instrumentation." OFF)
endif()
if(is_gcc OR is_clang)
include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT)
option(coverage "Generates coverage info." OFF)
option(profile "Add profiling flags" OFF)
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
"Unit tests parallelism for the purpose of coverage report.")
set(coverage_format "html-details" CACHE STRING
"Output format of the coverage report.")
set(coverage_extra_args "" CACHE STRING
"Additional arguments to pass to gcovr.")
set(coverage_test "" CACHE STRING
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
if(coverage_test AND NOT coverage)
set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
endif()
option(wextra "compile with extra gcc/clang warnings enabled" ON)
else()
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif()
if(is_linux)
option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
option(static "link protobuf, openssl, libc++, and boost statically" ON)
option(perf "Enables flags that assist with perf recording" OFF)
option(use_gold "enables detection of gold (binutils) linker" ON)
@@ -72,19 +58,17 @@ else()
# we are not ready to allow shared-libs on windows because it would require
# export declarations. On macos it's more feasible, but static openssl
# produces odd linker errors, thus we disable shared lib builds for now.
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE)
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()
if(is_clang)
option(use_lld "enables detection of lld linker" ON)
else()
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
@@ -118,26 +102,38 @@ if(san)
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif()
endif()
set(container_label "" CACHE STRING "tag to use for package building containers")
option(packages_only
"ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option(have_package_container
"Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)
# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table"
OFF)
option(single_io_service_thread
"Restricts the number of threads calling io_context::run to one. \
"Restricts the number of threads calling io_service::run to one. \
This can be useful when debugging."
OFF)
option(boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
OFF)
option(beast_hashers
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
OFF)
if(WIN32)
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()
if(coverage)
message(STATUS "coverage build requested - forcing Debug build")
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)

@@ -0,0 +1,22 @@
option (validator_keys "Enables building of validator-keys-tool as a separate target (imported via FetchContent)" OFF)
if (validator_keys)
git_branch (current_branch)
# default to tracking VK master branch unless we are on release
if (NOT (current_branch STREQUAL "release"))
set (current_branch "master")
endif ()
message (STATUS "tracking ValidatorKeys branch: ${current_branch}")
FetchContent_Declare (
validator_keys_src
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}"
)
FetchContent_GetProperties (validator_keys_src)
if (NOT validator_keys_src_POPULATED)
message (STATUS "Pausing to download ValidatorKeys...")
FetchContent_Populate (validator_keys_src)
endif ()
add_subdirectory (${validator_keys_src_SOURCE_DIR} ${CMAKE_BINARY_DIR}/validator-keys)
endif ()

@@ -5,11 +5,11 @@
file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
foreach(line_ ${BUILD_INFO})
if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
set(xrpld_version ${CMAKE_MATCH_1})
set(rippled_version ${CMAKE_MATCH_1})
endif()
endforeach()
if(xrpld_version)
message(STATUS "xrpld version: ${xrpld_version}")
if(rippled_version)
message(STATUS "rippled version: ${rippled_version}")
else()
message(FATAL_ERROR "unable to determine xrpld version")
message(FATAL_ERROR "unable to determine rippled version")
endif()

@@ -1,20 +0,0 @@
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
if (validator_keys)
git_branch (current_branch)
# default to tracking VK master branch unless we are on release
if (NOT (current_branch STREQUAL "release"))
set (current_branch "master")
endif ()
message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")
FetchContent_Declare (
validator_keys
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}"
)
FetchContent_MakeAvailable(validator_keys)
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
endif ()


@@ -12,10 +12,16 @@ find_package(Boost 1.82 REQUIRED
thread
)
add_library(xrpl_boost INTERFACE)
add_library(Xrpl::boost ALIAS xrpl_boost)
add_library(ripple_boost INTERFACE)
add_library(Ripple::boost ALIAS ripple_boost)
if(XCODE)
target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
else()
target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
endif()
target_link_libraries(xrpl_boost
target_link_libraries(ripple_boost
INTERFACE
Boost::headers
Boost::chrono
@@ -24,13 +30,12 @@ target_link_libraries(xrpl_boost
Boost::date_time
Boost::filesystem
Boost::json
Boost::process
Boost::program_options
Boost::regex
Boost::system
Boost::thread)
if(Boost_COMPILER)
target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
endif()
if(san AND is_clang)
# TODO: gcc does not support -fsanitize-blacklist...can we do something else


@@ -7,7 +7,7 @@ function(xrpl_add_test name)
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
)
add_executable(${target} ${ARGN} ${sources})
add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})
isolate_headers(
${target}
@@ -22,4 +22,20 @@ function(xrpl_add_test name)
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
add_test(NAME ${target} COMMAND ${target})
set_tests_properties(
${target} PROPERTIES
FIXTURES_REQUIRED ${target}_fixture
)
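# Register a companion ${target}.build test that compiles the
# EXCLUDE_FROM_ALL executable on demand; the fixture wiring makes it a
# prerequisite of the actual unit test when running ctest.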
add_test(
NAME ${target}.build
COMMAND
${CMAKE_COMMAND}
--build ${CMAKE_BINARY_DIR}
--config $<CONFIG>
--target ${target}
)
set_tests_properties(${target}.build PROPERTIES
FIXTURES_SETUP ${target}_fixture
)
endfunction()


@@ -1,62 +0,0 @@
{
"version": "0.5",
"requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
"secp256k1/0.7.0#9c4ab67bdc3860c16ea5b36aed8f74ea%1765202256.763",
"rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1762797952.535",
"re2/20230301#ca3b241baec15bd31ea9187150e0b333%1764175362.029",
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1764863245.83",
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1760106486.594",
"nudb/2.0.9#fb8dfd1a5557f5e0528114c2da17721e%1763150366.909",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
"libarchive/3.8.1#ffee18995c706e02bf96e7a2f7042e0d%1764175360.142",
"jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244",
"grpc/1.72.0#f244a57bff01e708c55a1100b12e1589%1763158050.628",
"ed25519/2015.03#ae761bdc52730a843f0809bdf6c1b1f6%1764270189.893",
"doctest/2.4.12#eb9fb352fb2fdfc8abb17ec270945165%1762797941.757",
"date/3.0.4#862e11e80030356b53c2c38599ceb32b%1763584497.32",
"c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1764175359.429",
"bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1764175359.429",
"boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1756223752.326",
"abseil/20250127.0#9e8e8cfc89a1324139fc0ee3bd4d8c8c%1753819045.301"
],
"build_requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1756234281.733",
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1764863245.83",
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1756234232.901",
"msys2/cci.latest#1996656c3c98e5765b25b60ff5cf77b4%1764840888.758",
"m4/1.4.19#70dc8bbb33e981d119d2acc0175cf381%1763158052.846",
"cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1764175359.44",
"cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1764175359.429",
"b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28",
"automake/1.16.5#b91b7c384c3deaa9d535be02da14d04f%1755524470.56",
"autoconf/2.71#51077f068e61700d65bb05541ea1e4b0%1731054366.86",
"abseil/20250127.0#9e8e8cfc89a1324139fc0ee3bd4d8c8c%1753819045.301"
],
"python_requires": [],
"overrides": {
"protobuf/5.27.0": [
"protobuf/6.32.1"
],
"lz4/1.9.4": [
"lz4/1.10.0"
],
"boost/1.83.0": [
"boost/1.88.0"
],
"sqlite3/3.44.2": [
"sqlite3/3.49.1"
],
"lz4/[>=1.9.4 <2]": [
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504"
]
},
"config_requires": []
}


@@ -1,5 +0,0 @@
# Global configuration for Conan. This is used to set the number of parallel
# downloads and uploads.
core:non_interactive=True
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}


@@ -1,12 +0,0 @@
# Conan lockfile
To achieve reproducible dependencies, we use a [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html).
The `conan.lock` file in the repository contains a "snapshot" of the current
dependencies. It is implicitly used when running `conan` commands, so you don't
need to specify it.
You have to update this file every time you add a new dependency or change a
revision or version of an existing dependency.
To update a lockfile, run from the repository root: `./conan/lockfile/regenerate.sh`


@@ -1,8 +0,0 @@
[settings]
arch=x86_64
build_type=Release
compiler=gcc
compiler.cppstd=20
compiler.libcxx=libstdc++11
compiler.version=13
os=Linux


@@ -1,8 +0,0 @@
[settings]
arch=armv8
build_type=Release
compiler=apple-clang
compiler.cppstd=20
compiler.libcxx=libc++
compiler.version=17.0
os=Macos


@@ -1,36 +0,0 @@
#!/usr/bin/env bash
set -ex
TEMP_DIR=$(mktemp -d)
trap "rm -rf $TEMP_DIR" EXIT
echo "Using temporary CONAN_HOME: $TEMP_DIR"
# We use a temporary Conan home to avoid polluting the user's existing Conan
# configuration and to not use local cache (which leads to non-reproducible lockfiles).
export CONAN_HOME="$TEMP_DIR"
# Ensure that the xrplf remote is the first to be consulted, so any recipes we
# patched are used. We also add it there to avoid creating a huge diff when
# the official Conan Center Index is updated.
conan remote add --force --index 0 xrplf https://conan.ripplex.io
# Delete any existing lockfile.
rm -f conan.lock
# Create a new lockfile that is compatible with Linux, macOS, and Windows. The
# first create command will create a new lockfile, while the subsequent create
# commands will merge any additional dependencies into the created lockfile.
conan lock create . \
--options '&:jemalloc=True' \
--options '&:rocksdb=True' \
--profile:all=conan/lockfile/linux.profile
conan lock create . \
--options '&:jemalloc=True' \
--options '&:rocksdb=True' \
--profile:all=conan/lockfile/macos.profile
conan lock create . \
--options '&:jemalloc=True' \
--options '&:rocksdb=True' \
--profile:all=conan/lockfile/windows.profile


@@ -1,9 +0,0 @@
[settings]
arch=x86_64
build_type=Release
compiler=msvc
compiler.cppstd=20
compiler.runtime=dynamic
compiler.runtime_type=Release
compiler.version=194
os=Windows


@@ -20,6 +20,15 @@ compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
{% endif %}
[conf]
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags+=['-Wno-restrict']
{% if compiler == "clang" and compiler_version >= 19 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "apple-clang" and compiler_version >= 17 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags=['-Wno-restrict']
{% endif %}
[tool_requires]
!cmake/*: cmake/[>=3 <4]


@@ -1,169 +1,148 @@
from conan import ConanFile, __version__ as conan_version
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
import re
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
from conan import ConanFile
from conan import __version__ as conan_version
class Xrpl(ConanFile):
name = "xrpl"
name = 'xrpl'
license = "ISC"
author = "John Freeman <jfreeman@ripple.com>"
url = "https://github.com/xrplf/rippled"
description = "The XRP Ledger"
settings = "os", "compiler", "build_type", "arch"
license = 'ISC'
author = 'John Freeman <jfreeman@ripple.com>'
url = 'https://github.com/xrplf/rippled'
description = 'The XRP Ledger'
settings = 'os', 'compiler', 'build_type', 'arch'
options = {
"assertions": [True, False],
"coverage": [True, False],
"fPIC": [True, False],
"jemalloc": [True, False],
"rocksdb": [True, False],
"shared": [True, False],
"static": [True, False],
"tests": [True, False],
"unity": [True, False],
"xrpld": [True, False],
'assertions': [True, False],
'coverage': [True, False],
'fPIC': [True, False],
'jemalloc': [True, False],
'rocksdb': [True, False],
'shared': [True, False],
'static': [True, False],
'tests': [True, False],
'unity': [True, False],
'xrpld': [True, False],
}
requires = [
"ed25519/2015.03",
"grpc/1.72.0",
"libarchive/3.8.1",
"nudb/2.0.9",
"openssl/3.5.4",
"secp256k1/0.7.0",
"soci/4.0.3",
"zlib/1.3.1",
'grpc/1.50.1',
'libarchive/3.8.1',
'nudb/2.0.9',
'openssl/3.5.2',
'soci/4.0.3',
'zlib/1.3.1',
"rapidjson/1.1.0"
]
test_requires = [
"doctest/2.4.12",
'doctest/2.4.11',
]
tool_requires = [
"protobuf/6.32.1",
'protobuf/3.21.12',
]
default_options = {
"assertions": False,
"coverage": False,
"fPIC": True,
"jemalloc": False,
"rocksdb": True,
"shared": False,
"static": True,
"tests": False,
"unity": False,
"xrpld": False,
"date/*:header_only": True,
"ed25519/*:shared": False,
"grpc/*:shared": False,
"grpc/*:secure": True,
"grpc/*:codegen": True,
"grpc/*:cpp_plugin": True,
"grpc/*:csharp_ext": False,
"grpc/*:csharp_plugin": False,
"grpc/*:node_plugin": False,
"grpc/*:objective_c_plugin": False,
"grpc/*:php_plugin": False,
"grpc/*:python_plugin": False,
"grpc/*:ruby_plugin": False,
"grpc/*:otel_plugin": False,
"libarchive/*:shared": False,
"libarchive/*:with_acl": False,
"libarchive/*:with_bzip2": False,
"libarchive/*:with_cng": False,
"libarchive/*:with_expat": False,
"libarchive/*:with_iconv": False,
"libarchive/*:with_libxml2": False,
"libarchive/*:with_lz4": True,
"libarchive/*:with_lzma": False,
"libarchive/*:with_lzo": False,
"libarchive/*:with_nettle": False,
"libarchive/*:with_openssl": False,
"libarchive/*:with_pcreposix": False,
"libarchive/*:with_xattr": False,
"libarchive/*:with_zlib": False,
"lz4/*:shared": False,
"openssl/*:shared": False,
"protobuf/*:shared": False,
"protobuf/*:with_zlib": True,
"rocksdb/*:enable_sse": False,
"rocksdb/*:lite": False,
"rocksdb/*:shared": False,
"rocksdb/*:use_rtti": True,
"rocksdb/*:with_jemalloc": False,
"rocksdb/*:with_lz4": True,
"rocksdb/*:with_snappy": True,
"secp256k1/*:shared": False,
"snappy/*:shared": False,
"soci/*:shared": False,
"soci/*:with_sqlite3": True,
"soci/*:with_boost": True,
"xxhash/*:shared": False,
'assertions': False,
'coverage': False,
'fPIC': True,
'jemalloc': False,
'rocksdb': True,
'shared': False,
'static': True,
'tests': False,
'unity': False,
'xrpld': False,
'date/*:header_only': True,
'grpc/*:shared': False,
'grpc/*:secure': True,
'libarchive/*:shared': False,
'libarchive/*:with_acl': False,
'libarchive/*:with_bzip2': False,
'libarchive/*:with_cng': False,
'libarchive/*:with_expat': False,
'libarchive/*:with_iconv': False,
'libarchive/*:with_libxml2': False,
'libarchive/*:with_lz4': True,
'libarchive/*:with_lzma': False,
'libarchive/*:with_lzo': False,
'libarchive/*:with_nettle': False,
'libarchive/*:with_openssl': False,
'libarchive/*:with_pcreposix': False,
'libarchive/*:with_xattr': False,
'libarchive/*:with_zlib': False,
'lz4/*:shared': False,
'openssl/*:shared': False,
'protobuf/*:shared': False,
'protobuf/*:with_zlib': True,
'rocksdb/*:enable_sse': False,
'rocksdb/*:lite': False,
'rocksdb/*:shared': False,
'rocksdb/*:use_rtti': True,
'rocksdb/*:with_jemalloc': False,
'rocksdb/*:with_lz4': True,
'rocksdb/*:with_snappy': True,
'snappy/*:shared': False,
'soci/*:shared': False,
'soci/*:with_sqlite3': True,
'soci/*:with_boost': True,
'xxhash/*:shared': False,
}
def set_version(self):
if self.version is None:
path = f"{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp"
regex = r"versionString\s?=\s?\"(.*)\""
with open(path, encoding="utf-8") as file:
path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
regex = r'versionString\s?=\s?\"(.*)\"'
with open(path, encoding='utf-8') as file:
matches = (re.search(regex, line) for line in file)
match = next(m for m in matches if m)
self.version = match.group(1)
def configure(self):
if self.settings.compiler == "apple-clang":
self.options["boost"].visibility = "global"
if self.settings.compiler in ["clang", "gcc"]:
self.options["boost"].without_cobalt = True
if self.settings.compiler == 'apple-clang':
self.options['boost'].visibility = 'global'
def requirements(self):
# Conan 2 requires transitive headers to be specified
transitive_headers_opt = (
{"transitive_headers": True} if conan_version.split(".")[0] == "2" else {}
)
self.requires("boost/1.88.0", force=True, **transitive_headers_opt)
self.requires("date/3.0.4", **transitive_headers_opt)
self.requires("lz4/1.10.0", force=True)
self.requires("protobuf/6.32.1", force=True)
self.requires("sqlite3/3.49.1", force=True)
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
self.requires('boost/1.86.0', force=True, **transitive_headers_opt)
self.requires('date/3.0.4', **transitive_headers_opt)
self.requires('lz4/1.10.0', force=True)
self.requires('protobuf/3.21.12', force=True)
self.requires('sqlite3/3.49.1', force=True)
if self.options.jemalloc:
self.requires("jemalloc/5.3.0")
self.requires('jemalloc/5.3.0')
if self.options.rocksdb:
self.requires("rocksdb/10.5.1")
self.requires("xxhash/0.8.3", **transitive_headers_opt)
self.requires('rocksdb/10.0.1')
self.requires('xxhash/0.8.3', **transitive_headers_opt)
exports_sources = (
"CMakeLists.txt",
"cfg/*",
"cmake/*",
"external/*",
"include/*",
"src/*",
'CMakeLists.txt',
'cfg/*',
'cmake/*',
'external/*',
'include/*',
'src/*',
)
def layout(self):
cmake_layout(self)
# Fix this setting to follow the default introduced in Conan 1.48
# to align with our build instructions.
self.folders.generators = "build/generators"
generators = "CMakeDeps"
self.folders.generators = 'build/generators'
generators = 'CMakeDeps'
def generate(self):
tc = CMakeToolchain(self)
tc.variables["tests"] = self.options.tests
tc.variables["assert"] = self.options.assertions
tc.variables["coverage"] = self.options.coverage
tc.variables["jemalloc"] = self.options.jemalloc
tc.variables["rocksdb"] = self.options.rocksdb
tc.variables["BUILD_SHARED_LIBS"] = self.options.shared
tc.variables["static"] = self.options.static
tc.variables["unity"] = self.options.unity
tc.variables["xrpld"] = self.options.xrpld
tc.variables['tests'] = self.options.tests
tc.variables['assert'] = self.options.assertions
tc.variables['coverage'] = self.options.coverage
tc.variables['jemalloc'] = self.options.jemalloc
tc.variables['rocksdb'] = self.options.rocksdb
tc.variables['BUILD_SHARED_LIBS'] = self.options.shared
tc.variables['static'] = self.options.static
tc.variables['unity'] = self.options.unity
tc.variables['xrpld'] = self.options.xrpld
tc.generate()
def build(self):
@@ -178,42 +157,39 @@ class Xrpl(ConanFile):
cmake.install()
def package_info(self):
libxrpl = self.cpp_info.components["libxrpl"]
libxrpl = self.cpp_info.components['libxrpl']
libxrpl.libs = [
"xrpl",
"xrpl.libpb",
"ed25519",
"secp256k1",
'xrpl',
'xrpl.libpb',
'ed25519',
'secp256k1',
]
# TODO: Fix the protobufs to include each other relative to
# `include/`, not `include/ripple/proto/`.
libxrpl.includedirs = ["include", "include/ripple/proto"]
libxrpl.includedirs = ['include', 'include/ripple/proto']
libxrpl.requires = [
"boost::headers",
"boost::chrono",
"boost::container",
"boost::coroutine",
"boost::date_time",
"boost::filesystem",
"boost::json",
"boost::program_options",
"boost::process",
"boost::regex",
"boost::system",
"boost::thread",
"date::date",
"ed25519::ed25519",
"grpc::grpc++",
"libarchive::libarchive",
"lz4::lz4",
"nudb::nudb",
"openssl::crypto",
"protobuf::libprotobuf",
"soci::soci",
"secp256k1::secp256k1",
"sqlite3::sqlite",
"xxhash::xxhash",
"zlib::zlib",
'boost::headers',
'boost::chrono',
'boost::container',
'boost::coroutine',
'boost::date_time',
'boost::filesystem',
'boost::json',
'boost::program_options',
'boost::regex',
'boost::system',
'boost::thread',
'date::date',
'grpc::grpc++',
'libarchive::libarchive',
'lz4::lz4',
'nudb::nudb',
'openssl::crypto',
'protobuf::libprotobuf',
'soci::soci',
'sqlite3::sqlite',
'xxhash::xxhash',
'zlib::zlib',
]
if self.options.rocksdb:
libxrpl.requires.append("rocksdb::librocksdb")
libxrpl.requires.append('rocksdb::librocksdb')

docs/.gitignore

@@ -0,0 +1,3 @@
html
temp
out.txt


@@ -5,8 +5,8 @@ skinparam roundcorner 20
skinparam maxmessagesize 160
actor "Rippled Start" as RS
participant "Timer" as T
participant "NetworkOPs" as NOP
participant "Timer" as T
participant "NetworkOPs" as NOP
participant "ValidatorList" as VL #lightgreen
participant "Consensus" as GC
participant "ConsensusAdaptor" as CA #lightgreen
@@ -20,7 +20,7 @@ VL -> NOP
NOP -> VL: update trusted validators
activate VL
VL -> VL: re-calculate quorum
hnote over VL#lightgreen: ignore negative listed validators\nwhen calculate quorum
hnote over VL#lightgreen: ignore negative listed validators\nwhen calculate quorum
VL -> NOP
deactivate VL
NOP -> GC: start round
@@ -36,14 +36,14 @@ activate GC
end
alt phase == OPEN
alt should close ledger
alt should close ledger
GC -> GC: phase = ESTABLISH
GC -> CA: onClose
activate CA
alt sqn%256==0
alt sqn%256==0
CA -[#green]> RM: <font color=green>getValidations
CA -[#green]> CA: <font color=green>create UNLModify Tx
hnote over CA#lightgreen: use validatations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
CA -[#green]> CA: <font color=green>create UNLModify Tx
hnote over CA#lightgreen: use validatations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
end
CA -> GC
GC -> CA: propose
@@ -61,14 +61,14 @@ else phase == ESTABLISH
CA -> CA : build LCL
hnote over CA #lightgreen: copy negative UNL from parent ledger
alt sqn%256==0
CA -[#green]> CA: <font color=green>Adjust negative UNL
CA -[#green]> CA: <font color=green>Adjust negative UNL
CA -[#green]> CA: <font color=green>apply UNLModify Tx
end
CA -> CA : validate and send validation message
activate NOP
CA -> NOP : end consensus and\n<b>begin next consensus round
deactivate NOP
deactivate CA
deactivate CA
hnote over RM: receive validations
end
else phase == ACCEPTED
@@ -76,4 +76,4 @@ else phase == ACCEPTED
end
deactivate GC
@enduml
@enduml


@@ -4,7 +4,7 @@ class TimeoutCounter {
#app_ : Application&
}
TimeoutCounter o-- "1" Application
TimeoutCounter o-- "1" Application
': app_
Stoppable <.. Application
@@ -14,13 +14,13 @@ class Application {
-m_inboundLedgers : uptr<InboundLedgers>
}
Application *-- "1" LedgerReplayer
Application *-- "1" LedgerReplayer
': m_ledgerReplayer
Application *-- "1" InboundLedgers
Application *-- "1" InboundLedgers
': m_inboundLedgers
Stoppable <.. InboundLedgers
Application "1" --o InboundLedgers
Application "1" --o InboundLedgers
': app_
class InboundLedgers {
@@ -28,9 +28,9 @@ class InboundLedgers {
}
Stoppable <.. LedgerReplayer
InboundLedgers "1" --o LedgerReplayer
InboundLedgers "1" --o LedgerReplayer
': inboundLedgers_
Application "1" --o LedgerReplayer
Application "1" --o LedgerReplayer
': app_
class LedgerReplayer {
@@ -42,17 +42,17 @@ class LedgerReplayer {
-skipLists_ : hash_map<u256, wptr<SkipListAcquire>>
}
LedgerReplayer *-- LedgerReplayTask
LedgerReplayer *-- LedgerReplayTask
': tasks_
LedgerReplayer o-- LedgerDeltaAcquire
LedgerReplayer o-- LedgerDeltaAcquire
': deltas_
LedgerReplayer o-- SkipListAcquire
LedgerReplayer o-- SkipListAcquire
': skipLists_
TimeoutCounter <.. LedgerReplayTask
InboundLedgers "1" --o LedgerReplayTask
InboundLedgers "1" --o LedgerReplayTask
': inboundLedgers_
LedgerReplayer "1" --o LedgerReplayTask
LedgerReplayer "1" --o LedgerReplayTask
': replayer_
class LedgerReplayTask {
@@ -63,15 +63,15 @@ class LedgerReplayTask {
+addDelta(sptr<LedgerDeltaAcquire>)
}
LedgerReplayTask *-- "1" SkipListAcquire
LedgerReplayTask *-- "1" SkipListAcquire
': skipListAcquirer_
LedgerReplayTask *-- LedgerDeltaAcquire
LedgerReplayTask *-- LedgerDeltaAcquire
': deltas_
TimeoutCounter <.. SkipListAcquire
InboundLedgers "1" --o SkipListAcquire
InboundLedgers "1" --o SkipListAcquire
': inboundLedgers_
LedgerReplayer "1" --o SkipListAcquire
LedgerReplayer "1" --o SkipListAcquire
': replayer_
LedgerReplayTask --o SkipListAcquire : implicit via callback
@@ -83,9 +83,9 @@ class SkipListAcquire {
}
TimeoutCounter <.. LedgerDeltaAcquire
InboundLedgers "1" --o LedgerDeltaAcquire
InboundLedgers "1" --o LedgerDeltaAcquire
': inboundLedgers_
LedgerReplayer "1" --o LedgerDeltaAcquire
LedgerReplayer "1" --o LedgerDeltaAcquire
': replayer_
LedgerReplayTask --o LedgerDeltaAcquire : implicit via callback
@@ -95,4 +95,4 @@ class LedgerDeltaAcquire {
-replayer_ : LedgerReplayer&
-dataReadyCallbacks_ : vector<callback>
}
@enduml
@enduml


@@ -38,7 +38,7 @@ deactivate lr
loop
lr -> lda : make_shared(ledgerId, ledgerSeq)
return delta
lr -> lrt : addDelta(delta)
lr -> lrt : addDelta(delta)
lrt -> lda : addDataCallback(callback)
return
return
@@ -62,7 +62,7 @@ deactivate peer
lr -> lda : processData(ledgerHeader, txns)
lda -> lda : notify()
note over lda: call the callbacks added by\naddDataCallback(callback).
lda -> lrt : callback(ledgerId)
lda -> lrt : callback(ledgerId)
lrt -> lrt : deltaReady(ledgerId)
lrt -> lrt : tryAdvance()
loop as long as child can be built
@@ -82,4 +82,4 @@ deactivate peer
deactivate peer
@enduml
@enduml


@@ -54,8 +54,8 @@ There is a `docs` target in the CMake configuration.
```
mkdir build
cd build
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel
cmake ..
cmake --build . --target docs
```
The output will be in `build/docs/html`.

external/README.md

@@ -1,7 +1,10 @@
# External Conan recipes
The subdirectories in this directory contain external libraries used by rippled.
The subdirectories in this directory contain copies of external libraries used
by rippled.
| Folder | Upstream | Description |
| :--------------- | :------------------------------------------------------------- | :------------------------------------------------------------------------------------------- |
| `antithesis-sdk` | [Project](https://github.com/antithesishq/antithesis-sdk-cpp/) | [Antithesis](https://antithesis.com/docs/using_antithesis/sdk/cpp/overview.html) SDK for C++ |
| `ed25519-donna` | [Project](https://github.com/floodyberry/ed25519-donna) | [Ed25519](http://ed25519.cr.yp.to/) digital signatures |
| `secp256k1` | [Project](https://github.com/bitcoin-core/secp256k1) | ECDSA digital signatures using the **secp256k1** curve |


@@ -1,3 +1,3 @@
---
DisableFormat: true
SortIncludes: Never
SortIncludes: false

external/ed25519-donna/CMakeLists.txt

@@ -0,0 +1,51 @@
cmake_minimum_required(VERSION 3.11)
project(ed25519
LANGUAGES C
)
if(PROJECT_NAME STREQUAL CMAKE_PROJECT_NAME)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/output/$<CONFIG>/lib")
endif()
if(NOT TARGET OpenSSL::SSL)
find_package(OpenSSL)
endif()
add_library(ed25519 STATIC
ed25519.c
)
add_library(ed25519::ed25519 ALIAS ed25519)
target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)
if(NOT MSVC)
target_compile_options(ed25519 PRIVATE -Wno-implicit-fallthrough)
endif()
include(GNUInstallDirs)
#[=========================================================[
NOTE for macos:
https://github.com/floodyberry/ed25519-donna/issues/29
our source for ed25519-donna-portable.h has been
patched to workaround this.
#]=========================================================]
target_include_directories(ed25519 PUBLIC
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
)
install(
TARGETS ed25519
EXPORT ${PROJECT_NAME}-exports
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
)
install(
EXPORT ${PROJECT_NAME}-exports
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
FILE ${PROJECT_NAME}-targets.cmake
NAMESPACE ${PROJECT_NAME}::
)
install(
FILES ed25519.h
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)

external/ed25519-donna/README.md

@@ -0,0 +1,182 @@
[ed25519](http://ed25519.cr.yp.to/) is an
[Elliptic Curve Digital Signature Algorithm](http://en.wikipedia.org/wiki/Elliptic_Curve_DSA),
developed by [Dan Bernstein](http://cr.yp.to/djb.html),
[Niels Duif](http://www.nielsduif.nl/),
[Tanja Lange](http://hyperelliptic.org/tanja),
[Peter Schwabe](http://www.cryptojedi.org/users/peter/),
and [Bo-Yin Yang](http://www.iis.sinica.edu.tw/pages/byyang/).
This project provides performant, portable 32-bit & 64-bit implementations. All implementations are
of course constant time in regard to secret data.
#### Performance
SSE2 code and benches have not been updated yet. I will do those next.
Compiler versions are gcc 4.6.3, icc 13.1.1, clang 3.4-1~exp1.
Batch verification time (in parentheses) is the average time per 1 verification in a batch of 64 signatures. Counts are in thousands of cycles.
Note that SSE2 performance may be less impressive on AMD & older CPUs with slower SSE ops!
Visual Studio performance for `ge25519_scalarmult_base_niels` will lag behind a bit until optimized assembler versions of `ge25519_scalarmult_base_choose_niels`
are made.
##### E5200 @ 2.5ghz, march=core2
<table>
<thead><tr><th>Implementation</th><th>Sign</th><th>gcc</th><th>icc</th><th>clang</th><th>Verify</th><th>gcc</th><th>icc</th><th>clang</th></tr></thead>
<tbody>
<tr><td>ed25519-donna 64bit </td><td></td><td>100k</td><td>110k</td><td>137k</td><td></td><td>327k (144k) </td><td>342k (163k) </td><td>422k (194k) </td></tr>
<tr><td>amd64-64-24k </td><td></td><td>102k</td><td> </td><td> </td><td></td><td>355k (158k) </td><td> </td><td> </td></tr>
<tr><td>ed25519-donna-sse2 64bit</td><td></td><td>108k</td><td>111k</td><td>116k</td><td></td><td>353k (155k) </td><td>345k (154k) </td><td>360k (161k) </td></tr>
<tr><td>amd64-51-32k </td><td></td><td>116k</td><td> </td><td> </td><td></td><td>380k (175k) </td><td> </td><td> </td></tr>
<tr><td>ed25519-donna-sse2 32bit</td><td></td><td>147k</td><td>147k</td><td>156k</td><td></td><td>380k (178k) </td><td>381k (173k) </td><td>430k (192k) </td></tr>
<tr><td>ed25519-donna 32bit </td><td></td><td>597k</td><td>335k</td><td>380k</td><td></td><td>1693k (720k)</td><td>1052k (453k)</td><td>1141k (493k)</td></tr>
</tbody>
</table>
##### E3-1270 @ 3.4ghz, march=corei7-avx
<table>
<thead><tr><th>Implementation</th><th>Sign</th><th>gcc</th><th>icc</th><th>clang</th><th>Verify</th><th>gcc</th><th>icc</th><th>clang</th></tr></thead>
<tbody>
<tr><td>amd64-64-24k </td><td></td><td> 68k</td><td> </td><td> </td><td></td><td>225k (104k) </td><td> </td><td> </td></tr>
<tr><td>ed25519-donna 64bit </td><td></td><td> 71k</td><td> 75k</td><td> 90k</td><td></td><td>226k (105k) </td><td>226k (112k) </td><td>277k (125k) </td></tr>
<tr><td>amd64-51-32k </td><td></td><td> 72k</td><td> </td><td> </td><td></td><td>218k (107k) </td><td> </td><td> </td></tr>
<tr><td>ed25519-donna-sse2 64bit</td><td></td><td> 79k</td><td> 82k</td><td> 92k</td><td></td><td>252k (122k) </td><td>259k (124k) </td><td>282k (131k) </td></tr>
<tr><td>ed25519-donna-sse2 32bit</td><td></td><td> 94k</td><td> 95k</td><td>103k</td><td></td><td>296k (146k) </td><td>294k (137k) </td><td>306k (147k) </td></tr>
<tr><td>ed25519-donna 32bit </td><td></td><td>525k</td><td>299k</td><td>316k</td><td></td><td>1502k (645k)</td><td>959k (418k) </td><td>954k (416k) </td></tr>
</tbody>
</table>
#### Compilation
No configuration is needed **if you are compiling against OpenSSL**.
##### Hash Options
If you are not compiling against OpenSSL, you will need a hash function.
To use a simple/**slow** implementation of SHA-512, use `-DED25519_REFHASH` when compiling `ed25519.c`.
This should never be used except to verify the code works when OpenSSL is not available.
To use a custom hash function, use `-DED25519_CUSTOMHASH` when compiling `ed25519.c` and put your
custom hash implementation in ed25519-hash-custom.h. The hash must have a 512-bit digest and implement
struct ed25519_hash_context;
void ed25519_hash_init(ed25519_hash_context *ctx);
void ed25519_hash_update(ed25519_hash_context *ctx, const uint8_t *in, size_t inlen);
void ed25519_hash_final(ed25519_hash_context *ctx, uint8_t *hash);
void ed25519_hash(uint8_t *hash, const uint8_t *in, size_t inlen);
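For illustration, a minimal `ed25519-hash-custom.h` could delegate to OpenSSL's SHA-512. This is only a sketch under that assumption (the `SHA512_*` functions still exist in OpenSSL 3.x, though deprecated):

```c
/* Sketch of ed25519-hash-custom.h backed by OpenSSL's SHA-512.
   Build ed25519.c with -DED25519_CUSTOMHASH and link with -lcrypto. */
#include <stddef.h>
#include <stdint.h>
#include <openssl/sha.h>

typedef SHA512_CTX ed25519_hash_context;

static void
ed25519_hash_init(ed25519_hash_context *ctx) {
    SHA512_Init(ctx);
}

static void
ed25519_hash_update(ed25519_hash_context *ctx, const uint8_t *in, size_t inlen) {
    SHA512_Update(ctx, in, inlen);
}

static void
ed25519_hash_final(ed25519_hash_context *ctx, uint8_t *hash) {
    SHA512_Final(hash, ctx);
}

static void
ed25519_hash(uint8_t *hash, const uint8_t *in, size_t inlen) {
    SHA512(in, inlen, hash);  /* one-shot 512-bit digest */
}
```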
##### Random Options
If you are not compiling against OpenSSL, you will need a random function for batch verification.
To use a custom random function, use `-DED25519_CUSTOMRANDOM` when compiling `ed25519.c` and put your
custom random implementation in ed25519-randombytes-custom.h. The random function must implement:
void ED25519_FN(ed25519_randombytes_unsafe) (void *p, size_t len);
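Likewise, a hypothetical `ed25519-randombytes-custom.h` can forward to OpenSSL's `RAND_bytes` (a sketch only; production code must check the return value):

```c
/* Sketch of ed25519-randombytes-custom.h using OpenSSL's RAND_bytes.
   Build ed25519.c with -DED25519_CUSTOMRANDOM and link with -lcrypto.
   ED25519_FN is the library's own name-mangling macro, as used above. */
#include <stddef.h>
#include <openssl/rand.h>

void
ED25519_FN(ed25519_randombytes_unsafe) (void *p, size_t len) {
    /* RAND_bytes returns 1 on success; ignored here for brevity only. */
    (void)RAND_bytes((unsigned char *)p, (int)len);
}
```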
Use `-DED25519_TEST` when compiling `ed25519.c` to use a deterministically seeded, non-thread-safe CSPRNG
variant of Bob Jenkins' [ISAAC](http://en.wikipedia.org/wiki/ISAAC_%28cipher%29)
##### Minor options
Use `-DED25519_INLINE_ASM` to disable the use of custom assembler routines and instead rely on portable C.
Use `-DED25519_FORCE_32BIT` to force the use of 32 bit routines even when compiling for 64 bit.
##### 32-bit
gcc ed25519.c -m32 -O3 -c
##### 64-bit
gcc ed25519.c -m64 -O3 -c
##### SSE2
gcc ed25519.c -m32 -O3 -c -DED25519_SSE2 -msse2
gcc ed25519.c -m64 -O3 -c -DED25519_SSE2
clang and icc are also supported
#### Usage
To use the code, link against `ed25519.o -mbits` and:
#include "ed25519.h"
Add `-lssl -lcrypto` when using OpenSSL (some systems may not need `-lcrypto`; it can be a matter of trial and error).
To generate a private key, simply generate 32 bytes from a secure
cryptographic source:
ed25519_secret_key sk;
randombytes(sk, sizeof(ed25519_secret_key));
To generate a public key:
ed25519_public_key pk;
ed25519_publickey(sk, pk);
To sign a message:
ed25519_signature sig;
ed25519_sign(message, message_len, sk, pk, sig);
To verify a signature:
int valid = ed25519_sign_open(message, message_len, pk, signature) == 0;
To batch verify signatures:
const unsigned char *mp[num] = {message1, message2..};
size_t ml[num] = {message_len1, message_len2..};
const unsigned char *pkp[num] = {pk1, pk2..};
const unsigned char *sigp[num] = {signature1, signature2..};
int valid[num];
/* valid[i] will be set to 1 if the individual signature was valid, 0 otherwise */
int all_valid = ed25519_sign_open_batch(mp, ml, pkp, sigp, num, valid) == 0;
**Note**: Batch verification uses `ed25519_randombytes_unsafe`, implemented in
`ed25519-randombytes.h`, to generate random scalars for the verification code.
The default implementation now uses OpenSSL's `RAND_bytes`.
Unlike the [SUPERCOP](http://bench.cr.yp.to/supercop.html) version, signatures are
not appended to messages, and there is no need for padding in front of messages.
Additionally, the secret key does not contain a copy of the public key, so it is
32 bytes instead of 64 bytes, and the public key must be provided to the signing
function.
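Putting the calls above together, a minimal end-to-end example (assuming OpenSSL's `RAND_bytes` serves as the secure random source):

```c
#include <stdio.h>
#include <openssl/rand.h>
#include "ed25519.h"

int
main(void) {
    ed25519_secret_key sk;
    ed25519_public_key pk;
    ed25519_signature sig;
    const unsigned char msg[] = "an example message";

    RAND_bytes(sk, sizeof(sk));       /* 32 random bytes = secret key */
    ed25519_publickey(sk, pk);        /* derive the public key */
    ed25519_sign(msg, sizeof(msg) - 1, sk, pk, sig);

    if (ed25519_sign_open(msg, sizeof(msg) - 1, pk, sig) == 0)
        printf("signature verified\n");
    return 0;
}
```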
##### Curve25519
Curve25519 public keys can be generated thanks to
[Adam Langley](http://www.imperialviolet.org/2013/05/10/fastercurve25519.html)
leveraging Ed25519's precomputed basepoint scalar multiplication.
curved25519_key sk, pk;
randombytes(sk, sizeof(curved25519_key));
curved25519_scalarmult_basepoint(pk, sk);
Note the name is curved25519, a combination of curve and ed25519, to prevent
name clashes. Performance is slightly faster than short message ed25519
signing due to both using the same code for the scalar multiply.
#### Testing
Fuzzing against reference implementations is now available. See [fuzz/README](fuzz/README.md).
Building `ed25519.c` with `-DED25519_TEST` and linking with `test.c` will run basic sanity tests
and benchmark each function. `test-batch.c` has been incorporated into `test.c`.
`test-internals.c` is standalone and built the same way as `ed25519.c`. It tests the math primitives
with extreme values to ensure they function correctly. SSE2 is now supported.
#### Papers
[Available on the Ed25519 website](http://ed25519.cr.yp.to/papers.html)


@@ -0,0 +1,579 @@
/*
Public domain by Andrew M. <liquidsun@gmail.com>
See: https://github.com/floodyberry/curve25519-donna
32 bit integer curve25519 implementation
*/
typedef uint32_t bignum25519[10];
typedef uint32_t bignum25519align16[12];
static const uint32_t reduce_mask_25 = (1 << 25) - 1;
static const uint32_t reduce_mask_26 = (1 << 26) - 1;
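/* Field elements use ten uint32_t limbs in radix 2^25.5: even limbs hold
   26 significant bits and odd limbs 25 (see the masks above), leaving
   headroom in each limb for lazy carry propagation. */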
/* out = in */
DONNA_INLINE static void
curve25519_copy(bignum25519 out, const bignum25519 in) {
out[0] = in[0];
out[1] = in[1];
out[2] = in[2];
out[3] = in[3];
out[4] = in[4];
out[5] = in[5];
out[6] = in[6];
out[7] = in[7];
out[8] = in[8];
out[9] = in[9];
}
/* out = a + b */
DONNA_INLINE static void
curve25519_add(bignum25519 out, const bignum25519 a, const bignum25519 b) {
out[0] = a[0] + b[0];
out[1] = a[1] + b[1];
out[2] = a[2] + b[2];
out[3] = a[3] + b[3];
out[4] = a[4] + b[4];
out[5] = a[5] + b[5];
out[6] = a[6] + b[6];
out[7] = a[7] + b[7];
out[8] = a[8] + b[8];
out[9] = a[9] + b[9];
}
DONNA_INLINE static void
curve25519_add_after_basic(bignum25519 out, const bignum25519 a, const bignum25519 b) {
uint32_t c;
out[0] = a[0] + b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
out[1] = a[1] + b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
out[2] = a[2] + b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
out[3] = a[3] + b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
out[4] = a[4] + b[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
out[5] = a[5] + b[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
out[6] = a[6] + b[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
out[7] = a[7] + b[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
out[8] = a[8] + b[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
out[9] = a[9] + b[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
out[0] += 19 * c;
}
DONNA_INLINE static void
curve25519_add_reduce(bignum25519 out, const bignum25519 a, const bignum25519 b) {
uint32_t c;
out[0] = a[0] + b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
out[1] = a[1] + b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
out[2] = a[2] + b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
out[3] = a[3] + b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
out[4] = a[4] + b[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
out[5] = a[5] + b[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
out[6] = a[6] + b[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
out[7] = a[7] + b[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
out[8] = a[8] + b[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
out[9] = a[9] + b[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
out[0] += 19 * c;
}
/* multiples of p */
static const uint32_t twoP0 = 0x07ffffda;
static const uint32_t twoP13579 = 0x03fffffe;
static const uint32_t twoP2468 = 0x07fffffe;
static const uint32_t fourP0 = 0x0fffffb4;
static const uint32_t fourP13579 = 0x07fffffc;
static const uint32_t fourP2468 = 0x0ffffffc;
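/* Adding 2*p (or 4*p when an operand may be the unreduced result of a
   previous add/sub) before subtracting keeps every limb non-negative,
   so the carry chains below never underflow. */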
/* out = a - b */
DONNA_INLINE static void
curve25519_sub(bignum25519 out, const bignum25519 a, const bignum25519 b) {
uint32_t c;
out[0] = twoP0 + a[0] - b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
out[1] = twoP13579 + a[1] - b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
out[2] = twoP2468 + a[2] - b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
out[3] = twoP13579 + a[3] - b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
out[4] = twoP2468 + a[4] - b[4] + c;
out[5] = twoP13579 + a[5] - b[5] ;
out[6] = twoP2468 + a[6] - b[6] ;
out[7] = twoP13579 + a[7] - b[7] ;
out[8] = twoP2468 + a[8] - b[8] ;
out[9] = twoP13579 + a[9] - b[9] ;
}
/* out = a - b, where a is the result of a basic op (add,sub) */
DONNA_INLINE static void
curve25519_sub_after_basic(bignum25519 out, const bignum25519 a, const bignum25519 b) {
uint32_t c;
out[0] = fourP0 + a[0] - b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
out[1] = fourP13579 + a[1] - b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
out[2] = fourP2468 + a[2] - b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
out[3] = fourP13579 + a[3] - b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
out[4] = fourP2468 + a[4] - b[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
out[5] = fourP13579 + a[5] - b[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
out[6] = fourP2468 + a[6] - b[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
out[7] = fourP13579 + a[7] - b[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
out[8] = fourP2468 + a[8] - b[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
out[9] = fourP13579 + a[9] - b[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
out[0] += 19 * c;
}
DONNA_INLINE static void
curve25519_sub_reduce(bignum25519 out, const bignum25519 a, const bignum25519 b) {
uint32_t c;
out[0] = fourP0 + a[0] - b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
out[1] = fourP13579 + a[1] - b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
out[2] = fourP2468 + a[2] - b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
out[3] = fourP13579 + a[3] - b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
out[4] = fourP2468 + a[4] - b[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
out[5] = fourP13579 + a[5] - b[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
out[6] = fourP2468 + a[6] - b[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
out[7] = fourP13579 + a[7] - b[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
out[8] = fourP2468 + a[8] - b[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
out[9] = fourP13579 + a[9] - b[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
out[0] += 19 * c;
}
/* out = -a */
DONNA_INLINE static void
curve25519_neg(bignum25519 out, const bignum25519 a) {
uint32_t c;
out[0] = twoP0 - a[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
out[1] = twoP13579 - a[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
out[2] = twoP2468 - a[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
out[3] = twoP13579 - a[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
out[4] = twoP2468 - a[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
out[5] = twoP13579 - a[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
out[6] = twoP2468 - a[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
out[7] = twoP13579 - a[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
out[8] = twoP2468 - a[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
out[9] = twoP13579 - a[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
out[0] += 19 * c;
}
/* out = a * b */
#define curve25519_mul_noinline curve25519_mul
static void
curve25519_mul(bignum25519 out, const bignum25519 a, const bignum25519 b) {
uint32_t r0,r1,r2,r3,r4,r5,r6,r7,r8,r9;
uint32_t s0,s1,s2,s3,s4,s5,s6,s7,s8,s9;
uint64_t m0,m1,m2,m3,m4,m5,m6,m7,m8,m9,c;
uint32_t p;
r0 = b[0];
r1 = b[1];
r2 = b[2];
r3 = b[3];
r4 = b[4];
r5 = b[5];
r6 = b[6];
r7 = b[7];
r8 = b[8];
r9 = b[9];
s0 = a[0];
s1 = a[1];
s2 = a[2];
s3 = a[3];
s4 = a[4];
s5 = a[5];
s6 = a[6];
s7 = a[7];
s8 = a[8];
s9 = a[9];
m1 = mul32x32_64(r0, s1) + mul32x32_64(r1, s0);
m3 = mul32x32_64(r0, s3) + mul32x32_64(r1, s2) + mul32x32_64(r2, s1) + mul32x32_64(r3, s0);
m5 = mul32x32_64(r0, s5) + mul32x32_64(r1, s4) + mul32x32_64(r2, s3) + mul32x32_64(r3, s2) + mul32x32_64(r4, s1) + mul32x32_64(r5, s0);
m7 = mul32x32_64(r0, s7) + mul32x32_64(r1, s6) + mul32x32_64(r2, s5) + mul32x32_64(r3, s4) + mul32x32_64(r4, s3) + mul32x32_64(r5, s2) + mul32x32_64(r6, s1) + mul32x32_64(r7, s0);
m9 = mul32x32_64(r0, s9) + mul32x32_64(r1, s8) + mul32x32_64(r2, s7) + mul32x32_64(r3, s6) + mul32x32_64(r4, s5) + mul32x32_64(r5, s4) + mul32x32_64(r6, s3) + mul32x32_64(r7, s2) + mul32x32_64(r8, s1) + mul32x32_64(r9, s0);
r1 *= 2;
r3 *= 2;
r5 *= 2;
r7 *= 2;
m0 = mul32x32_64(r0, s0);
m2 = mul32x32_64(r0, s2) + mul32x32_64(r1, s1) + mul32x32_64(r2, s0);
m4 = mul32x32_64(r0, s4) + mul32x32_64(r1, s3) + mul32x32_64(r2, s2) + mul32x32_64(r3, s1) + mul32x32_64(r4, s0);
m6 = mul32x32_64(r0, s6) + mul32x32_64(r1, s5) + mul32x32_64(r2, s4) + mul32x32_64(r3, s3) + mul32x32_64(r4, s2) + mul32x32_64(r5, s1) + mul32x32_64(r6, s0);
m8 = mul32x32_64(r0, s8) + mul32x32_64(r1, s7) + mul32x32_64(r2, s6) + mul32x32_64(r3, s5) + mul32x32_64(r4, s4) + mul32x32_64(r5, s3) + mul32x32_64(r6, s2) + mul32x32_64(r7, s1) + mul32x32_64(r8, s0);
r1 *= 19;
r2 *= 19;
r3 = (r3 / 2) * 19;
r4 *= 19;
r5 = (r5 / 2) * 19;
r6 *= 19;
r7 = (r7 / 2) * 19;
r8 *= 19;
r9 *= 19;
m1 += (mul32x32_64(r9, s2) + mul32x32_64(r8, s3) + mul32x32_64(r7, s4) + mul32x32_64(r6, s5) + mul32x32_64(r5, s6) + mul32x32_64(r4, s7) + mul32x32_64(r3, s8) + mul32x32_64(r2, s9));
m3 += (mul32x32_64(r9, s4) + mul32x32_64(r8, s5) + mul32x32_64(r7, s6) + mul32x32_64(r6, s7) + mul32x32_64(r5, s8) + mul32x32_64(r4, s9));
m5 += (mul32x32_64(r9, s6) + mul32x32_64(r8, s7) + mul32x32_64(r7, s8) + mul32x32_64(r6, s9));
m7 += (mul32x32_64(r9, s8) + mul32x32_64(r8, s9));
r3 *= 2;
r5 *= 2;
r7 *= 2;
r9 *= 2;
m0 += (mul32x32_64(r9, s1) + mul32x32_64(r8, s2) + mul32x32_64(r7, s3) + mul32x32_64(r6, s4) + mul32x32_64(r5, s5) + mul32x32_64(r4, s6) + mul32x32_64(r3, s7) + mul32x32_64(r2, s8) + mul32x32_64(r1, s9));
m2 += (mul32x32_64(r9, s3) + mul32x32_64(r8, s4) + mul32x32_64(r7, s5) + mul32x32_64(r6, s6) + mul32x32_64(r5, s7) + mul32x32_64(r4, s8) + mul32x32_64(r3, s9));
m4 += (mul32x32_64(r9, s5) + mul32x32_64(r8, s6) + mul32x32_64(r7, s7) + mul32x32_64(r6, s8) + mul32x32_64(r5, s9));
m6 += (mul32x32_64(r9, s7) + mul32x32_64(r8, s8) + mul32x32_64(r7, s9));
m8 += (mul32x32_64(r9, s9));
r0 = (uint32_t)m0 & reduce_mask_26; c = (m0 >> 26);
m1 += c; r1 = (uint32_t)m1 & reduce_mask_25; c = (m1 >> 25);
m2 += c; r2 = (uint32_t)m2 & reduce_mask_26; c = (m2 >> 26);
m3 += c; r3 = (uint32_t)m3 & reduce_mask_25; c = (m3 >> 25);
m4 += c; r4 = (uint32_t)m4 & reduce_mask_26; c = (m4 >> 26);
m5 += c; r5 = (uint32_t)m5 & reduce_mask_25; c = (m5 >> 25);
m6 += c; r6 = (uint32_t)m6 & reduce_mask_26; c = (m6 >> 26);
m7 += c; r7 = (uint32_t)m7 & reduce_mask_25; c = (m7 >> 25);
m8 += c; r8 = (uint32_t)m8 & reduce_mask_26; c = (m8 >> 26);
m9 += c; r9 = (uint32_t)m9 & reduce_mask_25; p = (uint32_t)(m9 >> 25);
m0 = r0 + mul32x32_64(p,19); r0 = (uint32_t)m0 & reduce_mask_26; p = (uint32_t)(m0 >> 26);
r1 += p;
out[0] = r0;
out[1] = r1;
out[2] = r2;
out[3] = r3;
out[4] = r4;
out[5] = r5;
out[6] = r6;
out[7] = r7;
out[8] = r8;
out[9] = r9;
}
/* out = in*in */
static void
curve25519_square(bignum25519 out, const bignum25519 in) {
uint32_t r0,r1,r2,r3,r4,r5,r6,r7,r8,r9;
uint32_t d6,d7,d8,d9;
uint64_t m0,m1,m2,m3,m4,m5,m6,m7,m8,m9,c;
uint32_t p;
r0 = in[0];
r1 = in[1];
r2 = in[2];
r3 = in[3];
r4 = in[4];
r5 = in[5];
r6 = in[6];
r7 = in[7];
r8 = in[8];
r9 = in[9];
m0 = mul32x32_64(r0, r0);
r0 *= 2;
m1 = mul32x32_64(r0, r1);
m2 = mul32x32_64(r0, r2) + mul32x32_64(r1, r1 * 2);
r1 *= 2;
m3 = mul32x32_64(r0, r3) + mul32x32_64(r1, r2 );
m4 = mul32x32_64(r0, r4) + mul32x32_64(r1, r3 * 2) + mul32x32_64(r2, r2);
r2 *= 2;
m5 = mul32x32_64(r0, r5) + mul32x32_64(r1, r4 ) + mul32x32_64(r2, r3);
m6 = mul32x32_64(r0, r6) + mul32x32_64(r1, r5 * 2) + mul32x32_64(r2, r4) + mul32x32_64(r3, r3 * 2);
r3 *= 2;
m7 = mul32x32_64(r0, r7) + mul32x32_64(r1, r6 ) + mul32x32_64(r2, r5) + mul32x32_64(r3, r4 );
m8 = mul32x32_64(r0, r8) + mul32x32_64(r1, r7 * 2) + mul32x32_64(r2, r6) + mul32x32_64(r3, r5 * 2) + mul32x32_64(r4, r4 );
m9 = mul32x32_64(r0, r9) + mul32x32_64(r1, r8 ) + mul32x32_64(r2, r7) + mul32x32_64(r3, r6 ) + mul32x32_64(r4, r5 * 2);
d6 = r6 * 19;
d7 = r7 * 2 * 19;
d8 = r8 * 19;
d9 = r9 * 2 * 19;
m0 += (mul32x32_64(d9, r1 ) + mul32x32_64(d8, r2 ) + mul32x32_64(d7, r3 ) + mul32x32_64(d6, r4 * 2) + mul32x32_64(r5, r5 * 2 * 19));
m1 += (mul32x32_64(d9, r2 / 2) + mul32x32_64(d8, r3 ) + mul32x32_64(d7, r4 ) + mul32x32_64(d6, r5 * 2));
m2 += (mul32x32_64(d9, r3 ) + mul32x32_64(d8, r4 * 2) + mul32x32_64(d7, r5 * 2) + mul32x32_64(d6, r6 ));
m3 += (mul32x32_64(d9, r4 ) + mul32x32_64(d8, r5 * 2) + mul32x32_64(d7, r6 ));
m4 += (mul32x32_64(d9, r5 * 2) + mul32x32_64(d8, r6 * 2) + mul32x32_64(d7, r7 ));
m5 += (mul32x32_64(d9, r6 ) + mul32x32_64(d8, r7 * 2));
m6 += (mul32x32_64(d9, r7 * 2) + mul32x32_64(d8, r8 ));
m7 += (mul32x32_64(d9, r8 ));
m8 += (mul32x32_64(d9, r9 ));
r0 = (uint32_t)m0 & reduce_mask_26; c = (m0 >> 26);
m1 += c; r1 = (uint32_t)m1 & reduce_mask_25; c = (m1 >> 25);
m2 += c; r2 = (uint32_t)m2 & reduce_mask_26; c = (m2 >> 26);
m3 += c; r3 = (uint32_t)m3 & reduce_mask_25; c = (m3 >> 25);
m4 += c; r4 = (uint32_t)m4 & reduce_mask_26; c = (m4 >> 26);
m5 += c; r5 = (uint32_t)m5 & reduce_mask_25; c = (m5 >> 25);
m6 += c; r6 = (uint32_t)m6 & reduce_mask_26; c = (m6 >> 26);
m7 += c; r7 = (uint32_t)m7 & reduce_mask_25; c = (m7 >> 25);
m8 += c; r8 = (uint32_t)m8 & reduce_mask_26; c = (m8 >> 26);
m9 += c; r9 = (uint32_t)m9 & reduce_mask_25; p = (uint32_t)(m9 >> 25);
m0 = r0 + mul32x32_64(p,19); r0 = (uint32_t)m0 & reduce_mask_26; p = (uint32_t)(m0 >> 26);
r1 += p;
out[0] = r0;
out[1] = r1;
out[2] = r2;
out[3] = r3;
out[4] = r4;
out[5] = r5;
out[6] = r6;
out[7] = r7;
out[8] = r8;
out[9] = r9;
}
/* out = in^(2^count), i.e. square the input count times */
static void
curve25519_square_times(bignum25519 out, const bignum25519 in, int count) {
uint32_t r0,r1,r2,r3,r4,r5,r6,r7,r8,r9;
uint32_t d6,d7,d8,d9;
uint64_t m0,m1,m2,m3,m4,m5,m6,m7,m8,m9,c;
uint32_t p;
r0 = in[0];
r1 = in[1];
r2 = in[2];
r3 = in[3];
r4 = in[4];
r5 = in[5];
r6 = in[6];
r7 = in[7];
r8 = in[8];
r9 = in[9];
do {
m0 = mul32x32_64(r0, r0);
r0 *= 2;
m1 = mul32x32_64(r0, r1);
m2 = mul32x32_64(r0, r2) + mul32x32_64(r1, r1 * 2);
r1 *= 2;
m3 = mul32x32_64(r0, r3) + mul32x32_64(r1, r2 );
m4 = mul32x32_64(r0, r4) + mul32x32_64(r1, r3 * 2) + mul32x32_64(r2, r2);
r2 *= 2;
m5 = mul32x32_64(r0, r5) + mul32x32_64(r1, r4 ) + mul32x32_64(r2, r3);
m6 = mul32x32_64(r0, r6) + mul32x32_64(r1, r5 * 2) + mul32x32_64(r2, r4) + mul32x32_64(r3, r3 * 2);
r3 *= 2;
m7 = mul32x32_64(r0, r7) + mul32x32_64(r1, r6 ) + mul32x32_64(r2, r5) + mul32x32_64(r3, r4 );
m8 = mul32x32_64(r0, r8) + mul32x32_64(r1, r7 * 2) + mul32x32_64(r2, r6) + mul32x32_64(r3, r5 * 2) + mul32x32_64(r4, r4 );
m9 = mul32x32_64(r0, r9) + mul32x32_64(r1, r8 ) + mul32x32_64(r2, r7) + mul32x32_64(r3, r6 ) + mul32x32_64(r4, r5 * 2);
d6 = r6 * 19;
d7 = r7 * 2 * 19;
d8 = r8 * 19;
d9 = r9 * 2 * 19;
m0 += (mul32x32_64(d9, r1 ) + mul32x32_64(d8, r2 ) + mul32x32_64(d7, r3 ) + mul32x32_64(d6, r4 * 2) + mul32x32_64(r5, r5 * 2 * 19));
m1 += (mul32x32_64(d9, r2 / 2) + mul32x32_64(d8, r3 ) + mul32x32_64(d7, r4 ) + mul32x32_64(d6, r5 * 2));
m2 += (mul32x32_64(d9, r3 ) + mul32x32_64(d8, r4 * 2) + mul32x32_64(d7, r5 * 2) + mul32x32_64(d6, r6 ));
m3 += (mul32x32_64(d9, r4 ) + mul32x32_64(d8, r5 * 2) + mul32x32_64(d7, r6 ));
m4 += (mul32x32_64(d9, r5 * 2) + mul32x32_64(d8, r6 * 2) + mul32x32_64(d7, r7 ));
m5 += (mul32x32_64(d9, r6 ) + mul32x32_64(d8, r7 * 2));
m6 += (mul32x32_64(d9, r7 * 2) + mul32x32_64(d8, r8 ));
m7 += (mul32x32_64(d9, r8 ));
m8 += (mul32x32_64(d9, r9 ));
r0 = (uint32_t)m0 & reduce_mask_26; c = (m0 >> 26);
m1 += c; r1 = (uint32_t)m1 & reduce_mask_25; c = (m1 >> 25);
m2 += c; r2 = (uint32_t)m2 & reduce_mask_26; c = (m2 >> 26);
m3 += c; r3 = (uint32_t)m3 & reduce_mask_25; c = (m3 >> 25);
m4 += c; r4 = (uint32_t)m4 & reduce_mask_26; c = (m4 >> 26);
m5 += c; r5 = (uint32_t)m5 & reduce_mask_25; c = (m5 >> 25);
m6 += c; r6 = (uint32_t)m6 & reduce_mask_26; c = (m6 >> 26);
m7 += c; r7 = (uint32_t)m7 & reduce_mask_25; c = (m7 >> 25);
m8 += c; r8 = (uint32_t)m8 & reduce_mask_26; c = (m8 >> 26);
m9 += c; r9 = (uint32_t)m9 & reduce_mask_25; p = (uint32_t)(m9 >> 25);
m0 = r0 + mul32x32_64(p,19); r0 = (uint32_t)m0 & reduce_mask_26; p = (uint32_t)(m0 >> 26);
r1 += p;
} while (--count);
out[0] = r0;
out[1] = r1;
out[2] = r2;
out[3] = r3;
out[4] = r4;
out[5] = r5;
out[6] = r6;
out[7] = r7;
out[8] = r8;
out[9] = r9;
}
/* Take a little-endian, 32-byte number and expand it into polynomial form */
static void
curve25519_expand(bignum25519 out, const unsigned char in[32]) {
static const union { uint8_t b[2]; uint16_t s; } endian_check = {{1,0}};
uint32_t x0,x1,x2,x3,x4,x5,x6,x7;
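/* endian_check.s == 1 exactly when the host is little-endian; in that
   case the input can be read with direct 32-bit loads, otherwise each
   word is assembled byte by byte below. */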
if (endian_check.s == 1) {
x0 = *(uint32_t *)(in + 0);
x1 = *(uint32_t *)(in + 4);
x2 = *(uint32_t *)(in + 8);
x3 = *(uint32_t *)(in + 12);
x4 = *(uint32_t *)(in + 16);
x5 = *(uint32_t *)(in + 20);
x6 = *(uint32_t *)(in + 24);
x7 = *(uint32_t *)(in + 28);
} else {
#define F(s) \
((((uint32_t)in[s + 0]) ) | \
(((uint32_t)in[s + 1]) << 8) | \
(((uint32_t)in[s + 2]) << 16) | \
(((uint32_t)in[s + 3]) << 24))
x0 = F(0);
x1 = F(4);
x2 = F(8);
x3 = F(12);
x4 = F(16);
x5 = F(20);
x6 = F(24);
x7 = F(28);
#undef F
}
out[0] = ( x0 ) & 0x3ffffff;
out[1] = ((((uint64_t)x1 << 32) | x0) >> 26) & 0x1ffffff;
out[2] = ((((uint64_t)x2 << 32) | x1) >> 19) & 0x3ffffff;
out[3] = ((((uint64_t)x3 << 32) | x2) >> 13) & 0x1ffffff;
out[4] = (( x3) >> 6) & 0x3ffffff;
out[5] = ( x4 ) & 0x1ffffff;
out[6] = ((((uint64_t)x5 << 32) | x4) >> 25) & 0x3ffffff;
out[7] = ((((uint64_t)x6 << 32) | x5) >> 19) & 0x1ffffff;
out[8] = ((((uint64_t)x7 << 32) | x6) >> 12) & 0x3ffffff;
out[9] = (( x7) >> 6) & 0x1ffffff;
}
/* Take a fully reduced polynomial form number and contract it into a
* little-endian, 32-byte array
*/
static void
curve25519_contract(unsigned char out[32], const bignum25519 in) {
bignum25519 f;
curve25519_copy(f, in);
#define carry_pass() \
f[1] += f[0] >> 26; f[0] &= reduce_mask_26; \
f[2] += f[1] >> 25; f[1] &= reduce_mask_25; \
f[3] += f[2] >> 26; f[2] &= reduce_mask_26; \
f[4] += f[3] >> 25; f[3] &= reduce_mask_25; \
f[5] += f[4] >> 26; f[4] &= reduce_mask_26; \
f[6] += f[5] >> 25; f[5] &= reduce_mask_25; \
f[7] += f[6] >> 26; f[6] &= reduce_mask_26; \
f[8] += f[7] >> 25; f[7] &= reduce_mask_25; \
f[9] += f[8] >> 26; f[8] &= reduce_mask_26;
#define carry_pass_full() \
carry_pass() \
f[0] += 19 * (f[9] >> 25); f[9] &= reduce_mask_25;
#define carry_pass_final() \
carry_pass() \
f[9] &= reduce_mask_25;
carry_pass_full()
carry_pass_full()
/* now t is between 0 and 2^255-1, properly carried. */
/* case 1: between 0 and 2^255-20. case 2: between 2^255-19 and 2^255-1. */
f[0] += 19;
carry_pass_full()
/* now between 19 and 2^255-1 in both cases, and offset by 19. */
f[0] += (reduce_mask_26 + 1) - 19;
f[1] += (reduce_mask_25 + 1) - 1;
f[2] += (reduce_mask_26 + 1) - 1;
f[3] += (reduce_mask_25 + 1) - 1;
f[4] += (reduce_mask_26 + 1) - 1;
f[5] += (reduce_mask_25 + 1) - 1;
f[6] += (reduce_mask_26 + 1) - 1;
f[7] += (reduce_mask_25 + 1) - 1;
f[8] += (reduce_mask_26 + 1) - 1;
f[9] += (reduce_mask_25 + 1) - 1;
/* now between 2^255 and 2^256-20, and offset by 2^255. */
carry_pass_final()
#undef carry_pass
#undef carry_pass_full
#undef carry_pass_final
f[1] <<= 2;
f[2] <<= 3;
f[3] <<= 5;
f[4] <<= 6;
f[6] <<= 1;
f[7] <<= 3;
f[8] <<= 4;
f[9] <<= 6;
#define F(i, s) \
out[s+0] |= (unsigned char )(f[i] & 0xff); \
out[s+1] = (unsigned char )((f[i] >> 8) & 0xff); \
out[s+2] = (unsigned char )((f[i] >> 16) & 0xff); \
out[s+3] = (unsigned char )((f[i] >> 24) & 0xff);
out[0] = 0;
out[16] = 0;
F(0,0);
F(1,3);
F(2,6);
F(3,9);
F(4,12);
F(5,16);
F(6,19);
F(7,22);
F(8,25);
F(9,28);
#undef F
}
/* out = (flag) ? in : out */
DONNA_INLINE static void
curve25519_move_conditional_bytes(uint8_t out[96], const uint8_t in[96], uint32_t flag) {
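/* flag must be 0 or 1: nb is all-ones when flag == 0 (keep out) and b is
   all-ones when flag == 1 (take in); both inputs are always read so the
   selection runs in constant time. */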
const uint32_t nb = flag - 1, b = ~nb;
const uint32_t *inl = (const uint32_t *)in;
uint32_t *outl = (uint32_t *)out;
outl[0] = (outl[0] & nb) | (inl[0] & b);
outl[1] = (outl[1] & nb) | (inl[1] & b);
outl[2] = (outl[2] & nb) | (inl[2] & b);
outl[3] = (outl[3] & nb) | (inl[3] & b);
outl[4] = (outl[4] & nb) | (inl[4] & b);
outl[5] = (outl[5] & nb) | (inl[5] & b);
outl[6] = (outl[6] & nb) | (inl[6] & b);
outl[7] = (outl[7] & nb) | (inl[7] & b);
outl[8] = (outl[8] & nb) | (inl[8] & b);
outl[9] = (outl[9] & nb) | (inl[9] & b);
outl[10] = (outl[10] & nb) | (inl[10] & b);
outl[11] = (outl[11] & nb) | (inl[11] & b);
outl[12] = (outl[12] & nb) | (inl[12] & b);
outl[13] = (outl[13] & nb) | (inl[13] & b);
outl[14] = (outl[14] & nb) | (inl[14] & b);
outl[15] = (outl[15] & nb) | (inl[15] & b);
outl[16] = (outl[16] & nb) | (inl[16] & b);
outl[17] = (outl[17] & nb) | (inl[17] & b);
outl[18] = (outl[18] & nb) | (inl[18] & b);
outl[19] = (outl[19] & nb) | (inl[19] & b);
outl[20] = (outl[20] & nb) | (inl[20] & b);
outl[21] = (outl[21] & nb) | (inl[21] & b);
outl[22] = (outl[22] & nb) | (inl[22] & b);
outl[23] = (outl[23] & nb) | (inl[23] & b);
}
/* if (iswap) swap(a, b) */
DONNA_INLINE static void
curve25519_swap_conditional(bignum25519 a, bignum25519 b, uint32_t iswap) {
const uint32_t swap = (uint32_t)(-(int32_t)iswap);
uint32_t x0,x1,x2,x3,x4,x5,x6,x7,x8,x9;
x0 = swap & (a[0] ^ b[0]); a[0] ^= x0; b[0] ^= x0;
x1 = swap & (a[1] ^ b[1]); a[1] ^= x1; b[1] ^= x1;
x2 = swap & (a[2] ^ b[2]); a[2] ^= x2; b[2] ^= x2;
x3 = swap & (a[3] ^ b[3]); a[3] ^= x3; b[3] ^= x3;
x4 = swap & (a[4] ^ b[4]); a[4] ^= x4; b[4] ^= x4;
x5 = swap & (a[5] ^ b[5]); a[5] ^= x5; b[5] ^= x5;
x6 = swap & (a[6] ^ b[6]); a[6] ^= x6; b[6] ^= x6;
x7 = swap & (a[7] ^ b[7]); a[7] ^= x7; b[7] ^= x7;
x8 = swap & (a[8] ^ b[8]); a[8] ^= x8; b[8] ^= x8;
x9 = swap & (a[9] ^ b[9]); a[9] ^= x9; b[9] ^= x9;
}

Some files were not shown because too many files have changed in this diff.