Compare commits

...

73 Commits

Author  SHA1  Message  Date
JCW  b5ee925be3  Rebuild all dependencies  2025-07-28 17:25:53 +01:00
JCW  7a8836c202  Rebuild all dependencies  2025-07-28 17:25:44 +01:00
JCW  386c0befe1  Skip rhel as the docker image is not working  2025-07-28 14:23:04 +01:00
JCW  76b34f7d82  Fix windows ENV:Path not found error  2025-07-28 14:00:05 +01:00
JCW  46073d7f87  Fix pipeline  2025-07-28 13:53:33 +01:00
Bart Thomee  30b64cb162  Only test RHEL  2025-07-28 08:30:26 -04:00
Bart Thomee  237d67b724  Run on all Linux, MacOS, and Windows variants  2025-07-27 18:38:09 -04:00
Bart Thomee  9bc08b25ed  Run on all Linux, MacOS, and Windows variants  2025-07-27 18:36:20 -04:00
Bart Thomee  626f846ed6  Disable linux jobs, run only macos and windows  2025-07-27 18:19:40 -04:00
Bart Thomee  73f086b548  Disable linux jobs, run only macos and windows  2025-07-27 18:05:18 -04:00
Bart Thomee  c7578b00f3  Use self-hosted runners  2025-07-27 18:01:10 -04:00
Bart Thomee  e0a0d7c49d  Use self-hosted runners  2025-07-27 17:59:59 -04:00
Bart Thomee  344df580e2  Add Ubuntu and MacOS  2025-07-27 17:05:47 -04:00
Bart Thomee  ed79654388  Add Ubuntu and MacOS  2025-07-27 17:04:12 -04:00
Bart Thomee  7125a6bd24  Add Ubuntu and MacOS  2025-07-27 17:00:10 -04:00
Bart Thomee  087a6044fd  Add Ubuntu and MacOS  2025-07-27 16:59:13 -04:00
Bart Thomee  e02c49e7df  Combine installing dependencies and building to optimize runtime  2025-07-27 15:15:39 -04:00
Bart Thomee  e1914cd8ef  Convert reusable workflows back to actions to avoid shortcomings with concurrency  2025-07-27 15:01:46 -04:00
Bart Thomee  88ef67e73a  Convert reusable workflows back to actions to avoid shortcomings with concurrency  2025-07-27 14:57:08 -04:00
Bart Thomee  06163a6d75  Convert reusable workflows back to actions to avoid shortcomings with concurrency  2025-07-27 14:53:37 -04:00
Bart Thomee  d46c05e1b7  Convert reusable workflows back to actions to avoid shortcomings with concurrency  2025-07-27 14:44:47 -04:00
Bart Thomee  2b3e131ec8  Convert reusable workflows back to actions to avoid shortcomings with concurrency  2025-07-27 14:43:36 -04:00
Bart Thomee  9924bcc0d2  Convert reusable workflows back to actions to avoid shortcomings with concurrency  2025-07-27 14:42:07 -04:00
Bart Thomee  195ac8ac46  Convert reusable workflows back to actions to avoid shortcomings with concurrency  2025-07-27 14:38:52 -04:00
Bart Thomee  2c862a4060  Repo/org vars, unlike secrets, are not accessible in composite actions or reusable workflows  2025-07-27 14:30:27 -04:00
Bart Thomee  c6c836b82b  Repo/org vars, unlike secrets, are not accessible in composite actions or reusable workflows  2025-07-27 14:29:22 -04:00
Bart Thomee  46d18c50f8  Re-enable installing dependencies and building+testing  2025-07-27 14:14:05 -04:00
Bart Thomee  ca1c2134ab  Debug outputs  2025-07-27 14:09:50 -04:00
Bart Thomee  ea4f39baae  Debug outputs  2025-07-27 14:08:27 -04:00
Bart Thomee  1291b574b6  Debug outputs  2025-07-27 14:06:44 -04:00
Bart Thomee  d05f6c1f90  Debug outputs  2025-07-27 14:05:23 -04:00
Bart Thomee  bd70aad699  Debug outputs  2025-07-27 14:04:49 -04:00
Bart Thomee  a5217e433d  Bypass generate-matrix job  2025-07-27 12:53:52 -04:00
Bart Thomee  b869902453  Bypass generate-matrix job  2025-07-27 12:49:22 -04:00
Bart Thomee  b6ff27f51f  Debug outputs  2025-07-27 12:46:31 -04:00
Bart Thomee  17b6235cc0  Debug outputs  2025-07-27 12:43:53 -04:00
Bart Thomee  355438b29c  Debug outputs  2025-07-27 12:43:22 -04:00
Bart Thomee  0b44deb4e4  Debug outputs  2025-07-27 12:38:21 -04:00
Bart Thomee  b3637549f3  Fix strategy matrix outputs  2025-07-27 12:27:59 -04:00
Bart Thomee  a6c63dbc0c  Fix workflow vs. action  2025-07-27 12:18:35 -04:00
Bart Thomee  7c9aad6e9f  Fix workflow vs. action  2025-07-27 12:12:52 -04:00
Bart Thomee  5c4c658c38  Fix workflow vs. action  2025-07-27 12:08:13 -04:00
Bart Thomee  51a04d8c11  Use different concurrency groups for reusable workflows  2025-07-27 12:06:16 -04:00
Bart Thomee  74930b1bfe  Use different concurrency groups for reusable workflows  2025-07-27 12:05:20 -04:00
Bart Thomee  a9d7393c42  Fix workflow vs. action  2025-07-27 12:01:12 -04:00
Bart Thomee  346666695b  Fix workflow vs. action  2025-07-27 12:00:02 -04:00
Bart Thomee  a3a5e57e18  env is unavailable to pass to reusable workflows in 'with'  2025-07-27 11:53:43 -04:00
Bart Thomee  7acf9f517d  Secrets cannot have types  2025-07-27 11:15:38 -04:00
Bart Thomee  b260565373  env is unavailable to pass to reusable workflows in 'with'  2025-07-27 11:13:17 -04:00
Bart Thomee  4fc0c9c1f2  env is unavailable to pass to reusable workflows in 'with'  2025-07-27 11:11:49 -04:00
Bart Thomee  b698085d8d  env is unavailable to pass to reusable workflows in 'with'  2025-07-27 11:03:58 -04:00
Bart Thomee  6b34810222  Temporarily remove comments to check if they confuse GitHub  2025-07-27 10:57:49 -04:00
Bart Thomee  323a3ed4b6  Use workflow_call  2025-07-27 10:51:24 -04:00
Bart Thomee  915b4568f3  Use workflow_call  2025-07-27 10:50:31 -04:00
Bart Thomee  4e7480125f  Use workflow_call  2025-07-27 10:49:00 -04:00
Bart Thomee  d250365900  Use workflow_call  2025-07-27 10:47:30 -04:00
Bart Thomee  8030670edf  Use workflow_call  2025-07-27 10:42:53 -04:00
Bart Thomee  521cf7f157  Temporarily comment out ARM as Clang builds fail  2025-07-26 20:55:37 -04:00
Bart Thomee  e5d99954ca  Temporarily comment out ARM as Clang builds fail  2025-07-26 20:54:45 -04:00
Bart Thomee  71336d8f19  Composite actions must have a shell defined in each step  2025-07-26 19:13:06 -04:00
Bart Thomee  d283eb1287  Remove lsb_release check  2025-07-26 19:10:03 -04:00
Bart Thomee  8d9c1719c1  Use jq to generate the strategy matrix  2025-07-26 19:08:37 -04:00
Bart Thomee  e6535d64cb  Use jq to generate the strategy matrix  2025-07-26 19:06:21 -04:00
Bart Thomee  15ffa2c71d  Chomp newlines and use different random generator  2025-07-26 15:46:02 -04:00
Bart Thomee  a8753ebed1  Use different delimiter per output variable  2025-07-26 15:42:36 -04:00
Bart Thomee  9659fc8e05  Work around tr: write error  2025-07-26 15:39:57 -04:00
Bart Thomee  86c0c4cf48  Rename image to container  2025-07-26 15:29:26 -04:00
Bart Thomee  1e29ebde08  Rename image to container  2025-07-26 15:28:05 -04:00
Bart Thomee  4779fb28fb  Revamp CI workflows  2025-07-26 15:17:11 -04:00
Bronek Kozicki  7179ce9c58  Build options cleanup (#5581)  2025-07-25 15:48:22 -04:00
    As we no longer support old compiler versions, we are bringing back some warnings by removing no longer relevant `-Wno-...` options.
Bart  921aef9934  Updates Conan dependencies: Boost 1.86 (#5264)  2025-07-25 11:54:02 -04:00
Bronek Kozicki  e7a7bb83c1  VaultWithdraw destination account bugfix (#5572)  2025-07-25 13:53:25 +00:00
    #5224 added (among other things) a `VaultWithdraw` transaction that allows setting the recipient of the withdrawn funds in the `Destination` transaction field. This technically turns this transaction into a payment, and in some respects the implementation does follow payment rules, e.g. enforcement of `lsfRequireDestTag` or `lsfDepositAuth`, or that an MPT transfer has a destination `MPToken`. However, for IOUs it missed verification that the destination account has a trust line to the asset issuer. Since the default behavior of `accountSendIOU` is to create this trust line if it is missing, this is what `VaultWithdraw` currently does. This is incorrect, since the `Destination` might not be interested in holding the asset in question; it basically enables spammy transfers. This change therefore removes the automatic creation of a trust line to the `Destination` account in `VaultWithdraw`.
Bart  5c2a3a2779  refactor: Update rocksdb (#5568)  2025-07-24 14:53:14 -04:00
    This change updates RocksDB to its latest version. RocksDB is backward-compatible, so even though this is a major version bump, databases created with previous versions will continue to function.

    The external RocksDB folder is removed, as the latest version available via Conan Center no longer needs custom patches.
66 changed files with 1774 additions and 2254 deletions

.github/actions/build-test/action.yml (new file, +76 lines)

@@ -0,0 +1,76 @@
name: Build and Test (Linux and MacOS)
inputs:
build_dir:
description: 'The directory where to build.'
required: true
type: string
build_type:
description: 'The build type to use.'
required: true
type: string
cmake_args:
description: 'Additional arguments to pass to CMake.'
required: false
type: string
cmake_generator:
description: 'The CMake generator to use for the build.'
required: true
type: string
cmake_target:
description: 'The CMake target to build.'
required: true
type: string
os:
description: 'A string representing which operating system is used.'
required: true
type: choice
options:
- Linux
- MacOS
- Windows
# Configure the project with CMake, build the specified target, and run the
# unit tests. On Linux, additionally verify that the binary does not
# dynamically link against libstdc++ or libgcc.
runs:
using: composite
steps:
- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
cmake \
${{ inputs.cmake_generator && format('-G "{0}"', inputs.cmake_generator) || '' }} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
-Dtests=TRUE \
-Dxrpld=TRUE \
${{ inputs.cmake_args }} \
..
- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
cmake --build . \
--config ${{ inputs.build_type }} \
--parallel $(nproc) \
--target ${{ inputs.cmake_target }}
- name: Check linking
if: inputs.os == 'Linux'
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Test the binary
shell: bash
working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'Windows' && inputs.build_type || '' }}
run: |
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
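
For reference, the steps above amount to the following commands when run by hand; a minimal sketch, assuming the Conan dependencies were already installed into the .build directory and a Release build of the default target is wanted:

    cd .build
    # Configure, pointing CMake at the toolchain file generated by 'conan install'.
    cmake \
      -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Release \
      -Dtests=TRUE \
      -Dxrpld=TRUE \
      ..
    # Build, then run the unit tests.
    cmake --build . --config Release --parallel $(nproc) --target all
    ./rippled --unittest --unittest-jobs $(nproc)
    ctest -j $(nproc) --output-on-failure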


@@ -1,34 +0,0 @@
name: build
inputs:
generator:
default: null
configuration:
required: true
cmake-args:
default: null
cmake-target:
default: all
# An implicit input is the environment variable `build_dir`.
runs:
using: composite
steps:
- name: configure
shell: bash
run: |
cd ${build_dir}
cmake \
${{ inputs.generator && format('-G "{0}"', inputs.generator) || '' }} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
-Dtests=TRUE \
-Dxrpld=TRUE \
${{ inputs.cmake-args }} \
..
- name: build
shell: bash
run: |
cmake \
--build ${build_dir} \
--config ${{ inputs.configuration }} \
--parallel ${NUM_PROCESSORS:-$(nproc)} \
--target ${{ inputs.cmake-target }}


@@ -0,0 +1,51 @@
name: Configure Conan
inputs:
conan_global_conf:
description: 'The contents of the global Conan configuration file.'
required: true
type: string
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
type: string
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
type: string
# Install the Conan profiles and log into the specified remote. We first remove
# the remote if it already exists, which can occur on self-hosted runners where
# the workspace is not cleaned up between runs.
runs:
using: composite
steps:
- name: Install Conan profile
shell: bash
run: |
echo "${{ inputs.conan_global_conf }}" >> $(conan config home)/global.conf
conan config install conan/profiles/default -tf $(conan config home)/profiles/
echo "Installed Conan profile:"
conan profile show
- name: Add Conan remote
shell: bash
run: |
if conan remote list | grep -q '${{ inputs.conan_remote_name }}'; then
conan remote remove ${{ inputs.conan_remote_name }}
echo "Removed Conan remote '${{ inputs.conan_remote_name }}'."
fi
conan remote add --index 0 ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
echo "Added new conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
- name: Log into Conan remote
shell: bash
run: |
conan remote login ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_username }} --password "${{ inputs.conan_remote_password }}"
conan remote list-users
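
The remove-then-add logic above can be reproduced outside CI as well; a minimal sketch, where the remote name xrplf and the URL are placeholder values and the credentials come from hypothetical environment variables:

    # Drop any stale remote left over from a previous run, then re-add it first in line.
    if conan remote list | grep -q 'xrplf'; then
      conan remote remove xrplf
    fi
    conan remote add --index 0 xrplf https://conan.example.com
    conan remote login xrplf "$CONAN_USER" --password "$CONAN_PASSWORD"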


@@ -1,53 +0,0 @@
name: dependencies
inputs:
configuration:
required: true
# An implicit input is the environment variable `build_dir`.
runs:
using: composite
steps:
- name: export custom recipes
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 9.7.3 external/rocksdb
conan export --version 4.0.3 external/soci
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash
run: |
if conan remote list | grep -q "ripple"; then
conan remote remove ripple
echo "Removed conan remote ripple"
fi
conan remote add --index 0 ripple "${CONAN_URL}"
echo "Added conan remote ripple at ${CONAN_URL}"
- name: try to authenticate to Ripple Conan remote
if: env.CONAN_LOGIN_USERNAME_RIPPLE != '' && env.CONAN_PASSWORD_RIPPLE != ''
id: remote
shell: bash
run: |
echo "Authenticating to ripple remote..."
conan remote auth ripple --force
conan remote list-users
- name: list missing binaries
id: binaries
shell: bash
# Print the list of dependencies that would need to be built locally.
# A non-empty list means we have "failed" to cache binaries remotely.
run: |
echo missing=$(conan info . --build missing --settings build_type=${{ inputs.configuration }} --json 2>/dev/null | grep '^\[') | tee ${GITHUB_OUTPUT}
- name: install dependencies
shell: bash
run: |
mkdir ${build_dir}
cd ${build_dir}
conan install \
--output-folder . \
--build missing \
--options:host "&:tests=True" \
--options:host "&:xrpld=True" \
--settings:all build_type=${{ inputs.configuration }} \
..


@@ -0,0 +1,37 @@
name: Install-dependencies
inputs:
build_dir:
description: 'The directory where to build.'
required: true
type: string
build_type:
description: 'The build type to use.'
required: true
type: string
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
# Install the Conan dependencies, building any that are missing, and then
# upload the built packages to the remote so they are cached for future runs.
runs:
using: composite
steps:
- name: Install Conan dependencies
shell: bash
run: |
mkdir -p ${{ inputs.build_dir }}
cd ${{ inputs.build_dir }}
conan install \
--output-folder . \
--build "*" \
--options:host "&:tests=True" \
--options:host "&:xrpld=True" \
--settings:all build_type=${{ inputs.build_type }} \
..
- name: Upload Conan dependencies
shell: bash
run: conan upload '*' --confirm --remote ${{ inputs.conan_remote_name }}
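
Note that the glob passed to --build is quoted so the shell does not expand it against files in the build directory, which may be non-empty on self-hosted runners. Run by hand, the same install-and-upload flow looks roughly as follows; a sketch, assuming the remote configured by configure-conan is already logged in and that rebuilding every dependency is intended (as in the "Rebuild all dependencies" commits):

    mkdir -p .build && cd .build
    conan install \
      --output-folder . \
      --build "*" \
      --options:host '&:tests=True' \
      --options:host '&:xrpld=True' \
      --settings:all build_type=Release \
      ..
    # Cache the freshly built packages on the remote (the name is a placeholder).
    conan upload '*' --confirm --remote my-remote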

.github/workflows/build-debian.yml (new file, +225 lines)

@@ -0,0 +1,225 @@
# This workflow builds and tests the binary on various Debian configurations.
name: Debian
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-debian
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job. The matrix defined below should be kept in sync
# with https://github.com/XRPLF/ci/blob/main/.github/workflows/debian.yml.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"platform": "linux/amd64",
"runner": ["self-hosted", "Linux", "X64", "devbox"]
},
{
"platform": "linux/arm64",
"runner": ["self-hosted", "Linux", "ARM64", "devbox"]
}
]
STRATEGY_MATRIX_OS: >-
[
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "gcc",
"compiler_version": "12"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "clang",
"compiler_version": "16"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "clang",
"compiler_version": "17"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "clang",
"compiler_version": "18"
},
{
"distro": "debian",
"release": "bookworm",
"compiler_name": "clang",
"compiler_version": "19"
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "platform": "linux/amd64",
# "runner": ["self-hosted", "Linux", "X64"]
# }
# ]
# STRATEGY_MATRIX_OS: >-
# [
# {
# "distro": "debian",
# "release": "bookworm",
# "compiler_name": "gcc",
# "compiler_version": "12"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
${CC} --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'Linux'
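
The generate-outputs job works because jq -c collapses each pretty-printed JSON environment variable into a single line, which is required for values written to $GITHUB_OUTPUT; a quick illustration of the same compaction in a local shell:

    STRATEGY_MATRIX_BUILD_TYPE='[
      "Debug",
      "Release"
    ]'
    jq -c . <<< "$STRATEGY_MATRIX_BUILD_TYPE"
    # prints: ["Debug","Release"]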

.github/workflows/build-macos.yml (new file, +160 lines)

@@ -0,0 +1,160 @@
# This workflow builds and tests the binary on various MacOS configurations.
name: MacOS
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-macos
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"runner": ["self-hosted", "macOS", "ARM64", "devbox"]
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=OFF",
"-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"]
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
clang --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'MacOS'

.github/workflows/build-rhel.yml (new file, +202 lines)

@@ -0,0 +1,202 @@
# This workflow builds and tests the binary on various Red Hat Enterprise Linux
# configurations.
name: RHEL
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-rhel
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job. The matrix defined below should be kept in sync
# with https://github.com/XRPLF/ci/blob/main/.github/workflows/rhel.yml.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"platform": "linux/amd64",
"runner": ["self-hosted", "Linux", "X64", "devbox"]
},
{
"platform": "linux/arm64",
"runner": ["self-hosted", "Linux", "ARM64", "devbox"]
}
]
STRATEGY_MATRIX_OS: >-
[
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro": "rhel",
"release": "9.6",
"compiler_name": "clang",
"compiler_version": "any"
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "platform": "linux/amd64",
# "runner": ["self-hosted", "Linux", "X64"]
# }
# ]
# STRATEGY_MATRIX_OS: >-
# [
# {
# "distro": "rhel",
# "release": "9.6",
# "compiler_name": "gcc",
# "compiler_version": "13"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
${CC} --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'Linux'

.github/workflows/build-ubuntu.yml (new file, +225 lines)

@@ -0,0 +1,225 @@
# This workflow builds and tests the binary on various Ubuntu configurations.
name: Ubuntu
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-ubuntu
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job. The matrix defined below should be kept in sync
# with https://github.com/XRPLF/ci/blob/main/.github/workflows/ubuntu.yml.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"platform": "linux/amd64",
"runner": [self-hosted, Linux, X64, devbox]
},
{
"platform": "linux/arm64",
"runner": [self-hosted, Linux, ARM64, devbox]
}
]
STRATEGY_MATRIX_OS: >-
[
{
"distro": "ubuntu",
"release": "jammy",
"compiler_name": "gcc",
"compiler_version": "12"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "16"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "17"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "18"
},
{
"distro": "ubuntu",
"release": "noble",
"compiler_name": "clang",
"compiler_version": "19"
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "platform": "linux/amd64",
# "runner": ["self-hosted", "Linux", "X64"]
# }
# ]
# STRATEGY_MATRIX_OS: >-
# [
# {
# "distro": "ubuntu",
# "release": "jammy",
# "compiler_name": "gcc",
# "compiler_version": "12"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Release"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: bash
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking compiler version."
${CC} --version
echo "Checking Conan version."
conan --version
echo "Checking Ninja version."
ninja --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Ninja'
cmake_target: 'all'
os: 'Linux'

.github/workflows/build-windows.yml (new file, +159 lines)

@@ -0,0 +1,159 @@
# This workflow builds and tests the binary on various Windows configurations.
name: Windows
on:
workflow_call:
inputs:
build_dir:
description: 'The directory where to build.'
required: false
type: string
default: '.build'
conan_remote_name:
description: 'The name of the Conan remote to use.'
required: true
type: string
conan_remote_url:
description: 'The URL of the Conan remote to use.'
required: true
type: string
secrets:
conan_remote_username:
description: 'The username for logging into the Conan remote.'
required: true
conan_remote_password:
description: 'The password for logging into the Conan remote.'
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-windows
cancel-in-progress: true
defaults:
run:
shell: bash
env:
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# GitHub does not allow us to specify a reusable matrix strategy, so to avoid
# duplication, we define it here using environment variables and create the
# matrix in the first job.
STRATEGY_MATRIX_ARCHITECTURE: >-
[
{
"runner": ["self-hosted", "Windows", "devbox"]
}
]
STRATEGY_MATRIX_BUILD_TYPE: >-
[
"Debug",
"Release"
]
STRATEGY_MATRIX_CMAKE_ARGS: >-
[
"-DUnity=OFF",
"-DUnity=ON"
]
# STRATEGY_MATRIX_ARCHITECTURE: >-
# [
# {
# "runner": "windows-latest"
# }
# ]
# STRATEGY_MATRIX_BUILD_TYPE: >-
# [
# "Debug"
# ]
# STRATEGY_MATRIX_CMAKE_ARGS: >-
# [
# "-DUnity=ON"
# ]
jobs:
# Generate the strategy matrix and expose environment variables to be used by
# following jobs. Exposing env vars this way is needed as they cannot be
# directly passed as inputs to reusable workflows (although they can be passed
# as inputs to actions).
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Generate outputs
id: generate
run: |
echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
outputs:
conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}
# Install and cache the dependencies, and then build and test the binary using
# various configurations.
build-test:
needs:
- generate-outputs
runs-on: ${{ matrix.architecture.runner }}
strategy:
fail-fast: false
max-parallel: 4
matrix:
architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
shell: pwsh
run: |
echo "Checking path"
$env:PATH -split ';' | Sort-Object
echo "Checking environment variables."
- name: choose Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: 3.13
- name: Install Conan
run: pip install wheel conan
- name: Check versions
shell: bash
run: |
echo "Checking CMake version."
cmake --version
echo "Checking Conan version."
conan --version
- name: Configure Conan
uses: ./.github/actions/configure-conan
with:
conan_global_conf: ${{ needs.generate-outputs.outputs.conan_global_conf }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
- name: Install dependencies
uses: ./.github/actions/install-dependencies
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
- name: Build and test the binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_generator: 'Visual Studio 17 2022'
cmake_target: 'install'
os: 'Windows'


@@ -0,0 +1,57 @@
# This workflow checks if the code is formatted according to the .clang-format
# rules.
name: Clang Format
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-clang-format
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
clang-format:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-clang-format
steps:
# The $GITHUB_WORKSPACE and ${{ github.workspace }} might not point to the
# same directory for jobs running in containers. The actions/checkout step
# is *supposed* to checkout into $GITHUB_WORKSPACE and then add it to
# safe.directory (see instructions at https://github.com/actions/checkout)
# but that is apparently not happening for some container images. We
# therefore preemptively add both directories to safe.directory. See also
# https://github.com/actions/runner/issues/2058 for more details.
- name: Configure git safe.directory
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory ${{ github.workspace }}
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check version
run: clang-format --version
- name: Format code
run: find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format -i {} +
- name: Check for differences
env:
MESSAGE: |
One or more files did not conform to the formatting specified in
.clang-format. Maybe you did not run 'git-clang-format' or
'clang-format' before committing, or your version of clang-format
has an incompatibility with the one used here (see the "Check
version" step above).
Run 'git-clang-format --extensions cpp,h,hpp,ipp develop' in your
repo, and then commit and push the changes.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the files that changed to give the contributor a hint about
# what to expect when running git-clang-format on their own machine.
git status
echo "${MESSAGE}"
exit 1
fi
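
As the failure message above suggests, a contributor can apply the same formatting locally before pushing; a minimal sketch, assuming clang-format and git-clang-format are installed and the develop branch is available:

    git-clang-format --extensions cpp,h,hpp,ipp develop
    git commit -am 'Apply clang-format'
    git push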


@@ -0,0 +1,46 @@
# This workflow checks if the dependencies between the modules are correctly
# indexed.
name: Levelization
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-levelization
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
levelization:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Check for differences
env:
MESSAGE: |
The dependency relationships between the modules in rippled have
changed, which may be an improvement or a regression.
A rule of thumb is that if your changes caused something to be
removed from loops.txt, it's probably an improvement, while if
something was added, it's probably a regression.
Run './Builds/levelization/levelization.sh' in your repo, and then
commit and push the changes. See Builds/levelization/README.md for
more info.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the differences to give the contributor a hint about what to
# expect when running levelization on their own machine.
git diff
echo "${MESSAGE}"
exit 1
fi


@@ -1,13 +1,13 @@
name: Check libXRPL compatibility with Clio
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_URL: https://conan.ripplex.io
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
on:
pull_request:
paths:
- 'src/libxrpl/protocol/BuildInfo.cpp'
- '.github/workflows/libxrpl.yml'
- 'check-libxrpl.yml'
types: [opened, reopened, synchronize, ready_for_review]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -33,18 +33,15 @@ jobs:
repo-token: ${{ secrets.GITHUB_TOKEN }}
wait-interval: 10
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Generate channel
id: channel
shell: bash
run: |
echo channel="clio/pr_${{ github.event.pull_request.number }}" | tee ${GITHUB_OUTPUT}
- name: Export new package
shell: bash
run: |
conan export . ${{ steps.channel.outputs.channel }}
- name: Add Ripple Conan remote
shell: bash
run: |
conan remote list
conan remote remove ripple || true
@@ -52,13 +49,11 @@ jobs:
conan remote add ripple ${{ env.CONAN_URL }} --insert 0
- name: Parse new version
id: version
shell: bash
run: |
echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
| awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
- name: Try to authenticate to Ripple Conan remote
id: remote
shell: bash
run: |
# `conan user` implicitly uses the environment variables CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
# https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
@@ -69,7 +64,6 @@ jobs:
- name: Upload new package
id: upload
if: (steps.remote.outputs.outcome == 'success')
shell: bash
run: |
echo "conan upload version ${{ steps.version.outputs.version }} on channel ${{ steps.channel.outputs.channel }}"
echo outcome=$(conan upload xrpl/${{ steps.version.outputs.version }}@${{ steps.channel.outputs.channel }} --remote ripple --confirm >&2 \
@@ -83,7 +77,6 @@ jobs:
steps:
- name: Notify Clio about new version
if: (needs.publish.outputs.outcome == 'success')
shell: bash
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \


@@ -1,64 +0,0 @@
name: clang-format
on:
push:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
jobs:
check:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
runs-on: ubuntu-24.04
container: ghcr.io/xrplf/ci/tools-rippled-clang-format
steps:
# For jobs running in containers, $GITHUB_WORKSPACE and ${{ github.workspace }} might not be the
# same directory. The actions/checkout step is *supposed* to checkout into $GITHUB_WORKSPACE and
# then add it to safe.directory (see instructions at https://github.com/actions/checkout)
# but that's apparently not happening for some container images. We can't be sure what is actually
# happening, so let's pre-emptively add both directories to safe.directory. There's a
# Github issue opened in 2022 and not resolved in 2025 https://github.com/actions/runner/issues/2058 ¯\_(ツ)_/¯
- run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory ${{ github.workspace }}
- uses: actions/checkout@v4
- name: Format first-party sources
run: |
clang-format --version
find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format -i {} +
- name: Check for differences
id: assert
shell: bash
run: |
set -o pipefail
git diff --exit-code | tee "clang-format.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: clang-format.patch
if-no-files-found: ignore
path: clang-format.patch
- name: What happened?
if: failure() && steps.assert.outcome == 'failure'
env:
PREAMBLE: |
If you are reading this, you are looking at a failed Github Actions
job. That means you pushed one or more files that did not conform
to the formatting specified in .clang-format. That may be because
you neglected to run 'git clang-format' or 'clang-format' before
committing, or that your version of clang-format has an
incompatibility with the one on this
machine, which is:
SUGGESTION: |
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run 'git-clang-format --extensions cpp,h,hpp,ipp develop'
in your repo, commit, and push.
run: |
echo "${PREAMBLE}"
clang-format --version
echo "${SUGGESTION}"
exit 1

.github/workflows/documentation.yml (new file, +53 lines)

@@ -0,0 +1,53 @@
name: Documentation
# TODO: Use `workflow_run` to trigger this workflow after checks have completed.
# This can only be done if the `checks` workflow already exists on the default
# branch (i.e. `develop`), so we cannot do this yet.
# See https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#workflow_run.
on:
push:
branches:
- develop
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
doxygen:
runs-on: ubuntu-latest
permissions:
contents: write
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Check configuration
run: |
echo "Checking path"
echo ${PATH} | tr ':' '\n'
echo "Checking environment variables."
env | sort
- name: Check versions
run: |
echo "Checking CMake version."
cmake --version
echo "Checking Doxygen version."
doxygen --version
- name: Build documentation
run: |
mkdir build
cd build
cmake -Donly_docs=TRUE ..
cmake --build . --target docs --parallel $(nproc)
- name: Publish documentation
uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: build/docs/html


@@ -1,37 +0,0 @@
name: Build and publish Doxygen documentation
# To test this workflow, push your changes to your fork's `develop` branch.
on:
push:
branches:
- develop
- doxygen
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
documentation:
runs-on: ubuntu-latest
permissions:
contents: write
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
steps:
- name: checkout
uses: actions/checkout@v4
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
cmake --version
doxygen --version
env | sort
- name: build
run: |
mkdir build
cd build
cmake -Donly_docs=TRUE ..
cmake --build . --target docs --parallel $(nproc)
- name: publish
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: build/docs/html


@@ -1,53 +0,0 @@
name: levelization
on:
push:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
jobs:
check:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
runs-on: ubuntu-latest
env:
CLANG_VERSION: 10
steps:
- uses: actions/checkout@v4
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Check for differences
id: assert
run: |
set -o pipefail
git diff --exit-code | tee "levelization.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: levelization.patch
if-no-files-found: ignore
path: levelization.patch
- name: What happened?
if: failure() && steps.assert.outcome == 'failure'
env:
MESSAGE: |
If you are reading this, you are looking at a failed Github
Actions job. That means you changed the dependency relationships
between the modules in rippled. That may be an improvement or a
regression. This check doesn't judge.
A rule of thumb, though, is that if your changes caused
something to be removed from loops.txt, that's probably an
improvement. If something was added, it's probably a regression.
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run './Builds/levelization/levelization.sh' in your repo,
commit, and push.
See Builds/levelization/README.md for more info.
run: |
echo "${MESSAGE}"
exit 1


@@ -1,126 +0,0 @@
name: macos
on:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows, instrumentation)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- 'ci/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# This part of Conan configuration is specific to this workflow only; we do not want
# to pollute conan/profiles directory with settings which might not work for others
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
core.download:parallel={{os.cpu_count()}}
core.upload:parallel={{os.cpu_count()}}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
jobs:
test:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
strategy:
matrix:
platform:
- macos
generator:
- Ninja
configuration:
- Release
runs-on: [self-hosted, macOS, mac-runner-m1]
env:
# The `build` action requires these variables.
build_dir: .build
NUM_PROCESSORS: 12
steps:
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: install Conan
run: |
brew install conan
- name: install Ninja
if: matrix.generator == 'Ninja'
run: brew install ninja
- name: install python
run: |
if which python > /dev/null 2>&1; then
echo "Python executable exists"
else
brew install python@3.13
ln -s /opt/homebrew/bin/python3 /opt/homebrew/bin/python
fi
- name: install cmake
run: |
if which cmake > /dev/null 2>&1; then
echo "cmake executable exists"
else
brew install cmake
fi
- name: install nproc
run: |
brew install coreutils
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
python --version
conan --version
cmake --version
nproc --version
echo -n "nproc returns: "
nproc
system_profiler SPHardwareDataType
sysctl -n hw.logicalcpu
clang --version
- name: configure Conan
run : |
echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
- name: export custom recipes
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 9.7.3 external/rocksdb
conan export --version 4.0.3 external/soci
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash
run: |
if conan remote list | grep -q "ripple"; then
conan remote remove ripple
echo "Removed conan remote ripple"
fi
conan remote add --index 0 ripple "${CONAN_URL}"
echo "Added conan remote ripple at ${CONAN_URL}"
- name: build dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: ${{ matrix.generator }}
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
n=$(nproc)
echo "Using $n test jobs"
cd ${build_dir}
./rippled --unittest --unittest-jobs $n
ctest -j $n --output-on-failure
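The repeated check-then-install steps above could be collapsed into a small helper; a sketch (the `ensure` function is our illustration, not part of the workflow):
```
# Install a Homebrew formula only when the executable is missing from PATH.
ensure() {
  local exe="$1" formula="${2:-$1}"
  if which "${exe}" > /dev/null 2>&1; then
    echo "${exe} executable exists"
  else
    brew install "${formula}"
  fi
}
ensure python python@3.13
ensure cmake
ensure ninja
```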

.github/workflows/main.yml
View File

@@ -0,0 +1,90 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as MacOS and Windows.
name: Main
# This workflow is triggered on every push to the repository, including pull
# requests. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label. As GitHub Actions does not support such `if`
# conditions here, the individual jobs will check the pull request state and
# labels to determine whether to run or not. The workflows called by the jobs
# may also have their own conditions to partially or completely skip execution
# as needed.
on: push
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
# check-clang-format:
# if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
# uses: ./.github/workflows/check-clang-format.yml
#
# check-levelization:
# if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
# uses: ./.github/workflows/check-levelization.yml
debian:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-debian.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
# rhel:
## needs:
## - check-clang-format
## - check-levelization
# uses: ./.github/workflows/build-rhel.yml
# with:
# conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
# conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
# secrets:
# conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
# conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
ubuntu:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-ubuntu.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
macos:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-macos.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
windows:
# needs:
# - check-clang-format
# - check-levelization
uses: ./.github/workflows/build-windows.yml
with:
conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
secrets:
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
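Each called workflow is expected to carry the draft-PR gate that `on: push` cannot express; a sketch of the job-level condition, mirroring the commented-out jobs above:
```
jobs:
  check-levelization:
    # Skip draft pull requests unless they carry the 'DraftRunCI' label.
    if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
    uses: ./.github/workflows/check-levelization.yml
```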

View File

@@ -1,60 +1,66 @@
name: missing-commits
name: Check for missing commits
# TODO: Use `workflow_run` to trigger this workflow after checks have completed.
# This can only be done if the `checks` workflow already exists on the default
# branch (i.e. `develop`), so we cannot do this yet.
# See https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#workflow_run.
on:
push:
branches:
# Only check that the branches are up to date when updating the
# relevant branches.
- develop
- release
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
up_to_date:
runs-on: ubuntu-24.04
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- name: Check for missing commits
id: commits
env:
SUGGESTION: |
MESSAGE: |
If you are reading this, then the commits indicated above are
missing from "develop" and/or "release". Do a reverse-merge
as soon as possible. See CONTRIBUTING.md for instructions.
If you are reading this, then the commits indicated above are missing
from the "develop" and/or "release" branch. Do a reverse-merge as soon
as possible. See CONTRIBUTING.md for instructions.
run: |
set -o pipefail
# Branches ordered by how "canonical" they are. Every commit in
# one branch should be in all the branches behind it
order=( master release develop )
# Branches are ordered by how "canonical" they are. Every commit in one
# branch should be in all the branches behind it.
order=(master release develop)
branches=()
for branch in "${order[@]}"
do
# Check that the branches exist so that this job will work on
# forked repos, which don't necessarily have master and
# release branches.
for branch in "${order[@]}"; do
# Check that the branches exist so that this job will work on forked
# repos, which don't necessarily have master and release branches.
echo "Checking if ${branch} exists."
if git ls-remote --exit-code --heads origin \
refs/heads/${branch} > /dev/null
then
branches+=( origin/${branch} )
refs/heads/${branch} > /dev/null; then
branches+=(origin/${branch})
fi
done
prior=()
for branch in "${branches[@]}"
do
if [[ ${#prior[@]} -ne 0 ]]
then
echo "Checking ${prior[@]} for commits missing from ${branch}"
for branch in "${branches[@]}"; do
if [[ ${#prior[@]} -ne 0 ]]; then
echo "Checking ${prior[@]} for commits missing from ${branch}."
git log --oneline --no-merges "${prior[@]}" \
^$branch | tee -a "missing-commits.txt"
echo
fi
prior+=( "${branch}" )
prior+=("${branch}")
done
if [[ $( cat missing-commits.txt | wc -l ) -ne 0 ]]
then
echo "${SUGGESTION}"
if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
echo "${MESSAGE}"
exit 1
fi
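The same check can be reproduced locally; a minimal sketch for one branch pair (no output means nothing is missing):
```
git fetch origin
# Commits reachable from master but not from develop.
git log --oneline --no-merges origin/master ^origin/develop
```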

View File

@@ -1,422 +0,0 @@
name: nix
on:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- "ci/**"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# This part of the Conan configuration is specific to this workflow only; we do not want
# to pollute the conan/profiles directory with settings which might not work for others
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# This workflow has multiple job matrices.
# They can be considered phases because most of the matrices ("test",
# "coverage", "conan") depend on the first ("dependencies").
#
# The first phase has a job in the matrix for each combination of
# variables that affects dependency ABI:
# platform, compiler, and configuration.
# It creates a GitHub artifact holding the Conan profile,
# and builds and caches binaries for all the dependencies.
# If an Artifactory remote is configured, they are cached there.
# If not, they are added to the GitHub artifact.
# GitHub's "cache" action has a size limit (10 GB) that is too small
# to hold the binaries if they are built locally.
# We must use the "{upload,download}-artifact" actions instead.
#
# The remaining phases have a job in the matrix for each test
# configuration. They install dependency binaries from the cache,
# whichever was used, and build and test rippled.
#
# "instrumentation" is independent, but is included here because it also
# builds on Linux under the same "on:" conditions.
jobs:
dependencies:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
- clang
configuration:
- Debug
- Release
include:
- compiler: gcc
compiler_version: 12
distro: ubuntu
codename: jammy
- compiler: clang
compiler_version: 16
distro: debian
codename: bookworm
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/${{ matrix.distro }}-${{ matrix.codename }}:${{ matrix.compiler }}-${{ matrix.compiler_version }}
env:
build_dir: .build
steps:
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
lsb_release -a || true
${{ matrix.compiler }}-${{ matrix.compiler_version }} --version
conan --version
cmake --version
env | sort
- name: configure Conan
run: |
echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
- name: archive profile
# Create this archive before dependencies are added to the local cache.
run: tar -czf conan.tar.gz -C ${CONAN_HOME} .
- name: build dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: upload archive
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
path: conan.tar.gz
if-no-files-found: error
test:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
- clang
configuration:
- Debug
- Release
include:
- compiler: gcc
compiler_version: 12
distro: ubuntu
codename: jammy
- compiler: clang
compiler_version: 16
distro: debian
codename: bookworm
cmake-args:
-
- "-Dunity=ON"
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/${{ matrix.distro }}-${{ matrix.codename }}:${{ matrix.compiler }}-${{ matrix.compiler_version }}
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: check linking
run: |
cd ${build_dir}
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: test
run: |
cd ${build_dir}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
reference-fee-test:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
configuration:
- Debug
cmake-args:
- "-DUNIT_TEST_REFERENCE_FEE=200"
- "-DUNIT_TEST_REFERENCE_FEE=1000"
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
cd ${build_dir}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
coverage:
strategy:
fail-fast: false
matrix:
platform:
- linux
compiler:
- gcc
configuration:
- Debug
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
gcovr --version
env | sort
ls ${CONAN_HOME}
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: build
uses: ./.github/actions/build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: >-
-Dassert=TRUE
-Dwerr=TRUE
-Dcoverage=ON
-Dcoverage_format=xml
-DCODE_COVERAGE_VERBOSE=ON
-DCMAKE_CXX_FLAGS="-O0"
-DCMAKE_C_FLAGS="-O0"
cmake-target: coverage
- name: move coverage report
shell: bash
run: |
mv "${build_dir}/coverage.xml" ./
- name: archive coverage report
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: coverage.xml
path: coverage.xml
retention-days: 30
- name: upload coverage report
uses: wandalen/wretry.action@v1.4.10
with:
action: codecov/codecov-action@v4.5.0
with: |
files: coverage.xml
fail_ci_if_error: true
disable_search: true
verbose: true
plugin: noop
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 210000 # in milliseconds
conan:
needs: dependencies
runs-on: [self-hosted, heavy]
container:
image: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
env:
build_dir: .build
platform: linux
compiler: gcc
compiler_version: 12
configuration: Release
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: ${{ env.platform }}-${{ env.compiler }}-${{ env.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
env | sort
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ env.configuration }}
- name: export
run: |
conan export . --version head
- name: build
run: |
cd tests/conan
mkdir ${build_dir} && cd ${build_dir}
conan install .. \
--settings:all build_type=${configuration} \
--output-folder . \
--build missing
cmake .. \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=./build/${configuration}/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${configuration}
cmake --build .
./example | grep '^[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+'
instrumentation-build:
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/debian-bookworm:clang-16
env:
build_dir: .build
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: linux-clang-Debug
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
env | sort
ls ${CONAN_HOME}
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: Debug
- name: prepare environment
run: |
mkdir -p ${build_dir}
echo "SOURCE_DIR=$(pwd)" >> $GITHUB_ENV
echo "BUILD_DIR=$(pwd)/${build_dir}" >> $GITHUB_ENV
- name: build with instrumentation
run: |
cd ${BUILD_DIR}
cmake -S ${SOURCE_DIR} -B ${BUILD_DIR} \
-Dvoidstar=ON \
-Dtests=ON \
-Dxrpld=ON \
-DCMAKE_BUILD_TYPE=Debug \
-DSECP256K1_BUILD_BENCHMARK=OFF \
-DSECP256K1_BUILD_TESTS=OFF \
-DSECP256K1_BUILD_EXHAUSTIVE_TESTS=OFF \
-DCMAKE_TOOLCHAIN_FILE=${BUILD_DIR}/build/generators/conan_toolchain.cmake
cmake --build . --parallel $(nproc)
- name: verify instrumentation enabled
run: |
cd ${BUILD_DIR}
./rippled --version | grep libvoidstar
- name: run unit tests
run: |
cd ${BUILD_DIR}
./rippled -u --unittest-jobs $(( $(nproc)/4 ))
ctest -j $(nproc) --output-on-failure
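The handoff between the `dependencies` job and its consumers is a tar round-trip of the Conan home, holding at least the profiles (and the dependency binaries too when no Artifactory remote is available); condensed, the two ends are:
```
# Producer: archive the Conan home for upload as a GitHub artifact.
tar -czf conan.tar.gz -C ${CONAN_HOME} .

# Consumer: recreate the Conan home from the downloaded artifact.
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
```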

View File

@@ -1,122 +0,0 @@
name: windows
on:
pull_request:
types: [opened, reopened, synchronize, ready_for_review]
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows, instrumentation)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- 'ci/**'
# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# This part of the Conan configuration is specific to this workflow only; we do not want
# to pollute the conan/profiles directory with settings which might not work for others
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
core.download:parallel={{os.cpu_count()}}
core.upload:parallel={{os.cpu_count()}}
tools.build:jobs=24
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
jobs:
test:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
strategy:
fail-fast: false
matrix:
version:
- generator: Visual Studio 17 2022
runs-on: windows-2022
configuration:
- type: Release
tests: true
- type: Debug
# Skip running unit tests on debug builds, because they
# take an unreasonable amount of time
tests: false
runtime: d
runs-on: ${{ matrix.version.runs-on }}
env:
build_dir: .build
steps:
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: choose Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: 3.13
- name: learn Python cache directory
id: pip-cache
shell: bash
run: |
python -m pip install --upgrade pip
echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
- name: restore Python cache directory
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
- name: install Conan
run: pip install wheel conan
- name: check environment
run: |
dir env:
$env:PATH -split ';'
python --version
conan --version
cmake --version
- name: configure Conan
shell: bash
run: |
echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
- name: export custom recipes
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 9.7.3 external/rocksdb
conan export --version 4.0.3 external/soci
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash
run: |
if conan remote list | grep -q "ripple"; then
conan remote remove ripple
echo "Removed conan remote ripple"
fi
conan remote add --index 0 ripple "${CONAN_URL}"
echo "Added conan remote ripple at ${CONAN_URL}"
- name: build dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration.type }}
- name: build
uses: ./.github/actions/build
with:
generator: '${{ matrix.version.generator }}'
configuration: ${{ matrix.configuration.type }}
# Hard code for now. Move to the matrix if varied options are needed
cmake-args: '-Dassert=TRUE -Dwerr=TRUE -Dreporting=OFF -Dunity=ON'
cmake-target: install
- name: test
shell: bash
if: ${{ matrix.configuration.tests }}
run: |
cd ${build_dir}/${{ matrix.configuration.type }}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure

View File

@@ -171,14 +171,6 @@ which allows you to statically link it with GCC, if you want.
conan export --version 1.1.10 external/snappy
```
Export our [Conan recipe for RocksDB](./external/rocksdb).
It does not override paths to dependencies when building with Visual Studio.
```
# Conan 2.x
conan export --version 9.7.3 external/rocksdb
```
Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.
@@ -378,18 +370,13 @@ and can be helpful for detecting `#include` omissions.
## Troubleshooting
### Conan
After any updates or changes to dependencies, you may need to do the following:
1. Remove your build directory.
2. Remove the Conan cache:
```
rm -rf ~/.conan/data
```
4. Re-run [conan install](#build-and-test).
2. Remove the Conan cache: `conan remove "*" -c`
3. Re-run [conan install](#build-and-test).
### 'protobuf/port_def.inc' file not found
@@ -407,54 +394,6 @@ For example, if you want to build Debug:
1. For conan install, pass `--settings build_type=Debug`
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
### no std::result_of
If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.
```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
```
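Note that `conan profile update` is Conan 1.x syntax. Under Conan 2 the rough equivalent (an assumption on our part, not taken from this repo's docs) is to set the flag in the profile file itself, in the same form this repo's profile templates use:
```
[conf]
tools.build:cxxflags=['-DBOOST_ASIO_HAS_STD_INVOKE_RESULT']
```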
### call to 'async_teardown' is ambiguous
If you are compiling with an early version of Clang 16, then you might hit
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
header][7]. You can workaround it by adding this preprocessor definition:
```
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
```
### recompile with -fPIC
If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:
```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```
Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.
```
conan install --build boost ...
```
## Add a Dependency
If you want to experiment with a new package, follow these steps:

View File

@@ -72,15 +72,15 @@ It generates many files of [results](results):
desired as described above. In a perfect repo, this file will be
empty.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../.github/workflows/levelization.yml) to validate
Github workflow](../../.github/workflows/check-levelization.yml) to validate
that nothing changed.
* [`ordering.txt`](results/ordering.txt): A list showing relationships
between modules where there are no loops as they actually exist, as
opposed to how they are desired as described above.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../.github/workflows/levelization.yml) to validate
Github workflow](../../.github/workflows/check-levelization.yml) to validate
that nothing changed.
* [`levelization.yml`](../../.github/workflows/levelization.yml)
* [`levelization.yml`](../../.github/workflows/check-levelization.yml)
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or

View File

@@ -90,28 +90,15 @@ if (MSVC)
-errorreport:none
-machine:X64)
else ()
# HACK : because these need to come first, before any warning demotion
string (APPEND CMAKE_CXX_FLAGS " -Wall -Wdeprecated")
if (wextra)
string (APPEND CMAKE_CXX_FLAGS " -Wextra -Wno-unused-parameter")
endif ()
# not MSVC
target_compile_options (common
INTERFACE
-Wall
-Wdeprecated
$<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
$<$<BOOL:${werr}>:-Werror>
$<$<COMPILE_LANGUAGE:CXX>:
-frtti
-Wnon-virtual-dtor
>
-Wno-sign-compare
-Wno-char-subscripts
-Wno-format
-Wno-unused-local-typedefs
-fstack-protector
$<$<BOOL:${is_gcc}>:
-Wno-unused-but-set-variable
-Wno-deprecated
>
-Wno-sign-compare
-Wno-unused-but-set-variable
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
# tweak gcc optimization for debug
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>

View File

@@ -26,9 +26,6 @@ tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% if compiler == "apple-clang" and compiler_version >= 17 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "clang" and compiler_version == 16 %}
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags=['-Wno-restrict']
{% endif %}

View File

@@ -104,7 +104,7 @@ class Xrpl(ConanFile):
def requirements(self):
# Conan 2 requires transitive headers to be specified
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
self.requires('boost/1.83.0', force=True, **transitive_headers_opt)
self.requires('boost/1.86.0', force=True, **transitive_headers_opt)
self.requires('date/3.0.4', **transitive_headers_opt)
self.requires('lz4/1.10.0', force=True)
self.requires('protobuf/3.21.12', force=True)
@@ -112,7 +112,7 @@ class Xrpl(ConanFile):
if self.options.jemalloc:
self.requires('jemalloc/5.3.0')
if self.options.rocksdb:
self.requires('rocksdb/9.7.3')
self.requires('rocksdb/10.0.1')
self.requires('xxhash/0.8.3', **transitive_headers_opt)
exports_sources = (

View File

@@ -17,6 +17,9 @@ add_library(ed25519 STATIC
)
add_library(ed25519::ed25519 ALIAS ed25519)
target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)
if(NOT MSVC)
target_compile_options(ed25519 PRIVATE -Wno-implicit-fallthrough)
endif()
include(GNUInstallDirs)

View File

@@ -1,12 +0,0 @@
sources:
"9.7.3":
url: "https://github.com/facebook/rocksdb/archive/refs/tags/v9.7.3.tar.gz"
sha256: "acfabb989cbfb5b5c4d23214819b059638193ec33dad2d88373c46448d16d38b"
patches:
"9.7.3":
- patch_file: "patches/9.x.x-0001-exclude-thirdparty.patch"
patch_description: "Do not include thirdparty.inc"
patch_type: "portability"
- patch_file: "patches/9.7.3-0001-memory-leak.patch"
patch_description: "Fix a leak of obsolete blob files left open until DB::Close()"
patch_type: "portability"

View File

@@ -1,235 +0,0 @@
import os
import glob
import shutil
from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, collect_libs, copy, export_conandata_patches, get, rm, rmdir
from conan.tools.microsoft import check_min_vs, is_msvc, is_msvc_static_runtime
from conan.tools.scm import Version
required_conan_version = ">=1.53.0"
class RocksDBConan(ConanFile):
name = "rocksdb"
description = "A library that provides an embeddable, persistent key-value store for fast storage"
license = ("GPL-2.0-only", "Apache-2.0")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/facebook/rocksdb"
topics = ("database", "leveldb", "facebook", "key-value")
package_type = "library"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"lite": [True, False],
"with_gflags": [True, False],
"with_snappy": [True, False],
"with_lz4": [True, False],
"with_zlib": [True, False],
"with_zstd": [True, False],
"with_tbb": [True, False],
"with_jemalloc": [True, False],
"enable_sse": [False, "sse42", "avx2"],
"use_rtti": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"lite": False,
"with_snappy": False,
"with_lz4": False,
"with_zlib": False,
"with_zstd": False,
"with_gflags": False,
"with_tbb": False,
"with_jemalloc": False,
"enable_sse": False,
"use_rtti": False,
}
@property
def _min_cppstd(self):
return "11" if Version(self.version) < "8.8.1" else "17"
@property
def _compilers_minimum_version(self):
return {} if self._min_cppstd == "11" else {
"apple-clang": "10",
"clang": "7",
"gcc": "7",
"msvc": "191",
"Visual Studio": "15",
}
def export_sources(self):
export_conandata_patches(self)
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
if self.settings.arch != "x86_64":
del self.options.with_tbb
if self.settings.build_type == "Debug":
self.options.use_rtti = True  # RTTI is used in asserts for debug mode...
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def layout(self):
cmake_layout(self, src_folder="src")
def requirements(self):
if self.options.with_gflags:
self.requires("gflags/2.2.2")
if self.options.with_snappy:
self.requires("snappy/1.1.10")
if self.options.with_lz4:
self.requires("lz4/1.10.0")
if self.options.with_zlib:
self.requires("zlib/[>=1.2.11 <2]")
if self.options.with_zstd:
self.requires("zstd/1.5.6")
if self.options.get_safe("with_tbb"):
self.requires("onetbb/2021.12.0")
if self.options.with_jemalloc:
self.requires("jemalloc/5.3.0")
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, self._min_cppstd)
minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), False)
if minimum_version and Version(self.settings.compiler.version) < minimum_version:
raise ConanInvalidConfiguration(
f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support."
)
if self.settings.arch not in ["x86_64", "ppc64le", "ppc64", "mips64", "armv8"]:
raise ConanInvalidConfiguration("Rocksdb requires 64 bits")
check_min_vs(self, "191")
if self.version == "6.20.3" and \
self.settings.os == "Linux" and \
self.settings.compiler == "gcc" and \
Version(self.settings.compiler.version) < "5":
raise ConanInvalidConfiguration("Rocksdb 6.20.3 is not compilable with gcc <5.") # See https://github.com/facebook/rocksdb/issues/3522
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.variables["FAIL_ON_WARNINGS"] = False
tc.variables["WITH_TESTS"] = False
tc.variables["WITH_TOOLS"] = False
tc.variables["WITH_CORE_TOOLS"] = False
tc.variables["WITH_BENCHMARK_TOOLS"] = False
tc.variables["WITH_FOLLY_DISTRIBUTED_MUTEX"] = False
if is_msvc(self):
tc.variables["WITH_MD_LIBRARY"] = not is_msvc_static_runtime(self)
tc.variables["ROCKSDB_INSTALL_ON_WINDOWS"] = self.settings.os == "Windows"
tc.variables["ROCKSDB_LITE"] = self.options.lite
tc.variables["WITH_GFLAGS"] = self.options.with_gflags
tc.variables["WITH_SNAPPY"] = self.options.with_snappy
tc.variables["WITH_LZ4"] = self.options.with_lz4
tc.variables["WITH_ZLIB"] = self.options.with_zlib
tc.variables["WITH_ZSTD"] = self.options.with_zstd
tc.variables["WITH_TBB"] = self.options.get_safe("with_tbb", False)
tc.variables["WITH_JEMALLOC"] = self.options.with_jemalloc
tc.variables["ROCKSDB_BUILD_SHARED"] = self.options.shared
tc.variables["ROCKSDB_LIBRARY_EXPORTS"] = self.settings.os == "Windows" and self.options.shared
tc.variables["ROCKSDB_DLL" ] = self.settings.os == "Windows" and self.options.shared
tc.variables["USE_RTTI"] = self.options.use_rtti
if not bool(self.options.enable_sse):
tc.variables["PORTABLE"] = True
tc.variables["FORCE_SSE42"] = False
elif self.options.enable_sse == "sse42":
tc.variables["PORTABLE"] = True
tc.variables["FORCE_SSE42"] = True
elif self.options.enable_sse == "avx2":
tc.variables["PORTABLE"] = False
tc.variables["FORCE_SSE42"] = False
# not available yet in CCI
tc.variables["WITH_NUMA"] = False
tc.generate()
deps = CMakeDeps(self)
if self.options.with_jemalloc:
deps.set_property("jemalloc", "cmake_file_name", "JeMalloc")
deps.set_property("jemalloc", "cmake_target_name", "JeMalloc::JeMalloc")
if self.options.with_zstd:
deps.set_property("zstd", "cmake_target_name", "zstd::zstd")
deps.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def _remove_static_libraries(self):
rm(self, "rocksdb.lib", os.path.join(self.package_folder, "lib"))
for lib in glob.glob(os.path.join(self.package_folder, "lib", "*.a")):
if not lib.endswith(".dll.a"):
os.remove(lib)
def _remove_cpp_headers(self):
for path in glob.glob(os.path.join(self.package_folder, "include", "rocksdb", "*")):
if path != os.path.join(self.package_folder, "include", "rocksdb", "c.h"):
if os.path.isfile(path):
os.remove(path)
else:
shutil.rmtree(path)
def package(self):
copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
copy(self, "LICENSE*", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
cmake = CMake(self)
cmake.install()
if self.options.shared:
self._remove_static_libraries()
self._remove_cpp_headers() # Force stable ABI for shared libraries
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig"))
def package_info(self):
cmake_target = "rocksdb-shared" if self.options.shared else "rocksdb"
self.cpp_info.set_property("cmake_file_name", "RocksDB")
self.cpp_info.set_property("cmake_target_name", f"RocksDB::{cmake_target}")
# TODO: back to global scope in conan v2 once cmake_find_package* generators removed
self.cpp_info.components["librocksdb"].libs = collect_libs(self)
if self.settings.os == "Windows":
self.cpp_info.components["librocksdb"].system_libs = ["shlwapi", "rpcrt4"]
if self.options.shared:
self.cpp_info.components["librocksdb"].defines = ["ROCKSDB_DLL"]
elif self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["librocksdb"].system_libs = ["pthread", "m"]
if self.options.lite:
self.cpp_info.components["librocksdb"].defines.append("ROCKSDB_LITE")
# TODO: to remove in conan v2 once cmake_find_package* generators removed
self.cpp_info.names["cmake_find_package"] = "RocksDB"
self.cpp_info.names["cmake_find_package_multi"] = "RocksDB"
self.cpp_info.components["librocksdb"].names["cmake_find_package"] = cmake_target
self.cpp_info.components["librocksdb"].names["cmake_find_package_multi"] = cmake_target
self.cpp_info.components["librocksdb"].set_property("cmake_target_name", f"RocksDB::{cmake_target}")
if self.options.with_gflags:
self.cpp_info.components["librocksdb"].requires.append("gflags::gflags")
if self.options.with_snappy:
self.cpp_info.components["librocksdb"].requires.append("snappy::snappy")
if self.options.with_lz4:
self.cpp_info.components["librocksdb"].requires.append("lz4::lz4")
if self.options.with_zlib:
self.cpp_info.components["librocksdb"].requires.append("zlib::zlib")
if self.options.with_zstd:
self.cpp_info.components["librocksdb"].requires.append("zstd::zstd")
if self.options.get_safe("with_tbb"):
self.cpp_info.components["librocksdb"].requires.append("onetbb::onetbb")
if self.options.with_jemalloc:
self.cpp_info.components["librocksdb"].requires.append("jemalloc::jemalloc")

View File

@@ -1,319 +0,0 @@
diff --git a/HISTORY.md b/HISTORY.md
index 36d472229..05ad1a202 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,6 +1,10 @@
# Rocksdb Change Log
> NOTE: Entries for next release do not go here. Follow instructions in `unreleased_history/README.txt`
+## 9.7.4 (10/31/2024)
+### Bug Fixes
+* Fix a leak of obsolete blob files left open until DB::Close(). This bug was introduced in version 9.4.0.
+
## 9.7.3 (10/16/2024)
### Behavior Changes
* OPTIONS file to be loaded by remote worker is now preserved so that it does not get purged by the primary host. A similar technique as how we are preserving new SST files from getting purged is used for this. min_options_file_numbers_ is tracked like pending_outputs_ is tracked.
diff --git a/db/blob/blob_file_cache.cc b/db/blob/blob_file_cache.cc
index 5f340aadf..1b9faa238 100644
--- a/db/blob/blob_file_cache.cc
+++ b/db/blob/blob_file_cache.cc
@@ -42,6 +42,7 @@ Status BlobFileCache::GetBlobFileReader(
assert(blob_file_reader);
assert(blob_file_reader->IsEmpty());
+ // NOTE: sharing same Cache with table_cache
const Slice key = GetSliceForKey(&blob_file_number);
assert(cache_);
@@ -98,4 +99,13 @@ Status BlobFileCache::GetBlobFileReader(
return Status::OK();
}
+void BlobFileCache::Evict(uint64_t blob_file_number) {
+ // NOTE: sharing same Cache with table_cache
+ const Slice key = GetSliceForKey(&blob_file_number);
+
+ assert(cache_);
+
+ cache_.get()->Erase(key);
+}
+
} // namespace ROCKSDB_NAMESPACE
diff --git a/db/blob/blob_file_cache.h b/db/blob/blob_file_cache.h
index 740e67ada..6858d012b 100644
--- a/db/blob/blob_file_cache.h
+++ b/db/blob/blob_file_cache.h
@@ -36,6 +36,15 @@ class BlobFileCache {
uint64_t blob_file_number,
CacheHandleGuard<BlobFileReader>* blob_file_reader);
+ // Called when a blob file is obsolete to ensure it is removed from the cache
+ // to avoid effectively leaking the open file and associated memory
+ void Evict(uint64_t blob_file_number);
+
+ // Used to identify cache entries for blob files (not normally useful)
+ static const Cache::CacheItemHelper* GetHelper() {
+ return CacheInterface::GetBasicHelper();
+ }
+
private:
using CacheInterface =
BasicTypedCacheInterface<BlobFileReader, CacheEntryRole::kMisc>;
diff --git a/db/column_family.h b/db/column_family.h
index e4b7adde8..86637736a 100644
--- a/db/column_family.h
+++ b/db/column_family.h
@@ -401,6 +401,7 @@ class ColumnFamilyData {
SequenceNumber earliest_seq);
TableCache* table_cache() const { return table_cache_.get(); }
+ BlobFileCache* blob_file_cache() const { return blob_file_cache_.get(); }
BlobSource* blob_source() const { return blob_source_.get(); }
// See documentation in compaction_picker.h
diff --git a/db/db_impl/db_impl.cc b/db/db_impl/db_impl.cc
index 261593423..06573ac2e 100644
--- a/db/db_impl/db_impl.cc
+++ b/db/db_impl/db_impl.cc
@@ -659,8 +659,9 @@ Status DBImpl::CloseHelper() {
// We need to release them before the block cache is destroyed. The block
// cache may be destroyed inside versions_.reset(), when column family data
// list is destroyed, so leaving handles in table cache after
- // versions_.reset() may cause issues.
- // Here we clean all unreferenced handles in table cache.
+ // versions_.reset() may cause issues. Here we clean all unreferenced handles
+ // in table cache, and (for certain builds/conditions) assert that no obsolete
+ // files are hanging around unreferenced (leak) in the table/blob file cache.
// Now we assume all user queries have finished, so only version set itself
// can possibly hold the blocks from block cache. After releasing unreferenced
// handles here, only handles held by version set left and inside
@@ -668,6 +669,9 @@ Status DBImpl::CloseHelper() {
// time a handle is released, we erase it from the cache too. By doing that,
// we can guarantee that after versions_.reset(), table cache is empty
// so the cache can be safely destroyed.
+#ifndef NDEBUG
+ TEST_VerifyNoObsoleteFilesCached(/*db_mutex_already_held=*/true);
+#endif // !NDEBUG
table_cache_->EraseUnRefEntries();
for (auto& txn_entry : recovered_transactions_) {
@@ -3227,6 +3231,8 @@ Status DBImpl::MultiGetImpl(
s = Status::Aborted();
break;
}
+ // This could be a long-running operation
+ ROCKSDB_THREAD_YIELD_HOOK();
}
// Post processing (decrement reference counts and record statistics)
diff --git a/db/db_impl/db_impl.h b/db/db_impl/db_impl.h
index 5e4fa310b..ccc0abfa7 100644
--- a/db/db_impl/db_impl.h
+++ b/db/db_impl/db_impl.h
@@ -1241,9 +1241,14 @@ class DBImpl : public DB {
static Status TEST_ValidateOptions(const DBOptions& db_options) {
return ValidateOptions(db_options);
}
-
#endif // NDEBUG
+ // In certain configurations, verify that the table/blob file cache only
+ // contains entries for live files, to check for effective leaks of open
+ // files. This can only be called when purging of obsolete files has
+ // "settled," such as during parts of DB Close().
+ void TEST_VerifyNoObsoleteFilesCached(bool db_mutex_already_held) const;
+
// persist stats to column family "_persistent_stats"
void PersistStats();
diff --git a/db/db_impl/db_impl_debug.cc b/db/db_impl/db_impl_debug.cc
index 790a50d7a..67f5b4aaf 100644
--- a/db/db_impl/db_impl_debug.cc
+++ b/db/db_impl/db_impl_debug.cc
@@ -9,6 +9,7 @@
#ifndef NDEBUG
+#include "db/blob/blob_file_cache.h"
#include "db/column_family.h"
#include "db/db_impl/db_impl.h"
#include "db/error_handler.h"
@@ -328,5 +329,49 @@ size_t DBImpl::TEST_EstimateInMemoryStatsHistorySize() const {
InstrumentedMutexLock l(&const_cast<DBImpl*>(this)->stats_history_mutex_);
return EstimateInMemoryStatsHistorySize();
}
+
+void DBImpl::TEST_VerifyNoObsoleteFilesCached(
+ bool db_mutex_already_held) const {
+ // This check is somewhat expensive and obscure to make a part of every
+ // unit test in every build variety. Thus, we only enable it for ASAN builds.
+ if (!kMustFreeHeapAllocations) {
+ return;
+ }
+
+ std::optional<InstrumentedMutexLock> l;
+ if (db_mutex_already_held) {
+ mutex_.AssertHeld();
+ } else {
+ l.emplace(&mutex_);
+ }
+
+ std::vector<uint64_t> live_files;
+ for (auto cfd : *versions_->GetColumnFamilySet()) {
+ if (cfd->IsDropped()) {
+ continue;
+ }
+ // Sneakily add both SST and blob files to the same list
+ cfd->current()->AddLiveFiles(&live_files, &live_files);
+ }
+ std::sort(live_files.begin(), live_files.end());
+
+ auto fn = [&live_files](const Slice& key, Cache::ObjectPtr, size_t,
+ const Cache::CacheItemHelper* helper) {
+ if (helper != BlobFileCache::GetHelper()) {
+ // Skip non-blob files for now
+ // FIXME: diagnose and fix the leaks of obsolete SST files revealed in
+ // unit tests.
+ return;
+ }
+ // See TableCache and BlobFileCache
+ assert(key.size() == sizeof(uint64_t));
+ uint64_t file_number;
+ GetUnaligned(reinterpret_cast<const uint64_t*>(key.data()), &file_number);
+ // Assert file is in sorted live_files
+ assert(
+ std::binary_search(live_files.begin(), live_files.end(), file_number));
+ };
+ table_cache_->ApplyToAllEntries(fn, {});
+}
} // namespace ROCKSDB_NAMESPACE
#endif // NDEBUG
diff --git a/db/db_iter.cc b/db/db_iter.cc
index e02586377..bf4749eb9 100644
--- a/db/db_iter.cc
+++ b/db/db_iter.cc
@@ -540,6 +540,8 @@ bool DBIter::FindNextUserEntryInternal(bool skipping_saved_key,
} else {
iter_.Next();
}
+ // This could be a long-running operation due to tombstones, etc.
+ ROCKSDB_THREAD_YIELD_HOOK();
} while (iter_.Valid());
valid_ = false;
diff --git a/db/table_cache.cc b/db/table_cache.cc
index 71fc29c32..8a5be75e8 100644
--- a/db/table_cache.cc
+++ b/db/table_cache.cc
@@ -164,6 +164,7 @@ Status TableCache::GetTableReader(
}
Cache::Handle* TableCache::Lookup(Cache* cache, uint64_t file_number) {
+ // NOTE: sharing same Cache with BlobFileCache
Slice key = GetSliceForFileNumber(&file_number);
return cache->Lookup(key);
}
@@ -179,6 +180,7 @@ Status TableCache::FindTable(
size_t max_file_size_for_l0_meta_pin, Temperature file_temperature) {
PERF_TIMER_GUARD_WITH_CLOCK(find_table_nanos, ioptions_.clock);
uint64_t number = file_meta.fd.GetNumber();
+ // NOTE: sharing same Cache with BlobFileCache
Slice key = GetSliceForFileNumber(&number);
*handle = cache_.Lookup(key);
TEST_SYNC_POINT_CALLBACK("TableCache::FindTable:0",
diff --git a/db/version_builder.cc b/db/version_builder.cc
index ed8ab8214..c98f53f42 100644
--- a/db/version_builder.cc
+++ b/db/version_builder.cc
@@ -24,6 +24,7 @@
#include <vector>
#include "cache/cache_reservation_manager.h"
+#include "db/blob/blob_file_cache.h"
#include "db/blob/blob_file_meta.h"
#include "db/dbformat.h"
#include "db/internal_stats.h"
@@ -744,12 +745,9 @@ class VersionBuilder::Rep {
return Status::Corruption("VersionBuilder", oss.str());
}
- // Note: we use C++11 for now but in C++14, this could be done in a more
- // elegant way using generalized lambda capture.
- VersionSet* const vs = version_set_;
- const ImmutableCFOptions* const ioptions = ioptions_;
-
- auto deleter = [vs, ioptions](SharedBlobFileMetaData* shared_meta) {
+ auto deleter = [vs = version_set_, ioptions = ioptions_,
+ bc = cfd_ ? cfd_->blob_file_cache()
+ : nullptr](SharedBlobFileMetaData* shared_meta) {
if (vs) {
assert(ioptions);
assert(!ioptions->cf_paths.empty());
@@ -758,6 +756,9 @@ class VersionBuilder::Rep {
vs->AddObsoleteBlobFile(shared_meta->GetBlobFileNumber(),
ioptions->cf_paths.front().path);
}
+ if (bc) {
+ bc->Evict(shared_meta->GetBlobFileNumber());
+ }
delete shared_meta;
};
@@ -766,7 +767,7 @@ class VersionBuilder::Rep {
blob_file_number, blob_file_addition.GetTotalBlobCount(),
blob_file_addition.GetTotalBlobBytes(),
blob_file_addition.GetChecksumMethod(),
- blob_file_addition.GetChecksumValue(), deleter);
+ blob_file_addition.GetChecksumValue(), std::move(deleter));
mutable_blob_file_metas_.emplace(
blob_file_number, MutableBlobFileMetaData(std::move(shared_meta)));
diff --git a/db/version_set.h b/db/version_set.h
index 9336782b1..024f869e7 100644
--- a/db/version_set.h
+++ b/db/version_set.h
@@ -1514,7 +1514,6 @@ class VersionSet {
void GetLiveFilesMetaData(std::vector<LiveFileMetaData>* metadata);
void AddObsoleteBlobFile(uint64_t blob_file_number, std::string path) {
- // TODO: Erase file from BlobFileCache?
obsolete_blob_files_.emplace_back(blob_file_number, std::move(path));
}
diff --git a/include/rocksdb/version.h b/include/rocksdb/version.h
index 2a19796b8..0afa2cab1 100644
--- a/include/rocksdb/version.h
+++ b/include/rocksdb/version.h
@@ -13,7 +13,7 @@
// minor or major version number planned for release.
#define ROCKSDB_MAJOR 9
#define ROCKSDB_MINOR 7
-#define ROCKSDB_PATCH 3
+#define ROCKSDB_PATCH 4
// Do not use these. We made the mistake of declaring macros starting with
// double underscore. Now we have to live with our choice. We'll deprecate these
diff --git a/port/port.h b/port/port.h
index 13aa56d47..141716e5b 100644
--- a/port/port.h
+++ b/port/port.h
@@ -19,3 +19,19 @@
#elif defined(OS_WIN)
#include "port/win/port_win.h"
#endif
+
+#ifdef OS_LINUX
+// A temporary hook into long-running RocksDB threads to support modifying their
+// priority etc. This should become a public API hook once the requirements
+// are better understood.
+extern "C" void RocksDbThreadYield() __attribute__((__weak__));
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+ { \
+ if (RocksDbThreadYield) { \
+ RocksDbThreadYield(); \
+ } \
+ }
+#else
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+ {}
+#endif
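Because the hook is declared `__weak__`, it is a no-op unless the embedding application supplies a strong definition; a minimal sketch of such an override on Linux (our illustration, not part of the patch):
```
#include <sched.h>

// A strong definition overrides RocksDB's weak declaration, so the
// long-running loops patched above yield the CPU at each hook point.
extern "C" void RocksDbThreadYield() { sched_yield(); }
```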

View File

@@ -1,30 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 93b884d..b715cb6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -106,14 +106,9 @@ endif()
include(CMakeDependentOption)
if(MSVC)
- option(WITH_GFLAGS "build with GFlags" OFF)
option(WITH_XPRESS "build with windows built in compression" OFF)
- option(ROCKSDB_SKIP_THIRDPARTY "skip thirdparty.inc" OFF)
-
- if(NOT ROCKSDB_SKIP_THIRDPARTY)
- include(${CMAKE_CURRENT_SOURCE_DIR}/thirdparty.inc)
- endif()
-else()
+endif()
+if(TRUE)
if(CMAKE_SYSTEM_NAME MATCHES "FreeBSD" AND NOT CMAKE_SYSTEM_NAME MATCHES "kFreeBSD")
# FreeBSD has jemalloc as default malloc
# but it does not have all the jemalloc files in include/...
@@ -126,7 +121,7 @@ else()
endif()
endif()
- if(MINGW)
+ if(MSVC OR MINGW)
option(WITH_GFLAGS "build with GFlags" OFF)
else()
option(WITH_GFLAGS "build with GFlags" ON)

View File

@@ -1,40 +0,0 @@
sources:
"1.1.10":
url: "https://github.com/google/snappy/archive/1.1.10.tar.gz"
sha256: "49d831bffcc5f3d01482340fe5af59852ca2fe76c3e05df0e67203ebbe0f1d90"
"1.1.9":
url: "https://github.com/google/snappy/archive/1.1.9.tar.gz"
sha256: "75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7"
"1.1.8":
url: "https://github.com/google/snappy/archive/1.1.8.tar.gz"
sha256: "16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f"
"1.1.7":
url: "https://github.com/google/snappy/archive/1.1.7.tar.gz"
sha256: "3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4"
patches:
"1.1.10":
- patch_file: "patches/1.1.10-0001-fix-inlining-failure.patch"
patch_description: "disable inlining for compilation error"
patch_type: "portability"
- patch_file: "patches/1.1.9-0002-no-Werror.patch"
patch_description: "disable 'warning as error' options"
patch_type: "portability"
- patch_file: "patches/1.1.10-0003-fix-clobber-list-older-llvm.patch"
patch_description: "disable inline asm on apple-clang"
patch_type: "portability"
- patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
patch_description: "remove 'disable rtti'"
patch_type: "conan"
"1.1.9":
- patch_file: "patches/1.1.9-0001-fix-inlining-failure.patch"
patch_description: "disable inlining for compilation error"
patch_type: "portability"
- patch_file: "patches/1.1.9-0002-no-Werror.patch"
patch_description: "disable 'warning as error' options"
patch_type: "portability"
- patch_file: "patches/1.1.9-0003-fix-clobber-list-older-llvm.patch"
patch_description: "disable inline asm on apple-clang"
patch_type: "portability"
- patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
patch_description: "remove 'disable rtti'"
patch_type: "conan"

View File

@@ -1,89 +0,0 @@
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.scm import Version
import os
required_conan_version = ">=1.54.0"
class SnappyConan(ConanFile):
name = "snappy"
description = "A fast compressor/decompressor"
topics = ("google", "compressor", "decompressor")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/google/snappy"
license = "BSD-3-Clause"
package_type = "library"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
}
def export_sources(self):
export_conandata_patches(self)
def config_options(self):
if self.settings.os == 'Windows':
del self.options.fPIC
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def layout(self):
cmake_layout(self, src_folder="src")
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, 11)
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.variables["SNAPPY_BUILD_TESTS"] = False
if Version(self.version) >= "1.1.8":
tc.variables["SNAPPY_FUZZING_BUILD"] = False
tc.variables["SNAPPY_REQUIRE_AVX"] = False
tc.variables["SNAPPY_REQUIRE_AVX2"] = False
tc.variables["SNAPPY_INSTALL"] = True
if Version(self.version) >= "1.1.9":
tc.variables["SNAPPY_BUILD_BENCHMARKS"] = False
tc.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
cmake = CMake(self)
cmake.install()
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
def package_info(self):
self.cpp_info.set_property("cmake_file_name", "Snappy")
self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
# TODO: back to global scope in conan v2 once cmake_find_package* generators removed
self.cpp_info.components["snappylib"].libs = ["snappy"]
if not self.options.shared:
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["snappylib"].system_libs.append("m")
# TODO: to remove in conan v2 once cmake_find_package* generators removed
self.cpp_info.names["cmake_find_package"] = "Snappy"
self.cpp_info.names["cmake_find_package_multi"] = "Snappy"
self.cpp_info.components["snappylib"].names["cmake_find_package"] = "snappy"
self.cpp_info.components["snappylib"].names["cmake_find_package_multi"] = "snappy"
self.cpp_info.components["snappylib"].set_property("cmake_target_name", "Snappy::snappy")

View File

@@ -1,13 +0,0 @@
diff --git a/snappy-stubs-internal.h b/snappy-stubs-internal.h
index 1548ed7..3b4a9f3 100644
--- a/snappy-stubs-internal.h
+++ b/snappy-stubs-internal.h
@@ -100,7 +100,7 @@
// Inlining hints.
#if HAVE_ATTRIBUTE_ALWAYS_INLINE
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#else
#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#endif // HAVE_ATTRIBUTE_ALWAYS_INLINE

View File

@@ -1,13 +0,0 @@
diff --git a/snappy.cc b/snappy.cc
index d414718..e4efb59 100644
--- a/snappy.cc
+++ b/snappy.cc
@@ -1132,7 +1132,7 @@ inline size_t AdvanceToNextTagX86Optimized(const uint8_t** ip_p, size_t* tag) {
size_t literal_len = *tag >> 2;
size_t tag_type = *tag;
bool is_literal;
-#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__)
+#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
// TODO clang misses the fact that the (c & 3) already correctly
// sets the zero flag.
asm("and $3, %k[tag_type]\n\t"

View File

@@ -1,14 +0,0 @@
Fixes the following error:
error: inlining failed in call to always_inline size_t snappy::AdvanceToNextTag(const uint8_t**, size_t*): function body can be overwritten at link time
--- snappy-stubs-internal.h
+++ snappy-stubs-internal.h
@@ -100,7 +100,7 @@
// Inlining hints.
#ifdef HAVE_ATTRIBUTE_ALWAYS_INLINE
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#else
#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#endif

View File

@@ -1,12 +0,0 @@
--- CMakeLists.txt
+++ CMakeLists.txt
@@ -69,7 +69,7 @@
- # Use -Werror for clang only.
+if(0)
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
if(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
endif(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
endif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
-
+endif()

View File

@@ -1,12 +0,0 @@
asm clobbers do not work for clang < 9 and apple-clang < 11 (found by SpaceIm)
--- snappy.cc
+++ snappy.cc
@@ -1026,7 +1026,7 @@
size_t literal_len = *tag >> 2;
size_t tag_type = *tag;
bool is_literal;
-#if defined(__GNUC__) && defined(__x86_64__)
+#if defined(__GNUC__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
// TODO clang misses the fact that the (c & 3) already correctly
// sets the zero flag.
asm("and $3, %k[tag_type]\n\t"

View File

@@ -1,20 +0,0 @@
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -53,8 +53,6 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
add_definitions(-D_HAS_EXCEPTIONS=0)
# Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Use -Wall for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
@@ -78,8 +76,6 @@ endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
# Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make

View File

@@ -1,12 +0,0 @@
sources:
"4.0.3":
url: "https://github.com/SOCI/soci/archive/v4.0.3.tar.gz"
sha256: "4b1ff9c8545c5d802fbe06ee6cd2886630e5c03bf740e269bb625b45cf934928"
patches:
"4.0.3":
- patch_file: "patches/0001-Remove-hardcoded-INSTALL_NAME_DIR-for-relocatable-li.patch"
patch_description: "Generate relocatable libraries on MacOS"
patch_type: "portability"
- patch_file: "patches/0002-Fix-soci_backend.patch"
patch_description: "Fix variable names for dependencies"
patch_type: "conan"

View File

@@ -1,212 +0,0 @@
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.microsoft import is_msvc
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration
import os
required_conan_version = ">=1.55.0"
class SociConan(ConanFile):
name = "soci"
homepage = "https://github.com/SOCI/soci"
url = "https://github.com/conan-io/conan-center-index"
description = "The C++ Database Access Library "
topics = ("mysql", "odbc", "postgresql", "sqlite3")
license = "BSL-1.0"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"empty": [True, False],
"with_sqlite3": [True, False],
"with_db2": [True, False],
"with_odbc": [True, False],
"with_oracle": [True, False],
"with_firebird": [True, False],
"with_mysql": [True, False],
"with_postgresql": [True, False],
"with_boost": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"empty": False,
"with_sqlite3": False,
"with_db2": False,
"with_odbc": False,
"with_oracle": False,
"with_firebird": False,
"with_mysql": False,
"with_postgresql": False,
"with_boost": False,
}
def export_sources(self):
export_conandata_patches(self)
def layout(self):
cmake_layout(self, src_folder="src")
def config_options(self):
if self.settings.os == "Windows":
self.options.rm_safe("fPIC")
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def requirements(self):
if self.options.with_sqlite3:
self.requires("sqlite3/3.47.0")
if self.options.with_odbc and self.settings.os != "Windows":
self.requires("odbc/2.3.11")
if self.options.with_mysql:
self.requires("libmysqlclient/8.1.0")
if self.options.with_postgresql:
self.requires("libpq/15.5")
if self.options.with_boost:
self.requires("boost/1.83.0")
@property
def _minimum_compilers_version(self):
return {
"Visual Studio": "14",
"gcc": "4.8",
"clang": "3.8",
"apple-clang": "8.0"
}
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, 11)
compiler = str(self.settings.compiler)
compiler_version = Version(self.settings.compiler.version.value)
if compiler not in self._minimum_compilers_version:
self.output.warning("{} recipe lacks information about the {} compiler support.".format(self.name, self.settings.compiler))
elif compiler_version < self._minimum_compilers_version[compiler]:
raise ConanInvalidConfiguration("{} requires a {} version >= {}".format(self.name, compiler, compiler_version))
prefix = "Dependencies for"
message = "not configured in this conan package."
if self.options.with_db2:
# self.requires("db2/0.0.0") # TODO add support for db2
raise ConanInvalidConfiguration("{} DB2 {} ".format(prefix, message))
if self.options.with_oracle:
# self.requires("oracle_db/0.0.0") # TODO add support for oracle
raise ConanInvalidConfiguration("{} ORACLE {} ".format(prefix, message))
if self.options.with_firebird:
# self.requires("firebird/0.0.0") # TODO add support for firebird
raise ConanInvalidConfiguration("{} firebird {} ".format(prefix, message))
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.variables["SOCI_SHARED"] = self.options.shared
tc.variables["SOCI_STATIC"] = not self.options.shared
tc.variables["SOCI_TESTS"] = False
tc.variables["SOCI_CXX11"] = True
tc.variables["SOCI_EMPTY"] = self.options.empty
tc.variables["WITH_SQLITE3"] = self.options.with_sqlite3
tc.variables["WITH_DB2"] = self.options.with_db2
tc.variables["WITH_ODBC"] = self.options.with_odbc
tc.variables["WITH_ORACLE"] = self.options.with_oracle
tc.variables["WITH_FIREBIRD"] = self.options.with_firebird
tc.variables["WITH_MYSQL"] = self.options.with_mysql
tc.variables["WITH_POSTGRESQL"] = self.options.with_postgresql
tc.variables["WITH_BOOST"] = self.options.with_boost
tc.generate()
deps = CMakeDeps(self)
deps.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
copy(self, "LICENSE_1_0.txt", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)
cmake = CMake(self)
cmake.install()
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
def package_info(self):
self.cpp_info.set_property("cmake_file_name", "SOCI")
target_suffix = "" if self.options.shared else "_static"
lib_prefix = "lib" if is_msvc(self) and not self.options.shared else ""
version = Version(self.version)
lib_suffix = "_{}_{}".format(version.major, version.minor) if self.settings.os == "Windows" else ""
# soci_core
self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
if self.options.with_boost:
self.cpp_info.components["soci_core"].requires.append("boost::boost")
# soci_empty
if self.options.empty:
self.cpp_info.components["soci_empty"].set_property("cmake_target_name", "SOCI::soci_empty{}".format(target_suffix))
self.cpp_info.components["soci_empty"].libs = ["{}soci_empty{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_empty"].requires = ["soci_core"]
# soci_sqlite3
if self.options.with_sqlite3:
self.cpp_info.components["soci_sqlite3"].set_property("cmake_target_name", "SOCI::soci_sqlite3{}".format(target_suffix))
self.cpp_info.components["soci_sqlite3"].libs = ["{}soci_sqlite3{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_sqlite3"].requires = ["soci_core", "sqlite3::sqlite3"]
# soci_odbc
if self.options.with_odbc:
self.cpp_info.components["soci_odbc"].set_property("cmake_target_name", "SOCI::soci_odbc{}".format(target_suffix))
self.cpp_info.components["soci_odbc"].libs = ["{}soci_odbc{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_odbc"].requires = ["soci_core"]
if self.settings.os == "Windows":
self.cpp_info.components["soci_odbc"].system_libs.append("odbc32")
else:
self.cpp_info.components["soci_odbc"].requires.append("odbc::odbc")
# soci_mysql
if self.options.with_mysql:
self.cpp_info.components["soci_mysql"].set_property("cmake_target_name", "SOCI::soci_mysql{}".format(target_suffix))
self.cpp_info.components["soci_mysql"].libs = ["{}soci_mysql{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_mysql"].requires = ["soci_core", "libmysqlclient::libmysqlclient"]
# soci_postgresql
if self.options.with_postgresql:
self.cpp_info.components["soci_postgresql"].set_property("cmake_target_name", "SOCI::soci_postgresql{}".format(target_suffix))
self.cpp_info.components["soci_postgresql"].libs = ["{}soci_postgresql{}".format(lib_prefix, lib_suffix)]
self.cpp_info.components["soci_postgresql"].requires = ["soci_core", "libpq::libpq"]
# TODO: to remove in conan v2 once cmake_find_package* generators removed
self.cpp_info.names["cmake_find_package"] = "SOCI"
self.cpp_info.names["cmake_find_package_multi"] = "SOCI"
self.cpp_info.components["soci_core"].names["cmake_find_package"] = "soci_core{}".format(target_suffix)
self.cpp_info.components["soci_core"].names["cmake_find_package_multi"] = "soci_core{}".format(target_suffix)
if self.options.empty:
self.cpp_info.components["soci_empty"].names["cmake_find_package"] = "soci_empty{}".format(target_suffix)
self.cpp_info.components["soci_empty"].names["cmake_find_package_multi"] = "soci_empty{}".format(target_suffix)
if self.options.with_sqlite3:
self.cpp_info.components["soci_sqlite3"].names["cmake_find_package"] = "soci_sqlite3{}".format(target_suffix)
self.cpp_info.components["soci_sqlite3"].names["cmake_find_package_multi"] = "soci_sqlite3{}".format(target_suffix)
if self.options.with_odbc:
self.cpp_info.components["soci_odbc"].names["cmake_find_package"] = "soci_odbc{}".format(target_suffix)
self.cpp_info.components["soci_odbc"].names["cmake_find_package_multi"] = "soci_odbc{}".format(target_suffix)
if self.options.with_mysql:
self.cpp_info.components["soci_mysql"].names["cmake_find_package"] = "soci_mysql{}".format(target_suffix)
self.cpp_info.components["soci_mysql"].names["cmake_find_package_multi"] = "soci_mysql{}".format(target_suffix)
if self.options.with_postgresql:
self.cpp_info.components["soci_postgresql"].names["cmake_find_package"] = "soci_postgresql{}".format(target_suffix)
self.cpp_info.components["soci_postgresql"].names["cmake_find_package_multi"] = "soci_postgresql{}".format(target_suffix)
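
For reference, a minimal consumer of the targets this recipe exported; a hypothetical snippet, assuming the package was built with `-o soci/*:with_sqlite3=True` and the executable links SOCI::soci_core and SOCI::soci_sqlite3:

#include <soci/soci.h>
#include <soci/sqlite3/soci-sqlite3.h>
#include <iostream>

int main()
{
    soci::session sql(soci::sqlite3, "example.db");
    sql << "CREATE TABLE IF NOT EXISTS t (n INTEGER)";
    sql << "INSERT INTO t (n) VALUES (42)";
    int n = 0;
    sql << "SELECT n FROM t LIMIT 1", soci::into(n);
    std::cout << n << "\n";  // 42
}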


@@ -1,39 +0,0 @@
From d491bf7b5040d314ffd0c6310ba01f78ff44c85e Mon Sep 17 00:00:00 2001
From: Rasmus Thomsen <rasmus.thomsen@dampsoft.de>
Date: Fri, 14 Apr 2023 09:16:29 +0200
Subject: [PATCH] Remove hardcoded INSTALL_NAME_DIR for relocatable libraries
on MacOS
---
cmake/SociBackend.cmake | 2 +-
src/core/CMakeLists.txt | 1 -
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
index 5d4ef0df..39fe1f77 100644
--- a/cmake/SociBackend.cmake
+++ b/cmake/SociBackend.cmake
@@ -171,7 +171,7 @@ macro(soci_backend NAME)
set_target_properties(${THIS_BACKEND_TARGET}
PROPERTIES
SOVERSION ${${PROJECT_NAME}_SOVERSION}
- INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib)
+ )
if(APPLE)
set_target_properties(${THIS_BACKEND_TARGET}
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 3e7deeae..f9eae564 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -59,7 +59,6 @@ if (SOCI_SHARED)
PROPERTIES
VERSION ${SOCI_VERSION}
SOVERSION ${SOCI_SOVERSION}
- INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib
CLEAN_DIRECT_OUTPUT 1)
endif()
--
2.25.1


@@ -1,24 +0,0 @@
diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
index 0a664667..3fa2ed95 100644
--- a/cmake/SociBackend.cmake
+++ b/cmake/SociBackend.cmake
@@ -31,14 +31,13 @@ macro(soci_backend_deps_found NAME DEPS SUCCESS)
if(NOT DEPEND_FOUND)
list(APPEND DEPS_NOT_FOUND ${dep})
else()
- string(TOUPPER "${dep}" DEPU)
- if( ${DEPU}_INCLUDE_DIR )
- list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIR})
+ if( ${dep}_INCLUDE_DIR )
+ list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIR})
endif()
- if( ${DEPU}_INCLUDE_DIRS )
- list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIRS})
+ if( ${dep}_INCLUDE_DIRS )
+ list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIRS})
endif()
- list(APPEND DEPS_LIBRARIES ${${DEPU}_LIBRARIES})
+ list(APPEND DEPS_LIBRARIES ${${dep}_LIBRARIES})
endif()
endforeach()


@@ -26,6 +26,7 @@
#include <boost/beast/core/string.hpp>
#include <boost/filesystem.hpp>
+#include <fstream>
#include <map>
#include <memory>
#include <mutex>


@@ -3257,7 +3257,6 @@ operator==(aged_unordered_container<
{
if (size() != other.size())
return false;
-using EqRng = std::pair<const_iterator, const_iterator>;
for (auto iter(cbegin()), last(cend()); iter != last;)
{
auto const& k(extract(*iter));


@@ -509,6 +509,7 @@ TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw, Delegation::delegatable, ({
{sfVaultID, soeREQUIRED},
{sfAmount, soeREQUIRED, soeMPTSupported},
{sfDestination, soeOPTIONAL},
+{sfDestinationTag, soeOPTIONAL},
}))
/** This transaction claws back tokens from a vault. */


@@ -28,6 +28,7 @@
#include <cerrno>
#include <cstddef>
+#include <fstream>
#include <ios>
#include <iterator>
#include <optional>
@@ -55,7 +56,7 @@ getFileContents(
return {};
}
-ifstream fileStream(fullPath, std::ios::in);
+std::ifstream fileStream(fullPath.string(), std::ios::in);
if (!fileStream)
{
@@ -85,7 +86,8 @@ writeFileContents(
using namespace boost::filesystem;
using namespace boost::system::errc;
-ofstream fileStream(destPath, std::ios::out | std::ios::trunc);
+std::ofstream fileStream(
+destPath.string(), std::ios::out | std::ios::trunc);
if (!fileStream)
{


@@ -107,8 +107,9 @@ sliceToHex(Slice const& slice)
}
for (int i = 0; i < slice.size(); ++i)
{
s += "0123456789ABCDEF"[((slice[i] & 0xf0) >> 4)];
s += "0123456789ABCDEF"[((slice[i] & 0x0f) >> 0)];
constexpr char hex[] = "0123456789ABCDEF";
s += hex[((slice[i] & 0xf0) >> 4)];
s += hex[((slice[i] & 0x0f) >> 0)];
}
return s;
}


@@ -671,12 +671,12 @@ isMemoOkay(STObject const& st, std::string& reason)
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz");
-for (char c : symbols)
+for (unsigned char c : symbols)
a[c] = 1;
return a;
}();
-for (auto c : *optData)
+for (unsigned char c : *optData)
{
if (!allowedSymbols[c])
{
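
The switch to unsigned char matters because plain char may be signed: any byte at or above 0x80 converts to a negative value, and a negative array index is undefined behavior. A distilled sketch of the hazard:

#include <array>
#include <string>

bool allAllowed(std::string const& data, std::array<char, 256> const& table)
{
    for (unsigned char c : data)  // 0x80..0xFF map to 128..255, in bounds
    {
        // With plain `char c`, a byte such as 0xE9 could become -23 and
        // index table[-23]: out of bounds.
        if (!table[c])
            return false;
    }
    return true;
}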


@@ -544,7 +544,7 @@ b58_to_b256_be(std::string_view input, std::span<std::uint8_t> out)
XRPL_ASSERT(
num_b_58_10_coeffs <= b_58_10_coeff.size(),
"ripple::b58_fast::detail::b58_to_b256_be : maximum coeff");
-for (auto c : input.substr(0, partial_coeff_len))
+for (unsigned char c : input.substr(0, partial_coeff_len))
{
auto cur_val = ::ripple::alphabetReverse[c];
if (cur_val < 0)
@@ -558,7 +558,7 @@ b58_to_b256_be(std::string_view input, std::span<std::uint8_t> out)
{
for (int j = 0; j < num_full_coeffs; ++j)
{
-auto c = input[partial_coeff_len + j * 10 + i];
+unsigned char c = input[partial_coeff_len + j * 10 + i];
auto cur_val = ::ripple::alphabetReverse[c];
if (cur_val < 0)
{


@@ -229,7 +229,6 @@ class RCLValidations_test : public beast::unit_test::suite
// support for a ledger hash which is already in the trie.
using Seq = RCLValidatedLedger::Seq;
-using ID = RCLValidatedLedger::ID;
// Max known ancestors for each ledger
Seq const maxAncestors = 256;


@@ -234,6 +234,28 @@ class Vault_test : public beast::unit_test::suite
env(tx, ter{tecNO_PERMISSION});
}
+{
+testcase(prefix + " fail to withdraw to zero destination");
+auto tx = vault.withdraw(
+{.depositor = depositor,
+.id = keylet.key,
+.amount = asset(1000)});
+tx[sfDestination] = "0";
+env(tx, ter(temMALFORMED));
+}
+{
+testcase(
+prefix +
+" fail to withdraw with tag but without destination");
+auto tx = vault.withdraw(
+{.depositor = depositor,
+.id = keylet.key,
+.amount = asset(1000)});
+tx[sfDestinationTag] = "0";
+env(tx, ter(temMALFORMED));
+}
if (!asset.raw().native())
{
testcase(
@@ -1335,6 +1357,7 @@ class Vault_test : public beast::unit_test::suite
struct CaseArgs
{
bool enableClawback = true;
+bool requireAuth = true;
};
auto testCase = [this](
@@ -1356,16 +1379,20 @@ class Vault_test : public beast::unit_test::suite
Vault vault{env};
MPTTester mptt{env, issuer, mptInitNoFund};
+auto const none = LedgerSpecificFlags(0);
mptt.create(
{.flags = tfMPTCanTransfer | tfMPTCanLock |
-(args.enableClawback ? lsfMPTCanClawback
-: LedgerSpecificFlags(0)) |
-tfMPTRequireAuth});
+(args.enableClawback ? tfMPTCanClawback : none) |
+(args.requireAuth ? tfMPTRequireAuth : none)});
PrettyAsset asset = mptt.issuanceID();
mptt.authorize({.account = owner});
-mptt.authorize({.account = issuer, .holder = owner});
mptt.authorize({.account = depositor});
-mptt.authorize({.account = issuer, .holder = depositor});
+if (args.requireAuth)
+{
+mptt.authorize({.account = issuer, .holder = owner});
+mptt.authorize({.account = issuer, .holder = depositor});
+}
env(pay(issuer, depositor, asset(1000)));
env.close();
@@ -1514,6 +1541,100 @@ class Vault_test : public beast::unit_test::suite
}
});
+testCase(
+[this](
+Env& env,
+Account const& issuer,
+Account const& owner,
+Account const& depositor,
+PrettyAsset const& asset,
+Vault& vault,
+MPTTester& mptt) {
+testcase(
+"MPT 3rd party without MPToken cannot be withdrawal "
+"destination");
+auto [tx, keylet] =
+vault.create({.owner = owner, .asset = asset});
+env(tx);
+env.close();
+tx = vault.deposit(
+{.depositor = depositor,
+.id = keylet.key,
+.amount = asset(100)});
+env(tx);
+env.close();
+{
+// Set destination to 3rd party without MPToken
+Account charlie{"charlie"};
+env.fund(XRP(1000), charlie);
+env.close();
+tx = vault.withdraw(
+{.depositor = depositor,
+.id = keylet.key,
+.amount = asset(100)});
+tx[sfDestination] = charlie.human();
+env(tx, ter(tecNO_AUTH));
+}
+},
+{.requireAuth = false});
+testCase(
+[this](
+Env& env,
+Account const& issuer,
+Account const& owner,
+Account const& depositor,
+PrettyAsset const& asset,
+Vault& vault,
+MPTTester& mptt) {
+testcase("MPT depositor without MPToken cannot withdraw");
+auto [tx, keylet] =
+vault.create({.owner = owner, .asset = asset});
+env(tx);
+env.close();
+tx = vault.deposit(
+{.depositor = depositor,
+.id = keylet.key,
+.amount = asset(1000)});
+env(tx);
+env.close();
+{
+// Remove depositor's MPToken and withdraw will fail
+mptt.authorize(
+{.account = depositor, .flags = tfMPTUnauthorize});
+env.close();
+auto const mptoken =
+env.le(keylet::mptoken(mptt.issuanceID(), depositor));
+BEAST_EXPECT(mptoken == nullptr);
+tx = vault.withdraw(
+{.depositor = depositor,
+.id = keylet.key,
+.amount = asset(100)});
+env(tx, ter(tecNO_AUTH));
+}
+{
+// Restore depositor's MPToken and withdraw will succeed
+mptt.authorize({.account = depositor});
+env.close();
+tx = vault.withdraw(
+{.depositor = depositor,
+.id = keylet.key,
+.amount = asset(100)});
+env(tx);
+}
+},
+{.requireAuth = false});
testCase([this](
Env& env,
Account const& issuer,
@@ -1803,6 +1924,7 @@ class Vault_test : public beast::unit_test::suite
PrettyAsset const asset = issuer["IOU"];
env.trust(asset(1000), owner);
env.trust(asset(1000), charlie);
+env(pay(issuer, owner, asset(200)));
env(rate(issuer, 1.25));
env.close();
@@ -2118,6 +2240,79 @@ class Vault_test : public beast::unit_test::suite
env.close();
});
+testCase([&, this](
+Env& env,
+Account const& owner,
+Account const& issuer,
+Account const& charlie,
+auto,
+Vault& vault,
+PrettyAsset const& asset,
+auto&&...) {
+testcase("IOU no trust line to 3rd party");
+auto [tx, keylet] = vault.create({.owner = owner, .asset = asset});
+env(tx);
+env.close();
+env(vault.deposit(
+{.depositor = owner, .id = keylet.key, .amount = asset(100)}));
+env.close();
+Account const erin{"erin"};
+env.fund(XRP(1000), erin);
+env.close();
+// Withdraw to 3rd party without trust line
+auto const tx1 = [&](ripple::Keylet keylet) {
+auto tx = vault.withdraw(
+{.depositor = owner,
+.id = keylet.key,
+.amount = asset(10)});
+tx[sfDestination] = erin.human();
+return tx;
+}(keylet);
+env(tx1, ter{tecNO_LINE});
+});
+testCase([&, this](
+Env& env,
+Account const& owner,
+Account const& issuer,
+Account const& charlie,
+auto,
+Vault& vault,
+PrettyAsset const& asset,
+auto&&...) {
+testcase("IOU no trust line to depositor");
+auto [tx, keylet] = vault.create({.owner = owner, .asset = asset});
+env(tx);
+env.close();
+// reset limit, so deposit of all funds will delete the trust line
+env.trust(asset(0), owner);
+env.close();
+env(vault.deposit(
+{.depositor = owner, .id = keylet.key, .amount = asset(200)}));
+env.close();
+auto trustline =
+env.le(keylet::line(owner, asset.raw().get<Issue>()));
+BEAST_EXPECT(trustline == nullptr);
+// Withdraw without trust line, will succeed
+auto const tx1 = [&](ripple::Keylet keylet) {
+auto tx = vault.withdraw(
+{.depositor = owner,
+.id = keylet.key,
+.amount = asset(10)});
+return tx;
+}(keylet);
+env(tx1);
+});
testCase([&, this](
Env& env,
Account const& owner,


@@ -703,10 +703,6 @@ aged_associative_container_test_base::checkContentsRefRef(
Values const& v)
{
using Cont = typename std::remove_reference<C>::type;
-using Traits = TestTraits<
-Cont::is_unordered::value,
-Cont::is_multi::value,
-Cont::is_map::value>;
using size_type = typename Cont::size_type;
BEAST_EXPECT(c.size() == v.size());
@@ -761,10 +757,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testConstructEmpty()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
-using Key = typename Traits::Key;
-using T = typename Traits::T;
-using Clock = typename Traits::Clock;
using Comp = typename Traits::Comp;
using Alloc = typename Traits::Alloc;
using MyComp = typename Traits::MyComp;
@@ -802,10 +794,6 @@ typename std::enable_if<IsUnordered>::type
aged_associative_container_test_base::testConstructEmpty()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
-using Key = typename Traits::Key;
-using T = typename Traits::T;
-using Clock = typename Traits::Clock;
using Hash = typename Traits::Hash;
using Equal = typename Traits::Equal;
using Alloc = typename Traits::Alloc;
@@ -870,10 +858,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testConstructRange()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
-using Key = typename Traits::Key;
-using T = typename Traits::T;
-using Clock = typename Traits::Clock;
using Comp = typename Traits::Comp;
using Alloc = typename Traits::Alloc;
using MyComp = typename Traits::MyComp;
@@ -925,10 +909,6 @@ typename std::enable_if<IsUnordered>::type
aged_associative_container_test_base::testConstructRange()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
-using Key = typename Traits::Key;
-using T = typename Traits::T;
-using Clock = typename Traits::Clock;
using Hash = typename Traits::Hash;
using Equal = typename Traits::Equal;
using Alloc = typename Traits::Alloc;
@@ -996,14 +976,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testConstructInitList()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
-using Key = typename Traits::Key;
-using T = typename Traits::T;
-using Clock = typename Traits::Clock;
-using Comp = typename Traits::Comp;
-using Alloc = typename Traits::Alloc;
-using MyComp = typename Traits::MyComp;
-using MyAlloc = typename Traits::MyAlloc;
typename Traits::ManualClock clock;
// testcase (Traits::name() + " init-list");
@@ -1020,16 +992,6 @@ typename std::enable_if<IsUnordered>::type
aged_associative_container_test_base::testConstructInitList()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
-using Key = typename Traits::Key;
-using T = typename Traits::T;
-using Clock = typename Traits::Clock;
-using Hash = typename Traits::Hash;
-using Equal = typename Traits::Equal;
-using Alloc = typename Traits::Alloc;
-using MyHash = typename Traits::MyHash;
-using MyEqual = typename Traits::MyEqual;
-using MyAlloc = typename Traits::MyAlloc;
typename Traits::ManualClock clock;
// testcase (Traits::name() + " init-list");
@@ -1050,7 +1012,6 @@ void
aged_associative_container_test_base::testCopyMove()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
using Alloc = typename Traits::Alloc;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1121,8 +1082,6 @@ void
aged_associative_container_test_base::testIterator()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
-using Alloc = typename Traits::Alloc;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1179,8 +1138,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testReverseIterator()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
-using Alloc = typename Traits::Alloc;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1190,7 +1147,6 @@ aged_associative_container_test_base::testReverseIterator()
typename Traits::template Cont<> c{clock};
using iterator = decltype(c.begin());
using const_iterator = decltype(c.cbegin());
using reverse_iterator = decltype(c.rbegin());
using const_reverse_iterator = decltype(c.crbegin());
@@ -1394,7 +1350,6 @@ void
aged_associative_container_test_base::testChronological()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1760,7 +1715,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testCompare()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
-using Value = typename Traits::Value;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1832,8 +1786,6 @@ template <bool IsUnordered, bool IsMulti, bool IsMap>
void
aged_associative_container_test_base::testMaybeUnorderedMultiMap()
{
-using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
testConstructEmpty<IsUnordered, IsMulti, IsMap>();
testConstructRange<IsUnordered, IsMulti, IsMap>();
testConstructInitList<IsUnordered, IsMulti, IsMap>();


@@ -313,7 +313,6 @@ class LedgerTrie_test : public beast::unit_test::suite
testSupport()
{
using namespace csf;
-using Seq = Ledger::Seq;
LedgerTrie<Ledger> t;
LedgerHistoryHelper h;
@@ -596,7 +595,6 @@ class LedgerTrie_test : public beast::unit_test::suite
testRootRelated()
{
using namespace csf;
-using Seq = Ledger::Seq;
// Since the root is a special node that breaks the no-single child
// invariant, do some tests that exercise it.


@@ -805,7 +805,6 @@ class Validations_test : public beast::unit_test::suite
Ledger ledgerACD = h["acd"];
using Seq = Ledger::Seq;
using ID = Ledger::ID;
auto pref = [](Ledger ledger) {
return std::make_pair(ledger.seq(), ledger.id());


@@ -33,6 +33,7 @@
#include <boost/asio.hpp>
#include <boost/asio/ip/tcp.hpp>
#include <boost/asio/ssl/stream.hpp>
+#include <boost/beast/core/flat_buffer.hpp>
#include <boost/beast/http.hpp>
#include <boost/beast/ssl.hpp>
#include <boost/beast/version.hpp>
@@ -220,9 +221,8 @@ public:
getList_ = [blob = blob, sig, manifest, version](int interval) {
// Build the contents of a version 1 format UNL file
std::stringstream l;
l << "{\"blob\":\"" << blob << "\""
<< ",\"signature\":\"" << sig << "\""
<< ",\"manifest\":\"" << manifest << "\""
l << "{\"blob\":\"" << blob << "\"" << ",\"signature\":\"" << sig
<< "\"" << ",\"manifest\":\"" << manifest << "\""
<< ",\"refresh_interval\": " << interval
<< ",\"version\":" << version << '}';
return l.str();
@@ -257,15 +257,14 @@ public:
std::stringstream l;
for (auto const& info : blobInfo)
{
l << "{\"blob\":\"" << info.blob << "\""
<< ",\"signature\":\"" << info.signature << "\"},";
l << "{\"blob\":\"" << info.blob << "\"" << ",\"signature\":\""
<< info.signature << "\"},";
}
std::string blobs = l.str();
blobs.pop_back();
l.str(std::string());
l << "{\"blobs_v2\": [ " << blobs << "],\"manifest\":\"" << manifest
<< "\""
<< ",\"refresh_interval\": " << interval
<< "\"" << ",\"refresh_interval\": " << interval
<< ",\"version\":" << (version + 1) << '}';
return l.str();
};


@@ -681,7 +681,7 @@ class ServerStatus_test : public beast::unit_test::suite,
resp["Upgrade"] == "websocket");
BEAST_EXPECT(
resp.find("Connection") != resp.end() &&
resp["Connection"] == "upgrade");
resp["Connection"] == "Upgrade");
}
void
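
HTTP tokens such as the Connection value are case-insensitive on the wire, so this change pins the test to Beast's current canonical casing. Were a looser check ever preferred, Beast provides an ASCII case-insensitive comparison (sketch, assuming Boost.Beast as already used by these tests):

#include <boost/beast/core/string.hpp>
#include <cassert>

int main()
{
    assert(boost::beast::iequals("Upgrade", "upgrade"));   // true
    assert(!boost::beast::iequals("Upgrade", "keep-alive"));
}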


@@ -26,6 +26,8 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#include <boost/filesystem.hpp>
+#include <fstream>
namespace ripple {
namespace test {
namespace detail {


@@ -79,7 +79,7 @@
#include <chrono>
#include <condition_variable>
#include <cstring>
-#include <iostream>
+#include <fstream>
#include <limits>
#include <mutex>
#include <optional>


@@ -39,7 +39,7 @@
#include <google/protobuf/stubs/common.h>
#include <cstdlib>
-#include <iostream>
+#include <fstream>
#include <stdexcept>
#include <utility>


@@ -315,14 +315,14 @@ escrowCreatePreclaimHelper<MPTIssue>(
// authorized
auto const& mptIssue = amount.get<MPTIssue>();
if (auto const ter =
-requireAuth(ctx.view, mptIssue, account, MPTAuthType::WeakAuth);
+requireAuth(ctx.view, mptIssue, account, AuthType::WeakAuth);
ter != tesSUCCESS)
return ter;
// If the issuer has requireAuth set, check if the destination is
// authorized
if (auto const ter =
-requireAuth(ctx.view, mptIssue, dest, MPTAuthType::WeakAuth);
+requireAuth(ctx.view, mptIssue, dest, AuthType::WeakAuth);
ter != tesSUCCESS)
return ter;
@@ -746,7 +746,7 @@ escrowFinishPreclaimHelper<MPTIssue>(
// authorized
auto const& mptIssue = amount.get<MPTIssue>();
if (auto const ter =
-requireAuth(ctx.view, mptIssue, dest, MPTAuthType::WeakAuth);
+requireAuth(ctx.view, mptIssue, dest, AuthType::WeakAuth);
ter != tesSUCCESS)
return ter;
@@ -1259,7 +1259,7 @@ escrowCancelPreclaimHelper<MPTIssue>(
// authorized
auto const& mptIssue = amount.get<MPTIssue>();
if (auto const ter =
-requireAuth(ctx.view, mptIssue, account, MPTAuthType::WeakAuth);
+requireAuth(ctx.view, mptIssue, account, AuthType::WeakAuth);
ter != tesSUCCESS)
return ter;


@@ -52,9 +52,19 @@ VaultWithdraw::preflight(PreflightContext const& ctx)
return temBAD_AMOUNT;
if (auto const destination = ctx.tx[~sfDestination];
-destination && *destination == beast::zero)
+destination.has_value())
{
-JLOG(ctx.j.debug()) << "VaultWithdraw: zero/empty destination account.";
+if (*destination == beast::zero)
+{
+JLOG(ctx.j.debug())
+<< "VaultWithdraw: zero/empty destination account.";
return temMALFORMED;
+}
}
+else if (ctx.tx.isFieldPresent(sfDestinationTag))
+{
+JLOG(ctx.j.debug()) << "VaultWithdraw: sfDestinationTag is set but "
+"sfDestination is not";
+return temMALFORMED;
+}
@@ -123,33 +133,39 @@ VaultWithdraw::preclaim(PreclaimContext const& ctx)
// Withdrawal to a 3rd party destination account is essentially a transfer,
// via shares in the vault. Enforce all the usual asset transfer checks.
+AuthType authType = AuthType::Legacy;
if (account != dstAcct)
{
auto const sleDst = ctx.view.read(keylet::account(dstAcct));
if (sleDst == nullptr)
return tecNO_DST;
-if (sleDst->getFlags() & lsfRequireDestTag)
+if (sleDst->isFlag(lsfRequireDestTag) &&
+!ctx.tx.isFieldPresent(sfDestinationTag))
return tecDST_TAG_NEEDED; // Cannot send without a tag
-if (sleDst->getFlags() & lsfDepositAuth)
+if (sleDst->isFlag(lsfDepositAuth))
{
if (!ctx.view.exists(keylet::depositPreauth(dstAcct, account)))
return tecNO_PERMISSION;
}
+// The destination account must have consented to receive the asset by
+// creating a RippleState or MPToken
+authType = AuthType::StrongAuth;
}
-// Destination MPToken must exist (if asset is an MPT)
-if (auto const ter = requireAuth(ctx.view, vaultAsset, dstAcct);
+// Destination MPToken (for an MPT) or trust line (for an IOU) must exist
+// if not sending to Account.
+if (auto const ter = requireAuth(ctx.view, vaultAsset, dstAcct, authType);
!isTesSuccess(ter))
return ter;
// Cannot withdraw from a Vault an Asset frozen for the destination account
-if (isFrozen(ctx.view, dstAcct, vaultAsset))
-return vaultAsset.holds<Issue>() ? tecFROZEN : tecLOCKED;
+if (auto const ret = checkFrozen(ctx.view, dstAcct, vaultAsset))
+return ret;
-if (isFrozen(ctx.view, account, vaultShare))
-return tecLOCKED;
+if (auto const ret = checkFrozen(ctx.view, account, vaultShare))
+return ret;
return tesSUCCESS;
}
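
Distilled, the new destination rules are: a present-but-zero sfDestination is malformed, and sfDestinationTag without sfDestination is malformed. A standalone sketch with hypothetical stand-in types, not the rippled API:

#include <cstdint>
#include <optional>

enum class Result { tesSUCCESS, temMALFORMED };

Result checkDestinationFields(
    std::optional<std::uint64_t> destination,     // 0 models beast::zero
    std::optional<std::uint32_t> destinationTag)
{
    if (destination.has_value())
    {
        if (*destination == 0)  // zero/empty destination account
            return Result::temMALFORMED;
    }
    else if (destinationTag.has_value())  // tag without destination
        return Result::temMALFORMED;
    return Result::tesSUCCESS;
}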


@@ -175,6 +175,29 @@ isFrozen(
asset.value());
}
+[[nodiscard]] inline TER
+checkFrozen(ReadView const& view, AccountID const& account, Issue const& issue)
+{
+return isFrozen(view, account, issue) ? (TER)tecFROZEN : (TER)tesSUCCESS;
+}
+[[nodiscard]] inline TER
+checkFrozen(
+ReadView const& view,
+AccountID const& account,
+MPTIssue const& mptIssue)
+{
+return isFrozen(view, account, mptIssue) ? (TER)tecLOCKED : (TER)tesSUCCESS;
+}
+[[nodiscard]] inline TER
+checkFrozen(ReadView const& view, AccountID const& account, Asset const& asset)
+{
+return std::visit(
+[&](auto const& issue) { return checkFrozen(view, account, issue); },
+asset.value());
+}
[[nodiscard]] bool
isAnyFrozen(
ReadView const& view,
@@ -725,19 +748,40 @@ transferXRP(
STAmount const& amount,
beast::Journal j);
-/* Check if MPToken exists:
-* - StrongAuth - before checking lsfMPTRequireAuth is set
-* - WeakAuth - after checking if lsfMPTRequireAuth is set
+/* Check if MPToken (for MPT) or trust line (for IOU) exists:
+* - StrongAuth - before checking if authorization is required
+* - WeakAuth
+* for MPT - after checking lsfMPTRequireAuth flag
+* for IOU - do not check if trust line exists
+* - Legacy
+* for MPT - before checking lsfMPTRequireAuth flag i.e. same as StrongAuth
+* for IOU - do not check if trust line exists i.e. same as WeakAuth
*/
-enum class MPTAuthType : bool { StrongAuth = true, WeakAuth = false };
+enum class AuthType { StrongAuth, WeakAuth, Legacy };
/** Check if the account lacks required authorization.
*
-* Return tecNO_AUTH or tecNO_LINE if it does
-* and tesSUCCESS otherwise.
+* Return tecNO_AUTH or tecNO_LINE if it does
+* and tesSUCCESS otherwise.
+*
+* If StrongAuth then return tecNO_LINE if the RippleState doesn't exist. Return
+* tecNO_AUTH if lsfRequireAuth is set on the issuer's AccountRoot, and the
+* RippleState does exist, and the RippleState is not authorized.
+*
+* If WeakAuth then return tecNO_AUTH if lsfRequireAuth is set, and the
+* RippleState exists, and is not authorized. Return tecNO_LINE if
+* lsfRequireAuth is set and the RippleState doesn't exist. Consequently, if
+* WeakAuth and lsfRequireAuth is *not* set, this function will return
+* tesSUCCESS even if RippleState does *not* exist.
+*
+* The default "Legacy" auth type is equivalent to WeakAuth.
*/
[[nodiscard]] TER
-requireAuth(ReadView const& view, Issue const& issue, AccountID const& account);
+requireAuth(
+ReadView const& view,
+Issue const& issue,
+AccountID const& account,
+AuthType authType = AuthType::Legacy);
/** Check if the account lacks required authorization.
*
@@ -751,32 +795,33 @@ requireAuth(ReadView const& view, Issue const& issue, AccountID const& account);
* purely defensive, as we currently do not allow such vaults to be created.
*
* If StrongAuth then return tecNO_AUTH if MPToken doesn't exist or
-* lsfMPTRequireAuth is set and MPToken is not authorized. If WeakAuth then
-* return tecNO_AUTH if lsfMPTRequireAuth is set and MPToken doesn't exist or is
-* not authorized (explicitly or via credentials, if DomainID is set in
-* MPTokenIssuance). Consequently, if WeakAuth and lsfMPTRequireAuth is *not*
-* set, this function will return true even if MPToken does *not* exist.
+* lsfMPTRequireAuth is set and MPToken is not authorized.
+*
+* If WeakAuth then return tecNO_AUTH if lsfMPTRequireAuth is set and MPToken
+* doesn't exist or is not authorized (explicitly or via credentials, if
+* DomainID is set in MPTokenIssuance). Consequently, if WeakAuth and
+* lsfMPTRequireAuth is *not* set, this function will return true even if
+* MPToken does *not* exist.
+*
+* The default "Legacy" auth type is equivalent to StrongAuth.
*/
[[nodiscard]] TER
requireAuth(
ReadView const& view,
MPTIssue const& mptIssue,
AccountID const& account,
-MPTAuthType authType = MPTAuthType::StrongAuth,
+AuthType authType = AuthType::Legacy,
int depth = 0);
[[nodiscard]] TER inline requireAuth(
ReadView const& view,
Asset const& asset,
AccountID const& account,
-MPTAuthType authType = MPTAuthType::StrongAuth)
+AuthType authType = AuthType::Legacy)
{
return std::visit(
[&]<ValidIssueType TIss>(TIss const& issue_) {
-if constexpr (std::is_same_v<TIss, Issue>)
-return requireAuth(view, issue_, account);
-else
-return requireAuth(view, issue_, account, authType);
+return requireAuth(view, issue_, account, authType);
},
asset.value());
}
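
The IOU half of the three-way AuthType documented above can be summarized as follows; a simplified sketch with hypothetical stand-ins, omitting XRP, issuer self-transfers, and the MPT/credential path the real requireAuth also covers:

#include <optional>

enum class AuthKind { StrongAuth, WeakAuth, Legacy };
enum class Result { tesSUCCESS, tecNO_LINE, tecNO_AUTH };

struct Line { bool authorized; };  // stand-in for a RippleState entry

Result requireAuthIou(
    std::optional<Line> line, bool issuerRequiresAuth, AuthKind kind)
{
    // StrongAuth demands the trust line exist up front.
    if (!line && kind == AuthKind::StrongAuth)
        return Result::tecNO_LINE;
    if (issuerRequiresAuth)
    {
        if (line)
            return line->authorized ? Result::tesSUCCESS : Result::tecNO_AUTH;
        return Result::tecNO_LINE;  // auth required but no line at all
    }
    // Weak/Legacy with no auth requirement: succeed even without a line.
    return Result::tesSUCCESS;
}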


@@ -505,8 +505,8 @@ accountHolds(
if (zeroIfUnauthorized == ahZERO_IF_UNAUTHORIZED &&
view.rules().enabled(featureSingleAssetVault))
{
-if (auto const err = requireAuth(
-view, mptIssue, account, MPTAuthType::StrongAuth);
+if (auto const err =
+requireAuth(view, mptIssue, account, AuthType::StrongAuth);
!isTesSuccess(err))
amount.clear(mptIssue);
}
@@ -2298,15 +2298,27 @@ transferXRP(
}
TER
-requireAuth(ReadView const& view, Issue const& issue, AccountID const& account)
+requireAuth(
+ReadView const& view,
+Issue const& issue,
+AccountID const& account,
+AuthType authType)
{
if (isXRP(issue) || issue.account == account)
return tesSUCCESS;
+auto const trustLine =
+view.read(keylet::line(account, issue.account, issue.currency));
+// If account has no line, and this is a strong check, fail
+if (!trustLine && authType == AuthType::StrongAuth)
+return tecNO_LINE;
+// If this is a weak or legacy check, or if the account has a line, fail if
+// auth is required and not set on the line
if (auto const issuerAccount = view.read(keylet::account(issue.account));
issuerAccount && (*issuerAccount)[sfFlags] & lsfRequireAuth)
{
-if (auto const trustLine =
-view.read(keylet::line(account, issue.account, issue.currency)))
+if (trustLine)
return ((*trustLine)[sfFlags] &
((account > issue.account) ? lsfLowAuth : lsfHighAuth))
? tesSUCCESS
@@ -2322,7 +2334,7 @@ requireAuth(
ReadView const& view,
MPTIssue const& mptIssue,
AccountID const& account,
-MPTAuthType authType,
+AuthType authType,
int depth)
{
auto const mptID = keylet::mptIssuance(mptIssue.getMptID());
@@ -2357,7 +2369,7 @@ requireAuth(
if (auto const err = std::visit(
[&]<ValidIssueType TIss>(TIss const& issue) {
if constexpr (std::is_same_v<TIss, Issue>)
-return requireAuth(view, issue, account);
+return requireAuth(view, issue, account, authType);
else
return requireAuth(
view, issue, account, authType, depth + 1);
@@ -2372,7 +2384,8 @@ requireAuth(
auto const sleToken = view.read(mptokenID);
// if account has no MPToken, fail
-if (!sleToken && authType == MPTAuthType::StrongAuth)
+if (!sleToken &&
+(authType == AuthType::StrongAuth || authType == AuthType::Legacy))
return tecNO_AUTH;
// Note, this check is not amendment-gated because DomainID will be always


@@ -44,7 +44,6 @@ doLogLevel(RPC::JsonContext& context)
Logs::toString(Logs::fromSeverity(context.app.logs().threshold()));
std::vector<std::pair<std::string, std::string>> logTable(
context.app.logs().partition_severities());
-using stringPair = std::map<std::string, std::string>::value_type;
for (auto const& [k, v] : logTable)
lev[k] = v;