mirror of
https://github.com/XRPLF/rippled.git
synced 2025-11-30 16:05:51 +00:00
Compare commits
58 Commits
release-3.
...
ximinez/mv
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
de7192d889 | ||
|
|
58551da698 | ||
|
|
8a77ac71ed | ||
|
|
a72c237373 | ||
|
|
faae2514b9 | ||
|
|
a14cbf117c | ||
|
|
5ce07e769f | ||
|
|
08a5e8428a | ||
|
|
64959b980f | ||
|
|
0b833e17ae | ||
|
|
0475398a17 | ||
|
|
bf483a2e94 | ||
|
|
4169b0a6b7 | ||
|
|
024339c99b | ||
|
|
f1f1117b08 | ||
|
|
5bb12ac1db | ||
|
|
4d0f895cda | ||
|
|
48162219ac | ||
|
|
18ffbbd42d | ||
|
|
ae25e0f058 | ||
|
|
7d634c1e62 | ||
|
|
e434563e2e | ||
|
|
29300fc972 | ||
|
|
a88aceb846 | ||
|
|
2d570267de | ||
|
|
97849b6e70 | ||
|
|
b447fc54c8 | ||
|
|
9920037d13 | ||
|
|
87e0c25a34 | ||
|
|
a648249b90 | ||
|
|
e9033898da | ||
|
|
d952e9de1a | ||
|
|
241f22bab7 | ||
|
|
a387a7aadf | ||
|
|
d5e137de82 | ||
|
|
4639c1c351 | ||
|
|
c863bd25e0 | ||
|
|
924b05ea9f | ||
|
|
a0a0916108 | ||
|
|
b07087f10b | ||
|
|
f89c88dbb8 | ||
|
|
a66ef800bc | ||
|
|
5f5f60a024 | ||
|
|
1d90ae7c25 | ||
|
|
39db7381b2 | ||
|
|
c3fa4fb71c | ||
|
|
a8d0d763b0 | ||
|
|
1b8da176bf | ||
|
|
66dd0de019 | ||
|
|
c73372297c | ||
|
|
1f331bf8d8 | ||
|
|
30becab0b6 | ||
|
|
e037ae3d3a | ||
|
|
72f33d8f3b | ||
|
|
96f2a65f64 | ||
|
|
43fe49e756 | ||
|
|
937b67cbc0 | ||
|
|
4e50087612 |
@@ -33,6 +33,5 @@ slack_app: false
|
||||
|
||||
ignore:
|
||||
- "src/test/"
|
||||
- "src/tests/"
|
||||
- "include/xrpl/beast/test/"
|
||||
- "include/xrpl/beast/unit_test/"
|
||||
|
||||
57
.github/actions/build-deps/action.yml
vendored
57
.github/actions/build-deps/action.yml
vendored
@@ -1,5 +1,7 @@
|
||||
# This action installs and optionally uploads Conan dependencies to a remote
|
||||
# repository. The dependencies will only be uploaded if the credentials are
|
||||
# provided.
|
||||
name: Build Conan dependencies
|
||||
description: "Install Conan dependencies, optionally forcing a rebuild of all dependencies."
|
||||
|
||||
# Note that actions do not support 'type' and all inputs are strings, see
|
||||
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
|
||||
@@ -10,40 +12,51 @@ inputs:
|
||||
build_type:
|
||||
description: 'The build type to use ("Debug", "Release").'
|
||||
required: true
|
||||
build_nproc:
|
||||
description: "The number of processors to use for building."
|
||||
conan_remote_name:
|
||||
description: "The name of the Conan remote to use."
|
||||
required: true
|
||||
conan_remote_url:
|
||||
description: "The URL of the Conan endpoint to use."
|
||||
required: true
|
||||
conan_remote_username:
|
||||
description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
|
||||
required: false
|
||||
default: ""
|
||||
conan_remote_password:
|
||||
description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
|
||||
required: false
|
||||
default: ""
|
||||
force_build:
|
||||
description: 'Force building of all dependencies ("true", "false").'
|
||||
required: false
|
||||
default: "false"
|
||||
log_verbosity:
|
||||
description: "The logging verbosity."
|
||||
force_upload:
|
||||
description: 'Force uploading of all dependencies ("true", "false").'
|
||||
required: false
|
||||
default: "verbose"
|
||||
default: "false"
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Install Conan dependencies
|
||||
shell: bash
|
||||
env:
|
||||
BUILD_DIR: ${{ inputs.build_dir }}
|
||||
BUILD_NPROC: ${{ inputs.build_nproc }}
|
||||
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
LOG_VERBOSITY: ${{ inputs.log_verbosity }}
|
||||
run: |
|
||||
echo 'Installing dependencies.'
|
||||
mkdir -p "${BUILD_DIR}"
|
||||
cd "${BUILD_DIR}"
|
||||
mkdir -p ${{ inputs.build_dir }}
|
||||
cd ${{ inputs.build_dir }}
|
||||
conan install \
|
||||
--output-folder . \
|
||||
--build="${BUILD_OPTION}" \
|
||||
--options:host='&:tests=True' \
|
||||
--options:host='&:xrpld=True' \
|
||||
--settings:all build_type="${BUILD_TYPE}" \
|
||||
--conf:all tools.build:jobs=${BUILD_NPROC} \
|
||||
--conf:all tools.build:verbosity="${LOG_VERBOSITY}" \
|
||||
--conf:all tools.compilation:verbosity="${LOG_VERBOSITY}" \
|
||||
..
|
||||
--build ${{ inputs.force_build == 'true' && '"*"' || 'missing' }} \
|
||||
--options:host '&:tests=True' \
|
||||
--options:host '&:xrpld=True' \
|
||||
--settings:all build_type=${{ inputs.build_type }} \
|
||||
--format=json ..
|
||||
- name: Upload Conan dependencies
|
||||
if: ${{ inputs.conan_remote_username != '' && inputs.conan_remote_password != '' }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}
|
||||
run: |
|
||||
echo "Logging into Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
|
||||
conan remote login ${{ inputs.conan_remote_name }} "${{ inputs.conan_remote_username }}" --password "${{ inputs.conan_remote_password }}"
|
||||
echo 'Uploading dependencies.'
|
||||
conan upload '*' --confirm --check ${{ inputs.force_upload == 'true' && '--force' || '' }} --remote=${{ inputs.conan_remote_name }}
|
||||
|
||||
95
.github/actions/build-test/action.yml
vendored
Normal file
95
.github/actions/build-test/action.yml
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
# This action build and tests the binary. The Conan dependencies must have
|
||||
# already been installed (see the build-deps action).
|
||||
name: Build and Test
|
||||
|
||||
# Note that actions do not support 'type' and all inputs are strings, see
|
||||
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
|
||||
inputs:
|
||||
build_dir:
|
||||
description: "The directory where to build."
|
||||
required: true
|
||||
build_only:
|
||||
description: 'Whether to only build or to build and test the code ("true", "false").'
|
||||
required: false
|
||||
default: "false"
|
||||
build_type:
|
||||
description: 'The build type to use ("Debug", "Release").'
|
||||
required: true
|
||||
cmake_args:
|
||||
description: "Additional arguments to pass to CMake."
|
||||
required: false
|
||||
default: ""
|
||||
cmake_target:
|
||||
description: "The CMake target to build."
|
||||
required: true
|
||||
codecov_token:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
required: false
|
||||
default: ""
|
||||
os:
|
||||
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Configure CMake
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}
|
||||
run: |
|
||||
echo 'Configuring CMake.'
|
||||
cmake \
|
||||
-G '${{ inputs.os == 'windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||
-DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
|
||||
${{ inputs.cmake_args }} \
|
||||
..
|
||||
- name: Build the binary
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}
|
||||
run: |
|
||||
echo 'Building binary.'
|
||||
cmake \
|
||||
--build . \
|
||||
--config ${{ inputs.build_type }} \
|
||||
--parallel $(nproc) \
|
||||
--target ${{ inputs.cmake_target }}
|
||||
- name: Check linking
|
||||
if: ${{ inputs.os == 'linux' }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}
|
||||
run: |
|
||||
echo 'Checking linking.'
|
||||
ldd ./rippled
|
||||
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
|
||||
echo 'The binary is statically linked.'
|
||||
else
|
||||
echo 'The binary is dynamically linked.'
|
||||
exit 1
|
||||
fi
|
||||
- name: Verify voidstar
|
||||
if: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}
|
||||
run: |
|
||||
echo 'Verifying presence of instrumentation.'
|
||||
./rippled --version | grep libvoidstar
|
||||
- name: Test the binary
|
||||
if: ${{ inputs.build_only == 'false' }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'windows' && inputs.build_type || '' }}
|
||||
run: |
|
||||
echo 'Testing binary.'
|
||||
./rippled --unittest --unittest-jobs $(nproc)
|
||||
ctest -j $(nproc) --output-on-failure
|
||||
- name: Upload coverage report
|
||||
if: ${{ inputs.cmake_target == 'coverage' }}
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
with:
|
||||
disable_search: true
|
||||
disable_telem: true
|
||||
fail_ci_if_error: true
|
||||
files: ${{ inputs.build_dir }}/coverage.xml
|
||||
plugins: noop
|
||||
token: ${{ inputs.codecov_token }}
|
||||
verbose: true
|
||||
43
.github/actions/print-env/action.yml
vendored
43
.github/actions/print-env/action.yml
vendored
@@ -1,43 +0,0 @@
|
||||
name: Print build environment
|
||||
description: "Print environment and some tooling versions"
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Check configuration (Windows)
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Checking environment variables.'
|
||||
set
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking Conan version.'
|
||||
conan --version
|
||||
|
||||
- name: Check configuration (Linux and macOS)
|
||||
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Checking path.'
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
|
||||
echo 'Checking environment variables.'
|
||||
env | sort
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking compiler version.'
|
||||
${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version
|
||||
|
||||
echo 'Checking Conan version.'
|
||||
conan --version
|
||||
|
||||
echo 'Checking Ninja version.'
|
||||
ninja --version
|
||||
|
||||
echo 'Checking nproc version.'
|
||||
nproc --version
|
||||
46
.github/actions/setup-conan/action.yml
vendored
46
.github/actions/setup-conan/action.yml
vendored
@@ -1,46 +0,0 @@
|
||||
name: Setup Conan
|
||||
description: "Set up Conan configuration, profile, and remote."
|
||||
|
||||
inputs:
|
||||
conan_remote_name:
|
||||
description: "The name of the Conan remote to use."
|
||||
required: false
|
||||
default: xrplf
|
||||
conan_remote_url:
|
||||
description: "The URL of the Conan endpoint to use."
|
||||
required: false
|
||||
default: https://conan.ripplex.io
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
|
||||
steps:
|
||||
- name: Set up Conan configuration
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Installing configuration.'
|
||||
cat conan/global.conf ${{ runner.os == 'Linux' && '>>' || '>' }} $(conan config home)/global.conf
|
||||
|
||||
echo 'Conan configuration:'
|
||||
conan config show '*'
|
||||
|
||||
- name: Set up Conan profile
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Installing profile.'
|
||||
conan config install conan/profiles/default -tf $(conan config home)/profiles/
|
||||
|
||||
echo 'Conan profile:'
|
||||
conan profile show
|
||||
|
||||
- name: Set up Conan remote
|
||||
shell: bash
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
|
||||
run: |
|
||||
echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
|
||||
conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"
|
||||
|
||||
echo 'Listing Conan remotes.'
|
||||
conan remote list
|
||||
6
.github/scripts/levelization/README.md
vendored
6
.github/scripts/levelization/README.md
vendored
@@ -72,15 +72,15 @@ It generates many files of [results](results):
|
||||
desired as described above. In a perfect repo, this file will be
|
||||
empty.
|
||||
This file is committed to the repo, and is used by the [levelization
|
||||
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
|
||||
Github workflow](../../workflows/check-levelization.yml) to validate
|
||||
that nothing changed.
|
||||
- [`ordering.txt`](results/ordering.txt): A list showing relationships
|
||||
between modules where there are no loops as they actually exist, as
|
||||
opposed to how they are desired as described above.
|
||||
This file is committed to the repo, and is used by the [levelization
|
||||
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
|
||||
Github workflow](../../workflows/check-levelization.yml) to validate
|
||||
that nothing changed.
|
||||
- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
|
||||
- [`levelization.yml`](../../workflows/check-levelization.yml)
|
||||
Github Actions workflow to test that levelization loops haven't
|
||||
changed. Unfortunately, if changes are detected, it can't tell if
|
||||
they are improvements or not, so if you have resolved any issues or
|
||||
|
||||
@@ -7,6 +7,9 @@ Loop: test.jtx test.unit_test
|
||||
Loop: xrpld.app xrpld.core
|
||||
xrpld.app > xrpld.core
|
||||
|
||||
Loop: xrpld.app xrpld.ledger
|
||||
xrpld.app > xrpld.ledger
|
||||
|
||||
Loop: xrpld.app xrpld.overlay
|
||||
xrpld.overlay > xrpld.app
|
||||
|
||||
|
||||
@@ -2,10 +2,6 @@ libxrpl.basics > xrpl.basics
|
||||
libxrpl.crypto > xrpl.basics
|
||||
libxrpl.json > xrpl.basics
|
||||
libxrpl.json > xrpl.json
|
||||
libxrpl.ledger > xrpl.basics
|
||||
libxrpl.ledger > xrpl.json
|
||||
libxrpl.ledger > xrpl.ledger
|
||||
libxrpl.ledger > xrpl.protocol
|
||||
libxrpl.net > xrpl.basics
|
||||
libxrpl.net > xrpl.net
|
||||
libxrpl.protocol > xrpl.basics
|
||||
@@ -25,11 +21,11 @@ test.app > test.unit_test
|
||||
test.app > xrpl.basics
|
||||
test.app > xrpld.app
|
||||
test.app > xrpld.core
|
||||
test.app > xrpld.ledger
|
||||
test.app > xrpld.nodestore
|
||||
test.app > xrpld.overlay
|
||||
test.app > xrpld.rpc
|
||||
test.app > xrpl.json
|
||||
test.app > xrpl.ledger
|
||||
test.app > xrpl.protocol
|
||||
test.app > xrpl.resource
|
||||
test.basics > test.jtx
|
||||
@@ -48,8 +44,8 @@ test.consensus > test.unit_test
|
||||
test.consensus > xrpl.basics
|
||||
test.consensus > xrpld.app
|
||||
test.consensus > xrpld.consensus
|
||||
test.consensus > xrpld.ledger
|
||||
test.consensus > xrpl.json
|
||||
test.consensus > xrpl.ledger
|
||||
test.core > test.jtx
|
||||
test.core > test.toplevel
|
||||
test.core > test.unit_test
|
||||
@@ -67,9 +63,9 @@ test.json > xrpl.json
|
||||
test.jtx > xrpl.basics
|
||||
test.jtx > xrpld.app
|
||||
test.jtx > xrpld.core
|
||||
test.jtx > xrpld.ledger
|
||||
test.jtx > xrpld.rpc
|
||||
test.jtx > xrpl.json
|
||||
test.jtx > xrpl.ledger
|
||||
test.jtx > xrpl.net
|
||||
test.jtx > xrpl.protocol
|
||||
test.jtx > xrpl.resource
|
||||
@@ -79,7 +75,7 @@ test.ledger > test.toplevel
|
||||
test.ledger > xrpl.basics
|
||||
test.ledger > xrpld.app
|
||||
test.ledger > xrpld.core
|
||||
test.ledger > xrpl.ledger
|
||||
test.ledger > xrpld.ledger
|
||||
test.ledger > xrpl.protocol
|
||||
test.nodestore > test.jtx
|
||||
test.nodestore > test.toplevel
|
||||
@@ -138,11 +134,7 @@ test.toplevel > test.csf
|
||||
test.toplevel > xrpl.json
|
||||
test.unit_test > xrpl.basics
|
||||
tests.libxrpl > xrpl.basics
|
||||
tests.libxrpl > xrpl.json
|
||||
tests.libxrpl > xrpl.net
|
||||
xrpl.json > xrpl.basics
|
||||
xrpl.ledger > xrpl.basics
|
||||
xrpl.ledger > xrpl.protocol
|
||||
xrpl.net > xrpl.basics
|
||||
xrpl.protocol > xrpl.basics
|
||||
xrpl.protocol > xrpl.json
|
||||
@@ -159,7 +151,6 @@ xrpld.app > xrpld.consensus
|
||||
xrpld.app > xrpld.nodestore
|
||||
xrpld.app > xrpld.perflog
|
||||
xrpld.app > xrpl.json
|
||||
xrpld.app > xrpl.ledger
|
||||
xrpld.app > xrpl.net
|
||||
xrpld.app > xrpl.protocol
|
||||
xrpld.app > xrpl.resource
|
||||
@@ -172,6 +163,9 @@ xrpld.core > xrpl.basics
|
||||
xrpld.core > xrpl.json
|
||||
xrpld.core > xrpl.net
|
||||
xrpld.core > xrpl.protocol
|
||||
xrpld.ledger > xrpl.basics
|
||||
xrpld.ledger > xrpl.json
|
||||
xrpld.ledger > xrpl.protocol
|
||||
xrpld.nodestore > xrpl.basics
|
||||
xrpld.nodestore > xrpld.core
|
||||
xrpld.nodestore > xrpld.unity
|
||||
@@ -192,9 +186,9 @@ xrpld.perflog > xrpl.basics
|
||||
xrpld.perflog > xrpl.json
|
||||
xrpld.rpc > xrpl.basics
|
||||
xrpld.rpc > xrpld.core
|
||||
xrpld.rpc > xrpld.ledger
|
||||
xrpld.rpc > xrpld.nodestore
|
||||
xrpld.rpc > xrpl.json
|
||||
xrpld.rpc > xrpl.ledger
|
||||
xrpld.rpc > xrpl.net
|
||||
xrpld.rpc > xrpl.protocol
|
||||
xrpld.rpc > xrpl.resource
|
||||
|
||||
59
.github/scripts/strategy-matrix/generate.py
vendored
Executable file → Normal file
59
.github/scripts/strategy-matrix/generate.py
vendored
Executable file → Normal file
@@ -2,17 +2,7 @@
|
||||
import argparse
|
||||
import itertools
|
||||
import json
|
||||
from pathlib import Path
|
||||
from dataclasses import dataclass
|
||||
|
||||
THIS_DIR = Path(__file__).parent.resolve()
|
||||
|
||||
@dataclass
|
||||
class Config:
|
||||
architecture: list[dict]
|
||||
os: list[dict]
|
||||
build_type: list[str]
|
||||
cmake_args: list[str]
|
||||
import re
|
||||
|
||||
'''
|
||||
Generate a strategy matrix for GitHub Actions CI.
|
||||
@@ -28,9 +18,9 @@ We will further set additional CMake arguments as follows:
|
||||
- Certain Debian Bookworm configurations will change the reference fee, enable
|
||||
codecov, and enable voidstar in PRs.
|
||||
'''
|
||||
def generate_strategy_matrix(all: bool, config: Config) -> list:
|
||||
def generate_strategy_matrix(all: bool, architecture: list[dict], os: list[dict], build_type: list[str], cmake_args: list[str]) -> dict:
|
||||
configurations = []
|
||||
for architecture, os, build_type, cmake_args in itertools.product(config.architecture, config.os, config.build_type, config.cmake_args):
|
||||
for architecture, os, build_type, cmake_args in itertools.product(architecture, os, build_type, cmake_args):
|
||||
# The default CMake target is 'all' for Linux and MacOS and 'install'
|
||||
# for Windows, but it can get overridden for certain configurations.
|
||||
cmake_target = 'install' if os["distro_name"] == 'windows' else 'all'
|
||||
@@ -45,7 +35,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
|
||||
# Only generate a subset of configurations in PRs.
|
||||
if not all:
|
||||
# Debian:
|
||||
# - Bookworm using GCC 13: Release and Unity on linux/amd64, set
|
||||
# - Bookworm using GCC 13: Release and Unity on linux/arm64, set
|
||||
# the reference fee to 500.
|
||||
# - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
|
||||
# code coverage (which will be done below).
|
||||
@@ -57,7 +47,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
|
||||
if os['distro_name'] == 'debian':
|
||||
skip = True
|
||||
if os['distro_version'] == 'bookworm':
|
||||
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-13' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
|
||||
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-13' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/arm64':
|
||||
cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}'
|
||||
skip = False
|
||||
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
|
||||
@@ -74,14 +64,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
|
||||
continue
|
||||
|
||||
# RHEL:
|
||||
# - 9 using GCC 12: Debug and Unity on linux/amd64.
|
||||
# - 10 using Clang: Release and no Unity on linux/amd64.
|
||||
# - 9.4 using GCC 12: Debug and Unity on linux/amd64.
|
||||
# - 9.6 using Clang: Release and no Unity on linux/amd64.
|
||||
if os['distro_name'] == 'rhel':
|
||||
skip = True
|
||||
if os['distro_version'] == '9':
|
||||
if os['distro_version'] == '9.4':
|
||||
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
|
||||
skip = False
|
||||
elif os['distro_version'] == '10':
|
||||
elif os['distro_version'] == '9.6':
|
||||
if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
|
||||
skip = False
|
||||
if skip:
|
||||
@@ -138,6 +128,8 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
|
||||
# Unity on linux/amd64
|
||||
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
|
||||
cmake_args = f'-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}'
|
||||
cmake_target = 'coverage'
|
||||
build_only = True
|
||||
|
||||
# Generate a unique name for the configuration, e.g. macos-arm64-debug
|
||||
# or debian-bookworm-gcc-12-amd64-release-unity.
|
||||
@@ -160,36 +152,27 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
|
||||
'config_name': config_name,
|
||||
'cmake_args': cmake_args,
|
||||
'cmake_target': cmake_target,
|
||||
'build_only': build_only,
|
||||
'build_only': 'true' if build_only else 'false',
|
||||
'build_type': build_type,
|
||||
'os': os,
|
||||
'architecture': architecture,
|
||||
})
|
||||
|
||||
return configurations
|
||||
|
||||
|
||||
def read_config(file: Path) -> Config:
|
||||
config = json.loads(file.read_text())
|
||||
if config['architecture'] is None or config['os'] is None or config['build_type'] is None or config['cmake_args'] is None:
|
||||
raise Exception('Invalid configuration file.')
|
||||
|
||||
return Config(**config)
|
||||
return {'include': configurations}
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('-a', '--all', help='Set to generate all configurations (generally used when merging a PR) or leave unset to generate a subset of configurations (generally used when committing to a PR).', action="store_true")
|
||||
parser.add_argument('-c', '--config', help='Path to the JSON file containing the strategy matrix configurations.', required=False, type=Path)
|
||||
parser.add_argument('-c', '--config', help='Path to the JSON file containing the strategy matrix configurations.', required=True, type=str)
|
||||
args = parser.parse_args()
|
||||
|
||||
matrix = []
|
||||
if args.config is None or args.config == '':
|
||||
matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "linux.json"))
|
||||
matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "macos.json"))
|
||||
matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "windows.json"))
|
||||
else:
|
||||
matrix += generate_strategy_matrix(args.all, read_config(args.config))
|
||||
# Load the JSON configuration file.
|
||||
config = None
|
||||
with open(args.config, 'r') as f:
|
||||
config = json.load(f)
|
||||
if config['architecture'] is None or config['os'] is None or config['build_type'] is None or config['cmake_args'] is None:
|
||||
raise Exception('Invalid configuration file.')
|
||||
|
||||
# Generate the strategy matrix.
|
||||
print(f'matrix={json.dumps({"include": matrix})}')
|
||||
print(f'matrix={json.dumps(generate_strategy_matrix(args.all, config['architecture'], config['os'], config['build_type'], config['cmake_args']))}')
|
||||
|
||||
122
.github/scripts/strategy-matrix/linux.json
vendored
122
.github/scripts/strategy-matrix/linux.json
vendored
@@ -14,169 +14,139 @@
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "12"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "13"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "14"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "15",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "15"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "16",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "16"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "17",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "17"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "18",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "18"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "19",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "19"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "20",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "20"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "8",
|
||||
"distro_version": "9.4",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "12"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "8",
|
||||
"distro_version": "9.4",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9.4",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9.6",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9.6",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9.4",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "any"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "97ba375"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "97ba375"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "97ba375"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"distro_version": "9.6",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "97ba375"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "10",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "97ba375"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "10",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "any"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "jammy",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "12"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "13"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "14"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "16",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "16"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "17",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "17"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "18",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "18"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "19",
|
||||
"image_sha": "97ba375"
|
||||
"compiler_version": "19"
|
||||
}
|
||||
],
|
||||
"build_type": ["Debug", "Release"],
|
||||
|
||||
3
.github/scripts/strategy-matrix/macos.json
vendored
3
.github/scripts/strategy-matrix/macos.json
vendored
@@ -10,8 +10,7 @@
|
||||
"distro_name": "macos",
|
||||
"distro_version": "",
|
||||
"compiler_name": "",
|
||||
"compiler_version": "",
|
||||
"image_sha": ""
|
||||
"compiler_version": ""
|
||||
}
|
||||
],
|
||||
"build_type": ["Debug", "Release"],
|
||||
|
||||
5
.github/scripts/strategy-matrix/windows.json
vendored
5
.github/scripts/strategy-matrix/windows.json
vendored
@@ -2,7 +2,7 @@
|
||||
"architecture": [
|
||||
{
|
||||
"platform": "windows/amd64",
|
||||
"runner": ["self-hosted", "Windows", "devbox"]
|
||||
"runner": ["windows-latest"]
|
||||
}
|
||||
],
|
||||
"os": [
|
||||
@@ -10,8 +10,7 @@
|
||||
"distro_name": "windows",
|
||||
"distro_version": "",
|
||||
"compiler_name": "",
|
||||
"compiler_version": "",
|
||||
"image_sha": ""
|
||||
"compiler_version": ""
|
||||
}
|
||||
],
|
||||
"build_type": ["Debug", "Release"],
|
||||
|
||||
201
.github/workflows/build-test.yml
vendored
Normal file
201
.github/workflows/build-test.yml
vendored
Normal file
@@ -0,0 +1,201 @@
|
||||
# This workflow builds and tests the binary for various configurations.
|
||||
name: Build and test
|
||||
|
||||
# This workflow can only be triggered by other workflows. Note that the
|
||||
# workflow_call event does not support the 'choice' input type, see
|
||||
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
|
||||
# so we use 'string' instead.
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
build_dir:
|
||||
description: "The directory where to build."
|
||||
required: false
|
||||
type: string
|
||||
default: ".build"
|
||||
conan_remote_name:
|
||||
description: "The name of the Conan remote to use."
|
||||
required: true
|
||||
type: string
|
||||
conan_remote_url:
|
||||
description: "The URL of the Conan endpoint to use."
|
||||
required: true
|
||||
type: string
|
||||
dependencies_force_build:
|
||||
description: "Force building of all dependencies."
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
dependencies_force_upload:
|
||||
description: "Force uploading of all dependencies."
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
os:
|
||||
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||
required: true
|
||||
type: string
|
||||
strategy_matrix:
|
||||
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
|
||||
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
|
||||
required: false
|
||||
type: string
|
||||
default: "minimal"
|
||||
secrets:
|
||||
codecov_token:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
required: false
|
||||
conan_remote_username:
|
||||
description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
|
||||
required: false
|
||||
conan_remote_password:
|
||||
description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
|
||||
required: false
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.os }}
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
# Generate the strategy matrix to be used by the following job.
|
||||
generate-matrix:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: 3.13
|
||||
- name: Generate strategy matrix
|
||||
working-directory: .github/scripts/strategy-matrix
|
||||
id: generate
|
||||
run: python generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} --config=${{ inputs.os }}.json >> "${GITHUB_OUTPUT}"
|
||||
outputs:
|
||||
matrix: ${{ steps.generate.outputs.matrix }}
|
||||
|
||||
# Build and test the binary.
|
||||
build-test:
|
||||
needs:
|
||||
- generate-matrix
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
||||
runs-on: ${{ matrix.architecture.runner }}
|
||||
container: ${{ inputs.os == 'linux' && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
|
||||
steps:
|
||||
- name: Check strategy matrix
|
||||
run: |
|
||||
echo 'Operating system distro name: ${{ matrix.os.distro_name }}'
|
||||
echo 'Operating system distro version: ${{ matrix.os.distro_version }}'
|
||||
echo 'Operating system compiler name: ${{ matrix.os.compiler_name }}'
|
||||
echo 'Operating system compiler version: ${{ matrix.os.compiler_version }}'
|
||||
echo 'Architecture platform: ${{ matrix.architecture.platform }}'
|
||||
echo 'Architecture runner: ${{ toJson(matrix.architecture.runner) }}'
|
||||
echo 'Build type: ${{ matrix.build_type }}'
|
||||
echo 'Build only: ${{ matrix.build_only }}'
|
||||
echo 'CMake arguments: ${{ matrix.cmake_args }}'
|
||||
echo 'CMake target: ${{ matrix.cmake_target }}'
|
||||
echo 'Config name: ${{ matrix.config_name }}'
|
||||
|
||||
- name: Clean workspace (MacOS)
|
||||
if: ${{ inputs.os == 'macos' }}
|
||||
run: |
|
||||
WORKSPACE=${{ github.workspace }}
|
||||
echo "Cleaning workspace '${WORKSPACE}'."
|
||||
if [ -z "${WORKSPACE}" ] || [ "${WORKSPACE}" = "/" ]; then
|
||||
echo "Invalid working directory '${WORKSPACE}'."
|
||||
exit 1
|
||||
fi
|
||||
find "${WORKSPACE}" -depth 1 | xargs rm -rfv
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Prepare runner
|
||||
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
|
||||
with:
|
||||
disable_ccache: false
|
||||
|
||||
- name: Check configuration (Windows)
|
||||
if: ${{ inputs.os == 'windows' }}
|
||||
run: |
|
||||
echo 'Checking environment variables.'
|
||||
set
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking Conan version.'
|
||||
conan --version
|
||||
- name: Check configuration (Linux and MacOS)
|
||||
if: ${{ inputs.os == 'linux' || inputs.os == 'macos' }}
|
||||
run: |
|
||||
echo 'Checking path.'
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
|
||||
echo 'Checking environment variables.'
|
||||
env | sort
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking compiler version.'
|
||||
${{ inputs.os == 'linux' && '${CC}' || 'clang' }} --version
|
||||
|
||||
echo 'Checking Conan version.'
|
||||
conan --version
|
||||
|
||||
echo 'Checking Ninja version.'
|
||||
ninja --version
|
||||
|
||||
echo 'Checking nproc version.'
|
||||
nproc --version
|
||||
|
||||
- name: Set up Conan configuration
|
||||
run: |
|
||||
echo 'Installing configuration.'
|
||||
cat conan/global.conf ${{ inputs.os == 'linux' && '>>' || '>' }} $(conan config home)/global.conf
|
||||
|
||||
echo 'Conan configuration:'
|
||||
conan config show '*'
|
||||
- name: Set up Conan profile
|
||||
run: |
|
||||
echo 'Installing profile.'
|
||||
conan config install conan/profiles/default -tf $(conan config home)/profiles/
|
||||
|
||||
echo 'Conan profile:'
|
||||
conan profile show
|
||||
- name: Set up Conan remote
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
|
||||
conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
|
||||
|
||||
echo 'Listing Conan remotes.'
|
||||
conan remote list
|
||||
|
||||
- name: Build dependencies
|
||||
uses: ./.github/actions/build-deps
|
||||
with:
|
||||
build_dir: ${{ inputs.build_dir }}
|
||||
build_type: ${{ matrix.build_type }}
|
||||
conan_remote_name: ${{ inputs.conan_remote_name }}
|
||||
conan_remote_url: ${{ inputs.conan_remote_url }}
|
||||
conan_remote_username: ${{ secrets.conan_remote_username }}
|
||||
conan_remote_password: ${{ secrets.conan_remote_password }}
|
||||
force_build: ${{ inputs.dependencies_force_build }}
|
||||
force_upload: ${{ inputs.dependencies_force_upload }}
|
||||
- name: Build and test binary
|
||||
uses: ./.github/actions/build-test
|
||||
with:
|
||||
build_dir: ${{ inputs.build_dir }}
|
||||
build_only: ${{ matrix.build_only }}
|
||||
build_type: ${{ matrix.build_type }}
|
||||
cmake_args: ${{ matrix.cmake_args }}
|
||||
cmake_target: ${{ matrix.cmake_target }}
|
||||
codecov_token: ${{ secrets.codecov_token }}
|
||||
os: ${{ inputs.os }}
|
||||
75
.github/workflows/check-format.yml
vendored
Normal file
75
.github/workflows/check-format.yml
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
# This workflow checks if the code is properly formatted.
|
||||
name: Check format
|
||||
|
||||
# This workflow can only be triggered by other workflows.
|
||||
on: workflow_call
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-format
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
pre-commit:
|
||||
runs-on: ubuntu-latest
|
||||
container: ghcr.io/xrplf/ci/tools-rippled-pre-commit
|
||||
steps:
|
||||
# The $GITHUB_WORKSPACE and ${{ github.workspace }} might not point to the
|
||||
# same directory for jobs running in containers. The actions/checkout step
|
||||
# is *supposed* to checkout into $GITHUB_WORKSPACE and then add it to
|
||||
# safe.directory (see instructions at https://github.com/actions/checkout)
|
||||
# but that is apparently not happening for some container images. We
|
||||
# therefore preemptively add both directories to safe.directory. See also
|
||||
# https://github.com/actions/runner/issues/2058 for more details.
|
||||
- name: Configure git safe.directory
|
||||
run: |
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
git config --global --add safe.directory ${{ github.workspace }}
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Check configuration
|
||||
run: |
|
||||
echo 'Checking path.'
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
|
||||
echo 'Checking environment variables.'
|
||||
env | sort
|
||||
|
||||
echo 'Checking pre-commit version.'
|
||||
pre-commit --version
|
||||
|
||||
echo 'Checking clang-format version.'
|
||||
clang-format --version
|
||||
|
||||
echo 'Checking NPM version.'
|
||||
npm --version
|
||||
|
||||
echo 'Checking Node.js version.'
|
||||
node --version
|
||||
|
||||
echo 'Checking prettier version.'
|
||||
prettier --version
|
||||
- name: Format code
|
||||
run: pre-commit run --show-diff-on-failure --color=always --all-files
|
||||
- name: Check for differences
|
||||
env:
|
||||
MESSAGE: |
|
||||
One or more files did not conform to the formatting. Maybe you did
|
||||
not run 'pre-commit' before committing, or your version of
|
||||
'clang-format' or 'prettier' has an incompatibility with the ones
|
||||
used here (see the "Check configuration" step above).
|
||||
|
||||
Run 'pre-commit run --all-files' in your repo, and then commit and
|
||||
push the changes.
|
||||
run: |
|
||||
DIFF=$(git status --porcelain)
|
||||
if [ -n "${DIFF}" ]; then
|
||||
# Print the files that changed to give the contributor a hint about
|
||||
# what to expect when running pre-commit on their own machine.
|
||||
git status
|
||||
echo "${MESSAGE}"
|
||||
exit 1
|
||||
fi
|
||||
@@ -9,14 +9,12 @@ on:
|
||||
inputs:
|
||||
conan_remote_name:
|
||||
description: "The name of the Conan remote to use."
|
||||
required: false
|
||||
required: true
|
||||
type: string
|
||||
default: xrplf
|
||||
conan_remote_url:
|
||||
description: "The URL of the Conan endpoint to use."
|
||||
required: false
|
||||
required: true
|
||||
type: string
|
||||
default: https://conan.ripplex.io
|
||||
secrets:
|
||||
clio_notify_token:
|
||||
description: "The GitHub token to notify Clio about new versions."
|
||||
@@ -40,52 +38,43 @@ jobs:
|
||||
upload:
|
||||
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
runs-on: ubuntu-latest
|
||||
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
|
||||
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Generate outputs
|
||||
id: generate
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
run: |
|
||||
echo 'Generating user and channel.'
|
||||
echo "user=clio" >> "${GITHUB_OUTPUT}"
|
||||
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
|
||||
echo "channel=pr_${{ github.event.pull_request.number }}" >> "${GITHUB_OUTPUT}"
|
||||
echo 'Extracting version.'
|
||||
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
|
||||
- name: Calculate conan reference
|
||||
id: conan_ref
|
||||
- name: Add Conan remote
|
||||
run: |
|
||||
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
|
||||
- name: Set up Conan
|
||||
uses: ./.github/actions/setup-conan
|
||||
with:
|
||||
conan_remote_name: ${{ inputs.conan_remote_name }}
|
||||
conan_remote_url: ${{ inputs.conan_remote_url }}
|
||||
echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
|
||||
conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
|
||||
echo 'Listing Conan remotes.'
|
||||
conan remote list
|
||||
- name: Log into Conan remote
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
|
||||
run: conan remote login ${{ inputs.conan_remote_name }} "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
|
||||
- name: Upload package
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
run: |
|
||||
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
|
||||
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
|
||||
conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}
|
||||
outputs:
|
||||
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}
|
||||
channel: ${{ steps.generate.outputs.channel }}
|
||||
version: ${{ steps.generate.outputs.version }}
|
||||
|
||||
notify:
|
||||
needs: upload
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.clio_notify_token }}
|
||||
steps:
|
||||
- name: Notify Clio
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.clio_notify_token }}
|
||||
PR_URL: ${{ github.event.pull_request.html_url }}
|
||||
run: |
|
||||
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
|
||||
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
|
||||
-F "client_payload[pr_url]=${PR_URL}"
|
||||
-F "client_payload[version]=${{ needs.upload.outputs.version }}@${{ needs.upload.outputs.user }}/${{ needs.upload.outputs.channel }}" \
|
||||
-F "client_payload[pr]=${{ github.event.pull_request.number }}"
|
||||
65
.github/workflows/on-pr.yml
vendored
65
.github/workflows/on-pr.yml
vendored
@@ -23,6 +23,10 @@ defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
env:
|
||||
CONAN_REMOTE_NAME: xrplf
|
||||
CONAN_REMOTE_URL: https://conan.ripplex.io
|
||||
|
||||
jobs:
|
||||
# This job determines whether the rest of the workflow should run. It runs
|
||||
# when the PR is not a draft (which should also cover merge-group) or
|
||||
@@ -50,20 +54,18 @@ jobs:
|
||||
files: |
|
||||
# These paths are unique to `on-pr.yml`.
|
||||
.github/scripts/levelization/**
|
||||
.github/workflows/reusable-check-levelization.yml
|
||||
.github/workflows/reusable-notify-clio.yml
|
||||
.github/workflows/check-format.yml
|
||||
.github/workflows/check-levelization.yml
|
||||
.github/workflows/notify-clio.yml
|
||||
.github/workflows/on-pr.yml
|
||||
.clang-format
|
||||
.pre-commit-config.yaml
|
||||
|
||||
# Keep the paths below in sync with those in `on-trigger.yml`.
|
||||
.github/actions/build-deps/**
|
||||
.github/actions/build-test/**
|
||||
.github/actions/setup-conan/**
|
||||
.github/scripts/strategy-matrix/**
|
||||
.github/workflows/reusable-build.yml
|
||||
.github/workflows/reusable-build-test-config.yml
|
||||
.github/workflows/reusable-build-test.yml
|
||||
.github/workflows/reusable-strategy-matrix.yml
|
||||
.github/workflows/reusable-test.yml
|
||||
.github/workflows/build-test.yml
|
||||
.codecov.yml
|
||||
cmake/**
|
||||
conan/**
|
||||
@@ -73,7 +75,6 @@ jobs:
|
||||
tests/**
|
||||
CMakeLists.txt
|
||||
conanfile.py
|
||||
conan.lock
|
||||
- name: Check whether to run
|
||||
# This step determines whether the rest of the workflow should
|
||||
# run. The rest of the workflow will run if this job runs AND at
|
||||
@@ -93,41 +94,61 @@ jobs:
|
||||
outputs:
|
||||
go: ${{ steps.go.outputs.go == 'true' }}
|
||||
|
||||
check-format:
|
||||
needs: should-run
|
||||
if: needs.should-run.outputs.go == 'true'
|
||||
uses: ./.github/workflows/check-format.yml
|
||||
|
||||
check-levelization:
|
||||
needs: should-run
|
||||
if: ${{ needs.should-run.outputs.go == 'true' }}
|
||||
uses: ./.github/workflows/reusable-check-levelization.yml
|
||||
if: needs.should-run.outputs.go == 'true'
|
||||
uses: ./.github/workflows/check-levelization.yml
|
||||
|
||||
# This job works around the limitation that GitHub Actions does not support
|
||||
# using environment variables as inputs for reusable workflows.
|
||||
generate-outputs:
|
||||
needs: should-run
|
||||
if: needs.should-run.outputs.go == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: No-op
|
||||
run: true
|
||||
outputs:
|
||||
conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
|
||||
conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
|
||||
|
||||
build-test:
|
||||
needs: should-run
|
||||
if: ${{ needs.should-run.outputs.go == 'true' }}
|
||||
uses: ./.github/workflows/reusable-build-test.yml
|
||||
needs: generate-outputs
|
||||
uses: ./.github/workflows/build-test.yml
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [linux, macos, windows]
|
||||
with:
|
||||
conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
|
||||
conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
|
||||
os: ${{ matrix.os }}
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
codecov_token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
notify-clio:
|
||||
needs:
|
||||
- should-run
|
||||
- generate-outputs
|
||||
- build-test
|
||||
if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
|
||||
uses: ./.github/workflows/reusable-notify-clio.yml
|
||||
uses: ./.github/workflows/notify-clio.yml
|
||||
with:
|
||||
conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
|
||||
conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
|
||||
secrets:
|
||||
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
|
||||
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
|
||||
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
|
||||
|
||||
passed:
|
||||
if: failure() || cancelled()
|
||||
needs:
|
||||
- build-test
|
||||
- check-format
|
||||
- check-levelization
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Fail
|
||||
run: false
|
||||
- name: No-op
|
||||
run: true
|
||||
|
||||
84
.github/workflows/on-trigger.yml
vendored
84
.github/workflows/on-trigger.yml
vendored
@@ -9,25 +9,20 @@ name: Trigger
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "develop"
|
||||
- "release*"
|
||||
- "master"
|
||||
- develop
|
||||
- release
|
||||
- master
|
||||
paths:
|
||||
# These paths are unique to `on-trigger.yml`.
|
||||
- ".github/workflows/reusable-check-missing-commits.yml"
|
||||
- ".github/workflows/check-missing-commits.yml"
|
||||
- ".github/workflows/on-trigger.yml"
|
||||
- ".github/workflows/publish-docs.yml"
|
||||
|
||||
# Keep the paths below in sync with those in `on-pr.yml`.
|
||||
- ".github/actions/build-deps/**"
|
||||
- ".github/actions/build-test/**"
|
||||
- ".github/actions/setup-conan/**"
|
||||
- ".github/scripts/strategy-matrix/**"
|
||||
- ".github/workflows/reusable-build.yml"
|
||||
- ".github/workflows/reusable-build-test-config.yml"
|
||||
- ".github/workflows/reusable-build-test.yml"
|
||||
- ".github/workflows/reusable-strategy-matrix.yml"
|
||||
- ".github/workflows/reusable-test.yml"
|
||||
- ".github/workflows/build-test.yml"
|
||||
- ".codecov.yml"
|
||||
- "cmake/**"
|
||||
- "conan/**"
|
||||
@@ -37,7 +32,6 @@ on:
|
||||
- "tests/**"
|
||||
- "CMakeLists.txt"
|
||||
- "conanfile.py"
|
||||
- "conan.lock"
|
||||
|
||||
# Run at 06:32 UTC on every day of the week from Monday through Friday. This
|
||||
# will force all dependencies to be rebuilt, which is useful to verify that
|
||||
@@ -46,35 +40,79 @@ on:
|
||||
schedule:
|
||||
- cron: "32 6 * * 1-5"
|
||||
|
||||
# Run when manually triggered via the GitHub UI or API.
|
||||
# Run when manually triggered via the GitHub UI or API. If `force_upload` is
|
||||
# true, then the dependencies that were missing (`force_rebuild` is false) or
|
||||
# rebuilt (`force_rebuild` is true) will be uploaded, overwriting existing
|
||||
# dependencies if needed.
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
dependencies_force_build:
|
||||
description: "Force building of all dependencies."
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
dependencies_force_upload:
|
||||
description: "Force uploading of all dependencies."
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
concurrency:
|
||||
# When a PR is merged into the develop branch it will be assigned a unique
|
||||
# group identifier, so execution will continue even if another PR is merged
|
||||
# while it is still running. In all other cases the group identifier is shared
|
||||
# per branch, so that any in-progress runs are cancelled when a new commit is
|
||||
# pushed.
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
env:
|
||||
CONAN_REMOTE_NAME: xrplf
|
||||
CONAN_REMOTE_URL: https://conan.ripplex.io
|
||||
|
||||
jobs:
|
||||
check-missing-commits:
|
||||
if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
|
||||
uses: ./.github/workflows/reusable-check-missing-commits.yml
|
||||
uses: ./.github/workflows/check-missing-commits.yml
|
||||
|
||||
# This job works around the limitation that GitHub Actions does not support
|
||||
# using environment variables as inputs for reusable workflows. It also sets
|
||||
# outputs that depend on the event that triggered the workflow.
|
||||
generate-outputs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check inputs and set outputs
|
||||
id: generate
|
||||
run: |
|
||||
if [[ '${{ github.event_name }}' == 'push' ]]; then
|
||||
echo 'dependencies_force_build=false' >> "${GITHUB_OUTPUT}"
|
||||
echo 'dependencies_force_upload=false' >> "${GITHUB_OUTPUT}"
|
||||
elif [[ '${{ github.event_name }}' == 'schedule' ]]; then
|
||||
echo 'dependencies_force_build=true' >> "${GITHUB_OUTPUT}"
|
||||
echo 'dependencies_force_upload=false' >> "${GITHUB_OUTPUT}"
|
||||
else
|
||||
echo 'dependencies_force_build=${{ inputs.dependencies_force_build }}' >> "${GITHUB_OUTPUT}"
|
||||
echo 'dependencies_force_upload=${{ inputs.dependencies_force_upload }}' >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
outputs:
|
||||
conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
|
||||
conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
|
||||
dependencies_force_build: ${{ steps.generate.outputs.dependencies_force_build }}
|
||||
dependencies_force_upload: ${{ steps.generate.outputs.dependencies_force_upload }}
|
||||
|
||||
build-test:
|
||||
uses: ./.github/workflows/reusable-build-test.yml
|
||||
needs: generate-outputs
|
||||
uses: ./.github/workflows/build-test.yml
|
||||
strategy:
|
||||
fail-fast: ${{ github.event_name == 'merge_group' }}
|
||||
matrix:
|
||||
os: [linux, macos, windows]
|
||||
with:
|
||||
conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
|
||||
conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
|
||||
dependencies_force_build: ${{ needs.generate-outputs.outputs.dependencies_force_build == 'true' }}
|
||||
dependencies_force_upload: ${{ needs.generate-outputs.outputs.dependencies_force_upload == 'true' }}
|
||||
os: ${{ matrix.os }}
|
||||
strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
|
||||
strategy_matrix: "all"
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
codecov_token: ${{ secrets.CODECOV_TOKEN }}
|
||||
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
|
||||
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
|
||||
|
||||
15
.github/workflows/pre-commit.yml
vendored
15
.github/workflows/pre-commit.yml
vendored
@@ -1,15 +0,0 @@
|
||||
name: Run pre-commit hooks
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches: [develop, release, master]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
|
||||
run-hooks:
|
||||
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
|
||||
with:
|
||||
runs_on: ubuntu-latest
|
||||
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
|
||||
20
.github/workflows/publish-docs.yml
vendored
20
.github/workflows/publish-docs.yml
vendored
@@ -23,24 +23,16 @@ defaults:

env:
  BUILD_DIR: .build
  NPROC_SUBTRACT: 2

jobs:
  publish:
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
    container: ghcr.io/xrplf/ci/tools-rippled-documentation
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Get number of processors
        uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
        id: nproc
        with:
          subtract: ${{ env.NPROC_SUBTRACT }}

      - name: Check configuration
        run: |
          echo 'Checking path.'
@@ -54,16 +46,12 @@ jobs:

          echo 'Checking Doxygen version.'
          doxygen --version

      - name: Build documentation
        env:
          BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
        run: |
          mkdir -p "${BUILD_DIR}"
          cd "${BUILD_DIR}"
          mkdir -p ${{ env.BUILD_DIR }}
          cd ${{ env.BUILD_DIR }}
          cmake -Donly_docs=ON ..
          cmake --build . --target docs --parallel ${BUILD_NPROC}

          cmake --build . --target docs --parallel $(nproc)
      - name: Publish documentation
        if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
213 .github/workflows/reusable-build-test-config.yml vendored
@@ -1,213 +0,0 @@
name: Build and test configuration

on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: true
        type: string

      build_only:
        description: 'Whether to only build or to build and test the code ("true", "false").'
        required: true
        type: boolean

      build_type:
        description: 'The build type to use ("Debug", "Release").'
        type: string
        required: true

      cmake_args:
        description: "Additional arguments to pass to CMake."
        required: false
        type: string
        default: ""

      cmake_target:
        description: "The CMake target to build."
        type: string
        required: true

      runs_on:
        description: Runner to run the job on as a JSON string
        required: true
        type: string

      image:
        description: "The image to run in (leave empty to run natively)"
        required: true
        type: string

      config_name:
        description: "The configuration string (used for naming artifacts and such)."
        required: true
        type: string

      nproc_subtract:
        description: "The number of processors to subtract when calculating parallelism."
        required: false
        type: number
        default: 2

    secrets:
      CODECOV_TOKEN:
        description: "The Codecov token to use for uploading coverage reports."
        required: true

defaults:
  run:
    shell: bash

jobs:
  build-and-test:
    name: ${{ inputs.config_name }}
    runs-on: ${{ fromJSON(inputs.runs_on) }}
    container: ${{ inputs.image != '' && inputs.image || null }}
    timeout-minutes: 60
    env:
      ENABLED_VOIDSTAR: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
      ENABLED_COVERAGE: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }}
    steps:
      - name: Cleanup workspace
        if: ${{ runner.os == 'macOS' }}
        uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Prepare runner
        uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
        with:
          disable_ccache: false

      - name: Print build environment
        uses: ./.github/actions/print-env

      - name: Get number of processors
        uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
        id: nproc
        with:
          subtract: ${{ inputs.nproc_subtract }}

      - name: Setup Conan
        uses: ./.github/actions/setup-conan

      - name: Build dependencies
        uses: ./.github/actions/build-deps
        with:
          build_dir: ${{ inputs.build_dir }}
          build_nproc: ${{ steps.nproc.outputs.nproc }}
          build_type: ${{ inputs.build_type }}
          # Set the verbosity to "quiet" for Windows to avoid an excessive
          # amount of logs. For other OSes, the "verbose" logs are more useful.
          log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}

      - name: Configure CMake
        working-directory: ${{ inputs.build_dir }}
        env:
          BUILD_TYPE: ${{ inputs.build_type }}
          CMAKE_ARGS: ${{ inputs.cmake_args }}
        run: |
          cmake \
            -G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
            -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
            -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
            ${CMAKE_ARGS} \
            ..

      - name: Build the binary
        working-directory: ${{ inputs.build_dir }}
        env:
          BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
          BUILD_TYPE: ${{ inputs.build_type }}
          CMAKE_TARGET: ${{ inputs.cmake_target }}
        run: |
          cmake \
            --build . \
            --config "${BUILD_TYPE}" \
            --parallel "${BUILD_NPROC}" \
            --target "${CMAKE_TARGET}"

      - name: Upload rippled artifact (Linux)
        if: ${{ runner.os == 'Linux' }}
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        env:
          BUILD_DIR: ${{ inputs.build_dir }}
        with:
          name: rippled-${{ inputs.config_name }}
          path: ${{ env.BUILD_DIR }}/rippled
          retention-days: 3
          if-no-files-found: error

      - name: Check linking (Linux)
        if: ${{ runner.os == 'Linux' }}
        working-directory: ${{ inputs.build_dir }}
        run: |
          ldd ./rippled
          if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
            echo 'The binary is statically linked.'
          else
            echo 'The binary is dynamically linked.'
            exit 1
          fi

      - name: Verify presence of instrumentation (Linux)
        if: ${{ runner.os == 'Linux' && env.ENABLED_VOIDSTAR == 'true' }}
        working-directory: ${{ inputs.build_dir }}
        run: |
          ./rippled --version | grep libvoidstar

      - name: Run the separate tests
        if: ${{ !inputs.build_only }}
        working-directory: ${{ inputs.build_dir }}
        # Windows locks some of the build files while running tests, and parallel jobs can collide
        env:
          BUILD_TYPE: ${{ inputs.build_type }}
          PARALLELISM: ${{ runner.os == 'Windows' && '1' || steps.nproc.outputs.nproc }}
        run: |
          ctest \
            --output-on-failure \
            -C "${BUILD_TYPE}" \
            -j "${PARALLELISM}"

      - name: Run the embedded tests
        if: ${{ !inputs.build_only }}
        working-directory: ${{ runner.os == 'Windows' && format('{0}/{1}', inputs.build_dir, inputs.build_type) || inputs.build_dir }}
        env:
          BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
        run: |
          ./rippled --unittest --unittest-jobs "${BUILD_NPROC}"

      - name: Debug failure (Linux)
        if: ${{ failure() && runner.os == 'Linux' && !inputs.build_only }}
        run: |
          echo "IPv4 local port range:"
          cat /proc/sys/net/ipv4/ip_local_port_range
          echo "Netstat:"
          netstat -an

      - name: Prepare coverage report
        if: ${{ !inputs.build_only && env.ENABLED_COVERAGE == 'true' }}
        working-directory: ${{ inputs.build_dir }}
        env:
          BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
          BUILD_TYPE: ${{ inputs.build_type }}
        run: |
          cmake \
            --build . \
            --config "${BUILD_TYPE}" \
            --parallel "${BUILD_NPROC}" \
            --target coverage

      - name: Upload coverage report
        if: ${{ github.repository_owner == 'XRPLF' && !inputs.build_only && env.ENABLED_COVERAGE == 'true' }}
        uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
        with:
          disable_search: true
          disable_telem: true
          fail_ci_if_error: true
          files: ${{ inputs.build_dir }}/coverage.xml
          plugins: noop
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
58 .github/workflows/reusable-build-test.yml vendored
@@ -1,58 +0,0 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: false
        type: string
        default: ".build"
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: true
        type: string
      strategy_matrix:
        # TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
        description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
        required: false
        type: string
        default: "minimal"
    secrets:
      CODECOV_TOKEN:
        description: "The Codecov token to use for uploading coverage reports."
        required: true

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    uses: ./.github/workflows/reusable-strategy-matrix.yml
    with:
      os: ${{ inputs.os }}
      strategy_matrix: ${{ inputs.strategy_matrix }}

  # Build and test the binary for each configuration.
  build-test-config:
    needs:
      - generate-matrix
    uses: ./.github/workflows/reusable-build-test-config.yml
    strategy:
      fail-fast: ${{ github.event_name == 'merge_group' }}
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
      max-parallel: 10
    with:
      build_dir: ${{ inputs.build_dir }}
      build_only: ${{ matrix.build_only }}
      build_type: ${{ matrix.build_type }}
      cmake_args: ${{ matrix.cmake_args }}
      cmake_target: ${{ matrix.cmake_target }}
      runs_on: ${{ toJSON(matrix.architecture.runner) }}
      image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
      config_name: ${{ matrix.config_name }}
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
45 .github/workflows/reusable-strategy-matrix.yml vendored
@@ -1,45 +0,0 @@
name: Generate strategy matrix

on:
  workflow_call:
    inputs:
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: false
        type: string
      strategy_matrix:
        # TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
        description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
        required: false
        type: string
        default: "minimal"
    outputs:
      matrix:
        description: "The generated strategy matrix."
        value: ${{ jobs.generate-matrix.outputs.matrix }}

defaults:
  run:
    shell: bash

jobs:
  generate-matrix:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.generate.outputs.matrix }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Set up Python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: 3.13

      - name: Generate strategy matrix
        working-directory: .github/scripts/strategy-matrix
        id: generate
        env:
          GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
          GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
        run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
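For a scheduled all-matrix Linux run, the two environment variables above expand so that the step effectively executes the following (a sketch; the flag values follow directly from the expressions in this hunk):

```bash
./generate.py --all --config=linux.json >> "${GITHUB_OUTPUT}"
```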
111 .github/workflows/upload-conan-deps.yml vendored
@@ -1,111 +0,0 @@
name: Upload Conan Dependencies

on:
  schedule:
    - cron: "0 3 * * 2-6"
  workflow_dispatch:
    inputs:
      force_source_build:
        description: "Force source build of all dependencies"
        required: false
        default: false
        type: boolean
      force_upload:
        description: "Force upload of all dependencies"
        required: false
        default: false
        type: boolean
  pull_request:
    branches: [develop]
    paths:
      # This allows testing changes to the upload workflow in a PR
      - .github/workflows/upload-conan-deps.yml
  push:
    branches: [develop]
    paths:
      - .github/workflows/upload-conan-deps.yml
      - .github/workflows/reusable-strategy-matrix.yml
      - .github/actions/build-deps/action.yml
      - .github/actions/setup-conan/action.yml
      - ".github/scripts/strategy-matrix/**"
      - conanfile.py
      - conan.lock

env:
  CONAN_REMOTE_NAME: xrplf
  CONAN_REMOTE_URL: https://conan.ripplex.io
  NPROC_SUBTRACT: 2

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    uses: ./.github/workflows/reusable-strategy-matrix.yml
    with:
      strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}

  # Build and upload the dependencies for each configuration.
  run-upload-conan-deps:
    needs:
      - generate-matrix
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
      max-parallel: 10
    runs-on: ${{ matrix.architecture.runner }}
    container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
    steps:
      - name: Cleanup workspace
        if: ${{ runner.os == 'macOS' }}
        uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Prepare runner
        uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
        with:
          disable_ccache: false

      - name: Print build environment
        uses: ./.github/actions/print-env

      - name: Get number of processors
        uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
        id: nproc
        with:
          subtract: ${{ env.NPROC_SUBTRACT }}

      - name: Setup Conan
        uses: ./.github/actions/setup-conan
        with:
          conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
          conan_remote_url: ${{ env.CONAN_REMOTE_URL }}

      - name: Build dependencies
        uses: ./.github/actions/build-deps
        with:
          build_dir: .build
          build_nproc: ${{ steps.nproc.outputs.nproc }}
          build_type: ${{ matrix.build_type }}
          force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
          # Set the verbosity to "quiet" for Windows to avoid an excessive
          # amount of logs. For other OSes, the "verbose" logs are more useful.
          log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}

      - name: Log into Conan remote
        if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
        run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"

      - name: Upload Conan packages
        if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
        env:
          FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
        run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}
@@ -1,5 +1,18 @@
# To run pre-commit hooks, first install pre-commit:
# - `pip install pre-commit==${PRE_COMMIT_VERSION}`
# - `pip install pre-commit-hooks==${PRE_COMMIT_HOOKS_VERSION}`
#
# Depending on your system, you can use `brew install` or `apt install` as well
# for installing the pre-commit package, but `pip` is needed to install the
# hooks; you can also use `pipx` if you prefer.
# Next, install the required formatters:
# - `pip install clang-format==${CLANG_VERSION}`
# - `npm install prettier@${PRETTIER_VERSION}`
#
# See https://github.com/XRPLF/ci/blob/main/.github/workflows/tools-rippled.yml
# for the versions used in the CI pipeline. You will need to have the exact same
# versions of the tools installed on your system to produce the same results as
# the pipeline.
#
# Then, run the following command to install the git hook scripts:
# - `pre-commit install`
@@ -7,29 +20,42 @@
# - `pre-commit run --all-files`
# To manually run a specific hook, use:
# - `pre-commit run <hook_id> --all-files`
# To run the hooks against only the staged files, use:
# To run the hooks against only the files changed in the current commit, use:
# - `pre-commit run`
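For instance, with the hooks defined below, running only the clang-format hook over the whole tree would be (a usage sketch of the commands documented above):

```bash
pre-commit run clang-format --all-files
```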
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: mixed-line-ending
      - id: check-merge-conflict
        args: [--assume-in-merge]

  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
  - repo: local
    hooks:
      - id: clang-format
        args: [--style=file]
        "types_or": [c++, c, proto]

      - repo: https://github.com/rbubley/mirrors-prettier
        rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
        name: clang-format
        language: system
        entry: clang-format -i
        files: '\.(cpp|hpp|h|ipp|proto)$'
      - id: trailing-whitespace
        name: trailing-whitespace
        entry: trailing-whitespace-fixer
        language: system
        types: [text]
      - id: end-of-file
        name: end-of-file
        entry: end-of-file-fixer
        language: system
        types: [text]
      - id: mixed-line-ending
        name: mixed-line-ending
        entry: mixed-line-ending
        language: system
        types: [text]
      - id: check-merge-conflict
        name: check-merge-conflict
        entry: check-merge-conflict --assume-in-merge
        language: system
        types: [text]
  - repo: local
    hooks:
      - id: prettier
        name: prettier
        language: system
        entry: prettier --ignore-unknown --write

exclude: |
  (?x)^(
62 BUILD.md
@@ -39,12 +39,17 @@ found here](./docs/build/environment.md).

- [Python 3.11](https://www.python.org/downloads/), or higher
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
- [CMake 3.22](https://cmake.org/download/), or higher
- [CMake 3.22](https://cmake.org/download/)[^2], or higher

[^1]:
    It is possible to build with Conan 1.60+, but the instructions are
    significantly different, which is why we do not recommend it.

[^2]:
    CMake 4 is not yet supported by all dependencies required by this project.
    If you are affected by this issue, follow the [Conan workaround for CMake
    4](#workaround-for-cmake-4).

`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

@@ -127,7 +132,7 @@ higher index than the default Conan Center remote, so it is consulted first. You
can do this by running:

```bash
conan remote add --index 0 xrplf https://conan.ripplex.io
conan remote add --index 0 xrplf "https://conan.ripplex.io"
```
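To confirm the ordering, listing the remotes should show `xrplf` ahead of Conan Center (a quick check, assuming Conan 2):

```bash
conan remote list
```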

Alternatively, you can pull the patched recipes into the repository and use them
@@ -277,6 +282,21 @@ sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
```

#### Workaround for CMake 4

If your system CMake is version 4 rather than 3, you may have to configure the
Conan profile to use CMake version 3 for dependencies, by adding the following
two lines to your profile:

```text
[tool_requires]
!cmake/*: cmake/[>=3 <4]
```

This will force Conan to download and use a locally cached CMake 3 version, and
is needed because some of the dependencies used by this project do not support
CMake 4.
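A sketch of applying this non-interactively, assuming the default profile at the location reported by `conan config home`:

```bash
cat >> "$(conan config home)/profiles/default" <<'EOF'

[tool_requires]
!cmake/*: cmake/[>=3 <4]
EOF
```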

#### Clang workaround for grpc

If your compiler is clang, version 19 or later, or apple-clang, version 17 or
@@ -459,24 +479,12 @@ It is implicitly used when running `conan` commands, you don't need to specify it.

You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.

> [!NOTE]
> Conan uses the local cache by default when creating a lockfile.
>
> To ensure that lockfile creation works the same way on all developer machines, you should clear the local cache before creating a new lockfile.

To create a new lockfile, run the following commands in the repository root:
To do that, run the following command in the repository root:

```bash
conan remove '*' --confirm
rm conan.lock
# This ensures that the xrplf remote is the first to be consulted
conan remote add --force --index 0 xrplf https://conan.ripplex.io
conan lock create . -o '&:jemalloc=True' -o '&:rocksdb=True'
```

> [!NOTE]
> If some dependencies are exclusive to some OS, you may need to re-run the last command for them, adding `--profile:all <PROFILE>`, as in the sketch below.
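For example, a hypothetical Windows-only dependency set could be locked as follows (the profile name `windows` is illustrative, not prescribed by this document):

```bash
conan lock create . -o '&:jemalloc=True' -o '&:rocksdb=True' --profile:all windows
```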

## Coverage report

The coverage report is intended for developers using compilers GCC
@@ -495,18 +503,18 @@ A coverage report is created when the following steps are completed, in order:

1. `rippled` binary built with instrumentation data, enabled by the `coverage`
   option mentioned above
2. completed one or more runs of the unit tests, which populate coverage capture data
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
   to assemble both instrumentation data and the coverage capture data into a coverage report

The last step of the above is automated into a single target `coverage`. The instrumented
The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture data). Since the `rippled` binary is simply a dependency of the
coverage report target, it is possible to re-run the `coverage` target without
rebuilding the `rippled` binary. Note that running the unit tests before the `coverage`
target is left to the developer. Each such run will append to the coverage data
collected in the build directory.
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`.

The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
@@ -515,6 +523,11 @@ to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.

By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.

Example use with some cmake variables set:

```
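# A minimal sketch, assuming the cmake variables described above; the values
# here are illustrative rather than a definitive configuration.
cmake -Dcoverage=ON -Dcoverage_format=html-details -Dcoverage_test_parallelism=2 ..
cmake --build . --target coverage
```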
@@ -573,11 +586,6 @@ After any updates or changes to dependencies, you may need to do the following:

4. [Regenerate lockfile](#conan-lockfile).
5. Re-run [conan install](#build-and-test).

#### ERROR: Package not resolved

If you're seeing an error like `ERROR: Package 'snappy/1.1.10' not resolved: Unable to find 'snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246' in remotes.`,
please add the `xrplf` remote or re-run `conan export` for the [patched recipes](#patched-recipes).
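As a sketch, re-adding the remote uses the same command shown in the lockfile section above:

```bash
conan remote add --force --index 0 xrplf https://conan.ripplex.io
```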

### `protobuf/port_def.inc` file not found

If `cmake --build .` results in an error due to a missing protobuf file, then
22 README.md
@@ -6,7 +6,7 @@ The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powe

## XRP

[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free crypto-asset native to the XRP Ledger, and is designed as a gas token for network services and to bridge different currencies. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.

## rippled

@@ -23,19 +23,19 @@ If you are interested in running an **API Server** (including a **Full History S

- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction fees.
- **[Responsible Software Governance][]:** A team of full-time developers at Ripple & other organizations maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
- **[Modern Features][]:** Features like Escrow, Checks, and Payment Channels support financial applications atop of the XRP Ledger. This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.

[Censorship-Resistant Transaction Processing]: https://xrpl.org/transaction-censorship-detection.html#transaction-censorship-detection
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/consensus-research.html#consensus-research
[Finite XRP Supply]: https://xrpl.org/what-is-xrp.html
[Responsible Software Governance]: https://xrpl.org/contribute-code.html#contribute-code-to-the-xrp-ledger
[Secure, Adaptable Cryptography]: https://xrpl.org/cryptographic-keys.html#cryptographic-keys
[Modern Features]: https://xrpl.org/use-specialized-payment-types.html
[On-Ledger Decentralized Exchange]: https://xrpl.org/decentralized-exchange.html#decentralized-exchange
[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange

## Source Code
@@ -1,3 +1,21 @@
macro(group_sources_in source_dir curdir)
  file(GLOB children RELATIVE ${source_dir}/${curdir}
    ${source_dir}/${curdir}/*)
  foreach (child ${children})
    if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
      group_sources_in(${source_dir} ${curdir}/${child})
    else()
      string(REPLACE "/" "\\" groupname ${curdir})
      source_group(${groupname} FILES
        ${source_dir}/${curdir}/${child})
    endif()
  endforeach()
endmacro()

macro(group_sources curdir)
  group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
endmacro()

macro (exclude_from_default target_)
  set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
  set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
@@ -104,14 +104,6 @@
# 2025-08-28, Bronek Kozicki
# - fix "At least one COMMAND must be given" CMake warning from policy CMP0175
#
# 2025-09-03, Jingchen Wu
# - remove the unused functions append_coverage_compiler_flags and append_coverage_compiler_flags_to_target
# - add a new function add_code_coverage_to_target
# - remove some unused code
#
# 2025-11-11, Bronek Kozicki
# - make EXECUTABLE and EXECUTABLE_ARGS optional
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
@@ -120,8 +112,10 @@
#    using a CMake option() to enable it just optionally):
#      include(CodeCoverage)
#
# 3. Append necessary compiler flags and linker flags for all supported source files:
#      add_code_coverage_to_target(<target> <PRIVATE|PUBLIC|INTERFACE>)
# 3. Append necessary compiler flags for all supported source files:
#      append_coverage_compiler_flags()
#    Or for specific target:
#      append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
@@ -210,69 +204,67 @@ endforeach()

set(COVERAGE_COMPILER_FLAGS "-g --coverage"
    CACHE INTERNAL "")

set(COVERAGE_CXX_COMPILER_FLAGS "")
set(COVERAGE_C_COMPILER_FLAGS "")
set(COVERAGE_CXX_LINKER_FLAGS "")
set(COVERAGE_C_LINKER_FLAGS "")

if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
  include(CheckCXXCompilerFlag)
  include(CheckCCompilerFlag)
  include(CheckLinkerFlag)

  set(COVERAGE_CXX_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
  set(COVERAGE_C_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
  set(COVERAGE_CXX_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
  set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})

  check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
  if(HAVE_cxx_fprofile_abs_path)
    set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path")
    set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
  endif()

  check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
  if(HAVE_c_fprofile_abs_path)
    set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path")
  endif()

  check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path)
  if(HAVE_cxx_linker_fprofile_abs_path)
    set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path")
  endif()

  check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path)
  if(HAVE_c_linker_fprofile_abs_path)
    set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path")
    set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
  endif()

  check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
  if(HAVE_cxx_fprofile_update)
    set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic")
    set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
  endif()

  check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
  if(HAVE_c_fprofile_update)
    set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic")
    set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
  endif()

  check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update)
  if(HAVE_cxx_linker_fprofile_update)
    set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic")
  endif()

  check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update)
  if(HAVE_c_linker_fprofile_update)
    set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic")
  endif()

endif()

set(CMAKE_Fortran_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the Fortran compiler during coverage builds."
    FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C++ compiler during coverage builds."
    FORCE )
set(CMAKE_C_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C compiler during coverage builds."
    FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used for linking binaries during coverage builds."
    FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used by the shared libraries linker during coverage builds."
    FORCE )
mark_as_advanced(
    CMAKE_Fortran_FLAGS_COVERAGE
    CMAKE_CXX_FLAGS_COVERAGE
    CMAKE_C_FLAGS_COVERAGE
    CMAKE_EXE_LINKER_FLAGS_COVERAGE
    CMAKE_SHARED_LINKER_FLAGS_COVERAGE )

get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
  message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)

if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
  link_libraries(gcov)
endif()

# Defines a target for running and collecting code coverage information.
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
@@ -320,10 +312,6 @@ function(setup_target_for_coverage_gcovr)
    set(Coverage_FORMAT xml)
  endif()

  if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS)
    message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set")
  endif()

  if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
    message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
  else()
@@ -405,18 +393,17 @@ function(setup_target_for_coverage_gcovr)
  endforeach()

  # Set up commands which will be run to generate coverage data
  # If EXECUTABLE is not set, the user is expected to run the tests manually
  # before running the coverage target NAME
  if(DEFINED Coverage_EXECUTABLE)
    set(GCOVR_EXEC_TESTS_CMD
      ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
    )
  endif()
  # Run tests
  set(GCOVR_EXEC_TESTS_CMD
    ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
  )

  # Create folder
  if(DEFINED GCOVR_CREATE_FOLDER)
    set(GCOVR_FOLDER_CMD
      ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
  else()
    set(GCOVR_FOLDER_CMD echo) # dummy
  endif()

  # Running gcovr
@@ -433,13 +420,11 @@ function(setup_target_for_coverage_gcovr)
  if(CODE_COVERAGE_VERBOSE)
    message(STATUS "Executed command report")

    if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "")
      message(STATUS "Command to run tests: ")
      string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
      message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
    endif()
    message(STATUS "Command to run tests: ")
    string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
    message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")

    if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "")
    if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
      message(STATUS "Command to create a folder: ")
      string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
      message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
@@ -469,19 +454,18 @@ function(setup_target_for_coverage_gcovr)
  )
endfunction() # setup_target_for_coverage_gcovr

function(add_code_coverage_to_target name scope)
  separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}")
  separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}")
  separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}")
  separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")
function(append_coverage_compiler_flags)
  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction() # append_coverage_compiler_flags

  # Add compiler options to the target
  target_compile_options(${name} ${scope}
    $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
    $<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)

  target_link_libraries (${name} ${scope}
    $<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} gcov>
    $<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS} gcov>
  )
endfunction() # add_code_coverage_to_target
# Setup coverage for specific library
function(append_coverage_compiler_flags_to_target name)
  separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
  target_compile_options(${name} PRIVATE ${_flag_list})
  if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
    target_link_libraries(${name} PRIVATE gcov)
  endif()
endfunction()
@@ -12,7 +12,7 @@ if (static OR MSVC)
else ()
  set (Boost_USE_STATIC_RUNTIME OFF)
endif ()
find_dependency (Boost
find_dependency (Boost 1.70
  COMPONENTS
    chrono
    container
@@ -52,3 +52,5 @@ if (TARGET ZLIB::ZLIB)
  set_target_properties(OpenSSL::Crypto PROPERTIES
    INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif ()

include ("${CMAKE_CURRENT_LIST_DIR}/RippleTargets.cmake")
@@ -16,13 +16,16 @@ set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions (common
  INTERFACE
    $<$<CONFIG:Debug>:DEBUG _DEBUG>
    $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
    # ^^^^ NOTE: CMAKE release builds already have NDEBUG
    # defined, so no need to add it explicitly except for
    # this special case of (profile ON) and (assert OFF)
    # -- presumably this is because we don't want profile
    # builds asserting unless asserts were specifically
    # requested
    #[===[
      NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
      explicitly except for the special case of (profile ON) and (assert OFF).
      Presumably this is because we don't want profile builds asserting unless
      asserts were specifically requested.
    ]===]
    $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
    # TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
    OPENSSL_SUPPRESS_DEPRECATED
)

if (MSVC)
  # remove existing exception flag since we set it to -EHa
@@ -72,7 +72,10 @@ include(target_link_modules)

# Level 01
add_module(xrpl beast)
target_link_libraries(xrpl.libxrpl.beast PUBLIC xrpl.imports.main)
target_link_libraries(xrpl.libxrpl.beast PUBLIC
  xrpl.imports.main
  xrpl.libpb
)

# Level 02
add_module(xrpl basics)
@@ -108,12 +111,6 @@ target_link_libraries(xrpl.libxrpl.net PUBLIC
add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)

add_module(xrpl ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
  xrpl.libxrpl.basics
  xrpl.libxrpl.json
  xrpl.libxrpl.protocol
)

add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
@@ -134,7 +131,6 @@ target_link_modules(xrpl PUBLIC
  resource
  server
  net
  ledger
)

# All headers in libxrpl are in modules.
@@ -11,9 +11,6 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
  return()
endif()

include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT)

include(CodeCoverage)

# The instructions for these commands come from the `CodeCoverage` module,
@@ -29,13 +26,13 @@ list(APPEND GCOVR_ADDITIONAL_ARGS
  --exclude-throw-branches
  --exclude-noncode-lines
  --exclude-unreachable-branches -s
  -j ${PROCESSOR_COUNT})
  -j ${coverage_test_parallelism})

setup_target_for_coverage_gcovr(
  NAME coverage
  FORMAT ${coverage_format}
  EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
  DEPENDENCIES rippled xrpl.tests
  EXECUTABLE rippled
  EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
  EXCLUDE "src/test" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
  DEPENDENCIES rippled
)

add_code_coverage_to_target(opts INTERFACE)
@@ -18,7 +18,6 @@ install (
    xrpl.libxrpl.json
    xrpl.libxrpl.protocol
    xrpl.libxrpl.resource
    xrpl.libxrpl.ledger
    xrpl.libxrpl.server
    xrpl.libxrpl.net
    xrpl.libxrpl
@@ -28,11 +28,15 @@ target_compile_options (opts
    $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
    $<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
    $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
    $<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
    $<$<BOOL:${profile}>:-pg>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

target_link_libraries (opts
  INTERFACE
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
    $<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
    $<$<BOOL:${profile}>:-pg>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
@@ -1,5 +1,5 @@
#[===================================================================[
  sanity checks
  convenience variables and sanity checks
#]===================================================================]

get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
@@ -16,19 +16,39 @@ if (NOT is_multiconfig)
  endif ()
endif ()

get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
  set (is_root_project OFF)
else ()
  set (is_root_project ON)
endif ()

if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
  set (is_clang TRUE)
  if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
      CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
    message (FATAL_ERROR "This project requires clang 16 or later")
      CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
    message (FATAL_ERROR "This project requires clang 8 or later")
  endif ()
  # TODO min AppleClang version check ?
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
  set (is_gcc TRUE)
  if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
    message (FATAL_ERROR "This project requires GCC 12 or later")
  if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
    message (FATAL_ERROR "This project requires GCC 8 or later")
  endif ()
endif ()

if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
  set (is_linux TRUE)
else ()
  set (is_linux FALSE)
endif ()

if ("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
  set (is_ci TRUE)
else ()
  set (is_ci FALSE)
endif ()

# check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
  message (FATAL_ERROR "Builds (in-source) are not allowed in "
@@ -1,25 +1,10 @@
#[===================================================================[
  declare options and variables
  declare user options/settings
#]===================================================================]

if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
  set (is_linux TRUE)
else()
  set(is_linux FALSE)
endif()
include(ProcessorCount)

if("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
  set(is_ci TRUE)
else()
  set(is_ci FALSE)
endif()

get_directory_property(has_parent PARENT_DIRECTORY)
if(has_parent)
  set(is_root_project OFF)
else()
  set(is_root_project ON)
endif()
ProcessorCount(PROCESSOR_COUNT)

option(assert "Enables asserts, even in release builds" OFF)

@@ -40,28 +25,29 @@ if(unity)
  endif()
  set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif()

if(is_clang AND is_linux)
  option(voidstar "Enable Antithesis instrumentation." OFF)
endif()

if(is_gcc OR is_clang)
  include(ProcessorCount)
  ProcessorCount(PROCESSOR_COUNT)

  option(coverage "Generates coverage info." OFF)
  option(profile "Add profiling flags" OFF)
  set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
    "Unit tests parallelism for the purpose of coverage report.")
  set(coverage_format "html-details" CACHE STRING
    "Output format of the coverage report.")
  set(coverage_extra_args "" CACHE STRING
    "Additional arguments to pass to gcovr.")
  set(coverage_test "" CACHE STRING
    "On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
  if(coverage_test AND NOT coverage)
    set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
  endif()
  option(wextra "compile with extra gcc/clang warnings enabled" ON)
else()
  set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
  set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
  set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif()

if(is_linux)
  option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
  option(static "link protobuf, openssl, libc++, and boost statically" ON)
@@ -78,13 +64,11 @@ else()
  set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
  set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()

if(is_clang)
  option(use_lld "enables detection of lld linker" ON)
else()
  set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()

option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
@@ -118,26 +102,38 @@ if(san)
    message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
  endif()
endif()
set(container_label "" CACHE STRING "tag to use for package building containers")
option(packages_only
  "ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option(have_package_container
  "Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)

# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
  "Prevents unit test definitions from being inserted into global table"
  OFF)
option(single_io_service_thread
  "Restricts the number of threads calling io_service::run to one. \
  "Restricts the number of threads calling io_context::run to one. \
This can be useful when debugging."
  OFF)
option(boost_show_deprecated
  "Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
  OFF)
option(beast_hashers
  "Use local implementations for sha/ripemd hashes (experimental, not recommended)"
  OFF)

if(WIN32)
  option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
  set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()

if(coverage)
  message(STATUS "coverage build requested - forcing Debug build")
  set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
@@ -24,6 +24,7 @@ target_link_libraries(ripple_boost
    Boost::date_time
    Boost::filesystem
    Boost::json
    Boost::process
    Boost::program_options
    Boost::regex
    Boost::system
@@ -7,7 +7,7 @@ function(xrpl_add_test name)
    "${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
    "${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
  )
  add_executable(${target} ${ARGN} ${sources})
  add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})

  isolate_headers(
    ${target}
@@ -22,4 +22,20 @@ function(xrpl_add_test name)
    UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed

  add_test(NAME ${target} COMMAND ${target})
  set_tests_properties(
    ${target} PROPERTIES
    FIXTURES_REQUIRED ${target}_fixture
  )

  add_test(
    NAME ${target}.build
    COMMAND
      ${CMAKE_COMMAND}
      --build ${CMAKE_BINARY_DIR}
      --config $<CONFIG>
      --target ${target}
  )
  set_tests_properties(${target}.build PROPERTIES
    FIXTURES_SETUP ${target}_fixture
  )
endfunction()
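With the fixture wiring above, running a single test through `ctest` triggers its `.build` fixture first, so the excluded-from-all executable is compiled on demand. A hypothetical invocation (the test name here is illustrative, not taken from this diff):

```bash
# The matching <name>.build fixture runs before the test itself.
ctest -R 'xrpl.test.basics' --output-on-failure
```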
@@ -9,7 +9,7 @@
  "rocksdb/10.0.1#85537f46e538974d67da0c3977de48ac%1756234304.347",
  "re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
  "protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
  "openssl/1.1.1w#a8f0792d7c5121b954578a7149d23e03%1756223730.729",
  "openssl/3.5.2#0c5a5e15ae569f45dff57adcf1770cf7%1756234259.61",
  "nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
  "lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
  "libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
@@ -21,7 +21,7 @@
  "date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
  "c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
  "bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1756234261.716",
  "boost/1.83.0#5d975011d65b51abb2d2f6eb8386b368%1754325043.336",
  "boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1756223752.326",
  "abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1756234220.907"
],
"build_requires": [
@@ -46,7 +46,7 @@
  "lz4/1.10.0"
],
"boost/1.83.0": [
  "boost/1.83.0"
  "boost/1.88.0"
],
"sqlite3/3.44.2": [
  "sqlite3/3.49.1"
@@ -1,5 +1,9 @@
# Global configuration for Conan. This is used to set the number of parallel
# downloads and uploads.
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
core:non_interactive=True
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose

@@ -21,14 +21,14 @@ compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}

[conf]
{% if compiler == "clang" and compiler_version >= 19 %}
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "apple-clang" and compiler_version >= 17 %}
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "clang" and compiler_version == 16 %}
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags+=['-Wno-restrict']
tools.build:cxxflags=['-Wno-restrict']
{% endif %}

[tool_requires]
!cmake/*: cmake/[>=3 <4]

@@ -27,7 +27,7 @@ class Xrpl(ConanFile):
'grpc/1.50.1',
'libarchive/3.8.1',
'nudb/2.0.9',
'openssl/1.1.1w',
'openssl/3.5.2',
'soci/4.0.3',
'zlib/1.3.1',
]
@@ -100,11 +100,13 @@ class Xrpl(ConanFile):
def configure(self):
if self.settings.compiler == 'apple-clang':
self.options['boost'].visibility = 'global'
if self.settings.compiler in ['clang', 'gcc']:
self.options['boost'].without_cobalt = True

def requirements(self):
# Conan 2 requires transitive headers to be specified
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
self.requires('boost/1.83.0', force=True, **transitive_headers_opt)
self.requires('boost/1.88.0', force=True, **transitive_headers_opt)
self.requires('date/3.0.4', **transitive_headers_opt)
self.requires('lz4/1.10.0', force=True)
self.requires('protobuf/3.21.12', force=True)
@@ -175,6 +177,7 @@ class Xrpl(ConanFile):
'boost::filesystem',
'boost::json',
'boost::program_options',
'boost::process',
'boost::regex',
'boost::system',
'boost::thread',

@@ -654,14 +654,12 @@ SharedWeakUnion<T>::convertToWeak()
break;
case destroy:
// We just added a weak ref. How could we destroy?
// LCOV_EXCL_START
UNREACHABLE(
"ripple::SharedWeakUnion::convertToWeak : destroying freshly "
"added ref");
delete p;
unsafeSetRawPtr(nullptr);
return true;  // Should never happen
// LCOV_EXCL_STOP
case partialDestroy:
// This is a weird case. We just converted the last strong
// pointer to a weak pointer.

@@ -150,24 +150,6 @@ public:
return (mantissa_ < 0) ? -1 : (mantissa_ ? 1 : 0);
}

Number
truncate() const noexcept
{
if (exponent_ >= 0 || mantissa_ == 0)
return *this;

Number ret = *this;
while (ret.exponent_ < 0 && ret.mantissa_ != 0)
{
ret.exponent_ += 1;
ret.mantissa_ /= rep(10);
}
// We are guaranteed that normalize() will never throw an exception
// because exponent is either negative or zero at this point.
ret.normalize();
return ret;
}

friend constexpr bool
operator>(Number const& x, Number const& y) noexcept
{

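For reference, a minimal sketch of the digit-dropping loop removed above, on a bare mantissa/exponent pair (the Fixed type and field names here are hypothetical, not the repo's Number internals): repeatedly divide the mantissa by 10 until the exponent reaches zero, truncating toward zero.

#include <cstdint>

struct Fixed
{
    std::int64_t mantissa;
    int exponent;  // value == mantissa * 10^exponent
};

inline Fixed
truncate(Fixed x)
{
    while (x.exponent < 0 && x.mantissa != 0)
    {
        x.mantissa /= 10;  // drop one fractional digit
        ++x.exponent;
    }
    return x;
}
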
@@ -23,7 +23,7 @@
#include <xrpl/basics/Resolver.h>
#include <xrpl/beast/utility/Journal.h>

#include <boost/asio/io_service.hpp>
#include <boost/asio/io_context.hpp>

namespace ripple {

@@ -33,7 +33,7 @@ public:
explicit ResolverAsio() = default;

static std::unique_ptr<ResolverAsio>
New(boost::asio::io_service&, beast::Journal);
New(boost::asio::io_context&, beast::Journal);
};

} // namespace ripple

@@ -636,10 +636,7 @@ template <std::size_t Bits, class Tag>
inline std::string
to_short_string(base_uint<Bits, Tag> const& a)
{
static_assert(
base_uint<Bits, Tag>::bytes > 4,
"For 4 bytes or less, use a native type");
return strHex(a.cbegin(), a.cbegin() + 4) + "...";
return to_string(a).substr(0, 8) + "...";
}

template <std::size_t Bits, class Tag>

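Both forms above abbreviate to eight hex digits (four bytes) plus an ellipsis; the new one simply reuses the full string rendering instead of hashing out a prefix itself. A sketch of the surviving behavior, where full_hex stands in for to_string(a):

#include <string>

inline std::string
short_form(std::string const& full_hex)
{
    // Equivalent of to_string(a).substr(0, 8) + "...": the first 8 hex
    // digits followed by an ellipsis.
    return full_hex.substr(0, 8) + "...";
}
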
@@ -23,7 +23,8 @@
#include <xrpl/beast/utility/instrumentation.h>

#include <boost/asio/basic_waitable_timer.hpp>
#include <boost/asio/io_service.hpp>
#include <boost/asio/io_context.hpp>
#include <boost/asio/post.hpp>

#include <chrono>
#include <condition_variable>
@@ -32,7 +33,7 @@

namespace beast {

/** Measures handler latency on an io_service queue. */
/** Measures handler latency on an io_context queue. */
template <class Clock>
class io_latency_probe
{
@@ -44,12 +45,12 @@ private:
std::condition_variable_any m_cond;
std::size_t m_count;
duration const m_period;
boost::asio::io_service& m_ios;
boost::asio::io_context& m_ios;
boost::asio::basic_waitable_timer<std::chrono::steady_clock> m_timer;
bool m_cancel;

public:
io_latency_probe(duration const& period, boost::asio::io_service& ios)
io_latency_probe(duration const& period, boost::asio::io_context& ios)
: m_count(1)
, m_period(period)
, m_ios(ios)
@@ -64,16 +65,16 @@ public:
cancel(lock, true);
}

/** Return the io_service associated with the latency probe. */
/** Return the io_context associated with the latency probe. */
/** @{ */
boost::asio::io_service&
get_io_service()
boost::asio::io_context&
get_io_context()
{
return m_ios;
}

boost::asio::io_service const&
get_io_service() const
boost::asio::io_context const&
get_io_context() const
{
return m_ios;
}
@@ -109,8 +110,10 @@ public:
std::lock_guard lock(m_mutex);
if (m_cancel)
throw std::logic_error("io_latency_probe is canceled");
m_ios.post(sample_op<Handler>(
std::forward<Handler>(handler), Clock::now(), false, this));
boost::asio::post(
m_ios,
sample_op<Handler>(
std::forward<Handler>(handler), Clock::now(), false, this));
}

/** Initiate continuous i/o latency sampling.
@@ -124,8 +127,10 @@ public:
std::lock_guard lock(m_mutex);
if (m_cancel)
throw std::logic_error("io_latency_probe is canceled");
m_ios.post(sample_op<Handler>(
std::forward<Handler>(handler), Clock::now(), true, this));
boost::asio::post(
m_ios,
sample_op<Handler>(
std::forward<Handler>(handler), Clock::now(), true, this));
}

private:
@@ -236,12 +241,13 @@ private:
// The latency is too high to maintain the desired
// period so don't bother with a timer.
//
m_probe->m_ios.post(
boost::asio::post(
m_probe->m_ios,
sample_op<Handler>(m_handler, now, m_repeat, m_probe));
}
else
{
m_probe->m_timer.expires_from_now(when - now);
m_probe->m_timer.expires_after(when - now);
m_probe->m_timer.async_wait(
sample_op<Handler>(m_handler, now, m_repeat, m_probe));
}
@@ -254,7 +260,8 @@ private:
if (!m_probe)
return;
typename Clock::time_point const now(Clock::now());
m_probe->m_ios.post(
boost::asio::post(
m_probe->m_ios,
sample_op<Handler>(m_handler, now, m_repeat, m_probe));
}
};

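A minimal sketch of the io_service to io_context migration pattern applied in this hunk, assuming Boost.Asio 1.66 or later (the Probe type and tick function are hypothetical): the member post() becomes the free function boost::asio::post, and expires_from_now becomes expires_after.

#include <boost/asio/basic_waitable_timer.hpp>
#include <boost/asio/io_context.hpp>
#include <boost/asio/post.hpp>
#include <chrono>

struct Probe
{
    boost::asio::io_context& ios_;
    boost::asio::basic_waitable_timer<std::chrono::steady_clock> timer_;

    explicit Probe(boost::asio::io_context& ios) : ios_(ios), timer_(ios)
    {
    }

    void
    tick()
    {
        // Old: ios_.post(handler); the member was removed in newer Asio.
        // New: the free function keeps the same run-loop semantics.
        boost::asio::post(ios_, [] { /* sample the clock here */ });

        // Old: timer_.expires_from_now(d); New: expires_after(d).
        timer_.expires_after(std::chrono::milliseconds(100));
        timer_.async_wait([](boost::system::error_code const&) {});
    }
};
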
@@ -94,11 +94,7 @@ hash_append(Hasher& h, beast::IP::Address const& addr) noexcept
else if (addr.is_v6())
hash_append(h, addr.to_v6().to_bytes());
else
{
// LCOV_EXCL_START
UNREACHABLE("beast::hash_append : invalid address type");
// LCOV_EXCL_STOP
}
}
} // namespace beast

|
||||
#ifndef BEAST_TEST_YIELD_TO_HPP
|
||||
#define BEAST_TEST_YIELD_TO_HPP
|
||||
|
||||
#include <boost/asio/io_service.hpp>
|
||||
#include <boost/asio/executor_work_guard.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/optional.hpp>
|
||||
#include <boost/thread/csbl/memory/allocator_arg.hpp>
|
||||
|
||||
#include <condition_variable>
|
||||
#include <mutex>
|
||||
@@ -29,10 +31,12 @@ namespace test {
|
||||
class enable_yield_to
|
||||
{
|
||||
protected:
|
||||
boost::asio::io_service ios_;
|
||||
boost::asio::io_context ios_;
|
||||
|
||||
private:
|
||||
boost::optional<boost::asio::io_service::work> work_;
|
||||
boost::optional<boost::asio::executor_work_guard<
|
||||
boost::asio::io_context::executor_type>>
|
||||
work_;
|
||||
std::vector<std::thread> threads_;
|
||||
std::mutex m_;
|
||||
std::condition_variable cv_;
|
||||
@@ -42,7 +46,8 @@ public:
|
||||
/// The type of yield context passed to functions.
|
||||
using yield_context = boost::asio::yield_context;
|
||||
|
||||
explicit enable_yield_to(std::size_t concurrency = 1) : work_(ios_)
|
||||
explicit enable_yield_to(std::size_t concurrency = 1)
|
||||
: work_(boost::asio::make_work_guard(ios_))
|
||||
{
|
||||
threads_.reserve(concurrency);
|
||||
while (concurrency--)
|
||||
@@ -56,9 +61,9 @@ public:
|
||||
t.join();
|
||||
}
|
||||
|
||||
/// Return the `io_service` associated with the object
|
||||
boost::asio::io_service&
|
||||
get_io_service()
|
||||
/// Return the `io_context` associated with the object
|
||||
boost::asio::io_context&
|
||||
get_io_context()
|
||||
{
|
||||
return ios_;
|
||||
}
|
||||
@@ -111,13 +116,18 @@ enable_yield_to::spawn(F0&& f, FN&&... fn)
|
||||
{
|
||||
boost::asio::spawn(
|
||||
ios_,
|
||||
boost::allocator_arg,
|
||||
boost::context::fixedsize_stack(2 * 1024 * 1024),
|
||||
[&](yield_context yield) {
|
||||
f(yield);
|
||||
std::lock_guard lock{m_};
|
||||
if (--running_ == 0)
|
||||
cv_.notify_all();
|
||||
},
|
||||
boost::coroutines::attributes(2 * 1024 * 1024));
|
||||
[](std::exception_ptr e) {
|
||||
if (e)
|
||||
std::rethrow_exception(e);
|
||||
});
|
||||
spawn(fn...);
|
||||
}
|
||||
|
||||
|
||||
@@ -217,7 +217,7 @@ Reader::parse(Value& root, BufferSequence const& bs)
|
||||
std::string s;
|
||||
s.reserve(buffer_size(bs));
|
||||
for (auto const& b : bs)
|
||||
s.append(buffer_cast<char const*>(b), buffer_size(b));
|
||||
s.append(static_cast<char const*>(b.data()), buffer_size(b));
|
||||
return parse(s, root);
|
||||
}
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ public:
|
||||
* without formatting (not human friendly).
|
||||
*
|
||||
* The JSON document is written in a single line. It is not intended for 'human'
|
||||
* consumption, but may be useful to support feature such as RPC where bandwidth
|
||||
* consumption, but may be useful to support feature such as RPC where bandwith
|
||||
* is limited. \sa Reader, Value
|
||||
*/
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@ public:
|
||||
|
||||
public:
|
||||
AutoSocket(
|
||||
boost::asio::io_service& s,
|
||||
boost::asio::io_context& s,
|
||||
boost::asio::ssl::context& c,
|
||||
bool secureOnly,
|
||||
bool plainOnly)
|
||||
@@ -58,7 +58,7 @@ public:
|
||||
mSocket = std::make_unique<ssl_socket>(s, c);
|
||||
}
|
||||
|
||||
AutoSocket(boost::asio::io_service& s, boost::asio::ssl::context& c)
|
||||
AutoSocket(boost::asio::io_context& s, boost::asio::ssl::context& c)
|
||||
: AutoSocket(s, c, false, false)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
#include <xrpl/basics/ByteUtilities.h>
|
||||
#include <xrpl/beast/utility/Journal.h>
|
||||
|
||||
#include <boost/asio/io_service.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/streambuf.hpp>
|
||||
|
||||
#include <chrono>
|
||||
@@ -51,7 +51,7 @@ public:
|
||||
|
||||
static void
|
||||
get(bool bSSL,
|
||||
boost::asio::io_service& io_service,
|
||||
boost::asio::io_context& io_context,
|
||||
std::deque<std::string> deqSites,
|
||||
unsigned short const port,
|
||||
std::string const& strPath,
|
||||
@@ -65,7 +65,7 @@ public:
|
||||
|
||||
static void
|
||||
get(bool bSSL,
|
||||
boost::asio::io_service& io_service,
|
||||
boost::asio::io_context& io_context,
|
||||
std::string strSite,
|
||||
unsigned short const port,
|
||||
std::string const& strPath,
|
||||
@@ -80,7 +80,7 @@ public:
|
||||
static void
|
||||
request(
|
||||
bool bSSL,
|
||||
boost::asio::io_service& io_service,
|
||||
boost::asio::io_context& io_context,
|
||||
std::string strSite,
|
||||
unsigned short const port,
|
||||
std::function<
|
||||
|
||||
@@ -153,7 +153,7 @@ public:
|
||||
{
|
||||
strm.set_verify_callback(
|
||||
std::bind(
|
||||
&rfc2818_verify,
|
||||
&rfc6125_verify,
|
||||
host,
|
||||
std::placeholders::_1,
|
||||
std::placeholders::_2,
|
||||
@@ -167,7 +167,7 @@ public:
|
||||
|
||||
/**
|
||||
* @brief callback invoked for name verification - just passes through
|
||||
* to the asio rfc2818 implementation.
|
||||
* to the asio `host_name_verification` (rfc6125) implementation.
|
||||
*
|
||||
* @param domain hostname expected
|
||||
* @param preverified passed by implementation
|
||||
@@ -175,13 +175,13 @@ public:
|
||||
* @param j journal for logging
|
||||
*/
|
||||
static bool
|
||||
rfc2818_verify(
|
||||
rfc6125_verify(
|
||||
std::string const& domain,
|
||||
bool preverified,
|
||||
boost::asio::ssl::verify_context& ctx,
|
||||
beast::Journal j)
|
||||
{
|
||||
if (boost::asio::ssl::rfc2818_verification(domain)(preverified, ctx))
|
||||
if (boost::asio::ssl::host_name_verification(domain)(preverified, ctx))
|
||||
return true;
|
||||
|
||||
JLOG(j.warn()) << "Outbound SSL connection to " << domain
|
||||
|
||||
@@ -287,11 +287,9 @@ delegate(AccountID const& account, AccountID const& authorizedAccount) noexcept;
|
||||
Keylet
|
||||
bridge(STXChainBridge const& bridge, STXChainBridge::ChainType chainType);
|
||||
|
||||
// `seq` is stored as `sfXChainClaimID` in the object
|
||||
Keylet
|
||||
xChainClaimID(STXChainBridge const& bridge, std::uint64_t seq);
|
||||
|
||||
// `seq` is stored as `sfXChainAccountCreateCount` in the object
|
||||
Keylet
|
||||
xChainCreateAccountClaimID(STXChainBridge const& bridge, std::uint64_t seq);
|
||||
|
||||
|
||||
@@ -188,15 +188,6 @@ enum LedgerSpecificFlags {
|
||||
lsfMPTCanTransfer = 0x00000020,
|
||||
lsfMPTCanClawback = 0x00000040,
|
||||
|
||||
lsmfMPTCanMutateCanLock = 0x00000002,
|
||||
lsmfMPTCanMutateRequireAuth = 0x00000004,
|
||||
lsmfMPTCanMutateCanEscrow = 0x00000008,
|
||||
lsmfMPTCanMutateCanTrade = 0x00000010,
|
||||
lsmfMPTCanMutateCanTransfer = 0x00000020,
|
||||
lsmfMPTCanMutateCanClawback = 0x00000040,
|
||||
lsmfMPTCanMutateMetadata = 0x00010000,
|
||||
lsmfMPTCanMutateTransferFee = 0x00020000,
|
||||
|
||||
// ltMPTOKEN
|
||||
lsfMPTAuthorized = 0x00000002,
|
||||
|
||||
|
||||
@@ -20,8 +20,6 @@
|
||||
#ifndef RIPPLE_PROTOCOL_PERMISSION_H_INCLUDED
|
||||
#define RIPPLE_PROTOCOL_PERMISSION_H_INCLUDED
|
||||
|
||||
#include <xrpl/protocol/Rules.h>
|
||||
#include <xrpl/protocol/TER.h>
|
||||
#include <xrpl/protocol/TxFormats.h>
|
||||
|
||||
#include <optional>
|
||||
@@ -55,8 +53,6 @@ class Permission
|
||||
private:
|
||||
Permission();
|
||||
|
||||
std::unordered_map<std::uint16_t, uint256> txFeatureMap_;
|
||||
|
||||
std::unordered_map<std::uint16_t, Delegation> delegatableTx_;
|
||||
|
||||
std::unordered_map<std::string, GranularPermissionType>
|
||||
@@ -74,9 +70,6 @@ public:
|
||||
Permission&
|
||||
operator=(Permission const&) = delete;
|
||||
|
||||
std::optional<std::string>
|
||||
getPermissionName(std::uint32_t const value) const;
|
||||
|
||||
std::optional<std::uint32_t>
|
||||
getGranularValue(std::string const& name) const;
|
||||
|
||||
@@ -86,12 +79,8 @@ public:
|
||||
std::optional<TxType>
|
||||
getGranularTxType(GranularPermissionType const& gpType) const;
|
||||
|
||||
std::optional<std::reference_wrapper<uint256 const>> const
|
||||
getTxFeature(TxType txType) const;
|
||||
|
||||
bool
|
||||
isDelegatable(std::uint32_t const& permissionValue, Rules const& rules)
|
||||
const;
|
||||
isDelegatable(std::uint32_t const& permissionValue) const;
|
||||
|
||||
// for tx level permission, permission value is equal to tx type plus one
|
||||
uint32_t
|
||||
|
||||
@@ -55,10 +55,7 @@ std::size_t constexpr oversizeMetaDataCap = 5200;
|
||||
/** The maximum number of entries per directory page */
|
||||
std::size_t constexpr dirNodeMaxEntries = 32;
|
||||
|
||||
/** The maximum number of pages allowed in a directory
|
||||
|
||||
Made obsolete by fixDirectoryLimit amendment.
|
||||
*/
|
||||
/** The maximum number of pages allowed in a directory */
|
||||
std::uint64_t constexpr dirNodeMaxPages = 262144;
|
||||
|
||||
/** The maximum number of items in an NFT page */
|
||||
@@ -124,13 +121,6 @@ std::size_t constexpr maxDataPayloadLength = 256;
|
||||
/** Vault withdrawal policies */
|
||||
std::uint8_t constexpr vaultStrategyFirstComeFirstServe = 1;
|
||||
|
||||
/** Default IOU scale factor for a Vault */
|
||||
std::uint8_t constexpr vaultDefaultIOUScale = 6;
|
||||
/** Maximum scale factor for a Vault. The number is chosen to ensure that
|
||||
1 IOU can be always converted to shares.
|
||||
10^19 > maxMPTokenAmount (2^64-1) > 10^18 */
|
||||
std::uint8_t constexpr vaultMaximumIOUScale = 18;
|
||||
|
||||
/** Maximum recursion depth for vault shares being put as an asset inside
|
||||
* another vault; counted from 0 */
|
||||
std::uint8_t constexpr maxAssetCheckDepth = 5;
|
||||
|
||||
@@ -72,10 +72,8 @@ class STCurrency;
|
||||
STYPE(STI_VL, 7) \
|
||||
STYPE(STI_ACCOUNT, 8) \
|
||||
STYPE(STI_NUMBER, 9) \
|
||||
STYPE(STI_INT32, 10) \
|
||||
STYPE(STI_INT64, 11) \
|
||||
\
|
||||
/* 12-13 are reserved */ \
|
||||
/* 10-13 are reserved */ \
|
||||
STYPE(STI_OBJECT, 14) \
|
||||
STYPE(STI_ARRAY, 15) \
|
||||
\
|
||||
@@ -358,9 +356,6 @@ using SF_UINT256 = TypedField<STBitString<256>>;
|
||||
using SF_UINT384 = TypedField<STBitString<384>>;
|
||||
using SF_UINT512 = TypedField<STBitString<512>>;
|
||||
|
||||
using SF_INT32 = TypedField<STInteger<std::int32_t>>;
|
||||
using SF_INT64 = TypedField<STInteger<std::int64_t>>;
|
||||
|
||||
using SF_ACCOUNT = TypedField<STAccount>;
|
||||
using SF_AMOUNT = TypedField<STAmount>;
|
||||
using SF_ISSUE = TypedField<STIssue>;
|
||||
|
||||
@@ -81,8 +81,6 @@ using STUInt16 = STInteger<std::uint16_t>;
|
||||
using STUInt32 = STInteger<std::uint32_t>;
|
||||
using STUInt64 = STInteger<std::uint64_t>;
|
||||
|
||||
using STInt32 = STInteger<std::int32_t>;
|
||||
|
||||
template <typename Integer>
|
||||
inline STInteger<Integer>::STInteger(Integer v) : value_(v)
|
||||
{
|
||||
|
||||
@@ -231,8 +231,6 @@ public:
|
||||
getFieldH192(SField const& field) const;
|
||||
uint256
|
||||
getFieldH256(SField const& field) const;
|
||||
std::int32_t
|
||||
getFieldI32(SField const& field) const;
|
||||
AccountID
|
||||
getAccountID(SField const& field) const;
|
||||
|
||||
@@ -367,8 +365,6 @@ public:
|
||||
void
|
||||
setFieldH256(SField const& field, uint256 const&);
|
||||
void
|
||||
setFieldI32(SField const& field, std::int32_t);
|
||||
void
|
||||
setFieldVL(SField const& field, Blob const&);
|
||||
void
|
||||
setFieldVL(SField const& field, Slice const&);
|
||||
|
||||
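The INT32 plumbing above pairs the SF_INT32/STInt32 types with new STObject accessors. A minimal usage sketch, assuming the xrpl protocol headers and the test-only sfDummyInt32 field noted later in sfields.macro:

#include <xrpl/protocol/STObject.h>

#include <cstdint>

// Hypothetical helper: round-trips a signed 32-bit field through an STObject.
void
roundTripInt32(ripple::STObject& obj)
{
    obj.setFieldI32(ripple::sfDummyInt32, -42);
    std::int32_t const v = obj.getFieldI32(ripple::sfDummyInt32);
    (void)v;  // v == -42
}
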
@@ -54,6 +54,34 @@ public:
Json::Value error;
};

/** Holds the serialized result of parsing an input JSON array.
This does validation and checking on the provided JSON.
*/
class STParsedJSONArray
{
public:
/** Parses and creates an STParsedJSON array.
The result of the parsing is stored in array and error.
Exceptions:
Does not throw.
@param name The name of the JSON field, used in diagnostics.
@param json The JSON-RPC to parse.
*/
STParsedJSONArray(std::string const& name, Json::Value const& json);

STParsedJSONArray() = delete;
STParsedJSONArray(STParsedJSONArray const&) = delete;
STParsedJSONArray&
operator=(STParsedJSONArray const&) = delete;
~STParsedJSONArray() = default;

/** The STArray if the parse was successful. */
std::optional<STArray> array;

/** On failure, an appropriate set of error values. */
Json::Value error;
};

} // namespace ripple

#endif

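A minimal usage sketch of the STParsedJSONArray wrapper declared above (the header path and the field name "params" are assumptions for illustration): array is engaged on success, otherwise error carries the diagnostics.

#include <xrpl/protocol/STParsedJSON.h>

void
tryParseArray(Json::Value const& json)
{
    ripple::STParsedJSONArray parsed("params", json);
    if (parsed.array)
    {
        // use *parsed.array
    }
    else
    {
        // inspect parsed.error for the failure details
    }
}
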
@@ -673,8 +673,7 @@ isTerRetry(TER x) noexcept
inline bool
isTesSuccess(TER x) noexcept
{
// Makes use of TERSubset::operator bool()
return !(x);
return (x == tesSUCCESS);
}

inline bool

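The two forms agree because tesSUCCESS is the success code and the TER wrapper's operator bool() is true only for non-success codes; the explicit comparison simply states the intent. An analogy with a plain enum (hypothetical, not the repo's TER machinery):

enum class Code : int { success = 0, failure = 1 };

inline bool
isSuccess(Code c) noexcept
{
    // Same truth table as !static_cast<int>(c) when success is code 0.
    return c == Code::success;
}
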
@@ -143,8 +143,8 @@ constexpr std::uint32_t const tfTransferable = 0x00000008;
constexpr std::uint32_t const tfMutable = 0x00000010;

// MPTokenIssuanceCreate flags:
// Note: tf/lsfMPTLocked is intentionally omitted, since this transaction
// is not allowed to modify it.
// NOTE - there is intentionally no flag here for lsfMPTLocked, which
// this transaction cannot mutate.
constexpr std::uint32_t const tfMPTCanLock = lsfMPTCanLock;
constexpr std::uint32_t const tfMPTRequireAuth = lsfMPTRequireAuth;
constexpr std::uint32_t const tfMPTCanEscrow = lsfMPTCanEscrow;
@@ -154,20 +154,6 @@ constexpr std::uint32_t const tfMPTCanClawback = lsfMPTCanClawback;
constexpr std::uint32_t const tfMPTokenIssuanceCreateMask =
~(tfUniversal | tfMPTCanLock | tfMPTRequireAuth | tfMPTCanEscrow | tfMPTCanTrade | tfMPTCanTransfer | tfMPTCanClawback);

// MPTokenIssuanceCreate MutableFlags:
// Indicating specific fields or flags may be changed after issuance.
constexpr std::uint32_t const tmfMPTCanMutateCanLock = lsmfMPTCanMutateCanLock;
constexpr std::uint32_t const tmfMPTCanMutateRequireAuth = lsmfMPTCanMutateRequireAuth;
constexpr std::uint32_t const tmfMPTCanMutateCanEscrow = lsmfMPTCanMutateCanEscrow;
constexpr std::uint32_t const tmfMPTCanMutateCanTrade = lsmfMPTCanMutateCanTrade;
constexpr std::uint32_t const tmfMPTCanMutateCanTransfer = lsmfMPTCanMutateCanTransfer;
constexpr std::uint32_t const tmfMPTCanMutateCanClawback = lsmfMPTCanMutateCanClawback;
constexpr std::uint32_t const tmfMPTCanMutateMetadata = lsmfMPTCanMutateMetadata;
constexpr std::uint32_t const tmfMPTCanMutateTransferFee = lsmfMPTCanMutateTransferFee;
constexpr std::uint32_t const tmfMPTokenIssuanceCreateMutableMask =
~(tmfMPTCanMutateCanLock | tmfMPTCanMutateRequireAuth | tmfMPTCanMutateCanEscrow | tmfMPTCanMutateCanTrade
| tmfMPTCanMutateCanTransfer | tmfMPTCanMutateCanClawback | tmfMPTCanMutateMetadata | tmfMPTCanMutateTransferFee);

// MPTokenAuthorize flags:
constexpr std::uint32_t const tfMPTUnauthorize = 0x00000001;
constexpr std::uint32_t const tfMPTokenAuthorizeMask = ~(tfUniversal | tfMPTUnauthorize);
@@ -178,25 +164,6 @@ constexpr std::uint32_t const tfMPTUnlock = 0x00000002;
constexpr std::uint32_t const tfMPTokenIssuanceSetMask = ~(tfUniversal | tfMPTLock | tfMPTUnlock);
constexpr std::uint32_t const tfMPTokenIssuanceSetPermissionMask = ~(tfUniversal | tfMPTLock | tfMPTUnlock);

// MPTokenIssuanceSet MutableFlags:
// Set or Clear flags.
constexpr std::uint32_t const tmfMPTSetCanLock = 0x00000001;
constexpr std::uint32_t const tmfMPTClearCanLock = 0x00000002;
constexpr std::uint32_t const tmfMPTSetRequireAuth = 0x00000004;
constexpr std::uint32_t const tmfMPTClearRequireAuth = 0x00000008;
constexpr std::uint32_t const tmfMPTSetCanEscrow = 0x00000010;
constexpr std::uint32_t const tmfMPTClearCanEscrow = 0x00000020;
constexpr std::uint32_t const tmfMPTSetCanTrade = 0x00000040;
constexpr std::uint32_t const tmfMPTClearCanTrade = 0x00000080;
constexpr std::uint32_t const tmfMPTSetCanTransfer = 0x00000100;
constexpr std::uint32_t const tmfMPTClearCanTransfer = 0x00000200;
constexpr std::uint32_t const tmfMPTSetCanClawback = 0x00000400;
constexpr std::uint32_t const tmfMPTClearCanClawback = 0x00000800;
constexpr std::uint32_t const tmfMPTokenIssuanceSetMutableMask = ~(tmfMPTSetCanLock | tmfMPTClearCanLock |
tmfMPTSetRequireAuth | tmfMPTClearRequireAuth | tmfMPTSetCanEscrow | tmfMPTClearCanEscrow |
tmfMPTSetCanTrade | tmfMPTClearCanTrade | tmfMPTSetCanTransfer | tmfMPTClearCanTransfer |
tmfMPTSetCanClawback | tmfMPTClearCanClawback);

// MPTokenIssuanceDestroy flags:
constexpr std::uint32_t const tfMPTokenIssuanceDestroyMask = ~tfUniversal;

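For orientation, a sketch of how an inverted mask like tmfMPTokenIssuanceSetMutableMask is typically consumed (this usage is an assumption for illustration, not code from the diff): any bit outside the known Set/Clear pairs rejects the request, and asking to both set and clear the same capability is contradictory.

#include <cstdint>

inline bool
mutableFlagsOk(
    std::uint32_t flags,
    std::uint32_t invertedMask,  // e.g. tmfMPTokenIssuanceSetMutableMask
    std::uint32_t setBit,
    std::uint32_t clearBit)
{
    if (flags & invertedMask)  // unknown bits present
        return false;
    if ((flags & setBit) && (flags & clearBit))  // contradictory request
        return false;
    return true;
}
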
@@ -59,7 +59,8 @@ enum TxType : std::uint16_t
#pragma push_macro("TRANSACTION")
#undef TRANSACTION

#define TRANSACTION(tag, value, ...) tag = value,
#define TRANSACTION(tag, value, ...) \
tag = value,

#include <xrpl/protocol/detail/transactions.macro>

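A minimal sketch of the X-macro expansion this relies on (enumerator names here are hypothetical stand-ins; the real rows live in xrpl/protocol/detail/transactions.macro): each TRANSACTION(tag, value, ...) row becomes one enumerator, and the trailing "..." soaks up the columns a given expansion does not need.

#define TRANSACTION(tag, value, ...) tag = value,

enum TxTypeSketch : int {
    TRANSACTION(ttPAYMENT_SKETCH, 0, Payment, unused)
    TRANSACTION(ttESCROW_CREATE_SKETCH, 1, EscrowCreate, unused)
};
// Expands to: ttPAYMENT_SKETCH = 0, ttESCROW_CREATE_SKETCH = 1,

#undef TRANSACTION
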
@@ -77,16 +77,6 @@ concept Compatible = Valid<VU> && std::is_arithmetic_v<Other> &&
std::is_arithmetic_v<typename VU::value_type> &&
std::is_convertible_v<Other, typename VU::value_type>;

template <class T>
concept Integral = std::is_integral_v<T>;

template <class VU>
concept IntegralValue = Integral<typename VU::value_type>;

template <class VU1, class VU2>
concept CastableValue = IntegralValue<VU1> && IntegralValue<VU2> &&
std::is_same_v<typename VU1::unit_type, typename VU2::unit_type>;

template <class UnitTag, class T>
class ValueUnit : private boost::totally_ordered<ValueUnit<UnitTag, T>>,
private boost::additive<ValueUnit<UnitTag, T>>,
@@ -228,8 +218,8 @@ public:
return *this;
}

template <Integral transparent = value_type>
ValueUnit&
template <class transparent = value_type>
std::enable_if_t<std::is_integral_v<transparent>, ValueUnit&>
operator%=(value_type const& rhs)
{
value_ %= rhs;
@@ -368,27 +358,49 @@ to_string(ValueUnit<UnitTag, T> const& amount)
return std::to_string(amount.value());
}

template <class Source>
concept muldivSource = Valid<Source> &&
template <Valid Source>
constexpr bool can_muldiv_source_v =
std::is_convertible_v<typename Source::value_type, std::uint64_t>;

template <class Dest>
concept muldivDest = muldivSource<Dest> && // Dest is also a source
template <Valid Dest>
constexpr bool can_muldiv_dest_v =
can_muldiv_source_v<Dest> && // Dest is also a source
std::is_convertible_v<std::uint64_t, typename Dest::value_type> &&
sizeof(typename Dest::value_type) >= sizeof(std::uint64_t);

template <class Source2, class Source1>
concept muldivSources = muldivSource<Source1> && muldivSource<Source2> &&
template <Valid Source1, Valid Source2>
constexpr bool can_muldiv_sources_v =
can_muldiv_source_v<Source1> && can_muldiv_source_v<Source2> &&
std::is_same_v<typename Source1::unit_type, typename Source2::unit_type>;

template <class Dest, class Source1, class Source2>
concept muldivable = muldivSources<Source1, Source2> && muldivDest<Dest>;
template <Valid Source1, Valid Source2, Valid Dest>
constexpr bool can_muldiv_v =
can_muldiv_sources_v<Source1, Source2> && can_muldiv_dest_v<Dest>;
// Source and Dest can be the same by default

template <class Dest, class Source1, class Source2>
concept muldivCommutable = muldivable<Dest, Source1, Source2> &&
template <Valid Source1, Valid Source2, Valid Dest>
constexpr bool can_muldiv_commute_v = can_muldiv_v<Source1, Source2, Dest> &&
!std::is_same_v<typename Source1::unit_type, typename Dest::unit_type>;

template <class T>
using enable_muldiv_source_t =
typename std::enable_if_t<can_muldiv_source_v<T>>;

template <class T>
using enable_muldiv_dest_t = typename std::enable_if_t<can_muldiv_dest_v<T>>;

template <class Source1, class Source2>
using enable_muldiv_sources_t =
typename std::enable_if_t<can_muldiv_sources_v<Source1, Source2>>;

template <class Source1, class Source2, class Dest>
using enable_muldiv_t =
typename std::enable_if_t<can_muldiv_v<Source1, Source2, Dest>>;

template <class Source1, class Source2, class Dest>
using enable_muldiv_commute_t =
typename std::enable_if_t<can_muldiv_commute_v<Source1, Source2, Dest>>;

template <class T>
ValueUnit<unitlessTag, T>
scalar(T value)
@@ -396,7 +408,11 @@ scalar(T value)
return ValueUnit<unitlessTag, T>{value};
}

template <class Source1, class Source2, unit::muldivable<Source1, Source2> Dest>
template <
class Source1,
class Source2,
class Dest,
class = enable_muldiv_t<Source1, Source2, Dest>>
std::optional<Dest>
mulDivU(Source1 value, Dest mul, Source2 div)
{
@@ -410,7 +426,7 @@ mulDivU(Source1 value, Dest mul, Source2 div)
XRPL_ASSERT(
mul.value() >= 0, "ripple::unit::mulDivU : minimum mul input");
XRPL_ASSERT(
div.value() > 0, "ripple::unit::mulDivU : minimum div input");
div.value() >= 0, "ripple::unit::mulDivU : minimum div input");
return std::nullopt;
}

@@ -461,7 +477,11 @@ using TenthBips = unit::ValueUnit<unit::TenthBipsTag, T>;
using TenthBips16 = TenthBips<std::uint16_t>;
using TenthBips32 = TenthBips<std::uint32_t>;

template <class Source1, class Source2, unit::muldivable<Source1, Source2> Dest>
template <
class Source1,
class Source2,
class Dest,
class = unit::enable_muldiv_t<Source1, Source2, Dest>>
std::optional<Dest>
mulDiv(Source1 value, Dest mul, Source2 div)
{
@@ -471,7 +491,8 @@ mulDiv(Source1 value, Dest mul, Source2 div)
template <
class Source1,
class Source2,
unit::muldivCommutable<Source1, Source2> Dest>
class Dest,
class = unit::enable_muldiv_commute_t<Source1, Source2, Dest>>
std::optional<Dest>
mulDiv(Dest value, Source1 mul, Source2 div)
{
@@ -479,7 +500,7 @@ mulDiv(Dest value, Source1 mul, Source2 div)
return unit::mulDivU(mul, value, div);
}

template <unit::muldivDest Dest>
template <class Dest, class = unit::enable_muldiv_dest_t<Dest>>
std::optional<Dest>
mulDiv(std::uint64_t value, Dest mul, std::uint64_t div)
{
@@ -488,7 +509,7 @@ mulDiv(std::uint64_t value, Dest mul, std::uint64_t div)
return unit::mulDivU(unit::scalar(value), mul, unit::scalar(div));
}

template <unit::muldivDest Dest>
template <class Dest, class = unit::enable_muldiv_dest_t<Dest>>
std::optional<Dest>
mulDiv(Dest value, std::uint64_t mul, std::uint64_t div)
{
@@ -496,7 +517,10 @@ mulDiv(Dest value, std::uint64_t mul, std::uint64_t div)
return mulDiv(mul, value, div);
}

template <unit::muldivSource Source1, unit::muldivSources<Source1> Source2>
template <
class Source1,
class Source2,
class = unit::enable_muldiv_sources_t<Source1, Source2>>
std::optional<std::uint64_t>
mulDiv(Source1 value, std::uint64_t mul, Source2 div)
{
@@ -510,7 +534,10 @@ mulDiv(Source1 value, std::uint64_t mul, Source2 div)
return unitresult->value();
}

template <unit::muldivSource Source1, unit::muldivSources<Source1> Source2>
template <
class Source1,
class Source2,
class = unit::enable_muldiv_sources_t<Source1, Source2>>
std::optional<std::uint64_t>
mulDiv(std::uint64_t value, Source1 mul, Source2 div)
{
@@ -518,32 +545,44 @@ mulDiv(std::uint64_t value, Source1 mul, Source2 div)
return mulDiv(mul, value, div);
}

template <unit::IntegralValue Dest, unit::CastableValue<Dest> Src>
constexpr Dest
template <class Dest, class Src>
constexpr std::enable_if_t<
std::is_same_v<typename Dest::unit_type, typename Src::unit_type> &&
std::is_integral_v<typename Dest::value_type> &&
std::is_integral_v<typename Src::value_type>,
Dest>
safe_cast(Src s) noexcept
{
// Dest may not have an explicit value constructor
return Dest{safe_cast<typename Dest::value_type>(s.value())};
}

template <unit::IntegralValue Dest, unit::Integral Src>
constexpr Dest
template <class Dest, class Src>
constexpr std::enable_if_t<
std::is_integral_v<typename Dest::value_type> && std::is_integral_v<Src>,
Dest>
safe_cast(Src s) noexcept
{
// Dest may not have an explicit value constructor
return Dest{safe_cast<typename Dest::value_type>(s)};
}

template <unit::IntegralValue Dest, unit::CastableValue<Dest> Src>
constexpr Dest
template <class Dest, class Src>
constexpr std::enable_if_t<
std::is_same_v<typename Dest::unit_type, typename Src::unit_type> &&
std::is_integral_v<typename Dest::value_type> &&
std::is_integral_v<typename Src::value_type>,
Dest>
unsafe_cast(Src s) noexcept
{
// Dest may not have an explicit value constructor
return Dest{unsafe_cast<typename Dest::value_type>(s.value())};
}

template <unit::IntegralValue Dest, unit::Integral Src>
constexpr Dest
template <class Dest, class Src>
constexpr std::enable_if_t<
std::is_integral_v<typename Dest::value_type> && std::is_integral_v<Src>,
Dest>
unsafe_cast(Src s) noexcept
{
// Dest may not have an explicit value constructor

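The whole hunk above applies one mechanical rewrite: a C++20 concept constraint becomes a constexpr bool trait plus a defaulted SFINAE template parameter, so the header also builds where concepts are unavailable or disabled (compare the -DBOOST_ASIO_DISABLE_CONCEPTS flag in the Conan profile). A minimal sketch of the pattern with a hypothetical twice() function:

#include <type_traits>

// C++20 form:
//   template <class T> concept Integral = std::is_integral_v<T>;
//   template <Integral T> T twice(T v) { return v + v; }

// Pre-C++20 form used in the diff:
template <class T>
constexpr bool is_integral_value_v = std::is_integral_v<T>;

template <class T, class = std::enable_if_t<is_integral_value_v<T>>>
T
twice(T v)
{
    return v + v;
}
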
@@ -129,12 +129,10 @@ inplace_bigint_div_rem(std::span<uint64_t> numerator, std::uint64_t divisor)
{
// should never happen, but if it does then it seems natural to define
// the a null set of numbers to be zero, so the remainder is also zero.
// LCOV_EXCL_START
UNREACHABLE(
"ripple::b58_fast::detail::inplace_bigint_div_rem : empty "
"numerator");
return 0;
// LCOV_EXCL_STOP
}

auto to_u128 = [](std::uint64_t high,

@@ -29,22 +29,19 @@

// Add new amendments to the top of this list.
// Keep it sorted in reverse chronological order.
// If you add an amendment here, then do not forget to increment `numFeatures`
// in include/xrpl/protocol/Feature.h.

XRPL_FIX (DirectoryLimit, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (IncludeKeyletFields, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FEATURE(DynamicMPT, Supported::no, VoteBehavior::DefaultNo)
XRPL_FIX (TokenEscrowV1, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (DelegateV1_1, Supported::no, VoteBehavior::DefaultNo)
XRPL_FIX (PriceOracleOrder, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (MPTDeliveredAmount, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (AMMClawbackRounding, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (PriceOracleOrder, Supported::no, VoteBehavior::DefaultNo)
XRPL_FIX (MPTDeliveredAmount, Supported::no, VoteBehavior::DefaultNo)
XRPL_FIX (AMMClawbackRounding, Supported::no, VoteBehavior::DefaultNo)
XRPL_FEATURE(TokenEscrow, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (EnforceNFTokenTrustlineV2, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (AMMv1_3, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FEATURE(PermissionedDEX, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FEATURE(Batch, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FEATURE(SingleAssetVault, Supported::no, VoteBehavior::DefaultNo)
XRPL_FEATURE(PermissionDelegation, Supported::no, VoteBehavior::DefaultNo)
XRPL_FEATURE(PermissionDelegation, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (PayChanCancelAfter, Supported::yes, VoteBehavior::DefaultNo)
// Check flags in Credential transactions
XRPL_FIX (InvalidTxFlags, Supported::yes, VoteBehavior::DefaultNo)

@@ -120,7 +120,6 @@ LEDGER_ENTRY(ltNFTOKEN_PAGE, 0x0050, NFTokenPage, nft_page, ({
// All fields are soeREQUIRED because there is always a SignerEntries.
// If there are no SignerEntries the node is deleted.
LEDGER_ENTRY(ltSIGNER_LIST, 0x0053, SignerList, signer_list, ({
{sfOwner, soeOPTIONAL},
{sfOwnerNode, soeREQUIRED},
{sfSignerQuorum, soeREQUIRED},
{sfSignerEntries, soeREQUIRED},
@@ -189,7 +188,7 @@ LEDGER_ENTRY(ltDIR_NODE, 0x0064, DirectoryNode, directory, ({
{sfNFTokenID, soeOPTIONAL},
{sfPreviousTxnID, soeOPTIONAL},
{sfPreviousTxnLgrSeq, soeOPTIONAL},
{sfDomainID, soeOPTIONAL} // order book directories
{sfDomainID, soeOPTIONAL}
}))

/** The ledger object which lists details about amendments on the network.
@@ -344,7 +343,6 @@ LEDGER_ENTRY(ltXCHAIN_OWNED_CREATE_ACCOUNT_CLAIM_ID, 0x0074, XChainOwnedCreateAc
*/
LEDGER_ENTRY(ltESCROW, 0x0075, Escrow, escrow, ({
{sfAccount, soeREQUIRED},
{sfSequence, soeOPTIONAL},
{sfDestination, soeREQUIRED},
{sfAmount, soeREQUIRED},
{sfCondition, soeOPTIONAL},
@@ -367,7 +365,6 @@ LEDGER_ENTRY(ltESCROW, 0x0075, Escrow, escrow, ({
LEDGER_ENTRY(ltPAYCHAN, 0x0078, PayChannel, payment_channel, ({
{sfAccount, soeREQUIRED},
{sfDestination, soeREQUIRED},
{sfSequence, soeOPTIONAL},
{sfAmount, soeREQUIRED},
{sfBalance, soeREQUIRED},
{sfPublicKey, soeREQUIRED},
@@ -415,7 +412,6 @@ LEDGER_ENTRY(ltMPTOKEN_ISSUANCE, 0x007e, MPTokenIssuance, mpt_issuance, ({
{sfPreviousTxnID, soeREQUIRED},
{sfPreviousTxnLgrSeq, soeREQUIRED},
{sfDomainID, soeOPTIONAL},
{sfMutableFlags, soeDEFAULT},
}))

/** A ledger object which tracks MPToken
@@ -436,7 +432,6 @@ LEDGER_ENTRY(ltMPTOKEN, 0x007f, MPToken, mptoken, ({
*/
LEDGER_ENTRY(ltORACLE, 0x0080, Oracle, oracle, ({
{sfOwner, soeREQUIRED},
{sfOracleDocumentID, soeOPTIONAL},
{sfProvider, soeREQUIRED},
{sfPriceDataSeries, soeREQUIRED},
{sfAssetClass, soeREQUIRED},
@@ -457,7 +452,7 @@ LEDGER_ENTRY(ltCREDENTIAL, 0x0081, Credential, credential, ({
{sfExpiration, soeOPTIONAL},
{sfURI, soeOPTIONAL},
{sfIssuerNode, soeREQUIRED},
{sfSubjectNode, soeOPTIONAL},
{sfSubjectNode, soeREQUIRED},
{sfPreviousTxnID, soeREQUIRED},
{sfPreviousTxnLgrSeq, soeREQUIRED},
}))
@@ -504,7 +499,6 @@ LEDGER_ENTRY(ltVAULT, 0x0084, Vault, vault, ({
{sfLossUnrealized, soeREQUIRED},
{sfShareMPTID, soeREQUIRED},
{sfWithdrawalPolicy, soeREQUIRED},
{sfScale, soeDEFAULT},
// no SharesTotal ever (use MPTIssuance.sfOutstandingAmount)
// no PermissionedDomainID ever (use MPTIssuance.sfDomainID)
}))

@@ -114,7 +114,6 @@ TYPED_SFIELD(sfVoteWeight, UINT32, 48)
TYPED_SFIELD(sfFirstNFTokenSequence, UINT32, 50)
TYPED_SFIELD(sfOracleDocumentID, UINT32, 51)
TYPED_SFIELD(sfPermissionValue, UINT32, 52)
TYPED_SFIELD(sfMutableFlags, UINT32, 53)

// 64-bit integers (common)
TYPED_SFIELD(sfIndexNext, UINT64, 1)
@@ -175,7 +174,7 @@ TYPED_SFIELD(sfEmitParentTxnID, UINT256, 11)
TYPED_SFIELD(sfEmitNonce, UINT256, 12)
TYPED_SFIELD(sfEmitHookHash, UINT256, 13)
TYPED_SFIELD(sfAMMID, UINT256, 14,
SField::sMD_PseudoAccount | SField::sMD_Default)
SField::sMD_PseudoAccount |SField::sMD_Default)

// 256-bit (uncommon)
TYPED_SFIELD(sfBookDirectory, UINT256, 16)
@@ -208,12 +207,6 @@ TYPED_SFIELD(sfAssetsMaximum, NUMBER, 3)
TYPED_SFIELD(sfAssetsTotal, NUMBER, 4)
TYPED_SFIELD(sfLossUnrealized, NUMBER, 5)

// int32
// NOTE: Do not use `sfDummyInt32`. It's so far the only use of INT32
// in this file and has been defined here for test only.
// TODO: Replace `sfDummyInt32` with actually useful field.
TYPED_SFIELD(sfDummyInt32, INT32, 1) // for tests only

// currency amount (common)
TYPED_SFIELD(sfAmount, AMOUNT, 1)
TYPED_SFIELD(sfBalance, AMOUNT, 2)

@@ -22,7 +22,7 @@
#endif

/**
* TRANSACTION(tag, value, name, delegatable, amendments, privileges, fields)
* TRANSACTION(tag, value, name, delegatable, privileges, fields)
*
* To ease maintenance, you may replace any unneeded values with "..."
* e.g. #define TRANSACTION(tag, value, name, ...)
@@ -45,7 +45,6 @@
#endif
TRANSACTION(ttPAYMENT, 0, Payment,
Delegation::delegatable,
uint256{},
createAcct,
({
{sfDestination, soeREQUIRED},
@@ -65,7 +64,6 @@ TRANSACTION(ttPAYMENT, 0, Payment,
#endif
TRANSACTION(ttESCROW_CREATE, 1, EscrowCreate,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfDestination, soeREQUIRED},
@@ -79,7 +77,6 @@ TRANSACTION(ttESCROW_CREATE, 1, EscrowCreate,
/** This transaction type completes an existing escrow. */
TRANSACTION(ttESCROW_FINISH, 2, EscrowFinish,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfOwner, soeREQUIRED},
@@ -96,7 +93,6 @@ TRANSACTION(ttESCROW_FINISH, 2, EscrowFinish,
#endif
TRANSACTION(ttACCOUNT_SET, 3, AccountSet,
Delegation::notDelegatable,
uint256{},
noPriv,
({
{sfEmailHash, soeOPTIONAL},
@@ -117,7 +113,6 @@ TRANSACTION(ttACCOUNT_SET, 3, AccountSet,
#endif
TRANSACTION(ttESCROW_CANCEL, 4, EscrowCancel,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfOwner, soeREQUIRED},
@@ -130,7 +125,6 @@ TRANSACTION(ttESCROW_CANCEL, 4, EscrowCancel,
#endif
TRANSACTION(ttREGULAR_KEY_SET, 5, SetRegularKey,
Delegation::notDelegatable,
uint256{},
noPriv,
({
{sfRegularKey, soeOPTIONAL},
@@ -144,7 +138,6 @@ TRANSACTION(ttREGULAR_KEY_SET, 5, SetRegularKey,
#endif
TRANSACTION(ttOFFER_CREATE, 7, OfferCreate,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfTakerPays, soeREQUIRED},
@@ -160,7 +153,6 @@ TRANSACTION(ttOFFER_CREATE, 7, OfferCreate,
#endif
TRANSACTION(ttOFFER_CANCEL, 8, OfferCancel,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfOfferSequence, soeREQUIRED},
@@ -174,7 +166,6 @@ TRANSACTION(ttOFFER_CANCEL, 8, OfferCancel,
#endif
TRANSACTION(ttTICKET_CREATE, 10, TicketCreate,
Delegation::delegatable,
featureTicketBatch,
noPriv,
({
{sfTicketCount, soeREQUIRED},
@@ -190,7 +181,6 @@ TRANSACTION(ttTICKET_CREATE, 10, TicketCreate,
#endif
TRANSACTION(ttSIGNER_LIST_SET, 12, SignerListSet,
Delegation::notDelegatable,
uint256{},
noPriv,
({
{sfSignerQuorum, soeREQUIRED},
@@ -203,7 +193,6 @@ TRANSACTION(ttSIGNER_LIST_SET, 12, SignerListSet,
#endif
TRANSACTION(ttPAYCHAN_CREATE, 13, PaymentChannelCreate,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfDestination, soeREQUIRED},
@@ -217,7 +206,6 @@ TRANSACTION(ttPAYCHAN_CREATE, 13, PaymentChannelCreate,
/** This transaction type funds an existing unidirectional XRP payment channel. */
TRANSACTION(ttPAYCHAN_FUND, 14, PaymentChannelFund,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfChannel, soeREQUIRED},
@@ -228,7 +216,6 @@ TRANSACTION(ttPAYCHAN_FUND, 14, PaymentChannelFund,
/** This transaction type submits a claim against an existing unidirectional payment channel. */
TRANSACTION(ttPAYCHAN_CLAIM, 15, PaymentChannelClaim,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfChannel, soeREQUIRED},
@@ -245,7 +232,6 @@ TRANSACTION(ttPAYCHAN_CLAIM, 15, PaymentChannelClaim,
#endif
TRANSACTION(ttCHECK_CREATE, 16, CheckCreate,
Delegation::delegatable,
featureChecks,
noPriv,
({
{sfDestination, soeREQUIRED},
@@ -261,7 +247,6 @@ TRANSACTION(ttCHECK_CREATE, 16, CheckCreate,
#endif
TRANSACTION(ttCHECK_CASH, 17, CheckCash,
Delegation::delegatable,
featureChecks,
noPriv,
({
{sfCheckID, soeREQUIRED},
@@ -275,7 +260,6 @@ TRANSACTION(ttCHECK_CASH, 17, CheckCash,
#endif
TRANSACTION(ttCHECK_CANCEL, 18, CheckCancel,
Delegation::delegatable,
featureChecks,
noPriv,
({
{sfCheckID, soeREQUIRED},
@@ -287,7 +271,6 @@ TRANSACTION(ttCHECK_CANCEL, 18, CheckCancel,
#endif
TRANSACTION(ttDEPOSIT_PREAUTH, 19, DepositPreauth,
Delegation::delegatable,
featureDepositPreauth,
noPriv,
({
{sfAuthorize, soeOPTIONAL},
@@ -302,7 +285,6 @@ TRANSACTION(ttDEPOSIT_PREAUTH, 19, DepositPreauth,
#endif
TRANSACTION(ttTRUST_SET, 20, TrustSet,
Delegation::delegatable,
uint256{},
noPriv,
({
{sfLimitAmount, soeOPTIONAL},
@@ -316,7 +298,6 @@ TRANSACTION(ttTRUST_SET, 20, TrustSet,
#endif
TRANSACTION(ttACCOUNT_DELETE, 21, AccountDelete,
Delegation::notDelegatable,
uint256{},
mustDeleteAcct,
({
{sfDestination, soeREQUIRED},
@@ -332,7 +313,6 @@ TRANSACTION(ttACCOUNT_DELETE, 21, AccountDelete,
#endif
TRANSACTION(ttNFTOKEN_MINT, 25, NFTokenMint,
Delegation::delegatable,
featureNonFungibleTokensV1,
changeNFTCounts,
({
{sfNFTokenTaxon, soeREQUIRED},
@@ -350,7 +330,6 @@ TRANSACTION(ttNFTOKEN_MINT, 25, NFTokenMint,
#endif
TRANSACTION(ttNFTOKEN_BURN, 26, NFTokenBurn,
Delegation::delegatable,
featureNonFungibleTokensV1,
changeNFTCounts,
({
{sfNFTokenID, soeREQUIRED},
@@ -363,7 +342,6 @@ TRANSACTION(ttNFTOKEN_BURN, 26, NFTokenBurn,
#endif
TRANSACTION(ttNFTOKEN_CREATE_OFFER, 27, NFTokenCreateOffer,
Delegation::delegatable,
featureNonFungibleTokensV1,
noPriv,
({
{sfNFTokenID, soeREQUIRED},
@@ -379,7 +357,6 @@ TRANSACTION(ttNFTOKEN_CREATE_OFFER, 27, NFTokenCreateOffer,
#endif
TRANSACTION(ttNFTOKEN_CANCEL_OFFER, 28, NFTokenCancelOffer,
Delegation::delegatable,
featureNonFungibleTokensV1,
noPriv,
({
{sfNFTokenOffers, soeREQUIRED},
@@ -391,7 +368,6 @@ TRANSACTION(ttNFTOKEN_CANCEL_OFFER, 28, NFTokenCancelOffer,
#endif
TRANSACTION(ttNFTOKEN_ACCEPT_OFFER, 29, NFTokenAcceptOffer,
Delegation::delegatable,
featureNonFungibleTokensV1,
noPriv,
({
{sfNFTokenBuyOffer, soeOPTIONAL},
@@ -405,7 +381,6 @@ TRANSACTION(ttNFTOKEN_ACCEPT_OFFER, 29, NFTokenAcceptOffer,
#endif
TRANSACTION(ttCLAWBACK, 30, Clawback,
Delegation::delegatable,
featureClawback,
noPriv,
({
{sfAmount, soeREQUIRED, soeMPTSupported},
@@ -418,7 +393,6 @@ TRANSACTION(ttCLAWBACK, 30, Clawback,
#endif
TRANSACTION(ttAMM_CLAWBACK, 31, AMMClawback,
Delegation::delegatable,
featureAMMClawback,
mayDeleteAcct | overrideFreeze,
({
{sfHolder, soeREQUIRED},
@@ -433,7 +407,6 @@ TRANSACTION(ttAMM_CLAWBACK, 31, AMMClawback,
#endif
TRANSACTION(ttAMM_CREATE, 35, AMMCreate,
Delegation::delegatable,
featureAMM,
createPseudoAcct,
({
{sfAmount, soeREQUIRED},
@@ -447,7 +420,6 @@ TRANSACTION(ttAMM_CREATE, 35, AMMCreate,
#endif
TRANSACTION(ttAMM_DEPOSIT, 36, AMMDeposit,
Delegation::delegatable,
featureAMM,
noPriv,
({
{sfAsset, soeREQUIRED},
@@ -465,7 +437,6 @@ TRANSACTION(ttAMM_DEPOSIT, 36, AMMDeposit,
#endif
TRANSACTION(ttAMM_WITHDRAW, 37, AMMWithdraw,
Delegation::delegatable,
featureAMM,
mayDeleteAcct,
({
{sfAsset, soeREQUIRED},
@@ -482,7 +453,6 @@ TRANSACTION(ttAMM_WITHDRAW, 37, AMMWithdraw,
#endif
TRANSACTION(ttAMM_VOTE, 38, AMMVote,
Delegation::delegatable,
featureAMM,
noPriv,
({
{sfAsset, soeREQUIRED},
@@ -496,7 +466,6 @@ TRANSACTION(ttAMM_VOTE, 38, AMMVote,
#endif
TRANSACTION(ttAMM_BID, 39, AMMBid,
Delegation::delegatable,
featureAMM,
noPriv,
({
{sfAsset, soeREQUIRED},
@@ -512,7 +481,6 @@ TRANSACTION(ttAMM_BID, 39, AMMBid,
#endif
TRANSACTION(ttAMM_DELETE, 40, AMMDelete,
Delegation::delegatable,
featureAMM,
mustDeleteAcct,
({
{sfAsset, soeREQUIRED},
@@ -525,7 +493,6 @@ TRANSACTION(ttAMM_DELETE, 40, AMMDelete,
#endif
TRANSACTION(ttXCHAIN_CREATE_CLAIM_ID, 41, XChainCreateClaimID,
Delegation::delegatable,
featureXChainBridge,
noPriv,
({
{sfXChainBridge, soeREQUIRED},
@@ -536,7 +503,6 @@ TRANSACTION(ttXCHAIN_CREATE_CLAIM_ID, 41, XChainCreateClaimID,
/** This transactions initiates a crosschain transaction */
TRANSACTION(ttXCHAIN_COMMIT, 42, XChainCommit,
Delegation::delegatable,
featureXChainBridge,
noPriv,
({
{sfXChainBridge, soeREQUIRED},
@@ -548,7 +514,6 @@ TRANSACTION(ttXCHAIN_COMMIT, 42, XChainCommit,
/** This transaction completes a crosschain transaction */
TRANSACTION(ttXCHAIN_CLAIM, 43, XChainClaim,
Delegation::delegatable,
featureXChainBridge,
noPriv,
({
{sfXChainBridge, soeREQUIRED},
@@ -561,7 +526,6 @@ TRANSACTION(ttXCHAIN_CLAIM, 43, XChainClaim,
/** This transaction initiates a crosschain account create transaction */
TRANSACTION(ttXCHAIN_ACCOUNT_CREATE_COMMIT, 44, XChainAccountCreateCommit,
Delegation::delegatable,
featureXChainBridge,
noPriv,
({
{sfXChainBridge, soeREQUIRED},
@@ -573,7 +537,6 @@ TRANSACTION(ttXCHAIN_ACCOUNT_CREATE_COMMIT, 44, XChainAccountCreateCommit,
/** This transaction adds an attestation to a claim */
TRANSACTION(ttXCHAIN_ADD_CLAIM_ATTESTATION, 45, XChainAddClaimAttestation,
Delegation::delegatable,
featureXChainBridge,
createAcct,
({
{sfXChainBridge, soeREQUIRED},
@@ -594,7 +557,6 @@ TRANSACTION(ttXCHAIN_ADD_CLAIM_ATTESTATION, 45, XChainAddClaimAttestation,
TRANSACTION(ttXCHAIN_ADD_ACCOUNT_CREATE_ATTESTATION, 46,
XChainAddAccountCreateAttestation,
Delegation::delegatable,
featureXChainBridge,
createAcct,
({
{sfXChainBridge, soeREQUIRED},
@@ -615,7 +577,6 @@ TRANSACTION(ttXCHAIN_ADD_ACCOUNT_CREATE_ATTESTATION, 46,
/** This transaction modifies a sidechain */
TRANSACTION(ttXCHAIN_MODIFY_BRIDGE, 47, XChainModifyBridge,
Delegation::delegatable,
featureXChainBridge,
noPriv,
({
{sfXChainBridge, soeREQUIRED},
@@ -626,7 +587,6 @@ TRANSACTION(ttXCHAIN_MODIFY_BRIDGE, 47, XChainModifyBridge,
/** This transactions creates a sidechain */
TRANSACTION(ttXCHAIN_CREATE_BRIDGE, 48, XChainCreateBridge,
Delegation::delegatable,
featureXChainBridge,
noPriv,
({
{sfXChainBridge, soeREQUIRED},
@@ -640,7 +600,6 @@ TRANSACTION(ttXCHAIN_CREATE_BRIDGE, 48, XChainCreateBridge,
#endif
TRANSACTION(ttDID_SET, 49, DIDSet,
Delegation::delegatable,
featureDID,
noPriv,
({
{sfDIDDocument, soeOPTIONAL},
@@ -651,7 +610,6 @@ TRANSACTION(ttDID_SET, 49, DIDSet,
/** This transaction type deletes a DID */
TRANSACTION(ttDID_DELETE, 50, DIDDelete,
Delegation::delegatable,
featureDID,
noPriv,
({}))

@@ -661,7 +619,6 @@ TRANSACTION(ttDID_DELETE, 50, DIDDelete,
#endif
TRANSACTION(ttORACLE_SET, 51, OracleSet,
Delegation::delegatable,
featurePriceOracle,
noPriv,
({
{sfOracleDocumentID, soeREQUIRED},
@@ -678,7 +635,6 @@ TRANSACTION(ttORACLE_SET, 51, OracleSet,
#endif
TRANSACTION(ttORACLE_DELETE, 52, OracleDelete,
Delegation::delegatable,
featurePriceOracle,
noPriv,
({
{sfOracleDocumentID, soeREQUIRED},
@@ -690,7 +646,6 @@ TRANSACTION(ttORACLE_DELETE, 52, OracleDelete,
#endif
TRANSACTION(ttLEDGER_STATE_FIX, 53, LedgerStateFix,
Delegation::delegatable,
fixNFTokenPageLinks,
noPriv,
({
{sfLedgerFixType, soeREQUIRED},
@@ -703,7 +658,6 @@ TRANSACTION(ttLEDGER_STATE_FIX, 53, LedgerStateFix,
#endif
TRANSACTION(ttMPTOKEN_ISSUANCE_CREATE, 54, MPTokenIssuanceCreate,
Delegation::delegatable,
featureMPTokensV1,
createMPTIssuance,
({
{sfAssetScale, soeOPTIONAL},
@@ -711,7 +665,6 @@ TRANSACTION(ttMPTOKEN_ISSUANCE_CREATE, 54, MPTokenIssuanceCreate,
{sfMaximumAmount, soeOPTIONAL},
{sfMPTokenMetadata, soeOPTIONAL},
{sfDomainID, soeOPTIONAL},
{sfMutableFlags, soeOPTIONAL},
}))

/** This transaction type destroys a MPTokensIssuance instance */
@@ -720,7 +673,6 @@ TRANSACTION(ttMPTOKEN_ISSUANCE_CREATE, 54, MPTokenIssuanceCreate,
#endif
TRANSACTION(ttMPTOKEN_ISSUANCE_DESTROY, 55, MPTokenIssuanceDestroy,
Delegation::delegatable,
featureMPTokensV1,
destroyMPTIssuance,
({
{sfMPTokenIssuanceID, soeREQUIRED},
@@ -732,15 +684,11 @@ TRANSACTION(ttMPTOKEN_ISSUANCE_DESTROY, 55, MPTokenIssuanceDestroy,
#endif
TRANSACTION(ttMPTOKEN_ISSUANCE_SET, 56, MPTokenIssuanceSet,
Delegation::delegatable,
featureMPTokensV1,
noPriv,
({
{sfMPTokenIssuanceID, soeREQUIRED},
{sfHolder, soeOPTIONAL},
{sfDomainID, soeOPTIONAL},
{sfMPTokenMetadata, soeOPTIONAL},
{sfTransferFee, soeOPTIONAL},
{sfMutableFlags, soeOPTIONAL},
}))

/** This transaction type authorizes a MPToken instance */
@@ -749,7 +697,6 @@ TRANSACTION(ttMPTOKEN_ISSUANCE_SET, 56, MPTokenIssuanceSet,
#endif
TRANSACTION(ttMPTOKEN_AUTHORIZE, 57, MPTokenAuthorize,
Delegation::delegatable,
featureMPTokensV1,
mustAuthorizeMPT,
({
{sfMPTokenIssuanceID, soeREQUIRED},
@@ -762,7 +709,6 @@ TRANSACTION(ttMPTOKEN_AUTHORIZE, 57, MPTokenAuthorize,
#endif
TRANSACTION(ttCREDENTIAL_CREATE, 58, CredentialCreate,
Delegation::delegatable,
featureCredentials,
noPriv,
({
{sfSubject, soeREQUIRED},
@@ -774,7 +720,6 @@ TRANSACTION(ttCREDENTIAL_CREATE, 58, CredentialCreate,
/** This transaction type accept an Credential object */
TRANSACTION(ttCREDENTIAL_ACCEPT, 59, CredentialAccept,
Delegation::delegatable,
featureCredentials,
noPriv,
({
{sfIssuer, soeREQUIRED},
@@ -784,7 +729,6 @@ TRANSACTION(ttCREDENTIAL_ACCEPT, 59, CredentialAccept,
/** This transaction type delete an Credential object */
TRANSACTION(ttCREDENTIAL_DELETE, 60, CredentialDelete,
Delegation::delegatable,
featureCredentials,
noPriv,
({
{sfSubject, soeOPTIONAL},
@@ -798,7 +742,6 @@ TRANSACTION(ttCREDENTIAL_DELETE, 60, CredentialDelete,
#endif
TRANSACTION(ttNFTOKEN_MODIFY, 61, NFTokenModify,
Delegation::delegatable,
featureDynamicNFT,
noPriv,
({
{sfNFTokenID, soeREQUIRED},
@@ -812,7 +755,6 @@ TRANSACTION(ttNFTOKEN_MODIFY, 61, NFTokenModify,
#endif
TRANSACTION(ttPERMISSIONED_DOMAIN_SET, 62, PermissionedDomainSet,
Delegation::delegatable,
featurePermissionedDomains,
noPriv,
({
{sfDomainID, soeOPTIONAL},
@@ -825,7 +767,6 @@ TRANSACTION(ttPERMISSIONED_DOMAIN_SET, 62, PermissionedDomainSet,
#endif
TRANSACTION(ttPERMISSIONED_DOMAIN_DELETE, 63, PermissionedDomainDelete,
Delegation::delegatable,
featurePermissionedDomains,
noPriv,
({
{sfDomainID, soeREQUIRED},
@@ -837,7 +778,6 @@ TRANSACTION(ttPERMISSIONED_DOMAIN_DELETE, 63, PermissionedDomainDelete,
#endif
TRANSACTION(ttDELEGATE_SET, 64, DelegateSet,
Delegation::notDelegatable,
featurePermissionDelegation,
noPriv,
({
{sfAuthorize, soeREQUIRED},
@@ -850,8 +790,7 @@ TRANSACTION(ttDELEGATE_SET, 64, DelegateSet,
#endif
TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
Delegation::delegatable,
featureSingleAssetVault,
createPseudoAcct | createMPTIssuance | mustModifyVault,
createPseudoAcct | createMPTIssuance,
({
{sfAsset, soeREQUIRED, soeMPTSupported},
{sfAssetsMaximum, soeOPTIONAL},
@@ -859,7 +798,6 @@ TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
{sfDomainID, soeOPTIONAL},
{sfWithdrawalPolicy, soeOPTIONAL},
{sfData, soeOPTIONAL},
{sfScale, soeOPTIONAL},
}))

/** This transaction updates a single asset vault. */
@@ -868,8 +806,7 @@ TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
#endif
TRANSACTION(ttVAULT_SET, 66, VaultSet,
Delegation::delegatable,
featureSingleAssetVault,
mustModifyVault,
noPriv,
({
{sfVaultID, soeREQUIRED},
{sfAssetsMaximum, soeOPTIONAL},
@@ -883,8 +820,7 @@ TRANSACTION(ttVAULT_SET, 66, VaultSet,
#endif
TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
Delegation::delegatable,
featureSingleAssetVault,
mustDeleteAcct | destroyMPTIssuance | mustModifyVault,
|
||||
mustDeleteAcct | destroyMPTIssuance,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
}))
|
||||
@@ -895,8 +831,7 @@ TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
|
||||
#endif
|
||||
TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayAuthorizeMPT | mustModifyVault,
|
||||
mayAuthorizeMPT,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
@@ -908,8 +843,7 @@ TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
|
||||
#endif
|
||||
TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayDeleteMPT | mayAuthorizeMPT | mustModifyVault,
|
||||
noPriv,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
@@ -923,8 +857,7 @@ TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
|
||||
#endif
|
||||
TRANSACTION(ttVAULT_CLAWBACK, 70, VaultClawback,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayDeleteMPT | mustModifyVault,
|
||||
noPriv,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfHolder, soeREQUIRED},
|
||||
@@ -937,11 +870,10 @@ TRANSACTION(ttVAULT_CLAWBACK, 70, VaultClawback,
|
||||
#endif
|
||||
TRANSACTION(ttBATCH, 71, Batch,
|
||||
Delegation::notDelegatable,
|
||||
featureBatch,
|
||||
noPriv,
|
||||
({
|
||||
{sfRawTransactions, soeREQUIRED},
|
||||
{sfBatchSigners, soeOPTIONAL},
|
||||
{sfRawTransactions, soeREQUIRED},
|
||||
{sfBatchSigners, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
/** This system-generated transaction type is used to update the status of the various amendments.
|
||||
@@ -953,7 +885,6 @@ TRANSACTION(ttBATCH, 71, Batch,
|
||||
#endif
|
||||
TRANSACTION(ttAMENDMENT, 100, EnableAmendment,
|
||||
Delegation::notDelegatable,
|
||||
uint256{},
|
||||
noPriv,
|
||||
({
|
||||
{sfLedgerSequence, soeREQUIRED},
|
||||
@@ -965,7 +896,6 @@ TRANSACTION(ttAMENDMENT, 100, EnableAmendment,
|
||||
*/
|
||||
TRANSACTION(ttFEE, 101, SetFee,
|
||||
Delegation::notDelegatable,
|
||||
uint256{},
|
||||
noPriv,
|
||||
({
|
||||
{sfLedgerSequence, soeOPTIONAL},
|
||||
@@ -986,7 +916,6 @@ TRANSACTION(ttFEE, 101, SetFee,
|
||||
*/
|
||||
TRANSACTION(ttUNL_MODIFY, 102, UNLModify,
|
||||
Delegation::notDelegatable,
|
||||
uint256{},
|
||||
noPriv,
|
||||
({
|
||||
{sfUNLModifyDisabling, soeREQUIRED},
|
||||
|
||||
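Editor's note: the TRANSACTION(...) entries above form an X-macro table. A minimal sketch, not part of this diff, of how such a table is typically consumed; the Permission constructor later in this diff uses the same push_macro/undef/pop_macro pattern. The map name txNameByValue is hypothetical.

#include <map>
#include <string>

#pragma push_macro("TRANSACTION")
#undef TRANSACTION
// Emit one {value, "Name"} entry per transaction type; the variadic tail
// absorbs the privilege and field-list arguments we do not need here.
#define TRANSACTION(tag, value, name, delegatable, amendment, ...) \
    {value, #name},
static std::map<int, std::string> const txNameByValue = {
#include <xrpl/protocol/detail/transactions.macro>
};
#undef TRANSACTION
#pragma pop_macro("TRANSACTION")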
@@ -436,12 +436,10 @@ public:
admin_.erase(admin_.iterator_to(entry));
break;
default:
// LCOV_EXCL_START
UNREACHABLE(
"ripple::Resource::Logic::release : invalid entry "
"kind");
break;
// LCOV_EXCL_STOP
}
inactive_.push_back(entry);
entry.whenExpires = m_clock.now() + secondsUntilExpiration;

@@ -25,7 +25,7 @@
#include <xrpl/server/Port.h>
#include <xrpl/server/detail/ServerImpl.h>

#include <boost/asio/io_service.hpp>
#include <boost/asio/io_context.hpp>

namespace ripple {

@@ -34,10 +34,10 @@ template <class Handler>
std::unique_ptr<Server>
make_Server(
Handler& handler,
boost::asio::io_service& io_service,
boost::asio::io_context& io_context,
beast::Journal journal)
{
return std::make_unique<ServerImpl<Handler>>(handler, io_service, journal);
return std::make_unique<ServerImpl<Handler>>(handler, io_context, journal);
}

} // namespace ripple

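Editor's note: the hunk above is representative of the whole diff, which swaps boost::asio::io_service for io_context. A minimal sketch (plain Boost.Asio, not rippled-specific) of the rename: io_service is the deprecated spelling of io_context, and the two behave identically here.

#include <boost/asio/io_context.hpp>
#include <boost/asio/post.hpp>
#include <iostream>

int main()
{
    boost::asio::io_context ioc;  // was: boost::asio::io_service
    boost::asio::post(ioc, [] { std::cout << "ran on io_context\n"; });
    ioc.run();  // executes queued handlers, then returns
}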
@@ -88,9 +88,7 @@ public:
++iter)
{
typename BufferSequence::value_type const& buffer(*iter);
write(
boost::asio::buffer_cast<void const*>(buffer),
boost::asio::buffer_size(buffer));
write(buffer.data(), boost::asio::buffer_size(buffer));
}
}

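Editor's note: a sketch of the non-deprecated const_buffer accessors used by the new line above. buffer_cast<void const*>(b) is replaced by b.data(); boost::asio::buffer_size(b) could equally be written b.size().

#include <boost/asio/buffer.hpp>
#include <cstddef>

void consume(boost::asio::const_buffer b)
{
    void const* p = b.data();  // was: boost::asio::buffer_cast<void const*>(b)
    std::size_t n = b.size();  // equivalent to boost::asio::buffer_size(b)
    (void)p;
    (void)n;
}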
@@ -104,7 +102,7 @@ public:

/** Detach the session.
This holds the session open so that the response can be sent
asynchronously. Calls to io_service::run made by the server
asynchronously. Calls to io_context::run made by the server
will not return until all detached sessions are closed.
*/
virtual std::shared_ptr<Session>

@@ -24,11 +24,13 @@
#include <xrpl/beast/net/IPAddressConversion.h>
#include <xrpl/beast/utility/instrumentation.h>
#include <xrpl/server/Session.h>
#include <xrpl/server/detail/Spawn.h>
#include <xrpl/server/detail/io_list.h>

#include <boost/asio/ip/tcp.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/asio/ssl/stream.hpp>
#include <boost/asio/strand.hpp>
#include <boost/asio/streambuf.hpp>
#include <boost/beast/core/stream_traits.hpp>
#include <boost/beast/http/dynamic_body.hpp>
@@ -215,8 +217,8 @@ BaseHTTPPeer<Handler, Impl>::BaseHTTPPeer(
ConstBufferSequence const& buffers)
: port_(port)
, handler_(handler)
, work_(executor)
, strand_(executor)
, work_(boost::asio::make_work_guard(executor))
, strand_(boost::asio::make_strand(executor))
, remote_address_(remote_address)
, journal_(journal)
{
@@ -356,7 +358,7 @@ BaseHTTPPeer<Handler, Impl>::on_write(
return;
if (graceful_)
return do_close();
boost::asio::spawn(
util::spawn(
strand_,
std::bind(
&BaseHTTPPeer<Handler, Impl>::do_read,
@@ -375,7 +377,7 @@ BaseHTTPPeer<Handler, Impl>::do_writer(
{
auto const p = impl().shared_from_this();
resume = std::function<void(void)>([this, p, writer, keep_alive]() {
boost::asio::spawn(
util::spawn(
strand_,
std::bind(
&BaseHTTPPeer<Handler, Impl>::do_writer,
@@ -406,7 +408,7 @@ BaseHTTPPeer<Handler, Impl>::do_writer(
if (!keep_alive)
return do_close();

boost::asio::spawn(
util::spawn(
strand_,
std::bind(
&BaseHTTPPeer<Handler, Impl>::do_read,
@@ -448,14 +450,14 @@ BaseHTTPPeer<Handler, Impl>::write(
std::shared_ptr<Writer> const& writer,
bool keep_alive)
{
boost::asio::spawn(bind_executor(
util::spawn(
strand_,
std::bind(
&BaseHTTPPeer<Handler, Impl>::do_writer,
impl().shared_from_this(),
writer,
keep_alive,
std::placeholders::_1)));
std::placeholders::_1));
}

// DEPRECATED
@@ -490,12 +492,12 @@ BaseHTTPPeer<Handler, Impl>::complete()
}

// keep-alive
boost::asio::spawn(bind_executor(
util::spawn(
strand_,
std::bind(
&BaseHTTPPeer<Handler, Impl>::do_read,
impl().shared_from_this(),
std::placeholders::_1)));
std::placeholders::_1));
}

// DEPRECATED

@@ -91,8 +91,8 @@ BasePeer<Handler, Impl>::BasePeer(
return "##" + std::to_string(++id) + " ";
}())
, j_(sink_)
, work_(executor)
, strand_(executor)
, work_(boost::asio::make_work_guard(executor))
, strand_(boost::asio::make_strand(executor))
{
}

@@ -29,6 +29,7 @@
#include <xrpl/server/detail/BasePeer.h>
#include <xrpl/server/detail/LowestLayer.h>

#include <boost/asio/error.hpp>
#include <boost/beast/core/multi_buffer.hpp>
#include <boost/beast/http/message.hpp>
#include <boost/beast/websocket.hpp>
@@ -420,11 +421,17 @@ BaseWSPeer<Handler, Impl>::start_timer()
// Max seconds without completing a message
static constexpr std::chrono::seconds timeout{30};
static constexpr std::chrono::seconds timeoutLocal{3};
error_code ec;
timer_.expires_from_now(
remote_endpoint().address().is_loopback() ? timeoutLocal : timeout, ec);
if (ec)
return fail(ec, "start_timer");

try
{
timer_.expires_after(
remote_endpoint().address().is_loopback() ? timeoutLocal : timeout);
}
catch (boost::system::system_error const& e)
{
return fail(e.code(), "start_timer");
}

timer_.async_wait(bind_executor(
strand_,
std::bind(
@@ -438,8 +445,14 @@ template <class Handler, class Impl>
void
BaseWSPeer<Handler, Impl>::cancel_timer()
{
error_code ec;
timer_.cancel(ec);
try
{
timer_.cancel();
}
catch (boost::system::system_error const&)
{
// ignored
}
}

template <class Handler, class Impl>

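Editor's note: a sketch of the steady-timer API change applied in start_timer() and cancel_timer() above. The error_code overloads of expires_from_now() and cancel() were removed in newer Boost, so expires_after() is used and failures now arrive as boost::system::system_error exceptions.

#include <boost/asio/steady_timer.hpp>
#include <boost/system/system_error.hpp>
#include <chrono>

void arm(boost::asio::steady_timer& timer)
{
    try
    {
        // was: timer.expires_from_now(std::chrono::seconds{30}, ec);
        timer.expires_after(std::chrono::seconds{30});
    }
    catch (boost::system::system_error const& e)
    {
        // handle e.code(); the old API reported this via the ec out-parameter
    }
}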
@@ -69,7 +69,7 @@ private:
stream_type stream_;
socket_type& socket_;
endpoint_type remote_address_;
boost::asio::io_context::strand strand_;
boost::asio::strand<boost::asio::io_context::executor_type> strand_;
beast::Journal const j_;

public:
@@ -95,7 +95,7 @@ private:
Handler& handler_;
boost::asio::io_context& ioc_;
acceptor_type acceptor_;
boost::asio::io_context::strand strand_;
boost::asio::strand<boost::asio::io_context::executor_type> strand_;
bool ssl_;
bool plain_;

@@ -155,7 +155,7 @@ Door<Handler>::Detector::Detector(
, stream_(std::move(stream))
, socket_(stream_.socket())
, remote_address_(remote_address)
, strand_(ioc_)
, strand_(boost::asio::make_strand(ioc_))
, j_(j)
{
}
@@ -164,7 +164,7 @@ template <class Handler>
void
Door<Handler>::Detector::run()
{
boost::asio::spawn(
util::spawn(
strand_,
std::bind(
&Detector::do_detect,
@@ -269,7 +269,7 @@ Door<Handler>::reOpen()
Throw<std::exception>();
}

acceptor_.listen(boost::asio::socket_base::max_connections, ec);
acceptor_.listen(boost::asio::socket_base::max_listen_connections, ec);
if (ec)
{
JLOG(j_.error()) << "Listen on port '" << port_.name
@@ -291,7 +291,7 @@ Door<Handler>::Door(
, handler_(handler)
, ioc_(io_context)
, acceptor_(io_context)
, strand_(io_context)
, strand_(boost::asio::make_strand(io_context))
, ssl_(
port_.protocol.count("https") > 0 ||
port_.protocol.count("wss") > 0 || port_.protocol.count("wss2") > 0 ||
@@ -307,7 +307,7 @@ template <class Handler>
void
Door<Handler>::run()
{
boost::asio::spawn(
util::spawn(
strand_,
std::bind(
&Door<Handler>::do_accept,
@@ -320,7 +320,8 @@ void
Door<Handler>::close()
{
if (!strand_.running_in_this_thread())
return strand_.post(
return boost::asio::post(
strand_,
std::bind(&Door<Handler>::close, this->shared_from_this()));
error_code ec;
acceptor_.close(ec);

@@ -105,7 +105,7 @@ PlainHTTPPeer<Handler>::run()
{
if (!this->handler_.onAccept(this->session(), this->remote_address_))
{
boost::asio::spawn(
util::spawn(
this->strand_,
std::bind(&PlainHTTPPeer::do_close, this->shared_from_this()));
return;
@@ -114,7 +114,7 @@ PlainHTTPPeer<Handler>::run()
if (!socket_.is_open())
return;

boost::asio::spawn(
util::spawn(
this->strand_,
std::bind(
&PlainHTTPPeer::do_read,

@@ -115,14 +115,14 @@ SSLHTTPPeer<Handler>::run()
{
if (!this->handler_.onAccept(this->session(), this->remote_address_))
{
boost::asio::spawn(
util::spawn(
this->strand_,
std::bind(&SSLHTTPPeer::do_close, this->shared_from_this()));
return;
}
if (!socket_.is_open())
return;
boost::asio::spawn(
util::spawn(
this->strand_,
std::bind(
&SSLHTTPPeer::do_handshake,
@@ -164,7 +164,7 @@ SSLHTTPPeer<Handler>::do_handshake(yield_context do_yield)
this->port().protocol.count("https") > 0;
if (http)
{
boost::asio::spawn(
util::spawn(
this->strand_,
std::bind(
&SSLHTTPPeer::do_read,

@@ -26,6 +26,8 @@
#include <xrpl/server/detail/io_list.h>

#include <boost/asio.hpp>
#include <boost/asio/executor_work_guard.hpp>
#include <boost/asio/io_context.hpp>

#include <array>
#include <chrono>
@@ -85,9 +87,11 @@ private:

Handler& handler_;
beast::Journal const j_;
boost::asio::io_service& io_service_;
boost::asio::io_service::strand strand_;
std::optional<boost::asio::io_service::work> work_;
boost::asio::io_context& io_context_;
boost::asio::strand<boost::asio::io_context::executor_type> strand_;
std::optional<boost::asio::executor_work_guard<
boost::asio::io_context::executor_type>>
work_;

std::mutex m_;
std::vector<Port> ports_;
@@ -100,7 +104,7 @@ private:
public:
ServerImpl(
Handler& handler,
boost::asio::io_service& io_service,
boost::asio::io_context& io_context,
beast::Journal journal);

~ServerImpl();
@@ -123,10 +127,10 @@ public:
return ios_;
}

boost::asio::io_service&
get_io_service()
boost::asio::io_context&
get_io_context()
{
return io_service_;
return io_context_;
}

bool
@@ -140,13 +144,13 @@ private:
template <class Handler>
ServerImpl<Handler>::ServerImpl(
Handler& handler,
boost::asio::io_service& io_service,
boost::asio::io_context& io_context,
beast::Journal journal)
: handler_(handler)
, j_(journal)
, io_service_(io_service)
, strand_(io_service_)
, work_(io_service_)
, io_context_(io_context)
, strand_(boost::asio::make_strand(io_context_))
, work_(std::in_place, boost::asio::make_work_guard(io_context_))
{
}

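Editor's note: a minimal sketch of the two idioms the ServerImpl constructor now uses. An executor_work_guard keeps io_context::run() from returning while the handler queue is empty (replacing io_service::work), and make_strand builds a serializing executor (replacing io_context::strand).

#include <boost/asio/executor_work_guard.hpp>
#include <boost/asio/io_context.hpp>
#include <boost/asio/post.hpp>
#include <boost/asio/strand.hpp>

int main()
{
    boost::asio::io_context ioc;
    auto work = boost::asio::make_work_guard(ioc);  // keeps run() alive
    auto strand = boost::asio::make_strand(ioc);    // serialized execution
    boost::asio::post(strand, [] { /* handlers on one strand never overlap */ });
    work.reset();  // let run() return once the queued handlers finish
    ioc.run();
}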
@@ -173,7 +177,7 @@ ServerImpl<Handler>::ports(std::vector<Port> const& ports)
ports_.push_back(port);
auto& internalPort = ports_.back();
if (auto sp = ios_.emplace<Door<Handler>>(
handler_, io_service_, internalPort, j_))
handler_, io_context_, internalPort, j_))
{
list_.push_back(sp);

include/xrpl/server/detail/Spawn.h (new file, 108 lines)
@@ -0,0 +1,108 @@
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright (c) 2025 Ripple Labs Inc.

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#ifndef RIPPLE_SERVER_SPAWN_H_INCLUDED
#define RIPPLE_SERVER_SPAWN_H_INCLUDED

#include <xrpl/basics/Log.h>

#include <boost/asio/spawn.hpp>
#include <boost/asio/strand.hpp>

#include <concepts>
#include <type_traits>

namespace ripple::util {
namespace impl {

template <typename T>
concept IsStrand = std::same_as<
std::decay_t<T>,
boost::asio::strand<typename std::decay_t<T>::inner_executor_type>>;

/**
 * @brief A completion handler that restores `boost::asio::spawn`'s behaviour
 * from Boost 1.83
 *
 * This is intended to be passed as the third argument to `boost::asio::spawn`
 * so that exceptions are not ignored but propagated to `io_context.run()` call
 * site.
 *
 * @param ePtr The exception that was caught on the coroutine
 */
inline constexpr auto kPROPAGATE_EXCEPTIONS = [](std::exception_ptr ePtr) {
if (ePtr)
{
try
{
std::rethrow_exception(ePtr);
}
catch (std::exception const& e)
{
JLOG(debugLog().warn()) << "Spawn exception: " << e.what();
throw;
}
catch (...)
{
JLOG(debugLog().warn()) << "Spawn exception: Unknown";
throw;
}
}
};

} // namespace impl

/**
 * @brief Spawns a coroutine using `boost::asio::spawn`
 *
 * @note This uses kPROPAGATE_EXCEPTIONS to force asio to propagate exceptions
 * through `io_context`
 * @note Since implicit strand was removed from boost::asio::spawn this helper
 * function adds the strand back
 *
 * @tparam Ctx The type of the context/strand
 * @tparam F The type of the function to execute
 * @param ctx The execution context
 * @param func The function to execute. Must return `void`
 */
template <typename Ctx, typename F>
requires std::is_invocable_r_v<void, F, boost::asio::yield_context>
void
spawn(Ctx&& ctx, F&& func)
{
if constexpr (impl::IsStrand<Ctx>)
{
boost::asio::spawn(
std::forward<Ctx>(ctx),
std::forward<F>(func),
impl::kPROPAGATE_EXCEPTIONS);
}
else
{
boost::asio::spawn(
boost::asio::make_strand(
boost::asio::get_associated_executor(std::forward<Ctx>(ctx))),
std::forward<F>(func),
impl::kPROPAGATE_EXCEPTIONS);
}
}

} // namespace ripple::util

#endif
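Editor's note: a hedged usage sketch of the util::spawn helper added above. The coroutine body receives a boost::asio::yield_context, and any exception it throws is rethrown by kPROPAGATE_EXCEPTIONS so that it surfaces at the io_context::run() call site instead of being swallowed inside the coroutine.

#include <xrpl/server/detail/Spawn.h>

#include <boost/asio/io_context.hpp>
#include <boost/asio/strand.hpp>

#include <iostream>
#include <stdexcept>

int main()
{
    boost::asio::io_context ioc;
    ripple::util::spawn(
        boost::asio::make_strand(ioc),
        [](boost::asio::yield_context) { throw std::runtime_error("boom"); });
    try
    {
        ioc.run();
    }
    catch (std::exception const& e)
    {
        std::cerr << "caught at run(): " << e.what() << '\n';  // prints "boom"
    }
}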
@@ -166,7 +166,7 @@ public:
May be called concurrently.

Preconditions:
No call to io_service::run on any io_service
No call to io_context::run on any io_context
used by work objects associated with this io_list
exists in the caller's call stack.
*/

@@ -239,11 +239,9 @@ Logs::fromSeverity(beast::severities::Severity level)
case kError:
return lsERROR;

// LCOV_EXCL_START
default:
UNREACHABLE("ripple::Logs::fromSeverity : invalid severity");
[[fallthrough]];
// LCOV_EXCL_STOP
case kFatal:
break;
}
@@ -267,11 +265,9 @@ Logs::toSeverity(LogSeverity level)
return kWarning;
case lsERROR:
return kError;
// LCOV_EXCL_START
default:
UNREACHABLE("ripple::Logs::toSeverity : invalid severity");
[[fallthrough]];
// LCOV_EXCL_STOP
case lsFATAL:
break;
}
@@ -296,11 +292,9 @@ Logs::toString(LogSeverity s)
return "Error";
case lsFATAL:
return "Fatal";
// LCOV_EXCL_START
default:
UNREACHABLE("ripple::Logs::toString : invalid severity");
return "Unknown";
// LCOV_EXCL_STOP
}
}

@@ -362,11 +356,9 @@ Logs::format(
case kError:
output += "ERR ";
break;
// LCOV_EXCL_START
default:
UNREACHABLE("ripple::Logs::format : invalid severity");
[[fallthrough]];
// LCOV_EXCL_STOP
case kFatal:
output += "FTL ";
break;

@@ -25,8 +25,9 @@
#include <xrpl/beast/utility/Journal.h>
#include <xrpl/beast/utility/instrumentation.h>

#include <boost/asio/bind_executor.hpp>
#include <boost/asio/error.hpp>
#include <boost/asio/io_service.hpp>
#include <boost/asio/io_context.hpp>
#include <boost/asio/ip/tcp.hpp>
#include <boost/system/detail/error_code.hpp>

@@ -124,8 +125,8 @@ public:

beast::Journal m_journal;

boost::asio::io_service& m_io_service;
boost::asio::io_service::strand m_strand;
boost::asio::io_context& m_io_context;
boost::asio::strand<boost::asio::io_context::executor_type> m_strand;
boost::asio::ip::tcp::resolver m_resolver;

std::condition_variable m_cv;
@@ -155,12 +156,12 @@ public:
std::deque<Work> m_work;

ResolverAsioImpl(
boost::asio::io_service& io_service,
boost::asio::io_context& io_context,
beast::Journal journal)
: m_journal(journal)
, m_io_service(io_service)
, m_strand(io_service)
, m_resolver(io_service)
, m_io_context(io_context)
, m_strand(boost::asio::make_strand(io_context))
, m_resolver(io_context)
, m_asyncHandlersCompleted(true)
, m_stop_called(false)
, m_stopped(true)
@@ -216,8 +217,14 @@ public:
{
if (m_stop_called.exchange(true) == false)
{
m_io_service.dispatch(m_strand.wrap(std::bind(
&ResolverAsioImpl::do_stop, this, CompletionCounter(this))));
boost::asio::dispatch(
m_io_context,
boost::asio::bind_executor(
m_strand,
std::bind(
&ResolverAsioImpl::do_stop,
this,
CompletionCounter(this))));

JLOG(m_journal.debug()) << "Queued a stop request";
}
@@ -248,12 +255,16 @@ public:

// TODO NIKB use rvalue references to construct and move
// reducing cost.
m_io_service.dispatch(m_strand.wrap(std::bind(
&ResolverAsioImpl::do_resolve,
this,
names,
handler,
CompletionCounter(this))));
boost::asio::dispatch(
m_io_context,
boost::asio::bind_executor(
m_strand,
std::bind(
&ResolverAsioImpl::do_resolve,
this,
names,
handler,
CompletionCounter(this))));
}

//-------------------------------------------------------------------------
@@ -279,19 +290,20 @@ public:
std::string name,
boost::system::error_code const& ec,
HandlerType handler,
boost::asio::ip::tcp::resolver::iterator iter,
boost::asio::ip::tcp::resolver::results_type results,
CompletionCounter)
{
if (ec == boost::asio::error::operation_aborted)
return;

std::vector<beast::IP::Endpoint> addresses;
auto iter = results.begin();

// If we get an error message back, we don't return any
// results that we may have gotten.
if (!ec)
{
while (iter != boost::asio::ip::tcp::resolver::iterator())
while (iter != results.end())
{
addresses.push_back(
beast::IPAddressConversion::from_asio(*iter));
@@ -301,8 +313,14 @@ public:

handler(name, addresses);

m_io_service.post(m_strand.wrap(std::bind(
&ResolverAsioImpl::do_work, this, CompletionCounter(this))));
boost::asio::post(
m_io_context,
boost::asio::bind_executor(
m_strand,
std::bind(
&ResolverAsioImpl::do_work,
this,
CompletionCounter(this))));
}

HostAndPort
@@ -383,16 +401,21 @@ public:
{
JLOG(m_journal.error()) << "Unable to parse '" << name << "'";

m_io_service.post(m_strand.wrap(std::bind(
&ResolverAsioImpl::do_work, this, CompletionCounter(this))));
boost::asio::post(
m_io_context,
boost::asio::bind_executor(
m_strand,
std::bind(
&ResolverAsioImpl::do_work,
this,
CompletionCounter(this))));

return;
}

boost::asio::ip::tcp::resolver::query query(host, port);

m_resolver.async_resolve(
query,
host,
port,
std::bind(
&ResolverAsioImpl::do_finish,
this,
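Editor's note: a sketch of the resolver API this diff migrates to. async_resolve takes the host and service strings directly, and the completion handler receives a results_type range instead of an iterator; the query/iterator overloads are deprecated.

#include <boost/asio/io_context.hpp>
#include <boost/asio/ip/tcp.hpp>

#include <iostream>

int main()
{
    boost::asio::io_context ioc;
    boost::asio::ip::tcp::resolver resolver{ioc};
    resolver.async_resolve(
        "example.com", "443",
        [](boost::system::error_code ec,
           boost::asio::ip::tcp::resolver::results_type results) {
            if (!ec)
                for (auto const& entry : results)  // range, not an iterator
                    std::cout << entry.endpoint() << '\n';
        });
    ioc.run();
}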
@@ -423,10 +446,14 @@ public:

if (m_work.size() > 0)
{
m_io_service.post(m_strand.wrap(std::bind(
&ResolverAsioImpl::do_work,
this,
CompletionCounter(this))));
boost::asio::post(
m_io_context,
boost::asio::bind_executor(
m_strand,
std::bind(
&ResolverAsioImpl::do_work,
this,
CompletionCounter(this))));
}
}
}
@@ -435,9 +462,9 @@ public:
//-----------------------------------------------------------------------------

std::unique_ptr<ResolverAsio>
ResolverAsio::New(boost::asio::io_service& io_service, beast::Journal journal)
ResolverAsio::New(boost::asio::io_context& io_context, beast::Journal journal)
{
return std::make_unique<ResolverAsioImpl>(io_service, journal);
return std::make_unique<ResolverAsioImpl>(io_context, journal);
}

//-----------------------------------------------------------------------------

@@ -36,7 +36,6 @@ LogThrow(std::string const& title)
[[noreturn]] void
LogicError(std::string const& s) noexcept
{
// LCOV_EXCL_START
JLOG(debugLog().fatal()) << s;
std::cerr << "Logic error: " << s << std::endl;
// Use a non-standard contract naming here (without namespace) because
@@ -46,7 +45,6 @@ LogicError(std::string const& s) noexcept
// For the above reasons, we want this contract to stand out.
UNREACHABLE("LogicError", {{"message", s}});
std::abort();
// LCOV_EXCL_STOP
}

} // namespace ripple

@@ -30,9 +30,11 @@
#include <xrpl/beast/utility/instrumentation.h>

#include <boost/asio/basic_waitable_timer.hpp>
#include <boost/asio/bind_executor.hpp>
#include <boost/asio/buffer.hpp>
#include <boost/asio/error.hpp>
#include <boost/asio/io_service.hpp>
#include <boost/asio/executor_work_guard.hpp>
#include <boost/asio/io_context.hpp>
#include <boost/asio/ip/udp.hpp>
#include <boost/asio/strand.hpp>
#include <boost/system/detail/error_code.hpp>
@@ -238,9 +240,11 @@ private:
Journal m_journal;
IP::Endpoint m_address;
std::string m_prefix;
boost::asio::io_service m_io_service;
std::optional<boost::asio::io_service::work> m_work;
boost::asio::io_service::strand m_strand;
boost::asio::io_context m_io_context;
std::optional<boost::asio::executor_work_guard<
boost::asio::io_context::executor_type>>
m_work;
boost::asio::strand<boost::asio::io_context::executor_type> m_strand;
boost::asio::basic_waitable_timer<std::chrono::steady_clock> m_timer;
boost::asio::ip::udp::socket m_socket;
std::deque<std::string> m_data;
@@ -264,18 +268,24 @@ public:
: m_journal(journal)
, m_address(address)
, m_prefix(prefix)
, m_work(std::ref(m_io_service))
, m_strand(m_io_service)
, m_timer(m_io_service)
, m_socket(m_io_service)
, m_work(boost::asio::make_work_guard(m_io_context))
, m_strand(boost::asio::make_strand(m_io_context))
, m_timer(m_io_context)
, m_socket(m_io_context)
, m_thread(&StatsDCollectorImp::run, this)
{
}

~StatsDCollectorImp() override
{
boost::system::error_code ec;
m_timer.cancel(ec);
try
{
m_timer.cancel();
}
catch (boost::system::system_error const&)
{
// ignored
}

m_work.reset();
m_thread.join();
@@ -334,10 +344,10 @@ public:

//--------------------------------------------------------------------------

boost::asio::io_service&
get_io_service()
boost::asio::io_context&
get_io_context()
{
return m_io_service;
return m_io_context;
}

std::string const&
@@ -355,8 +365,14 @@ public:
void
post_buffer(std::string&& buffer)
{
m_io_service.dispatch(m_strand.wrap(std::bind(
&StatsDCollectorImp::do_post_buffer, this, std::move(buffer))));
boost::asio::dispatch(
m_io_context,
boost::asio::bind_executor(
m_strand,
std::bind(
&StatsDCollectorImp::do_post_buffer,
this,
std::move(buffer))));
}

// The keepAlive parameter makes sure the buffers sent to
@@ -386,8 +402,7 @@ public:
for (auto const& buffer : buffers)
{
std::string const s(
boost::asio::buffer_cast<char const*>(buffer),
boost::asio::buffer_size(buffer));
buffer.data(), boost::asio::buffer_size(buffer));
std::cerr << s;
}
std::cerr << '\n';
@@ -456,7 +471,7 @@ public:
set_timer()
{
using namespace std::chrono_literals;
m_timer.expires_from_now(1s);
m_timer.expires_after(1s);
m_timer.async_wait(std::bind(
&StatsDCollectorImp::on_timer, this, std::placeholders::_1));
}
@@ -498,13 +513,13 @@ public:

set_timer();

m_io_service.run();
m_io_context.run();

m_socket.shutdown(boost::asio::ip::udp::socket::shutdown_send, ec);

m_socket.close();

m_io_service.poll();
m_io_context.poll();
}
};

@@ -547,10 +562,12 @@ StatsDCounterImpl::~StatsDCounterImpl()
void
StatsDCounterImpl::increment(CounterImpl::value_type amount)
{
m_impl->get_io_service().dispatch(std::bind(
&StatsDCounterImpl::do_increment,
std::static_pointer_cast<StatsDCounterImpl>(shared_from_this()),
amount));
boost::asio::dispatch(
m_impl->get_io_context(),
std::bind(
&StatsDCounterImpl::do_increment,
std::static_pointer_cast<StatsDCounterImpl>(shared_from_this()),
amount));
}

void
@@ -592,10 +609,12 @@ StatsDEventImpl::StatsDEventImpl(
void
StatsDEventImpl::notify(EventImpl::value_type const& value)
{
m_impl->get_io_service().dispatch(std::bind(
&StatsDEventImpl::do_notify,
std::static_pointer_cast<StatsDEventImpl>(shared_from_this()),
value));
boost::asio::dispatch(
m_impl->get_io_context(),
std::bind(
&StatsDEventImpl::do_notify,
std::static_pointer_cast<StatsDEventImpl>(shared_from_this()),
value));
}

void
@@ -625,19 +644,23 @@ StatsDGaugeImpl::~StatsDGaugeImpl()
void
StatsDGaugeImpl::set(GaugeImpl::value_type value)
{
m_impl->get_io_service().dispatch(std::bind(
&StatsDGaugeImpl::do_set,
std::static_pointer_cast<StatsDGaugeImpl>(shared_from_this()),
value));
boost::asio::dispatch(
m_impl->get_io_context(),
std::bind(
&StatsDGaugeImpl::do_set,
std::static_pointer_cast<StatsDGaugeImpl>(shared_from_this()),
value));
}

void
StatsDGaugeImpl::increment(GaugeImpl::difference_type amount)
{
m_impl->get_io_service().dispatch(std::bind(
&StatsDGaugeImpl::do_increment,
std::static_pointer_cast<StatsDGaugeImpl>(shared_from_this()),
amount));
boost::asio::dispatch(
m_impl->get_io_context(),
std::bind(
&StatsDGaugeImpl::do_increment,
std::static_pointer_cast<StatsDGaugeImpl>(shared_from_this()),
amount));
}

void
@@ -713,10 +736,12 @@ StatsDMeterImpl::~StatsDMeterImpl()
void
StatsDMeterImpl::increment(MeterImpl::value_type amount)
{
m_impl->get_io_service().dispatch(std::bind(
&StatsDMeterImpl::do_increment,
std::static_pointer_cast<StatsDMeterImpl>(shared_from_this()),
amount));
boost::asio::dispatch(
m_impl->get_io_context(),
std::bind(
&StatsDMeterImpl::do_increment,
std::static_pointer_cast<StatsDMeterImpl>(shared_from_this()),
amount));
}

void

@@ -25,11 +25,11 @@ namespace IP {
bool
is_private(AddressV4 const& addr)
{
return ((addr.to_ulong() & 0xff000000) ==
return ((addr.to_uint() & 0xff000000) ==
0x0a000000) || // Prefix /8, 10.#.#.#
((addr.to_ulong() & 0xfff00000) ==
((addr.to_uint() & 0xfff00000) ==
0xac100000) || // Prefix /12 172.16.#.# - 172.31.#.#
((addr.to_ulong() & 0xffff0000) ==
((addr.to_uint() & 0xffff0000) ==
0xc0a80000) || // Prefix /16 192.168.#.#
addr.is_loopback();
}
@@ -44,7 +44,7 @@ char
get_class(AddressV4 const& addr)
{
static char const* table = "AAAABBCD";
return table[(addr.to_ulong() & 0xE0000000) >> 29];
return table[(addr.to_uint() & 0xE0000000) >> 29];
}

} // namespace IP

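Editor's note: a quick worked check of the /8 mask above, using the non-deprecated to_uint() accessor that this hunk switches to (to_ulong() returned unsigned long; to_uint() returns a fixed-width uint_type).

#include <boost/asio/ip/address_v4.hpp>
#include <cassert>

int main()
{
    auto const addr = boost::asio::ip::make_address_v4("10.1.2.3");
    // 10.1.2.3 == 0x0A010203; masking off all but the top octet leaves
    // 0x0A000000, so the address falls inside the private 10.0.0.0/8 block.
    assert((addr.to_uint() & 0xff000000) == 0x0a000000);
}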
@@ -20,6 +20,8 @@
#include <xrpl/beast/net/IPAddressV4.h>
#include <xrpl/beast/net/IPAddressV6.h>

#include <boost/asio/ip/address_v4.hpp>

namespace beast {
namespace IP {

@@ -28,7 +30,9 @@ is_private(AddressV6 const& addr)
{
return (
(addr.to_bytes()[0] & 0xfd) || // TODO fc00::/8 too ?
(addr.is_v4_mapped() && is_private(addr.to_v4())));
(addr.is_v4_mapped() &&
is_private(boost::asio::ip::make_address_v4(
boost::asio::ip::v4_mapped, addr))));
}

bool

@@ -21,6 +21,8 @@
#include <xrpl/beast/net/IPEndpoint.h>

#include <boost/algorithm/string/trim.hpp>
#include <boost/asio/ip/address.hpp>
#include <boost/asio/ip/address_v4.hpp>
#include <boost/system/detail/error_code.hpp>

#include <cctype>
@@ -167,7 +169,7 @@ operator>>(std::istream& is, Endpoint& endpoint)
}

boost::system::error_code ec;
auto addr = Address::from_string(addrStr, ec);
auto addr = boost::asio::ip::make_address(addrStr, ec);
if (ec)
{
is.setstate(std::ios_base::failbit);

@@ -174,7 +174,7 @@ Array::append(Json::Value const& v)
return;
}
}
UNREACHABLE("Json::Array::append : invalid type"); // LCOV_EXCL_LINE
UNREACHABLE("Json::Array::append : invalid type");
}

void
@@ -209,7 +209,7 @@ Object::set(std::string const& k, Json::Value const& v)
return;
}
}
UNREACHABLE("Json::Object::set : invalid type"); // LCOV_EXCL_LINE
UNREACHABLE("Json::Object::set : invalid type");
}

//------------------------------------------------------------------------------

@@ -213,10 +213,8 @@ Value::Value(ValueType type) : type_(type), allocated_(0)
value_.bool_ = false;
break;

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::Value(ValueType) : invalid type");
// LCOV_EXCL_STOP
}
}

@@ -292,10 +290,8 @@ Value::Value(Value const& other) : type_(other.type_)
value_.map_ = new ObjectValues(*other.value_.map_);
break;

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::Value(Value const&) : invalid type");
// LCOV_EXCL_STOP
}
}

@@ -322,10 +318,8 @@ Value::~Value()
delete value_.map_;
break;

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::~Value : invalid type");
// LCOV_EXCL_STOP
}
}

@@ -425,10 +419,8 @@ operator<(Value const& x, Value const& y)
return *x.value_.map_ < *y.value_.map_;
}

// LCOV_EXCL_START
default:
UNREACHABLE("Json::operator<(Value, Value) : invalid type");
// LCOV_EXCL_STOP
}

return 0; // unreachable
@@ -473,10 +465,8 @@ operator==(Value const& x, Value const& y)
return x.value_.map_->size() == y.value_.map_->size() &&
*x.value_.map_ == *y.value_.map_;

// LCOV_EXCL_START
default:
UNREACHABLE("Json::operator==(Value, Value) : invalid type");
// LCOV_EXCL_STOP
}

return 0; // unreachable
@@ -516,10 +506,8 @@ Value::asString() const
case objectValue:
JSON_ASSERT_MESSAGE(false, "Type is not convertible to string");

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::asString : invalid type");
// LCOV_EXCL_STOP
}

return ""; // unreachable
@@ -560,10 +548,8 @@ Value::asInt() const
case objectValue:
JSON_ASSERT_MESSAGE(false, "Type is not convertible to int");

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::asInt : invalid type");
// LCOV_EXCL_STOP
}

return 0; // unreachable;
@@ -604,10 +590,8 @@ Value::asUInt() const
case objectValue:
JSON_ASSERT_MESSAGE(false, "Type is not convertible to uint");

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::asUInt : invalid type");
// LCOV_EXCL_STOP
}

return 0; // unreachable;
@@ -638,10 +622,8 @@ Value::asDouble() const
case objectValue:
JSON_ASSERT_MESSAGE(false, "Type is not convertible to double");

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::asDouble : invalid type");
// LCOV_EXCL_STOP
}

return 0; // unreachable;
@@ -672,10 +654,8 @@ Value::asBool() const
case objectValue:
return value_.map_->size() != 0;

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::asBool : invalid type");
// LCOV_EXCL_STOP
}

return false; // unreachable;
@@ -730,10 +710,8 @@ Value::isConvertibleTo(ValueType other) const
return other == objectValue ||
(other == nullValue && value_.map_->size() == 0);

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::isConvertible : invalid type");
// LCOV_EXCL_STOP
}

return false; // unreachable;
@@ -766,10 +744,8 @@ Value::size() const
case objectValue:
return Int(value_.map_->size());

// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::size : invalid type");
// LCOV_EXCL_STOP
}

return 0; // unreachable;

@@ -24,6 +24,7 @@
#include <xrpl/net/HTTPClientSSLContext.h>

#include <boost/asio.hpp>
#include <boost/asio/ip/resolver_query_base.hpp>
#include <boost/asio/ip/tcp.hpp>
#include <boost/asio/ssl.hpp>
#include <boost/regex.hpp>
@@ -55,16 +56,16 @@ class HTTPClientImp : public std::enable_shared_from_this<HTTPClientImp>,
{
public:
HTTPClientImp(
boost::asio::io_service& io_service,
boost::asio::io_context& io_context,
unsigned short const port,
std::size_t maxResponseSize,
beast::Journal& j)
: mSocket(io_service, httpClientSSLContext->context())
, mResolver(io_service)
: mSocket(io_context, httpClientSSLContext->context())
, mResolver(io_context)
, mHeader(maxClientHeaderBytes)
, mPort(port)
, maxResponseSize_(maxResponseSize)
, mDeadline(io_service)
, mDeadline(io_context)
, j_(j)
{
}
@@ -146,18 +147,21 @@ public:
{
JLOG(j_.trace()) << "Fetch: " << mDeqSites[0];

auto query = std::make_shared<boost::asio::ip::tcp::resolver::query>(
auto query = std::make_shared<Query>(
mDeqSites[0],
std::to_string(mPort),
boost::asio::ip::resolver_query_base::numeric_service);
mQuery = query;

mDeadline.expires_from_now(mTimeout, mShutdown);

JLOG(j_.trace()) << "expires_from_now: " << mShutdown.message();

if (!mShutdown)
try
{
mDeadline.expires_after(mTimeout);
}
catch (boost::system::system_error const& e)
{
mShutdown = e.code();

JLOG(j_.trace()) << "expires_after: " << mShutdown.message();
mDeadline.async_wait(std::bind(
&HTTPClientImp::handleDeadline,
shared_from_this(),
@@ -169,7 +173,9 @@ public:
JLOG(j_.trace()) << "Resolving: " << mDeqSites[0];

mResolver.async_resolve(
*mQuery,
mQuery->host,
mQuery->port,
mQuery->flags,
std::bind(
&HTTPClientImp::handleResolve,
shared_from_this(),
@@ -233,7 +239,7 @@ public:
void
handleResolve(
boost::system::error_code const& ecResult,
boost::asio::ip::tcp::resolver::iterator itrEndpoint)
boost::asio::ip::tcp::resolver::results_type result)
{
if (!mShutdown)
{
@@ -255,7 +261,7 @@ public:

boost::asio::async_connect(
mSocket.lowest_layer(),
itrEndpoint,
result,
std::bind(
&HTTPClientImp::handleConnect,
shared_from_this(),
@@ -377,7 +383,7 @@ public:
static boost::regex reStatus{
"\\`HTTP/1\\S+ (\\d{3}) .*\\'"}; // HTTP/1.1 200 OK
static boost::regex reSize{
"\\`.*\\r\\nContent-Length:\\s+([0-9]+).*\\'", boost::regex::icase};
"\\`.*\\r\\nContent-Length:\\s+([0-9]+).*\\'"};
static boost::regex reBody{"\\`.*\\r\\n\\r\\n(.*)\\'"};

boost::smatch smMatch;
@@ -475,13 +481,15 @@ public:
std::string const& strData = "")
{
boost::system::error_code ecCancel;

(void)mDeadline.cancel(ecCancel);

if (ecCancel)
try
{
JLOG(j_.trace()) << "invokeComplete: Deadline cancel error: "
<< ecCancel.message();
mDeadline.cancel();
}
catch (boost::system::system_error const& e)
{
JLOG(j_.trace())
<< "invokeComplete: Deadline cancel error: " << e.what();
ecCancel = e.code();
}

JLOG(j_.debug()) << "invokeComplete: Deadline popping: "
@@ -515,7 +523,15 @@ private:
bool mSSL;
AutoSocket mSocket;
boost::asio::ip::tcp::resolver mResolver;
std::shared_ptr<boost::asio::ip::tcp::resolver::query> mQuery;

struct Query
{
std::string host;
std::string port;
boost::asio::ip::resolver_query_base::flags flags;
};
std::shared_ptr<Query> mQuery;

boost::asio::streambuf mRequest;
boost::asio::streambuf mHeader;
boost::asio::streambuf mResponse;
@@ -546,7 +562,7 @@ private:
void
HTTPClient::get(
bool bSSL,
boost::asio::io_service& io_service,
boost::asio::io_context& io_context,
std::deque<std::string> deqSites,
unsigned short const port,
std::string const& strPath,
@@ -559,14 +575,14 @@ HTTPClient::get(
beast::Journal& j)
{
auto client =
std::make_shared<HTTPClientImp>(io_service, port, responseMax, j);
std::make_shared<HTTPClientImp>(io_context, port, responseMax, j);
client->get(bSSL, deqSites, strPath, timeout, complete);
}

void
HTTPClient::get(
bool bSSL,
boost::asio::io_service& io_service,
boost::asio::io_context& io_context,
std::string strSite,
unsigned short const port,
std::string const& strPath,
@@ -581,14 +597,14 @@ HTTPClient::get(
std::deque<std::string> deqSites(1, strSite);

auto client =
std::make_shared<HTTPClientImp>(io_service, port, responseMax, j);
std::make_shared<HTTPClientImp>(io_context, port, responseMax, j);
client->get(bSSL, deqSites, strPath, timeout, complete);
}

void
HTTPClient::request(
bool bSSL,
boost::asio::io_service& io_service,
boost::asio::io_context& io_context,
std::string strSite,
unsigned short const port,
std::function<void(boost::asio::streambuf& sb, std::string const& strHost)>
@@ -604,7 +620,7 @@ HTTPClient::request(
std::deque<std::string> deqSites(1, strSite);

auto client =
std::make_shared<HTTPClientImp>(io_service, port, responseMax, j);
std::make_shared<HTTPClientImp>(io_context, port, responseMax, j);
client->request(bSSL, deqSites, setRequest, timeout, complete);
}

@@ -36,7 +36,7 @@ namespace BuildInfo {
// and follow the format described at http://semver.org/
//------------------------------------------------------------------------------
// clang-format off
char const* const versionString = "3.0.0-rc1"
char const* const versionString = "2.6.0"
// clang-format on

#if defined(DEBUG) || defined(SANITIZER)

@@ -18,7 +18,6 @@
//==============================================================================

#include <xrpl/beast/utility/instrumentation.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/Permissions.h>
#include <xrpl/protocol/jss.h>

@@ -26,19 +25,6 @@ namespace ripple {

Permission::Permission()
{
txFeatureMap_ = {
#pragma push_macro("TRANSACTION")
#undef TRANSACTION

#define TRANSACTION(tag, value, name, delegatable, amendment, ...) \
{value, amendment},

#include <xrpl/protocol/detail/transactions.macro>

#undef TRANSACTION
#pragma pop_macro("TRANSACTION")
};

delegatableTx_ = {
#pragma push_macro("TRANSACTION")
#undef TRANSACTION
@@ -101,22 +87,6 @@ Permission::getInstance()
return instance;
}

std::optional<std::string>
Permission::getPermissionName(std::uint32_t const value) const
{
auto const permissionValue = static_cast<GranularPermissionType>(value);
if (auto const granular = getGranularName(permissionValue))
return *granular;

// not a granular permission, check if it maps to a transaction type
auto const txType = permissionToTxType(value);
if (auto const* item = TxFormats::getInstance().findByType(txType);
item != nullptr)
return item->getName();

return std::nullopt;
}

std::optional<std::uint32_t>
Permission::getGranularValue(std::string const& name) const
{
@@ -147,23 +117,8 @@ Permission::getGranularTxType(GranularPermissionType const& gpType) const
return std::nullopt;
}

std::optional<std::reference_wrapper<uint256 const>> const
Permission::getTxFeature(TxType txType) const
{
auto const txFeaturesIt = txFeatureMap_.find(txType);
XRPL_ASSERT(
txFeaturesIt != txFeatureMap_.end(),
"ripple::Permissions::getTxFeature : tx exists in txFeatureMap_");

if (txFeaturesIt->second == uint256{})
return std::nullopt;
return txFeaturesIt->second;
}

bool
Permission::isDelegatable(
std::uint32_t const& permissionValue,
Rules const& rules) const
Permission::isDelegatable(std::uint32_t const& permissionValue) const
{
auto const granularPermission =
getGranularName(static_cast<GranularPermissionType>(permissionValue));
@@ -171,23 +126,7 @@ Permission::isDelegatable(
// granular permissions are always allowed to be delegated
return true;

auto const txType = permissionToTxType(permissionValue);
auto const it = delegatableTx_.find(txType);

if (rules.enabled(fixDelegateV1_1))
{
if (it == delegatableTx_.end())
return false;

auto const feature = getTxFeature(txType);

// fixDelegateV1_1: Delegation is only allowed if the required amendment
// for the transaction is enabled. For transactions that do not require
// an amendment, delegation is always allowed.
if (feature && !rules.enabled(*feature))
return false;
}

auto const it = delegatableTx_.find(permissionValue - 1);
if (it != delegatableTx_.end() && it->second == Delegation::notDelegatable)
return false;

@@ -112,9 +112,7 @@ void
STBase::add(Serializer& s) const
{
// Should never be called
// LCOV_EXCL_START
UNREACHABLE("ripple::STBase::add : not implemented");
// LCOV_EXCL_STOP
}

bool

@@ -177,10 +177,23 @@ STUInt32::getText() const
{
if (getFName() == sfPermissionValue)
{
auto const permissionName =
Permission::getInstance().getPermissionName(value_);
if (permissionName)
return *permissionName;
auto const permissionValue =
static_cast<GranularPermissionType>(value_);
auto const granular =
Permission::getInstance().getGranularName(permissionValue);

if (granular)
{
return *granular;
}
else
{
auto const txType =
Permission::getInstance().permissionToTxType(value_);
auto item = TxFormats::getInstance().findByType(txType);
if (item != nullptr)
return item->getName();
}
}
return std::to_string(value_);
}
@@ -191,10 +204,23 @@ STUInt32::getJson(JsonOptions) const
{
if (getFName() == sfPermissionValue)
{
auto const permissionName =
Permission::getInstance().getPermissionName(value_);
if (permissionName)
return *permissionName;
auto const permissionValue =
static_cast<GranularPermissionType>(value_);
auto const granular =
Permission::getInstance().getGranularName(permissionValue);

if (granular)
{
return *granular;
}
else
{
auto const txType =
Permission::getInstance().permissionToTxType(value_);
auto item = TxFormats::getInstance().findByType(txType);
if (item != nullptr)
return item->getName();
}
}

return value_;
@@ -249,33 +275,4 @@ STUInt64::getJson(JsonOptions) const
return convertToString(value_, 16); // Convert to base 16
}

//------------------------------------------------------------------------------

template <>
STInteger<std::int32_t>::STInteger(SerialIter& sit, SField const& name)
: STInteger(name, sit.get32())
{
}

template <>
SerializedTypeID
STInt32::getSType() const
{
return STI_INT32;
}

template <>
std::string
STInt32::getText() const
{
return std::to_string(value_);
}

template <>
Json::Value
STInt32::getJson(JsonOptions) const
{
return value_;
}

} // namespace ripple

@@ -647,12 +647,6 @@ STObject::getFieldH256(SField const& field) const
return getFieldByValue<STUInt256>(field);
}

std::int32_t
STObject::getFieldI32(SField const& field) const
{
return getFieldByValue<STInt32>(field);
}

AccountID
STObject::getAccountID(SField const& field) const
{
@@ -767,12 +761,6 @@ STObject::setFieldH256(SField const& field, uint256 const& v)
setFieldUsingSetValue<STUInt256>(field, v);
}

void
STObject::setFieldI32(SField const& field, std::int32_t v)
{
setFieldUsingSetValue<STInt32>(field, v);
}

void
STObject::setFieldV256(SField const& field, STVector256 const& v)
{
Some files were not shown because too many files have changed in this diff.