Mirror of https://github.com/XRPLF/rippled.git (synced 2025-12-03 17:35:51 +00:00)

Compare commits: a1q123456/... vs 3.0.0-rc2 (84 commits)
Commits (SHA1):
e19b2c55c2
138d6e751b
b195011eff
cd00aa591f
d3466de16c
90894ec6c1
6b55db490e
2237644ec5
587d4ac5cc
13b169ffcb
d42f8e0bda
0276a6b6bd
234dc6bdca
82c2bf7144
210d49df44
3ffa30bf24
e7e4d52e38
4135d56aa0
865557024e
91b96d6386
f1dbb20d7b
2a2881ee53
ee2dff337d
102a89f351
9add957962
0f1b607bb4
4425f84c1f
8a6cc3ded8
3eec6ffcd7
6b56c805dd
da0eff9c1b
6e326e6c11
405575fd53
f38f299a86
8951419dbe
a8b1a01d9e
994b490db5
7c8b16797f
9507d9c276
8ca21406e6
178f4248e4
e8069a40f2
5ebc29c481
224b055124
f99c1158d5
a2594b6fe0
12fb54c66e
51917be96d
97b8f5c4b3
a127314a89
0754cca98c
eb66ae1bd4
5c3b44d1af
e4b334faba
adad20b862
9907fa07a9
d032bd681a
b444457c19
4f076cb955
220ab26225
89d81655c6
7bc2d5cba4
a34b36e021
5b2ab905c0
5e43e91d4a
5bac21c05b
1643d22103
2df730438d
5d79bfc531
51ef35ab55
330a3215bc
85c2ceacde
70d5c624e8
8e4fda160d
072b1c442c
294e03ecf5
550f90a75e
d67dcfe3c4
0fd2f715bb
807462b191
19c4226d3d
d02c306f1e
c46888f8f7
2ae65d2fdb
.github/actions/build-deps/action.yml (vendored, 28 changed lines)

@@ -10,24 +10,40 @@ inputs:
  build_type:
    description: 'The build type to use ("Debug", "Release").'
    required: true
  build_nproc:
    description: "The number of processors to use for building."
    required: true
  force_build:
    description: 'Force building of all dependencies ("true", "false").'
    required: false
    default: "false"
  log_verbosity:
    description: "The logging verbosity."
    required: false
    default: "verbose"

runs:
  using: composite
  steps:
    - name: Install Conan dependencies
      shell: bash
      env:
        BUILD_DIR: ${{ inputs.build_dir }}
        BUILD_NPROC: ${{ inputs.build_nproc }}
        BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
        BUILD_TYPE: ${{ inputs.build_type }}
        LOG_VERBOSITY: ${{ inputs.log_verbosity }}
      run: |
        echo 'Installing dependencies.'
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}
        mkdir -p "${BUILD_DIR}"
        cd "${BUILD_DIR}"
        conan install \
          --output-folder . \
          --build ${{ inputs.force_build == 'true' && '"*"' || 'missing' }} \
          --options:host '&:tests=True' \
          --options:host '&:xrpld=True' \
          --settings:all build_type=${{ inputs.build_type }} \
          --build="${BUILD_OPTION}" \
          --options:host='&:tests=True' \
          --options:host='&:xrpld=True' \
          --settings:all build_type="${BUILD_TYPE}" \
          --conf:all tools.build:jobs=${BUILD_NPROC} \
          --conf:all tools.build:verbosity="${LOG_VERBOSITY}" \
          --conf:all tools.compilation:verbosity="${LOG_VERBOSITY}" \
          ..
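For orientation, this is roughly the command the rewritten step runs once GitHub substitutes the action inputs into the new environment variables. The concrete values below (Debug build, 8 jobs, no forced rebuild, verbose logging, a `.build` directory) are illustrative assumptions, not defaults fixed by the action:

```bash
# Sketch of the effective 'conan install' call for one configuration.
# BUILD_DIR=.build, BUILD_NPROC=8, BUILD_OPTION=missing, BUILD_TYPE=Debug and
# LOG_VERBOSITY=verbose are example values only.
mkdir -p .build && cd .build
conan install \
  --output-folder . \
  --build=missing \
  --options:host='&:tests=True' \
  --options:host='&:xrpld=True' \
  --settings:all build_type=Debug \
  --conf:all tools.build:jobs=8 \
  --conf:all tools.build:verbosity=verbose \
  --conf:all tools.compilation:verbosity=verbose \
  ..
```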
.github/actions/build-test/action.yml (vendored, 96 changed lines; file deleted)

@@ -1,96 +0,0 @@
# This action build and tests the binary. The Conan dependencies must have
# already been installed (see the build-deps action).
name: Build and Test
description: "Build and test the binary."

# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
  build_dir:
    description: "The directory where to build."
    required: true
  build_only:
    description: 'Whether to only build or to build and test the code ("true", "false").'
    required: false
    default: "false"
  build_type:
    description: 'The build type to use ("Debug", "Release").'
    required: true
  cmake_args:
    description: "Additional arguments to pass to CMake."
    required: false
    default: ""
  cmake_target:
    description: "The CMake target to build."
    required: true
  codecov_token:
    description: "The Codecov token to use for uploading coverage reports."
    required: false
    default: ""
  os:
    description: 'The operating system to use for the build ("linux", "macos", "windows").'
    required: true

runs:
  using: composite
  steps:
    - name: Configure CMake
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Configuring CMake.'
        cmake \
          -G '${{ inputs.os == 'windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
          ${{ inputs.cmake_args }} \
          ..
    - name: Build the binary
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Building binary.'
        cmake \
          --build . \
          --config ${{ inputs.build_type }} \
          --parallel $(nproc) \
          --target ${{ inputs.cmake_target }}
    - name: Check linking
      if: ${{ inputs.os == 'linux' }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Checking linking.'
        ldd ./rippled
        if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
          echo 'The binary is statically linked.'
        else
          echo 'The binary is dynamically linked.'
          exit 1
        fi
    - name: Verify voidstar
      if: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Verifying presence of instrumentation.'
        ./rippled --version | grep libvoidstar
    - name: Test the binary
      if: ${{ inputs.build_only == 'false' }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'windows' && inputs.build_type || '' }}
      run: |
        echo 'Testing binary.'
        ./rippled --unittest --unittest-jobs $(nproc)
        ctest -j $(nproc) --output-on-failure
    - name: Upload coverage report
      if: ${{ inputs.cmake_target == 'coverage' }}
      uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
      with:
        disable_search: true
        disable_telem: true
        fail_ci_if_error: true
        files: ${{ inputs.build_dir }}/coverage.xml
        plugins: noop
        token: ${{ inputs.codecov_token }}
        verbose: true
.github/actions/print-env/action.yml (vendored, new file, 43 lines)

@@ -0,0 +1,43 @@
name: Print build environment
description: "Print environment and some tooling versions"

runs:
  using: composite
  steps:
    - name: Check configuration (Windows)
      if: ${{ runner.os == 'Windows' }}
      shell: bash
      run: |
        echo 'Checking environment variables.'
        set

        echo 'Checking CMake version.'
        cmake --version

        echo 'Checking Conan version.'
        conan --version

    - name: Check configuration (Linux and macOS)
      if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
      shell: bash
      run: |
        echo 'Checking path.'
        echo ${PATH} | tr ':' '\n'

        echo 'Checking environment variables.'
        env | sort

        echo 'Checking CMake version.'
        cmake --version

        echo 'Checking compiler version.'
        ${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version

        echo 'Checking Conan version.'
        conan --version

        echo 'Checking Ninja version.'
        ninja --version

        echo 'Checking nproc version.'
        nproc --version
.github/actions/setup-conan/action.yml (vendored, 7 changed lines)

@@ -35,9 +35,12 @@ runs:

    - name: Set up Conan remote
      shell: bash
      env:
        CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
        CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
      run: |
        echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
        conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
        echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
        conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"

        echo 'Listing Conan remotes.'
        conan remote list
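With the remote name and URL that `upload-conan-deps.yml` defines later in this changeset (`xrplf` at https://conan.ripplex.io), the step above reduces to something like the following; the concrete values are taken from that workflow and shown here only as an example:

```bash
# Example expansion of the setup-conan step, assuming the xrplf remote
# configured in upload-conan-deps.yml.
conan remote add --index 0 --force xrplf https://conan.ripplex.io
conan remote list
```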
.github/scripts/levelization/README.md (vendored, 6 changed lines)

@@ -72,15 +72,15 @@ It generates many files of [results](results):
  desired as described above. In a perfect repo, this file will be
  empty.
  This file is committed to the repo, and is used by the [levelization
  Github workflow](../../workflows/check-levelization.yml) to validate
  Github workflow](../../workflows/reusable-check-levelization.yml) to validate
  that nothing changed.
- [`ordering.txt`](results/ordering.txt): A list showing relationships
  between modules where there are no loops as they actually exist, as
  opposed to how they are desired as described above.
  This file is committed to the repo, and is used by the [levelization
  Github workflow](../../workflows/check-levelization.yml) to validate
  Github workflow](../../workflows/reusable-check-levelization.yml) to validate
  that nothing changed.
- [`levelization.yml`](../../workflows/check-levelization.yml)
- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
  Github Actions workflow to test that levelization loops haven't
  changed. Unfortunately, if changes are detected, it can't tell if
  they are improvements or not, so if you have resolved any issues or

@@ -138,6 +138,7 @@ test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.net
xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics
.github/scripts/strategy-matrix/generate.py (vendored, 16 changed lines)

@@ -74,14 +74,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                continue

            # RHEL:
            # - 9.4 using GCC 12: Debug and Unity on linux/amd64.
            # - 9.6 using Clang: Release and no Unity on linux/amd64.
            # - 9 using GCC 12: Debug and Unity on linux/amd64.
            # - 10 using Clang: Release and no Unity on linux/amd64.
            if os['distro_name'] == 'rhel':
                skip = True
                if os['distro_version'] == '9.4':
                if os['distro_version'] == '9':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                elif os['distro_version'] == '9.6':
                elif os['distro_version'] == '10':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                if skip:

@@ -130,16 +130,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
            if os['distro_name'] == 'rhel' and architecture['platform'] == 'linux/arm64':
                continue

            # We skip all clang-20 on arm64 due to boost 1.86 build error
            if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-20' and architecture['platform'] == 'linux/arm64':
            # We skip all clang 20+ on arm64 due to Boost build error.
            if f'{os['compiler_name']}-{os['compiler_version']}' in ['clang-20', 'clang-21'] and architecture['platform'] == 'linux/arm64':
                continue

            # Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
            # Unity on linux/amd64
            if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                cmake_args = f'-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}'
                cmake_target = 'coverage'
                build_only = True

            # Generate a unique name for the configuration, e.g. macos-arm64-debug
            # or debian-bookworm-gcc-12-amd64-release-unity.

@@ -162,7 +160,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                'config_name': config_name,
                'cmake_args': cmake_args,
                'cmake_target': cmake_target,
                'build_only': 'true' if build_only else 'false',
                'build_only': build_only,
                'build_type': build_type,
                'os': os,
                'architecture': architecture,
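The coverage branch above prepends a fixed set of CMake flags and switches the build target to `coverage`. A rough local equivalent of the configuration this produces for the gcc-15 / Debug / non-Unity / linux-amd64 cell, assuming the Conan dependencies are already installed in the build directory and Ninja is the generator, would be:

```bash
# Sketch only: flags mirror the cmake_args string built by generate.py;
# the toolchain path and job count are illustrative.
cmake -G Ninja \
  -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
  -DCMAKE_BUILD_TYPE=Debug \
  -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON \
  -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 \
  ..
cmake --build . --target coverage --parallel 8
```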
.github/scripts/strategy-matrix/linux.json (vendored, 148 changed lines)

@@ -14,139 +14,197 @@
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "12"
      "compiler_version": "12",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "13"
      "compiler_version": "13",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "14"
      "compiler_version": "14",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "15"
      "compiler_version": "15",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "16"
      "compiler_version": "16",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "17"
      "compiler_version": "17",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "18"
      "compiler_version": "18",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "19"
      "compiler_version": "19",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "20"
      "compiler_version": "20",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.4",
      "distro_name": "debian",
      "distro_version": "trixie",
      "compiler_name": "gcc",
      "compiler_version": "12"
      "compiler_version": "14",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.4",
      "distro_name": "debian",
      "distro_version": "trixie",
      "compiler_name": "gcc",
      "compiler_version": "13"
      "compiler_version": "15",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.4",
      "compiler_name": "gcc",
      "compiler_version": "14"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.6",
      "compiler_name": "gcc",
      "compiler_version": "13"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.6",
      "compiler_name": "gcc",
      "compiler_version": "14"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.4",
      "distro_name": "debian",
      "distro_version": "trixie",
      "compiler_name": "clang",
      "compiler_version": "any"
      "compiler_version": "20",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "debian",
      "distro_version": "trixie",
      "compiler_name": "clang",
      "compiler_version": "21",
      "image_sha": "0525eae"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.6",
      "distro_version": "8",
      "compiler_name": "gcc",
      "compiler_version": "14",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "rhel",
      "distro_version": "8",
      "compiler_name": "clang",
      "compiler_version": "any"
      "compiler_version": "any",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9",
      "compiler_name": "gcc",
      "compiler_version": "12",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9",
      "compiler_name": "gcc",
      "compiler_version": "13",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9",
      "compiler_name": "gcc",
      "compiler_version": "14",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9",
      "compiler_name": "clang",
      "compiler_version": "any",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "rhel",
      "distro_version": "10",
      "compiler_name": "gcc",
      "compiler_version": "14",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "rhel",
      "distro_version": "10",
      "compiler_name": "clang",
      "compiler_version": "any",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "jammy",
      "compiler_name": "gcc",
      "compiler_version": "12"
      "compiler_version": "12",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "gcc",
      "compiler_version": "13"
      "compiler_version": "13",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "gcc",
      "compiler_version": "14"
      "compiler_version": "14",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "16"
      "compiler_version": "16",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "17"
      "compiler_version": "17",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "18"
      "compiler_version": "18",
      "image_sha": "e1782cd"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "19"
      "compiler_version": "19",
      "image_sha": "e1782cd"
    }
  ],
  "build_type": ["Debug", "Release"],
.github/scripts/strategy-matrix/macos.json (vendored, 3 changed lines)

@@ -10,7 +10,8 @@
      "distro_name": "macos",
      "distro_version": "",
      "compiler_name": "",
      "compiler_version": ""
      "compiler_version": "",
      "image_sha": ""
    }
  ],
  "build_type": ["Debug", "Release"],
.github/scripts/strategy-matrix/windows.json (vendored, 3 changed lines)

@@ -10,7 +10,8 @@
      "distro_name": "windows",
      "distro_version": "",
      "compiler_name": "",
      "compiler_version": ""
      "compiler_version": "",
      "image_sha": ""
    }
  ],
  "build_type": ["Debug", "Release"],
.github/workflows/build-test.yml (vendored, 147 changed lines; file deleted)

@@ -1,147 +0,0 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: false
        type: string
        default: ".build"
      dependencies_force_build:
        description: "Force building of all dependencies."
        required: false
        type: boolean
        default: false
      dependencies_force_upload:
        description: "Force uploading of all dependencies."
        required: false
        type: boolean
        default: false
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: true
        type: string
      strategy_matrix:
        # TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
        description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
        required: false
        type: string
        default: "minimal"
    secrets:
      codecov_token:
        description: "The Codecov token to use for uploading coverage reports."
        required: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.os }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    uses: ./.github/workflows/reusable-strategy-matrix.yml
    with:
      os: ${{ inputs.os }}
      strategy_matrix: ${{ inputs.strategy_matrix }}

  # Build and test the binary.
  build-test:
    needs:
      - generate-matrix
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
      max-parallel: 10
    runs-on: ${{ matrix.architecture.runner }}
    container: ${{ inputs.os == 'linux' && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-5dd7158', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
    steps:
      - name: Check strategy matrix
        run: |
          echo 'Operating system distro name: ${{ matrix.os.distro_name }}'
          echo 'Operating system distro version: ${{ matrix.os.distro_version }}'
          echo 'Operating system compiler name: ${{ matrix.os.compiler_name }}'
          echo 'Operating system compiler version: ${{ matrix.os.compiler_version }}'
          echo 'Architecture platform: ${{ matrix.architecture.platform }}'
          echo 'Architecture runner: ${{ toJson(matrix.architecture.runner) }}'
          echo 'Build type: ${{ matrix.build_type }}'
          echo 'Build only: ${{ matrix.build_only }}'
          echo 'CMake arguments: ${{ matrix.cmake_args }}'
          echo 'CMake target: ${{ matrix.cmake_target }}'
          echo 'Config name: ${{ matrix.config_name }}'

      - name: Cleanup workspace
        if: ${{ runner.os == 'macOS' }}
        uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Prepare runner
        uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
        with:
          disable_ccache: false

      - name: Check configuration (Windows)
        if: ${{ inputs.os == 'windows' }}
        run: |
          echo 'Checking environment variables.'
          set

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking Conan version.'
          conan --version
      - name: Check configuration (Linux and MacOS)
        if: ${{ inputs.os == 'linux' || inputs.os == 'macos' }}
        run: |
          echo 'Checking path.'
          echo ${PATH} | tr ':' '\n'

          echo 'Checking environment variables.'
          env | sort

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking compiler version.'
          ${{ inputs.os == 'linux' && '${CC}' || 'clang' }} --version

          echo 'Checking Conan version.'
          conan --version

          echo 'Checking Ninja version.'
          ninja --version

          echo 'Checking nproc version.'
          nproc --version

      - name: Setup Conan
        uses: ./.github/actions/setup-conan

      - name: Build dependencies
        uses: ./.github/actions/build-deps
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          force_build: ${{ inputs.dependencies_force_build }}

      - name: Build and test binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_only: ${{ matrix.build_only }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_target: ${{ matrix.cmake_target }}
          codecov_token: ${{ secrets.codecov_token }}
          os: ${{ inputs.os }}
.github/workflows/check-missing-commits.yml (vendored, 62 changed lines; file deleted)

@@ -1,62 +0,0 @@
# This workflow checks that all commits in the "master" branch are also in the
# "release" and "develop" branches, and that all commits in the "release" branch
# are also in the "develop" branch.
name: Check for missing commits

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-missing-commits
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
        with:
          fetch-depth: 0
      - name: Check for missing commits
        env:
          MESSAGE: |

            If you are reading this, then the commits indicated above are missing
            from the "develop" and/or "release" branch. Do a reverse-merge as soon
            as possible. See CONTRIBUTING.md for instructions.
        run: |
          set -o pipefail
          # Branches are ordered by how "canonical" they are. Every commit in one
          # branch should be in all the branches behind it.
          order=(master release develop)
          branches=()
          for branch in "${order[@]}"; do
            # Check that the branches exist so that this job will work on forked
            # repos, which don't necessarily have master and release branches.
            echo "Checking if ${branch} exists."
            if git ls-remote --exit-code --heads origin \
              refs/heads/${branch} > /dev/null; then
              branches+=(origin/${branch})
            fi
          done

          prior=()
          for branch in "${branches[@]}"; do
            if [[ ${#prior[@]} -ne 0 ]]; then
              echo "Checking ${prior[@]} for commits missing from ${branch}."
              git log --oneline --no-merges "${prior[@]}" \
                ^$branch | tee -a "missing-commits.txt"
              echo
            fi
            prior+=("${branch}")
          done

          if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
            echo "${MESSAGE}"
            exit 1
          fi
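The deleted workflow boils down to a pairwise `git log` comparison between the ordered branches. The same check can be run by hand; a minimal sketch, assuming `master` and `develop` both exist on the `origin` remote:

```bash
# List commits on master that are not on develop (merges excluded),
# mirroring the comparison the removed workflow performed branch by branch.
git fetch origin
git log --oneline --no-merges origin/master ^origin/develop
```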
.github/workflows/on-pr.yml (vendored, 20 changed lines)

@@ -50,8 +50,8 @@ jobs:
          files: |
            # These paths are unique to `on-pr.yml`.
            .github/scripts/levelization/**
            .github/workflows/check-levelization.yml
            .github/workflows/notify-clio.yml
            .github/workflows/reusable-check-levelization.yml
            .github/workflows/reusable-notify-clio.yml
            .github/workflows/on-pr.yml

            # Keep the paths below in sync with those in `on-trigger.yml`.

@@ -59,8 +59,11 @@
            .github/actions/build-test/**
            .github/actions/setup-conan/**
            .github/scripts/strategy-matrix/**
            .github/workflows/build-test.yml
            .github/workflows/reusable-build.yml
            .github/workflows/reusable-build-test-config.yml
            .github/workflows/reusable-build-test.yml
            .github/workflows/reusable-strategy-matrix.yml
            .github/workflows/reusable-test.yml
            .codecov.yml
            cmake/**
            conan/**

@@ -93,26 +96,27 @@
  check-levelization:
    needs: should-run
    if: ${{ needs.should-run.outputs.go == 'true' }}
    uses: ./.github/workflows/check-levelization.yml
    uses: ./.github/workflows/reusable-check-levelization.yml

  build-test:
    needs: should-run
    if: ${{ needs.should-run.outputs.go == 'true' }}
    uses: ./.github/workflows/build-test.yml
    uses: ./.github/workflows/reusable-build-test.yml
    strategy:
      fail-fast: false
      matrix:
        os: [linux, macos, windows]
    with:
      os: ${{ matrix.os }}
    secrets:
      codecov_token: ${{ secrets.CODECOV_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  notify-clio:
    needs:
      - should-run
      - build-test
    if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
    uses: ./.github/workflows/notify-clio.yml
    if: ${{ needs.should-run.outputs.go == 'true' && (startsWith(github.base_ref, 'release') || github.base_ref == 'master') }}
    uses: ./.github/workflows/reusable-notify-clio.yml
    secrets:
      clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
.github/workflows/on-trigger.yml (vendored, 45 changed lines)

@@ -9,22 +9,23 @@ name: Trigger
on:
  push:
    branches:
      - develop
      - release
      - master
      - "develop"
      - "release*"
      - "master"
    paths:
      # These paths are unique to `on-trigger.yml`.
      - ".github/workflows/check-missing-commits.yml"
      - ".github/workflows/on-trigger.yml"
      - ".github/workflows/publish-docs.yml"

      # Keep the paths below in sync with those in `on-pr.yml`.
      - ".github/actions/build-deps/**"
      - ".github/actions/build-test/**"
      - ".github/actions/setup-conan/**"
      - ".github/scripts/strategy-matrix/**"
      - ".github/workflows/build-test.yml"
      - ".github/workflows/reusable-build.yml"
      - ".github/workflows/reusable-build-test-config.yml"
      - ".github/workflows/reusable-build-test.yml"
      - ".github/workflows/reusable-strategy-matrix.yml"
      - ".github/workflows/reusable-test.yml"
      - ".codecov.yml"
      - "cmake/**"
      - "conan/**"

@@ -43,25 +44,16 @@ on:
  schedule:
    - cron: "32 6 * * 1-5"

  # Run when manually triggered via the GitHub UI or API. If `force_upload` is
  # true, then the dependencies that were missing (`force_rebuild` is false) or
  # rebuilt (`force_rebuild` is true) will be uploaded, overwriting existing
  # dependencies if needed.
  # Run when manually triggered via the GitHub UI or API.
  workflow_dispatch:
    inputs:
      dependencies_force_build:
        description: "Force building of all dependencies."
        required: false
        type: boolean
        default: false
      dependencies_force_upload:
        description: "Force uploading of all dependencies."
        required: false
        type: boolean
        default: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  # When a PR is merged into the develop branch it will be assigned a unique
  # group identifier, so execution will continue even if another PR is merged
  # while it is still running. In all other cases the group identifier is shared
  # per branch, so that any in-progress runs are cancelled when a new commit is
  # pushed.
  group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
  cancel-in-progress: true

defaults:

@@ -69,17 +61,14 @@ defaults:
    shell: bash

jobs:
  check-missing-commits:
    if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
    uses: ./.github/workflows/check-missing-commits.yml

  build-test:
    uses: ./.github/workflows/build-test.yml
    uses: ./.github/workflows/reusable-build-test.yml
    strategy:
      fail-fast: ${{ github.event_name == 'merge_group' }}
      matrix:
        os: [linux, macos, windows]
    with:
      os: ${{ matrix.os }}
      strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
    secrets:
      codecov_token: ${{ secrets.CODECOV_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/pre-commit.yml (vendored, 4 changed lines)

@@ -9,7 +9,7 @@ on:
jobs:
  # Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
  run-hooks:
    uses: XRPLF/actions/.github/workflows/pre-commit.yml@af1b0f0d764cda2e5435f5ac97b240d4bd4d95d3
    uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
    with:
      runs_on: ubuntu-latest
      container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-d1496b8" }'
      container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
.github/workflows/publish-docs.yml (vendored, 20 changed lines)

@@ -23,16 +23,24 @@ defaults:

env:
  BUILD_DIR: .build
  NPROC_SUBTRACT: 2

jobs:
  publish:
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-d1496b8
    container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Get number of processors
        uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
        id: nproc
        with:
          subtract: ${{ env.NPROC_SUBTRACT }}

      - name: Check configuration
        run: |
          echo 'Checking path.'

@@ -46,12 +54,16 @@ jobs:

          echo 'Checking Doxygen version.'
          doxygen --version

      - name: Build documentation
        env:
          BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
        run: |
          mkdir -p ${{ env.BUILD_DIR }}
          cd ${{ env.BUILD_DIR }}
          mkdir -p "${BUILD_DIR}"
          cd "${BUILD_DIR}"
          cmake -Donly_docs=ON ..
          cmake --build . --target docs --parallel $(nproc)
          cmake --build . --target docs --parallel ${BUILD_NPROC}

      - name: Publish documentation
        if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
.github/workflows/reusable-build-test-config.yml (vendored, new file, 213 lines)

@@ -0,0 +1,213 @@
name: Build and test configuration

on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: true
        type: string

      build_only:
        description: 'Whether to only build or to build and test the code ("true", "false").'
        required: true
        type: boolean

      build_type:
        description: 'The build type to use ("Debug", "Release").'
        type: string
        required: true

      cmake_args:
        description: "Additional arguments to pass to CMake."
        required: false
        type: string
        default: ""

      cmake_target:
        description: "The CMake target to build."
        type: string
        required: true

      runs_on:
        description: Runner to run the job on as a JSON string
        required: true
        type: string

      image:
        description: "The image to run in (leave empty to run natively)"
        required: true
        type: string

      config_name:
        description: "The configuration string (used for naming artifacts and such)."
        required: true
        type: string

      nproc_subtract:
        description: "The number of processors to subtract when calculating parallelism."
        required: false
        type: number
        default: 2

    secrets:
      CODECOV_TOKEN:
        description: "The Codecov token to use for uploading coverage reports."
        required: true

defaults:
  run:
    shell: bash

jobs:
  build-and-test:
    name: ${{ inputs.config_name }}
    runs-on: ${{ fromJSON(inputs.runs_on) }}
    container: ${{ inputs.image != '' && inputs.image || null }}
    timeout-minutes: 60
    env:
      ENABLED_VOIDSTAR: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
      ENABLED_COVERAGE: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }}
    steps:
      - name: Cleanup workspace (macOS and Windows)
        if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
        uses: XRPLF/actions/.github/actions/cleanup-workspace@01b244d2718865d427b499822fbd3f15e7197fcc

      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Prepare runner
        uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
        with:
          disable_ccache: false

      - name: Print build environment
        uses: ./.github/actions/print-env

      - name: Get number of processors
        uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
        id: nproc
        with:
          subtract: ${{ inputs.nproc_subtract }}

      - name: Setup Conan
        uses: ./.github/actions/setup-conan

      - name: Build dependencies
        uses: ./.github/actions/build-deps
        with:
          build_dir: ${{ inputs.build_dir }}
          build_nproc: ${{ steps.nproc.outputs.nproc }}
          build_type: ${{ inputs.build_type }}
          # Set the verbosity to "quiet" for Windows to avoid an excessive
          # amount of logs. For other OSes, the "verbose" logs are more useful.
          log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}

      - name: Configure CMake
        working-directory: ${{ inputs.build_dir }}
        env:
          BUILD_TYPE: ${{ inputs.build_type }}
          CMAKE_ARGS: ${{ inputs.cmake_args }}
        run: |
          cmake \
            -G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
            -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
            -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
            ${CMAKE_ARGS} \
            ..

      - name: Build the binary
        working-directory: ${{ inputs.build_dir }}
        env:
          BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
          BUILD_TYPE: ${{ inputs.build_type }}
          CMAKE_TARGET: ${{ inputs.cmake_target }}
        run: |
          cmake \
            --build . \
            --config "${BUILD_TYPE}" \
            --parallel "${BUILD_NPROC}" \
            --target "${CMAKE_TARGET}"

      - name: Upload rippled artifact (Linux)
        if: ${{ github.repository_owner == 'XRPLF' && runner.os == 'Linux' }}
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        env:
          BUILD_DIR: ${{ inputs.build_dir }}
        with:
          name: rippled-${{ inputs.config_name }}
          path: ${{ env.BUILD_DIR }}/rippled
          retention-days: 3
          if-no-files-found: error

      - name: Check linking (Linux)
        if: ${{ runner.os == 'Linux' }}
        working-directory: ${{ inputs.build_dir }}
        run: |
          ldd ./rippled
          if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
            echo 'The binary is statically linked.'
          else
            echo 'The binary is dynamically linked.'
            exit 1
          fi

      - name: Verify presence of instrumentation (Linux)
        if: ${{ runner.os == 'Linux' && env.ENABLED_VOIDSTAR == 'true' }}
        working-directory: ${{ inputs.build_dir }}
        run: |
          ./rippled --version | grep libvoidstar

      - name: Run the separate tests
        if: ${{ !inputs.build_only }}
        working-directory: ${{ inputs.build_dir }}
        # Windows locks some of the build files while running tests, and parallel jobs can collide
        env:
          BUILD_TYPE: ${{ inputs.build_type }}
          PARALLELISM: ${{ runner.os == 'Windows' && '1' || steps.nproc.outputs.nproc }}
        run: |
          ctest \
            --output-on-failure \
            -C "${BUILD_TYPE}" \
            -j "${PARALLELISM}"

      - name: Run the embedded tests
        if: ${{ !inputs.build_only }}
        working-directory: ${{ runner.os == 'Windows' && format('{0}/{1}', inputs.build_dir, inputs.build_type) || inputs.build_dir }}
        env:
          BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
        run: |
          ./rippled --unittest --unittest-jobs "${BUILD_NPROC}"

      - name: Debug failure (Linux)
        if: ${{ failure() && runner.os == 'Linux' && !inputs.build_only }}
        run: |
          echo "IPv4 local port range:"
          cat /proc/sys/net/ipv4/ip_local_port_range
          echo "Netstat:"
          netstat -an

      - name: Prepare coverage report
        if: ${{ !inputs.build_only && env.ENABLED_COVERAGE == 'true' }}
        working-directory: ${{ inputs.build_dir }}
        env:
          BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
          BUILD_TYPE: ${{ inputs.build_type }}
        run: |
          cmake \
            --build . \
            --config "${BUILD_TYPE}" \
            --parallel "${BUILD_NPROC}" \
            --target coverage

      - name: Upload coverage report
        if: ${{ github.repository_owner == 'XRPLF' && !inputs.build_only && env.ENABLED_COVERAGE == 'true' }}
        uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
        with:
          disable_search: true
          disable_telem: true
          fail_ci_if_error: true
          files: ${{ inputs.build_dir }}/coverage.xml
          plugins: noop
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
.github/workflows/reusable-build-test.yml (vendored, new file, 58 lines)

@@ -0,0 +1,58 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: false
        type: string
        default: ".build"
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: true
        type: string
      strategy_matrix:
        # TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
        description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
        required: false
        type: string
        default: "minimal"
    secrets:
      CODECOV_TOKEN:
        description: "The Codecov token to use for uploading coverage reports."
        required: true

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    uses: ./.github/workflows/reusable-strategy-matrix.yml
    with:
      os: ${{ inputs.os }}
      strategy_matrix: ${{ inputs.strategy_matrix }}

  # Build and test the binary for each configuration.
  build-test-config:
    needs:
      - generate-matrix
    uses: ./.github/workflows/reusable-build-test-config.yml
    strategy:
      fail-fast: ${{ github.event_name == 'merge_group' }}
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
      max-parallel: 10
    with:
      build_dir: ${{ inputs.build_dir }}
      build_only: ${{ matrix.build_only }}
      build_type: ${{ matrix.build_type }}
      cmake_args: ${{ matrix.cmake_args }}
      cmake_target: ${{ matrix.cmake_target }}
      runs_on: ${{ toJSON(matrix.architecture.runner) }}
      image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
      config_name: ${{ matrix.config_name }}
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -46,41 +46,46 @@ jobs:
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Generate outputs
        id: generate
        env:
          PR_NUMBER: ${{ github.event.pull_request.number }}
        run: |
          echo 'Generating user and channel.'
          echo "user=clio" >> "${GITHUB_OUTPUT}"
          echo "channel=pr_${{ github.event.pull_request.number }}" >> "${GITHUB_OUTPUT}"
          echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
          echo 'Extracting version.'
          echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
      - name: Calculate conan reference
        id: conan_ref
        run: |
          echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"

      - name: Set up Conan
        uses: ./.github/actions/setup-conan
        with:
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}

      - name: Log into Conan remote
        run: conan remote login ${{ inputs.conan_remote_name }} "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
        env:
          CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
        run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
      - name: Upload package
        env:
          CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
        run: |
          conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
          conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.conan_ref.outputs.conan_ref }}
          conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
    outputs:
      conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}

  notify:
    needs: upload
    runs-on: ubuntu-latest
    env:
      GH_TOKEN: ${{ secrets.clio_notify_token }}
    steps:
      - name: Notify Clio
        env:
          GH_TOKEN: ${{ secrets.clio_notify_token }}
          PR_URL: ${{ github.event.pull_request.html_url }}
        run: |
          gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
            /repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
            -F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
            -F "client_payload[pr_url]=${{ github.event.pull_request.html_url }}"
            -F "client_payload[pr_url]=${PR_URL}"
@@ -18,6 +18,10 @@ on:
        description: "The generated strategy matrix."
        value: ${{ jobs.generate-matrix.outputs.matrix }}

defaults:
  run:
    shell: bash

jobs:
  generate-matrix:
    runs-on: ubuntu-latest

@@ -35,4 +39,7 @@ jobs:
      - name: Generate strategy matrix
        working-directory: .github/scripts/strategy-matrix
        id: generate
        run: ./generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }} >> "${GITHUB_OUTPUT}"
        env:
          GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
          GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
        run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
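The step now passes its two optional arguments through environment variables instead of inlining the expressions. Run locally, an equivalent invocation would look something like the following; the choice of `--all` and `linux.json` is just one example of the flags the workflow can produce:

```bash
# Hypothetical local run of the matrix generator for all Linux configurations.
cd .github/scripts/strategy-matrix
./generate.py --all --config=linux.json
```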
.github/workflows/upload-conan-deps.yml (vendored, 48 changed lines)

@@ -24,30 +24,34 @@ on:
    branches: [develop]
    paths:
      - .github/workflows/upload-conan-deps.yml

      - .github/workflows/reusable-strategy-matrix.yml

      - .github/actions/build-deps/action.yml
      - .github/actions/setup-conan/action.yml
      - ".github/scripts/strategy-matrix/**"

      - conanfile.py
      - conan.lock

env:
  CONAN_REMOTE_NAME: xrplf
  CONAN_REMOTE_URL: https://conan.ripplex.io
  NPROC_SUBTRACT: 2

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    uses: ./.github/workflows/reusable-strategy-matrix.yml
    with:
      strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}

  # Build and upload the dependencies for each configuration.
  run-upload-conan-deps:
    needs:
      - generate-matrix

@@ -56,19 +60,29 @@ jobs:
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
      max-parallel: 10
    runs-on: ${{ matrix.architecture.runner }}
    container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-5dd7158', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}

    container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
    steps:
      - name: Cleanup workspace
        if: ${{ runner.os == 'macOS' }}
        uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
      - name: Cleanup workspace (macOS and Windows)
        if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
        uses: XRPLF/actions/.github/actions/cleanup-workspace@01b244d2718865d427b499822fbd3f15e7197fcc

      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Prepare runner
        uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
        uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
        with:
          disable_ccache: false

      - name: Print build environment
        uses: ./.github/actions/print-env

      - name: Get number of processors
        uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
        id: nproc
        with:
          subtract: ${{ env.NPROC_SUBTRACT }}

      - name: Setup Conan
        uses: ./.github/actions/setup-conan
        with:

@@ -79,13 +93,19 @@ jobs:
        uses: ./.github/actions/build-deps
        with:
          build_dir: .build
          build_nproc: ${{ steps.nproc.outputs.nproc }}
          build_type: ${{ matrix.build_type }}
          force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
          # Set the verbosity to "quiet" for Windows to avoid an excessive
          # amount of logs. For other OSes, the "verbose" logs are more useful.
          log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}

      - name: Log into Conan remote
        if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' }}
        run: conan remote login ${{ env.CONAN_REMOTE_NAME }} "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
        if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
        run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"

      - name: Upload Conan packages
        if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule' }}
        run: conan upload "*" -r=${{ env.CONAN_REMOTE_NAME }} --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
        if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
        env:
          FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
        run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}
@@ -34,6 +34,5 @@ repos:
      exclude: |
        (?x)^(
          external/.*|
          .github/scripts/levelization/results/.*\.txt|
          conan\.lock
          .github/scripts/levelization/results/.*\.txt
        )$
41
BUILD.md
41
BUILD.md
@@ -39,17 +39,12 @@ found here](./docs/build/environment.md).
|
||||
|
||||
- [Python 3.11](https://www.python.org/downloads/), or higher
|
||||
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
|
||||
- [CMake 3.22](https://cmake.org/download/)[^2], or higher
|
||||
- [CMake 3.22](https://cmake.org/download/), or higher
|
||||
|
||||
[^1]:
|
||||
It is possible to build with Conan 1.60+, but the instructions are
|
||||
significantly different, which is why we are not recommending it.
|
||||
|
||||
[^2]:
|
||||
CMake 4 is not yet supported by all dependencies required by this project.
|
||||
If you are affected by this issue, follow [conan workaround for cmake
|
||||
4](#workaround-for-cmake-4)
|
||||
|
||||
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
|
||||
The [minimum compiler versions][2] required are:
|
||||
|
||||
@@ -282,21 +277,6 @@ sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
```

#### Workaround for CMake 4

If your system CMake is version 4 rather than 3, you may have to configure Conan
profile to use CMake version 3 for dependencies, by adding the following two
lines to your profile:

```text
[tool_requires]
!cmake/*: cmake/[>=3 <4]
```

This will force Conan to download and use a locally cached CMake 3 version, and
is needed because some of the dependencies used by this project do not support
CMake 4.

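For example (a sketch only, assuming the default profile under `$(conan config home)/profiles` is the one in use), the two lines can be appended with:

```
# Append the CMake 3 pin to the default Conan profile.
cat >> $(conan config home)/profiles/default <<'EOF'

[tool_requires]
!cmake/*: cmake/[>=3 <4]
EOF
```
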
#### Clang workaround for grpc

If your compiler is clang, version 19 or later, or apple-clang, version 17 or
@@ -515,18 +495,18 @@ A coverage report is created when the following steps are completed, in order:

1. `rippled` binary built with instrumentation data, enabled by the `coverage`
option mentioned above
2. completed run of unit tests, which populates coverage capture data
2. completed one or more run of the unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
to assemble both instrumentation data and the coverage capture data into a coverage report

The above steps are automated into a single target `coverage`. The instrumented
The last step of the above is automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`
(to store coverage capture data). Since `rippled` binary is simply a dependency of the
coverage report target, it is possible to re-run the `coverage` target without
rebuilding the `rippled` binary. Note, running of the unit tests before the `coverage`
target is left to the developer. Each such run will append to the coverage data
collected in the build directory.

The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
@@ -535,11 +515,6 @@ to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.

By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.

Example use with some cmake variables set:

```

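A minimal sketch of one such invocation, run from a build directory that has already been configured with Conan as described earlier in this guide (the variable values here are illustrative only):

```
# Sketch only: enable coverage instrumentation, pick an XML report, and build
# the `coverage` target, which invokes gcovr over the collected capture data.
cmake -Dcoverage=ON -Dcoverage_format=xml ..
cmake --build . --target coverage
```
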
@@ -1,21 +1,3 @@
macro(group_sources_in source_dir curdir)
file(GLOB children RELATIVE ${source_dir}/${curdir}
${source_dir}/${curdir}/*)
foreach (child ${children})
if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
group_sources_in(${source_dir} ${curdir}/${child})
else()
string(REPLACE "/" "\\" groupname ${curdir})
source_group(${groupname} FILES
${source_dir}/${curdir}/${child})
endif()
endforeach()
endmacro()

macro(group_sources curdir)
group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
endmacro()

macro (exclude_from_default target_)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)

@@ -109,6 +109,9 @@
# - add a new function add_code_coverage_to_target
# - remove some unused code
#
# 2025-11-11, Bronek Kozicki
# - make EXECUTABLE and EXECUTABLE_ARGS optional
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
@@ -317,6 +320,10 @@ function(setup_target_for_coverage_gcovr)
set(Coverage_FORMAT xml)
endif()

if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS)
message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set")
endif()

if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else()
@@ -398,17 +405,18 @@ function(setup_target_for_coverage_gcovr)
endforeach()

# Set up commands which will be run to generate coverage data
# Run tests
set(GCOVR_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
# If EXECUTABLE is not set, the user is expected to run the tests manually
# before running the coverage target NAME
if(DEFINED Coverage_EXECUTABLE)
set(GCOVR_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
endif()

# Create folder
if(DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD
${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
else()
set(GCOVR_FOLDER_CMD echo) # dummy
endif()

# Running gcovr
@@ -425,11 +433,13 @@ function(setup_target_for_coverage_gcovr)
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")

message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "")
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
endif()

if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "")
message(STATUS "Command to create a folder: ")
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")

@@ -12,7 +12,7 @@ if (static OR MSVC)
else ()
set (Boost_USE_STATIC_RUNTIME OFF)
endif ()
find_dependency (Boost 1.70
find_dependency (Boost
COMPONENTS
chrono
container
@@ -52,5 +52,3 @@ if (TARGET ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif ()

include ("${CMAKE_CURRENT_LIST_DIR}/RippleTargets.cmake")

@@ -72,10 +72,7 @@ include(target_link_modules)

# Level 01
add_module(xrpl beast)
target_link_libraries(xrpl.libxrpl.beast PUBLIC
xrpl.imports.main
xrpl.libpb
)
target_link_libraries(xrpl.libxrpl.beast PUBLIC xrpl.imports.main)

# Level 02
add_module(xrpl basics)

@@ -11,6 +11,9 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
return()
endif()

include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT)

include(CodeCoverage)

# The instructions for these commands come from the `CodeCoverage` module,
@@ -26,15 +29,13 @@ list(APPEND GCOVR_ADDITIONAL_ARGS
--exclude-throw-branches
--exclude-noncode-lines
--exclude-unreachable-branches -s
-j ${coverage_test_parallelism})
-j ${PROCESSOR_COUNT})

setup_target_for_coverage_gcovr(
NAME coverage
FORMAT ${coverage_format}
EXECUTABLE rippled
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
DEPENDENCIES rippled
DEPENDENCIES rippled xrpl.tests
)

add_code_coverage_to_target(opts INTERFACE)

@@ -1,5 +1,5 @@
#[===================================================================[
convenience variables and sanity checks
sanity checks
#]===================================================================]

get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
@@ -16,39 +16,19 @@ if (NOT is_multiconfig)
endif ()
endif ()

get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
set (is_root_project OFF)
else ()
set (is_root_project ON)
endif ()

if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
set (is_clang TRUE)
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "This project requires clang 8 or later")
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
message (FATAL_ERROR "This project requires clang 16 or later")
endif ()
# TODO min AppleClang version check ?
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
set (is_gcc TRUE)
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "This project requires GCC 8 or later")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
message (FATAL_ERROR "This project requires GCC 12 or later")
endif ()
endif ()

if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
set (is_linux TRUE)
else ()
set (is_linux FALSE)
endif ()

if ("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
set (is_ci TRUE)
else ()
set (is_ci FALSE)
endif ()

# check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message (FATAL_ERROR "Builds (in-source) are not allowed in "

@@ -1,10 +1,25 @@
|
||||
#[===================================================================[
|
||||
declare user options/settings
|
||||
declare options and variables
|
||||
#]===================================================================]
|
||||
|
||||
include(ProcessorCount)
|
||||
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
|
||||
set (is_linux TRUE)
|
||||
else()
|
||||
set(is_linux FALSE)
|
||||
endif()
|
||||
|
||||
ProcessorCount(PROCESSOR_COUNT)
|
||||
if("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
|
||||
set(is_ci TRUE)
|
||||
else()
|
||||
set(is_ci FALSE)
|
||||
endif()
|
||||
|
||||
get_directory_property(has_parent PARENT_DIRECTORY)
|
||||
if(has_parent)
|
||||
set(is_root_project OFF)
|
||||
else()
|
||||
set(is_root_project ON)
|
||||
endif()
|
||||
|
||||
option(assert "Enables asserts, even in release builds" OFF)
|
||||
|
||||
@@ -25,29 +40,28 @@ if(unity)
|
||||
endif()
|
||||
set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
|
||||
endif()
|
||||
|
||||
if(is_clang AND is_linux)
|
||||
option(voidstar "Enable Antithesis instrumentation." OFF)
|
||||
endif()
|
||||
|
||||
if(is_gcc OR is_clang)
|
||||
include(ProcessorCount)
|
||||
ProcessorCount(PROCESSOR_COUNT)
|
||||
|
||||
option(coverage "Generates coverage info." OFF)
|
||||
option(profile "Add profiling flags" OFF)
|
||||
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
|
||||
"Unit tests parallelism for the purpose of coverage report.")
|
||||
set(coverage_format "html-details" CACHE STRING
|
||||
"Output format of the coverage report.")
|
||||
set(coverage_extra_args "" CACHE STRING
|
||||
"Additional arguments to pass to gcovr.")
|
||||
set(coverage_test "" CACHE STRING
|
||||
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
|
||||
if(coverage_test AND NOT coverage)
|
||||
set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
option(wextra "compile with extra gcc/clang warnings enabled" ON)
|
||||
else()
|
||||
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
|
||||
if(is_linux)
|
||||
option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
|
||||
option(static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||
@@ -64,11 +78,13 @@ else()
|
||||
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
|
||||
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
|
||||
endif()
|
||||
|
||||
if(is_clang)
|
||||
option(use_lld "enables detection of lld linker" ON)
|
||||
else()
|
||||
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
|
||||
endif()
|
||||
|
||||
option(jemalloc "Enables jemalloc for heap profiling" OFF)
|
||||
option(werr "treat warnings as errors" OFF)
|
||||
option(local_protobuf
|
||||
@@ -102,38 +118,26 @@ if(san)
|
||||
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
|
||||
endif()
|
||||
endif()
|
||||
set(container_label "" CACHE STRING "tag to use for package building containers")
|
||||
option(packages_only
|
||||
"ONLY generate package building targets. This is special use-case and almost \
|
||||
certainly not what you want. Use with caution as you won't be able to build \
|
||||
any compiled targets locally." OFF)
|
||||
option(have_package_container
|
||||
"Sometimes you already have the tagged container you want to use for package \
|
||||
building and you don't want docker to rebuild it. This flag will detach the \
|
||||
dependency of the package build from the container build. It's an advanced \
|
||||
use case and most likely you should not be touching this flag." OFF)
|
||||
|
||||
# the remaining options are obscure and rarely used
|
||||
option(beast_no_unit_test_inline
|
||||
"Prevents unit test definitions from being inserted into global table"
|
||||
OFF)
|
||||
option(single_io_service_thread
|
||||
"Restricts the number of threads calling io_context::run to one. \
|
||||
"Restricts the number of threads calling io_service::run to one. \
|
||||
This can be useful when debugging."
|
||||
OFF)
|
||||
option(boost_show_deprecated
|
||||
"Allow boost to fail on deprecated usage. Only useful if you're trying\
|
||||
to find deprecated calls."
|
||||
OFF)
|
||||
option(beast_hashers
|
||||
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
|
||||
OFF)
|
||||
|
||||
if(WIN32)
|
||||
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
|
||||
else()
|
||||
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
|
||||
endif()
|
||||
|
||||
if(coverage)
|
||||
message(STATUS "coverage build requested - forcing Debug build")
|
||||
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
|
||||
|
||||
@@ -24,7 +24,6 @@ target_link_libraries(ripple_boost
|
||||
Boost::date_time
|
||||
Boost::filesystem
|
||||
Boost::json
|
||||
Boost::process
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
Boost::system
|
||||
|
||||
@@ -7,7 +7,7 @@ function(xrpl_add_test name)
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
|
||||
)
|
||||
add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})
|
||||
add_executable(${target} ${ARGN} ${sources})
|
||||
|
||||
isolate_headers(
|
||||
${target}
|
||||
@@ -22,20 +22,4 @@ function(xrpl_add_test name)
|
||||
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
|
||||
|
||||
add_test(NAME ${target} COMMAND ${target})
|
||||
set_tests_properties(
|
||||
${target} PROPERTIES
|
||||
FIXTURES_REQUIRED ${target}_fixture
|
||||
)
|
||||
|
||||
add_test(
|
||||
NAME ${target}.build
|
||||
COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build ${CMAKE_BINARY_DIR}
|
||||
--config $<CONFIG>
|
||||
--target ${target}
|
||||
)
|
||||
set_tests_properties(${target}.build PROPERTIES
|
||||
FIXTURES_SETUP ${target}_fixture
|
||||
)
|
||||
endfunction()
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
|
||||
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
|
||||
"bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1756234261.716",
|
||||
"boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1756223752.326",
|
||||
"boost/1.83.0#5d975011d65b51abb2d2f6eb8386b368%1754325043.336",
|
||||
"abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1756234220.907"
|
||||
],
|
||||
"build_requires": [
|
||||
@@ -46,11 +46,11 @@
|
||||
"lz4/1.10.0"
|
||||
],
|
||||
"boost/1.83.0": [
|
||||
"boost/1.88.0"
|
||||
"boost/1.83.0"
|
||||
],
|
||||
"sqlite3/3.44.2": [
|
||||
"sqlite3/3.49.1"
|
||||
]
|
||||
},
|
||||
"config_requires": []
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
# Global configuration for Conan. This is used to set the number of parallel
|
||||
# downloads, uploads, and build jobs. The verbosity is set to verbose to
|
||||
# provide more information during the build process.
|
||||
# downloads and uploads.
|
||||
core:non_interactive=True
|
||||
core.download:parallel={{ os.cpu_count() }}
|
||||
core.upload:parallel={{ os.cpu_count() }}
|
||||
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
|
||||
tools.build:verbosity=verbose
|
||||
tools.compilation:verbosity=verbose
|
||||
|
||||
@@ -21,14 +21,14 @@ compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
|
||||
|
||||
[conf]
|
||||
{% if compiler == "clang" and compiler_version >= 19 %}
|
||||
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
{% endif %}
|
||||
{% if compiler == "apple-clang" and compiler_version >= 17 %}
|
||||
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
{% endif %}
|
||||
{% if compiler == "clang" and compiler_version == 16 %}
|
||||
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
|
||||
{% endif %}
|
||||
{% if compiler == "gcc" and compiler_version < 13 %}
|
||||
tools.build:cxxflags=['-Wno-restrict']
|
||||
tools.build:cxxflags+=['-Wno-restrict']
|
||||
{% endif %}
|
||||
|
||||
[tool_requires]
|
||||
!cmake/*: cmake/[>=3 <4]
|
||||
|
||||
@@ -100,13 +100,11 @@ class Xrpl(ConanFile):
|
||||
def configure(self):
|
||||
if self.settings.compiler == 'apple-clang':
|
||||
self.options['boost'].visibility = 'global'
|
||||
if self.settings.compiler in ['clang', 'gcc']:
|
||||
self.options['boost'].without_cobalt = True
|
||||
|
||||
def requirements(self):
|
||||
# Conan 2 requires transitive headers to be specified
|
||||
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
|
||||
self.requires('boost/1.88.0', force=True, **transitive_headers_opt)
|
||||
self.requires('boost/1.83.0', force=True, **transitive_headers_opt)
|
||||
self.requires('date/3.0.4', **transitive_headers_opt)
|
||||
self.requires('lz4/1.10.0', force=True)
|
||||
self.requires('protobuf/3.21.12', force=True)
|
||||
@@ -177,7 +175,6 @@ class Xrpl(ConanFile):
|
||||
'boost::filesystem',
|
||||
'boost::json',
|
||||
'boost::program_options',
|
||||
'boost::process',
|
||||
'boost::regex',
|
||||
'boost::system',
|
||||
'boost::thread',
|
||||
|
||||
2
external/secp256k1/include/secp256k1.h
vendored
@@ -541,7 +541,7 @@ SECP256K1_API int secp256k1_ecdsa_signature_serialize_compact(
|
||||
/** Verify an ECDSA signature.
|
||||
*
|
||||
* Returns: 1: correct signature
|
||||
* 0: incorrect or unparseable signature
|
||||
* 0: incorrect or unparsable signature
|
||||
* Args: ctx: pointer to a context object
|
||||
* In: sig: the signature being verified.
|
||||
* msghash32: the 32-byte message hash being verified.
|
||||
|
||||
@@ -654,12 +654,14 @@ SharedWeakUnion<T>::convertToWeak()
|
||||
break;
|
||||
case destroy:
|
||||
// We just added a weak ref. How could we destroy?
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::SharedWeakUnion::convertToWeak : destroying freshly "
|
||||
"added ref");
|
||||
delete p;
|
||||
unsafeSetRawPtr(nullptr);
|
||||
return true; // Should never happen
|
||||
// LCOV_EXCL_STOP
|
||||
case partialDestroy:
|
||||
// This is a weird case. We just converted the last strong
|
||||
// pointer to a weak pointer.
|
||||
|
||||
@@ -30,7 +30,6 @@
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
#include <string_view>
|
||||
#include <utility>
|
||||
|
||||
namespace ripple {
|
||||
@@ -131,14 +130,27 @@ private:
|
||||
Does nothing if there is no associated system file.
|
||||
*/
|
||||
void
|
||||
write(std::string_view text);
|
||||
write(char const* text);
|
||||
|
||||
/** write to the log file and append an end of line marker.
|
||||
Does nothing if there is no associated system file.
|
||||
*/
|
||||
void
|
||||
writeln(std::string_view text);
|
||||
writeln(char const* text);
|
||||
|
||||
/** Write to the log file using std::string. */
|
||||
/** @{ */
|
||||
void
|
||||
write(std::string const& str)
|
||||
{
|
||||
write(str.c_str());
|
||||
}
|
||||
|
||||
void
|
||||
writeln(std::string const& str)
|
||||
{
|
||||
writeln(str.c_str());
|
||||
}
|
||||
/** @} */
|
||||
|
||||
private:
|
||||
@@ -174,14 +186,6 @@ public:
|
||||
beast::Journal::Sink&
|
||||
operator[](std::string const& name);
|
||||
|
||||
template <typename AttributesFactory>
|
||||
beast::Journal
|
||||
journal(std::string const& name, AttributesFactory&& factory)
|
||||
{
|
||||
return beast::Journal{
|
||||
get(name), name, std::forward<AttributesFactory>(factory)};
|
||||
}
|
||||
|
||||
beast::Journal
|
||||
journal(std::string const& name);
|
||||
|
||||
@@ -233,34 +237,30 @@ public:
|
||||
static LogSeverity
|
||||
fromString(std::string const& s);
|
||||
|
||||
static void
|
||||
format(
|
||||
std::string& output,
|
||||
std::string const& message,
|
||||
beast::severities::Severity severity,
|
||||
std::string const& partition);
|
||||
|
||||
private:
|
||||
enum {
|
||||
// Maximum line length for log messages.
|
||||
// If the message exceeds this length it will be truncated with elipses.
|
||||
maximumMessageCharacters = 12 * 1024
|
||||
};
|
||||
|
||||
static void
|
||||
format(
|
||||
std::string& output,
|
||||
std::string const& message,
|
||||
beast::severities::Severity severity,
|
||||
std::string const& partition);
|
||||
};
|
||||
|
||||
// Wraps a Journal::Stream to skip evaluation of
|
||||
// expensive argument lists if the stream is not active.
|
||||
#ifndef JLOG
|
||||
#define JLOG_JOIN_(a, b) a##b
|
||||
#define JLOG_JOIN(a, b) JLOG_JOIN_(a, b)
|
||||
#define JLOG_UNIQUE(base) JLOG_JOIN(base, __LINE__) // line-based unique name
|
||||
|
||||
#define JLOG(x) \
|
||||
if (auto JLOG_UNIQUE(stream) = (x); !JLOG_UNIQUE(stream)) \
|
||||
{ \
|
||||
} \
|
||||
else \
|
||||
std::move(JLOG_UNIQUE(stream))
|
||||
#define JLOG(x) \
|
||||
if (!x) \
|
||||
{ \
|
||||
} \
|
||||
else \
|
||||
x
|
||||
#endif
|
||||
|
||||
#ifndef CLOG
|
||||
|
||||
@@ -32,6 +32,15 @@ class Number;
|
||||
std::string
|
||||
to_string(Number const& amount);
|
||||
|
||||
template <typename T>
|
||||
constexpr bool
|
||||
isPowerOfTen(T value)
|
||||
{
|
||||
while (value >= 10 && value % 10 == 0)
|
||||
value /= 10;
|
||||
return value == 1;
|
||||
}
|
||||
|
||||
class Number
|
||||
{
|
||||
using rep = std::int64_t;
|
||||
@@ -41,7 +50,9 @@ class Number
|
||||
public:
|
||||
// The range for the mantissa when normalized
|
||||
constexpr static std::int64_t minMantissa = 1'000'000'000'000'000LL;
|
||||
constexpr static std::int64_t maxMantissa = 9'999'999'999'999'999LL;
|
||||
static_assert(isPowerOfTen(minMantissa));
|
||||
constexpr static std::int64_t maxMantissa = minMantissa * 10 - 1;
|
||||
static_assert(maxMantissa == 9'999'999'999'999'999LL);
|
||||
|
||||
// The range for the exponent when normalized
|
||||
constexpr static int minExponent = -32768;
|
||||
@@ -151,22 +162,7 @@ public:
|
||||
}
|
||||
|
||||
Number
|
||||
truncate() const noexcept
|
||||
{
|
||||
if (exponent_ >= 0 || mantissa_ == 0)
|
||||
return *this;
|
||||
|
||||
Number ret = *this;
|
||||
while (ret.exponent_ < 0 && ret.mantissa_ != 0)
|
||||
{
|
||||
ret.exponent_ += 1;
|
||||
ret.mantissa_ /= rep(10);
|
||||
}
|
||||
// We are guaranteed that normalize() will never throw an exception
|
||||
// because exponent is either negative or zero at this point.
|
||||
ret.normalize();
|
||||
return ret;
|
||||
}
|
||||
truncate() const noexcept;
|
||||
|
||||
friend constexpr bool
|
||||
operator>(Number const& x, Number const& y) noexcept
|
||||
@@ -211,6 +207,8 @@ private:
|
||||
class Guard;
|
||||
};
|
||||
|
||||
constexpr static Number numZero{};
|
||||
|
||||
inline constexpr Number::Number(rep mantissa, int exponent, unchecked) noexcept
|
||||
: mantissa_{mantissa}, exponent_{exponent}
|
||||
{
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
#include <xrpl/basics/Resolver.h>
|
||||
#include <xrpl/beast/utility/Journal.h>
|
||||
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/io_service.hpp>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
@@ -33,7 +33,7 @@ public:
|
||||
explicit ResolverAsio() = default;
|
||||
|
||||
static std::unique_ptr<ResolverAsio>
|
||||
New(boost::asio::io_context&, beast::Journal);
|
||||
New(boost::asio::io_service&, beast::Journal);
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -176,7 +176,7 @@ public:
|
||||
@param count the number of items the slab allocator can allocate; note
|
||||
that a count of 0 is valid and means that the allocator
|
||||
is, effectively, disabled. This can be very useful in some
|
||||
contexts (e.g. when mimimal memory usage is needed) and
|
||||
contexts (e.g. when minimal memory usage is needed) and
|
||||
allows for graceful failure.
|
||||
*/
|
||||
constexpr explicit SlabAllocator(
|
||||
|
||||
@@ -565,7 +565,7 @@ operator<=>(base_uint<Bits, Tag> const& lhs, base_uint<Bits, Tag> const& rhs)
|
||||
// This comparison might seem wrong on a casual inspection because it
|
||||
// compares data internally stored as std::uint32_t byte-by-byte. But
|
||||
// note that the underlying data is stored in big endian, even if the
|
||||
// plaform is little endian. This makes the comparison correct.
|
||||
// platform is little endian. This makes the comparison correct.
|
||||
//
|
||||
// FIXME: use std::lexicographical_compare_three_way once support is
|
||||
// added to MacOS.
|
||||
|
||||
@@ -28,7 +28,7 @@ namespace ripple {
|
||||
|
||||
/*
|
||||
* MSVC 2019 version 16.9.0 added [[nodiscard]] to the std comparison
|
||||
* operator() functions. boost::bimap checks that the comparitor is a
|
||||
* operator() functions. boost::bimap checks that the comparator is a
|
||||
* BinaryFunction, in part by calling the function and ignoring the value.
|
||||
* These two things don't play well together. These wrapper classes simply
|
||||
* strip [[nodiscard]] from operator() for use in boost::bimap.
|
||||
|
||||
@@ -23,8 +23,7 @@
|
||||
#include <xrpl/beast/utility/instrumentation.h>
|
||||
|
||||
#include <boost/asio/basic_waitable_timer.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/post.hpp>
|
||||
#include <boost/asio/io_service.hpp>
|
||||
|
||||
#include <chrono>
|
||||
#include <condition_variable>
|
||||
@@ -33,7 +32,7 @@
|
||||
|
||||
namespace beast {
|
||||
|
||||
/** Measures handler latency on an io_context queue. */
|
||||
/** Measures handler latency on an io_service queue. */
|
||||
template <class Clock>
|
||||
class io_latency_probe
|
||||
{
|
||||
@@ -45,12 +44,12 @@ private:
|
||||
std::condition_variable_any m_cond;
|
||||
std::size_t m_count;
|
||||
duration const m_period;
|
||||
boost::asio::io_context& m_ios;
|
||||
boost::asio::io_service& m_ios;
|
||||
boost::asio::basic_waitable_timer<std::chrono::steady_clock> m_timer;
|
||||
bool m_cancel;
|
||||
|
||||
public:
|
||||
io_latency_probe(duration const& period, boost::asio::io_context& ios)
|
||||
io_latency_probe(duration const& period, boost::asio::io_service& ios)
|
||||
: m_count(1)
|
||||
, m_period(period)
|
||||
, m_ios(ios)
|
||||
@@ -65,16 +64,16 @@ public:
|
||||
cancel(lock, true);
|
||||
}
|
||||
|
||||
/** Return the io_context associated with the latency probe. */
|
||||
/** Return the io_service associated with the latency probe. */
|
||||
/** @{ */
|
||||
boost::asio::io_context&
|
||||
get_io_context()
|
||||
boost::asio::io_service&
|
||||
get_io_service()
|
||||
{
|
||||
return m_ios;
|
||||
}
|
||||
|
||||
boost::asio::io_context const&
|
||||
get_io_context() const
|
||||
boost::asio::io_service const&
|
||||
get_io_service() const
|
||||
{
|
||||
return m_ios;
|
||||
}
|
||||
@@ -110,10 +109,8 @@ public:
|
||||
std::lock_guard lock(m_mutex);
|
||||
if (m_cancel)
|
||||
throw std::logic_error("io_latency_probe is canceled");
|
||||
boost::asio::post(
|
||||
m_ios,
|
||||
sample_op<Handler>(
|
||||
std::forward<Handler>(handler), Clock::now(), false, this));
|
||||
m_ios.post(sample_op<Handler>(
|
||||
std::forward<Handler>(handler), Clock::now(), false, this));
|
||||
}
|
||||
|
||||
/** Initiate continuous i/o latency sampling.
|
||||
@@ -127,10 +124,8 @@ public:
|
||||
std::lock_guard lock(m_mutex);
|
||||
if (m_cancel)
|
||||
throw std::logic_error("io_latency_probe is canceled");
|
||||
boost::asio::post(
|
||||
m_ios,
|
||||
sample_op<Handler>(
|
||||
std::forward<Handler>(handler), Clock::now(), true, this));
|
||||
m_ios.post(sample_op<Handler>(
|
||||
std::forward<Handler>(handler), Clock::now(), true, this));
|
||||
}
|
||||
|
||||
private:
|
||||
@@ -241,13 +236,12 @@ private:
|
||||
// The latency is too high to maintain the desired
|
||||
// period so don't bother with a timer.
|
||||
//
|
||||
boost::asio::post(
|
||||
m_probe->m_ios,
|
||||
m_probe->m_ios.post(
|
||||
sample_op<Handler>(m_handler, now, m_repeat, m_probe));
|
||||
}
|
||||
else
|
||||
{
|
||||
m_probe->m_timer.expires_after(when - now);
|
||||
m_probe->m_timer.expires_from_now(when - now);
|
||||
m_probe->m_timer.async_wait(
|
||||
sample_op<Handler>(m_handler, now, m_repeat, m_probe));
|
||||
}
|
||||
@@ -260,8 +254,7 @@ private:
|
||||
if (!m_probe)
|
||||
return;
|
||||
typename Clock::time_point const now(Clock::now());
|
||||
boost::asio::post(
|
||||
m_probe->m_ios,
|
||||
m_probe->m_ios.post(
|
||||
sample_op<Handler>(m_handler, now, m_repeat, m_probe));
|
||||
}
|
||||
};
|
||||
|
||||
@@ -94,7 +94,11 @@ hash_append(Hasher& h, beast::IP::Address const& addr) noexcept
|
||||
else if (addr.is_v6())
|
||||
hash_append(h, addr.to_v6().to_bytes());
|
||||
else
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE("beast::hash_append : invalid address type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
} // namespace beast
|
||||
|
||||
|
||||
@@ -8,11 +8,9 @@
|
||||
#ifndef BEAST_TEST_YIELD_TO_HPP
|
||||
#define BEAST_TEST_YIELD_TO_HPP
|
||||
|
||||
#include <boost/asio/executor_work_guard.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/io_service.hpp>
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/optional.hpp>
|
||||
#include <boost/thread/csbl/memory/allocator_arg.hpp>
|
||||
|
||||
#include <condition_variable>
|
||||
#include <mutex>
|
||||
@@ -31,12 +29,10 @@ namespace test {
|
||||
class enable_yield_to
|
||||
{
|
||||
protected:
|
||||
boost::asio::io_context ios_;
|
||||
boost::asio::io_service ios_;
|
||||
|
||||
private:
|
||||
boost::optional<boost::asio::executor_work_guard<
|
||||
boost::asio::io_context::executor_type>>
|
||||
work_;
|
||||
boost::optional<boost::asio::io_service::work> work_;
|
||||
std::vector<std::thread> threads_;
|
||||
std::mutex m_;
|
||||
std::condition_variable cv_;
|
||||
@@ -46,8 +42,7 @@ public:
|
||||
/// The type of yield context passed to functions.
|
||||
using yield_context = boost::asio::yield_context;
|
||||
|
||||
explicit enable_yield_to(std::size_t concurrency = 1)
|
||||
: work_(boost::asio::make_work_guard(ios_))
|
||||
explicit enable_yield_to(std::size_t concurrency = 1) : work_(ios_)
|
||||
{
|
||||
threads_.reserve(concurrency);
|
||||
while (concurrency--)
|
||||
@@ -61,9 +56,9 @@ public:
|
||||
t.join();
|
||||
}
|
||||
|
||||
/// Return the `io_context` associated with the object
|
||||
boost::asio::io_context&
|
||||
get_io_context()
|
||||
/// Return the `io_service` associated with the object
|
||||
boost::asio::io_service&
|
||||
get_io_service()
|
||||
{
|
||||
return ios_;
|
||||
}
|
||||
@@ -116,18 +111,13 @@ enable_yield_to::spawn(F0&& f, FN&&... fn)
|
||||
{
|
||||
boost::asio::spawn(
|
||||
ios_,
|
||||
boost::allocator_arg,
|
||||
boost::context::fixedsize_stack(2 * 1024 * 1024),
|
||||
[&](yield_context yield) {
|
||||
f(yield);
|
||||
std::lock_guard lock{m_};
|
||||
if (--running_ == 0)
|
||||
cv_.notify_all();
|
||||
},
|
||||
[](std::exception_ptr e) {
|
||||
if (e)
|
||||
std::rethrow_exception(e);
|
||||
});
|
||||
boost::coroutines::attributes(2 * 1024 * 1024));
|
||||
spawn(fn...);
|
||||
}
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@ public:
|
||||
|
||||
The argument string is available to suites and
|
||||
allows for customization of the test. Each suite
|
||||
defines its own syntax for the argumnet string.
|
||||
defines its own syntax for the argument string.
|
||||
The same argument is passed to all suites.
|
||||
*/
|
||||
void
|
||||
|
||||
@@ -22,266 +22,10 @@
|
||||
|
||||
#include <xrpl/beast/utility/instrumentation.h>
|
||||
|
||||
#include <atomic>
|
||||
#include <charconv>
|
||||
#include <cstring>
|
||||
#include <deque>
|
||||
#include <mutex>
|
||||
#include <shared_mutex>
|
||||
#include <source_location>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <string_view>
|
||||
#include <utility>
|
||||
|
||||
namespace ripple::log {
|
||||
template <typename T>
|
||||
class LogParameter
|
||||
{
|
||||
public:
|
||||
template <typename TArg>
|
||||
LogParameter(char const* name, TArg&& value)
|
||||
: name_(name), value_(std::forward<TArg>(value))
|
||||
{
|
||||
}
|
||||
|
||||
private:
|
||||
char const* name_;
|
||||
T value_;
|
||||
|
||||
template <typename U>
|
||||
friend std::ostream&
|
||||
operator<<(std::ostream& os, LogParameter<U> const&);
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
class LogField
|
||||
{
|
||||
public:
|
||||
template <typename TArg>
|
||||
LogField(char const* name, TArg&& value)
|
||||
: name_(name), value_(std::forward<TArg>(value))
|
||||
{
|
||||
}
|
||||
|
||||
private:
|
||||
char const* name_;
|
||||
T value_;
|
||||
|
||||
template <typename U>
|
||||
friend std::ostream&
|
||||
operator<<(std::ostream& os, LogField<U> const&);
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
std::ostream&
|
||||
operator<<(std::ostream& os, LogField<T> const& param);
|
||||
|
||||
template <typename T>
|
||||
std::ostream&
|
||||
operator<<(std::ostream& os, LogParameter<T> const& param);
|
||||
} // namespace ripple::log
|
||||
|
||||
namespace beast {
|
||||
|
||||
namespace detail {
|
||||
|
||||
class SimpleJsonWriter
|
||||
{
|
||||
public:
|
||||
explicit SimpleJsonWriter(std::string* buffer) : buffer_(buffer)
|
||||
{
|
||||
}
|
||||
|
||||
SimpleJsonWriter() = default;
|
||||
|
||||
SimpleJsonWriter(SimpleJsonWriter const& other) = default;
|
||||
SimpleJsonWriter&
|
||||
operator=(SimpleJsonWriter const& other) = default;
|
||||
|
||||
std::string&
|
||||
buffer()
|
||||
{
|
||||
return *buffer_;
|
||||
}
|
||||
|
||||
void
|
||||
startObject() const
|
||||
{
|
||||
buffer_->push_back('{');
|
||||
}
|
||||
void
|
||||
endObject() const
|
||||
{
|
||||
using namespace std::string_view_literals;
|
||||
if (buffer_->back() == ',')
|
||||
buffer_->pop_back();
|
||||
buffer_->append("},"sv);
|
||||
}
|
||||
void
|
||||
writeKey(std::string_view key) const
|
||||
{
|
||||
writeString(key);
|
||||
buffer_->back() = ':';
|
||||
}
|
||||
void
|
||||
startArray() const
|
||||
{
|
||||
buffer_->push_back('[');
|
||||
}
|
||||
void
|
||||
endArray() const
|
||||
{
|
||||
using namespace std::string_view_literals;
|
||||
if (buffer_->back() == ',')
|
||||
buffer_->pop_back();
|
||||
buffer_->append("],"sv);
|
||||
}
|
||||
void
|
||||
writeString(std::string_view str) const
|
||||
{
|
||||
using namespace std::string_view_literals;
|
||||
buffer_->push_back('"');
|
||||
escape(str, *buffer_);
|
||||
buffer_->append("\","sv);
|
||||
}
|
||||
std::string_view
|
||||
writeInt(std::int32_t val) const
|
||||
{
|
||||
return pushNumber(val, *buffer_);
|
||||
}
|
||||
std::string_view
|
||||
writeInt(std::int64_t val) const
|
||||
{
|
||||
return pushNumber(val, *buffer_);
|
||||
}
|
||||
std::string_view
|
||||
writeUInt(std::uint32_t val) const
|
||||
{
|
||||
return pushNumber(val, *buffer_);
|
||||
}
|
||||
std::string_view
|
||||
writeUInt(std::uint64_t val) const
|
||||
{
|
||||
return pushNumber(val, *buffer_);
|
||||
}
|
||||
std::string_view
|
||||
writeDouble(double val) const
|
||||
{
|
||||
return pushNumber(val, *buffer_);
|
||||
}
|
||||
std::string_view
|
||||
writeBool(bool val) const
|
||||
{
|
||||
using namespace std::string_view_literals;
|
||||
auto str = val ? "true,"sv : "false,"sv;
|
||||
buffer_->append(str);
|
||||
return str;
|
||||
}
|
||||
void
|
||||
writeNull() const
|
||||
{
|
||||
using namespace std::string_view_literals;
|
||||
buffer_->append("null,"sv);
|
||||
}
|
||||
void
|
||||
writeRaw(std::string_view str) const
|
||||
{
|
||||
buffer_->append(str);
|
||||
}
|
||||
|
||||
void
|
||||
finish()
|
||||
{
|
||||
buffer_->pop_back();
|
||||
}
|
||||
|
||||
private:
|
||||
template <typename T>
|
||||
static std::string_view
|
||||
pushNumber(T val, std::string& str)
|
||||
{
|
||||
thread_local char buffer[128];
|
||||
auto result = std::to_chars(std::begin(buffer), std::end(buffer), val);
|
||||
auto ptr = result.ptr;
|
||||
*ptr = ',';
|
||||
auto len = ptr - std::begin(buffer);
|
||||
str.append(buffer, len + 1);
|
||||
return {buffer, static_cast<size_t>(len)};
|
||||
}
|
||||
|
||||
static void
|
||||
escape(std::string_view str, std::string& buffer)
|
||||
{
|
||||
static constexpr char HEX[] = "0123456789ABCDEF";
|
||||
|
||||
char const* p = str.data();
|
||||
char const* end = p + str.size();
|
||||
char const* chunk = p;
|
||||
|
||||
while (p < end)
|
||||
{
|
||||
auto c = static_cast<unsigned char>(*p);
|
||||
|
||||
// JSON requires escaping for <0x20 and the two specials below.
|
||||
bool needsEscape = (c < 0x20) || (c == '"') || (c == '\\');
|
||||
|
||||
if (!needsEscape)
|
||||
{
|
||||
++p;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Flush the preceding safe run in one go.
|
||||
if (chunk != p)
|
||||
buffer.append(chunk, p - chunk);
|
||||
|
||||
switch (c)
|
||||
{
|
||||
case '"':
|
||||
buffer.append("\\\"", 2);
|
||||
break;
|
||||
case '\\':
|
||||
buffer.append("\\\\", 2);
|
||||
break;
|
||||
case '\b':
|
||||
buffer.append("\\b", 2);
|
||||
break;
|
||||
case '\f':
|
||||
buffer.append("\\f", 2);
|
||||
break;
|
||||
case '\n':
|
||||
buffer.append("\\n", 2);
|
||||
break;
|
||||
case '\r':
|
||||
buffer.append("\\r", 2);
|
||||
break;
|
||||
case '\t':
|
||||
buffer.append("\\t", 2);
|
||||
break;
|
||||
default: {
|
||||
// Other C0 controls -> \u00XX (JSON compliant)
|
||||
char buf[6]{
|
||||
'\\', 'u', '0', '0', HEX[(c >> 4) & 0xF], HEX[c & 0xF]};
|
||||
buffer.append(buf, 6);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
++p;
|
||||
chunk = p;
|
||||
}
|
||||
|
||||
// Flush trailing safe run
|
||||
if (chunk != p)
|
||||
buffer.append(chunk, p - chunk);
|
||||
}
|
||||
|
||||
std::string* buffer_ = nullptr;
|
||||
};
|
||||
|
||||
} // namespace detail
|
||||
|
||||
/** A namespace for easy access to logging severity values. */
|
||||
namespace severities {
|
||||
/** Severity level / threshold of a Journal message. */
|
||||
@@ -298,9 +42,6 @@ enum Severity {
|
||||
kDisabled,
|
||||
kNone = kDisabled
|
||||
};
|
||||
|
||||
std::string_view
|
||||
to_string(Severity severity);
|
||||
} // namespace severities
|
||||
|
||||
/** A generic endpoint for log messages.
|
||||
@@ -318,114 +59,18 @@ to_string(Severity severity);
|
||||
class Journal
|
||||
{
|
||||
public:
|
||||
template <typename T>
|
||||
friend std::ostream&
|
||||
ripple::log::operator<<(
|
||||
std::ostream& os,
|
||||
ripple::log::LogField<T> const& param);
|
||||
|
||||
template <typename T>
|
||||
friend std::ostream&
|
||||
ripple::log::operator<<(
|
||||
std::ostream& os,
|
||||
ripple::log::LogParameter<T> const& param);
|
||||
|
||||
class Sink;
|
||||
|
||||
class JsonLogContext
|
||||
{
|
||||
std::string messageBuffer_;
|
||||
detail::SimpleJsonWriter jsonWriter_;
|
||||
bool hasMessageParams_ = false;
|
||||
std::size_t messageOffset_ = 0;
|
||||
|
||||
public:
|
||||
JsonLogContext() : jsonWriter_(&messageBuffer_)
|
||||
{
|
||||
messageBuffer_.reserve(4 * 1024);
|
||||
}
|
||||
|
||||
std::string&
|
||||
messageBuffer()
|
||||
{
|
||||
return messageBuffer_;
|
||||
}
|
||||
|
||||
void
|
||||
startMessageParams()
|
||||
{
|
||||
if (!hasMessageParams_)
|
||||
{
|
||||
writer().writeKey("Dt");
|
||||
writer().startObject();
|
||||
hasMessageParams_ = true;
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
endMessageParams()
|
||||
{
|
||||
if (hasMessageParams_)
|
||||
{
|
||||
writer().endObject();
|
||||
}
|
||||
}
|
||||
|
||||
detail::SimpleJsonWriter&
|
||||
writer()
|
||||
{
|
||||
return jsonWriter_;
|
||||
}
|
||||
|
||||
void
|
||||
reuseJson();
|
||||
|
||||
void
|
||||
finish();
|
||||
|
||||
void
|
||||
start(
|
||||
std::source_location location,
|
||||
severities::Severity severity,
|
||||
std::string_view moduleName,
|
||||
std::string_view journalAttributes) noexcept;
|
||||
};
|
||||
|
||||
private:
|
||||
// Severity level / threshold of a Journal message.
|
||||
using Severity = severities::Severity;
|
||||
|
||||
std::string name_;
|
||||
std::string attributes_;
|
||||
static std::string globalLogAttributes_;
|
||||
static std::shared_mutex globalLogAttributesMutex_;
|
||||
static bool jsonLogsEnabled_;
|
||||
|
||||
static thread_local JsonLogContext currentJsonLogContext_;
|
||||
|
||||
// Invariant: m_sink always points to a valid Sink
|
||||
Sink* m_sink = nullptr;
|
||||
|
||||
void
|
||||
initMessageContext(
|
||||
std::source_location location,
|
||||
severities::Severity severity) const;
|
||||
|
||||
static std::string&
|
||||
formatLog(std::string const& message);
|
||||
Sink* m_sink;
|
||||
|
||||
public:
|
||||
//--------------------------------------------------------------------------
|
||||
|
||||
static void
|
||||
enableStructuredJournal();
|
||||
|
||||
static void
|
||||
disableStructuredJournal();
|
||||
|
||||
static bool
|
||||
isStructuredJournalEnabled();
|
||||
|
||||
/** Abstraction for the underlying message destination. */
|
||||
class Sink
|
||||
{
|
||||
@@ -616,32 +261,11 @@ public:
|
||||
/** Output stream support. */
|
||||
/** @{ */
|
||||
ScopedStream
|
||||
operator<<(std::ostream& manip(std::ostream&)) const&&
|
||||
{
|
||||
return {*this, manip};
|
||||
}
|
||||
operator<<(std::ostream& manip(std::ostream&)) const;
|
||||
|
||||
template <typename T>
|
||||
ScopedStream
|
||||
operator<<(T const& t) const&&
|
||||
{
|
||||
return {*this, t};
|
||||
}
|
||||
|
||||
ScopedStream
|
||||
operator<<(std::ostream& manip(std::ostream&)) const&
|
||||
{
|
||||
currentJsonLogContext_.reuseJson();
|
||||
return {*this, manip};
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
ScopedStream
|
||||
operator<<(T const& t) const&
|
||||
{
|
||||
currentJsonLogContext_.reuseJson();
|
||||
return {*this, t};
|
||||
}
|
||||
operator<<(T const& t) const;
|
||||
/** @} */
|
||||
|
||||
private:
|
||||
@@ -663,73 +287,11 @@ public:
|
||||
/** Journal has no default constructor. */
|
||||
Journal() = delete;
|
||||
|
||||
Journal(Journal const& other)
|
||||
: name_(other.name_)
|
||||
, attributes_(other.attributes_)
|
||||
, m_sink(other.m_sink)
|
||||
{
|
||||
}
|
||||
|
||||
template <typename TAttributesFactory>
|
||||
Journal(Journal const& other, TAttributesFactory&& attributesFactory)
|
||||
: name_(other.name_), m_sink(other.m_sink)
|
||||
{
|
||||
std::string buffer{other.attributes_};
|
||||
detail::SimpleJsonWriter writer{&buffer};
|
||||
if (other.attributes_.empty() && jsonLogsEnabled_)
|
||||
{
|
||||
writer.startObject();
|
||||
}
|
||||
attributesFactory(writer);
|
||||
attributes_ = std::move(buffer);
|
||||
}
|
||||
|
||||
/** Create a journal that writes to the specified sink. */
|
||||
explicit Journal(Sink& sink, std::string const& name = {})
|
||||
: name_(name), m_sink(&sink)
|
||||
explicit Journal(Sink& sink) : m_sink(&sink)
|
||||
{
|
||||
}
|
||||
|
||||
/** Create a journal that writes to the specified sink. */
|
||||
template <typename TAttributesFactory>
|
||||
explicit Journal(
|
||||
Sink& sink,
|
||||
std::string const& name,
|
||||
TAttributesFactory&& attributesFactory)
|
||||
: name_(name), m_sink(&sink)
|
||||
{
|
||||
std::string buffer;
|
||||
buffer.reserve(128);
|
||||
detail::SimpleJsonWriter writer{&buffer};
|
||||
if (jsonLogsEnabled_)
|
||||
{
|
||||
writer.startObject();
|
||||
}
|
||||
attributesFactory(writer);
|
||||
attributes_ = std::move(buffer);
|
||||
}
|
||||
|
||||
Journal&
|
||||
operator=(Journal const& other)
|
||||
{
|
||||
if (&other == this)
|
||||
return *this; // LCOV_EXCL_LINE
|
||||
|
||||
m_sink = other.m_sink;
|
||||
name_ = other.name_;
|
||||
attributes_ = other.attributes_;
|
||||
return *this;
|
||||
}
|
||||
|
||||
Journal&
|
||||
operator=(Journal&& other) noexcept
|
||||
{
|
||||
m_sink = other.m_sink;
|
||||
name_ = std::move(other.name_);
|
||||
attributes_ = std::move(other.attributes_);
|
||||
return *this;
|
||||
}
|
||||
|
||||
/** Returns the Sink associated with this Journal. */
|
||||
Sink&
|
||||
sink() const
|
||||
@@ -739,11 +301,8 @@ public:
|
||||
|
||||
/** Returns a stream for this sink, with the specified severity level. */
|
||||
Stream
|
||||
stream(
|
||||
Severity level,
|
||||
std::source_location location = std::source_location::current()) const
|
||||
stream(Severity level) const
|
||||
{
|
||||
initMessageContext(location, level);
|
||||
return Stream(*m_sink, level);
|
||||
}
|
||||
|
||||
@@ -760,69 +319,41 @@ public:
|
||||
/** Severity stream access functions. */
|
||||
/** @{ */
|
||||
Stream
|
||||
trace(std::source_location location = std::source_location::current()) const
|
||||
trace() const
|
||||
{
|
||||
initMessageContext(location, severities::kTrace);
|
||||
return {*m_sink, severities::kTrace};
|
||||
}
|
||||
|
||||
Stream
|
||||
debug(std::source_location location = std::source_location::current()) const
|
||||
debug() const
|
||||
{
|
||||
initMessageContext(location, severities::kDebug);
|
||||
return {*m_sink, severities::kDebug};
|
||||
}
|
||||
|
||||
Stream
|
||||
info(std::source_location location = std::source_location::current()) const
|
||||
info() const
|
||||
{
|
||||
initMessageContext(location, severities::kInfo);
|
||||
return {*m_sink, severities::kInfo};
|
||||
}
|
||||
|
||||
Stream
|
||||
warn(std::source_location location = std::source_location::current()) const
|
||||
warn() const
|
||||
{
|
||||
initMessageContext(location, severities::kWarning);
|
||||
return {*m_sink, severities::kWarning};
|
||||
}
|
||||
|
||||
Stream
|
||||
error(std::source_location location = std::source_location::current()) const
|
||||
error() const
|
||||
{
|
||||
initMessageContext(location, severities::kError);
|
||||
return {*m_sink, severities::kError};
|
||||
}
|
||||
|
||||
Stream
|
||||
fatal(std::source_location location = std::source_location::current()) const
|
||||
fatal() const
|
||||
{
|
||||
initMessageContext(location, severities::kFatal);
|
||||
return {*m_sink, severities::kFatal};
|
||||
}
|
||||
/** @} */
|
||||
|
||||
static void
|
||||
resetGlobalAttributes()
|
||||
{
|
||||
std::unique_lock lock(globalLogAttributesMutex_);
|
||||
globalLogAttributes_.clear();
|
||||
}
|
||||
|
||||
template <typename TAttributesFactory>
|
||||
static void
|
||||
addGlobalAttributes(TAttributesFactory&& factory)
|
||||
{
|
||||
std::unique_lock lock(globalLogAttributesMutex_);
|
||||
globalLogAttributes_.reserve(1024);
|
||||
auto isEmpty = globalLogAttributes_.empty();
|
||||
detail::SimpleJsonWriter writer{&globalLogAttributes_};
|
||||
if (isEmpty && jsonLogsEnabled_)
|
||||
{
|
||||
writer.startObject();
|
||||
}
|
||||
factory(writer);
|
||||
}
|
||||
};
|
||||
|
||||
#ifndef __INTELLISENSE__
|
||||
@@ -837,7 +368,7 @@ static_assert(std::is_nothrow_destructible<Journal>::value == true, "");
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
template <typename T>
|
||||
Journal::ScopedStream::ScopedStream(Stream const& stream, T const& t)
|
||||
Journal::ScopedStream::ScopedStream(Journal::Stream const& stream, T const& t)
|
||||
: ScopedStream(stream.sink(), stream.level())
|
||||
{
|
||||
m_ostream << t;
|
||||
@@ -853,6 +384,13 @@ Journal::ScopedStream::operator<<(T const& t) const
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
template <typename T>
|
||||
Journal::ScopedStream
|
||||
Journal::Stream::operator<<(T const& t) const
|
||||
{
|
||||
return ScopedStream(*this, t);
|
||||
}
|
||||
|
||||
namespace detail {
|
||||
|
||||
template <class CharT, class Traits = std::char_traits<CharT>>
|
||||
@@ -922,244 +460,4 @@ using logwstream = basic_logstream<wchar_t>;
|
||||
|
||||
} // namespace beast
|
||||
|
||||
namespace ripple::log {
|
||||
|
||||
namespace detail {
|
||||
|
||||
template <typename T>
|
||||
concept ToCharsFormattable = requires(T val) {
|
||||
{
|
||||
to_chars(std::declval<char*>(), std::declval<char*>(), val)
|
||||
} -> std::convertible_to<std::to_chars_result>;
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
concept StreamFormattable = requires(T val) {
|
||||
{
|
||||
std::declval<std::ostream&>() << val
|
||||
} -> std::convertible_to<std::ostream&>;
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
void
|
||||
setTextValue(
|
||||
beast::detail::SimpleJsonWriter& writer,
|
||||
char const* name,
|
||||
T&& value)
|
||||
{
|
||||
using ValueType = std::decay_t<T>;
|
||||
writer.buffer() += name;
|
||||
writer.buffer() += ": ";
|
||||
if constexpr (
|
||||
std::is_same_v<ValueType, std::string> ||
|
||||
std::is_same_v<ValueType, std::string_view> ||
|
||||
std::is_same_v<ValueType, char const*> ||
|
||||
std::is_same_v<ValueType, char*>)
|
||||
{
|
||||
writer.buffer() += value;
|
||||
}
|
||||
else
|
||||
{
|
||||
std::ostringstream oss;
|
||||
oss << value;
|
||||
writer.buffer() += value;
|
||||
;
|
||||
}
|
||||
writer.buffer() += " ";
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
void
|
||||
setJsonValue(
|
||||
beast::detail::SimpleJsonWriter& writer,
|
||||
char const* name,
|
||||
T&& value,
|
||||
std::ostream* outStream)
|
||||
{
|
||||
using ValueType = std::decay_t<T>;
|
||||
writer.writeKey(name);
|
||||
if constexpr (std::is_same_v<ValueType, bool>)
|
||||
{
|
||||
auto sv = writer.writeBool(value);
|
||||
if (outStream)
|
||||
{
|
||||
outStream->write(sv.data(), sv.size());
|
||||
}
|
||||
}
|
||||
else if constexpr (std::is_integral_v<ValueType>)
|
||||
{
|
||||
std::string_view sv;
|
||||
if constexpr (std::is_signed_v<ValueType>)
|
||||
{
|
||||
if constexpr (sizeof(ValueType) > 4)
|
||||
{
|
||||
sv = writer.writeInt(static_cast<std::int64_t>(value));
|
||||
}
|
||||
else
|
||||
{
|
||||
sv = writer.writeInt(static_cast<std::int32_t>(value));
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if constexpr (sizeof(ValueType) > 4)
|
||||
{
|
||||
sv = writer.writeUInt(static_cast<std::uint64_t>(value));
|
||||
}
|
||||
else
|
||||
{
|
||||
sv = writer.writeUInt(static_cast<std::uint32_t>(value));
|
||||
}
|
||||
}
|
||||
if (outStream)
|
||||
{
|
||||
outStream->write(sv.data(), sv.size());
|
||||
}
|
||||
}
|
||||
else if constexpr (std::is_floating_point_v<ValueType>)
|
||||
{
|
||||
auto sv = writer.writeDouble(value);
|
||||
|
||||
if (outStream)
|
||||
{
|
||||
outStream->write(sv.data(), sv.size());
|
||||
}
|
||||
}
|
||||
else if constexpr (
|
||||
std::is_same_v<ValueType, char const*> ||
|
||||
std::is_same_v<ValueType, char*>)
|
||||
{
|
||||
writer.writeString(value);
|
||||
if (outStream)
|
||||
{
|
||||
outStream->write(value, std::strlen(value));
|
||||
}
|
||||
}
|
||||
else if constexpr (
|
||||
std::is_same_v<ValueType, std::string> ||
|
||||
std::is_same_v<ValueType, std::string_view>)
|
||||
{
|
||||
writer.writeString(value);
|
||||
if (outStream)
|
||||
{
|
||||
outStream->write(value.data(), value.size());
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if constexpr (ToCharsFormattable<ValueType>)
|
||||
{
|
||||
char buffer[1024];
|
||||
std::to_chars_result result =
|
||||
to_chars(std::begin(buffer), std::end(buffer), value);
|
||||
if (result.ec == std::errc{})
|
||||
{
|
||||
std::string_view sv{std::begin(buffer), result.ptr};
|
||||
writer.writeString(sv);
|
||||
if (outStream)
|
||||
{
|
||||
outStream->write(sv.data(), sv.size());
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if constexpr (StreamFormattable<ValueType>)
|
||||
{
|
||||
std::ostringstream oss;
|
||||
oss.imbue(std::locale::classic());
|
||||
oss << value;
|
||||
|
||||
auto str = oss.str();
|
||||
|
||||
writer.writeString(str);
|
||||
|
||||
if (outStream)
|
||||
{
|
||||
outStream->write(
|
||||
str.c_str(), static_cast<std::streamsize>(str.size()));
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
static_assert(
|
||||
ToCharsFormattable<ValueType> || StreamFormattable<ValueType>);
|
||||
}
|
||||
}
|
||||
} // namespace detail
|
||||
|
||||
template <typename T>
|
||||
std::ostream&
|
||||
operator<<(std::ostream& os, LogParameter<T> const& param)
|
||||
{
|
||||
if (!beast::Journal::jsonLogsEnabled_)
|
||||
{
|
||||
os << param.value_;
|
||||
return os;
|
||||
}
|
||||
beast::Journal::currentJsonLogContext_.startMessageParams();
|
||||
detail::setJsonValue(
|
||||
beast::Journal::currentJsonLogContext_.writer(),
|
||||
param.name_,
|
||||
param.value_,
|
||||
&os);
|
||||
return os;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
std::ostream&
|
||||
operator<<(std::ostream& os, LogField<T> const& param)
|
||||
{
|
||||
if (!beast::Journal::jsonLogsEnabled_)
|
||||
return os;
|
||||
beast::Journal::currentJsonLogContext_.startMessageParams();
|
||||
detail::setJsonValue(
|
||||
beast::Journal::currentJsonLogContext_.writer(),
|
||||
param.name_,
|
||||
param.value_,
|
||||
nullptr);
|
||||
return os;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
LogParameter<T>
|
||||
param(char const* name, T&& value)
|
||||
{
|
||||
return LogParameter<T>{name, std::forward<T>(value)};
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
LogField<T>
|
||||
field(char const* name, T&& value)
|
||||
{
|
||||
return LogField<T>{name, std::forward<T>(value)};
|
||||
}
|
||||
|
||||
template <typename... Pair>
|
||||
[[nodiscard]] auto
|
||||
attributes(Pair&&... pairs)
|
||||
{
|
||||
return [&](beast::detail::SimpleJsonWriter& writer) {
|
||||
if (beast::Journal::isStructuredJournalEnabled())
|
||||
{
|
||||
(detail::setJsonValue(writer, pairs.first, pairs.second, nullptr),
|
||||
...);
|
||||
}
|
||||
else
|
||||
{
|
||||
(detail::setTextValue(writer, pairs.first, pairs.second), ...);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
[[nodiscard]] std::pair<char const*, std::decay_t<T>>
|
||||
attr(char const* name, T&& value)
|
||||
{
|
||||
return std::make_pair(name, std::forward<T>(value));
|
||||
}
|
||||
|
||||
} // namespace ripple::log
|
||||
|
||||
#endif
|
||||
|
||||
@@ -32,7 +32,7 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
// The duplication is because Visual Studio 2019 cannot compile that header
|
||||
// even with the option -Zc:__cplusplus added.
|
||||
#define ALWAYS(cond, message, ...) assert((message) && (cond))
|
||||
#define ALWAYS_OR_UNREACHABLE(cond, message, ...) assert((message) && (cond))
|
||||
#define ALWAYS_OR_UNREACHABLE(cond, message) assert((message) && (cond))
|
||||
#define SOMETIMES(cond, message, ...)
|
||||
#define REACHABLE(message, ...)
|
||||
#define UNREACHABLE(message, ...) assert((message) && false)
|
||||
|
||||
@@ -217,7 +217,7 @@ Reader::parse(Value& root, BufferSequence const& bs)
|
||||
std::string s;
|
||||
s.reserve(buffer_size(bs));
|
||||
for (auto const& b : bs)
|
||||
s.append(static_cast<char const*>(b.data()), buffer_size(b));
|
||||
s.append(buffer_cast<char const*>(b), buffer_size(b));
|
||||
return parse(s, root);
|
||||
}
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@
|
||||
#include <xrpl/json/json_forwards.h>
|
||||
|
||||
#include <cstring>
|
||||
#include <limits>
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
@@ -158,9 +159,9 @@ public:
|
||||
using ArrayIndex = UInt;
|
||||
|
||||
static Value const null;
|
||||
static Int const minInt;
|
||||
static Int const maxInt;
|
||||
static UInt const maxUInt;
|
||||
static constexpr Int minInt = std::numeric_limits<Int>::min();
|
||||
static constexpr Int maxInt = std::numeric_limits<Int>::max();
|
||||
static constexpr UInt maxUInt = std::numeric_limits<UInt>::max();
|
||||
|
||||
private:
|
||||
class CZString
|
||||
@@ -263,6 +264,10 @@ public:
|
||||
bool
|
||||
asBool() const;
|
||||
|
||||
/** Correct absolute value from int or unsigned int */
|
||||
UInt
|
||||
asAbsUInt() const;
|
||||
|
||||
// TODO: What is the "empty()" method this docstring mentions?
|
||||
/** isNull() tests to see if this field is null. Don't use this method to
|
||||
test for emptiness: use empty(). */
|
||||
@@ -395,6 +400,9 @@ public:
|
||||
/// Return true if the object has a member named key.
|
||||
bool
|
||||
isMember(std::string const& key) const;
|
||||
/// Return true if the object has a member named key.
|
||||
bool
|
||||
isMember(StaticString const& key) const;
|
||||
|
||||
/// \brief Return a list of the member names.
|
||||
///
|
||||
|
||||
@@ -46,7 +46,7 @@ public:
|
||||
* without formatting (not human friendly).
|
||||
*
|
||||
* The JSON document is written in a single line. It is not intended for 'human'
|
||||
* consumption, but may be useful to support feature such as RPC where bandwith
|
||||
* consumption, but may be useful to support feature such as RPC where bandwidth
|
||||
* is limited. \sa Reader, Value
|
||||
*/
|
||||
|
||||
|
||||
@@ -284,12 +284,14 @@ public:
|
||||
{
|
||||
if (key.type != ltOFFER)
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::ApplyView::dirAppend : only Offers are appended to "
|
||||
"book directories");
|
||||
// Only Offers are appended to book directories. Call dirInsert()
|
||||
// instead
|
||||
return std::nullopt;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
return dirAdd(true, directory, key.key, describe);
|
||||
}
|
||||
@@ -385,6 +387,45 @@ public:
|
||||
emptyDirDelete(Keylet const& directory);
|
||||
};
|
||||
|
||||
namespace directory {
|
||||
/** Helper functions for managing low-level directory operations.
|
||||
These are not part of the ApplyView interface.
|
||||
|
||||
Don't use them unless you really, really know what you're doing.
|
||||
Instead use dirAdd, dirInsert, etc.
|
||||
*/
|
||||
|
||||
std::uint64_t
|
||||
createRoot(
|
||||
ApplyView& view,
|
||||
Keylet const& directory,
|
||||
uint256 const& key,
|
||||
std::function<void(std::shared_ptr<SLE> const&)> const& describe);
|
||||
|
||||
auto
|
||||
findPreviousPage(ApplyView& view, Keylet const& directory, SLE::ref start);
|
||||
|
||||
std::uint64_t
|
||||
insertKey(
|
||||
ApplyView& view,
|
||||
SLE::ref node,
|
||||
std::uint64_t page,
|
||||
bool preserveOrder,
|
||||
STVector256& indexes,
|
||||
uint256 const& key);
|
||||
|
||||
std::optional<std::uint64_t>
|
||||
insertPage(
|
||||
ApplyView& view,
|
||||
std::uint64_t page,
|
||||
SLE::pointer node,
|
||||
std::uint64_t nextPage,
|
||||
SLE::ref next,
|
||||
uint256 const& key,
|
||||
Keylet const& directory,
|
||||
std::function<void(std::shared_ptr<SLE> const&)> const& describe);
|
||||
|
||||
} // namespace directory
|
||||
} // namespace ripple
|
||||
|
||||
#endif
|
||||
|
||||
@@ -24,6 +24,7 @@
|
||||
#include <xrpl/ledger/ApplyView.h>
|
||||
#include <xrpl/ledger/OpenView.h>
|
||||
#include <xrpl/ledger/ReadView.h>
|
||||
#include <xrpl/protocol/Asset.h>
|
||||
#include <xrpl/protocol/Indexes.h>
|
||||
#include <xrpl/protocol/MPTIssue.h>
|
||||
#include <xrpl/protocol/Protocol.h>
|
||||
@@ -242,6 +243,80 @@ isDeepFrozen(
|
||||
Currency const& currency,
|
||||
AccountID const& issuer);
|
||||
|
||||
[[nodiscard]] inline bool
|
||||
isDeepFrozen(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Issue const& issue,
|
||||
int = 0 /*ignored*/)
|
||||
{
|
||||
return isDeepFrozen(view, account, issue.currency, issue.account);
|
||||
}
|
||||
|
||||
[[nodiscard]] inline bool
|
||||
isDeepFrozen(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
MPTIssue const& mptIssue,
|
||||
int depth = 0)
|
||||
{
|
||||
// Unlike IOUs, frozen / locked MPTs are not allowed to send or receive
|
||||
// funds, so checking "deep frozen" is the same as checking "frozen".
|
||||
return isFrozen(view, account, mptIssue, depth);
|
||||
}
|
||||
|
||||
/**
|
||||
* isFrozen check is recursive for MPT shares in a vault, descending to
|
||||
* assets in the vault, up to maxAssetCheckDepth recursion depth. This is
|
||||
* purely defensive, as we currently do not allow such vaults to be created.
|
||||
*/
|
||||
[[nodiscard]] inline bool
|
||||
isDeepFrozen(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Asset const& asset,
|
||||
int depth = 0)
|
||||
{
|
||||
return std::visit(
|
||||
[&](auto const& issue) {
|
||||
return isDeepFrozen(view, account, issue, depth);
|
||||
},
|
||||
asset.value());
|
||||
}
|
||||
|
||||
[[nodiscard]] inline TER
|
||||
checkDeepFrozen(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Issue const& issue)
|
||||
{
|
||||
return isDeepFrozen(view, account, issue) ? (TER)tecFROZEN
|
||||
: (TER)tesSUCCESS;
|
||||
}
|
||||
|
||||
[[nodiscard]] inline TER
|
||||
checkDeepFrozen(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
MPTIssue const& mptIssue)
|
||||
{
|
||||
return isDeepFrozen(view, account, mptIssue) ? (TER)tecLOCKED
|
||||
: (TER)tesSUCCESS;
|
||||
}
|
||||
|
||||
[[nodiscard]] inline TER
|
||||
checkDeepFrozen(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Asset const& asset)
|
||||
{
|
||||
return std::visit(
|
||||
[&](auto const& issue) {
|
||||
return checkDeepFrozen(view, account, issue);
|
||||
},
|
||||
asset.value());
|
||||
}
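A minimal sketch of how a transactor might use the Asset overload above; destination and vaultAsset are illustrative names, not part of this changeset.

if (TER const ter = checkDeepFrozen(view, destination, vaultAsset);
    !isTesSuccess(ter))
    return ter;  // tecFROZEN for IOUs, tecLOCKED for MPTs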
|
||||
|
||||
[[nodiscard]] bool
|
||||
isLPTokenFrozen(
|
||||
ReadView const& view,
|
||||
@@ -287,6 +362,49 @@ accountHolds(
|
||||
AuthHandling zeroIfUnauthorized,
|
||||
beast::Journal j);
|
||||
|
||||
// Returns the amount an account can spend total.
|
||||
//
|
||||
// These functions use accountHolds, but unlike accountHolds:
|
||||
// * The account can go into debt.
|
||||
// * If the account is the asset issuer the only limit is defined by the asset /
|
||||
// issuance.
|
||||
//
|
||||
// <-- saAmount: amount of currency held by account. May be negative.
|
||||
[[nodiscard]] STAmount
|
||||
accountSpendable(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Currency const& currency,
|
||||
AccountID const& issuer,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountSpendable(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Issue const& issue,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountSpendable(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
MPTIssue const& mptIssue,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
AuthHandling zeroIfUnauthorized,
|
||||
beast::Journal j);
|
||||
|
||||
[[nodiscard]] STAmount
|
||||
accountSpendable(
|
||||
ReadView const& view,
|
||||
AccountID const& account,
|
||||
Asset const& asset,
|
||||
FreezeHandling zeroIfFrozen,
|
||||
AuthHandling zeroIfUnauthorized,
|
||||
beast::Journal j);
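A minimal usage sketch of the Asset overload declared above, using the existing FreezeHandling/AuthHandling enumerators; the variable names are illustrative.

STAmount const spendable = accountSpendable(
    view, srcAccount, asset, fhZERO_IF_FROZEN, ahIGNORE_AUTH, j);
// Per the comment above, the result may be negative (debt is allowed).
if (spendable < amountToPay)
    return tecINSUFFICIENT_FUNDS;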
|
||||
|
||||
// Returns the amount an account can spend of the currency type saDefault, or
|
||||
// returns saDefault if this account is the issuer of the currency in
|
||||
// question. Should be used in favor of accountHolds when questioning how much
|
||||
@@ -533,7 +651,11 @@ dirNext(
|
||||
describeOwnerDir(AccountID const& account);
|
||||
|
||||
[[nodiscard]] TER
|
||||
dirLink(ApplyView& view, AccountID const& owner, std::shared_ptr<SLE>& object);
|
||||
dirLink(
|
||||
ApplyView& view,
|
||||
AccountID const& owner,
|
||||
std::shared_ptr<SLE>& object,
|
||||
SF_UINT64 const& node = sfOwnerNode);
|
||||
|
||||
AccountID
|
||||
pseudoAccountAddress(ReadView const& view, uint256 const& pseudoOwnerKey);
|
||||
@@ -552,14 +674,17 @@ createPseudoAccount(
|
||||
uint256 const& pseudoOwnerKey,
|
||||
SField const& ownerField);
|
||||
|
||||
// Returns true iff sleAcct is a pseudo-account.
|
||||
// Returns true iff sleAcct is a pseudo-account or specific
|
||||
// pseudo-accounts in pseudoFieldFilter.
|
||||
//
|
||||
// Returns false if sleAcct is
|
||||
// * NOT a pseudo-account OR
|
||||
// * NOT a ltACCOUNT_ROOT OR
|
||||
// * null pointer
|
||||
[[nodiscard]] bool
|
||||
isPseudoAccount(std::shared_ptr<SLE const> sleAcct);
|
||||
isPseudoAccount(
|
||||
std::shared_ptr<SLE const> sleAcct,
|
||||
std::set<SField const*> const& pseudoFieldFilter = {});
|
||||
|
||||
// Returns the list of fields that define an ACCOUNT_ROOT as a pseudo-account if
|
||||
// set
|
||||
@@ -573,14 +698,91 @@ isPseudoAccount(std::shared_ptr<SLE const> sleAcct);
|
||||
getPseudoAccountFields();
|
||||
|
||||
[[nodiscard]] inline bool
|
||||
isPseudoAccount(ReadView const& view, AccountID accountId)
|
||||
isPseudoAccount(
|
||||
ReadView const& view,
|
||||
AccountID const& accountId,
|
||||
std::set<SField const*> const& pseudoFieldFilter = {})
|
||||
{
|
||||
return isPseudoAccount(view.read(keylet::account(accountId)));
|
||||
return isPseudoAccount(
|
||||
view.read(keylet::account(accountId)), pseudoFieldFilter);
|
||||
}
|
||||
|
||||
[[nodiscard]] TER
|
||||
canAddHolding(ReadView const& view, Asset const& asset);
|
||||
|
||||
/** Validates that the destination SLE and tag are valid
|
||||
|
||||
- Checks that the SLE is not null.
|
||||
- If the SLE requires a destination tag, checks that there is a tag.
|
||||
*/
|
||||
[[nodiscard]] TER
|
||||
checkDestinationAndTag(SLE::const_ref toSle, bool hasDestinationTag);
|
||||
|
||||
/** Checks whether funds can be withdrawn from an object to itself or a destination.
|
||||
*
|
||||
* The receiver may be either the submitting account (sfAccount) or a different
|
||||
* destination account (sfDestination).
|
||||
*
|
||||
* - Checks that the receiver account exists.
|
||||
* - If the receiver requires a destination tag, check that one exists, even
|
||||
* if withdrawing to self.
|
||||
* - If withdrawing to self, succeed.
|
||||
* - If not, checks if the receiver requires deposit authorization, and if
|
||||
* the sender has it.
|
||||
*/
|
||||
[[nodiscard]] TER
|
||||
canWithdraw(
|
||||
AccountID const& from,
|
||||
ReadView const& view,
|
||||
AccountID const& to,
|
||||
SLE::const_ref toSle,
|
||||
bool hasDestinationTag);
|
||||
|
||||
/** Checks whether funds can be withdrawn from an object to itself or a destination.
|
||||
*
|
||||
* The receiver may be either the submitting account (sfAccount) or a different
|
||||
* destination account (sfDestination).
|
||||
*
|
||||
* - Checks that the receiver account exists.
|
||||
* - If the receiver requires a destination tag, check that one exists, even
|
||||
* if withdrawing to self.
|
||||
* - If withdrawing to self, succeed.
|
||||
* - If not, checks if the receiver requires deposit authorization, and if
|
||||
* the sender has it.
|
||||
*/
|
||||
[[nodiscard]] TER
|
||||
canWithdraw(
|
||||
AccountID const& from,
|
||||
ReadView const& view,
|
||||
AccountID const& to,
|
||||
bool hasDestinationTag);
|
||||
|
||||
/** Checks whether funds can be withdrawn from an object to itself or a destination.
|
||||
*
|
||||
* The receiver may be either the submitting account (sfAccount) or a different
|
||||
* destination account (sfDestination).
|
||||
*
|
||||
* - Checks that the receiver account exists.
|
||||
* - If the receiver requires a destination tag, check that one exists, even
|
||||
* if withdrawing to self.
|
||||
* - If withdrawing to self, succeed.
|
||||
* - If not, checks if the receiver requires deposit authorization, and if
|
||||
* the sender has it.
|
||||
*/
|
||||
[[nodiscard]] TER
|
||||
canWithdraw(ReadView const& view, STTx const& tx);
|
||||
|
||||
[[nodiscard]] TER
|
||||
doWithdraw(
|
||||
ApplyView& view,
|
||||
STTx const& tx,
|
||||
AccountID const& senderAcct,
|
||||
AccountID const& dstAcct,
|
||||
AccountID const& sourceAcct,
|
||||
XRPAmount priorBalance,
|
||||
STAmount const& amount,
|
||||
beast::Journal j);
|
||||
|
||||
/// Any transactors that call addEmptyHolding() in doApply must call
|
||||
/// canAddHolding() in preflight with the same View and Asset
|
||||
[[nodiscard]] TER
|
||||
@@ -750,6 +952,22 @@ accountSend(
|
||||
beast::Journal j,
|
||||
WaiveTransferFee waiveFee = WaiveTransferFee::No);
|
||||
|
||||
using MultiplePaymentDestinations = std::vector<std::pair<AccountID, Number>>;
|
||||
/** Like accountSend, except one account is sending multiple payments (with the
|
||||
* same asset!) simultaneously
|
||||
*
|
||||
* Calls static accountSendMultiIOU if saAmount represents Issue.
|
||||
* Calls static accountSendMultiMPT if saAmount represents MPTIssue.
|
||||
*/
|
||||
[[nodiscard]] TER
|
||||
accountSendMulti(
|
||||
ApplyView& view,
|
||||
AccountID const& senderID,
|
||||
Asset const& asset,
|
||||
MultiplePaymentDestinations const& receivers,
|
||||
beast::Journal j,
|
||||
WaiveTransferFee waiveFee = WaiveTransferFee::No);
|
||||
|
||||
[[nodiscard]] TER
|
||||
issueIOU(
|
||||
ApplyView& view,
|
||||
@@ -821,7 +1039,8 @@ requireAuth(
|
||||
* purely defensive, as we currently do not allow such vaults to be created.
|
||||
*
|
||||
* If StrongAuth then return tecNO_AUTH if MPToken doesn't exist or
|
||||
* lsfMPTRequireAuth is set and MPToken is not authorized.
|
||||
* lsfMPTRequireAuth is set and MPToken is not authorized. Vault and LoanBroker
|
||||
* pseudo-accounts are implicitly authorized.
|
||||
*
|
||||
* If WeakAuth then return tecNO_AUTH if lsfMPTRequireAuth is set and MPToken
|
||||
* doesn't exist or is not authorized (explicitly or via credentials, if
|
||||
@@ -894,6 +1113,26 @@ canTransfer(
|
||||
AccountID const& from,
|
||||
AccountID const& to);
|
||||
|
||||
[[nodiscard]] TER
|
||||
canTransfer(
|
||||
ReadView const& view,
|
||||
Issue const& issue,
|
||||
AccountID const& from,
|
||||
AccountID const& to);
|
||||
|
||||
[[nodiscard]] TER inline canTransfer(
|
||||
ReadView const& view,
|
||||
Asset const& asset,
|
||||
AccountID const& from,
|
||||
AccountID const& to)
|
||||
{
|
||||
return std::visit(
|
||||
[&]<ValidIssueType TIss>(TIss const& issue) -> TER {
|
||||
return canTransfer(view, issue, from, to);
|
||||
},
|
||||
asset.value());
|
||||
}
|
||||
|
||||
/** Deleter function prototype. Returns the status of the entry deletion
|
||||
* (if should not be skipped) and if the entry should be skipped. The status
|
||||
* is always tesSUCCESS if the entry should be skipped.
|
||||
|
||||
@@ -47,7 +47,7 @@ public:
|
||||
|
||||
public:
|
||||
AutoSocket(
|
||||
boost::asio::io_context& s,
|
||||
boost::asio::io_service& s,
|
||||
boost::asio::ssl::context& c,
|
||||
bool secureOnly,
|
||||
bool plainOnly)
|
||||
@@ -58,7 +58,7 @@ public:
|
||||
mSocket = std::make_unique<ssl_socket>(s, c);
|
||||
}
|
||||
|
||||
AutoSocket(boost::asio::io_context& s, boost::asio::ssl::context& c)
|
||||
AutoSocket(boost::asio::io_service& s, boost::asio::ssl::context& c)
|
||||
: AutoSocket(s, c, false, false)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
#include <xrpl/basics/ByteUtilities.h>
|
||||
#include <xrpl/beast/utility/Journal.h>
|
||||
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/io_service.hpp>
|
||||
#include <boost/asio/streambuf.hpp>
|
||||
|
||||
#include <chrono>
|
||||
@@ -51,7 +51,7 @@ public:
|
||||
|
||||
static void
|
||||
get(bool bSSL,
|
||||
boost::asio::io_context& io_context,
|
||||
boost::asio::io_service& io_service,
|
||||
std::deque<std::string> deqSites,
|
||||
unsigned short const port,
|
||||
std::string const& strPath,
|
||||
@@ -65,7 +65,7 @@ public:
|
||||
|
||||
static void
|
||||
get(bool bSSL,
|
||||
boost::asio::io_context& io_context,
|
||||
boost::asio::io_service& io_service,
|
||||
std::string strSite,
|
||||
unsigned short const port,
|
||||
std::string const& strPath,
|
||||
@@ -80,7 +80,7 @@ public:
|
||||
static void
|
||||
request(
|
||||
bool bSSL,
|
||||
boost::asio::io_context& io_context,
|
||||
boost::asio::io_service& io_service,
|
||||
std::string strSite,
|
||||
unsigned short const port,
|
||||
std::function<
|
||||
|
||||
@@ -153,7 +153,7 @@ public:
|
||||
{
|
||||
strm.set_verify_callback(
|
||||
std::bind(
|
||||
&rfc6125_verify,
|
||||
&rfc2818_verify,
|
||||
host,
|
||||
std::placeholders::_1,
|
||||
std::placeholders::_2,
|
||||
@@ -167,7 +167,7 @@ public:
|
||||
|
||||
/**
|
||||
* @brief callback invoked for name verification - just passes through
|
||||
* to the asio `host_name_verification` (rfc6125) implementation.
|
||||
* to the asio rfc2818 implementation.
|
||||
*
|
||||
* @param domain hostname expected
|
||||
* @param preverified passed by implementation
|
||||
@@ -175,13 +175,13 @@ public:
|
||||
* @param j journal for logging
|
||||
*/
|
||||
static bool
|
||||
rfc6125_verify(
|
||||
rfc2818_verify(
|
||||
std::string const& domain,
|
||||
bool preverified,
|
||||
boost::asio::ssl::verify_context& ctx,
|
||||
beast::Journal j)
|
||||
{
|
||||
if (boost::asio::ssl::host_name_verification(domain)(preverified, ctx))
|
||||
if (boost::asio::ssl::rfc2818_verification(domain)(preverified, ctx))
|
||||
return true;
|
||||
|
||||
JLOG(j.warn()) << "Outbound SSL connection to " << domain
|
||||
|
||||
@@ -100,7 +100,27 @@ public:
|
||||
bool
|
||||
native() const
|
||||
{
|
||||
return holds<Issue>() && get<Issue>().native();
|
||||
return std::visit(
|
||||
[&]<ValidIssueType TIss>(TIss const& issue) {
|
||||
if constexpr (std::is_same_v<TIss, Issue>)
|
||||
return issue.native();
|
||||
if constexpr (std::is_same_v<TIss, MPTIssue>)
|
||||
return false;
|
||||
},
|
||||
issue_);
|
||||
}
|
||||
|
||||
bool
|
||||
integral() const
|
||||
{
|
||||
return std::visit(
|
||||
[&]<ValidIssueType TIss>(TIss const& issue) {
|
||||
if constexpr (std::is_same_v<TIss, Issue>)
|
||||
return issue.native();
|
||||
if constexpr (std::is_same_v<TIss, MPTIssue>)
|
||||
return true;
|
||||
},
|
||||
issue_);
|
||||
}
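A small illustration of the distinction (assumes the existing xrpIssue() helper): XRP is both native and integral, an MPT-backed Asset is integral but not native, and a non-XRP IOU is neither.

Asset const xrp{xrpIssue()};
// xrp.native() == true, xrp.integral() == true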
|
||||
|
||||
friend constexpr bool
|
||||
|
||||
@@ -346,6 +346,24 @@ vault(uint256 const& vaultKey)
|
||||
return {ltVAULT, vaultKey};
|
||||
}
|
||||
|
||||
Keylet
|
||||
loanbroker(AccountID const& owner, std::uint32_t seq) noexcept;
|
||||
|
||||
inline Keylet
|
||||
loanbroker(uint256 const& key)
|
||||
{
|
||||
return {ltLOAN_BROKER, key};
|
||||
}
|
||||
|
||||
Keylet
|
||||
loan(uint256 const& loanBrokerID, std::uint32_t loanSeq) noexcept;
|
||||
|
||||
inline Keylet
|
||||
loan(uint256 const& key)
|
||||
{
|
||||
return {ltLOAN, key};
|
||||
}
|
||||
|
||||
Keylet
|
||||
permissionedDomain(AccountID const& account, std::uint32_t seq) noexcept;
|
||||
|
||||
|
||||
@@ -188,14 +188,14 @@ enum LedgerSpecificFlags {
|
||||
lsfMPTCanTransfer = 0x00000020,
|
||||
lsfMPTCanClawback = 0x00000040,
|
||||
|
||||
lmfMPTCanMutateCanLock = 0x00000002,
|
||||
lmfMPTCanMutateRequireAuth = 0x00000004,
|
||||
lmfMPTCanMutateCanEscrow = 0x00000008,
|
||||
lmfMPTCanMutateCanTrade = 0x00000010,
|
||||
lmfMPTCanMutateCanTransfer = 0x00000020,
|
||||
lmfMPTCanMutateCanClawback = 0x00000040,
|
||||
lmfMPTCanMutateMetadata = 0x00010000,
|
||||
lmfMPTCanMutateTransferFee = 0x00020000,
|
||||
lsmfMPTCanMutateCanLock = 0x00000002,
|
||||
lsmfMPTCanMutateRequireAuth = 0x00000004,
|
||||
lsmfMPTCanMutateCanEscrow = 0x00000008,
|
||||
lsmfMPTCanMutateCanTrade = 0x00000010,
|
||||
lsmfMPTCanMutateCanTransfer = 0x00000020,
|
||||
lsmfMPTCanMutateCanClawback = 0x00000040,
|
||||
lsmfMPTCanMutateMetadata = 0x00010000,
|
||||
lsmfMPTCanMutateTransferFee = 0x00020000,
|
||||
|
||||
// ltMPTOKEN
|
||||
lsfMPTAuthorized = 0x00000002,
|
||||
@@ -205,6 +205,11 @@ enum LedgerSpecificFlags {
|
||||
|
||||
// ltVAULT
|
||||
lsfVaultPrivate = 0x00010000,
|
||||
|
||||
// ltLOAN
|
||||
lsfLoanDefault = 0x00010000,
|
||||
lsfLoanImpaired = 0x00020000,
|
||||
lsfLoanOverpayment = 0x00040000, // True, loan allows overpayments
|
||||
};
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
@@ -86,6 +86,9 @@ public:
|
||||
std::optional<TxType>
|
||||
getGranularTxType(GranularPermissionType const& gpType) const;
|
||||
|
||||
std::optional<std::reference_wrapper<uint256 const>> const
|
||||
getTxFeature(TxType txType) const;
|
||||
|
||||
bool
|
||||
isDelegatable(std::uint32_t const& permissionValue, Rules const& rules)
|
||||
const;
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
|
||||
#include <xrpl/basics/ByteUtilities.h>
|
||||
#include <xrpl/basics/base_uint.h>
|
||||
#include <xrpl/protocol/Units.h>
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
@@ -55,7 +56,10 @@ std::size_t constexpr oversizeMetaDataCap = 5200;
|
||||
/** The maximum number of entries per directory page */
|
||||
std::size_t constexpr dirNodeMaxEntries = 32;
|
||||
|
||||
/** The maximum number of pages allowed in a directory */
|
||||
/** The maximum number of pages allowed in a directory
|
||||
|
||||
Made obsolete by fixDirectoryLimit amendment.
|
||||
*/
|
||||
std::uint64_t constexpr dirNodeMaxPages = 262144;
|
||||
|
||||
/** The maximum number of items in an NFT page */
|
||||
@@ -81,6 +85,140 @@ std::size_t constexpr maxDeletableTokenOfferEntries = 500;
|
||||
*/
|
||||
std::uint16_t constexpr maxTransferFee = 50000;
|
||||
|
||||
/** There are 10,000 basis points (bips) in 100%.
|
||||
*
|
||||
* Basis points represent 0.01%.
|
||||
*
|
||||
* Given a value X, to find the amount for B bps,
|
||||
* use X * B / bipsPerUnity
|
||||
*
|
||||
* Example: If a loan broker has 999 XRP of debt, and must maintain 1,000 bps of
|
||||
* that debt as cover (10%), then the minimum cover amount is 999,000,000 drops
|
||||
* * 1000 / bipsPerUnity = 99,900,00 drops or 99.9 XRP.
|
||||
*
|
||||
* Given a percentage P, to find the number of bps that percentage represents,
|
||||
* use P * bipsPerUnity.
|
||||
*
|
||||
* Example: 50% is 0.50 * bipsPerUnity = 5,000 bps.
|
||||
*/
|
||||
Bips32 constexpr bipsPerUnity(100 * 100);
|
||||
static_assert(bipsPerUnity == Bips32{10'000});
|
||||
TenthBips32 constexpr tenthBipsPerUnity(bipsPerUnity.value() * 10);
|
||||
static_assert(tenthBipsPerUnity == TenthBips32(100'000));
|
||||
|
||||
constexpr Bips32
|
||||
percentageToBips(std::uint32_t percentage)
|
||||
{
|
||||
return Bips32(percentage * bipsPerUnity.value() / 100);
|
||||
}
|
||||
constexpr TenthBips32
|
||||
percentageToTenthBips(std::uint32_t percentage)
|
||||
{
|
||||
return TenthBips32(percentage * tenthBipsPerUnity.value() / 100);
|
||||
}
|
||||
template <typename T, class TBips>
|
||||
constexpr T
|
||||
bipsOfValue(T value, Bips<TBips> bips)
|
||||
{
|
||||
return value * bips.value() / bipsPerUnity.value();
|
||||
}
|
||||
template <typename T, class TBips>
|
||||
constexpr T
|
||||
tenthBipsOfValue(T value, TenthBips<TBips> bips)
|
||||
{
|
||||
return value * bips.value() / tenthBipsPerUnity.value();
|
||||
}
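A compile-time restatement of the cover example in the comment above, using the helpers just defined (the debt figure is illustrative):

constexpr std::uint64_t debtDrops = 999'000'000;               // 999 XRP
constexpr auto minCover = bipsOfValue(debtDrops, percentageToBips(10));
static_assert(minCover == 99'900'000);                         // 99.9 XRP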
|
||||
|
||||
namespace Lending {
|
||||
/** The maximum management fee rate allowed by a loan broker in 1/10 bips.
|
||||
|
||||
Valid values are between 0 and 10% inclusive.
|
||||
*/
|
||||
TenthBips16 constexpr maxManagementFeeRate(
|
||||
unsafe_cast<std::uint16_t>(percentageToTenthBips(10).value()));
|
||||
static_assert(maxManagementFeeRate == TenthBips16(std::uint16_t(10'000u)));
|
||||
|
||||
/** The maximum coverage rate required of a loan broker in 1/10 bips.
|
||||
|
||||
Valid values are between 0 and 100% inclusive.
|
||||
*/
|
||||
TenthBips32 constexpr maxCoverRate = percentageToTenthBips(100);
|
||||
static_assert(maxCoverRate == TenthBips32(100'000u));
|
||||
|
||||
/** The maximum overpayment fee on a loan in 1/10 bips.
|
||||
*
|
||||
Valid values are between 0 and 100% inclusive.
|
||||
*/
|
||||
TenthBips32 constexpr maxOverpaymentFee = percentageToTenthBips(100);
|
||||
static_assert(maxOverpaymentFee == TenthBips32(100'000u));
|
||||
|
||||
/** Annualized interest rate of the Loan in 1/10 bips.
|
||||
*
|
||||
* Valid values are between 0 and 100% inclusive.
|
||||
*/
|
||||
TenthBips32 constexpr maxInterestRate = percentageToTenthBips(100);
|
||||
static_assert(maxInterestRate == TenthBips32(100'000u));
|
||||
|
||||
/** The maximum premium added to the interest rate for late payments on a loan
|
||||
* in 1/10 bips.
|
||||
*
|
||||
* Valid values are between 0 and 100% inclusive.
|
||||
*/
|
||||
TenthBips32 constexpr maxLateInterestRate = percentageToTenthBips(100);
|
||||
static_assert(maxLateInterestRate == TenthBips32(100'000u));
|
||||
|
||||
/** The maximum close interest rate charged for repaying a loan early in 1/10
|
||||
* bips.
|
||||
*
|
||||
* Valid values are between 0 and 100% inclusive.
|
||||
*/
|
||||
TenthBips32 constexpr maxCloseInterestRate = percentageToTenthBips(100);
|
||||
static_assert(maxCloseInterestRate == TenthBips32(100'000u));
|
||||
|
||||
/** The maximum overpayment interest rate charged on loan overpayments in 1/10
|
||||
* bips.
|
||||
*
|
||||
* Valid values are between 0 and 100% inclusive.
|
||||
*/
|
||||
TenthBips32 constexpr maxOverpaymentInterestRate = percentageToTenthBips(100);
|
||||
static_assert(maxOverpaymentInterestRate == TenthBips32(100'000u));
|
||||
|
||||
/** LoanPay transaction cost will be one base fee per X combined payments
|
||||
*
|
||||
* The number of payments is estimated based on the Amount paid and the Loan's
|
||||
* Fixed Payment size. Overpayments (indicated with the tfLoanOverpayment flag)
|
||||
* count as one more payment.
|
||||
*
|
||||
* This number was chosen arbitrarily, but should not be changed once released
|
||||
* without an amendment
|
||||
*/
|
||||
static constexpr int loanPaymentsPerFeeIncrement = 5;
|
||||
|
||||
/** Maximum number of combined payments that a LoanPay transaction will process
|
||||
*
|
||||
* This limit is enforced during the loan payment process, and thus is not
|
||||
* estimated. If the limit is hit, no further payments or overpayments will be
|
||||
* processed, no matter how much of the transaction Amount is left, but the
|
||||
* transaction will succeed with the payments that have been processed up to
|
||||
* that point.
|
||||
*
|
||||
* This limit is independent of loanPaymentsPerFeeIncrement, so a transaction
|
||||
* could potentially be charged for many more payments than actually get
|
||||
* processed. Users should take care not to submit a transaction paying more
|
||||
* than loanMaximumPaymentsPerTransaction * Loan.PeriodicPayment. Because
|
||||
* overpayments are charged as a payment, if submitting
|
||||
* loanMaximumPaymentsPerTransaction * Loan.PeriodicPayment, users should not
|
||||
* set the tfLoanOverpayment flag.
|
||||
*
|
||||
* Even though they're independent, loanMaximumPaymentsPerTransaction should be
|
||||
* a multiple of loanPaymentsPerFeeIncrement.
|
||||
*
|
||||
* This number was chosen arbitrarily, but should not be changed once released
|
||||
* without an amendment
|
||||
*/
|
||||
static constexpr int loanMaximumPaymentsPerTransaction = 100;
|
||||
} // namespace Lending
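Purely illustrative arithmetic for loanPaymentsPerFeeIncrement (the actual fee logic lives in the LoanPay transactor and may round differently; the ceiling division here is only an assumption used to show the intended scale):

constexpr int coveredPayments = 12;  // payments one transaction would cover
constexpr int feeIncrements =
    (coveredPayments + Lending::loanPaymentsPerFeeIncrement - 1) /
    Lending::loanPaymentsPerFeeIncrement;   // 3 base-fee increments of 5 payments
static_assert(feeIncrements == 3);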
|
||||
|
||||
/** The maximum length of a URI inside an NFT */
|
||||
std::size_t constexpr maxTokenURILength = 256;
|
||||
|
||||
|
||||
@@ -72,8 +72,10 @@ class STCurrency;
|
||||
STYPE(STI_VL, 7) \
|
||||
STYPE(STI_ACCOUNT, 8) \
|
||||
STYPE(STI_NUMBER, 9) \
|
||||
STYPE(STI_INT32, 10) \
|
||||
STYPE(STI_INT64, 11) \
|
||||
\
|
||||
/* 10-13 are reserved */ \
|
||||
/* 12-13 are reserved */ \
|
||||
STYPE(STI_OBJECT, 14) \
|
||||
STYPE(STI_ARRAY, 15) \
|
||||
\
|
||||
@@ -137,8 +139,8 @@ field_code(int id, int index)
|
||||
SFields are created at compile time.
|
||||
|
||||
Each SField, once constructed, lives until program termination, and there
|
||||
is only one instance per fieldType/fieldValue pair which serves the entire
|
||||
application.
|
||||
is only one instance per fieldType/fieldValue pair which serves the
|
||||
entire application.
|
||||
*/
|
||||
class SField
|
||||
{
|
||||
@@ -356,6 +358,9 @@ using SF_UINT256 = TypedField<STBitString<256>>;
|
||||
using SF_UINT384 = TypedField<STBitString<384>>;
|
||||
using SF_UINT512 = TypedField<STBitString<512>>;
|
||||
|
||||
using SF_INT32 = TypedField<STInteger<std::int32_t>>;
|
||||
using SF_INT64 = TypedField<STInteger<std::int64_t>>;
|
||||
|
||||
using SF_ACCOUNT = TypedField<STAccount>;
|
||||
using SF_AMOUNT = TypedField<STAmount>;
|
||||
using SF_ISSUE = TypedField<STIssue>;
|
||||
|
||||
@@ -66,16 +66,18 @@ public:
|
||||
static int const cMaxOffset = 80;
|
||||
|
||||
// Maximum native value supported by the code
|
||||
static std::uint64_t const cMinValue = 1000000000000000ull;
|
||||
static std::uint64_t const cMaxValue = 9999999999999999ull;
|
||||
static std::uint64_t const cMaxNative = 9000000000000000000ull;
|
||||
constexpr static std::uint64_t cMinValue = 1'000'000'000'000'000ull;
|
||||
static_assert(isPowerOfTen(cMinValue));
|
||||
constexpr static std::uint64_t cMaxValue = cMinValue * 10 - 1;
|
||||
static_assert(cMaxValue == 9'999'999'999'999'999ull);
|
||||
constexpr static std::uint64_t cMaxNative = 9'000'000'000'000'000'000ull;
|
||||
|
||||
// Max native value on network.
|
||||
static std::uint64_t const cMaxNativeN = 100000000000000000ull;
|
||||
static std::uint64_t const cIssuedCurrency = 0x8000000000000000ull;
|
||||
static std::uint64_t const cPositive = 0x4000000000000000ull;
|
||||
static std::uint64_t const cMPToken = 0x2000000000000000ull;
|
||||
static std::uint64_t const cValueMask = ~(cPositive | cMPToken);
|
||||
constexpr static std::uint64_t cMaxNativeN = 100'000'000'000'000'000ull;
|
||||
constexpr static std::uint64_t cIssuedCurrency = 0x8'000'000'000'000'000ull;
|
||||
constexpr static std::uint64_t cPositive = 0x4'000'000'000'000'000ull;
|
||||
constexpr static std::uint64_t cMPToken = 0x2'000'000'000'000'000ull;
|
||||
constexpr static std::uint64_t cValueMask = ~(cPositive | cMPToken);
|
||||
|
||||
static std::uint64_t const uRateOne;
|
||||
|
||||
@@ -174,6 +176,9 @@ public:
|
||||
int
|
||||
exponent() const noexcept;
|
||||
|
||||
bool
|
||||
integral() const noexcept;
|
||||
|
||||
bool
|
||||
native() const noexcept;
|
||||
|
||||
@@ -454,6 +459,12 @@ STAmount::exponent() const noexcept
|
||||
return mOffset;
|
||||
}
|
||||
|
||||
inline bool
|
||||
STAmount::integral() const noexcept
|
||||
{
|
||||
return mAsset.integral();
|
||||
}
|
||||
|
||||
inline bool
|
||||
STAmount::native() const noexcept
|
||||
{
|
||||
@@ -572,7 +583,7 @@ STAmount::clear()
|
||||
{
|
||||
// The -100 is used to allow 0 to sort less than small positive values
|
||||
// which have a negative exponent.
|
||||
mOffset = native() ? 0 : -100;
|
||||
mOffset = integral() ? 0 : -100;
|
||||
mValue = 0;
|
||||
mIsNegative = false;
|
||||
}
|
||||
@@ -695,6 +706,53 @@ divRoundStrict(
|
||||
std::uint64_t
|
||||
getRate(STAmount const& offerOut, STAmount const& offerIn);
|
||||
|
||||
/** Round an arbitrary precision Amount to the precision of an STAmount that has
|
||||
* a given exponent.
|
||||
*
|
||||
* This is used to ensure that calculations involving IOU amounts do not collect
|
||||
* dust beyond the precision of the reference value.
|
||||
*
|
||||
* @param value The value to be rounded
|
||||
* @param scale An exponent value to establish the precision limit of
|
||||
* `value`. Should be larger than `value.exponent()`.
|
||||
* @param rounding Optional Number rounding mode
|
||||
*
|
||||
*/
|
||||
STAmount
|
||||
roundToScale(
|
||||
STAmount const& value,
|
||||
std::int32_t scale,
|
||||
Number::rounding_mode rounding = Number::getround());
|
||||
|
||||
/** Round an arbitrary precision Number to the precision of a given Asset.
|
||||
*
|
||||
* This is used to ensure that calculations do not collect dust beyond the
|
||||
* precision of the reference value for IOUs, or fractional amounts for the
|
||||
* integral types XRP and MPT.
|
||||
*
|
||||
* @param asset The relevant asset
|
||||
* @param value The value to be rounded
|
||||
* @param scale Only relevant to IOU assets. An exponent value to establish the
|
||||
* precision limit of `value`. Should be larger than `value.exponent()`.
|
||||
* @param rounding Optional Number rounding mode
|
||||
*/
|
||||
template <AssetType A>
|
||||
Number
|
||||
roundToAsset(
|
||||
A const& asset,
|
||||
Number const& value,
|
||||
std::int32_t scale,
|
||||
Number::rounding_mode rounding = Number::getround())
|
||||
{
|
||||
NumberRoundModeGuard mg(rounding);
|
||||
STAmount const ret{asset, value};
|
||||
if (ret.integral())
|
||||
return ret;
|
||||
// Note that the ctor will round integral types (XRP, MPT) via canonicalize,
|
||||
// so no extra work is needed for those.
|
||||
return roundToScale(ret, scale);
|
||||
}
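A minimal usage sketch; iouAsset, rawInterest, and loanScale are illustrative placeholders rather than names from this changeset.

// Round an arbitrary-precision intermediate result to the asset's precision.
Number const rounded = roundToAsset(iouAsset, rawInterest, loanScale);
// Or round an existing STAmount to a fixed exponent (scale).
STAmount const stored = roundToScale(STAmount{iouAsset, rawInterest}, loanScale);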
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
inline bool
|
||||
|
||||
@@ -81,6 +81,8 @@ using STUInt16 = STInteger<std::uint16_t>;
|
||||
using STUInt32 = STInteger<std::uint32_t>;
|
||||
using STUInt64 = STInteger<std::uint64_t>;
|
||||
|
||||
using STInt32 = STInteger<std::int32_t>;
|
||||
|
||||
template <typename Integer>
|
||||
inline STInteger<Integer>::STInteger(Integer v) : value_(v)
|
||||
{
|
||||
|
||||
@@ -231,6 +231,8 @@ public:
|
||||
getFieldH192(SField const& field) const;
|
||||
uint256
|
||||
getFieldH256(SField const& field) const;
|
||||
std::int32_t
|
||||
getFieldI32(SField const& field) const;
|
||||
AccountID
|
||||
getAccountID(SField const& field) const;
|
||||
|
||||
@@ -242,6 +244,9 @@ public:
|
||||
getFieldPathSet(SField const& field) const;
|
||||
STVector256 const&
|
||||
getFieldV256(SField const& field) const;
|
||||
// If not found, returns an object constructed with the given field
|
||||
STObject
|
||||
getFieldObject(SField const& field) const;
|
||||
STArray const&
|
||||
getFieldArray(SField const& field) const;
|
||||
STCurrency const&
|
||||
@@ -365,6 +370,8 @@ public:
|
||||
void
|
||||
setFieldH256(SField const& field, uint256 const&);
|
||||
void
|
||||
setFieldI32(SField const& field, std::int32_t);
|
||||
void
|
||||
setFieldVL(SField const& field, Blob const&);
|
||||
void
|
||||
setFieldVL(SField const& field, Slice const&);
|
||||
@@ -386,6 +393,8 @@ public:
|
||||
setFieldV256(SField const& field, STVector256 const& v);
|
||||
void
|
||||
setFieldArray(SField const& field, STArray const& v);
|
||||
void
|
||||
setFieldObject(SField const& field, STObject const& v);
|
||||
|
||||
template <class Tag>
|
||||
void
|
||||
@@ -492,6 +501,8 @@ public:
|
||||
value_type
|
||||
operator*() const;
|
||||
|
||||
/// Do not use operator->() unless the field is required, or you've checked
|
||||
/// that it's set.
|
||||
T const*
|
||||
operator->() const;
|
||||
|
||||
@@ -515,7 +526,26 @@ protected:
|
||||
// Constraint += and -= ValueProxy operators
|
||||
// to value types that support arithmetic operations
|
||||
template <typename U>
|
||||
concept IsArithmetic = std::is_arithmetic_v<U> || std::is_same_v<U, STAmount>;
|
||||
concept IsArithmeticNumber = std::is_arithmetic_v<U> ||
|
||||
std::is_same_v<U, Number> || std::is_same_v<U, STAmount>;
|
||||
template <
|
||||
typename U,
|
||||
typename Value = typename U::value_type,
|
||||
typename Unit = typename U::unit_type>
|
||||
concept IsArithmeticValueUnit =
|
||||
std::is_same_v<U, unit::ValueUnit<Unit, Value>> &&
|
||||
IsArithmeticNumber<Value> && std::is_class_v<Unit>;
|
||||
template <typename U, typename Value = typename U::value_type>
|
||||
concept IsArithmeticST = !IsArithmeticValueUnit<U> && IsArithmeticNumber<Value>;
|
||||
template <typename U>
|
||||
concept IsArithmetic =
|
||||
IsArithmeticNumber<U> || IsArithmeticST<U> || IsArithmeticValueUnit<U>;
|
||||
|
||||
template <class T, class U>
|
||||
concept Addable = requires(T t, U u) { t = t + u; };
|
||||
template <typename T, typename U>
|
||||
concept IsArithmeticCompatible =
|
||||
IsArithmetic<typename T::value_type> && Addable<typename T::value_type, U>;
|
||||
|
||||
template <class T>
|
||||
class STObject::ValueProxy : public Proxy<T>
|
||||
@@ -535,10 +565,12 @@ public:
|
||||
// Convenience operators for value types supporting
|
||||
// arithmetic operations
|
||||
template <IsArithmetic U>
|
||||
requires IsArithmeticCompatible<T, U>
|
||||
ValueProxy&
|
||||
operator+=(U const& u);
|
||||
|
||||
template <IsArithmetic U>
|
||||
requires IsArithmeticCompatible<T, U>
|
||||
ValueProxy&
|
||||
operator-=(U const& u);
|
||||
|
||||
@@ -728,6 +760,8 @@ STObject::Proxy<T>::operator*() const -> value_type
|
||||
return this->value();
|
||||
}
|
||||
|
||||
/// Do not use operator->() unless the field is required, or you've checked that
|
||||
/// it's set.
|
||||
template <class T>
|
||||
T const*
|
||||
STObject::Proxy<T>::operator->() const
|
||||
@@ -774,6 +808,7 @@ STObject::ValueProxy<T>::operator=(U&& u)
|
||||
|
||||
template <typename T>
|
||||
template <IsArithmetic U>
|
||||
requires IsArithmeticCompatible<T, U>
|
||||
STObject::ValueProxy<T>&
|
||||
STObject::ValueProxy<T>::operator+=(U const& u)
|
||||
{
|
||||
@@ -783,6 +818,7 @@ STObject::ValueProxy<T>::operator+=(U const& u)
|
||||
|
||||
template <class T>
|
||||
template <IsArithmetic U>
|
||||
requires IsArithmeticCompatible<T, U>
|
||||
STObject::ValueProxy<T>&
|
||||
STObject::ValueProxy<T>::operator-=(U const& u)
|
||||
{
|
||||
|
||||
@@ -87,8 +87,14 @@ public:
|
||||
getFullText() const override;
|
||||
|
||||
// Outer transaction functions / signature functions.
|
||||
static Blob
|
||||
getSignature(STObject const& sigObject);
|
||||
|
||||
Blob
|
||||
getSignature() const;
|
||||
getSignature() const
|
||||
{
|
||||
return getSignature(*this);
|
||||
}
|
||||
|
||||
uint256
|
||||
getSigningHash() const;
|
||||
@@ -119,13 +125,20 @@ public:
|
||||
getJson(JsonOptions options, bool binary) const;
|
||||
|
||||
void
|
||||
sign(PublicKey const& publicKey, SecretKey const& secretKey);
|
||||
sign(
|
||||
PublicKey const& publicKey,
|
||||
SecretKey const& secretKey,
|
||||
std::optional<std::reference_wrapper<SField const>> signatureTarget =
|
||||
{});
|
||||
|
||||
/** Check the signature.
|
||||
@return `true` if valid signature. If invalid, the error message string.
|
||||
*/
|
||||
enum class RequireFullyCanonicalSig : bool { no, yes };
|
||||
|
||||
/** Check the signature.
|
||||
@param requireCanonicalSig If `true`, check that the signature is fully
|
||||
canonical. If `false`, only check that the signature is valid.
|
||||
@param rules The current ledger rules.
|
||||
@return `true` if valid signature. If invalid, the error message string.
|
||||
*/
|
||||
Expected<void, std::string>
|
||||
checkSign(RequireFullyCanonicalSig requireCanonicalSig, Rules const& rules)
|
||||
const;
|
||||
@@ -150,17 +163,34 @@ public:
|
||||
char status,
|
||||
std::string const& escapedMetaData) const;
|
||||
|
||||
std::vector<uint256>
|
||||
std::vector<uint256> const&
|
||||
getBatchTransactionIDs() const;
|
||||
|
||||
private:
|
||||
/** Check the signature.
|
||||
@param requireCanonicalSig If `true`, check that the signature is fully
|
||||
canonical. If `false`, only check that the signature is valid.
|
||||
@param rules The current ledger rules.
|
||||
@param sigObject Reference to object that contains the signature fields.
|
||||
Will be *this more often than not.
|
||||
@return `true` if valid signature. If invalid, the error message string.
|
||||
*/
|
||||
Expected<void, std::string>
|
||||
checkSingleSign(RequireFullyCanonicalSig requireCanonicalSig) const;
|
||||
checkSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
Rules const& rules,
|
||||
STObject const& sigObject) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkSingleSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
STObject const& sigObject) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkMultiSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
Rules const& rules) const;
|
||||
Rules const& rules,
|
||||
STObject const& sigObject) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkBatchSingleSign(
|
||||
@@ -179,7 +209,7 @@ private:
|
||||
move(std::size_t n, void* buf) override;
|
||||
|
||||
friend class detail::STVar;
|
||||
mutable std::vector<uint256> batch_txn_ids_;
|
||||
mutable std::vector<uint256> batchTxnIds_;
|
||||
};
|
||||
|
||||
bool
|
||||
|
||||
@@ -673,7 +673,8 @@ isTerRetry(TER x) noexcept
|
||||
inline bool
|
||||
isTesSuccess(TER x) noexcept
|
||||
{
|
||||
return (x == tesSUCCESS);
|
||||
// Makes use of TERSubset::operator bool()
|
||||
return !(x);
|
||||
}
|
||||
|
||||
inline bool
|
||||
|
||||
@@ -156,14 +156,14 @@ constexpr std::uint32_t const tfMPTokenIssuanceCreateMask =
|
||||
|
||||
// MPTokenIssuanceCreate MutableFlags:
|
||||
// Indicating specific fields or flags may be changed after issuance.
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanLock = lmfMPTCanMutateCanLock;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateRequireAuth = lmfMPTCanMutateRequireAuth;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanEscrow = lmfMPTCanMutateCanEscrow;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTrade = lmfMPTCanMutateCanTrade;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTransfer = lmfMPTCanMutateCanTransfer;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanClawback = lmfMPTCanMutateCanClawback;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateMetadata = lmfMPTCanMutateMetadata;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateTransferFee = lmfMPTCanMutateTransferFee;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanLock = lsmfMPTCanMutateCanLock;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateRequireAuth = lsmfMPTCanMutateRequireAuth;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanEscrow = lsmfMPTCanMutateCanEscrow;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTrade = lsmfMPTCanMutateCanTrade;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTransfer = lsmfMPTCanMutateCanTransfer;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanClawback = lsmfMPTCanMutateCanClawback;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateMetadata = lsmfMPTCanMutateMetadata;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateTransferFee = lsmfMPTCanMutateTransferFee;
|
||||
constexpr std::uint32_t const tmfMPTokenIssuanceCreateMutableMask =
|
||||
~(tmfMPTCanMutateCanLock | tmfMPTCanMutateRequireAuth | tmfMPTCanMutateCanEscrow | tmfMPTCanMutateCanTrade
|
||||
| tmfMPTCanMutateCanTransfer | tmfMPTCanMutateCanClawback | tmfMPTCanMutateMetadata | tmfMPTCanMutateTransferFee);
|
||||
@@ -285,6 +285,32 @@ constexpr std::uint32_t tfIndependent = 0x00080000;
|
||||
constexpr std::uint32_t const tfBatchMask =
|
||||
~(tfUniversal | tfAllOrNothing | tfOnlyOne | tfUntilFailure | tfIndependent) | tfInnerBatchTxn;
|
||||
|
||||
// LoanSet and LoanPay flags:
|
||||
// LoanSet: True, indicates the loan supports overpayments
|
||||
// LoanPay: True, indicates any excess in this payment can be used
|
||||
// as an overpayment. False, no overpayments will be taken.
|
||||
constexpr std::uint32_t const tfLoanOverpayment = 0x00010000;
|
||||
// LoanPay exclusive flags:
|
||||
// tfLoanFullPayment: True, indicates that the payment is an early
|
||||
// full payment. It must pay the entire loan including close
|
||||
// interest and fees, or it will fail. False: Not a full payment.
|
||||
constexpr std::uint32_t const tfLoanFullPayment = 0x00020000;
|
||||
// tfLoanLatePayment: True, indicates that the payment is late,
|
||||
// and includes late interest and fees. If the loan is not late,
|
||||
// it will fail. False: not a late payment. If the current payment
|
||||
// is overdue, the transaction will fail.
|
||||
constexpr std::uint32_t const tfLoanLatePayment = 0x00040000;
|
||||
constexpr std::uint32_t const tfLoanSetMask = ~(tfUniversal |
|
||||
tfLoanOverpayment);
|
||||
constexpr std::uint32_t const tfLoanPayMask = ~(tfUniversal |
|
||||
tfLoanOverpayment | tfLoanFullPayment | tfLoanLatePayment);
|
||||
|
||||
// LoanManage flags:
|
||||
constexpr std::uint32_t const tfLoanDefault = 0x00010000;
|
||||
constexpr std::uint32_t const tfLoanImpair = 0x00020000;
|
||||
constexpr std::uint32_t const tfLoanUnimpair = 0x00040000;
|
||||
constexpr std::uint32_t const tfLoanManageMask = ~(tfUniversal | tfLoanDefault | tfLoanImpair | tfLoanUnimpair);
|
||||
|
||||
// clang-format on
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -129,10 +129,12 @@ inplace_bigint_div_rem(std::span<uint64_t> numerator, std::uint64_t divisor)
|
||||
{
|
||||
// should never happen, but if it does then it seems natural to define
|
||||
// a null set of numbers to be zero, so the remainder is also zero.
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::b58_fast::detail::inplace_bigint_div_rem : empty "
|
||||
"numerator");
|
||||
return 0;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
auto to_u128 = [](std::uint64_t high,
|
||||
|
||||
@@ -27,17 +27,19 @@
|
||||
#error "undefined macro: XRPL_RETIRE"
|
||||
#endif
|
||||
|
||||
// clang-format off
|
||||
|
||||
// Add new amendments to the top of this list.
|
||||
// Keep it sorted in reverse chronological order.
|
||||
// If you add an amendment here, then do not forget to increment `numFeatures`
|
||||
// in include/xrpl/protocol/Feature.h.
|
||||
|
||||
XRPL_FIX (IncludeKeyletFields, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(LendingProtocol, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (DirectoryLimit, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (IncludeKeyletFields, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(DynamicMPT, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (TokenEscrowV1, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (DelegateV1_1, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (PriceOracleOrder, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (MPTDeliveredAmount, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (MPTDeliveredAmount, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (AMMClawbackRounding, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(TokenEscrow, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (EnforceNFTokenTrustlineV2, Supported::yes, VoteBehavior::DefaultNo)
|
||||
@@ -45,7 +47,7 @@ XRPL_FIX (AMMv1_3, Supported::yes, VoteBehavior::DefaultNo
|
||||
XRPL_FEATURE(PermissionedDEX, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(Batch, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(SingleAssetVault, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(PermissionDelegation, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(PermissionDelegation, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (PayChanCancelAfter, Supported::yes, VoteBehavior::DefaultNo)
|
||||
// Check flags in Credential transactions
|
||||
XRPL_FIX (InvalidTxFlags, Supported::yes, VoteBehavior::DefaultNo)
|
||||
@@ -156,3 +158,5 @@ XRPL_RETIRE(fix1512)
|
||||
XRPL_RETIRE(fix1523)
|
||||
XRPL_RETIRE(fix1528)
|
||||
XRPL_RETIRE(FlowCross)
|
||||
|
||||
// clang-format on
|
||||
|
||||
@@ -168,6 +168,7 @@ LEDGER_ENTRY(ltACCOUNT_ROOT, 0x0061, AccountRoot, account, ({
|
||||
{sfFirstNFTokenSequence, soeOPTIONAL},
|
||||
{sfAMMID, soeOPTIONAL}, // pseudo-account designator
|
||||
{sfVaultID, soeOPTIONAL}, // pseudo-account designator
|
||||
{sfLoanBrokerID, soeOPTIONAL}, // pseudo-account designator
|
||||
}))
|
||||
|
||||
/** A ledger object which contains a list of object identifiers.
|
||||
@@ -457,7 +458,7 @@ LEDGER_ENTRY(ltCREDENTIAL, 0x0081, Credential, credential, ({
|
||||
{sfExpiration, soeOPTIONAL},
|
||||
{sfURI, soeOPTIONAL},
|
||||
{sfIssuerNode, soeREQUIRED},
|
||||
{sfSubjectNode, soeREQUIRED},
|
||||
{sfSubjectNode, soeOPTIONAL},
|
||||
{sfPreviousTxnID, soeREQUIRED},
|
||||
{sfPreviousTxnLgrSeq, soeREQUIRED},
|
||||
}))
|
||||
@@ -498,10 +499,10 @@ LEDGER_ENTRY(ltVAULT, 0x0084, Vault, vault, ({
|
||||
{sfAccount, soeREQUIRED},
|
||||
{sfData, soeOPTIONAL},
|
||||
{sfAsset, soeREQUIRED},
|
||||
{sfAssetsTotal, soeREQUIRED},
|
||||
{sfAssetsAvailable, soeREQUIRED},
|
||||
{sfAssetsTotal, soeDEFAULT},
|
||||
{sfAssetsAvailable, soeDEFAULT},
|
||||
{sfAssetsMaximum, soeDEFAULT},
|
||||
{sfLossUnrealized, soeREQUIRED},
|
||||
{sfLossUnrealized, soeDEFAULT},
|
||||
{sfShareMPTID, soeREQUIRED},
|
||||
{sfWithdrawalPolicy, soeREQUIRED},
|
||||
{sfScale, soeDEFAULT},
|
||||
@@ -509,5 +510,117 @@ LEDGER_ENTRY(ltVAULT, 0x0084, Vault, vault, ({
|
||||
// no PermissionedDomainID ever (use MPTIssuance.sfDomainID)
|
||||
}))
|
||||
|
||||
/** Reserve 0x0084-0x0087 for future Vault-related objects. */
|
||||
|
||||
/** A ledger object representing a loan broker
|
||||
|
||||
\sa keylet::loanbroker
|
||||
*/
|
||||
LEDGER_ENTRY(ltLOAN_BROKER, 0x0088, LoanBroker, loan_broker, ({
|
||||
{sfPreviousTxnID, soeREQUIRED},
|
||||
{sfPreviousTxnLgrSeq, soeREQUIRED},
|
||||
{sfSequence, soeREQUIRED},
|
||||
{sfOwnerNode, soeREQUIRED},
|
||||
{sfVaultNode, soeREQUIRED},
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAccount, soeREQUIRED},
|
||||
{sfOwner, soeREQUIRED},
|
||||
{sfLoanSequence, soeREQUIRED},
|
||||
{sfData, soeDEFAULT},
|
||||
{sfManagementFeeRate, soeDEFAULT},
|
||||
{sfOwnerCount, soeDEFAULT},
|
||||
{sfDebtTotal, soeDEFAULT},
|
||||
{sfDebtMaximum, soeDEFAULT},
|
||||
{sfCoverAvailable, soeDEFAULT},
|
||||
{sfCoverRateMinimum, soeDEFAULT},
|
||||
{sfCoverRateLiquidation, soeDEFAULT},
|
||||
}))
|
||||
|
||||
/** A ledger object representing a loan between a Borrower and a Loan Broker
|
||||
|
||||
\sa keylet::loan
|
||||
*/
|
||||
LEDGER_ENTRY(ltLOAN, 0x0089, Loan, loan, ({
|
||||
{sfPreviousTxnID, soeREQUIRED},
|
||||
{sfPreviousTxnLgrSeq, soeREQUIRED},
|
||||
{sfOwnerNode, soeREQUIRED},
|
||||
{sfLoanBrokerNode, soeREQUIRED},
|
||||
{sfLoanBrokerID, soeREQUIRED},
|
||||
{sfLoanSequence, soeREQUIRED},
|
||||
{sfBorrower, soeREQUIRED},
|
||||
{sfLoanOriginationFee, soeDEFAULT},
|
||||
{sfLoanServiceFee, soeDEFAULT},
|
||||
{sfLatePaymentFee, soeDEFAULT},
|
||||
{sfClosePaymentFee, soeDEFAULT},
|
||||
{sfOverpaymentFee, soeDEFAULT},
|
||||
{sfInterestRate, soeDEFAULT},
|
||||
{sfLateInterestRate, soeDEFAULT},
|
||||
{sfCloseInterestRate, soeDEFAULT},
|
||||
{sfOverpaymentInterestRate, soeDEFAULT},
|
||||
{sfStartDate, soeREQUIRED},
|
||||
{sfPaymentInterval, soeREQUIRED},
|
||||
{sfGracePeriod, soeDEFAULT},
|
||||
{sfPreviousPaymentDate, soeDEFAULT},
|
||||
{sfNextPaymentDueDate, soeDEFAULT},
|
||||
// The loan object tracks these values:
|
||||
//
|
||||
// - PaymentRemaining: The number of payments left in the loan. When it
|
||||
// reaches 0, the loan is paid off, and all other relevant values
|
||||
// must also be 0.
|
||||
//
|
||||
// - PeriodicPayment: The fixed, unrounded amount to be paid each
|
||||
// interval. Stored with as much precision as possible.
|
||||
// Payment transactions must round this value *UP*.
|
||||
//
|
||||
// - TotalValueOutstanding: The rounded total amount owed by the
|
||||
// borrower to the lender / vault.
|
||||
//
|
||||
// - PrincipalOutstanding: The rounded portion of the
|
||||
// TotalValueOutstanding that is from the principal borrowed.
|
||||
//
|
||||
// - ManagementFeeOutstanding: The rounded portion of the
|
||||
// TotalValueOutstanding that represents management fees
|
||||
// specifically owed to the broker based on the initial
|
||||
// loan parameters.
|
||||
//
|
||||
// There are additional values that can be computed from these:
|
||||
//
|
||||
// - InterestOutstanding = TotalValueOutstanding - PrincipalOutstanding
|
||||
// The total amount of interest still pending on the loan,
|
||||
// independent of management fees.
|
||||
//
|
||||
// - InterestOwedToVault = InterestOutstanding - ManagementFeeOutstanding
|
||||
// The amount of the total interest that is owed to the vault, and
|
||||
// will be sent to it as part of a payment.
|
||||
//
|
||||
// - TrueTotalLoanValue = PaymentRemaining * PeriodicPayment
|
||||
// The unrounded true total value of the loan.
|
||||
//
|
||||
// - TrueTotalPrincipalOutstanding can be computed using the algorithm
|
||||
// in the ripple::detail::loanPrincipalFromPeriodicPayment function.
|
||||
//
|
||||
// - TrueTotalInterestOutstanding = TrueTotalLoanValue -
|
||||
// TrueTotalPrincipalOutstanding
|
||||
// The unrounded true total interest remaining.
|
||||
//
|
||||
// - TrueTotalManagementFeeOutstanding = TrueTotalInterestOutstanding *
|
||||
// LoanBroker.ManagementFeeRate
|
||||
// The unrounded true total fee still owed to the broker.
|
||||
//
|
||||
// Note that the "True" values may differ significantly from the tracked
|
||||
// rounded values. (A small worked example follows this ledger entry.)
|
||||
{sfPaymentRemaining, soeDEFAULT},
|
||||
{sfPeriodicPayment, soeREQUIRED},
|
||||
{sfPrincipalOutstanding, soeDEFAULT},
|
||||
{sfTotalValueOutstanding, soeDEFAULT},
|
||||
{sfManagementFeeOutstanding, soeDEFAULT},
|
||||
// Based on the computed total value at creation, used for
|
||||
// rounding calculated values so they are all on a
|
||||
// consistent scale - that is, they all have the same
|
||||
// number of digits after the decimal point (excluding
|
||||
// trailing zeros).
|
||||
{sfLoanScale, soeDEFAULT},
|
||||
}))
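A small worked example of the relationships listed above (the numbers are illustrative only):

// With PaymentRemaining = 10, PeriodicPayment = 105, TotalValueOutstanding = 1050,
// PrincipalOutstanding = 1000 and ManagementFeeOutstanding = 5:
//   InterestOutstanding = 1050 - 1000 = 50
//   InterestOwedToVault = 50 - 5 = 45
//   TrueTotalLoanValue  = 10 * 105 = 1050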
|
||||
|
||||
#undef EXPAND
|
||||
#undef LEDGER_ENTRY_DUPLICATE
|
||||
|
||||
@@ -24,6 +24,8 @@
|
||||
#error "undefined macro: TYPED_SFIELD"
|
||||
#endif
|
||||
|
||||
// clang-format off
|
||||
|
||||
// untyped
|
||||
UNTYPED_SFIELD(sfLedgerEntry, LEDGERENTRY, 257)
|
||||
UNTYPED_SFIELD(sfTransaction, TRANSACTION, 257)
|
||||
@@ -59,6 +61,7 @@ TYPED_SFIELD(sfHookEmitCount, UINT16, 18)
|
||||
TYPED_SFIELD(sfHookExecutionIndex, UINT16, 19)
|
||||
TYPED_SFIELD(sfHookApiVersion, UINT16, 20)
|
||||
TYPED_SFIELD(sfLedgerFixType, UINT16, 21)
|
||||
TYPED_SFIELD(sfManagementFeeRate, UINT16, 22) // 1/10 basis points (bips)
|
||||
|
||||
// 32-bit integers (common)
|
||||
TYPED_SFIELD(sfNetworkID, UINT32, 1)
|
||||
@@ -115,6 +118,21 @@ TYPED_SFIELD(sfFirstNFTokenSequence, UINT32, 50)
|
||||
TYPED_SFIELD(sfOracleDocumentID, UINT32, 51)
|
||||
TYPED_SFIELD(sfPermissionValue, UINT32, 52)
|
||||
TYPED_SFIELD(sfMutableFlags, UINT32, 53)
|
||||
TYPED_SFIELD(sfStartDate, UINT32, 54)
|
||||
TYPED_SFIELD(sfPaymentInterval, UINT32, 55)
|
||||
TYPED_SFIELD(sfGracePeriod, UINT32, 56)
|
||||
TYPED_SFIELD(sfPreviousPaymentDate, UINT32, 57)
|
||||
TYPED_SFIELD(sfNextPaymentDueDate, UINT32, 58)
|
||||
TYPED_SFIELD(sfPaymentRemaining, UINT32, 59)
|
||||
TYPED_SFIELD(sfPaymentTotal, UINT32, 60)
|
||||
TYPED_SFIELD(sfLoanSequence, UINT32, 61)
|
||||
TYPED_SFIELD(sfCoverRateMinimum, UINT32, 62) // 1/10 basis points (bips)
|
||||
TYPED_SFIELD(sfCoverRateLiquidation, UINT32, 63) // 1/10 basis points (bips)
|
||||
TYPED_SFIELD(sfOverpaymentFee, UINT32, 64) // 1/10 basis points (bips)
|
||||
TYPED_SFIELD(sfInterestRate, UINT32, 65) // 1/10 basis points (bips)
|
||||
TYPED_SFIELD(sfLateInterestRate, UINT32, 66) // 1/10 basis points (bips)
|
||||
TYPED_SFIELD(sfCloseInterestRate, UINT32, 67) // 1/10 basis points (bips)
|
||||
TYPED_SFIELD(sfOverpaymentInterestRate, UINT32, 68) // 1/10 basis points (bips)
|
||||
|
||||
// 64-bit integers (common)
|
||||
TYPED_SFIELD(sfIndexNext, UINT64, 1)
|
||||
@@ -146,6 +164,8 @@ TYPED_SFIELD(sfMPTAmount, UINT64, 26, SField::sMD_BaseTen|SFie
|
||||
TYPED_SFIELD(sfIssuerNode, UINT64, 27)
|
||||
TYPED_SFIELD(sfSubjectNode, UINT64, 28)
|
||||
TYPED_SFIELD(sfLockedAmount, UINT64, 29, SField::sMD_BaseTen|SField::sMD_Default)
|
||||
TYPED_SFIELD(sfVaultNode, UINT64, 30)
|
||||
TYPED_SFIELD(sfLoanBrokerNode, UINT64, 31)
|
||||
|
||||
// 128-bit
|
||||
TYPED_SFIELD(sfEmailHash, UINT128, 1)
|
||||
@@ -200,6 +220,9 @@ TYPED_SFIELD(sfDomainID, UINT256, 34)
|
||||
TYPED_SFIELD(sfVaultID, UINT256, 35,
|
||||
SField::sMD_PseudoAccount | SField::sMD_Default)
|
||||
TYPED_SFIELD(sfParentBatchID, UINT256, 36)
|
||||
TYPED_SFIELD(sfLoanBrokerID, UINT256, 37,
|
||||
SField::sMD_PseudoAccount | SField::sMD_Default)
|
||||
TYPED_SFIELD(sfLoanID, UINT256, 38)
|
||||
|
||||
// number (common)
|
||||
TYPED_SFIELD(sfNumber, NUMBER, 1)
|
||||
@@ -207,6 +230,21 @@ TYPED_SFIELD(sfAssetsAvailable, NUMBER, 2)
|
||||
TYPED_SFIELD(sfAssetsMaximum, NUMBER, 3)
|
||||
TYPED_SFIELD(sfAssetsTotal, NUMBER, 4)
|
||||
TYPED_SFIELD(sfLossUnrealized, NUMBER, 5)
|
||||
TYPED_SFIELD(sfDebtTotal, NUMBER, 6)
|
||||
TYPED_SFIELD(sfDebtMaximum, NUMBER, 7)
|
||||
TYPED_SFIELD(sfCoverAvailable, NUMBER, 8)
|
||||
TYPED_SFIELD(sfLoanOriginationFee, NUMBER, 9)
|
||||
TYPED_SFIELD(sfLoanServiceFee, NUMBER, 10)
|
||||
TYPED_SFIELD(sfLatePaymentFee, NUMBER, 11)
|
||||
TYPED_SFIELD(sfClosePaymentFee, NUMBER, 12)
|
||||
TYPED_SFIELD(sfPrincipalOutstanding, NUMBER, 13)
|
||||
TYPED_SFIELD(sfPrincipalRequested, NUMBER, 14)
|
||||
TYPED_SFIELD(sfTotalValueOutstanding, NUMBER, 15)
|
||||
TYPED_SFIELD(sfPeriodicPayment, NUMBER, 16)
|
||||
TYPED_SFIELD(sfManagementFeeOutstanding, NUMBER, 17)
|
||||
|
||||
// int32
|
||||
TYPED_SFIELD(sfLoanScale, INT32, 1)
|
||||
|
||||
// currency amount (common)
|
||||
TYPED_SFIELD(sfAmount, AMOUNT, 1)
|
||||
@@ -302,6 +340,8 @@ TYPED_SFIELD(sfAttestationRewardAccount, ACCOUNT, 21)
|
||||
TYPED_SFIELD(sfLockingChainDoor, ACCOUNT, 22)
|
||||
TYPED_SFIELD(sfIssuingChainDoor, ACCOUNT, 23)
|
||||
TYPED_SFIELD(sfSubject, ACCOUNT, 24)
|
||||
TYPED_SFIELD(sfBorrower, ACCOUNT, 25)
|
||||
TYPED_SFIELD(sfCounterparty, ACCOUNT, 26)
|
||||
|
||||
// vector of 256-bit
|
||||
TYPED_SFIELD(sfIndexes, VECTOR256, 1, SField::sMD_Never)
|
||||
@@ -365,6 +405,7 @@ UNTYPED_SFIELD(sfCredential, OBJECT, 33)
|
||||
UNTYPED_SFIELD(sfRawTransaction, OBJECT, 34)
|
||||
UNTYPED_SFIELD(sfBatchSigner, OBJECT, 35)
|
||||
UNTYPED_SFIELD(sfBook, OBJECT, 36)
|
||||
UNTYPED_SFIELD(sfCounterpartySignature, OBJECT, 37, SField::sMD_Default, SField::notSigning)
|
||||
|
||||
// array of objects (common)
|
||||
// ARRAY/1 is reserved for end of array
|
||||
@@ -399,3 +440,5 @@ UNTYPED_SFIELD(sfAcceptedCredentials, ARRAY, 28)
|
||||
UNTYPED_SFIELD(sfPermissions, ARRAY, 29)
|
||||
UNTYPED_SFIELD(sfRawTransactions, ARRAY, 30)
|
||||
UNTYPED_SFIELD(sfBatchSigners, ARRAY, 31, SField::sMD_Default, SField::notSigning)
|
||||
|
||||
// clang-format on
|
||||
|
||||
@@ -851,7 +851,7 @@ TRANSACTION(ttDELEGATE_SET, 64, DelegateSet,
|
||||
TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
createPseudoAcct | createMPTIssuance,
|
||||
createPseudoAcct | createMPTIssuance | mustModifyVault,
|
||||
({
|
||||
{sfAsset, soeREQUIRED, soeMPTSupported},
|
||||
{sfAssetsMaximum, soeOPTIONAL},
|
||||
@@ -869,7 +869,7 @@ TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
|
||||
TRANSACTION(ttVAULT_SET, 66, VaultSet,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
noPriv,
|
||||
mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAssetsMaximum, soeOPTIONAL},
|
||||
@@ -884,7 +884,7 @@ TRANSACTION(ttVAULT_SET, 66, VaultSet,
|
||||
TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mustDeleteAcct | destroyMPTIssuance,
|
||||
mustDeleteAcct | destroyMPTIssuance | mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
}))
|
||||
@@ -896,7 +896,7 @@ TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
|
||||
TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayAuthorizeMPT,
|
||||
mayAuthorizeMPT | mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
@@ -909,7 +909,7 @@ TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
|
||||
TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayDeleteMPT,
|
||||
mayDeleteMPT | mayAuthorizeMPT | mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
@@ -924,7 +924,7 @@ TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
|
||||
TRANSACTION(ttVAULT_CLAWBACK, 70, VaultClawback,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayDeleteMPT,
|
||||
mayDeleteMPT | mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfHolder, soeREQUIRED},
|
||||
@@ -944,6 +944,139 @@ TRANSACTION(ttBATCH, 71, Batch,
|
||||
{sfBatchSigners, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
/** Reserve 72-73 for future Vault-related transactions */
|
||||
|
||||
/** This transaction creates and updates a Loan Broker */
|
||||
#if TRANSACTION_INCLUDE
|
||||
# include <xrpld/app/tx/detail/LoanBrokerSet.h>
|
||||
#endif
|
||||
TRANSACTION(ttLOAN_BROKER_SET, 74, LoanBrokerSet,
|
||||
Delegation::delegatable,
|
||||
featureLendingProtocol,
|
||||
createPseudoAcct | mayAuthorizeMPT, ({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfLoanBrokerID, soeOPTIONAL},
|
||||
{sfData, soeOPTIONAL},
|
||||
{sfManagementFeeRate, soeOPTIONAL},
|
||||
{sfDebtMaximum, soeOPTIONAL},
|
||||
{sfCoverRateMinimum, soeOPTIONAL},
|
||||
{sfCoverRateLiquidation, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
/** This transaction deletes a Loan Broker */
|
||||
#if TRANSACTION_INCLUDE
|
||||
# include <xrpld/app/tx/detail/LoanBrokerDelete.h>
|
||||
#endif
|
||||
TRANSACTION(ttLOAN_BROKER_DELETE, 75, LoanBrokerDelete,
|
||||
Delegation::delegatable,
|
||||
featureLendingProtocol,
|
||||
mustDeleteAcct | mayAuthorizeMPT, ({
|
||||
{sfLoanBrokerID, soeREQUIRED},
|
||||
}))
|
||||
|
||||
/** This transaction deposits First Loss Capital into a Loan Broker */
|
||||
#if TRANSACTION_INCLUDE
|
||||
# include <xrpld/app/tx/detail/LoanBrokerCoverDeposit.h>
|
||||
#endif
|
||||
TRANSACTION(ttLOAN_BROKER_COVER_DEPOSIT, 76, LoanBrokerCoverDeposit,
|
||||
Delegation::delegatable,
|
||||
featureLendingProtocol,
|
||||
noPriv, ({
|
||||
{sfLoanBrokerID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
}))
|
||||
|
||||
/** This transaction withdraws First Loss Capital from a Loan Broker */
|
||||
#if TRANSACTION_INCLUDE
|
||||
# include <xrpld/app/tx/detail/LoanBrokerCoverWithdraw.h>
|
||||
#endif
|
||||
TRANSACTION(ttLOAN_BROKER_COVER_WITHDRAW, 77, LoanBrokerCoverWithdraw,
|
||||
Delegation::delegatable,
|
||||
featureLendingProtocol,
|
||||
mayAuthorizeMPT, ({
|
||||
{sfLoanBrokerID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
{sfDestination, soeOPTIONAL},
|
||||
{sfDestinationTag, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
/** This transaction claws back First Loss Capital from a Loan Broker to
|
||||
the issuer of the capital */
|
||||
#if TRANSACTION_INCLUDE
|
||||
# include <xrpld/app/tx/detail/LoanBrokerCoverClawback.h>
|
||||
#endif
|
||||
TRANSACTION(ttLOAN_BROKER_COVER_CLAWBACK, 78, LoanBrokerCoverClawback,
|
||||
Delegation::delegatable,
|
||||
featureLendingProtocol,
|
||||
noPriv, ({
|
||||
{sfLoanBrokerID, soeOPTIONAL},
|
||||
{sfAmount, soeOPTIONAL, soeMPTSupported},
|
||||
}))
|
||||
|
||||
/** This transaction creates a Loan */
#if TRANSACTION_INCLUDE
# include <xrpld/app/tx/detail/LoanSet.h>
#endif
TRANSACTION(ttLOAN_SET, 80, LoanSet,
Delegation::delegatable,
featureLendingProtocol,
mayAuthorizeMPT | mustModifyVault, ({
{sfLoanBrokerID, soeREQUIRED},
{sfData, soeOPTIONAL},
{sfCounterparty, soeOPTIONAL},
{sfCounterpartySignature, soeOPTIONAL},
{sfLoanOriginationFee, soeOPTIONAL},
{sfLoanServiceFee, soeOPTIONAL},
{sfLatePaymentFee, soeOPTIONAL},
{sfClosePaymentFee, soeOPTIONAL},
{sfOverpaymentFee, soeOPTIONAL},
{sfInterestRate, soeOPTIONAL},
{sfLateInterestRate, soeOPTIONAL},
{sfCloseInterestRate, soeOPTIONAL},
{sfOverpaymentInterestRate, soeOPTIONAL},
{sfPrincipalRequested, soeREQUIRED},
{sfPaymentTotal, soeOPTIONAL},
{sfPaymentInterval, soeOPTIONAL},
{sfGracePeriod, soeOPTIONAL},
}))
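
For orientation only: given the fields above, a level periodic payment can be sketched with the standard annuity formula. This is a generic illustration, not the Lending Protocol's specified computation; the function name is hypothetical, and the interest rate is treated as the rate per payment period purely to keep the example short.

    #include <cmath>
    #include <cstdint>

    double
    levelPeriodicPayment(
        double principalRequested,  // cf. sfPrincipalRequested
        std::uint32_t interestRate, // cf. sfInterestRate, 1/10 basis points
        std::uint32_t paymentTotal) // cf. sfPaymentTotal, number of payments
    {
        double const r = interestRate / 100000.0; // per-period rate as a fraction
        if (r == 0.0)
            return principalRequested / paymentTotal;
        double const growth = std::pow(1.0 + r, paymentTotal);
        return principalRequested * r * growth / (growth - 1.0);
    }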
|
||||
|
||||
/** This transaction deletes an existing Loan */
|
||||
#if TRANSACTION_INCLUDE
|
||||
# include <xrpld/app/tx/detail/LoanDelete.h>
|
||||
#endif
|
||||
TRANSACTION(ttLOAN_DELETE, 81, LoanDelete,
|
||||
Delegation::delegatable,
|
||||
featureLendingProtocol,
|
||||
noPriv, ({
|
||||
{sfLoanID, soeREQUIRED},
|
||||
}))
|
||||
|
||||
/** This transaction is used to change the delinquency status of an existing Loan */
|
||||
#if TRANSACTION_INCLUDE
|
||||
# include <xrpld/app/tx/detail/LoanManage.h>
|
||||
#endif
|
||||
TRANSACTION(ttLOAN_MANAGE, 82, LoanManage,
|
||||
Delegation::delegatable,
|
||||
featureLendingProtocol,
|
||||
// All of the LoanManage options will modify the vault, but the
|
||||
// transaction can succeed without options, essentially making it
|
||||
// a noop.
|
||||
mayModifyVault, ({
|
||||
{sfLoanID, soeREQUIRED},
|
||||
}))
|
||||
|
||||
/** The Borrower uses this transaction to make a Payment on the Loan. */
|
||||
#if TRANSACTION_INCLUDE
|
||||
# include <xrpld/app/tx/detail/LoanPay.h>
|
||||
#endif
|
||||
TRANSACTION(ttLOAN_PAY, 84, LoanPay,
|
||||
Delegation::delegatable,
|
||||
featureLendingProtocol,
|
||||
mayAuthorizeMPT | mustModifyVault, ({
|
||||
{sfLoanID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
}))
|
||||
|
||||
/** This system-generated transaction type is used to update the status of the various amendments.
|
||||
|
||||
For details, see: https://xrpl.org/amendments.html
|
||||
|
||||
@@ -59,6 +59,8 @@ JSS(BaseAsset); // in: Oracle
|
||||
JSS(BidMax); // in: AMM Bid
|
||||
JSS(BidMin); // in: AMM Bid
|
||||
JSS(ClearFlag); // field.
|
||||
JSS(Counterparty); // field.
|
||||
JSS(CounterpartySignature);// field.
|
||||
JSS(DeliverMax); // out: alias to Amount
|
||||
JSS(DeliverMin); // in: TransactionSign
|
||||
JSS(Destination); // in: TransactionSign; field.
|
||||
@@ -392,6 +394,8 @@ JSS(load_factor_local); // out: NetworkOPs
|
||||
JSS(load_factor_net); // out: NetworkOPs
|
||||
JSS(load_factor_server); // out: NetworkOPs
|
||||
JSS(load_fee); // out: LoadFeeTrackImp, NetworkOPs
|
||||
JSS(loan_broker_id); // in: LedgerEntry
|
||||
JSS(loan_seq); // in: LedgerEntry
|
||||
JSS(local); // out: resource/Logic.h
|
||||
JSS(local_txs); // out: GetCounts
|
||||
JSS(local_static_keys); // out: ValidatorList
|
||||
@@ -504,6 +508,7 @@ JSS(propose_seq); // out: LedgerPropose
|
||||
JSS(proposers); // out: NetworkOPs, LedgerConsensus
|
||||
JSS(protocol); // out: NetworkOPs, PeerImp
|
||||
JSS(proxied); // out: RPC ping
|
||||
JSS(pseudo_account); // out: AccountInfo
|
||||
JSS(pubkey_node); // out: NetworkOPs
|
||||
JSS(pubkey_publisher); // out: ValidatorList
|
||||
JSS(pubkey_validator); // out: NetworkOPs, ValidatorList
|
||||
@@ -569,6 +574,7 @@ JSS(settle_delay); // out: AccountChannels
|
||||
JSS(severity); // in: LogLevel
|
||||
JSS(shares); // out: VaultInfo
|
||||
JSS(signature); // out: NetworkOPs, ChannelAuthorize
|
||||
JSS(signature_target); // in: TransactionSign
|
||||
JSS(signature_verified); // out: ChannelVerify
|
||||
JSS(signing_key); // out: NetworkOPs
|
||||
JSS(signing_keys); // out: ValidatorList
|
||||
|
||||
@@ -436,10 +436,12 @@ public:
|
||||
admin_.erase(admin_.iterator_to(entry));
|
||||
break;
|
||||
default:
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::Resource::Logic::release : invalid entry "
|
||||
"kind");
|
||||
break;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
inactive_.push_back(entry);
|
||||
entry.whenExpires = m_clock.now() + secondsUntilExpiration;
|
||||
|
||||
@@ -25,7 +25,7 @@
|
||||
#include <xrpl/server/Port.h>
|
||||
#include <xrpl/server/detail/ServerImpl.h>
|
||||
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/io_service.hpp>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
@@ -34,10 +34,10 @@ template <class Handler>
|
||||
std::unique_ptr<Server>
|
||||
make_Server(
|
||||
Handler& handler,
|
||||
boost::asio::io_context& io_context,
|
||||
boost::asio::io_service& io_service,
|
||||
beast::Journal journal)
|
||||
{
|
||||
return std::make_unique<ServerImpl<Handler>>(handler, io_context, journal);
|
||||
return std::make_unique<ServerImpl<Handler>>(handler, io_service, journal);
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -88,7 +88,9 @@ public:
|
||||
++iter)
|
||||
{
|
||||
typename BufferSequence::value_type const& buffer(*iter);
|
||||
write(buffer.data(), boost::asio::buffer_size(buffer));
|
||||
write(
|
||||
boost::asio::buffer_cast<void const*>(buffer),
|
||||
boost::asio::buffer_size(buffer));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -102,7 +104,7 @@ public:
|
||||
|
||||
/** Detach the session.
|
||||
This holds the session open so that the response can be sent
|
||||
asynchronously. Calls to io_context::run made by the server
|
||||
asynchronously. Calls to io_service::run made by the server
|
||||
will not return until all detached sessions are closed.
|
||||
*/
|
||||
virtual std::shared_ptr<Session>
|
||||
|
||||
@@ -24,13 +24,11 @@
|
||||
#include <xrpl/beast/net/IPAddressConversion.h>
|
||||
#include <xrpl/beast/utility/instrumentation.h>
|
||||
#include <xrpl/server/Session.h>
|
||||
#include <xrpl/server/detail/Spawn.h>
|
||||
#include <xrpl/server/detail/io_list.h>
|
||||
|
||||
#include <boost/asio/ip/tcp.hpp>
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/asio/ssl/stream.hpp>
|
||||
#include <boost/asio/strand.hpp>
|
||||
#include <boost/asio/streambuf.hpp>
|
||||
#include <boost/beast/core/stream_traits.hpp>
|
||||
#include <boost/beast/http/dynamic_body.hpp>
|
||||
@@ -217,8 +215,8 @@ BaseHTTPPeer<Handler, Impl>::BaseHTTPPeer(
|
||||
ConstBufferSequence const& buffers)
|
||||
: port_(port)
|
||||
, handler_(handler)
|
||||
, work_(boost::asio::make_work_guard(executor))
|
||||
, strand_(boost::asio::make_strand(executor))
|
||||
, work_(executor)
|
||||
, strand_(executor)
|
||||
, remote_address_(remote_address)
|
||||
, journal_(journal)
|
||||
{
|
||||
@@ -358,7 +356,7 @@ BaseHTTPPeer<Handler, Impl>::on_write(
|
||||
return;
|
||||
if (graceful_)
|
||||
return do_close();
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
strand_,
|
||||
std::bind(
|
||||
&BaseHTTPPeer<Handler, Impl>::do_read,
|
||||
@@ -377,7 +375,7 @@ BaseHTTPPeer<Handler, Impl>::do_writer(
|
||||
{
|
||||
auto const p = impl().shared_from_this();
|
||||
resume = std::function<void(void)>([this, p, writer, keep_alive]() {
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
strand_,
|
||||
std::bind(
|
||||
&BaseHTTPPeer<Handler, Impl>::do_writer,
|
||||
@@ -408,7 +406,7 @@ BaseHTTPPeer<Handler, Impl>::do_writer(
|
||||
if (!keep_alive)
|
||||
return do_close();
|
||||
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
strand_,
|
||||
std::bind(
|
||||
&BaseHTTPPeer<Handler, Impl>::do_read,
|
||||
@@ -450,14 +448,14 @@ BaseHTTPPeer<Handler, Impl>::write(
|
||||
std::shared_ptr<Writer> const& writer,
|
||||
bool keep_alive)
|
||||
{
|
||||
util::spawn(
|
||||
boost::asio::spawn(bind_executor(
|
||||
strand_,
|
||||
std::bind(
|
||||
&BaseHTTPPeer<Handler, Impl>::do_writer,
|
||||
impl().shared_from_this(),
|
||||
writer,
|
||||
keep_alive,
|
||||
std::placeholders::_1));
|
||||
std::placeholders::_1)));
|
||||
}
|
||||
|
||||
// DEPRECATED
|
||||
@@ -492,12 +490,12 @@ BaseHTTPPeer<Handler, Impl>::complete()
|
||||
}
|
||||
|
||||
// keep-alive
|
||||
util::spawn(
|
||||
boost::asio::spawn(bind_executor(
|
||||
strand_,
|
||||
std::bind(
|
||||
&BaseHTTPPeer<Handler, Impl>::do_read,
|
||||
impl().shared_from_this(),
|
||||
std::placeholders::_1));
|
||||
std::placeholders::_1)));
|
||||
}
|
||||
|
||||
// DEPRECATED
|
||||
|
||||
@@ -47,6 +47,7 @@ protected:
|
||||
Port const& port_;
|
||||
Handler& handler_;
|
||||
endpoint_type remote_address_;
|
||||
beast::WrappedSink sink_;
|
||||
beast::Journal const j_;
|
||||
|
||||
boost::asio::executor_work_guard<boost::asio::executor> work_;
|
||||
@@ -83,15 +84,15 @@ BasePeer<Handler, Impl>::BasePeer(
|
||||
: port_(port)
|
||||
, handler_(handler)
|
||||
, remote_address_(remote_address)
|
||||
, j_(journal,
|
||||
log::attributes(log::attr(
|
||||
"PeerID",
|
||||
[] {
|
||||
static std::atomic<unsigned> id{0};
|
||||
return "##" + std::to_string(++id) + " ";
|
||||
}())))
|
||||
, work_(boost::asio::make_work_guard(executor))
|
||||
, strand_(boost::asio::make_strand(executor))
|
||||
, sink_(
|
||||
journal.sink(),
|
||||
[] {
|
||||
static std::atomic<unsigned> id{0};
|
||||
return "##" + std::to_string(++id) + " ";
|
||||
}())
|
||||
, j_(sink_)
|
||||
, work_(executor)
|
||||
, strand_(executor)
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
@@ -29,7 +29,6 @@
|
||||
#include <xrpl/server/detail/BasePeer.h>
|
||||
#include <xrpl/server/detail/LowestLayer.h>
|
||||
|
||||
#include <boost/asio/error.hpp>
|
||||
#include <boost/beast/core/multi_buffer.hpp>
|
||||
#include <boost/beast/http/message.hpp>
|
||||
#include <boost/beast/websocket.hpp>
|
||||
@@ -421,17 +420,11 @@ BaseWSPeer<Handler, Impl>::start_timer()
|
||||
// Max seconds without completing a message
|
||||
static constexpr std::chrono::seconds timeout{30};
|
||||
static constexpr std::chrono::seconds timeoutLocal{3};
|
||||
|
||||
try
|
||||
{
|
||||
timer_.expires_after(
|
||||
remote_endpoint().address().is_loopback() ? timeoutLocal : timeout);
|
||||
}
|
||||
catch (boost::system::system_error const& e)
|
||||
{
|
||||
return fail(e.code(), "start_timer");
|
||||
}
|
||||
|
||||
error_code ec;
|
||||
timer_.expires_from_now(
|
||||
remote_endpoint().address().is_loopback() ? timeoutLocal : timeout, ec);
|
||||
if (ec)
|
||||
return fail(ec, "start_timer");
|
||||
timer_.async_wait(bind_executor(
|
||||
strand_,
|
||||
std::bind(
|
||||
@@ -445,14 +438,8 @@ template <class Handler, class Impl>
|
||||
void
|
||||
BaseWSPeer<Handler, Impl>::cancel_timer()
|
||||
{
|
||||
try
|
||||
{
|
||||
timer_.cancel();
|
||||
}
|
||||
catch (boost::system::system_error const&)
|
||||
{
|
||||
// ignored
|
||||
}
|
||||
error_code ec;
|
||||
timer_.cancel(ec);
|
||||
}
|
||||
|
||||
template <class Handler, class Impl>
|
||||
|
||||
@@ -69,7 +69,7 @@ private:
|
||||
stream_type stream_;
|
||||
socket_type& socket_;
|
||||
endpoint_type remote_address_;
|
||||
boost::asio::strand<boost::asio::io_context::executor_type> strand_;
|
||||
boost::asio::io_context::strand strand_;
|
||||
beast::Journal const j_;
|
||||
|
||||
public:
|
||||
@@ -95,7 +95,7 @@ private:
|
||||
Handler& handler_;
|
||||
boost::asio::io_context& ioc_;
|
||||
acceptor_type acceptor_;
|
||||
boost::asio::strand<boost::asio::io_context::executor_type> strand_;
|
||||
boost::asio::io_context::strand strand_;
|
||||
bool ssl_;
|
||||
bool plain_;
|
||||
|
||||
@@ -155,7 +155,7 @@ Door<Handler>::Detector::Detector(
|
||||
, stream_(std::move(stream))
|
||||
, socket_(stream_.socket())
|
||||
, remote_address_(remote_address)
|
||||
, strand_(boost::asio::make_strand(ioc_))
|
||||
, strand_(ioc_)
|
||||
, j_(j)
|
||||
{
|
||||
}
|
||||
@@ -164,7 +164,7 @@ template <class Handler>
|
||||
void
|
||||
Door<Handler>::Detector::run()
|
||||
{
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
strand_,
|
||||
std::bind(
|
||||
&Detector::do_detect,
|
||||
@@ -269,7 +269,7 @@ Door<Handler>::reOpen()
|
||||
Throw<std::exception>();
|
||||
}
|
||||
|
||||
acceptor_.listen(boost::asio::socket_base::max_listen_connections, ec);
|
||||
acceptor_.listen(boost::asio::socket_base::max_connections, ec);
|
||||
if (ec)
|
||||
{
|
||||
JLOG(j_.error()) << "Listen on port '" << port_.name
|
||||
@@ -291,7 +291,7 @@ Door<Handler>::Door(
|
||||
, handler_(handler)
|
||||
, ioc_(io_context)
|
||||
, acceptor_(io_context)
|
||||
, strand_(boost::asio::make_strand(io_context))
|
||||
, strand_(io_context)
|
||||
, ssl_(
|
||||
port_.protocol.count("https") > 0 ||
|
||||
port_.protocol.count("wss") > 0 || port_.protocol.count("wss2") > 0 ||
|
||||
@@ -307,7 +307,7 @@ template <class Handler>
|
||||
void
|
||||
Door<Handler>::run()
|
||||
{
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
strand_,
|
||||
std::bind(
|
||||
&Door<Handler>::do_accept,
|
||||
@@ -320,8 +320,7 @@ void
|
||||
Door<Handler>::close()
|
||||
{
|
||||
if (!strand_.running_in_this_thread())
|
||||
return boost::asio::post(
|
||||
strand_,
|
||||
return strand_.post(
|
||||
std::bind(&Door<Handler>::close, this->shared_from_this()));
|
||||
error_code ec;
|
||||
acceptor_.close(ec);
|
||||
|
||||
@@ -105,7 +105,7 @@ PlainHTTPPeer<Handler>::run()
|
||||
{
|
||||
if (!this->handler_.onAccept(this->session(), this->remote_address_))
|
||||
{
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
this->strand_,
|
||||
std::bind(&PlainHTTPPeer::do_close, this->shared_from_this()));
|
||||
return;
|
||||
@@ -114,7 +114,7 @@ PlainHTTPPeer<Handler>::run()
|
||||
if (!socket_.is_open())
|
||||
return;
|
||||
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
this->strand_,
|
||||
std::bind(
|
||||
&PlainHTTPPeer::do_read,
|
||||
|
||||
@@ -115,14 +115,14 @@ SSLHTTPPeer<Handler>::run()
|
||||
{
|
||||
if (!this->handler_.onAccept(this->session(), this->remote_address_))
|
||||
{
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
this->strand_,
|
||||
std::bind(&SSLHTTPPeer::do_close, this->shared_from_this()));
|
||||
return;
|
||||
}
|
||||
if (!socket_.is_open())
|
||||
return;
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
this->strand_,
|
||||
std::bind(
|
||||
&SSLHTTPPeer::do_handshake,
|
||||
@@ -164,7 +164,7 @@ SSLHTTPPeer<Handler>::do_handshake(yield_context do_yield)
|
||||
this->port().protocol.count("https") > 0;
|
||||
if (http)
|
||||
{
|
||||
util::spawn(
|
||||
boost::asio::spawn(
|
||||
this->strand_,
|
||||
std::bind(
|
||||
&SSLHTTPPeer::do_read,
|
||||
|
||||
@@ -26,8 +26,6 @@
|
||||
#include <xrpl/server/detail/io_list.h>
|
||||
|
||||
#include <boost/asio.hpp>
|
||||
#include <boost/asio/executor_work_guard.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
|
||||
#include <array>
|
||||
#include <chrono>
|
||||
@@ -87,11 +85,9 @@ private:
|
||||
|
||||
Handler& handler_;
|
||||
beast::Journal const j_;
|
||||
boost::asio::io_context& io_context_;
|
||||
boost::asio::strand<boost::asio::io_context::executor_type> strand_;
|
||||
std::optional<boost::asio::executor_work_guard<
|
||||
boost::asio::io_context::executor_type>>
|
||||
work_;
|
||||
boost::asio::io_service& io_service_;
|
||||
boost::asio::io_service::strand strand_;
|
||||
std::optional<boost::asio::io_service::work> work_;
|
||||
|
||||
std::mutex m_;
|
||||
std::vector<Port> ports_;
|
||||
@@ -104,7 +100,7 @@ private:
|
||||
public:
|
||||
ServerImpl(
|
||||
Handler& handler,
|
||||
boost::asio::io_context& io_context,
|
||||
boost::asio::io_service& io_service,
|
||||
beast::Journal journal);
|
||||
|
||||
~ServerImpl();
|
||||
@@ -127,10 +123,10 @@ public:
|
||||
return ios_;
|
||||
}
|
||||
|
||||
boost::asio::io_context&
|
||||
get_io_context()
|
||||
boost::asio::io_service&
|
||||
get_io_service()
|
||||
{
|
||||
return io_context_;
|
||||
return io_service_;
|
||||
}
|
||||
|
||||
bool
|
||||
@@ -144,13 +140,13 @@ private:
|
||||
template <class Handler>
|
||||
ServerImpl<Handler>::ServerImpl(
|
||||
Handler& handler,
|
||||
boost::asio::io_context& io_context,
|
||||
boost::asio::io_service& io_service,
|
||||
beast::Journal journal)
|
||||
: handler_(handler)
|
||||
, j_(journal)
|
||||
, io_context_(io_context)
|
||||
, strand_(boost::asio::make_strand(io_context_))
|
||||
, work_(std::in_place, boost::asio::make_work_guard(io_context_))
|
||||
, io_service_(io_service)
|
||||
, strand_(io_service_)
|
||||
, work_(io_service_)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -177,7 +173,7 @@ ServerImpl<Handler>::ports(std::vector<Port> const& ports)
|
||||
ports_.push_back(port);
|
||||
auto& internalPort = ports_.back();
|
||||
if (auto sp = ios_.emplace<Door<Handler>>(
|
||||
handler_, io_context_, internalPort, j_))
|
||||
handler_, io_service_, internalPort, j_))
|
||||
{
|
||||
list_.push_back(sp);
|
||||
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2025 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef RIPPLE_SERVER_SPAWN_H_INCLUDED
|
||||
#define RIPPLE_SERVER_SPAWN_H_INCLUDED
|
||||
|
||||
#include <xrpl/basics/Log.h>
|
||||
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/asio/strand.hpp>
|
||||
|
||||
#include <concepts>
|
||||
#include <type_traits>
|
||||
|
||||
namespace ripple::util {
|
||||
namespace impl {
|
||||
|
||||
template <typename T>
|
||||
concept IsStrand = std::same_as<
|
||||
std::decay_t<T>,
|
||||
boost::asio::strand<typename std::decay_t<T>::inner_executor_type>>;
|
||||
|
||||
/**
|
||||
* @brief A completion handler that restores `boost::asio::spawn`'s behaviour
|
||||
* from Boost 1.83
|
||||
*
|
||||
* This is intended to be passed as the third argument to `boost::asio::spawn`
|
||||
* so that exceptions are not ignored but propagated to `io_context.run()` call
|
||||
* site.
|
||||
*
|
||||
* @param ePtr The exception that was caught on the coroutine
|
||||
*/
|
||||
inline constexpr auto kPROPAGATE_EXCEPTIONS = [](std::exception_ptr ePtr) {
|
||||
if (ePtr)
|
||||
{
|
||||
try
|
||||
{
|
||||
std::rethrow_exception(ePtr);
|
||||
}
|
||||
catch (std::exception const& e)
|
||||
{
|
||||
JLOG(debugLog().warn()) << "Spawn exception: " << e.what();
|
||||
throw;
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
JLOG(debugLog().warn()) << "Spawn exception: Unknown";
|
||||
throw;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace impl
|
||||
|
||||
/**
|
||||
* @brief Spawns a coroutine using `boost::asio::spawn`
|
||||
*
|
||||
* @note This uses kPROPAGATE_EXCEPTIONS to force asio to propagate exceptions
|
||||
* through `io_context`
|
||||
* @note Since implicit strand was removed from boost::asio::spawn this helper
|
||||
* function adds the strand back
|
||||
*
|
||||
* @tparam Ctx The type of the context/strand
|
||||
* @tparam F The type of the function to execute
|
||||
* @param ctx The execution context
|
||||
* @param func The function to execute. Must return `void`
|
||||
*/
|
||||
template <typename Ctx, typename F>
|
||||
requires std::is_invocable_r_v<void, F, boost::asio::yield_context>
|
||||
void
|
||||
spawn(Ctx&& ctx, F&& func)
|
||||
{
|
||||
if constexpr (impl::IsStrand<Ctx>)
|
||||
{
|
||||
boost::asio::spawn(
|
||||
std::forward<Ctx>(ctx),
|
||||
std::forward<F>(func),
|
||||
impl::kPROPAGATE_EXCEPTIONS);
|
||||
}
|
||||
else
|
||||
{
|
||||
boost::asio::spawn(
|
||||
boost::asio::make_strand(
|
||||
boost::asio::get_associated_executor(std::forward<Ctx>(ctx))),
|
||||
std::forward<F>(func),
|
||||
impl::kPROPAGATE_EXCEPTIONS);
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace ripple::util
|
||||
|
||||
#endif
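
A usage sketch for the util::spawn helper shown in the hunk above; the surrounding function and the coroutine body are placeholders, and the header path is the one referenced elsewhere in these hunks.

    #include <xrpl/server/detail/Spawn.h>

    #include <boost/asio/io_context.hpp>
    #include <boost/asio/spawn.hpp>
    #include <boost/asio/strand.hpp>

    void
    runOnStrand(boost::asio::io_context& ioc)
    {
        // The helper wraps boost::asio::spawn, adds a strand when the given
        // executor is not already one, and rethrows coroutine exceptions so
        // they surface from io_context::run().
        ripple::util::spawn(
            boost::asio::make_strand(ioc),
            [](boost::asio::yield_context yield) {
                // ... asynchronous work using `yield` ...
            });
    }
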
@@ -166,7 +166,7 @@ public:
|
||||
May be called concurrently.
|
||||
|
||||
Preconditions:
|
||||
No call to io_context::run on any io_context
|
||||
No call to io_service::run on any io_service
|
||||
used by work objects associated with this io_list
|
||||
exists in the caller's call stack.
|
||||
*/
|
||||
|
||||
@@ -49,7 +49,7 @@ Section::append(std::vector<std::string> const& lines)
|
||||
// <key> '=' <value>
|
||||
static boost::regex const re1(
|
||||
"^" // start of line
|
||||
"(?:\\s*)" // whitespace (optonal)
|
||||
"(?:\\s*)" // whitespace (optional)
|
||||
"([a-zA-Z][_a-zA-Z0-9]*)" // <key>
|
||||
"(?:\\s*)" // whitespace (optional)
|
||||
"(?:=)" // '='
|
||||
|
||||
@@ -113,14 +113,14 @@ Logs::File::close()
|
||||
}
|
||||
|
||||
void
|
||||
Logs::File::write(std::string_view text)
|
||||
Logs::File::write(char const* text)
|
||||
{
|
||||
if (m_stream != nullptr)
|
||||
(*m_stream) << text;
|
||||
}
|
||||
|
||||
void
|
||||
Logs::File::writeln(std::string_view text)
|
||||
Logs::File::writeln(char const* text)
|
||||
{
|
||||
if (m_stream != nullptr)
|
||||
{
|
||||
@@ -196,15 +196,11 @@ Logs::write(
|
||||
bool console)
|
||||
{
|
||||
std::string s;
|
||||
std::string_view result = text;
|
||||
if (!beast::Journal::isStructuredJournalEnabled())
|
||||
{
|
||||
format(s, text, level, partition);
|
||||
result = text;
|
||||
}
|
||||
|
||||
format(s, text, level, partition);
|
||||
std::lock_guard lock(mutex_);
|
||||
file_.writeln(result);
|
||||
file_.writeln(s);
|
||||
if (!silent_)
|
||||
std::cerr << s << '\n';
|
||||
// VFALCO TODO Fix console output
|
||||
// if (console)
|
||||
// out_.write_console(s);
|
||||
@@ -243,9 +239,11 @@ Logs::fromSeverity(beast::severities::Severity level)
|
||||
case kError:
|
||||
return lsERROR;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::fromSeverity : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case kFatal:
|
||||
break;
|
||||
}
|
||||
@@ -269,9 +267,11 @@ Logs::toSeverity(LogSeverity level)
|
||||
return kWarning;
|
||||
case lsERROR:
|
||||
return kError;
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::toSeverity : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case lsFATAL:
|
||||
break;
|
||||
}
|
||||
@@ -296,9 +296,11 @@ Logs::toString(LogSeverity s)
|
||||
return "Error";
|
||||
case lsFATAL:
|
||||
return "Fatal";
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::toString : invalid severity");
|
||||
return "Unknown";
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -360,9 +362,11 @@ Logs::format(
|
||||
case kError:
|
||||
output += "ERR ";
|
||||
break;
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::format : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case kFatal:
|
||||
output += "FTL ";
|
||||
break;
|
||||
|
||||
@@ -93,6 +93,18 @@ public:
|
||||
// tie, round towards even.
|
||||
int
|
||||
round() noexcept;
|
||||
|
||||
// Modify the result to the correctly rounded value
|
||||
void
|
||||
doRoundUp(rep& mantissa, int& exponent, std::string location);
|
||||
|
||||
// Modify the result to the correctly rounded value
|
||||
void
|
||||
doRoundDown(rep& mantissa, int& exponent);
|
||||
|
||||
// Modify the result to the correctly rounded value
|
||||
void
|
||||
doRound(rep& drops);
|
||||
};
|
||||
|
||||
inline void
|
||||
@@ -170,6 +182,61 @@ Number::Guard::round() noexcept
|
||||
return 0;
|
||||
}
|
||||
|
||||
void
|
||||
Number::Guard::doRoundUp(rep& mantissa, int& exponent, std::string location)
|
||||
{
|
||||
auto r = round();
|
||||
if (r == 1 || (r == 0 && (mantissa & 1) == 1))
|
||||
{
|
||||
++mantissa;
|
||||
if (mantissa > maxMantissa)
|
||||
{
|
||||
mantissa /= 10;
|
||||
++exponent;
|
||||
}
|
||||
}
|
||||
if (exponent < minExponent)
|
||||
{
|
||||
mantissa = 0;
|
||||
exponent = Number{}.exponent_;
|
||||
}
|
||||
if (exponent > maxExponent)
|
||||
throw std::overflow_error(location);
|
||||
}
|
||||
|
||||
void
|
||||
Number::Guard::doRoundDown(rep& mantissa, int& exponent)
|
||||
{
|
||||
auto r = round();
|
||||
if (r == 1 || (r == 0 && (mantissa & 1) == 1))
|
||||
{
|
||||
--mantissa;
|
||||
if (mantissa < minMantissa)
|
||||
{
|
||||
mantissa *= 10;
|
||||
--exponent;
|
||||
}
|
||||
}
|
||||
if (exponent < minExponent)
|
||||
{
|
||||
mantissa = 0;
|
||||
exponent = Number{}.exponent_;
|
||||
}
|
||||
}
|
||||
|
||||
// Modify the result to the correctly rounded value
|
||||
void
|
||||
Number::Guard::doRound(rep& drops)
|
||||
{
|
||||
auto r = round();
|
||||
if (r == 1 || (r == 0 && (drops & 1) == 1))
|
||||
{
|
||||
++drops;
|
||||
}
|
||||
if (is_negative())
|
||||
drops = -drops;
|
||||
}
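
// For orientation: the Guard helpers above implement round-half-to-even.
// round() reports whether the discarded guard digits were below, exactly at,
// or above one half; on an exact tie the mantissa is adjusted only when it is
// odd, so a value ending in ...4.5 stays at ...4 while ...5.5 moves to ...6.
// doRoundUp() also renormalizes when the incremented mantissa exceeds
// maxMantissa, clamps the value to zero when the exponent falls below
// minExponent, and throws std::overflow_error past maxExponent.
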
// Number
|
||||
|
||||
constexpr Number one{1000000000000000, -15, Number::unchecked{}};
|
||||
@@ -209,18 +276,7 @@ Number::normalize()
|
||||
return;
|
||||
}
|
||||
|
||||
auto r = g.round();
|
||||
if (r == 1 || (r == 0 && (mantissa_ & 1) == 1))
|
||||
{
|
||||
++mantissa_;
|
||||
if (mantissa_ > maxMantissa)
|
||||
{
|
||||
mantissa_ /= 10;
|
||||
++exponent_;
|
||||
}
|
||||
}
|
||||
if (exponent_ > maxExponent)
|
||||
throw std::overflow_error("Number::normalize 2");
|
||||
g.doRoundUp(mantissa_, exponent_, "Number::normalize 2");
|
||||
|
||||
if (negative)
|
||||
mantissa_ = -mantissa_;
|
||||
@@ -292,18 +348,7 @@ Number::operator+=(Number const& y)
|
||||
xm /= 10;
|
||||
++xe;
|
||||
}
|
||||
auto r = g.round();
|
||||
if (r == 1 || (r == 0 && (xm & 1) == 1))
|
||||
{
|
||||
++xm;
|
||||
if (xm > maxMantissa)
|
||||
{
|
||||
xm /= 10;
|
||||
++xe;
|
||||
}
|
||||
}
|
||||
if (xe > maxExponent)
|
||||
throw std::overflow_error("Number::addition overflow");
|
||||
g.doRoundUp(xm, xe, "Number::addition overflow");
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -323,21 +368,7 @@ Number::operator+=(Number const& y)
|
||||
xm -= g.pop();
|
||||
--xe;
|
||||
}
|
||||
auto r = g.round();
|
||||
if (r == 1 || (r == 0 && (xm & 1) == 1))
|
||||
{
|
||||
--xm;
|
||||
if (xm < minMantissa)
|
||||
{
|
||||
xm *= 10;
|
||||
--xe;
|
||||
}
|
||||
}
|
||||
if (xe < minExponent)
|
||||
{
|
||||
xm = 0;
|
||||
xe = Number{}.exponent_;
|
||||
}
|
||||
g.doRoundDown(xm, xe);
|
||||
}
|
||||
mantissa_ = xm * xn;
|
||||
exponent_ = xe;
|
||||
@@ -417,25 +448,10 @@ Number::operator*=(Number const& y)
|
||||
}
|
||||
xm = static_cast<rep>(zm);
|
||||
xe = ze;
|
||||
auto r = g.round();
|
||||
if (r == 1 || (r == 0 && (xm & 1) == 1))
|
||||
{
|
||||
++xm;
|
||||
if (xm > maxMantissa)
|
||||
{
|
||||
xm /= 10;
|
||||
++xe;
|
||||
}
|
||||
}
|
||||
if (xe < minExponent)
|
||||
{
|
||||
xm = 0;
|
||||
xe = Number{}.exponent_;
|
||||
}
|
||||
if (xe > maxExponent)
|
||||
throw std::overflow_error(
|
||||
"Number::multiplication overflow : exponent is " +
|
||||
std::to_string(xe));
|
||||
g.doRoundUp(
|
||||
xm,
|
||||
xe,
|
||||
"Number::multiplication overflow : exponent is " + std::to_string(xe));
|
||||
mantissa_ = xm * zn;
|
||||
exponent_ = xe;
|
||||
XRPL_ASSERT(
|
||||
@@ -500,17 +516,29 @@ Number::operator rep() const
|
||||
throw std::overflow_error("Number::operator rep() overflow");
|
||||
drops *= 10;
|
||||
}
|
||||
auto r = g.round();
|
||||
if (r == 1 || (r == 0 && (drops & 1) == 1))
|
||||
{
|
||||
++drops;
|
||||
}
|
||||
if (g.is_negative())
|
||||
drops = -drops;
|
||||
g.doRound(drops);
|
||||
}
|
||||
return drops;
|
||||
}
|
||||
|
||||
Number
|
||||
Number::truncate() const noexcept
|
||||
{
|
||||
if (exponent_ >= 0 || mantissa_ == 0)
|
||||
return *this;
|
||||
|
||||
Number ret = *this;
|
||||
while (ret.exponent_ < 0 && ret.mantissa_ != 0)
|
||||
{
|
||||
ret.exponent_ += 1;
|
||||
ret.mantissa_ /= rep(10);
|
||||
}
|
||||
// We are guaranteed that normalize() will never throw an exception
|
||||
// because exponent is either negative or zero at this point.
|
||||
ret.normalize();
|
||||
return ret;
|
||||
}
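
// For orientation: truncate() drops any fractional part by shifting the
// mantissa toward zero one decimal digit at a time, so a Number holding
// 123.456 becomes 123 and one holding -7.9 becomes -7; values with a
// non-negative exponent are already integral and are returned unchanged.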
|
||||
std::string
|
||||
to_string(Number const& amount)
|
||||
{
|
||||
|
||||
@@ -25,9 +25,8 @@
|
||||
#include <xrpl/beast/utility/Journal.h>
|
||||
#include <xrpl/beast/utility/instrumentation.h>
|
||||
|
||||
#include <boost/asio/bind_executor.hpp>
|
||||
#include <boost/asio/error.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/io_service.hpp>
|
||||
#include <boost/asio/ip/tcp.hpp>
|
||||
#include <boost/system/detail/error_code.hpp>
|
||||
|
||||
@@ -125,8 +124,8 @@ public:
|
||||
|
||||
beast::Journal m_journal;
|
||||
|
||||
boost::asio::io_context& m_io_context;
|
||||
boost::asio::strand<boost::asio::io_context::executor_type> m_strand;
|
||||
boost::asio::io_service& m_io_service;
|
||||
boost::asio::io_service::strand m_strand;
|
||||
boost::asio::ip::tcp::resolver m_resolver;
|
||||
|
||||
std::condition_variable m_cv;
|
||||
@@ -156,12 +155,12 @@ public:
|
||||
std::deque<Work> m_work;
|
||||
|
||||
ResolverAsioImpl(
|
||||
boost::asio::io_context& io_context,
|
||||
boost::asio::io_service& io_service,
|
||||
beast::Journal journal)
|
||||
: m_journal(journal)
|
||||
, m_io_context(io_context)
|
||||
, m_strand(boost::asio::make_strand(io_context))
|
||||
, m_resolver(io_context)
|
||||
, m_io_service(io_service)
|
||||
, m_strand(io_service)
|
||||
, m_resolver(io_service)
|
||||
, m_asyncHandlersCompleted(true)
|
||||
, m_stop_called(false)
|
||||
, m_stopped(true)
|
||||
@@ -217,14 +216,8 @@ public:
|
||||
{
|
||||
if (m_stop_called.exchange(true) == false)
|
||||
{
|
||||
boost::asio::dispatch(
|
||||
m_io_context,
|
||||
boost::asio::bind_executor(
|
||||
m_strand,
|
||||
std::bind(
|
||||
&ResolverAsioImpl::do_stop,
|
||||
this,
|
||||
CompletionCounter(this))));
|
||||
m_io_service.dispatch(m_strand.wrap(std::bind(
|
||||
&ResolverAsioImpl::do_stop, this, CompletionCounter(this))));
|
||||
|
||||
JLOG(m_journal.debug()) << "Queued a stop request";
|
||||
}
|
||||
@@ -255,16 +248,12 @@ public:
|
||||
|
||||
// TODO NIKB use rvalue references to construct and move
|
||||
// reducing cost.
|
||||
boost::asio::dispatch(
|
||||
m_io_context,
|
||||
boost::asio::bind_executor(
|
||||
m_strand,
|
||||
std::bind(
|
||||
&ResolverAsioImpl::do_resolve,
|
||||
this,
|
||||
names,
|
||||
handler,
|
||||
CompletionCounter(this))));
|
||||
m_io_service.dispatch(m_strand.wrap(std::bind(
|
||||
&ResolverAsioImpl::do_resolve,
|
||||
this,
|
||||
names,
|
||||
handler,
|
||||
CompletionCounter(this))));
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
@@ -290,20 +279,19 @@ public:
|
||||
std::string name,
|
||||
boost::system::error_code const& ec,
|
||||
HandlerType handler,
|
||||
boost::asio::ip::tcp::resolver::results_type results,
|
||||
boost::asio::ip::tcp::resolver::iterator iter,
|
||||
CompletionCounter)
|
||||
{
|
||||
if (ec == boost::asio::error::operation_aborted)
|
||||
return;
|
||||
|
||||
std::vector<beast::IP::Endpoint> addresses;
|
||||
auto iter = results.begin();
|
||||
|
||||
// If we get an error message back, we don't return any
|
||||
// results that we may have gotten.
|
||||
if (!ec)
|
||||
{
|
||||
while (iter != results.end())
|
||||
while (iter != boost::asio::ip::tcp::resolver::iterator())
|
||||
{
|
||||
addresses.push_back(
|
||||
beast::IPAddressConversion::from_asio(*iter));
|
||||
@@ -313,14 +301,8 @@ public:
|
||||
|
||||
handler(name, addresses);
|
||||
|
||||
boost::asio::post(
|
||||
m_io_context,
|
||||
boost::asio::bind_executor(
|
||||
m_strand,
|
||||
std::bind(
|
||||
&ResolverAsioImpl::do_work,
|
||||
this,
|
||||
CompletionCounter(this))));
|
||||
m_io_service.post(m_strand.wrap(std::bind(
|
||||
&ResolverAsioImpl::do_work, this, CompletionCounter(this))));
|
||||
}
|
||||
|
||||
HostAndPort
|
||||
@@ -401,21 +383,16 @@ public:
|
||||
{
|
||||
JLOG(m_journal.error()) << "Unable to parse '" << name << "'";
|
||||
|
||||
boost::asio::post(
|
||||
m_io_context,
|
||||
boost::asio::bind_executor(
|
||||
m_strand,
|
||||
std::bind(
|
||||
&ResolverAsioImpl::do_work,
|
||||
this,
|
||||
CompletionCounter(this))));
|
||||
m_io_service.post(m_strand.wrap(std::bind(
|
||||
&ResolverAsioImpl::do_work, this, CompletionCounter(this))));
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
boost::asio::ip::tcp::resolver::query query(host, port);
|
||||
|
||||
m_resolver.async_resolve(
|
||||
host,
|
||||
port,
|
||||
query,
|
||||
std::bind(
|
||||
&ResolverAsioImpl::do_finish,
|
||||
this,
|
||||
@@ -446,14 +423,10 @@ public:
|
||||
|
||||
if (m_work.size() > 0)
|
||||
{
|
||||
boost::asio::post(
|
||||
m_io_context,
|
||||
boost::asio::bind_executor(
|
||||
m_strand,
|
||||
std::bind(
|
||||
&ResolverAsioImpl::do_work,
|
||||
this,
|
||||
CompletionCounter(this))));
|
||||
m_io_service.post(m_strand.wrap(std::bind(
|
||||
&ResolverAsioImpl::do_work,
|
||||
this,
|
||||
CompletionCounter(this))));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -462,9 +435,9 @@ public:
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
std::unique_ptr<ResolverAsio>
|
||||
ResolverAsio::New(boost::asio::io_context& io_context, beast::Journal journal)
|
||||
ResolverAsio::New(boost::asio::io_service& io_service, beast::Journal journal)
|
||||
{
|
||||
return std::make_unique<ResolverAsioImpl>(io_context, journal);
|
||||
return std::make_unique<ResolverAsioImpl>(io_service, journal);
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
@@ -36,6 +36,7 @@ LogThrow(std::string const& title)
|
||||
[[noreturn]] void
|
||||
LogicError(std::string const& s) noexcept
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
JLOG(debugLog().fatal()) << s;
|
||||
std::cerr << "Logic error: " << s << std::endl;
|
||||
// Use a non-standard contract naming here (without namespace) because
|
||||
@@ -45,6 +46,7 @@ LogicError(std::string const& s) noexcept
|
||||
// For the above reasons, we want this contract to stand out.
|
||||
UNREACHABLE("LogicError", {{"message", s}});
|
||||
std::abort();
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -30,11 +30,9 @@
|
||||
#include <xrpl/beast/utility/instrumentation.h>
|
||||
|
||||
#include <boost/asio/basic_waitable_timer.hpp>
|
||||
#include <boost/asio/bind_executor.hpp>
|
||||
#include <boost/asio/buffer.hpp>
|
||||
#include <boost/asio/error.hpp>
|
||||
#include <boost/asio/executor_work_guard.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/io_service.hpp>
|
||||
#include <boost/asio/ip/udp.hpp>
|
||||
#include <boost/asio/strand.hpp>
|
||||
#include <boost/system/detail/error_code.hpp>
|
||||
@@ -240,11 +238,9 @@ private:
|
||||
Journal m_journal;
|
||||
IP::Endpoint m_address;
|
||||
std::string m_prefix;
|
||||
boost::asio::io_context m_io_context;
|
||||
std::optional<boost::asio::executor_work_guard<
|
||||
boost::asio::io_context::executor_type>>
|
||||
m_work;
|
||||
boost::asio::strand<boost::asio::io_context::executor_type> m_strand;
|
||||
boost::asio::io_service m_io_service;
|
||||
std::optional<boost::asio::io_service::work> m_work;
|
||||
boost::asio::io_service::strand m_strand;
|
||||
boost::asio::basic_waitable_timer<std::chrono::steady_clock> m_timer;
|
||||
boost::asio::ip::udp::socket m_socket;
|
||||
std::deque<std::string> m_data;
|
||||
@@ -268,24 +264,18 @@ public:
|
||||
: m_journal(journal)
|
||||
, m_address(address)
|
||||
, m_prefix(prefix)
|
||||
, m_work(boost::asio::make_work_guard(m_io_context))
|
||||
, m_strand(boost::asio::make_strand(m_io_context))
|
||||
, m_timer(m_io_context)
|
||||
, m_socket(m_io_context)
|
||||
, m_work(std::ref(m_io_service))
|
||||
, m_strand(m_io_service)
|
||||
, m_timer(m_io_service)
|
||||
, m_socket(m_io_service)
|
||||
, m_thread(&StatsDCollectorImp::run, this)
|
||||
{
|
||||
}
|
||||
|
||||
~StatsDCollectorImp() override
|
||||
{
|
||||
try
|
||||
{
|
||||
m_timer.cancel();
|
||||
}
|
||||
catch (boost::system::system_error const&)
|
||||
{
|
||||
// ignored
|
||||
}
|
||||
boost::system::error_code ec;
|
||||
m_timer.cancel(ec);
|
||||
|
||||
m_work.reset();
|
||||
m_thread.join();
|
||||
@@ -344,10 +334,10 @@ public:
|
||||
|
||||
//--------------------------------------------------------------------------
|
||||
|
||||
boost::asio::io_context&
|
||||
get_io_context()
|
||||
boost::asio::io_service&
|
||||
get_io_service()
|
||||
{
|
||||
return m_io_context;
|
||||
return m_io_service;
|
||||
}
|
||||
|
||||
std::string const&
|
||||
@@ -365,14 +355,8 @@ public:
|
||||
void
|
||||
post_buffer(std::string&& buffer)
|
||||
{
|
||||
boost::asio::dispatch(
|
||||
m_io_context,
|
||||
boost::asio::bind_executor(
|
||||
m_strand,
|
||||
std::bind(
|
||||
&StatsDCollectorImp::do_post_buffer,
|
||||
this,
|
||||
std::move(buffer))));
|
||||
m_io_service.dispatch(m_strand.wrap(std::bind(
|
||||
&StatsDCollectorImp::do_post_buffer, this, std::move(buffer))));
|
||||
}
|
||||
|
||||
// The keepAlive parameter makes sure the buffers sent to
|
||||
@@ -402,7 +386,8 @@ public:
|
||||
for (auto const& buffer : buffers)
|
||||
{
|
||||
std::string const s(
|
||||
buffer.data(), boost::asio::buffer_size(buffer));
|
||||
boost::asio::buffer_cast<char const*>(buffer),
|
||||
boost::asio::buffer_size(buffer));
|
||||
std::cerr << s;
|
||||
}
|
||||
std::cerr << '\n';
|
||||
@@ -471,7 +456,7 @@ public:
|
||||
set_timer()
|
||||
{
|
||||
using namespace std::chrono_literals;
|
||||
m_timer.expires_after(1s);
|
||||
m_timer.expires_from_now(1s);
|
||||
m_timer.async_wait(std::bind(
|
||||
&StatsDCollectorImp::on_timer, this, std::placeholders::_1));
|
||||
}
|
||||
@@ -513,13 +498,13 @@ public:
|
||||
|
||||
set_timer();
|
||||
|
||||
m_io_context.run();
|
||||
m_io_service.run();
|
||||
|
||||
m_socket.shutdown(boost::asio::ip::udp::socket::shutdown_send, ec);
|
||||
|
||||
m_socket.close();
|
||||
|
||||
m_io_context.poll();
|
||||
m_io_service.poll();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -562,12 +547,10 @@ StatsDCounterImpl::~StatsDCounterImpl()
|
||||
void
|
||||
StatsDCounterImpl::increment(CounterImpl::value_type amount)
|
||||
{
|
||||
boost::asio::dispatch(
|
||||
m_impl->get_io_context(),
|
||||
std::bind(
|
||||
&StatsDCounterImpl::do_increment,
|
||||
std::static_pointer_cast<StatsDCounterImpl>(shared_from_this()),
|
||||
amount));
|
||||
m_impl->get_io_service().dispatch(std::bind(
|
||||
&StatsDCounterImpl::do_increment,
|
||||
std::static_pointer_cast<StatsDCounterImpl>(shared_from_this()),
|
||||
amount));
|
||||
}
|
||||
|
||||
void
|
||||
@@ -609,12 +592,10 @@ StatsDEventImpl::StatsDEventImpl(
|
||||
void
|
||||
StatsDEventImpl::notify(EventImpl::value_type const& value)
|
||||
{
|
||||
boost::asio::dispatch(
|
||||
m_impl->get_io_context(),
|
||||
std::bind(
|
||||
&StatsDEventImpl::do_notify,
|
||||
std::static_pointer_cast<StatsDEventImpl>(shared_from_this()),
|
||||
value));
|
||||
m_impl->get_io_service().dispatch(std::bind(
|
||||
&StatsDEventImpl::do_notify,
|
||||
std::static_pointer_cast<StatsDEventImpl>(shared_from_this()),
|
||||
value));
|
||||
}
|
||||
|
||||
void
|
||||
@@ -644,23 +625,19 @@ StatsDGaugeImpl::~StatsDGaugeImpl()
|
||||
void
|
||||
StatsDGaugeImpl::set(GaugeImpl::value_type value)
|
||||
{
|
||||
boost::asio::dispatch(
|
||||
m_impl->get_io_context(),
|
||||
std::bind(
|
||||
&StatsDGaugeImpl::do_set,
|
||||
std::static_pointer_cast<StatsDGaugeImpl>(shared_from_this()),
|
||||
value));
|
||||
m_impl->get_io_service().dispatch(std::bind(
|
||||
&StatsDGaugeImpl::do_set,
|
||||
std::static_pointer_cast<StatsDGaugeImpl>(shared_from_this()),
|
||||
value));
|
||||
}
|
||||
|
||||
void
|
||||
StatsDGaugeImpl::increment(GaugeImpl::difference_type amount)
|
||||
{
|
||||
boost::asio::dispatch(
|
||||
m_impl->get_io_context(),
|
||||
std::bind(
|
||||
&StatsDGaugeImpl::do_increment,
|
||||
std::static_pointer_cast<StatsDGaugeImpl>(shared_from_this()),
|
||||
amount));
|
||||
m_impl->get_io_service().dispatch(std::bind(
|
||||
&StatsDGaugeImpl::do_increment,
|
||||
std::static_pointer_cast<StatsDGaugeImpl>(shared_from_this()),
|
||||
amount));
|
||||
}
|
||||
|
||||
void
|
||||
@@ -736,12 +713,10 @@ StatsDMeterImpl::~StatsDMeterImpl()
|
||||
void
|
||||
StatsDMeterImpl::increment(MeterImpl::value_type amount)
|
||||
{
|
||||
boost::asio::dispatch(
|
||||
m_impl->get_io_context(),
|
||||
std::bind(
|
||||
&StatsDMeterImpl::do_increment,
|
||||
std::static_pointer_cast<StatsDMeterImpl>(shared_from_this()),
|
||||
amount));
|
||||
m_impl->get_io_service().dispatch(std::bind(
|
||||
&StatsDMeterImpl::do_increment,
|
||||
std::static_pointer_cast<StatsDMeterImpl>(shared_from_this()),
|
||||
amount));
|
||||
}
|
||||
|
||||
void
|
||||
|
||||
@@ -25,11 +25,11 @@ namespace IP {
|
||||
bool
|
||||
is_private(AddressV4 const& addr)
|
||||
{
|
||||
return ((addr.to_uint() & 0xff000000) ==
|
||||
return ((addr.to_ulong() & 0xff000000) ==
|
||||
0x0a000000) || // Prefix /8, 10. #.#.#
|
||||
((addr.to_uint() & 0xfff00000) ==
|
||||
((addr.to_ulong() & 0xfff00000) ==
|
||||
0xac100000) || // Prefix /12 172. 16.#.# - 172.31.#.#
|
||||
((addr.to_uint() & 0xffff0000) ==
|
||||
((addr.to_ulong() & 0xffff0000) ==
|
||||
0xc0a80000) || // Prefix /16 192.168.#.#
|
||||
addr.is_loopback();
|
||||
}
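
// For orientation: 10.1.2.3 has the 32-bit value 0x0A010203; masked with
// 0xff000000 it leaves 0x0a000000, so it falls in 10.0.0.0/8 and the function
// returns true. 8.8.8.8 (0x08080808) matches none of the three RFC 1918 masks
// and is not loopback, so it returns false.
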
@@ -44,7 +44,7 @@ char
|
||||
get_class(AddressV4 const& addr)
|
||||
{
|
||||
static char const* table = "AAAABBCD";
|
||||
return table[(addr.to_uint() & 0xE0000000) >> 29];
|
||||
return table[(addr.to_ulong() & 0xE0000000) >> 29];
|
||||
}
|
||||
|
||||
} // namespace IP
|
||||
|
||||
@@ -20,8 +20,6 @@
|
||||
#include <xrpl/beast/net/IPAddressV4.h>
|
||||
#include <xrpl/beast/net/IPAddressV6.h>
|
||||
|
||||
#include <boost/asio/ip/address_v4.hpp>
|
||||
|
||||
namespace beast {
|
||||
namespace IP {
|
||||
|
||||
@@ -30,9 +28,7 @@ is_private(AddressV6 const& addr)
|
||||
{
|
||||
return (
|
||||
(addr.to_bytes()[0] & 0xfd) || // TODO fc00::/8 too ?
|
||||
(addr.is_v4_mapped() &&
|
||||
is_private(boost::asio::ip::make_address_v4(
|
||||
boost::asio::ip::v4_mapped, addr))));
|
||||
(addr.is_v4_mapped() && is_private(addr.to_v4())));
|
||||
}
|
||||
|
||||
bool
|
||||
|
||||
@@ -21,8 +21,6 @@
|
||||
#include <xrpl/beast/net/IPEndpoint.h>
|
||||
|
||||
#include <boost/algorithm/string/trim.hpp>
|
||||
#include <boost/asio/ip/address.hpp>
|
||||
#include <boost/asio/ip/address_v4.hpp>
|
||||
#include <boost/system/detail/error_code.hpp>
|
||||
|
||||
#include <cctype>
|
||||
@@ -169,7 +167,7 @@ operator>>(std::istream& is, Endpoint& endpoint)
|
||||
}
|
||||
|
||||
boost::system::error_code ec;
|
||||
auto addr = boost::asio::ip::make_address(addrStr, ec);
|
||||
auto addr = Address::from_string(addrStr, ec);
|
||||
if (ec)
|
||||
{
|
||||
is.setstate(std::ios_base::failbit);
|
||||
|
||||
@@ -19,102 +19,12 @@
|
||||
|
||||
#include <xrpl/beast/utility/Journal.h>
|
||||
|
||||
#include <chrono>
|
||||
#include <ios>
|
||||
#include <ostream>
|
||||
#include <ranges>
|
||||
#include <string>
|
||||
#include <thread>
|
||||
|
||||
namespace beast {
|
||||
|
||||
namespace {
|
||||
|
||||
// Fast timestamp to ISO string conversion
|
||||
// Returns string like "2024-01-15T10:30:45.123Z"
|
||||
std::string_view
|
||||
fastTimestampToString(std::int64_t milliseconds_since_epoch)
|
||||
{
|
||||
thread_local char buffer[64]; // "2024-01-15T10:30:45.123Z"
|
||||
|
||||
// Precomputed lookup table for 2-digit numbers 00-99
|
||||
static constexpr char digits[200] = {
|
||||
'0', '0', '0', '1', '0', '2', '0', '3', '0', '4', '0', '5', '0', '6',
|
||||
'0', '7', '0', '8', '0', '9', '1', '0', '1', '1', '1', '2', '1', '3',
|
||||
'1', '4', '1', '5', '1', '6', '1', '7', '1', '8', '1', '9', '2', '0',
|
||||
'2', '1', '2', '2', '2', '3', '2', '4', '2', '5', '2', '6', '2', '7',
|
||||
'2', '8', '2', '9', '3', '0', '3', '1', '3', '2', '3', '3', '3', '4',
|
||||
'3', '5', '3', '6', '3', '7', '3', '8', '3', '9', '4', '0', '4', '1',
|
||||
'4', '2', '4', '3', '4', '4', '4', '5', '4', '6', '4', '7', '4', '8',
|
||||
'4', '9', '5', '0', '5', '1', '5', '2', '5', '3', '5', '4', '5', '5',
|
||||
'5', '6', '5', '7', '5', '8', '5', '9', '6', '0', '6', '1', '6', '2',
|
||||
'6', '3', '6', '4', '6', '5', '6', '6', '6', '7', '6', '8', '6', '9',
|
||||
'7', '0', '7', '1', '7', '2', '7', '3', '7', '4', '7', '5', '7', '6',
|
||||
'7', '7', '7', '8', '7', '9', '8', '0', '8', '1', '8', '2', '8', '3',
|
||||
'8', '4', '8', '5', '8', '6', '8', '7', '8', '8', '8', '9', '9', '0',
|
||||
'9', '1', '9', '2', '9', '3', '9', '4', '9', '5', '9', '6', '9', '7',
|
||||
'9', '8', '9', '9'};
|
||||
|
||||
constexpr std::int64_t UNIX_EPOCH_DAYS =
|
||||
719468; // Days from year 0 to 1970-01-01
|
||||
|
||||
std::int64_t seconds = milliseconds_since_epoch / 1000;
|
||||
int ms = milliseconds_since_epoch % 1000;
|
||||
std::int64_t days = seconds / 86400 + UNIX_EPOCH_DAYS;
|
||||
int sec_of_day = seconds % 86400;
|
||||
|
||||
// Calculate year, month, day from days using Gregorian calendar algorithm
|
||||
int era = (days >= 0 ? days : days - 146096) / 146097;
|
||||
int doe = days - era * 146097;
|
||||
int yoe = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365;
|
||||
int year = yoe + era * 400;
|
||||
int doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
|
||||
int mp = (5 * doy + 2) / 153;
|
||||
int day = doy - (153 * mp + 2) / 5 + 1;
|
||||
int month = mp + (mp < 10 ? 3 : -9);
|
||||
year += (month <= 2);
|
||||
|
||||
// Calculate hour, minute, second
|
||||
int hour = sec_of_day / 3600;
|
||||
int min = (sec_of_day % 3600) / 60;
|
||||
int sec = sec_of_day % 60;
|
||||
|
||||
// Format: "2024-01-15T10:30:45.123Z"
|
||||
buffer[0] = '0' + year / 1000;
|
||||
buffer[1] = '0' + (year / 100) % 10;
|
||||
buffer[2] = '0' + (year / 10) % 10;
|
||||
buffer[3] = '0' + year % 10;
|
||||
buffer[4] = '-';
|
||||
buffer[5] = digits[month * 2];
|
||||
buffer[6] = digits[month * 2 + 1];
|
||||
buffer[7] = '-';
|
||||
buffer[8] = digits[day * 2];
|
||||
buffer[9] = digits[day * 2 + 1];
|
||||
buffer[10] = 'T';
|
||||
buffer[11] = digits[hour * 2];
|
||||
buffer[12] = digits[hour * 2 + 1];
|
||||
buffer[13] = ':';
|
||||
buffer[14] = digits[min * 2];
|
||||
buffer[15] = digits[min * 2 + 1];
|
||||
buffer[16] = ':';
|
||||
buffer[17] = digits[sec * 2];
|
||||
buffer[18] = digits[sec * 2 + 1];
|
||||
buffer[19] = '.';
|
||||
buffer[20] = '0' + ms / 100;
|
||||
buffer[21] = '0' + (ms / 10) % 10;
|
||||
buffer[22] = '0' + ms % 10;
|
||||
buffer[23] = 'Z';
|
||||
|
||||
return {buffer, 24};
|
||||
}
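
// For orientation: fastTimestampToString(0) yields
// "1970-01-01T00:00:00.000Z". The returned string_view points into a
// thread_local buffer, so it is valid only until the next call on the same
// thread and must be copied if it needs to outlive that.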
|
||||
} // anonymous namespace
|
||||
|
||||
std::string Journal::globalLogAttributes_;
|
||||
std::shared_mutex Journal::globalLogAttributesMutex_;
|
||||
bool Journal::jsonLogsEnabled_ = false;
|
||||
thread_local Journal::JsonLogContext Journal::currentJsonLogContext_{};
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// A Sink that does nothing.
|
||||
@@ -177,186 +87,6 @@ Journal::getNullSink()
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
std::string_view
|
||||
severities::to_string(Severity severity)
|
||||
{
|
||||
using namespace std::string_view_literals;
|
||||
switch (severity)
|
||||
{
|
||||
case kDisabled:
|
||||
return "disabled"sv;
|
||||
case kTrace:
|
||||
return "trace"sv;
|
||||
case kDebug:
|
||||
return "debug"sv;
|
||||
case kInfo:
|
||||
return "info"sv;
|
||||
case kWarning:
|
||||
return "warning"sv;
|
||||
case kError:
|
||||
return "error"sv;
|
||||
case kFatal:
|
||||
return "fatal"sv;
|
||||
default:
|
||||
UNREACHABLE("Unexpected severity value!");
|
||||
}
|
||||
return ""sv;
|
||||
}
|
void
Journal::JsonLogContext::start(
    std::source_location location,
    severities::Severity severity,
    std::string_view moduleName,
    std::string_view journalAttributes) noexcept
{
    struct ThreadIdStringInitializer
    {
        std::string value;
        ThreadIdStringInitializer()
        {
            std::stringstream threadIdStream;
            threadIdStream << std::this_thread::get_id();
            value = threadIdStream.str();
        }
    };
    thread_local ThreadIdStringInitializer const threadId;

    messageOffset_ = 0;
    messageBuffer_.clear();
    jsonWriter_ = detail::SimpleJsonWriter{&messageBuffer_};

    if (!jsonLogsEnabled_)
    {
        messageBuffer_ = journalAttributes;
        return;
    }

    writer().startObject();

    if (!journalAttributes.empty())
    {
        writer().writeKey("Jnl");
        writer().writeRaw(journalAttributes);
        writer().endObject();
    }

    {
        std::shared_lock lock(globalLogAttributesMutex_);
        if (!globalLogAttributes_.empty())
        {
            writer().writeKey("Glb");
            writer().writeRaw(globalLogAttributes_);
            writer().endObject();
        }
    }

    writer().writeKey("Mtd");
    writer().startObject();

    writer().writeKey("Mdl");
    writer().writeString(moduleName);

    writer().writeKey("Fl");
    constexpr size_t FILE_NAME_KEEP_CHARS = 20;
    std::string_view fileName = location.file_name();
    std::string_view trimmedFileName = (fileName.size() > FILE_NAME_KEEP_CHARS)
        ? fileName.substr(fileName.size() - FILE_NAME_KEEP_CHARS)
        : fileName;
    writer().writeString(trimmedFileName);

    writer().writeKey("Ln");
    writer().writeUInt(location.line());

    writer().writeKey("ThId");
    writer().writeString(threadId.value);

    auto severityStr = to_string(severity);
    writer().writeKey("Lv");
    writer().writeString(severityStr);

    auto nowMs = std::chrono::duration_cast<std::chrono::milliseconds>(
                     std::chrono::system_clock::now().time_since_epoch())
                     .count();
    writer().writeKey("Tm");
    writer().writeString(fastTimestampToString(nowMs));

    writer().endObject();

    hasMessageParams_ = false;
}
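Reading off the keys written above, each structured line presumably opens with an envelope of roughly the following shape before the message itself is appended. All values below are placeholders, exact escaping is up to detail::SimpleJsonWriter, and the object is deliberately left open here because formatLog() completes it later:

    {"Jnl":{<journal attributes>},"Glb":{<global attributes>},
     "Mtd":{"Mdl":"<module>","Fl":"<last 20 chars of file name>","Ln":<line>,
            "ThId":"<thread id>","Lv":"<severity>","Tm":"2024-01-15T10:30:45.123Z"},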
void
Journal::JsonLogContext::reuseJson()
{
    messageOffset_ = messageBuffer_.size();
}

void
Journal::JsonLogContext::finish()
{
    if (messageOffset_ != 0)
    {
        messageBuffer_.erase(messageOffset_);
    }
    else
    {
        messageBuffer_.clear();
    }

    jsonWriter_ = detail::SimpleJsonWriter{&messageBuffer_};
}
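reuseJson() records the current buffer length so that finish() can truncate back to that mark instead of clearing everything, which keeps the already-serialized prefix while only the per-message suffix is rewritten. A minimal sketch of that mark-and-truncate pattern on a bare std::string, independent of the Journal types (all names below are illustrative):

    #include <string>

    // Mark-and-truncate reuse of a serialization buffer: the prefix built once
    // is kept, and only the per-message suffix is discarded.
    struct ReusableBuffer
    {
        std::string data;
        std::size_t mark = 0;

        void markPrefix() { mark = data.size(); }  // analogous to reuseJson()
        void resetToMark()                         // analogous to finish()
        {
            if (mark != 0)
                data.erase(mark);  // keep the shared prefix
            else
                data.clear();      // nothing marked, start from scratch
        }
    };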
void
Journal::initMessageContext(
    std::source_location location,
    severities::Severity severity) const
{
    currentJsonLogContext_.start(location, severity, name_, attributes_);
}

std::string&
Journal::formatLog(std::string const& message)
{
    if (!jsonLogsEnabled_)
    {
        currentJsonLogContext_.writer().buffer() += message;
        return currentJsonLogContext_.messageBuffer();
    }

    auto& writer = currentJsonLogContext_.writer();

    currentJsonLogContext_.endMessageParams();

    writer.writeKey("Msg");
    writer.writeString(message);

    writer.endObject();

    writer.finish();

    return currentJsonLogContext_.messageBuffer();
}
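formatLog() is the point where the two modes diverge: with structured logging off, the plain message is appended to the buffer that start() pre-seeded with the journal attributes; with it on, pending message parameters are closed out, the text is written under "Msg", and the object opened in start() is completed. Continuing the placeholder envelope sketched earlier, a finished structured line would presumably read roughly:

    {"Jnl":{...},"Glb":{...},"Mtd":{...},"Msg":"<log message text>"}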
void
Journal::enableStructuredJournal()
{
    jsonLogsEnabled_ = true;
}

void
Journal::disableStructuredJournal()
{
    jsonLogsEnabled_ = false;
    resetGlobalAttributes();
}

bool
Journal::isStructuredJournalEnabled()
{
    return jsonLogsEnabled_;
}
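The toggle is process-wide state, so a server would presumably flip it once during startup rather than per Journal instance, and disableStructuredJournal() also resets the global attributes so they cannot carry over into later plain-text output. A minimal usage sketch, assuming these toggles are static members (as the static jsonLogsEnabled_ above suggests), that the JLOG macro and the info() stream accessor from the existing Journal interface are available, and that the header path is as guessed below:

    #include <xrpl/beast/utility/Journal.h>  // header path is an assumption

    void
    configureLogging(beast::Journal const& j)
    {
        // Process-wide switch: subsequent log lines are emitted as JSON
        // envelopes instead of plain text.
        beast::Journal::enableStructuredJournal();

        JLOG(j.info()) << "structured logging enabled";
    }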
Journal::Sink::Sink(Severity thresh, bool console)
    : thresh_(thresh), m_console(console)
{
@@ -413,14 +143,13 @@ Journal::ScopedStream::ScopedStream(

Journal::ScopedStream::~ScopedStream()
{
    std::string s = m_ostream.str();
    std::string const& s(m_ostream.str());
    if (!s.empty())
    {
        if (s == "\n")
            s = "";

        m_sink.write(m_level, formatLog(s));
        currentJsonLogContext_.finish();
            m_sink.write(m_level, "");
        else
            m_sink.write(m_level, s);
    }
}
@@ -430,4 +159,12 @@ Journal::ScopedStream::operator<<(std::ostream& manip(std::ostream&)) const
    return m_ostream << manip;
}

//------------------------------------------------------------------------------

Journal::ScopedStream
Journal::Stream::operator<<(std::ostream& manip(std::ostream&)) const
{
    return ScopedStream(*this, manip);
}

} // namespace beast
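The manipulator overload shown above means a log statement may begin with a stream manipulator: it opens a ScopedStream exactly as a value insertion would, and the whole line is flushed through the destructor shown earlier. A hedged usage sketch (JLOG and the info() accessor are assumed from the surrounding codebase):

    #include <iomanip>

    void
    logSequence(beast::Journal const& j, unsigned long seq)
    {
        // Starting with a manipulator routes through Stream::operator<< above,
        // which constructs the ScopedStream that buffers the whole line.
        JLOG(j.info()) << std::hex << std::showbase << seq << " (sequence, hex)";
    }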
@@ -174,7 +174,7 @@ Array::append(Json::Value const& v)
            return;
        }
    }
    UNREACHABLE("Json::Array::append : invalid type");
    UNREACHABLE("Json::Array::append : invalid type"); // LCOV_EXCL_LINE
}

void
@@ -209,7 +209,7 @@ Object::set(std::string const& k, Json::Value const& v)
            return;
        }
    }
    UNREACHABLE("Json::Object::set : invalid type");
    UNREACHABLE("Json::Object::set : invalid type"); // LCOV_EXCL_LINE
}

//------------------------------------------------------------------------------
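The two Json hunks only append lcov's LCOV_EXCL_LINE marker, so the defensive UNREACHABLE branches stop being reported as uncovered lines. LCOV_EXCL_LINE is a standard lcov exclusion marker; a minimal sketch of the same pattern on an unrelated function (names are illustrative):

    #include <cassert>

    int
    toIndex(char c)
    {
        if (c >= 'a' && c <= 'z')
            return c - 'a';
        // Defensive branch that tests are not expected to reach; exclude it
        // from coverage so it does not count as an uncovered line.
        assert(false);  // LCOV_EXCL_LINE
        return -1;      // LCOV_EXCL_LINE
    }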
Some files were not shown because too many files have changed in this diff.