mirror of https://github.com/XRPLF/rippled.git, synced 2025-11-04 19:25:51 +00:00

Compare commits: 80 commits, between a1q123456/... and legleux/li
SHA1 of the 80 commits, in the order listed:

f51e2d3f30 2dd1d682ac 4cb1084c02 8d1b3b3994 b39d7a6519 b0910e359e 44e027e516 a10f42a3aa
efd4c1b95d f8b4f692f1 80a3ae6386 48d38c1e2c 553fb5be3b efa917d9f3 bd3bc917f8 ed5d6f3e22
a8e4da0b11 1dd60242de 76611c3f46 5efaf0c328 0aa23933ea 21f3c12d85 7d5ed0cd8d d9960d5ba0
91fa6b2295 76f774e22d f4f7618173 66f16469f9 1845b1c656 e192ffe964 2bf77cc8f6 5e33ca56fd
7c39c810eb a7792ebcae 83ee3788e1 ae719b86d3 dd722f8b3f 30190a5feb afb6e0e41b 5523557226
b64707f53b 0b113f371f b4c894c1ba 92281a4ede e80642fc12 640ce4988f a422855ea7 108f90586c
519d1dbc34 3d44758e5a 97bc94a7f6 34619f2504 3509de9c5f 459d0da010 8637d606a4 8456b8275e
3c88786bb0 46ba8a28fe 5ecde3cf39 620fb26823 6b6b213cf5 f61086b43c 176fd2b6e4 2df730438d
5d79bfc531 51ef35ab55 330a3215bc 85c2ceacde 70d5c624e8 8e4fda160d 072b1c442c 294e03ecf5
550f90a75e d67dcfe3c4 0fd2f715bb 807462b191 19c4226d3d d02c306f1e c46888f8f7 2ae65d2fdb
.github/actions/build-deps/action.yml (28 changed lines)
@@ -10,24 +10,40 @@ inputs:
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
build_nproc:
description: "The number of processors to use for building."
required: true
force_build:
description: 'Force building of all dependencies ("true", "false").'
required: false
default: "false"
log_verbosity:
description: "The logging verbosity."
required: false
default: "verbose"

runs:
using: composite
steps:
- name: Install Conan dependencies
shell: bash
env:
BUILD_DIR: ${{ inputs.build_dir }}
BUILD_NPROC: ${{ inputs.build_nproc }}
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
BUILD_TYPE: ${{ inputs.build_type }}
LOG_VERBOSITY: ${{ inputs.log_verbosity }}
run: |
echo 'Installing dependencies.'
mkdir -p ${{ inputs.build_dir }}
cd ${{ inputs.build_dir }}
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
conan install \
--output-folder . \
--build ${{ inputs.force_build == 'true' && '"*"' || 'missing' }} \
--options:host '&:tests=True' \
--options:host '&:xrpld=True' \
--settings:all build_type=${{ inputs.build_type }} \
--build="${BUILD_OPTION}" \
--options:host='&:tests=True' \
--options:host='&:xrpld=True' \
--settings:all build_type="${BUILD_TYPE}" \
--conf:all tools.build:jobs=${BUILD_NPROC} \
--conf:all tools.build:verbosity="${LOG_VERBOSITY}" \
--conf:all tools.compilation:verbosity="${LOG_VERBOSITY}" \
..
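For reference, the `conan install` invocation above can be reproduced outside of CI roughly as follows. This is a sketch, not the action itself: the build directory, build type and job count below are example values, and the verbosity flags are omitted.

```bash
# Rough local equivalent of the build-deps action (values are examples).
BUILD_DIR=.build
BUILD_TYPE=Release
BUILD_NPROC=$(nproc)

mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
conan install \
  --output-folder . \
  --build missing \
  --options:host='&:tests=True' \
  --options:host='&:xrpld=True' \
  --settings:all build_type="${BUILD_TYPE}" \
  --conf:all tools.build:jobs=${BUILD_NPROC} \
  ..
```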
.github/actions/build-test/action.yml (deleted; 96 lines)
@@ -1,96 +0,0 @@
# This action build and tests the binary. The Conan dependencies must have
# already been installed (see the build-deps action).
name: Build and Test
description: "Build and test the binary."

# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
build_dir:
description: "The directory where to build."
required: true
build_only:
description: 'Whether to only build or to build and test the code ("true", "false").'
required: false
default: "false"
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
default: ""
cmake_target:
description: "The CMake target to build."
required: true
codecov_token:
description: "The Codecov token to use for uploading coverage reports."
required: false
default: ""
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: true

runs:
using: composite
steps:
- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Configuring CMake.'
cmake \
-G '${{ inputs.os == 'windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
${{ inputs.cmake_args }} \
..
- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Building binary.'
cmake \
--build . \
--config ${{ inputs.build_type }} \
--parallel $(nproc) \
--target ${{ inputs.cmake_target }}
- name: Check linking
if: ${{ inputs.os == 'linux' }}
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Checking linking.'
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Verify voidstar
if: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Verifying presence of instrumentation.'
./rippled --version | grep libvoidstar
- name: Test the binary
if: ${{ inputs.build_only == 'false' }}
shell: bash
working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'windows' && inputs.build_type || '' }}
run: |
echo 'Testing binary.'
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
- name: Upload coverage report
if: ${{ inputs.cmake_target == 'coverage' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true
fail_ci_if_error: true
files: ${{ inputs.build_dir }}/coverage.xml
plugins: noop
token: ${{ inputs.codecov_token }}
verbose: true
.github/actions/print-env/action.yml (new file; 43 lines)
@@ -0,0 +1,43 @@
name: Print build environment
description: "Print environment and some tooling versions"

runs:
using: composite
steps:
- name: Check configuration (Windows)
if: ${{ runner.os == 'Windows' }}
shell: bash
run: |
echo 'Checking environment variables.'
set

echo 'Checking CMake version.'
cmake --version

echo 'Checking Conan version.'
conan --version

- name: Check configuration (Linux and macOS)
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
shell: bash
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'

echo 'Checking environment variables.'
env | sort

echo 'Checking CMake version.'
cmake --version

echo 'Checking compiler version.'
${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version

echo 'Checking Conan version.'
conan --version

echo 'Checking Ninja version.'
ninja --version

echo 'Checking nproc version.'
nproc --version
.github/actions/setup-conan/action.yml (7 changed lines)
@@ -35,9 +35,12 @@ runs:

- name: Set up Conan remote
shell: bash
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
run: |
echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"

echo 'Listing Conan remotes.'
conan remote list
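As a hedged example, the remote configured by this action can also be registered by hand. The name and URL below are the defaults used elsewhere in this change set (`xrplf` at https://conan.ripplex.io in `upload-conan-deps.yml`), not values hard-coded by the action itself.

```bash
# Manually register the Conan remote the CI uses (name/URL from upload-conan-deps.yml).
conan remote add --index 0 --force xrplf https://conan.ripplex.io
conan remote list
```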
.github/scripts/levelization/README.md (6 changed lines)
@@ -72,15 +72,15 @@ It generates many files of [results](results):
desired as described above. In a perfect repo, this file will be
empty.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../workflows/check-levelization.yml) to validate
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
that nothing changed.
- [`ordering.txt`](results/ordering.txt): A list showing relationships
between modules where there are no loops as they actually exist, as
opposed to how they are desired as described above.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../workflows/check-levelization.yml) to validate
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
that nothing changed.
- [`levelization.yml`](../../workflows/check-levelization.yml)
- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or

@@ -138,6 +138,7 @@ test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.net
xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics
.github/scripts/strategy-matrix/generate.py (10 changed lines)
@@ -74,14 +74,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue

# RHEL:
# - 9.4 using GCC 12: Debug and Unity on linux/amd64.
# - 9.6 using Clang: Release and no Unity on linux/amd64.
# - 9 using GCC 12: Debug and Unity on linux/amd64.
# - 10 using Clang: Release and no Unity on linux/amd64.
if os['distro_name'] == 'rhel':
skip = True
if os['distro_version'] == '9.4':
if os['distro_version'] == '9':
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
skip = False
elif os['distro_version'] == '9.6':
elif os['distro_version'] == '10':
if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
skip = False
if skip:
@@ -162,7 +162,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
'config_name': config_name,
'cmake_args': cmake_args,
'cmake_target': cmake_target,
'build_only': 'true' if build_only else 'false',
'build_only': build_only,
'build_type': build_type,
'os': os,
'architecture': architecture,
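The script is invoked by the reusable-strategy-matrix workflow further down with `--all` and `--config=<os>.json` flags; a local run against one of the JSON files below might look like this sketch:

```bash
cd .github/scripts/strategy-matrix
# Minimal matrix for one platform:
./generate.py --config=linux.json
# Full matrix across all configurations for that platform:
./generate.py --all --config=linux.json
```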
.github/scripts/strategy-matrix/linux.json (122 changed lines)
@@ -14,139 +14,169 @@
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "12"
"compiler_version": "12",
"image_sha": "6948666"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "13"
"compiler_version": "13",
"image_sha": "6948666"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "14"
"compiler_version": "14",
"image_sha": "6948666"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "15"
"compiler_version": "15",
"image_sha": "6948666"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "16"
"compiler_version": "16",
"image_sha": "6948666"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "17"
"compiler_version": "17",
"image_sha": "6948666"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "18"
"compiler_version": "18",
"image_sha": "6948666"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "19"
"compiler_version": "19",
"image_sha": "6948666"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "20"
"compiler_version": "20",
"image_sha": "6948666"
},
{
"distro_name": "rhel",
"distro_version": "9.4",
"distro_version": "8",
"compiler_name": "gcc",
"compiler_version": "12"
"compiler_version": "14",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "9.4",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro_name": "rhel",
"distro_version": "9.4",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro_name": "rhel",
"distro_version": "9.6",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro_name": "rhel",
"distro_version": "9.6",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro_name": "rhel",
"distro_version": "9.4",
"distro_version": "8",
"compiler_name": "clang",
"compiler_version": "any"
"compiler_version": "any",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "9.6",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "12",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "clang",
"compiler_version": "any"
"compiler_version": "any",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "10",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "10",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "10e69b4"
},
{
"distro_name": "ubuntu",
"distro_version": "jammy",
"compiler_name": "gcc",
"compiler_version": "12"
"compiler_version": "12",
"image_sha": "6948666"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "gcc",
"compiler_version": "13"
"compiler_version": "13",
"image_sha": "6948666"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "gcc",
"compiler_version": "14"
"compiler_version": "14",
"image_sha": "6948666"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "16"
"compiler_version": "16",
"image_sha": "6948666"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "17"
"compiler_version": "17",
"image_sha": "6948666"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "18"
"compiler_version": "18",
"image_sha": "6948666"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "19"
"compiler_version": "19",
"image_sha": "6948666"
}
],
"build_type": ["Debug", "Release"],
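The new `image_sha` field is combined with the distro and compiler fields to pin the CI container tag; the reusable workflows below assemble the reference with a `format()` expression equivalent to the following sketch (the values are one example entry from the matrix above):

```bash
# Example: how one matrix entry maps to a pinned container image.
DISTRO_NAME=debian; DISTRO_VERSION=bookworm
COMPILER_NAME=gcc;  COMPILER_VERSION=12
IMAGE_SHA=6948666
echo "ghcr.io/xrplf/ci/${DISTRO_NAME}-${DISTRO_VERSION}:${COMPILER_NAME}-${COMPILER_VERSION}-sha-${IMAGE_SHA}"
# -> ghcr.io/xrplf/ci/debian-bookworm:gcc-12-sha-6948666
```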
.github/scripts/strategy-matrix/macos.json (3 changed lines)
@@ -10,7 +10,8 @@
"distro_name": "macos",
"distro_version": "",
"compiler_name": "",
"compiler_version": ""
"compiler_version": "",
"image_sha": ""
}
],
"build_type": ["Debug", "Release"],
.github/scripts/strategy-matrix/windows.json (3 changed lines)
@@ -10,7 +10,8 @@
"distro_name": "windows",
"distro_version": "",
"compiler_name": "",
"compiler_version": ""
"compiler_version": "",
"image_sha": ""
}
],
"build_type": ["Debug", "Release"],
.github/workflows/build-test.yml (deleted; 147 lines)
@@ -1,147 +0,0 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: false
type: string
default: ".build"
dependencies_force_build:
description: "Force building of all dependencies."
required: false
type: boolean
default: false
dependencies_force_upload:
description: "Force uploading of all dependencies."
required: false
type: boolean
default: false
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: true
type: string
strategy_matrix:
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
required: false
type: string
default: "minimal"
secrets:
codecov_token:
description: "The Codecov token to use for uploading coverage reports."
required: false

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.os }}
cancel-in-progress: true

defaults:
run:
shell: bash

jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
os: ${{ inputs.os }}
strategy_matrix: ${{ inputs.strategy_matrix }}

# Build and test the binary.
build-test:
needs:
- generate-matrix
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
runs-on: ${{ matrix.architecture.runner }}
container: ${{ inputs.os == 'linux' && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-5dd7158', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
steps:
- name: Check strategy matrix
run: |
echo 'Operating system distro name: ${{ matrix.os.distro_name }}'
echo 'Operating system distro version: ${{ matrix.os.distro_version }}'
echo 'Operating system compiler name: ${{ matrix.os.compiler_name }}'
echo 'Operating system compiler version: ${{ matrix.os.compiler_version }}'
echo 'Architecture platform: ${{ matrix.architecture.platform }}'
echo 'Architecture runner: ${{ toJson(matrix.architecture.runner) }}'
echo 'Build type: ${{ matrix.build_type }}'
echo 'Build only: ${{ matrix.build_only }}'
echo 'CMake arguments: ${{ matrix.cmake_args }}'
echo 'CMake target: ${{ matrix.cmake_target }}'
echo 'Config name: ${{ matrix.config_name }}'

- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
with:
disable_ccache: false

- name: Check configuration (Windows)
if: ${{ inputs.os == 'windows' }}
run: |
echo 'Checking environment variables.'
set

echo 'Checking CMake version.'
cmake --version

echo 'Checking Conan version.'
conan --version
- name: Check configuration (Linux and MacOS)
if: ${{ inputs.os == 'linux' || inputs.os == 'macos' }}
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'

echo 'Checking environment variables.'
env | sort

echo 'Checking CMake version.'
cmake --version

echo 'Checking compiler version.'
${{ inputs.os == 'linux' && '${CC}' || 'clang' }} --version

echo 'Checking Conan version.'
conan --version

echo 'Checking Ninja version.'
ninja --version

echo 'Checking nproc version.'
nproc --version

- name: Setup Conan
uses: ./.github/actions/setup-conan

- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
force_build: ${{ inputs.dependencies_force_build }}

- name: Build and test binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_only: ${{ matrix.build_only }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_target: ${{ matrix.cmake_target }}
codecov_token: ${{ secrets.codecov_token }}
os: ${{ inputs.os }}
.github/workflows/on-pr.yml (80 changed lines)
@@ -1,10 +1,13 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as on MacOS and Windows, on every push to a
# This workflow runs all workflows to check, build, package and test the project on
# various Linux flavors, as well as on macOS and Windows, on every push to a
# user branch. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label.
name: PR

on:
push:
branches:
- legleux/linux_packages
merge_group:
types:
- checks_requested
@@ -50,8 +53,8 @@ jobs:
files: |
# These paths are unique to `on-pr.yml`.
.github/scripts/levelization/**
.github/workflows/check-levelization.yml
.github/workflows/notify-clio.yml
.github/workflows/reusable-check-levelization.yml
.github/workflows/reusable-notify-clio.yml
.github/workflows/on-pr.yml

# Keep the paths below in sync with those in `on-trigger.yml`.
@@ -59,13 +62,18 @@ jobs:
.github/actions/build-test/**
.github/actions/setup-conan/**
.github/scripts/strategy-matrix/**
.github/workflows/build-test.yml
.github/workflows/reusable-build.yml
.github/workflows/reusable-build-test-config.yml
.github/workflows/reusable-build-test.yml
.github/workflows/reusable-build-pkg.yml
.github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml
.codecov.yml
cmake/**
conan/**
external/**
include/**
pkgs/**
src/**
tests/**
CMakeLists.txt
@@ -90,39 +98,57 @@ jobs:
outputs:
go: ${{ steps.go.outputs.go == 'true' }}

check-levelization:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/check-levelization.yml
# check-levelization:
#   needs: should-run
#   if: ${{ needs.should-run.outputs.go == 'true' }}
#   uses: ./.github/workflows/reusable-check-levelization.yml

build-test:
# build-test:
#   needs: should-run
#   if: ${{ needs.should-run.outputs.go == 'true' }}
#   uses: ./.github/workflows/reusable-build-test.yml
#   strategy:
#     matrix:
#       os: [linux, macos, windows]
#   with:
#     os: ${{ matrix.os }}
#   secrets:
#     CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

build-package:
name: Build ${{ matrix.pkg_type }}-${{ matrix.arch }} packages
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/build-test.yml
uses: ./.github/workflows/reusable-build-pkg.yml
secrets: inherit
strategy:
fail-fast: false
matrix:
os: [linux, macos, windows]
# pkg_type: [rpm]
pkg_type: [deb, rpm]
arch: [amd64]
# arch: [amd64, arm64]
with:
os: ${{ matrix.os }}
secrets:
codecov_token: ${{ secrets.CODECOV_TOKEN }}
pkg_type: ${{ matrix.pkg_type }}
arch: ${{ matrix.arch }}

notify-clio:
needs:
- should-run
- build-test
if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
uses: ./.github/workflows/notify-clio.yml
secrets:
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
# notify-clio:
#   needs:
#     - should-run
#     - build-test
#   if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
#   uses: ./.github/workflows/reusable-notify-clio.yml
#   secrets:
#     clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
#     conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
#     conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

passed:
if: failure() || cancelled()
needs:
- build-test
- check-levelization
# - build-test
# - check-levelization
- build-package
runs-on: ubuntu-latest
steps:
- name: Fail
.github/workflows/on-trigger.yml (74 changed lines)
@@ -9,12 +9,13 @@ name: Trigger
on:
push:
branches:
- develop
- release
- master
- "develop"
- "release*"
- "master"
- "legleux/linux_packages"
paths:
# These paths are unique to `on-trigger.yml`.
- ".github/workflows/check-missing-commits.yml"
- ".github/workflows/reusable-check-missing-commits.yml"
- ".github/workflows/on-trigger.yml"
- ".github/workflows/publish-docs.yml"

@@ -23,13 +24,18 @@ on:
- ".github/actions/build-test/**"
- ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**"
- ".github/workflows/build-test.yml"
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-build-pkg.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".codecov.yml"
- "cmake/**"
- "conan/**"
- "external/**"
- "include/**"
- "pkgs/**"
- "src/**"
- "tests/**"
- "CMakeLists.txt"
@@ -43,25 +49,16 @@ on:
schedule:
- cron: "32 6 * * 1-5"

# Run when manually triggered via the GitHub UI or API. If `force_upload` is
# true, then the dependencies that were missing (`force_rebuild` is false) or
# rebuilt (`force_rebuild` is true) will be uploaded, overwriting existing
# dependencies if needed.
# Run when manually triggered via the GitHub UI or API.
workflow_dispatch:
inputs:
dependencies_force_build:
description: "Force building of all dependencies."
required: false
type: boolean
default: false
dependencies_force_upload:
description: "Force uploading of all dependencies."
required: false
type: boolean
default: false

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
# When a PR is merged into the develop branch it will be assigned a unique
# group identifier, so execution will continue even if another PR is merged
# while it is still running. In all other cases the group identifier is shared
# per branch, so that any in-progress runs are cancelled when a new commit is
# pushed.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
cancel-in-progress: true

defaults:
@@ -69,17 +66,32 @@ defaults:
shell: bash

jobs:
check-missing-commits:
if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
uses: ./.github/workflows/check-missing-commits.yml
# check-missing-commits:
#   if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
#   uses: ./.github/workflows/reusable-check-missing-commits.yml

build-test:
uses: ./.github/workflows/build-test.yml
# build-test:
#   uses: ./.github/workflows/reusable-build-test.yml
#   strategy:
#     matrix:
#       os: [linux, macos, windows]
#   with:
#     os: ${{ matrix.os }}
#     strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
#   secrets:
#     CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

build-package:
name: Build ${{ matrix.pkg_type }}-${{ matrix.arch }} packages
uses: ./.github/workflows/reusable-build-pkg.yml
secrets: inherit
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
os: [linux, macos, windows]
# pkg_type: [rpm]
pkg_type: [deb, rpm]
arch: [amd64]
# arch: [amd64, arm64]
with:
os: ${{ matrix.os }}
strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
secrets:
codecov_token: ${{ secrets.CODECOV_TOKEN }}
pkg_type: ${{ matrix.pkg_type }}
arch: ${{ matrix.arch }}
.github/workflows/pre-commit.yml (4 changed lines)
@@ -9,7 +9,7 @@ on:
jobs:
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@af1b0f0d764cda2e5435f5ac97b240d4bd4d95d3
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
with:
runs_on: ubuntu-latest
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-d1496b8" }'
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
.github/workflows/publish-docs.yml (20 changed lines)
@@ -23,16 +23,24 @@ defaults:

env:
BUILD_DIR: .build
NPROC_SUBTRACT: 2

jobs:
publish:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-d1496b8
container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
permissions:
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ env.NPROC_SUBTRACT }}

- name: Check configuration
run: |
echo 'Checking path.'
@@ -46,12 +54,16 @@ jobs:

echo 'Checking Doxygen version.'
doxygen --version

- name: Build documentation
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
run: |
mkdir -p ${{ env.BUILD_DIR }}
cd ${{ env.BUILD_DIR }}
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel $(nproc)
cmake --build . --target docs --parallel ${BUILD_NPROC}

- name: Publish documentation
if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
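The documentation build in this workflow can be reproduced locally with the same CMake target; `$(nproc)` below is a stand-in for the job count the workflow computes:

```bash
mkdir -p .build && cd .build
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel $(nproc)
```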
.github/workflows/reusable-build-pkg.yml (new file; 32 lines)
@@ -0,0 +1,32 @@
on:
workflow_call:
inputs:
pkg_type:
required: false
type: string
arch:
required: false
type: string

defaults:
run:
shell: bash
jobs:
build:
name: Build ${{ inputs.pkg_type }}-${{ inputs.arch }} package
runs-on: heavy${{ inputs.arch == 'arm64' && '-arm64' || '' }}
container: ghcr.io/xrplf/ci/${{ inputs.pkg_type == 'rpm' && 'rhel-9' || 'debian-bookworm' }}:gcc-12
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

- name: Build packages
run: |
./pkgs/build.sh
cat build_vars >> $GITHUB_STEP_SUMMARY

- uses: actions/upload-artifact@v4
with:
name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
path: "*${{ inputs.arch }}.${{ inputs.pkg_type }}"
if-no-files-found: error
.github/workflows/reusable-build-test-config.yml (new file; 77 lines)
@@ -0,0 +1,77 @@
name: Build and test configuration

on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: true
type: string
build_only:
description: 'Whether to only build or to build and test the code ("true", "false").'
required: true
type: boolean
build_type:
description: 'The build type to use ("Debug", "Release").'
type: string
required: true
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
type: string
default: ""
cmake_target:
description: "The CMake target to build."
type: string
required: true

runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string

config_name:
description: "The configuration string (used for naming artifacts and such)."
required: true
type: string

nproc_subtract:
description: "The number of processors to subtract when calculating parallelism."
required: false
type: number
default: 2

secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true

jobs:
build:
uses: ./.github/workflows/reusable-build.yml
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ inputs.build_type }}
cmake_args: ${{ inputs.cmake_args }}
cmake_target: ${{ inputs.cmake_target }}
runs_on: ${{ inputs.runs_on }}
image: ${{ inputs.image }}
config_name: ${{ inputs.config_name }}
nproc_subtract: ${{ inputs.nproc_subtract }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

test:
needs: build
uses: ./.github/workflows/reusable-test.yml
with:
run_tests: ${{ !inputs.build_only }}
verify_voidstar: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
runs_on: ${{ inputs.runs_on }}
image: ${{ inputs.image }}
config_name: ${{ inputs.config_name }}
nproc_subtract: ${{ inputs.nproc_subtract }}
.github/workflows/reusable-build-test.yml (new file; 58 lines)
@@ -0,0 +1,58 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: false
type: string
default: ".build"
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: true
type: string
strategy_matrix:
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
required: false
type: string
default: "minimal"
secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true

jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
os: ${{ inputs.os }}
strategy_matrix: ${{ inputs.strategy_matrix }}

# Build and test the binary for each configuration.
build-test-config:
needs:
- generate-matrix
uses: ./.github/workflows/reusable-build-test-config.yml
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
with:
build_dir: ${{ inputs.build_dir }}
build_only: ${{ matrix.build_only }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_target: ${{ matrix.cmake_target }}
runs_on: ${{ toJSON(matrix.architecture.runner) }}
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
config_name: ${{ matrix.config_name }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/reusable-build.yml (new file; 154 lines)
@@ -0,0 +1,154 @@
name: Build rippled

on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: true
type: string
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
type: string
cmake_args:
description: "Additional arguments to pass to CMake."
required: true
type: string
cmake_target:
description: "The CMake target to build."
required: true
type: string

runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string

config_name:
description: "The name of the configuration."
required: true
type: string

nproc_subtract:
description: "The number of processors to subtract when calculating parallelism."
required: true
type: number

secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true

defaults:
run:
shell: bash

jobs:
build:
name: Build ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: 60
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
with:
disable_ccache: false

- name: Print build environment
uses: ./.github/actions/print-env

- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ inputs.nproc_subtract }}

- name: Setup Conan
uses: ./.github/actions/setup-conan

- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ inputs.build_dir }}
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ inputs.build_type }}
# Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}

- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_ARGS: ${{ inputs.cmake_args }}
run: |
cmake \
-G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
${CMAKE_ARGS} \
..

- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_TARGET: ${{ inputs.cmake_target }}
run: |
cmake \
--build . \
--config "${BUILD_TYPE}" \
--parallel ${BUILD_NPROC} \
--target "${CMAKE_TARGET}"

- name: Put built binaries in one location
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_TYPE_DIR: ${{ runner.os == 'Windows' && inputs.build_type || '' }}
CMAKE_TARGET: ${{ inputs.cmake_target }}
run: |
mkdir -p ./binaries/doctest/

cp ./${BUILD_TYPE_DIR}/rippled* ./binaries/
if [ "${CMAKE_TARGET}" != 'coverage' ]; then
cp ./src/tests/libxrpl/${BUILD_TYPE_DIR}/xrpl.test.* ./binaries/doctest/
fi

- name: Upload rippled artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
env:
BUILD_DIR: ${{ inputs.build_dir }}
with:
name: rippled-${{ inputs.config_name }}
path: ${{ env.BUILD_DIR }}/binaries/
retention-days: 3
if-no-files-found: error

- name: Upload coverage report
if: ${{ github.repository_owner == 'XRPLF' && inputs.cmake_target == 'coverage' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true
fail_ci_if_error: true
files: ${{ inputs.build_dir }}/coverage.xml
plugins: noop
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
@@ -46,41 +46,46 @@
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Generate outputs
id: generate
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
echo 'Generating user and channel.'
echo "user=clio" >> "${GITHUB_OUTPUT}"
echo "channel=pr_${{ github.event.pull_request.number }}" >> "${GITHUB_OUTPUT}"
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
echo 'Extracting version.'
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
- name: Calculate conan reference
id: conan_ref
run: |
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"

- name: Set up Conan
uses: ./.github/actions/setup-conan
with:
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}

- name: Log into Conan remote
run: conan remote login ${{ inputs.conan_remote_name }} "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
- name: Upload package
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: |
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.conan_ref.outputs.conan_ref }}
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
outputs:
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}

notify:
needs: upload
runs-on: ubuntu-latest
env:
GH_TOKEN: ${{ secrets.clio_notify_token }}
steps:
- name: Notify Clio
env:
GH_TOKEN: ${{ secrets.clio_notify_token }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
-F "client_payload[pr_url]=${{ github.event.pull_request.html_url }}"
-F "client_payload[pr_url]=${PR_URL}"
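The Conan reference assembled in this hunk has the shape `xrpl/<version>@clio/pr_<number>`. A sketch of the export-and-upload sequence follows; the version, PR number and remote name below are placeholders for illustration, not values taken from the workflow.

```bash
# Hypothetical values for illustration only.
VERSION=2.6.0
PR_NUMBER=1234
CONAN_REF="${VERSION}@clio/pr_${PR_NUMBER}"

conan export --user=clio --channel="pr_${PR_NUMBER}" .
conan upload --confirm --check --remote=xrplf "xrpl/${CONAN_REF}"
```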
@@ -35,4 +35,7 @@ jobs:
- name: Generate strategy matrix
working-directory: .github/scripts/strategy-matrix
id: generate
run: ./generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }} >> "${GITHUB_OUTPUT}"
env:
GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
.github/workflows/reusable-test.yml (new file; 111 lines)
@@ -0,0 +1,111 @@
name: Test rippled

on:
workflow_call:
inputs:
verify_voidstar:
description: "Whether to verify the presence of voidstar instrumentation."
required: true
type: boolean
run_tests:
description: "Whether to run unit tests"
required: true
type: boolean

runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string

config_name:
description: "The name of the configuration."
required: true
type: string

nproc_subtract:
description: "The number of processors to subtract when calculating parallelism."
required: true
type: number

jobs:
test:
name: Test ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: 30
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ inputs.nproc_subtract }}

- name: Download rippled artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: rippled-${{ inputs.config_name }}

- name: Make binary executable (Linux and macOS)
shell: bash
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
run: |
chmod +x ./rippled

- name: Check linking (Linux)
if: ${{ runner.os == 'Linux' }}
shell: bash
run: |
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi

- name: Verifying presence of instrumentation
if: ${{ inputs.verify_voidstar }}
shell: bash
run: |
./rippled --version | grep libvoidstar

- name: Run the embedded tests
if: ${{ inputs.run_tests }}
shell: bash
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
run: |
./rippled --unittest --unittest-jobs ${BUILD_NPROC}

- name: Run the separate tests
if: ${{ inputs.run_tests }}
env:
EXT: ${{ runner.os == 'Windows' && '.exe' || '' }}
shell: bash
run: |
for test_file in ./doctest/*${EXT}; do
echo "Executing $test_file"
chmod +x "$test_file"
if [[ "${{ runner.os }}" == "Windows" && "$test_file" == "./doctest/xrpl.test.net.exe" ]]; then
echo "Skipping $test_file on Windows"
else
"$test_file"
fi
done

- name: Debug failure (Linux)
if: ${{ failure() && runner.os == 'Linux' && inputs.run_tests }}
shell: bash
run: |
echo "IPv4 local port range:"
cat /proc/sys/net/ipv4/ip_local_port_range
echo "Netstat:"
netstat -an
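Outside of CI, the same checks can be run by hand against a downloaded `rippled-<config>` artifact; this is a sketch, with `$(nproc)` standing in for the computed job count and the doctest binaries assumed to sit in `./doctest/` as in the workflow:

```bash
chmod +x ./rippled
ldd ./rippled   # on Linux: no libstdc++/libgcc entries means the binary is statically linked
./rippled --unittest --unittest-jobs $(nproc)
for test_file in ./doctest/*; do chmod +x "$test_file"; "$test_file"; done
```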
.github/workflows/upload-conan-deps.yml (38 changed lines)
@@ -24,30 +24,30 @@ on:
branches: [develop]
paths:
- .github/workflows/upload-conan-deps.yml

- .github/workflows/reusable-strategy-matrix.yml

- .github/actions/build-deps/action.yml
- .github/actions/setup-conan/action.yml
- ".github/scripts/strategy-matrix/**"

- conanfile.py
- conan.lock

env:
CONAN_REMOTE_NAME: xrplf
CONAN_REMOTE_URL: https://conan.ripplex.io
NPROC_SUBTRACT: 2

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}

# Build and upload the dependencies for each configuration.
run-upload-conan-deps:
needs:
- generate-matrix
@@ -56,19 +56,29 @@ jobs:
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
runs-on: ${{ matrix.architecture.runner }}
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-5dd7158', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}

container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
with:
disable_ccache: false

- name: Print build environment
uses: ./.github/actions/print-env

- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ env.NPROC_SUBTRACT }}

- name: Setup Conan
uses: ./.github/actions/setup-conan
with:
@@ -79,13 +89,19 @@ jobs:
uses: ./.github/actions/build-deps
with:
build_dir: .build
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ matrix.build_type }}
force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
# Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}

- name: Log into Conan remote
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' }}
run: conan remote login ${{ env.CONAN_REMOTE_NAME }} "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"

- name: Upload Conan packages
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule' }}
run: conan upload "*" -r=${{ env.CONAN_REMOTE_NAME }} --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
env:
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}
22 BUILD.md
@@ -39,17 +39,12 @@ found here](./docs/build/environment.md).

- [Python 3.11](https://www.python.org/downloads/), or higher
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
- [CMake 3.22](https://cmake.org/download/)[^2], or higher
- [CMake 3.22](https://cmake.org/download/), or higher

[^1]:
    It is possible to build with Conan 1.60+, but the instructions are
    significantly different, which is why we are not recommending it.

[^2]:
    CMake 4 is not yet supported by all dependencies required by this project.
    If you are affected by this issue, follow [conan workaround for cmake
    4](#workaround-for-cmake-4)

`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

@@ -282,21 +277,6 @@ sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
```

#### Workaround for CMake 4

If your system CMake is version 4 rather than 3, you may have to configure Conan
profile to use CMake version 3 for dependencies, by adding the following two
lines to your profile:

```text
[tool_requires]
!cmake/*: cmake/[>=3 <4]
```

This will force Conan to download and use a locally cached CMake 3 version, and
is needed because some of the dependencies used by this project do not support
CMake 4.

#### Clang workaround for grpc

If your compiler is clang, version 19 or later, or apple-clang, version 17 or
@@ -975,6 +975,47 @@
|
||||
# number of ledger records online. Must be greater
|
||||
# than or equal to ledger_history.
|
||||
#
|
||||
# Optional keys for NuDB only:
|
||||
#
|
||||
# nudb_block_size EXPERIMENTAL: Block size in bytes for NuDB storage.
|
||||
# Must be a power of 2 between 4096 and 32768. Default is 4096.
|
||||
#
|
||||
# This parameter controls the fundamental storage unit
|
||||
# size for NuDB's internal data structures. The choice
|
||||
# of block size can significantly impact performance
|
||||
# depending on your storage hardware and filesystem:
|
||||
#
|
||||
# - 4096 bytes: Optimal for most standard SSDs and
|
||||
# traditional filesystems (ext4, NTFS, HFS+).
|
||||
# Provides good balance of performance and storage
|
||||
# efficiency. Recommended for most deployments.
|
||||
# Minimizes memory footprint and provides consistent
|
||||
# low-latency access patterns across diverse hardware.
|
||||
#
|
||||
# - 8192-16384 bytes: May improve performance on
|
||||
# high-end NVMe SSDs and copy-on-write filesystems
|
||||
# like ZFS or Btrfs that benefit from larger block
|
||||
# alignment. Can reduce metadata overhead for large
|
||||
# databases. Offers better sequential throughput and
|
||||
# reduced I/O operations at the cost of higher memory
|
||||
# usage per operation.
|
||||
#
|
||||
# - 32768 bytes (32K): Maximum supported block size
|
||||
# for high-performance scenarios with very fast
|
||||
# storage. May increase memory usage and reduce
|
||||
# efficiency for smaller databases. Best suited for
|
||||
# enterprise environments with abundant RAM.
|
||||
#
|
||||
# Performance testing is recommended before deploying
|
||||
# any non-default block size in production environments.
|
||||
#
|
||||
# Note: This setting cannot be changed after database
|
||||
# creation without rebuilding the entire database.
|
||||
# Choose carefully based on your hardware and expected
|
||||
# database size.
|
||||
#
|
||||
# Example: nudb_block_size=4096
|
||||
#
|
||||
# These keys modify the behavior of online_delete, and thus are only
|
||||
# relevant if online_delete is defined and non-zero:
|
||||
#
|
||||
@@ -1471,6 +1512,7 @@ secure_gateway = 127.0.0.1
|
||||
[node_db]
|
||||
type=NuDB
|
||||
path=/var/lib/rippled/db/nudb
|
||||
nudb_block_size=4096
|
||||
online_delete=512
|
||||
advisory_delete=0
|
||||
|
||||
|
||||
@@ -16,13 +16,16 @@ set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
target_compile_definitions (common
|
||||
INTERFACE
|
||||
$<$<CONFIG:Debug>:DEBUG _DEBUG>
|
||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
|
||||
# ^^^^ NOTE: CMAKE release builds already have NDEBUG
|
||||
# defined, so no need to add it explicitly except for
|
||||
# this special case of (profile ON) and (assert OFF)
|
||||
# -- presumably this is because we don't want profile
|
||||
# builds asserting unless asserts were specifically
|
||||
# requested
|
||||
#[===[
|
||||
NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
|
||||
explicitly except for the special case of (profile ON) and (assert OFF).
|
||||
Presumably this is because we don't want profile builds asserting unless
|
||||
asserts were specifically requested.
|
||||
]===]
|
||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
|
||||
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
|
||||
OPENSSL_SUPPRESS_DEPRECATED
|
||||
)
|
||||
|
||||
if (MSVC)
|
||||
# remove existing exception flag since we set it to -EHa
|
||||
|
||||
@@ -38,7 +38,7 @@ install(CODE "
|
||||
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
||||
include(create_symbolic_link)
|
||||
create_symbolic_link(xrpl \
|
||||
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
|
||||
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
|
||||
")
|
||||
|
||||
install (EXPORT RippleExports
|
||||
@@ -72,7 +72,7 @@ if (is_root_project AND TARGET rippled)
|
||||
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
||||
include(create_symbolic_link)
|
||||
create_symbolic_link(rippled${suffix} \
|
||||
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
|
||||
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
|
||||
")
|
||||
endif ()
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
option (validator_keys "Enables building of validator-keys-tool as a separate target (imported via FetchContent)" OFF)
|
||||
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
|
||||
|
||||
if (validator_keys)
|
||||
git_branch (current_branch)
|
||||
@@ -6,17 +6,15 @@ if (validator_keys)
|
||||
if (NOT (current_branch STREQUAL "release"))
|
||||
set (current_branch "master")
|
||||
endif ()
|
||||
message (STATUS "tracking ValidatorKeys branch: ${current_branch}")
|
||||
message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")
|
||||
|
||||
FetchContent_Declare (
|
||||
validator_keys_src
|
||||
validator_keys
|
||||
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
|
||||
GIT_TAG "${current_branch}"
|
||||
)
|
||||
FetchContent_GetProperties (validator_keys_src)
|
||||
if (NOT validator_keys_src_POPULATED)
|
||||
message (STATUS "Pausing to download ValidatorKeys...")
|
||||
FetchContent_Populate (validator_keys_src)
|
||||
endif ()
|
||||
add_subdirectory (${validator_keys_src_SOURCE_DIR} ${CMAKE_BINARY_DIR}/validator-keys)
|
||||
FetchContent_MakeAvailable(validator_keys)
|
||||
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
|
||||
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
|
||||
|
||||
endif ()
|
||||
|
||||
@@ -7,7 +7,7 @@ function(xrpl_add_test name)
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
|
||||
)
|
||||
add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})
|
||||
add_executable(${target} ${ARGN} ${sources})
|
||||
|
||||
isolate_headers(
|
||||
${target}
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"rocksdb/10.0.1#85537f46e538974d67da0c3977de48ac%1756234304.347",
|
||||
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
|
||||
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
|
||||
"openssl/1.1.1w#a8f0792d7c5121b954578a7149d23e03%1756223730.729",
|
||||
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1759746684.671",
|
||||
"nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
|
||||
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
|
||||
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
# Global configuration for Conan. This is used to set the number of parallel
|
||||
# downloads, uploads, and build jobs. The verbosity is set to verbose to
|
||||
# provide more information during the build process.
|
||||
# downloads and uploads.
|
||||
core:non_interactive=True
|
||||
core.download:parallel={{ os.cpu_count() }}
|
||||
core.upload:parallel={{ os.cpu_count() }}
|
||||
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
|
||||
tools.build:verbosity=verbose
|
||||
tools.compilation:verbosity=verbose
|
||||
|
||||
@@ -21,14 +21,11 @@ compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
|
||||
|
||||
[conf]
|
||||
{% if compiler == "clang" and compiler_version >= 19 %}
|
||||
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
{% endif %}
|
||||
{% if compiler == "apple-clang" and compiler_version >= 17 %}
|
||||
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
{% endif %}
|
||||
{% if compiler == "gcc" and compiler_version < 13 %}
|
||||
tools.build:cxxflags=['-Wno-restrict']
|
||||
tools.build:cxxflags+=['-Wno-restrict']
|
||||
{% endif %}
|
||||
|
||||
[tool_requires]
|
||||
!cmake/*: cmake/[>=3 <4]
|
||||
|
||||
@@ -27,7 +27,7 @@ class Xrpl(ConanFile):
|
||||
'grpc/1.50.1',
|
||||
'libarchive/3.8.1',
|
||||
'nudb/2.0.9',
|
||||
'openssl/1.1.1w',
|
||||
'openssl/3.5.4',
|
||||
'soci/4.0.3',
|
||||
'zlib/1.3.1',
|
||||
]
|
||||
|
||||
@@ -654,12 +654,14 @@ SharedWeakUnion<T>::convertToWeak()
|
||||
break;
|
||||
case destroy:
|
||||
// We just added a weak ref. How could we destroy?
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::SharedWeakUnion::convertToWeak : destroying freshly "
|
||||
"added ref");
|
||||
delete p;
|
||||
unsafeSetRawPtr(nullptr);
|
||||
return true; // Should never happen
|
||||
// LCOV_EXCL_STOP
|
||||
case partialDestroy:
|
||||
// This is a weird case. We just converted the last strong
|
||||
// pointer to a weak pointer.
|
||||
|
||||
@@ -94,7 +94,11 @@ hash_append(Hasher& h, beast::IP::Address const& addr) noexcept
|
||||
else if (addr.is_v6())
|
||||
hash_append(h, addr.to_v6().to_bytes());
|
||||
else
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE("beast::hash_append : invalid address type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
} // namespace beast
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ public:
|
||||
* without formatting (not human friendly).
|
||||
*
|
||||
* The JSON document is written in a single line. It is not intended for 'human'
|
||||
* consumption, but may be useful to support feature such as RPC where bandwith
|
||||
* consumption, but may be useful to support feature such as RPC where bandwidth
|
||||
* is limited. \sa Reader, Value
|
||||
*/
|
||||
|
||||
|
||||
@@ -284,12 +284,14 @@ public:
|
||||
{
|
||||
if (key.type != ltOFFER)
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::ApplyView::dirAppend : only Offers are appended to "
|
||||
"book directories");
|
||||
// Only Offers are appended to book directories. Call dirInsert()
|
||||
// instead
|
||||
return std::nullopt;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
return dirAdd(true, directory, key.key, describe);
|
||||
}
|
||||
|
||||
@@ -188,14 +188,14 @@ enum LedgerSpecificFlags {
|
||||
lsfMPTCanTransfer = 0x00000020,
|
||||
lsfMPTCanClawback = 0x00000040,
|
||||
|
||||
lmfMPTCanMutateCanLock = 0x00000002,
|
||||
lmfMPTCanMutateRequireAuth = 0x00000004,
|
||||
lmfMPTCanMutateCanEscrow = 0x00000008,
|
||||
lmfMPTCanMutateCanTrade = 0x00000010,
|
||||
lmfMPTCanMutateCanTransfer = 0x00000020,
|
||||
lmfMPTCanMutateCanClawback = 0x00000040,
|
||||
lmfMPTCanMutateMetadata = 0x00010000,
|
||||
lmfMPTCanMutateTransferFee = 0x00020000,
|
||||
lsmfMPTCanMutateCanLock = 0x00000002,
|
||||
lsmfMPTCanMutateRequireAuth = 0x00000004,
|
||||
lsmfMPTCanMutateCanEscrow = 0x00000008,
|
||||
lsmfMPTCanMutateCanTrade = 0x00000010,
|
||||
lsmfMPTCanMutateCanTransfer = 0x00000020,
|
||||
lsmfMPTCanMutateCanClawback = 0x00000040,
|
||||
lsmfMPTCanMutateMetadata = 0x00010000,
|
||||
lsmfMPTCanMutateTransferFee = 0x00020000,
|
||||
|
||||
// ltMPTOKEN
|
||||
lsfMPTAuthorized = 0x00000002,
|
||||
|
||||
@@ -86,6 +86,9 @@ public:
    std::optional<TxType>
    getGranularTxType(GranularPermissionType const& gpType) const;

    std::optional<std::reference_wrapper<uint256 const>> const
    getTxFeature(TxType txType) const;

    bool
    isDelegatable(std::uint32_t const& permissionValue, Rules const& rules)
        const;
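The new getTxFeature accessor returns std::optional<std::reference_wrapper<uint256 const>>: either a non-owning reference to the amendment a transaction type is gated on, or nothing. A minimal sketch, assuming that reading of the signature, of how such a return type can be produced and consumed; the Feature alias, the lookup function, and the transaction-type value below are illustrative stand-ins, not rippled code.

```cpp
// Hedged sketch (not from the diff): producing and consuming an
// std::optional<std::reference_wrapper<T const>> like the one declared above.
#include <array>
#include <cstdint>
#include <functional>
#include <iostream>
#include <optional>

using Feature = std::array<std::uint8_t, 32>;  // stand-in for ripple::uint256

std::optional<std::reference_wrapper<Feature const>>
lookupFeature(int txType)
{
    static Feature const singleAssetVault{};  // hypothetical table entry
    if (txType == 65)                          // e.g. a vault transaction type
        return std::cref(singleAssetVault);
    return std::nullopt;                       // type is not amendment-gated
}

int main()
{
    if (auto const f = lookupFeature(65))
        std::cout << "gated on the feature stored at " << &f->get() << '\n';
    else
        std::cout << "not gated on any feature\n";
}
```

Returning std::cref avoids copying the 256-bit value while still letting "no feature" be spelled as std::nullopt.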
@@ -55,7 +55,10 @@ std::size_t constexpr oversizeMetaDataCap = 5200;
|
||||
/** The maximum number of entries per directory page */
|
||||
std::size_t constexpr dirNodeMaxEntries = 32;
|
||||
|
||||
/** The maximum number of pages allowed in a directory */
|
||||
/** The maximum number of pages allowed in a directory
|
||||
|
||||
Made obsolete by fixDirectoryLimit amendment.
|
||||
*/
|
||||
std::uint64_t constexpr dirNodeMaxPages = 262144;
|
||||
|
||||
/** The maximum number of items in an NFT page */
|
||||
|
||||
@@ -21,6 +21,7 @@
#define RIPPLE_PROTOCOL_PUBLICKEY_H_INCLUDED

#include <xrpl/basics/Slice.h>
#include <xrpl/beast/net/IPEndpoint.h>
#include <xrpl/protocol/KeyType.h>
#include <xrpl/protocol/STExchange.h>
#include <xrpl/protocol/UintTypes.h>
@@ -264,6 +265,24 @@ calcNodeID(PublicKey const&);
AccountID
calcAccountID(PublicKey const& pk);

inline std::string
getFingerprint(
    beast::IP::Endpoint const& address,
    std::optional<PublicKey> const& publicKey = std::nullopt,
    std::optional<std::string> const& id = std::nullopt)
{
    std::stringstream ss;
    ss << "IP Address: " << address;
    if (publicKey.has_value())
    {
        ss << ", Public Key: " << toBase58(TokenType::NodePublic, *publicKey);
    }
    if (id.has_value())
    {
        ss << ", Id: " << id.value();
    }
    return ss.str();
}
} // namespace ripple
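Since getFingerprint is defined inline in the header, callers can build the string directly. A small self-contained sketch of the output format it produces; the endpoint and key are plain strings here because beast::IP::Endpoint and toBase58 are not reproduced in this excerpt.

```cpp
// Hedged illustration of the string the new getFingerprint helper produces.
// The stream formatting mirrors the code above; the endpoint and key text are
// made-up stand-ins.
#include <iostream>
#include <optional>
#include <sstream>
#include <string>

std::string
fingerprint(
    std::string const& address,
    std::optional<std::string> const& publicKey = std::nullopt,
    std::optional<std::string> const& id = std::nullopt)
{
    std::stringstream ss;
    ss << "IP Address: " << address;
    if (publicKey.has_value())
        ss << ", Public Key: " << *publicKey;
    if (id.has_value())
        ss << ", Id: " << *id;
    return ss.str();
}

int main()
{
    // Prints: IP Address: 192.0.2.1:51235, Public Key: n9KExample..., Id: 7
    std::cout << fingerprint("192.0.2.1:51235", "n9KExample...", "7") << '\n';
}
```

Further down in this comparison, Resource::Entry::to_string() switches to this helper, so peer-resource log lines include the public key when one is known.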
//------------------------------------------------------------------------------
@@ -72,8 +72,10 @@ class STCurrency;
|
||||
STYPE(STI_VL, 7) \
|
||||
STYPE(STI_ACCOUNT, 8) \
|
||||
STYPE(STI_NUMBER, 9) \
|
||||
STYPE(STI_INT32, 10) \
|
||||
STYPE(STI_INT64, 11) \
|
||||
\
|
||||
/* 10-13 are reserved */ \
|
||||
/* 12-13 are reserved */ \
|
||||
STYPE(STI_OBJECT, 14) \
|
||||
STYPE(STI_ARRAY, 15) \
|
||||
\
|
||||
@@ -356,6 +358,9 @@ using SF_UINT256 = TypedField<STBitString<256>>;
|
||||
using SF_UINT384 = TypedField<STBitString<384>>;
|
||||
using SF_UINT512 = TypedField<STBitString<512>>;
|
||||
|
||||
using SF_INT32 = TypedField<STInteger<std::int32_t>>;
|
||||
using SF_INT64 = TypedField<STInteger<std::int64_t>>;
|
||||
|
||||
using SF_ACCOUNT = TypedField<STAccount>;
|
||||
using SF_AMOUNT = TypedField<STAmount>;
|
||||
using SF_ISSUE = TypedField<STIssue>;
|
||||
|
||||
@@ -709,37 +709,6 @@ canAdd(STAmount const& amt1, STAmount const& amt2);
|
||||
bool
|
||||
canSubtract(STAmount const& amt1, STAmount const& amt2);
|
||||
|
||||
// Since `canonicalize` does not have access to a ledger, this is needed to put
|
||||
// the low-level routine stAmountCanonicalize on an amendment switch. Only
|
||||
// transactions need to use this switchover. Outside of a transaction it's safe
|
||||
// to unconditionally use the new behavior.
|
||||
|
||||
bool
|
||||
getSTAmountCanonicalizeSwitchover();
|
||||
|
||||
void
|
||||
setSTAmountCanonicalizeSwitchover(bool v);
|
||||
|
||||
/** RAII class to set and restore the STAmount canonicalize switchover.
|
||||
*/
|
||||
|
||||
class STAmountSO
|
||||
{
|
||||
public:
|
||||
explicit STAmountSO(bool v) : saved_(getSTAmountCanonicalizeSwitchover())
|
||||
{
|
||||
setSTAmountCanonicalizeSwitchover(v);
|
||||
}
|
||||
|
||||
~STAmountSO()
|
||||
{
|
||||
setSTAmountCanonicalizeSwitchover(saved_);
|
||||
}
|
||||
|
||||
private:
|
||||
bool saved_;
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
@@ -81,6 +81,8 @@ using STUInt16 = STInteger<std::uint16_t>;
|
||||
using STUInt32 = STInteger<std::uint32_t>;
|
||||
using STUInt64 = STInteger<std::uint64_t>;
|
||||
|
||||
using STInt32 = STInteger<std::int32_t>;
|
||||
|
||||
template <typename Integer>
|
||||
inline STInteger<Integer>::STInteger(Integer v) : value_(v)
|
||||
{
|
||||
|
||||
@@ -231,6 +231,8 @@ public:
|
||||
getFieldH192(SField const& field) const;
|
||||
uint256
|
||||
getFieldH256(SField const& field) const;
|
||||
std::int32_t
|
||||
getFieldI32(SField const& field) const;
|
||||
AccountID
|
||||
getAccountID(SField const& field) const;
|
||||
|
||||
@@ -242,6 +244,9 @@ public:
|
||||
getFieldPathSet(SField const& field) const;
|
||||
STVector256 const&
|
||||
getFieldV256(SField const& field) const;
|
||||
// If not found, returns an object constructed with the given field
|
||||
STObject
|
||||
getFieldObject(SField const& field) const;
|
||||
STArray const&
|
||||
getFieldArray(SField const& field) const;
|
||||
STCurrency const&
|
||||
@@ -365,6 +370,8 @@ public:
|
||||
void
|
||||
setFieldH256(SField const& field, uint256 const&);
|
||||
void
|
||||
setFieldI32(SField const& field, std::int32_t);
|
||||
void
|
||||
setFieldVL(SField const& field, Blob const&);
|
||||
void
|
||||
setFieldVL(SField const& field, Slice const&);
|
||||
@@ -386,6 +393,8 @@ public:
|
||||
setFieldV256(SField const& field, STVector256 const& v);
|
||||
void
|
||||
setFieldArray(SField const& field, STArray const& v);
|
||||
void
|
||||
setFieldObject(SField const& field, STObject const& v);
|
||||
|
||||
template <class Tag>
|
||||
void
|
||||
|
||||
@@ -87,8 +87,14 @@ public:
|
||||
getFullText() const override;
|
||||
|
||||
// Outer transaction functions / signature functions.
|
||||
static Blob
|
||||
getSignature(STObject const& sigObject);
|
||||
|
||||
Blob
|
||||
getSignature() const;
|
||||
getSignature() const
|
||||
{
|
||||
return getSignature(*this);
|
||||
}
|
||||
|
||||
uint256
|
||||
getSigningHash() const;
|
||||
@@ -119,13 +125,20 @@ public:
|
||||
getJson(JsonOptions options, bool binary) const;
|
||||
|
||||
void
|
||||
sign(PublicKey const& publicKey, SecretKey const& secretKey);
|
||||
sign(
|
||||
PublicKey const& publicKey,
|
||||
SecretKey const& secretKey,
|
||||
std::optional<std::reference_wrapper<SField const>> signatureTarget =
|
||||
{});
|
||||
|
||||
/** Check the signature.
|
||||
@return `true` if valid signature. If invalid, the error message string.
|
||||
*/
|
||||
enum class RequireFullyCanonicalSig : bool { no, yes };
|
||||
|
||||
/** Check the signature.
|
||||
@param requireCanonicalSig If `true`, check that the signature is fully
|
||||
canonical. If `false`, only check that the signature is valid.
|
||||
@param rules The current ledger rules.
|
||||
@return `true` if valid signature. If invalid, the error message string.
|
||||
*/
|
||||
Expected<void, std::string>
|
||||
checkSign(RequireFullyCanonicalSig requireCanonicalSig, Rules const& rules)
|
||||
const;
|
||||
@@ -150,17 +163,34 @@ public:
|
||||
char status,
|
||||
std::string const& escapedMetaData) const;
|
||||
|
||||
std::vector<uint256>
|
||||
std::vector<uint256> const&
|
||||
getBatchTransactionIDs() const;
|
||||
|
||||
private:
|
||||
/** Check the signature.
|
||||
@param requireCanonicalSig If `true`, check that the signature is fully
|
||||
canonical. If `false`, only check that the signature is valid.
|
||||
@param rules The current ledger rules.
|
||||
@param sigObject Reference to object that contains the signature fields.
|
||||
Will be *this more often than not.
|
||||
@return `true` if valid signature. If invalid, the error message string.
|
||||
*/
|
||||
Expected<void, std::string>
|
||||
checkSingleSign(RequireFullyCanonicalSig requireCanonicalSig) const;
|
||||
checkSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
Rules const& rules,
|
||||
STObject const& sigObject) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkSingleSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
STObject const& sigObject) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkMultiSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
Rules const& rules) const;
|
||||
Rules const& rules,
|
||||
STObject const& sigObject) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkBatchSingleSign(
|
||||
@@ -179,7 +209,7 @@ private:
    move(std::size_t n, void* buf) override;

    friend class detail::STVar;
    mutable std::vector<uint256> batch_txn_ids_;
    mutable std::vector<uint256> batchTxnIds_;
};

bool
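getBatchTransactionIDs() now returns std::vector<uint256> const& instead of a vector by value, which is why the renamed batchTxnIds_ member stays in the object as a mutable cache that a const getter can fill. A hedged sketch of that lazy-cache pattern with stand-in types; the real body of the getter is not part of this diff.

```cpp
// Hedged sketch of the caching pattern implied by the header change above.
// Tx and TxID are stand-ins, not rippled's STTx and uint256.
#include <cstdint>
#include <vector>

using TxID = std::uint64_t;

class Tx
{
public:
    explicit Tx(std::vector<TxID> inner) : inner_(std::move(inner)) {}

    std::vector<TxID> const&
    getBatchTransactionIDs() const
    {
        if (batchTxnIds_.empty())            // compute once, reuse afterwards
            for (auto const& id : inner_)
                batchTxnIds_.push_back(id);  // real code would hash each inner txn
        return batchTxnIds_;
    }

private:
    std::vector<TxID> inner_;
    mutable std::vector<TxID> batchTxnIds_;  // lazily-filled cache
};

int main()
{
    Tx const tx{{1, 2, 3}};
    return static_cast<int>(tx.getBatchTransactionIDs().size());  // 3
}
```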
@@ -673,7 +673,8 @@ isTerRetry(TER x) noexcept
inline bool
isTesSuccess(TER x) noexcept
{
    return (x == tesSUCCESS);
    // Makes use of TERSubset::operator bool()
    return !(x);
}

inline bool
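The rewritten isTesSuccess leans on TERSubset's operator bool. A tiny sketch of why `return !(x)` matches the old `x == tesSUCCESS`, assuming, as the comment in the hunk suggests, that the operator reports "not success"; ResultCode is a stand-in, not TER.

```cpp
// Minimal sketch, under the assumption stated in the lead-in.
#include <iostream>

class ResultCode
{
public:
    constexpr explicit ResultCode(int code) : code_(code) {}

    // True when the code is anything other than success (0 here).
    constexpr explicit operator bool() const { return code_ != 0; }

private:
    int code_;
};

constexpr bool isSuccess(ResultCode x) { return !(x); }

int main()
{
    std::cout << isSuccess(ResultCode{0}) << ' '     // 1: success
              << isSuccess(ResultCode{-96}) << '\n';  // 0: some error code
}
```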
@@ -156,14 +156,14 @@ constexpr std::uint32_t const tfMPTokenIssuanceCreateMask =
|
||||
|
||||
// MPTokenIssuanceCreate MutableFlags:
|
||||
// Indicating specific fields or flags may be changed after issuance.
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanLock = lmfMPTCanMutateCanLock;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateRequireAuth = lmfMPTCanMutateRequireAuth;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanEscrow = lmfMPTCanMutateCanEscrow;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTrade = lmfMPTCanMutateCanTrade;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTransfer = lmfMPTCanMutateCanTransfer;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanClawback = lmfMPTCanMutateCanClawback;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateMetadata = lmfMPTCanMutateMetadata;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateTransferFee = lmfMPTCanMutateTransferFee;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanLock = lsmfMPTCanMutateCanLock;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateRequireAuth = lsmfMPTCanMutateRequireAuth;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanEscrow = lsmfMPTCanMutateCanEscrow;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTrade = lsmfMPTCanMutateCanTrade;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTransfer = lsmfMPTCanMutateCanTransfer;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanClawback = lsmfMPTCanMutateCanClawback;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateMetadata = lsmfMPTCanMutateMetadata;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateTransferFee = lsmfMPTCanMutateTransferFee;
|
||||
constexpr std::uint32_t const tmfMPTokenIssuanceCreateMutableMask =
    ~(tmfMPTCanMutateCanLock | tmfMPTCanMutateRequireAuth | tmfMPTCanMutateCanEscrow | tmfMPTCanMutateCanTrade
    | tmfMPTCanMutateCanTransfer | tmfMPTCanMutateCanClawback | tmfMPTCanMutateMetadata | tmfMPTCanMutateTransferFee);
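The mask is the bitwise complement of every permitted tmfMPTCanMutate* flag, so a single AND exposes any unknown bit in a transaction's mutable-flags word. A hedged sketch of that check with made-up constants; only the shape of the test mirrors the definitions above, and the actual preflight code is not shown in this comparison.

```cpp
// Hedged sketch of validating flags against a "complement of allowed bits" mask.
#include <cstdint>
#include <iostream>

constexpr std::uint32_t canMutateCanLock  = 0x00000002;  // illustrative values
constexpr std::uint32_t canMutateMetadata = 0x00010000;
constexpr std::uint32_t mutableMask = ~(canMutateCanLock | canMutateMetadata);

bool mutableFlagsValid(std::uint32_t flags)
{
    return (flags & mutableMask) == 0;  // any stray bit => invalid
}

int main()
{
    std::cout << mutableFlagsValid(canMutateCanLock) << ' '  // 1: allowed flag
              << mutableFlagsValid(0x00000100) << '\n';       // 0: unknown bit
}
```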
@@ -129,10 +129,12 @@ inplace_bigint_div_rem(std::span<uint64_t> numerator, std::uint64_t divisor)
|
||||
{
|
||||
// should never happen, but if it does then it seems natural to define
|
||||
// the a null set of numbers to be zero, so the remainder is also zero.
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::b58_fast::detail::inplace_bigint_div_rem : empty "
|
||||
"numerator");
|
||||
return 0;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
auto to_u128 = [](std::uint64_t high,
|
||||
|
||||
@@ -29,15 +29,14 @@
|
||||
|
||||
// Add new amendments to the top of this list.
|
||||
// Keep it sorted in reverse chronological order.
|
||||
// If you add an amendment here, then do not forget to increment `numFeatures`
|
||||
// in include/xrpl/protocol/Feature.h.
|
||||
|
||||
XRPL_FIX (IncludeKeyletFields, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (DirectoryLimit, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (IncludeKeyletFields, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(DynamicMPT, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (TokenEscrowV1, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (DelegateV1_1, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (PriceOracleOrder, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (MPTDeliveredAmount, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (MPTDeliveredAmount, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (AMMClawbackRounding, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(TokenEscrow, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (EnforceNFTokenTrustlineV2, Supported::yes, VoteBehavior::DefaultNo)
|
||||
@@ -45,7 +44,7 @@ XRPL_FIX (AMMv1_3, Supported::yes, VoteBehavior::DefaultNo
|
||||
XRPL_FEATURE(PermissionedDEX, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(Batch, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(SingleAssetVault, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(PermissionDelegation, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(PermissionDelegation, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (PayChanCancelAfter, Supported::yes, VoteBehavior::DefaultNo)
|
||||
// Check flags in Credential transactions
|
||||
XRPL_FIX (InvalidTxFlags, Supported::yes, VoteBehavior::DefaultNo)
|
||||
@@ -91,33 +90,19 @@ XRPL_FIX (TrustLinesToSelf, Supported::yes, VoteBehavior::DefaultNo
|
||||
XRPL_FEATURE(NonFungibleTokensV1_1, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(ExpandedSignerList, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(CheckCashMakesTrustLine, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (RmSmallIncreasedQOffers, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (STAmountCanonicalize, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(FlowSortStrands, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(TicketBatch, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(NegativeUNL, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (AmendmentMajorityCalc, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(HardenedValidations, Supported::yes, VoteBehavior::DefaultYes)
|
||||
// fix1781: XRPEndpointSteps should be included in the circular payment check
|
||||
XRPL_FIX (1781, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(RequireFullyCanonicalSig, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (QualityUpperBound, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(DeletableAccounts, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (PayChanRecipientOwnerDir, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (CheckThreading, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (MasterKeyAsRegularKey, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (TakerDryOfferRemoval, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(MultiSignReserve, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1578, Supported::yes, VoteBehavior::DefaultYes)
|
||||
// fix1515: Use liquidity from strands that consume max offers, but mark as dry
|
||||
XRPL_FIX (1515, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(DepositPreauth, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1623, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1543, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1571, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(Checks, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(DepositAuth, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1513, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(Flow, Supported::yes, VoteBehavior::DefaultYes)
|
||||
|
||||
// The following amendments are obsolete, but must remain supported
|
||||
@@ -138,21 +123,35 @@ XRPL_FEATURE(CryptoConditionsSuite, Supported::yes, VoteBehavior::Obsolete)
|
||||
|
||||
// The following amendments have been active for at least two years. Their
|
||||
// pre-amendment code has been removed and the identifiers are deprecated.
|
||||
// All known amendments and amendments that may appear in a validated
|
||||
// ledger must be registered either here or above with the "active" amendments
|
||||
XRPL_RETIRE(MultiSign)
|
||||
XRPL_RETIRE(TrustSetAuth)
|
||||
XRPL_RETIRE(FeeEscalation)
|
||||
XRPL_RETIRE(PayChan)
|
||||
XRPL_RETIRE(CryptoConditions)
|
||||
XRPL_RETIRE(TickSize)
|
||||
XRPL_RETIRE(fix1368)
|
||||
XRPL_RETIRE(Escrow)
|
||||
XRPL_RETIRE(fix1373)
|
||||
XRPL_RETIRE(EnforceInvariants)
|
||||
XRPL_RETIRE(SortedDirectories)
|
||||
// All known amendments and amendments that may appear in a validated ledger
|
||||
// must be registered either here or above with the "active" amendments
|
||||
//
|
||||
// Please keep this list sorted alphabetically for convenience.
|
||||
XRPL_RETIRE(fix1201)
|
||||
XRPL_RETIRE(fix1368)
|
||||
XRPL_RETIRE(fix1373)
|
||||
XRPL_RETIRE(fix1512)
|
||||
XRPL_RETIRE(fix1513)
|
||||
XRPL_RETIRE(fix1515)
|
||||
XRPL_RETIRE(fix1523)
|
||||
XRPL_RETIRE(fix1528)
|
||||
XRPL_RETIRE(fix1543)
|
||||
XRPL_RETIRE(fix1571)
|
||||
XRPL_RETIRE(fix1578)
|
||||
XRPL_RETIRE(fix1623)
|
||||
XRPL_RETIRE(fix1781)
|
||||
XRPL_RETIRE(fixCheckThreading)
|
||||
XRPL_RETIRE(fixQualityUpperBound)
|
||||
XRPL_RETIRE(fixRmSmallIncreasedQOffers)
|
||||
XRPL_RETIRE(fixSTAmountCanonicalize)
|
||||
XRPL_RETIRE(fixTakerDryOfferRemoval)
|
||||
XRPL_RETIRE(CryptoConditions)
|
||||
XRPL_RETIRE(Escrow)
|
||||
XRPL_RETIRE(EnforceInvariants)
|
||||
XRPL_RETIRE(FeeEscalation)
|
||||
XRPL_RETIRE(FlowCross)
|
||||
XRPL_RETIRE(MultiSign)
|
||||
XRPL_RETIRE(PayChan)
|
||||
XRPL_RETIRE(SortedDirectories)
|
||||
XRPL_RETIRE(TickSize)
|
||||
XRPL_RETIRE(TrustSetAuth)
|
||||
|
||||
@@ -457,7 +457,7 @@ LEDGER_ENTRY(ltCREDENTIAL, 0x0081, Credential, credential, ({
|
||||
{sfExpiration, soeOPTIONAL},
|
||||
{sfURI, soeOPTIONAL},
|
||||
{sfIssuerNode, soeREQUIRED},
|
||||
{sfSubjectNode, soeREQUIRED},
|
||||
{sfSubjectNode, soeOPTIONAL},
|
||||
{sfPreviousTxnID, soeREQUIRED},
|
||||
{sfPreviousTxnLgrSeq, soeREQUIRED},
|
||||
}))
|
||||
|
||||
@@ -208,6 +208,12 @@ TYPED_SFIELD(sfAssetsMaximum, NUMBER, 3)
TYPED_SFIELD(sfAssetsTotal, NUMBER, 4)
TYPED_SFIELD(sfLossUnrealized, NUMBER, 5)

// int32
// NOTE: Do not use `sfDummyInt32`. It's so far the only use of INT32
// in this file and has been defined here for test only.
// TODO: Replace `sfDummyInt32` with actually useful field.
TYPED_SFIELD(sfDummyInt32, INT32, 1) // for tests only

// currency amount (common)
TYPED_SFIELD(sfAmount, AMOUNT, 1)
TYPED_SFIELD(sfBalance, AMOUNT, 2)
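The INT32 pieces added across this comparison (STI_INT32 and SF_INT32 in the type headers, STInt32, the getFieldI32/setFieldI32 accessors on STObject, and the test-only sfDummyInt32 above) introduce the first signed 32-bit field type for serialized objects. A rough sketch of the set/get round-trip those accessors enable, using a tiny stand-in object rather than STObject.

```cpp
// Hedged stand-in for the new signed 32-bit field accessors; Obj is not STObject.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

class Obj
{
public:
    void setFieldI32(std::string const& field, std::int32_t v) { i32_[field] = v; }

    std::int32_t getFieldI32(std::string const& field) const
    {
        auto const it = i32_.find(field);
        return it != i32_.end() ? it->second : 0;  // default when absent
    }

private:
    std::map<std::string, std::int32_t> i32_;
};

int main()
{
    Obj obj;
    obj.setFieldI32("DummyInt32", -42);                  // signed values round-trip
    std::cout << obj.getFieldI32("DummyInt32") << '\n';  // prints -42
}
```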
@@ -851,7 +851,7 @@ TRANSACTION(ttDELEGATE_SET, 64, DelegateSet,
|
||||
TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
createPseudoAcct | createMPTIssuance,
|
||||
createPseudoAcct | createMPTIssuance | mustModifyVault,
|
||||
({
|
||||
{sfAsset, soeREQUIRED, soeMPTSupported},
|
||||
{sfAssetsMaximum, soeOPTIONAL},
|
||||
@@ -869,7 +869,7 @@ TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
|
||||
TRANSACTION(ttVAULT_SET, 66, VaultSet,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
noPriv,
|
||||
mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAssetsMaximum, soeOPTIONAL},
|
||||
@@ -884,7 +884,7 @@ TRANSACTION(ttVAULT_SET, 66, VaultSet,
|
||||
TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mustDeleteAcct | destroyMPTIssuance,
|
||||
mustDeleteAcct | destroyMPTIssuance | mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
}))
|
||||
@@ -896,7 +896,7 @@ TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
|
||||
TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayAuthorizeMPT,
|
||||
mayAuthorizeMPT | mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
@@ -909,7 +909,7 @@ TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
|
||||
TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayDeleteMPT,
|
||||
mayDeleteMPT | mayAuthorizeMPT | mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
@@ -924,7 +924,7 @@ TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
|
||||
TRANSACTION(ttVAULT_CLAWBACK, 70, VaultClawback,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayDeleteMPT,
|
||||
mayDeleteMPT | mustModifyVault,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfHolder, soeREQUIRED},
|
||||
|
||||
@@ -569,6 +569,7 @@ JSS(settle_delay); // out: AccountChannels
|
||||
JSS(severity); // in: LogLevel
|
||||
JSS(shares); // out: VaultInfo
|
||||
JSS(signature); // out: NetworkOPs, ChannelAuthorize
|
||||
JSS(signature_target); // in: TransactionSign
|
||||
JSS(signature_verified); // out: ChannelVerify
|
||||
JSS(signing_key); // out: NetworkOPs
|
||||
JSS(signing_keys); // out: ValidatorList
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
#define RIPPLE_RESOURCE_CONSUMER_H_INCLUDED
|
||||
|
||||
#include <xrpl/basics/Log.h>
|
||||
#include <xrpl/protocol/PublicKey.h>
|
||||
#include <xrpl/resource/Charge.h>
|
||||
#include <xrpl/resource/Disposition.h>
|
||||
|
||||
@@ -87,6 +88,9 @@ public:
|
||||
Entry&
|
||||
entry();
|
||||
|
||||
void
|
||||
setPublicKey(PublicKey const& publicKey);
|
||||
|
||||
private:
|
||||
Logic* m_logic;
|
||||
Entry* m_entry;
|
||||
|
||||
@@ -53,7 +53,7 @@ struct Entry : public beast::List<Entry>::Node
|
||||
std::string
|
||||
to_string() const
|
||||
{
|
||||
return key->address.to_string();
|
||||
return getFingerprint(key->address, publicKey);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -82,6 +82,9 @@ struct Entry : public beast::List<Entry>::Node
|
||||
return local_balance.add(charge, now) + remote_balance;
|
||||
}
|
||||
|
||||
// The public key of the peer
|
||||
std::optional<PublicKey> publicKey;
|
||||
|
||||
// Back pointer to the map key (bit of a hack here)
|
||||
Key const* key;
|
||||
|
||||
|
||||
@@ -436,10 +436,12 @@ public:
|
||||
admin_.erase(admin_.iterator_to(entry));
|
||||
break;
|
||||
default:
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::Resource::Logic::release : invalid entry "
|
||||
"kind");
|
||||
break;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
inactive_.push_back(entry);
|
||||
entry.whenExpires = m_clock.now() + secondsUntilExpiration;
|
||||
|
||||
@@ -30,15 +30,29 @@
|
||||
#include <boost/asio/buffer.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/ip/tcp.hpp>
|
||||
#include <boost/asio/post.hpp>
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/asio/steady_timer.hpp>
|
||||
#include <boost/beast/core/detect_ssl.hpp>
|
||||
#include <boost/beast/core/multi_buffer.hpp>
|
||||
#include <boost/beast/core/tcp_stream.hpp>
|
||||
#include <boost/container/flat_map.hpp>
|
||||
#include <boost/predef.h>
|
||||
|
||||
#if !BOOST_OS_WINDOWS
|
||||
#include <sys/resource.h>
|
||||
|
||||
#include <dirent.h>
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
#include <algorithm>
|
||||
#include <chrono>
|
||||
#include <cstdint>
|
||||
#include <functional>
|
||||
#include <memory>
|
||||
#include <optional>
|
||||
#include <sstream>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
@@ -98,10 +112,27 @@ private:
|
||||
boost::asio::strand<boost::asio::io_context::executor_type> strand_;
|
||||
bool ssl_;
|
||||
bool plain_;
|
||||
static constexpr std::chrono::milliseconds INITIAL_ACCEPT_DELAY{50};
|
||||
static constexpr std::chrono::milliseconds MAX_ACCEPT_DELAY{2000};
|
||||
std::chrono::milliseconds accept_delay_{INITIAL_ACCEPT_DELAY};
|
||||
boost::asio::steady_timer backoff_timer_;
|
||||
static constexpr double FREE_FD_THRESHOLD = 0.70;
|
||||
|
||||
struct FDStats
|
||||
{
|
||||
std::uint64_t used{0};
|
||||
std::uint64_t limit{0};
|
||||
};
|
||||
|
||||
void
|
||||
reOpen();
|
||||
|
||||
std::optional<FDStats>
|
||||
query_fd_stats() const;
|
||||
|
||||
bool
|
||||
should_throttle_for_fds();
|
||||
|
||||
public:
|
||||
Door(
|
||||
Handler& handler,
|
||||
@@ -299,6 +330,7 @@ Door<Handler>::Door(
|
||||
, plain_(
|
||||
port_.protocol.count("http") > 0 || port_.protocol.count("ws") > 0 ||
|
||||
port_.protocol.count("ws2"))
|
||||
, backoff_timer_(io_context)
|
||||
{
|
||||
reOpen();
|
||||
}
|
||||
@@ -323,6 +355,7 @@ Door<Handler>::close()
|
||||
return boost::asio::post(
|
||||
strand_,
|
||||
std::bind(&Door<Handler>::close, this->shared_from_this()));
|
||||
backoff_timer_.cancel();
|
||||
error_code ec;
|
||||
acceptor_.close(ec);
|
||||
}
|
||||
@@ -368,6 +401,17 @@ Door<Handler>::do_accept(boost::asio::yield_context do_yield)
|
||||
{
|
||||
while (acceptor_.is_open())
|
||||
{
|
||||
if (should_throttle_for_fds())
|
||||
{
|
||||
backoff_timer_.expires_after(accept_delay_);
|
||||
boost::system::error_code tec;
|
||||
backoff_timer_.async_wait(do_yield[tec]);
|
||||
accept_delay_ = std::min(accept_delay_ * 2, MAX_ACCEPT_DELAY);
|
||||
JLOG(j_.warn()) << "Throttling do_accept for "
|
||||
<< accept_delay_.count() << "ms.";
|
||||
continue;
|
||||
}
|
||||
|
||||
error_code ec;
|
||||
endpoint_type remote_address;
|
||||
stream_type stream(ioc_);
|
||||
@@ -377,15 +421,28 @@ Door<Handler>::do_accept(boost::asio::yield_context do_yield)
|
||||
{
|
||||
if (ec == boost::asio::error::operation_aborted)
|
||||
break;
|
||||
JLOG(j_.error()) << "accept: " << ec.message();
|
||||
if (ec == boost::asio::error::no_descriptors)
|
||||
|
||||
if (ec == boost::asio::error::no_descriptors ||
|
||||
ec == boost::asio::error::no_buffer_space)
|
||||
{
|
||||
JLOG(j_.info()) << "re-opening acceptor";
|
||||
reOpen();
|
||||
JLOG(j_.warn()) << "accept: Too many open files. Pausing for "
|
||||
<< accept_delay_.count() << "ms.";
|
||||
|
||||
backoff_timer_.expires_after(accept_delay_);
|
||||
boost::system::error_code tec;
|
||||
backoff_timer_.async_wait(do_yield[tec]);
|
||||
|
||||
accept_delay_ = std::min(accept_delay_ * 2, MAX_ACCEPT_DELAY);
|
||||
}
|
||||
else
|
||||
{
|
||||
JLOG(j_.error()) << "accept error: " << ec.message();
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
accept_delay_ = INITIAL_ACCEPT_DELAY;
|
||||
|
||||
if (ssl_ && plain_)
|
||||
{
|
||||
if (auto sp = ios().template emplace<Detector>(
|
||||
@@ -408,6 +465,60 @@ Door<Handler>::do_accept(boost::asio::yield_context do_yield)
|
||||
}
|
||||
}
|
||||
|
||||
template <class Handler>
|
||||
std::optional<typename Door<Handler>::FDStats>
|
||||
Door<Handler>::query_fd_stats() const
|
||||
{
|
||||
#if BOOST_OS_WINDOWS
|
||||
return std::nullopt;
|
||||
#else
|
||||
FDStats s;
|
||||
struct rlimit rl;
|
||||
if (getrlimit(RLIMIT_NOFILE, &rl) != 0 || rl.rlim_cur == RLIM_INFINITY)
|
||||
return std::nullopt;
|
||||
s.limit = static_cast<std::uint64_t>(rl.rlim_cur);
|
||||
#if BOOST_OS_LINUX
|
||||
constexpr char const* kFdDir = "/proc/self/fd";
|
||||
#else
|
||||
constexpr char const* kFdDir = "/dev/fd";
|
||||
#endif
|
||||
if (DIR* d = ::opendir(kFdDir))
|
||||
{
|
||||
std::uint64_t cnt = 0;
|
||||
while (::readdir(d) != nullptr)
|
||||
++cnt;
|
||||
::closedir(d);
|
||||
// readdir counts '.', '..', and the DIR* itself shows in the list
|
||||
s.used = (cnt >= 3) ? (cnt - 3) : 0;
|
||||
return s;
|
||||
}
|
||||
return std::nullopt;
|
||||
#endif
|
||||
}
|
||||
|
||||
template <class Handler>
|
||||
bool
|
||||
Door<Handler>::should_throttle_for_fds()
|
||||
{
|
||||
#if BOOST_OS_WINDOWS
|
||||
return false;
|
||||
#else
|
||||
auto const stats = query_fd_stats();
|
||||
if (!stats || stats->limit == 0)
|
||||
return false;
|
||||
|
||||
auto const& s = *stats;
|
||||
auto const free = (s.limit > s.used) ? (s.limit - s.used) : 0ull;
|
||||
double const free_ratio =
|
||||
static_cast<double>(free) / static_cast<double>(s.limit);
|
||||
if (free_ratio < FREE_FD_THRESHOLD)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
} // namespace ripple
#endif
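The Door changes above throttle accept() in two ways: pre-emptively when the fraction of free file descriptors falls below FREE_FD_THRESHOLD, and reactively when accept fails with no_descriptors or no_buffer_space. Both paths wait accept_delay_, then double it up to MAX_ACCEPT_DELAY, and a successful accept resets it to INITIAL_ACCEPT_DELAY. A small sketch of just that back-off arithmetic, without the asio wiring:

```cpp
// Hedged sketch of the accept back-off: 50ms doubling to a 2000ms cap,
// reset after a successful accept. Reproduces only the arithmetic above.
#include <algorithm>
#include <chrono>
#include <iostream>

int main()
{
    using namespace std::chrono;
    constexpr milliseconds initial{50};
    constexpr milliseconds cap{2000};

    milliseconds delay = initial;
    for (int attempt = 1; attempt <= 8; ++attempt)
    {
        std::cout << "attempt " << attempt << ": wait " << delay.count() << "ms\n";
        delay = std::min(delay * 2, cap);  // 50, 100, 200, ... capped at 2000
    }
    delay = initial;  // a successful accept resets the delay
}
```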
167 pkgs/build.sh Executable file
@@ -0,0 +1,167 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -o errexit
|
||||
set -o xtrace
|
||||
|
||||
. /etc/os-release
|
||||
|
||||
case "$ID $ID_LIKE" in
|
||||
*rhel*|*fedora*)
|
||||
pkg="rpm"
|
||||
;;
|
||||
*debian*|*ubuntu*)
|
||||
pkg="deb"
|
||||
;;
|
||||
esac
|
||||
|
||||
repo_dir=$PWD
|
||||
set -a
|
||||
repo_name="rippled"
|
||||
pkgs_dir="${repo_dir}/pkgs"
|
||||
shared_files="${pkgs_dir}/shared"
|
||||
pkg_files="${pkgs_dir}/${pkg}"
|
||||
build_info_src="${repo_dir}/src/libxrpl/protocol/BuildInfo.cpp"
|
||||
xrpl_version=$(grep -E -i -o "\b(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-[0-9a-z\-]+(\.[0-9a-z\-]+)*)?(\+[0-9a-z\-]+(\.[0-9a-z\-]+)*)?\b" "${build_info_src}")
|
||||
|
||||
git config --global --add safe.directory '*'
|
||||
branch=$(git rev-parse --abbrev-ref HEAD)
|
||||
commit=$(git rev-parse HEAD)
|
||||
short_commit=$(git rev-parse --short=7 HEAD)
|
||||
date=$(git show -s --format=%cd --date=format-local:"%Y%m%d%H%M%S")
|
||||
|
||||
BUILD_TYPE=${BUILD_TYPE:-Release}
|
||||
set +a
|
||||
if [ "${branch}" = 'develop' ]; then
|
||||
# TODO: Can remove when CMake sets version
|
||||
dev_version="${date}~${short_commit}"
|
||||
xrpl_version="${xrpl_version}+${dev_version}"
|
||||
fi
|
||||
|
||||
if [ "${pkg}" = 'rpm' ]; then
|
||||
IFS='-' read -r RIPPLED_RPM_VERSION RELEASE <<< "${xrpl_version}"
|
||||
export RIPPLED_RPM_VERSION
|
||||
RPM_RELEASE=${RPM_RELEASE-1}
|
||||
# post-release version
|
||||
if [ "hf" = "$(echo "$RELEASE" | cut -c -2)" ]; then
|
||||
RPM_RELEASE="${RPM_RELEASE}.${RELEASE}"
|
||||
# pre-release version (-b or -rc)
|
||||
elif [[ $RELEASE ]]; then
|
||||
RPM_RELEASE="0.${RPM_RELEASE}.${RELEASE}"
|
||||
fi
|
||||
export RPM_RELEASE
|
||||
|
||||
if [[ $RPM_PATCH ]]; then
|
||||
RPM_PATCH=".${RPM_PATCH}"
|
||||
export RPM_PATCH
|
||||
fi
|
||||
|
||||
build_dir="build/${pkg}/packages"
|
||||
rm -rf "${build_dir}"
|
||||
mkdir -p "${build_dir}/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}"
|
||||
# cp "${pkgs_dir}/rippled.patch" "${build_dir}/rpmbuild/SOURCES/"
|
||||
git archive \
|
||||
--remote "${repo_dir}" HEAD \
|
||||
--prefix ${repo_name}/ \
|
||||
--format tar.gz \
|
||||
--output "${build_dir}/rpmbuild/SOURCES/rippled.tar.gz"
|
||||
ln --symbolic "${repo_dir}" "${build_dir}/rippled"
|
||||
cp -r "${pkg_files}/rippled.spec" "${build_dir}"
|
||||
pushd "${build_dir}" || exit
|
||||
|
||||
rpmbuild \
|
||||
--define "_topdir ${PWD}/rpmbuild" \
|
||||
--define "_smp_build_ncpus %(nproc --ignore=2 2>/dev/null || echo 1)" \
|
||||
-ba rippled.spec
|
||||
|
||||
RPM_VERSION_RELEASE=$(rpm -qp --qf='%{NAME}-%{VERSION}-%{RELEASE}' ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm)
|
||||
tar_file=$RPM_VERSION_RELEASE.tar.gz
|
||||
|
||||
printf '%s\n' \
|
||||
"rpm_md5sum=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm 2>/dev/null)" \
|
||||
"rpm_sha256=$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm | awk '{ print $1 }')" \
|
||||
"rippled_version=${xrpl_version}" \
|
||||
"rippled_git_hash=${commit}" \
|
||||
"rpm_version=${RIPPLED_RPM_VERSION}" \
|
||||
"rpm_file_name=${tar_file}" \
|
||||
"rpm_version_release=${RPM_VERSION_RELEASE}" \
|
||||
> build_vars
|
||||
|
||||
# Rename the files arch to match the debs
|
||||
mv rpmbuild/RPMS/x86_64/* .
|
||||
for f in *x86_64.rpm; do
|
||||
new="${f/x86_64/amd64}"
|
||||
mv "$f" "$new"
|
||||
echo "Renamed $f -> $new"
|
||||
done
|
||||
rm -rf rpmbuild
|
||||
rm -f rippled rippled.tar.gz rippled.spec
|
||||
pushd -0 && dirs -c
|
||||
|
||||
mv "${build_dir}/build_vars" .
|
||||
|
||||
elif [ "${pkg}" = 'deb' ]; then
|
||||
# dpkg_version=$(echo "${xrpl_version}" | sed 's:-:~:g')
|
||||
dpkg_version=${xrpl_version//-/\~}
|
||||
full_version="${dpkg_version}"
|
||||
build_dir="build/${pkg}/packages"
|
||||
rm -rf "${build_dir}"
|
||||
mkdir -p "${build_dir}"
|
||||
|
||||
git archive \
|
||||
--remote "${repo_dir}" HEAD \
|
||||
--prefix ${repo_name}/ \
|
||||
--format tar.gz \
|
||||
--output "${build_dir}/${repo_name}_${dpkg_version}.orig.tar.gz"
|
||||
|
||||
pushd "${build_dir}" || exit
|
||||
tar -zxf "${repo_name}_${dpkg_version}.orig.tar.gz"
|
||||
|
||||
pushd ${repo_name} || exit
|
||||
|
||||
# Prepare the package metadata directory, `debian/`, within `rippled/`.
|
||||
cp -r "${pkg_files}/debian" .
|
||||
cp "${shared_files}/rippled.service" debian/
|
||||
cp "${shared_files}/update-rippled.sh" .
|
||||
cp "${shared_files}/update-rippled-cron" .
|
||||
cp "${shared_files}/rippled-logrotate" .
|
||||
|
||||
if [ "${branch}" = 'develop' ]; then
|
||||
# TODO: Can remove when CMake sets version
|
||||
sed --in-place "s/versionString = \"\([^\"]*\)\"/versionString = \"${xrpl_version}\"/" "${build_info_src}"
|
||||
fi
|
||||
|
||||
cat << CHANGELOG > ./debian/changelog
|
||||
rippled (${xrpl_version}) unstable; urgency=low
|
||||
|
||||
* see RELEASENOTES
|
||||
|
||||
-- Ripple Labs Inc. <support@ripple.com> $(TZ=UTC date -R)
|
||||
CHANGELOG
|
||||
|
||||
cat ./debian/changelog
|
||||
dpkg-buildpackage -b -d -us -uc
|
||||
|
||||
popd || exit
|
||||
rm -rf ${repo_name}
|
||||
# for f in *.ddeb; do mv -- "$f" "${f%.ddeb}.deb"; done
|
||||
popd || exit
|
||||
cp "${build_dir}/${repo_name}_${xrpl_version}_amd64.changes" .
|
||||
|
||||
awk '/Checksums-Sha256:/{hit=1;next}/Files:/{hit=0}hit' "${repo_name}_${xrpl_version}_amd64.changes" | sed -E 's!^[[:space:]]+!!' > shasums
|
||||
sha() {
|
||||
<shasums awk "/$1/ { print \$1 }"
|
||||
}
|
||||
|
||||
printf '%s\n' \
|
||||
"deb_sha256=$(sha "rippled_${full_version}_amd64.deb")" \
|
||||
"dbg_sha256=$(sha "rippled-dbgsym_${full_version}_amd64.deb")" \
|
||||
"rippled_version=${xrpl_version}" \
|
||||
"rippled_git_hash=${commit}" \
|
||||
"dpkg_version=${dpkg_version}" \
|
||||
"dpkg_full_version=${full_version}" \
|
||||
> build_vars
|
||||
|
||||
pushd -0 && dirs -c
|
||||
fi
|
||||
|
||||
cp "${build_dir}/"*.$pkg .
|
||||
3 pkgs/deb/debian/README.Debian Normal file
@@ -0,0 +1,3 @@
|
||||
rippled daemon
|
||||
|
||||
-- Mike Ellery <mellery451@gmail.com> Tue, 04 Dec 2018 18:19:03 +0000
|
||||
20 pkgs/deb/debian/control Normal file
@@ -0,0 +1,20 @@
|
||||
Source: rippled
|
||||
Section: net
|
||||
Priority: optional
|
||||
Maintainer: Ripple Labs Inc. <support@ripple.com>
|
||||
Build-Depends: cmake,
|
||||
debhelper (>= 13),
|
||||
debhelper-compat (= 13)
|
||||
# debhelper (>= 14),
|
||||
# debhelper-compat (= 14),
|
||||
# TODO: Let's go for 14!
|
||||
Standards-Version: 4.6.0
|
||||
Homepage: https://github.com/XRPLF/rippled.git
|
||||
Rules-Requires-Root: no
|
||||
|
||||
Package: rippled
|
||||
Architecture: any
|
||||
Depends: ${shlibs:Depends}, ${misc:Depends}
|
||||
Description: XRP Ledger server (rippled)
|
||||
rippled is the core server of the XRP Ledger, providing a peer-to-peer
|
||||
network node for validating and processing transactions.
|
||||
86 pkgs/deb/debian/copyright Normal file
@@ -0,0 +1,86 @@
|
||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: rippled
|
||||
Source: https://github.com/ripple/rippled
|
||||
|
||||
Files: *
|
||||
Copyright: 2012-2019 Ripple Labs Inc.
|
||||
|
||||
License: __UNKNOWN__
|
||||
|
||||
The accompanying files under various copyrights.
|
||||
|
||||
Copyright (c) 2012, 2013, 2014 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
The accompanying files incorporate work covered by the following copyright
|
||||
and previous license notice:
|
||||
|
||||
Copyright (c) 2011 Arthur Britto, David Schwartz, Jed McCaleb,
|
||||
Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant
|
||||
|
||||
Some code from Raw Material Software, Ltd., provided under the terms of the
|
||||
ISC License. See the corresponding source files for more details.
|
||||
Copyright (c) 2013 - Raw Material Software Ltd.
|
||||
Please visit http://www.juce.com
|
||||
|
||||
Some code from ASIO examples:
|
||||
// Copyright (c) 2003-2011 Christopher M. Kohlhoff (chris at kohlhoff dot com)
|
||||
//
|
||||
// Distributed under the Boost Software License, Version 1.0. (See accompanying
|
||||
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
Some code from Bitcoin:
|
||||
// Copyright (c) 2009-2010 Satoshi Nakamoto
|
||||
// Copyright (c) 2011 The Bitcoin developers
|
||||
// Distributed under the MIT/X11 software license, see the accompanying
|
||||
// file license.txt or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
Some code from Tom Wu:
|
||||
This software is covered under the following copyright:
|
||||
|
||||
/*
|
||||
* Copyright (c) 2003-2005 Tom Wu
|
||||
* All Rights Reserved.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
|
||||
* WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
|
||||
*
|
||||
* IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
|
||||
* INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
|
||||
* RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
|
||||
* THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
|
||||
* OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*
|
||||
* In addition, the following condition applies:
|
||||
*
|
||||
* All redistributions must retain an intact copy of this copyright notice
|
||||
* and disclaimer.
|
||||
*/
|
||||
|
||||
Address all questions regarding this license to:
|
||||
|
||||
Tom Wu
|
||||
tjw@cs.Stanford.EDU
|
||||
3 pkgs/deb/debian/dirs Normal file
@@ -0,0 +1,3 @@
|
||||
/var/log/rippled/
|
||||
/var/lib/rippled/
|
||||
/etc/systemd/system/rippled.service.d/
|
||||
2 pkgs/deb/debian/docs Normal file
@@ -0,0 +1,2 @@
|
||||
README.md
|
||||
LICENSE.md
|
||||
3 pkgs/deb/debian/rippled-dev.install Normal file
@@ -0,0 +1,3 @@
|
||||
opt/ripple/include
|
||||
opt/ripple/lib/*.a
|
||||
opt/ripple/lib/cmake/*
|
||||
2 pkgs/deb/debian/rippled.conffiles Normal file
@@ -0,0 +1,2 @@
|
||||
/opt/ripple/etc/rippled.cfg
|
||||
/opt/ripple/etc/validators.txt
|
||||
10 pkgs/deb/debian/rippled.install Normal file
@@ -0,0 +1,10 @@
|
||||
etc/logrotate.d/rippled
|
||||
opt/ripple/bin/rippled
|
||||
opt/ripple/bin/update-rippled.sh
|
||||
opt/ripple/bin/validator-keys
|
||||
opt/ripple/bin/xrpld
|
||||
opt/ripple/etc/rippled.cfg
|
||||
opt/ripple/etc/update-rippled-cron
|
||||
opt/ripple/etc/validators.txt
|
||||
opt/ripple/include/*
|
||||
opt/ripple/lib/*
|
||||
4 pkgs/deb/debian/rippled.links Normal file
@@ -0,0 +1,4 @@
|
||||
opt/ripple/etc/rippled.cfg etc/opt/ripple/rippled.cfg
|
||||
opt/ripple/etc/validators.txt etc/opt/ripple/validators.txt
|
||||
opt/ripple/bin/rippled usr/local/bin/rippled
|
||||
opt/ripple/bin/rippled opt/ripple/bin/xrpld
|
||||
39 pkgs/deb/debian/rippled.postinst Normal file
@@ -0,0 +1,39 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
USER_NAME=rippled
|
||||
GROUP_NAME=rippled
|
||||
case "$1" in
|
||||
configure)
|
||||
id -u $USER_NAME >/dev/null 2>&1 || \
|
||||
useradd --system \
|
||||
--home-dir /nonexistent \
|
||||
--no-create-home \
|
||||
--shell /usr/sbin/nologin \
|
||||
--comment "system user for rippled" \
|
||||
--user-group \
|
||||
${USER_NAME}
|
||||
|
||||
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled/
|
||||
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled/
|
||||
chown -R $USER_NAME:$GROUP_NAME /opt/ripple
|
||||
chmod 755 /var/log/rippled/
|
||||
chmod 755 /var/lib/rippled/
|
||||
chmod 644 /opt/ripple/etc/update-rippled-cron
|
||||
chmod 644 /etc/logrotate.d/rippled
|
||||
chown -R root:$GROUP_NAME /opt/ripple/etc/update-rippled-cron
|
||||
;;
|
||||
|
||||
abort-upgrade|abort-remove|abort-deconfigure)
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "postinst called with unknown argument \`$1'" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
|
||||
#DEBHELPER#
|
||||
|
||||
exit 0
|
||||
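The postinst scriptlet above creates a dedicated non-login system account and tightens ownership of the runtime directories. A quick, illustrative way to verify the result on an installed system (these commands are assumptions for inspection, not part of the packaging):

    id rippled                                  # the service account exists
    getent passwd rippled                       # shell is /usr/sbin/nologin, home is /nonexistent
    ls -ld /var/log/rippled /var/lib/rippled    # owned by rippled:rippled, mode 755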
17
pkgs/deb/debian/rippled.postrm
Normal file
@@ -0,0 +1,17 @@
#!/bin/sh
set -e

case "$1" in
    purge|remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
        ;;

    *)
        echo "postrm called with unknown argument \`$1'" >&2
        exit 1
        ;;
esac


#DEBHELPER#

exit 0
20
pkgs/deb/debian/rippled.preinst
Normal file
@@ -0,0 +1,20 @@
#!/bin/sh
set -e

case "$1" in
    install|upgrade)
        ;;

    abort-upgrade)
        ;;

    *)
        echo "preinst called with unknown argument \`$1'" >&2
        exit 1
        ;;
esac


#DEBHELPER#

exit 0
20
pkgs/deb/debian/rippled.prerm
Normal file
@@ -0,0 +1,20 @@
#!/bin/sh
set -e

case "$1" in
    remove|upgrade|deconfigure)
        ;;

    failed-upgrade)
        ;;

    *)
        echo "prerm called with unknown argument \`$1'" >&2
        exit 1
        ;;
esac


#DEBHELPER#

exit 0
77
pkgs/deb/debian/rules
Executable file
@@ -0,0 +1,77 @@
#!/usr/bin/make -f
export DH_VERBOSE = 1
export DH_OPTIONS = -v
## TODO: Confirm these are still required.
# debuild sets some warnings that don't work well
# for our current build, so try to remove those flags here:
export CFLAGS:=$(subst -Wformat,,$(CFLAGS))
export CFLAGS:=$(subst -Werror=format-security,,$(CFLAGS))
export CXXFLAGS:=$(subst -Wformat,,$(CXXFLAGS))
export CXXFLAGS:=$(subst -Werror=format-security,,$(CXXFLAGS))

## TODO: Confirm these are still required.
export DEB_BUILD_MAINT_OPTIONS = hardening=+all
export DEB_BUILD_OPTIONS = nodwz

export RIPPLE_REMOTE = xrplf
export RIPPLE_REMOTE_URL = https://conan.ripplex.io

# ## CMake Configure args
# export DEB_CMAKE_GENERATOR = Ninja
export DEB_CMAKE_BUILD_TYPE = Release

NPROC := $(shell nproc --ignore=2)
export DEB_BUILD_OPTIONS += parallel=$(NPROC)

CONAN_HOME := $(shell conan config home)
CONAN_PROFILE := $(shell conan profile path default)
CONAN_GCONF := $(CONAN_HOME)/global.conf
INSTALL_PREFIX := "/opt/ripple"
BUILD_DIR := obj-$(DEB_BUILD_GNU_TYPE)

.ONESHELL:
SHELL := /bin/bash

%:
	dh $@ --buildsystem=cmake

override_dh_installsystemd:
	dh_installsystemd --no-start

override_dh_auto_configure:
	conan remote add --index 0 $(RIPPLE_REMOTE) $(RIPPLE_REMOTE_URL) --force
	conan config install ./conan/profiles/default --target-folder $(CONAN_HOME)/profiles
	echo "tools.build:jobs={{ os.cpu_count() - 2 }}" >> $(CONAN_GCONF)
	echo "core.download:parallel={{ os.cpu_count() }}" >> $(CONAN_GCONF)

	conan install . \
		--settings:all build_type=$(DEB_CMAKE_BUILD_TYPE) \
		--output-folder=$(BUILD_DIR) \
		--options:host "&:xrpld=True" \
		--options:host "&:tests=True" \
		--build=missing

	# Debian assumes an offline build process and sets CMake's FETCHCONTENT_FULLY_DISCONNECTED variable to ON.
	# To keep as much of the default setup as possible, clone the tool where CMake's FetchContent expects it.
	mkdir -p "$(BUILD_DIR)/_deps"
	git clone https://github.com/ripple/validator-keys-tool.git "$(BUILD_DIR)/_deps/validator_keys-src"

	dh_auto_configure --builddirectory=$(BUILD_DIR) -- \
		-DCMAKE_BUILD_TYPE:STRING=$(DEB_CMAKE_BUILD_TYPE) \
		-Dxrpld=ON -Dtests=ON -Dvalidator_keys=ON \
		-DCMAKE_INSTALL_PREFIX:PATH=$(INSTALL_PREFIX) \
		-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON \
		-DCMAKE_TOOLCHAIN_FILE:FILEPATH=$(BUILD_DIR)/build/generators/conan_toolchain.cmake

override_dh_auto_build:
	dh_auto_build \
		--builddirectory=$(BUILD_DIR) -- rippled validator-keys

override_dh_auto_install:
	cmake --install $(BUILD_DIR) --prefix debian/tmp/opt/ripple
	install -D update-rippled.sh debian/tmp/opt/ripple/bin/update-rippled.sh
	install -D update-rippled-cron debian/tmp/opt/ripple/etc/update-rippled-cron
	install -D rippled-logrotate debian/tmp/etc/logrotate.d/rippled
	rm -rf debian/tmp/opt/ripple/lib64/cmake/date

override_dh_dwz:
	@echo "Skipping DWZ due to huge debug info"
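For orientation, the rules file above is driven by debhelper. A minimal, hypothetical invocation on a prepared build host might look like the sketch below; the package names and the pip-based Conan install are assumptions, not part of this diff:

    sudo apt-get install -y build-essential debhelper cmake git python3-pip
    pip3 install --user conan                  # Conan assumed to end up on PATH
    dpkg-buildpackage -b -us -uc               # runs debian/rules via dh
    ls ../rippled_*.deb                        # resulting binary packages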
1
pkgs/deb/debian/source/format
Normal file
@@ -0,0 +1 @@
3.0 (native)
2
pkgs/deb/debian/source/local-options
Normal file
@@ -0,0 +1,2 @@
#abort-on-upstream-changes
#unapply-patches
1
pkgs/rpm/50-rippled.preset
Normal file
@@ -0,0 +1 @@
enable rippled.service
171
pkgs/rpm/rippled.spec
Normal file
@@ -0,0 +1,171 @@
%global pkg_name %{getenv:repo_name}
%global branch %{getenv:branch}
%global commit %{getenv:commit}
%global shortcommit %{getenv:shortcommit}
%global date %{getenv:commit_date}
%global conan_remote_name %{getenv:conan_remote_name}
%global conan_remote_url %{getenv:conan_remote_url}
%global shared_files %{getenv:shared_files}
%global pkg_files %{getenv:pkg_files}
%global build_type %{getenv:BUILD_TYPE}

%global _prefix /opt/ripple
%global srcdir %{_builddir}/rippled
%global blddir %{srcdir}/bld.rippled

%global xrpl_version %{getenv:xrpl_version}
%global ver_base %(v=%{xrpl_version}; echo ${v%%-*})
%global _has_dash %(v=%{xrpl_version}; [ "${v#*-}" != "$v" ] && echo 1 || echo 0)
%if 0%{?_has_dash}
%global ver_suffix %(v=%{xrpl_version}; printf %s "${v#*-}")
%endif

Name:           %{pkg_name}
Version:        %{ver_base}
Release:        %{?ver_suffix:0.%{ver_suffix}}%{!?ver_suffix:1}%{?dist}
Summary:        %{name} XRPL daemon

License:        ISC
URL:            https://github.com/XRPLF/rippled
Source0:        rippled.tar.gz
Patch0:         rippled.patch
%{warn:name=%{name}}
%{warn:version=%{version}}
%{warn:ver_base=%{ver_base}}
%{warn:ver_suffix=%{ver_suffix}}
%{warn:release=%{release}}
%{warn:FullReleaseVersion=%{name}-%{version}-%{release}.%{_arch}.rpm}

%description
%{name} with p2p server for the XRP Ledger.

%prep
%autosetup -p1 -n %{name}

# TODO: Remove when version set with CMake.
if [ %{branch} == 'develop' ]; then
    sed --in-place "s/versionString = \"\([^\"]*\)\"/versionString = \"\1+%{ver_input}\"/" src/libxrpl/protocol/BuildInfo.cpp
fi

%build
conan remote add --index 0 %{conan_remote_name} %{conan_remote_url} --force
conan config install conan/profiles/default --target-folder $(conan config home)/profiles/
echo "tools.build:jobs={{ os.cpu_count() - 2 }}" >> ${CONAN_HOME}/global.conf
echo "core.download:parallel={{ os.cpu_count() }}" >> ${CONAN_HOME}/global.conf

conan install %{srcdir} \
    --settings:all build_type=%{build_type} \
    --output-folder %{srcdir}/conan_deps \
    --options:host "&:xrpld=True" \
    --options:host "&:tests=True" \
    --build=missing

cmake \
    -S %{srcdir} \
    -B %{blddir} \
    -Dxrpld=ON \
    -Dvalidator_keys=ON \
    -Dtests=ON \
    -DCMAKE_BUILD_TYPE:STRING=%{build_type} \
    -DCMAKE_INSTALL_PREFIX:PATH=%{_prefix} \
    -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON \
    -DCMAKE_TOOLCHAIN_FILE:FILEPATH=%{srcdir}/conan_deps/build/generators/conan_toolchain.cmake

cmake \
    --build %{blddir} \
    --parallel %{_smp_build_ncpus} \
    --target rippled \
    --target validator-keys

%install
rm -rf %{buildroot}
DESTDIR=%{buildroot} cmake --install %{blddir}

install -Dm0755 %{shared_files}/update-rippled.sh %{buildroot}%{_bindir}/update-rippled.sh
ln -s rippled %{buildroot}%{_bindir}/xrpld
ln -s update-rippled.sh %{buildroot}%{_bindir}/update-xrpld.sh

# configs
install -Dm0644 %{srcdir}/cfg/rippled-example.cfg %{buildroot}%{_prefix}/etc/rippled.cfg
install -Dm0644 %{srcdir}/cfg/validators-example.txt %{buildroot}%{_prefix}/etc/validators.txt
mkdir -p %{buildroot}%{_sysconfdir}/opt/ripple

# /etc points to /opt
ln -s ../../../opt/ripple/rippled.cfg %{buildroot}%{_sysconfdir}/opt/ripple/xrpld.cfg
ln -s ../../../opt/ripple/etc/rippled.cfg %{buildroot}%{_sysconfdir}/opt/ripple/rippled.cfg
ln -s ../../../opt/ripple/etc/validators.txt %{buildroot}%{_sysconfdir}/opt/ripple/validators.txt

# systemd/sysusers/tmpfiles
install -Dm0644 %{shared_files}/rippled.service %{buildroot}%{_unitdir}/rippled.service
install -Dm0644 %{pkg_files}/rippled.sysusers %{buildroot}%{_sysusersdir}/rippled.conf
install -Dm0644 %{pkg_files}/rippled.tmpfiles %{buildroot}%{_tmpfilesdir}/rippled.conf

%files
%license LICENSE*
%doc README*

# Files/dirs the package owns
%dir %{_prefix}
%dir %{_prefix}/bin
%dir %{_prefix}/etc
%if 0
%dir %{_sysconfdir}/opt # Add this if rpmlint complains.
%endif
%dir %{_sysconfdir}/opt/ripple

# Binaries and symlinks under our (non-standard) _prefix (/opt/ripple)
%{_bindir}/rippled
%{_bindir}/xrpld
%{_bindir}/update-rippled.sh
%{_bindir}/update-xrpld.sh
%{_bindir}/validator-keys

# We won't ship these, but we'll create them.
%ghost /usr/local/bin/rippled
%ghost /usr/local/bin/xrpld

%config(noreplace) %{_prefix}/etc/rippled.cfg
%config(noreplace) %{_prefix}/etc/validators.txt

%config(noreplace) %{_sysconfdir}/opt/ripple/rippled.cfg
%config(noreplace) %{_sysconfdir}/opt/ripple/xrpld.cfg
%config(noreplace) %{_sysconfdir}/opt/ripple/validators.txt

# systemd and service user creation
%{_unitdir}/rippled.service
%{_sysusersdir}/rippled.conf
%{_tmpfilesdir}/rippled.conf

# Let tmpfiles create the db and log dirs
%ghost %dir /var/opt/ripple
%ghost %dir /var/opt/ripple/lib
%ghost %dir /var/opt/ripple/log

# TODO: Fix the CMake install() calls so we don't need to exclude these.
%exclude %{_prefix}/include/*
%exclude %{_prefix}/lib/*
%exclude %{_prefix}/lib/pkgconfig/*
%exclude /usr/lib/debug/**

%post
# Add a link on $PATH: /usr/local/bin/rippled -> %{_bindir}/rippled (also non-standard)
mkdir -p /usr/local/bin
for i in rippled xrpld
do
    if [ ! -e /usr/local/bin/${i} ]; then
        ln -s %{_bindir}/${i} /usr/local/bin/${i}
    elif [ -L /usr/local/bin/${i} ] && \
         [ "$(readlink -f /usr/local/bin/${i})" != "%{_bindir}/${i}" ]; then
        ln -sfn %{_bindir}/${i} /usr/local/bin/${i}
    fi
done

%preun
# Remove the link only if it points to us (on erase, $1 == 0).
for i in rippled xrpld
do
    if [ "$1" -eq 0 ] && [ -L /usr/local/bin/${i} ] && \
       [ "$(readlink -f /usr/local/bin/${i})" = "%{_bindir}/${i}" ]; then
        rm -f /usr/local/bin/${i}
    fi
done
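The version macros near the top of the spec split xrpl_version into a numeric base and an optional pre-release suffix using shell parameter expansion. A small illustration of that expansion, in plain shell and independent of rpmbuild (the sample version string is made up):

    xrpl_version=3.0.0-b1
    echo "${xrpl_version%%-*}"        # 3.0.0 -> ver_base, used for Version:
    [ "${xrpl_version#*-}" != "$xrpl_version" ] && echo "pre-release detected"
    echo "${xrpl_version#*-}"         # b1 -> ver_suffix, giving Release: 0.b1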
2
pkgs/rpm/rippled.sysusers
Normal file
@@ -0,0 +1,2 @@
u rippled - "System user for rippled service"
g rippled - -
2
pkgs/rpm/rippled.tmpfiles
Normal file
@@ -0,0 +1,2 @@
d /var/opt/ripple/lib 0750 rippled rippled -
d /var/opt/ripple/log 0750 rippled adm -
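These two fragments delegate account and directory creation to systemd instead of RPM scriptlets. A sketch of exercising them by hand, assuming they have been installed under the standard sysusers.d and tmpfiles.d locations:

    sudo systemd-sysusers /usr/lib/sysusers.d/rippled.conf             # create the rippled user and group
    sudo systemd-tmpfiles --create /usr/lib/tmpfiles.d/rippled.conf    # create /var/opt/ripple/{lib,log}
    id rippled                                                         # verify the account exists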
15
pkgs/shared/rippled-logrotate
Normal file
@@ -0,0 +1,15 @@
/var/log/rippled/*.log {
    daily
    minsize 200M
    rotate 7
    nocreate
    missingok
    notifempty
    compress
    compresscmd /usr/bin/nice
    compressoptions -n19 ionice -c3 gzip
    compressext .gz
    postrotate
        /opt/ripple/bin/rippled --conf /opt/ripple/etc/rippled.cfg logrotate
    endscript
}
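The compresscmd/compressoptions pair above wraps gzip in nice and ionice so rotation stays low-priority. A dry run is a safe way to check a fragment like this; a possible invocation (installed path assumed) is:

    sudo logrotate --debug /etc/logrotate.d/rippled   # show planned actions only
    sudo logrotate --force /etc/logrotate.d/rippled   # force one rotation, exercising the postrotate hook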
15
pkgs/shared/rippled.service
Normal file
@@ -0,0 +1,15 @@
[Unit]
Description=Ripple Daemon
After=network-online.target
Wants=network-online.target

[Service]
Type=simple
ExecStart=/opt/ripple/bin/rippled --net --silent --conf /etc/opt/ripple/rippled.cfg
Restart=on-failure
User=rippled
Group=rippled
LimitNOFILE=65536

[Install]
WantedBy=multi-user.target
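Once the unit is installed (the 50-rippled.preset shipped above enables it by default on RPM systems), a typical way to start and observe the daemon is sketched below; these commands are illustrative, not part of the packaged files:

    sudo systemctl daemon-reload
    sudo systemctl enable --now rippled.service
    systemctl status rippled.service
    journalctl -u rippled.service -f        # follow the daemon's journal output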
10
pkgs/shared/update-rippled-cron
Normal file
@@ -0,0 +1,10 @@
# For automatic updates, symlink this file to /etc/cron.d/
# Do not remove the newline at the end of this cron script

# bash required for use of RANDOM below.
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin

# invoke check/update script with random delay up to 59 mins
0 * * * * root sleep $((RANDOM*3540/32768)) && /opt/ripple/bin/update-rippled.sh

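The single cron entry spreads update checks randomly across each hour. A quick illustration of enabling the fragment and of the range of the delay (not part of the packaged files):

    sudo ln -s /opt/ripple/etc/update-rippled-cron /etc/cron.d/update-rippled-cron
    echo $((RANDOM * 3540 / 32768))   # delay in seconds, 0..3539 (RANDOM is 0..32767)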
65
pkgs/shared/update-rippled.sh
Executable file
@@ -0,0 +1,65 @@
#!/usr/bin/env bash

# auto-update script for rippled daemon

# Check for sudo/root permissions
if [[ $(id -u) -ne 0 ]] ; then
    echo "This update script must be run as root or sudo"
    exit 1
fi

LOCKDIR=/tmp/rippleupdate.lock
UPDATELOG=/var/log/rippled/update.log

function cleanup {
    # If this directory isn't removed, future updates will fail.
    rmdir $LOCKDIR
}

# Use mkdir to check if the process is already running; mkdir is atomic, unlike file creation.
if ! mkdir $LOCKDIR 2>/dev/null; then
    echo $(date -u) "lockdir exists - won't proceed." >> $UPDATELOG
    exit 1
fi
trap cleanup EXIT

source /etc/os-release
can_update=false

if [[ "$ID" == "ubuntu" || "$ID" == "debian" ]] ; then
    # Silent update
    apt-get update -qq

    # The next line is an "awk"ward way to check if the package needs to be updated.
    RIPPLE=$(apt-get install -s --only-upgrade rippled | awk '/^Inst/ { print $2 }')
    test "$RIPPLE" == "rippled" && can_update=true

    function apply_update {
        apt-get install rippled -qq
    }
elif [[ "$ID" == "fedora" || "$ID" == "centos" || "$ID" == "rhel" || "$ID" == "scientific" ]] ; then
    RIPPLE_REPO=${RIPPLE_REPO-stable}
    yum --disablerepo=* --enablerepo=ripple-$RIPPLE_REPO clean expire-cache

    yum check-update -q --enablerepo=ripple-$RIPPLE_REPO rippled || can_update=true

    function apply_update {
        yum update -y --enablerepo=ripple-$RIPPLE_REPO rippled
    }
else
    echo "unrecognized distro!"
    exit 1
fi

# Do the actual update, then reload systemd and restart the service.
if [ "$can_update" = true ] ; then
    exec 3>&1 1>>${UPDATELOG} 2>&1
    set -e
    apply_update
    systemctl daemon-reload
    systemctl restart rippled.service
    echo $(date -u) "rippled daemon updated."
else
    echo $(date -u) "no updates available" >> $UPDATELOG
fi

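The update script serializes itself with a lock directory because mkdir either creates the directory or fails, atomically. The same pattern in isolation, as a minimal sketch with an illustrative path:

    LOCKDIR=/tmp/example.lock
    if mkdir "$LOCKDIR" 2>/dev/null; then
        trap 'rmdir "$LOCKDIR"' EXIT    # always release the lock on exit
        echo "lock acquired, doing work"
    else
        echo "another instance is already running" >&2
        exit 1
    fi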
@@ -239,9 +239,11 @@ Logs::fromSeverity(beast::severities::Severity level)
|
||||
case kError:
|
||||
return lsERROR;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::fromSeverity : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case kFatal:
|
||||
break;
|
||||
}
|
||||
@@ -265,9 +267,11 @@ Logs::toSeverity(LogSeverity level)
|
||||
return kWarning;
|
||||
case lsERROR:
|
||||
return kError;
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::toSeverity : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case lsFATAL:
|
||||
break;
|
||||
}
|
||||
@@ -292,9 +296,11 @@ Logs::toString(LogSeverity s)
|
||||
return "Error";
|
||||
case lsFATAL:
|
||||
return "Fatal";
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::toString : invalid severity");
|
||||
return "Unknown";
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -356,9 +362,11 @@ Logs::format(
|
||||
case kError:
|
||||
output += "ERR ";
|
||||
break;
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::format : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case kFatal:
|
||||
output += "FTL ";
|
||||
break;
|
||||
|
||||
@@ -36,6 +36,7 @@ LogThrow(std::string const& title)
|
||||
[[noreturn]] void
|
||||
LogicError(std::string const& s) noexcept
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
JLOG(debugLog().fatal()) << s;
|
||||
std::cerr << "Logic error: " << s << std::endl;
|
||||
// Use a non-standard contract naming here (without namespace) because
|
||||
@@ -45,6 +46,7 @@ LogicError(std::string const& s) noexcept
|
||||
// For the above reasons, we want this contract to stand out.
|
||||
UNREACHABLE("LogicError", {{"message", s}});
|
||||
std::abort();
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -174,7 +174,7 @@ Array::append(Json::Value const& v)
|
||||
return;
|
||||
}
|
||||
}
|
||||
UNREACHABLE("Json::Array::append : invalid type");
|
||||
UNREACHABLE("Json::Array::append : invalid type"); // LCOV_EXCL_LINE
|
||||
}
|
||||
|
||||
void
|
||||
@@ -209,7 +209,7 @@ Object::set(std::string const& k, Json::Value const& v)
|
||||
return;
|
||||
}
|
||||
}
|
||||
UNREACHABLE("Json::Object::set : invalid type");
|
||||
UNREACHABLE("Json::Object::set : invalid type"); // LCOV_EXCL_LINE
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
@@ -213,8 +213,10 @@ Value::Value(ValueType type) : type_(type), allocated_(0)
|
||||
value_.bool_ = false;
|
||||
break;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::Value(ValueType) : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -290,8 +292,10 @@ Value::Value(Value const& other) : type_(other.type_)
|
||||
value_.map_ = new ObjectValues(*other.value_.map_);
|
||||
break;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::Value(Value const&) : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -318,8 +322,10 @@ Value::~Value()
|
||||
delete value_.map_;
|
||||
break;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::~Value : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -419,8 +425,10 @@ operator<(Value const& x, Value const& y)
|
||||
return *x.value_.map_ < *y.value_.map_;
|
||||
}
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::operator<(Value, Value) : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable
|
||||
@@ -465,8 +473,10 @@ operator==(Value const& x, Value const& y)
|
||||
return x.value_.map_->size() == y.value_.map_->size() &&
|
||||
*x.value_.map_ == *y.value_.map_;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::operator==(Value, Value) : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable
|
||||
@@ -506,8 +516,10 @@ Value::asString() const
|
||||
case objectValue:
|
||||
JSON_ASSERT_MESSAGE(false, "Type is not convertible to string");
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asString : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return ""; // unreachable
|
||||
@@ -548,8 +560,10 @@ Value::asInt() const
|
||||
case objectValue:
|
||||
JSON_ASSERT_MESSAGE(false, "Type is not convertible to int");
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asInt : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable;
|
||||
@@ -590,8 +604,10 @@ Value::asUInt() const
|
||||
case objectValue:
|
||||
JSON_ASSERT_MESSAGE(false, "Type is not convertible to uint");
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asUInt : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable;
|
||||
@@ -622,8 +638,10 @@ Value::asDouble() const
|
||||
case objectValue:
|
||||
JSON_ASSERT_MESSAGE(false, "Type is not convertible to double");
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asDouble : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable;
|
||||
@@ -654,8 +672,10 @@ Value::asBool() const
|
||||
case objectValue:
|
||||
return value_.map_->size() != 0;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asBool : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return false; // unreachable;
|
||||
@@ -710,8 +730,10 @@ Value::isConvertibleTo(ValueType other) const
|
||||
return other == objectValue ||
|
||||
(other == nullValue && value_.map_->size() == 0);
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::isConvertible : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return false; // unreachable;
|
||||
@@ -744,8 +766,10 @@ Value::size() const
|
||||
case objectValue:
|
||||
return Int(value_.map_->size());
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::size : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable;
|
||||
|
||||
@@ -259,9 +259,11 @@ ApplyStateTable::apply(
|
||||
}
|
||||
else
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::detail::ApplyStateTable::apply : unsupported "
|
||||
"operation type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -680,12 +682,6 @@ ApplyStateTable::threadOwners(
|
||||
if (auto const optSleAcct{(*sle)[~sfAccount]})
|
||||
threadTx(base, meta, *optSleAcct, mods, j);
|
||||
|
||||
// Don't thread a check's sfDestination unless the amendment is
|
||||
// enabled
|
||||
if (ledgerType == ltCHECK &&
|
||||
!base.rules().enabled(fixCheckThreading))
|
||||
break;
|
||||
|
||||
// If sfDestination is present, thread to that account
|
||||
if (auto const optSleDest{(*sle)[~sfDestination]})
|
||||
threadTx(base, meta, *optSleDest, mods, j);
|
||||
|
||||
@@ -22,6 +22,9 @@
|
||||
#include <xrpl/ledger/ApplyView.h>
|
||||
#include <xrpl/protocol/Protocol.h>
|
||||
|
||||
#include <limits>
|
||||
#include <type_traits>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
std::optional<std::uint64_t>
|
||||
@@ -91,8 +94,21 @@ ApplyView::dirAdd(
|
||||
return page;
|
||||
}
|
||||
|
||||
// We rely on modulo arithmetic of unsigned integers (guaranteed in
|
||||
// [basic.fundamental] paragraph 2) to detect page representation overflow.
|
||||
// For signed integers this would be UB, hence static_assert here.
|
||||
static_assert(std::is_unsigned_v<decltype(page)>);
|
||||
// Defensive check against breaking changes in compiler.
|
||||
static_assert([]<typename T>(std::type_identity<T>) constexpr -> T {
|
||||
T tmp = std::numeric_limits<T>::max();
|
||||
return ++tmp;
|
||||
}(std::type_identity<decltype(page)>{}) == 0);
|
||||
++page;
|
||||
// Check whether we're out of pages.
|
||||
if (++page >= dirNodeMaxPages)
|
||||
if (page == 0)
|
||||
return std::nullopt;
|
||||
if (!rules().enabled(fixDirectoryLimit) &&
|
||||
page >= dirNodeMaxPages) // Old pages limit
|
||||
return std::nullopt;
|
||||
|
||||
// We are about to create a new node; we'll link it to
|
||||
@@ -133,8 +149,10 @@ ApplyView::emptyDirDelete(Keylet const& directory)
|
||||
if (directory.type != ltDIR_NODE ||
|
||||
node->getFieldH256(sfRootIndex) != directory.key)
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE("ripple::ApplyView::emptyDirDelete : invalid node type");
|
||||
return false;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
// The directory still contains entries and so it cannot be removed
|
||||
|
||||
@@ -36,7 +36,9 @@ BookDirs::BookDirs(ReadView const& view, Book const& book)
|
||||
{
|
||||
if (!cdirFirst(*view_, key_, sle_, entry_, index_))
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE("ripple::BookDirs::BookDirs : directory is empty");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -110,9 +112,11 @@ BookDirs::const_iterator::operator++()
|
||||
}
|
||||
else if (!cdirFirst(*view_, cur_key_, sle_, entry_, index_))
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::BookDirs::const_iterator::operator++ : directory is "
|
||||
"empty");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -77,19 +77,23 @@ deleteSLE(
|
||||
AccountID const& account, SField const& node, bool isOwner) -> TER {
|
||||
auto const sleAccount = view.peek(keylet::account(account));
|
||||
if (!sleAccount)
|
||||
{ // LCOV_EXCL_START
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
JLOG(j.fatal()) << "Internal error: can't retrieve Owner account.";
|
||||
return tecINTERNAL;
|
||||
} // LCOV_EXCL_STOP
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
// Remove object from owner directory
|
||||
std::uint64_t const page = sleCredential->getFieldU64(node);
|
||||
if (!view.dirRemove(
|
||||
keylet::ownerDir(account), page, sleCredential->key(), false))
|
||||
{ // LCOV_EXCL_START
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
JLOG(j.fatal()) << "Unable to delete Credential from owner.";
|
||||
return tefBAD_LEDGER;
|
||||
} // LCOV_EXCL_STOP
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
if (isOwner)
|
||||
adjustOwnerCount(view, sleAccount, -1, j);
|
||||
|
||||
@@ -324,10 +324,12 @@ isVaultPseudoAccountFrozen(
|
||||
auto const issuer = mptIssuance->getAccountID(sfIssuer);
|
||||
auto const mptIssuer = view.read(keylet::account(issuer));
|
||||
if (mptIssuer == nullptr)
|
||||
{ // LCOV_EXCL_START
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE("ripple::isVaultPseudoAccountFrozen : null MPToken issuer");
|
||||
return false;
|
||||
} // LCOV_EXCL_STOP
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
if (!mptIssuer->isFieldPresent(sfVaultID))
|
||||
return false; // not a Vault pseudo-account, common case
|
||||
@@ -338,7 +340,8 @@ isVaultPseudoAccountFrozen(
|
||||
{ // LCOV_EXCL_START
|
||||
UNREACHABLE("ripple::isVaultPseudoAccountFrozen : null vault");
|
||||
return false;
|
||||
} // LCOV_EXCL_STOP
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return isAnyFrozen(view, {issuer, account}, vault->at(sfAsset), depth + 1);
|
||||
}
|
||||
@@ -1131,7 +1134,7 @@ createPseudoAccount(
|
||||
uint256 const& pseudoOwnerKey,
|
||||
SField const& ownerField)
|
||||
{
|
||||
auto const& fields = getPseudoAccountFields();
|
||||
[[maybe_unused]] auto const& fields = getPseudoAccountFields();
|
||||
XRPL_ASSERT(
|
||||
std::count_if(
|
||||
fields.begin(),
|
||||
@@ -1239,6 +1242,12 @@ addEmptyHolding(
|
||||
// If the line already exists, don't create it again.
|
||||
if (view.read(index))
|
||||
return tecDUPLICATE;
|
||||
|
||||
// Can the account cover the trust line reserve ?
|
||||
std::uint32_t const ownerCount = sleDst->at(sfOwnerCount);
|
||||
if (priorBalance < view.fees().accountReserve(ownerCount + 1))
|
||||
return tecNO_LINE_INSUF_RESERVE;
|
||||
|
||||
return trustCreate(
|
||||
view,
|
||||
high,
|
||||
@@ -1289,7 +1298,7 @@ authorizeMPToken(
|
||||
{
|
||||
auto const sleAcct = view.peek(keylet::account(account));
|
||||
if (!sleAcct)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
// If the account that submitted the tx is a holder
|
||||
// Note: `account_` is holder's account
|
||||
@@ -1354,17 +1363,17 @@ authorizeMPToken(
|
||||
|
||||
auto const sleMptIssuance = view.read(keylet::mptIssuance(mptIssuanceID));
|
||||
if (!sleMptIssuance)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
// If the account that submitted this tx is the issuer of the MPT
|
||||
// Note: `account_` is issuer's account
|
||||
// `holderID` is holder's account
|
||||
if (account != (*sleMptIssuance)[sfIssuer])
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
auto const sleMpt = view.peek(keylet::mptoken(mptIssuanceID, *holderID));
|
||||
if (!sleMpt)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
std::uint32_t const flagsIn = sleMpt->getFieldU32(sfFlags);
|
||||
std::uint32_t flagsOut = flagsIn;
|
||||
@@ -1421,7 +1430,7 @@ trustCreate(
|
||||
describeOwnerDir(uLowAccountID));
|
||||
|
||||
if (!lowNode)
|
||||
return tecDIR_FULL;
|
||||
return tecDIR_FULL; // LCOV_EXCL_LINE
|
||||
|
||||
auto highNode = view.dirInsert(
|
||||
keylet::ownerDir(uHighAccountID),
|
||||
@@ -1429,14 +1438,14 @@ trustCreate(
|
||||
describeOwnerDir(uHighAccountID));
|
||||
|
||||
if (!highNode)
|
||||
return tecDIR_FULL;
|
||||
return tecDIR_FULL; // LCOV_EXCL_LINE
|
||||
|
||||
bool const bSetDst = saLimit.getIssuer() == uDstAccountID;
|
||||
bool const bSetHigh = bSrcHigh ^ bSetDst;
|
||||
|
||||
XRPL_ASSERT(sleAccount, "ripple::trustCreate : non-null SLE");
|
||||
if (!sleAccount)
|
||||
return tefINTERNAL;
|
||||
return tefINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
XRPL_ASSERT(
|
||||
sleAccount->getAccountID(sfAccount) ==
|
||||
@@ -1515,10 +1524,12 @@ removeEmptyHolding(
|
||||
{
|
||||
auto const sle = view.read(keylet::account(accountID));
|
||||
if (!sle)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
auto const balance = sle->getFieldAmount(sfBalance);
|
||||
if (balance.xrp() != 0)
|
||||
return tecHAS_OBLIGATIONS;
|
||||
|
||||
return tesSUCCESS;
|
||||
}
|
||||
|
||||
@@ -1536,7 +1547,8 @@ removeEmptyHolding(
|
||||
auto sleLowAccount =
|
||||
view.peek(keylet::account(line->at(sfLowLimit)->getIssuer()));
|
||||
if (!sleLowAccount)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
adjustOwnerCount(view, sleLowAccount, -1, journal);
|
||||
// It's not really necessary to clear the reserve flag, since the line
|
||||
// is about to be deleted, but this will make the metadata reflect an
|
||||
@@ -1550,7 +1562,8 @@ removeEmptyHolding(
|
||||
auto sleHighAccount =
|
||||
view.peek(keylet::account(line->at(sfHighLimit)->getIssuer()));
|
||||
if (!sleHighAccount)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
adjustOwnerCount(view, sleHighAccount, -1, journal);
|
||||
// It's not really necessary to clear the reserve flag, since the line
|
||||
// is about to be deleted, but this will make the metadata reflect an
|
||||
@@ -1610,7 +1623,7 @@ trustDelete(
|
||||
sleRippleState->key(),
|
||||
false))
|
||||
{
|
||||
return tefBAD_LEDGER;
|
||||
return tefBAD_LEDGER; // LCOV_EXCL_LINE
|
||||
}
|
||||
|
||||
JLOG(j.trace()) << "trustDelete: Deleting ripple line: high";
|
||||
@@ -1621,7 +1634,7 @@ trustDelete(
|
||||
sleRippleState->key(),
|
||||
false))
|
||||
{
|
||||
return tefBAD_LEDGER;
|
||||
return tefBAD_LEDGER; // LCOV_EXCL_LINE
|
||||
}
|
||||
|
||||
JLOG(j.trace()) << "trustDelete: Deleting ripple line: state";
|
||||
@@ -1647,7 +1660,7 @@ offerDelete(ApplyView& view, std::shared_ptr<SLE> const& sle, beast::Journal j)
|
||||
offerIndex,
|
||||
false))
|
||||
{
|
||||
return tefBAD_LEDGER;
|
||||
return tefBAD_LEDGER; // LCOV_EXCL_LINE
|
||||
}
|
||||
|
||||
if (!view.dirRemove(
|
||||
@@ -1656,7 +1669,7 @@ offerDelete(ApplyView& view, std::shared_ptr<SLE> const& sle, beast::Journal j)
|
||||
offerIndex,
|
||||
false))
|
||||
{
|
||||
return tefBAD_LEDGER;
|
||||
return tefBAD_LEDGER; // LCOV_EXCL_LINE
|
||||
}
|
||||
|
||||
if (sle->isFieldPresent(sfAdditionalBooks))
|
||||
@@ -1820,7 +1833,7 @@ rippleCreditIOU(
|
||||
|
||||
auto const sleAccount = view.peek(keylet::account(uReceiverID));
|
||||
if (!sleAccount)
|
||||
return tefINTERNAL;
|
||||
return tefINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
bool const noRipple = (sleAccount->getFlags() & lsfDefaultRipple) == 0;
|
||||
|
||||
@@ -1910,14 +1923,16 @@ accountSendIOU(
|
||||
{
|
||||
if (saAmount < beast::zero || saAmount.holds<MPTIssue>())
|
||||
{
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
XRPL_ASSERT(
|
||||
saAmount >= beast::zero && !saAmount.holds<MPTIssue>(),
|
||||
"ripple::accountSendIOU : minimum amount and not MPT");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
/* If we aren't sending anything or if the sender is the same as the
|
||||
@@ -1974,8 +1989,10 @@ accountSendIOU(
|
||||
{
|
||||
// VFALCO Its laborious to have to mutate the
|
||||
// TER based on params everywhere
|
||||
// LCOV_EXCL_START
|
||||
terResult = view.open() ? TER{telFAILED_PROCESSING}
|
||||
: TER{tecFAILED_PROCESSING};
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -2062,7 +2079,7 @@ rippleCreditMPT(
|
||||
view.update(sleIssuance);
|
||||
}
|
||||
else
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -2322,7 +2339,7 @@ issueIOU(
|
||||
|
||||
auto const receiverAccount = view.peek(keylet::account(account));
|
||||
if (!receiverAccount)
|
||||
return tefINTERNAL;
|
||||
return tefINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
bool noRipple = (receiverAccount->getFlags() & lsfDefaultRipple) == 0;
|
||||
|
||||
@@ -2410,11 +2427,13 @@ redeemIOU(
|
||||
// In order to hold an IOU, a trust line *MUST* exist to track the
|
||||
// balance. If it doesn't, then something is very wrong. Don't try
|
||||
// to continue.
|
||||
// LCOV_EXCL_START
|
||||
JLOG(j.fatal()) << "redeemIOU: " << to_string(account)
|
||||
<< " attempts to redeem " << amount.getFullText()
|
||||
<< " but no trust line exists!";
|
||||
|
||||
return tefINTERNAL;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
TER
|
||||
@@ -2434,7 +2453,7 @@ transferXRP(
|
||||
SLE::pointer const sender = view.peek(keylet::account(from));
|
||||
SLE::pointer const receiver = view.peek(keylet::account(to));
|
||||
if (!sender || !receiver)
|
||||
return tefINTERNAL;
|
||||
return tefINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
JLOG(j.trace()) << "transferXRP: " << to_string(from) << " -> "
|
||||
<< to_string(to) << ") : " << amount.getFullText();
|
||||
@@ -2444,8 +2463,10 @@ transferXRP(
|
||||
// VFALCO Its unfortunate we have to keep
|
||||
// mutating these TER everywhere
|
||||
// FIXME: this logic should be moved to callers maybe?
|
||||
// LCOV_EXCL_START
|
||||
return view.open() ? TER{telFAILED_PROCESSING}
|
||||
: TER{tecFAILED_PROCESSING};
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
// Decrement XRP balance.
|
||||
@@ -2676,7 +2697,8 @@ enforceMPTokenAuthorization(
|
||||
UNREACHABLE(
|
||||
"ripple::enforceMPTokenAuthorization : condition list is incomplete");
|
||||
return tefINTERNAL;
|
||||
} // LCOV_EXCL_STOP
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
TER
|
||||
canTransfer(
|
||||
@@ -2725,11 +2747,13 @@ cleanupOnAccountDelete(
|
||||
if (!sleItem)
|
||||
{
|
||||
// Directory node has an invalid index. Bail out.
|
||||
// LCOV_EXCL_START
|
||||
JLOG(j.fatal())
|
||||
<< "DeleteAccount: Directory node in ledger " << view.seq()
|
||||
<< " has index to object that is missing: "
|
||||
<< to_string(dirEntry);
|
||||
return tefBAD_LEDGER;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
LedgerEntryType const nodeType{safe_cast<LedgerEntryType>(
|
||||
@@ -2762,9 +2786,11 @@ cleanupOnAccountDelete(
|
||||
"ripple::cleanupOnAccountDelete : minimum dir entries");
|
||||
if (uDirEntry == 0)
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
JLOG(j.error())
|
||||
<< "DeleteAccount iterator re-validation failed.";
|
||||
return tefBAD_LEDGER;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
if (skipEntry == SkipEntry::No)
|
||||
uDirEntry--;
|
||||
@@ -2784,7 +2810,7 @@ deleteAMMTrustLine(
|
||||
beast::Journal j)
|
||||
{
|
||||
if (!sleState || sleState->getType() != ltRIPPLE_STATE)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
auto const& [low, high] = std::minmax(
|
||||
sleState->getFieldAmount(sfLowLimit).getIssuer(),
|
||||
@@ -2792,13 +2818,14 @@ deleteAMMTrustLine(
|
||||
auto sleLow = view.peek(keylet::account(low));
|
||||
auto sleHigh = view.peek(keylet::account(high));
|
||||
if (!sleLow || !sleHigh)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
bool const ammLow = sleLow->isFieldPresent(sfAMMID);
|
||||
bool const ammHigh = sleHigh->isFieldPresent(sfAMMID);
|
||||
|
||||
// can't both be AMM
|
||||
if (ammLow && ammHigh)
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
// at least one must be
|
||||
if (!ammLow && !ammHigh)
|
||||
@@ -2818,7 +2845,7 @@ deleteAMMTrustLine(
|
||||
|
||||
auto const uFlags = !ammLow ? lsfLowReserve : lsfHighReserve;
|
||||
if (!(sleState->getFlags() & uFlags))
|
||||
return tecINTERNAL;
|
||||
return tecINTERNAL; // LCOV_EXCL_LINE
|
||||
|
||||
adjustOwnerCount(view, !ammLow ? sleLow : sleHigh, -1, j);
|
||||
|
||||
@@ -3126,7 +3153,7 @@ rippleUnlockEscrowMPT(
|
||||
{ // LCOV_EXCL_START
|
||||
JLOG(j.error())
|
||||
<< "rippleUnlockEscrowMPT: MPToken not found for " << receiver;
|
||||
return tecOBJECT_NOT_FOUND; // LCOV_EXCL_LINE
|
||||
return tecOBJECT_NOT_FOUND;
|
||||
} // LCOV_EXCL_STOP
|
||||
|
||||
auto current = sle->getFieldU64(sfMPTAmount);
|
||||
|
||||
@@ -36,7 +36,7 @@ namespace BuildInfo {
|
||||
// and follow the format described at http://semver.org/
|
||||
//------------------------------------------------------------------------------
|
||||
// clang-format off
|
||||
char const* const versionString = "2.6.1-rc1"
|
||||
char const* const versionString = "3.0.0-b1"
|
||||
// clang-format on
|
||||
|
||||
#if defined(DEBUG) || defined(SANITIZER)
|
||||
|
||||
@@ -147,6 +147,19 @@ Permission::getGranularTxType(GranularPermissionType const& gpType) const
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
std::optional<std::reference_wrapper<uint256 const>> const
|
||||
Permission::getTxFeature(TxType txType) const
|
||||
{
|
||||
auto const txFeaturesIt = txFeatureMap_.find(txType);
|
||||
XRPL_ASSERT(
|
||||
txFeaturesIt != txFeatureMap_.end(),
|
||||
"ripple::Permissions::getTxFeature : tx exists in txFeatureMap_");
|
||||
|
||||
if (txFeaturesIt->second == uint256{})
|
||||
return std::nullopt;
|
||||
return txFeaturesIt->second;
|
||||
}
|
||||
|
||||
bool
|
||||
Permission::isDelegatable(
|
||||
std::uint32_t const& permissionValue,
|
||||
@@ -166,16 +179,12 @@ Permission::isDelegatable(
|
||||
if (it == delegatableTx_.end())
|
||||
return false;
|
||||
|
||||
auto const txFeaturesIt = txFeatureMap_.find(txType);
|
||||
XRPL_ASSERT(
|
||||
txFeaturesIt != txFeatureMap_.end(),
|
||||
"ripple::Permissions::isDelegatable : tx exists in txFeatureMap_");
|
||||
auto const feature = getTxFeature(txType);
|
||||
|
||||
// fixDelegateV1_1: Delegation is only allowed if the required amendment
|
||||
// for the transaction is enabled. For transactions that do not require
|
||||
// an amendment, delegation is always allowed.
|
||||
if (txFeaturesIt->second != uint256{} &&
|
||||
!rules.enabled(txFeaturesIt->second))
|
||||
if (feature && !rules.enabled(*feature))
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
@@ -68,29 +68,6 @@
|
||||
|
||||
namespace ripple {
|
||||
|
||||
namespace {
|
||||
|
||||
// Use a static inside a function to help prevent order-of-initialization issues
|
||||
LocalValue<bool>&
|
||||
getStaticSTAmountCanonicalizeSwitchover()
|
||||
{
|
||||
static LocalValue<bool> r{true};
|
||||
return r;
|
||||
}
|
||||
} // namespace
|
||||
|
||||
bool
|
||||
getSTAmountCanonicalizeSwitchover()
|
||||
{
|
||||
return *getStaticSTAmountCanonicalizeSwitchover();
|
||||
}
|
||||
|
||||
void
|
||||
setSTAmountCanonicalizeSwitchover(bool v)
|
||||
{
|
||||
*getStaticSTAmountCanonicalizeSwitchover() = v;
|
||||
}
|
||||
|
||||
static std::uint64_t const tenTo14 = 100000000000000ull;
|
||||
static std::uint64_t const tenTo14m1 = tenTo14 - 1;
|
||||
static std::uint64_t const tenTo17 = tenTo14 * 1000;
|
||||
@@ -884,18 +861,14 @@ STAmount::canonicalize()
|
||||
return;
|
||||
}
|
||||
|
||||
if (getSTAmountCanonicalizeSwitchover())
|
||||
{
|
||||
// log(cMaxNativeN, 10) == 17
|
||||
if (native() && mOffset > 17)
|
||||
Throw<std::runtime_error>(
|
||||
"Native currency amount out of range");
|
||||
// log(maxMPTokenAmount, 10) ~ 18.96
|
||||
if (mAsset.holds<MPTIssue>() && mOffset > 18)
|
||||
Throw<std::runtime_error>("MPT amount out of range");
|
||||
}
|
||||
// log(cMaxNativeN, 10) == 17
|
||||
if (native() && mOffset > 17)
|
||||
Throw<std::runtime_error>("Native currency amount out of range");
|
||||
// log(maxMPTokenAmount, 10) ~ 18.96
|
||||
if (mAsset.holds<MPTIssue>() && mOffset > 18)
|
||||
Throw<std::runtime_error>("MPT amount out of range");
|
||||
|
||||
if (getSTNumberSwitchover() && getSTAmountCanonicalizeSwitchover())
|
||||
if (getSTNumberSwitchover())
|
||||
{
|
||||
Number num(
|
||||
mIsNegative ? -mValue : mValue, mOffset, Number::unchecked{});
|
||||
@@ -919,16 +892,14 @@ STAmount::canonicalize()
|
||||
|
||||
while (mOffset > 0)
|
||||
{
|
||||
if (getSTAmountCanonicalizeSwitchover())
|
||||
{
|
||||
// N.B. do not move the overflow check to after the
|
||||
// multiplication
|
||||
if (native() && mValue > cMaxNativeN)
|
||||
Throw<std::runtime_error>(
|
||||
"Native currency amount out of range");
|
||||
else if (!native() && mValue > maxMPTokenAmount)
|
||||
Throw<std::runtime_error>("MPT amount out of range");
|
||||
}
|
||||
// N.B. do not move the overflow check to after the
|
||||
// multiplication
|
||||
if (native() && mValue > cMaxNativeN)
|
||||
Throw<std::runtime_error>(
|
||||
"Native currency amount out of range");
|
||||
else if (!native() && mValue > maxMPTokenAmount)
|
||||
Throw<std::runtime_error>("MPT amount out of range");
|
||||
|
||||
mValue *= 10;
|
||||
--mOffset;
|
||||
}
|
||||
|
||||
@@ -112,7 +112,9 @@ void
|
||||
STBase::add(Serializer& s) const
|
||||
{
|
||||
// Should never be called
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE("ripple::STBase::add : not implemented");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
bool
|
||||
|
||||
@@ -249,4 +249,33 @@ STUInt64::getJson(JsonOptions) const
|
||||
return convertToString(value_, 16); // Convert to base 16
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
template <>
|
||||
STInteger<std::int32_t>::STInteger(SerialIter& sit, SField const& name)
|
||||
: STInteger(name, sit.get32())
|
||||
{
|
||||
}
|
||||
|
||||
template <>
|
||||
SerializedTypeID
|
||||
STInt32::getSType() const
|
||||
{
|
||||
return STI_INT32;
|
||||
}
|
||||
|
||||
template <>
|
||||
std::string
|
||||
STInt32::getText() const
|
||||
{
|
||||
return std::to_string(value_);
|
||||
}
|
||||
|
||||
template <>
|
||||
Json::Value
|
||||
STInt32::getJson(JsonOptions) const
|
||||
{
|
||||
return value_;
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -647,6 +647,12 @@ STObject::getFieldH256(SField const& field) const
|
||||
return getFieldByValue<STUInt256>(field);
|
||||
}
|
||||
|
||||
std::int32_t
|
||||
STObject::getFieldI32(SField const& field) const
|
||||
{
|
||||
return getFieldByValue<STInt32>(field);
|
||||
}
|
||||
|
||||
AccountID
|
||||
STObject::getAccountID(SField const& field) const
|
||||
{
|
||||
@@ -682,6 +688,16 @@ STObject::getFieldV256(SField const& field) const
|
||||
return getFieldByConstRef<STVector256>(field, empty);
|
||||
}
|
||||
|
||||
STObject
|
||||
STObject::getFieldObject(SField const& field) const
|
||||
{
|
||||
STObject const empty{field};
|
||||
auto ret = getFieldByConstRef<STObject>(field, empty);
|
||||
if (ret != empty)
|
||||
ret.applyTemplateFromSField(field);
|
||||
return ret;
|
||||
}
|
||||
|
||||
STArray const&
|
||||
STObject::getFieldArray(SField const& field) const
|
||||
{
|
||||
@@ -761,6 +777,12 @@ STObject::setFieldH256(SField const& field, uint256 const& v)
|
||||
setFieldUsingSetValue<STUInt256>(field, v);
|
||||
}
|
||||
|
||||
void
|
||||
STObject::setFieldI32(SField const& field, std::int32_t v)
|
||||
{
|
||||
setFieldUsingSetValue<STInt32>(field, v);
|
||||
}
|
||||
|
||||
void
|
||||
STObject::setFieldV256(SField const& field, STVector256 const& v)
|
||||
{
|
||||
@@ -821,6 +843,12 @@ STObject::setFieldArray(SField const& field, STArray const& v)
|
||||
setFieldUsingAssignment(field, v);
|
||||
}
|
||||
|
||||
void
|
||||
STObject::setFieldObject(SField const& field, STObject const& v)
|
||||
{
|
||||
setFieldUsingAssignment(field, v);
|
||||
}
|
||||
|
||||
Json::Value
|
||||
STObject::getJson(JsonOptions options) const
|
||||
{
|
||||
Some files were not shown because too many files have changed in this diff.