Mirror of https://github.com/XRPLF/clio.git (synced 2025-11-13 00:05:51 +00:00)

Compare commits: 2.5.0...experiment (150 commits)
| SHA1 |
|---|
| 7eaf0005e4 |
| 497721ee7c |
| 26530108e3 |
| fc88abdaeb |
| 3f2ada3439 |
| e996f2b7ab |
| 26112d17f8 |
| e4abec4b98 |
| 503e23055b |
| 97480ce626 |
| bd966e636e |
| 91b248e3b2 |
| 140ac78e15 |
| f1bf423f69 |
| dcf369e4ec |
| 56f4dc591c |
| c40cd8154f |
| 989a0c8468 |
| 1adbed7913 |
| 490ec41083 |
| 384e79cd32 |
| 8bc36c2c0b |
| 9edc26a2a3 |
| 08bb619964 |
| 26ef25f864 |
| 4e9558f76b |
| a62084a4f0 |
| b8c298b734 |
| cf4d5d649a |
| eb2778ccad |
| 790402bcfb |
| 7c68770787 |
| d9faf7a833 |
| 90ac03cae7 |
| 3a667f558c |
| 0a2930d861 |
| e86178b523 |
| 10e15b524f |
| 402ab29a73 |
| 3df28f42ec |
| 0e8896ad06 |
| ffd18049eb |
| 7413e02a05 |
| 0403248a8f |
| 84db880ce7 |
| e6b2f9cde7 |
| 2512a9c8e7 |
| 5e7f6bb5bd |
| ae15bbd7b5 |
| f88ce31363 |
| 33c0737933 |
| b26fcae690 |
| 60baaf921f |
| f41e06061f |
| c170c56a84 |
| e03f5e46c0 |
| 30da8d8f63 |
| c9c392679d |
| 47f5ae5f12 |
| 6c34458d6c |
| 8f6bec2e25 |
| ec40cc93ff |
| 3681ef4e41 |
| e2fbf56277 |
| 2d48de372b |
| 0d9a83fd4d |
| c780ef8a0b |
| d833d36896 |
| 7a2090bc00 |
| b5892dd139 |
| a172d0b7ea |
| 47c2af0421 |
| c3e04426d3 |
| e9ab081ab7 |
| caedb51f00 |
| e6abdda0a7 |
| d598396445 |
| bbd2884e3b |
| 46c96654ee |
| 57ac234657 |
| 4232359dce |
| 8b1cab46e7 |
| e05505aa4f |
| 73bc85864b |
| 373430924b |
| 8ad111655c |
| 0a8470758d |
| 1ec906addc |
| afc0a358d9 |
| af284dda37 |
| 7558348d14 |
| 0d262e74bc |
| 312e7be2b4 |
| de9b79adf0 |
| 6c68360234 |
| 7e42507b9a |
| 36bfcc7543 |
| 4a5278a915 |
| 333b73e882 |
| 9420c506ca |
| 707427c63a |
| 5eea26d9ac |
| 226d386be2 |
| c95d8f2f89 |
| ed5dfc6c0e |
| 2600198bd5 |
| c83be63b9c |
| 4aa2ca94de |
| 87565b685a |
| e4d0c1ca48 |
| 498232baf8 |
| 43f4828a61 |
| 1a298dedd2 |
| 6c9c88e3fc |
| 156b858db7 |
| cfffbfba9d |
| 97a938a2a9 |
| 63d664b9fb |
| 40824812d6 |
| 288ffb8fef |
| a9cf781ca5 |
| a446d85297 |
| dc18aefb33 |
| e312354016 |
| ba905e38ae |
| f35e5ac784 |
| a9b02fb292 |
| bc8a2c19aa |
| 46b86a5d61 |
| 3defcaecac |
| d7888d5556 |
| f518936e69 |
| 9e35f16be1 |
| d048641242 |
| f347a732a6 |
| 0e2ba4a64e |
| 4a4f8842bd |
| a63805d631 |
| b3beb50e8f |
| 0be712c363 |
| ad5f0642ba |
| 4948882545 |
| 5778363689 |
| d6fec5b5ff |
| e380214496 |
| 1463b0e3e5 |
| 60bbe1eb72 |
| b29e2e4c88 |
| bcaa5f3392 |
| 3f0f20a542 |
@@ -16,6 +16,7 @@ coverage:
 #
 # More info: https://github.com/XRPLF/clio/pull/2066
 ignore:
+  - "benchmarks"
   - "tests"
   - "src/data/cassandra/"
   - "src/data/CassandraBackend.hpp"
@@ -24,6 +24,7 @@ inputs:
   dockerhub_repo:
     description: DockerHub repository name
     required: false
+    default: ""
   dockerhub_description:
     description: Short description of the image
     required: false
@@ -33,14 +34,14 @@ runs:
   steps:
     - name: Login to DockerHub
       if: ${{ inputs.push_image == 'true' && inputs.dockerhub_repo != '' }}
-      uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+      uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
       with:
         username: ${{ env.DOCKERHUB_USER }}
         password: ${{ env.DOCKERHUB_PW }}

     - name: Login to GitHub Container Registry
       if: ${{ inputs.push_image == 'true' }}
-      uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+      uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
       with:
         registry: ghcr.io
         username: ${{ github.repository_owner }}
@@ -51,7 +52,7 @@ runs:
         cache-image: false
     - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

-    - uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+    - uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
      id: meta
      with:
        images: ${{ inputs.images }}
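The hunks above also illustrate the repository's convention of pinning third-party actions to a full commit SHA, with the release tag kept in a trailing comment: a tag such as `v3.5.0` can be re-pointed by the action's maintainer, while a commit hash cannot. A minimal sketch of the pattern (the workflow and job names here are illustrative, not from the repository; only the `docker/login-action` pin is taken from the diff above):

```yaml
name: example # hypothetical workflow, for illustration only
on: workflow_dispatch
jobs:
  login:
    runs-on: ubuntu-latest
    steps:
      # Pin to an immutable commit; the comment records which release it is,
      # so Dependabot and reviewers can still track upgrades.
      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
```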
.github/actions/cmake/action.yml (vendored; new file, 73 lines)

@@ -0,0 +1,73 @@
+name: Run CMake
+description: Run CMake to generate build files
+
+inputs:
+  build_dir:
+    description: Build directory
+    required: false
+    default: "build"
+  conan_profile:
+    description: Conan profile name
+    required: true
+  build_type:
+    description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
+    required: true
+    default: "Release"
+  integration_tests:
+    description: Whether to generate target integration tests
+    required: true
+    default: "true"
+  benchmark:
+    description: Whether to generate targets for benchmarks
+    required: true
+    default: "true"
+  code_coverage:
+    description: Whether to enable code coverage
+    required: true
+    default: "false"
+  static:
+    description: Whether Clio is to be statically linked
+    required: true
+    default: "false"
+  time_trace:
+    description: Whether to enable compiler trace reports
+    required: true
+    default: "false"
+  package:
+    description: Whether to generate Debian package
+    required: true
+    default: "false"
+
+runs:
+  using: composite
+  steps:
+    - name: Run cmake
+      shell: bash
+      env:
+        BUILD_TYPE: "${{ inputs.build_type }}"
+        SANITIZER_OPTION: |-
+          ${{ endsWith(inputs.conan_profile, '.asan') && '-Dsan=address' ||
+              endsWith(inputs.conan_profile, '.tsan') && '-Dsan=thread' ||
+              endsWith(inputs.conan_profile, '.ubsan') && '-Dsan=undefined' ||
+              '' }}
+        INTEGRATION_TESTS: "${{ inputs.integration_tests == 'true' && 'ON' || 'OFF' }}"
+        BENCHMARK: "${{ inputs.benchmark == 'true' && 'ON' || 'OFF' }}"
+        COVERAGE: "${{ inputs.code_coverage == 'true' && 'ON' || 'OFF' }}"
+        STATIC: "${{ inputs.static == 'true' && 'ON' || 'OFF' }}"
+        TIME_TRACE: "${{ inputs.time_trace == 'true' && 'ON' || 'OFF' }}"
+        PACKAGE: "${{ inputs.package == 'true' && 'ON' || 'OFF' }}"
+      run: |
+        cmake \
+          -B ${{inputs.build_dir}} \
+          -S . \
+          -G Ninja \
+          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
+          -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
+          "${SANITIZER_OPTION}" \
+          -Dtests=ON \
+          -Dintegration_tests="${INTEGRATION_TESTS}" \
+          -Dbenchmark="${BENCHMARK}" \
+          -Dcoverage="${COVERAGE}" \
+          -Dstatic="${STATIC}" \
+          -Dtime_trace="${TIME_TRACE}" \
+          -Dpackage="${PACKAGE}"
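GitHub Actions expressions have no switch statement, so `SANITIZER_OPTION` chains the short-circuiting `&&`/`||` operators to select a `-Dsan=...` flag from the Conan profile suffix, and the boolean-ish string inputs are likewise mapped to CMake's `ON`/`OFF`. A worked example of how the environment would resolve for one hypothetical invocation (the `gcc.asan` profile value is illustrative):

```yaml
# Hypothetical inputs to .github/actions/cmake:
#   conan_profile: gcc.asan
#   build_type: Release
#   integration_tests: "false"
#
# The step's env then resolves to:
#   SANITIZER_OPTION: "-Dsan=address"  # endsWith(conan_profile, '.asan') matched first
#   INTEGRATION_TESTS: "OFF"           # 'false' != 'true', so the || branch wins
#   BUILD_TYPE: "Release"
#
# and cmake is invoked with ... -Dsan=address -Dintegration_tests="OFF" ...
```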
.github/actions/code_coverage/action.yml (vendored; 1 line changed)

@@ -15,6 +15,7 @@ runs:
       shell: bash
       run: |
         gcovr \
+          -e benchmarks \
           -e tests \
           -e src/data/cassandra \
           -e src/data/CassandraBackend.hpp \
.github/actions/conan/action.yml (vendored; new file, 38 lines)

@@ -0,0 +1,38 @@
+name: Run Conan
+description: Run conan to install dependencies
+
+inputs:
+  build_dir:
+    description: Build directory
+    required: false
+    default: "build"
+  conan_profile:
+    description: Conan profile name
+    required: true
+  force_conan_source_build:
+    description: Whether conan should build all dependencies from source
+    required: true
+    default: "false"
+  build_type:
+    description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
+    required: true
+    default: "Release"
+
+runs:
+  using: composite
+  steps:
+    - name: Create build directory
+      shell: bash
+      run: mkdir -p "${{ inputs.build_dir }}"
+
+    - name: Run conan
+      shell: bash
+      env:
+        CONAN_BUILD_OPTION: "${{ inputs.force_conan_source_build == 'true' && '*' || 'missing' }}"
+      run: |
+        conan \
+          install . \
+          -of build \
+          -b "$CONAN_BUILD_OPTION" \
+          -s "build_type=${{ inputs.build_type }}" \
+          --profile:all "${{ inputs.conan_profile }}"
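Splitting the old `generate` action into separate `conan` and `cmake` composite actions means a workflow now calls the two phases back to back, as `build_impl.yml` does further down in this diff. A minimal sketch of such a job (the job name and profile value are illustrative):

```yaml
jobs:
  configure:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      # Phase 1: install dependencies into ./build with the chosen profile.
      - name: Run conan
        uses: ./.github/actions/conan
        with:
          conan_profile: gcc
          build_type: Release

      # Phase 2: generate Ninja build files with the toolchain conan produced.
      - name: Run CMake
        uses: ./.github/actions/cmake
        with:
          conan_profile: gcc
          build_type: Release
```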
.github/actions/generate/action.yml (vendored; deleted, 79 lines)

@@ -1,79 +0,0 @@
-name: Run conan and cmake
-description: Run conan and cmake
-
-inputs:
-  conan_profile:
-    description: Conan profile name
-    required: true
-  force_conan_source_build:
-    description: Whether conan should build all dependencies from source
-    required: true
-    default: "false"
-  build_type:
-    description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
-    required: true
-    default: "Release"
-  build_integration_tests:
-    description: Whether to build integration tests
-    required: true
-    default: "true"
-  code_coverage:
-    description: Whether conan's coverage option should be on or not
-    required: true
-    default: "false"
-  static:
-    description: Whether Clio is to be statically linked
-    required: true
-    default: "false"
-  time_trace:
-    description: Whether to enable compiler trace reports
-    required: true
-    default: "false"
-
-runs:
-  using: composite
-  steps:
-    - name: Create build directory
-      shell: bash
-      run: mkdir -p build
-
-    - name: Run conan
-      shell: bash
-      env:
-        CONAN_BUILD_OPTION: "${{ inputs.force_conan_source_build == 'true' && '*' || 'missing' }}"
-        CODE_COVERAGE: "${{ inputs.code_coverage == 'true' && 'True' || 'False' }}"
-        STATIC_OPTION: "${{ inputs.static == 'true' && 'True' || 'False' }}"
-        INTEGRATION_TESTS_OPTION: "${{ inputs.build_integration_tests == 'true' && 'True' || 'False' }}"
-        TIME_TRACE: "${{ inputs.time_trace == 'true' && 'True' || 'False' }}"
-      run: |
-        cd build
-        conan \
-          install .. \
-          -of . \
-          -b "$CONAN_BUILD_OPTION" \
-          -s "build_type=${{ inputs.build_type }}" \
-          -o "&:static=${STATIC_OPTION}" \
-          -o "&:tests=True" \
-          -o "&:integration_tests=${INTEGRATION_TESTS_OPTION}" \
-          -o "&:lint=False" \
-          -o "&:coverage=${CODE_COVERAGE}" \
-          -o "&:time_trace=${TIME_TRACE}" \
-          --profile:all "${{ inputs.conan_profile }}"
-
-    - name: Run cmake
-      shell: bash
-      env:
-        BUILD_TYPE: "${{ inputs.build_type }}"
-        SANITIZER_OPTION: |-
-          ${{ endsWith(inputs.conan_profile, '.asan') && '-Dsan=address' ||
-              endsWith(inputs.conan_profile, '.tsan') && '-Dsan=thread' ||
-              endsWith(inputs.conan_profile, '.ubsan') && '-Dsan=undefined' ||
-              '' }}
-      run: |
-        cd build
-        cmake \
-          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-          -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
-          "${SANITIZER_OPTION}" \
-          .. \
-          -G Ninja
.github/actions/prepare_runner/action.yml (vendored; deleted, 77 lines)

@@ -1,77 +0,0 @@
-name: Prepare runner
-description: Install packages, set environment variables, create directories
-
-inputs:
-  disable_ccache:
-    description: Whether ccache should be disabled
-    required: true
-
-runs:
-  using: composite
-  steps:
-    - name: Install packages on mac
-      if: ${{ runner.os == 'macOS' }}
-      shell: bash
-      run: |
-        brew install --quiet \
-          bison \
-          ca-certificates \
-          ccache \
-          clang-build-analyzer \
-          conan \
-          gh \
-          jq \
-          llvm@14 \
-          ninja \
-          pkg-config
-        echo "/opt/homebrew/opt/conan@2/bin" >> $GITHUB_PATH
-
-    - name: Install CMake 3.31.6 on mac
-      if: ${{ runner.os == 'macOS' }}
-      shell: bash
-      run: |
-        # Uninstall any existing cmake
-        brew uninstall --formula cmake --ignore-dependencies || true
-
-        # Download specific cmake formula
-        FORMULA_URL="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e46db74e74a8c1650b38b1da222284ce1ec5ce/Formula/c/cmake.rb"
-        FORMULA_EXPECTED_SHA256="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"
-
-        mkdir -p /tmp/homebrew-formula
-        curl -s -L "$FORMULA_URL" -o /tmp/homebrew-formula/cmake.rb
-
-        echo "$FORMULA_EXPECTED_SHA256 /tmp/homebrew-formula/cmake.rb" | shasum -a 256 -c
-
-        # Install cmake from the specific formula with force flag
-        brew install --formula --quiet --force /tmp/homebrew-formula/cmake.rb
-
-    - name: Fix git permissions on Linux
-      if: ${{ runner.os == 'Linux' }}
-      shell: bash
-      run: git config --global --add safe.directory "$PWD"
-
-    - name: Set env variables for macOS
-      if: ${{ runner.os == 'macOS' }}
-      shell: bash
-      run: |
-        echo "CCACHE_DIR=${{ github.workspace }}/.ccache" >> $GITHUB_ENV
-        echo "CONAN_HOME=${{ github.workspace }}/.conan2" >> $GITHUB_ENV
-
-    - name: Set env variables for Linux
-      if: ${{ runner.os == 'Linux' }}
-      shell: bash
-      run: |
-        echo "CCACHE_DIR=/root/.ccache" >> $GITHUB_ENV
-        echo "CONAN_HOME=/root/.conan2" >> $GITHUB_ENV
-
-    - name: Set CCACHE_DISABLE=1
-      if: ${{ inputs.disable_ccache == 'true' }}
-      shell: bash
-      run: |
-        echo "CCACHE_DISABLE=1" >> $GITHUB_ENV
-
-    - name: Create directories
-      shell: bash
-      run: |
-        mkdir -p "$CCACHE_DIR"
-        mkdir -p "$CONAN_HOME"
.github/dependabot.yml (vendored; 30 lines changed)

@@ -39,6 +39,19 @@ updates:
       prefix: "ci: [DEPENDABOT] "
     target-branch: develop

+  - package-ecosystem: github-actions
+    directory: .github/actions/cmake/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
   - package-ecosystem: github-actions
     directory: .github/actions/code_coverage/
     schedule:
@@ -53,7 +66,7 @@ updates:
     target-branch: develop

   - package-ecosystem: github-actions
-    directory: .github/actions/create_issue/
+    directory: .github/actions/conan/
     schedule:
       interval: weekly
       day: monday
@@ -66,7 +79,7 @@ updates:
     target-branch: develop

   - package-ecosystem: github-actions
-    directory: .github/actions/generate/
+    directory: .github/actions/create_issue/
     schedule:
       interval: weekly
       day: monday
@@ -104,19 +117,6 @@ updates:
       prefix: "ci: [DEPENDABOT] "
     target-branch: develop

-  - package-ecosystem: github-actions
-    directory: .github/actions/prepare_runner/
-    schedule:
-      interval: weekly
-      day: monday
-      time: "04:00"
-      timezone: Etc/GMT
-    reviewers:
-      - XRPLF/clio-dev-team
-    commit-message:
-      prefix: "ci: [DEPENDABOT] "
-    target-branch: develop
-
   - package-ecosystem: github-actions
     directory: .github/actions/restore_cache/
     schedule:
.github/scripts/conan/apple-clang-ci.profile (vendored; deleted, 8 lines)

@@ -1,8 +0,0 @@
-[settings]
-arch={{detect_api.detect_arch()}}
-build_type=Release
-compiler=apple-clang
-compiler.cppstd=20
-compiler.libcxx=libc++
-compiler.version=16
-os=Macos
.github/scripts/conan/generate_matrix.py (vendored; 2 lines changed)

@@ -3,7 +3,7 @@ import itertools
 import json

 LINUX_OS = ["heavy", "heavy-arm64"]
-LINUX_CONTAINERS = ['{ "image": "ghcr.io/xrplf/clio-ci:latest" }']
+LINUX_CONTAINERS = ['{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }']
 LINUX_COMPILERS = ["gcc", "clang"]

 MACOS_OS = ["macos15"]
.github/scripts/conan/init.sh (vendored; 7 lines changed)

@@ -8,10 +8,11 @@ REPO_DIR="$(cd "$CURRENT_DIR/../../../" && pwd)"
 CONAN_DIR="${CONAN_HOME:-$HOME/.conan2}"
 PROFILES_DIR="$CONAN_DIR/profiles"

+# When developers' compilers are updated, these profiles might be different
 if [[ -z "$CI" ]]; then
-    APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-local.profile"
+    APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-17.profile"
 else
-    APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-ci.profile"
+    APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-17.profile"
 fi

 GCC_PROFILE="$REPO_DIR/docker/ci/conan/gcc.profile"
@@ -21,7 +22,7 @@ SANITIZER_TEMPLATE_FILE="$REPO_DIR/docker/ci/conan/sanitizer_template.profile"

 rm -rf "$CONAN_DIR"

-conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev
+conan remote add --index 0 xrplf https://conan.ripplex.io

 cp "$REPO_DIR/docker/ci/conan/global.conf" "$CONAN_DIR/global.conf"
.github/workflows/build.yml (vendored; 33 lines changed)

@@ -28,8 +28,9 @@ on:
   workflow_dispatch:

 concurrency:
-  # Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
-  group: ${{ github.workflow }}-${{ github.ref }}
+  # Develop branch: Each run gets unique group (using run_number) for parallel execution
+  # Other branches: Shared group with cancel-in-progress to stop old runs when new commits are pushed
+  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.ref == 'refs/heads/develop' && github.run_number || 'branch' }}
   cancel-in-progress: true

 jobs:
@@ -42,7 +43,10 @@ jobs:
         os: [heavy]
         conan_profile: [gcc, clang]
         build_type: [Release, Debug]
-        container: ['{ "image": "ghcr.io/xrplf/clio-ci:latest" }']
+        container:
+          [
+            '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }',
+          ]
         static: [true]

         include:
@@ -69,7 +73,7 @@ jobs:
     uses: ./.github/workflows/build_impl.yml
     with:
       runs_on: heavy
-      container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
+      container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
       conan_profile: gcc
       build_type: Debug
       disable_cache: false
@@ -81,17 +85,34 @@ jobs:
     secrets:
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

+  package:
+    name: Build packages
+
+    uses: ./.github/workflows/build_impl.yml
+    with:
+      runs_on: heavy
+      container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
+      conan_profile: gcc
+      build_type: Release
+      disable_cache: false
+      code_coverage: false
+      static: true
+      upload_clio_server: false
+      package: true
+      targets: package
+      analyze_build_time: false
+
   check_config:
     name: Check Config Description
     needs: build-and-test
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:latest
+      image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d

     steps:
       - uses: actions/checkout@v4

-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v5
         with:
           name: clio_server_Linux_Release_gcc
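The new `group` expression makes every run on `develop` its own concurrency group (so back-to-back merges build in parallel), while any other ref collapses to a single shared group whose older runs get cancelled. A sketch of how the group name evaluates, assuming the workflow is named `Build` and using an illustrative run number:

```yaml
# ref = refs/heads/develop, run_number = 512:
#   group: Build-refs/heads/develop-512        # unique per run -> nothing is cancelled
# ref = refs/heads/feature/x, any run:
#   group: Build-refs/heads/feature/x-branch   # shared -> a new push cancels the old run
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.ref == 'refs/heads/develop' && github.run_number || 'branch' }}
  cancel-in-progress: true
```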
.github/workflows/build_and_test.yml (vendored; 14 lines changed)

@@ -57,6 +57,18 @@ on:
         type: string
         default: all

+      expected_version:
+        description: Expected version of the clio_server binary
+        required: false
+        type: string
+        default: ""
+
+      package:
+        description: Whether to generate Debian package
+        required: false
+        type: boolean
+        default: false
+
 jobs:
   build:
     uses: ./.github/workflows/build_impl.yml
@@ -71,6 +83,8 @@ jobs:
       upload_clio_server: ${{ inputs.upload_clio_server }}
       targets: ${{ inputs.targets }}
       analyze_build_time: false
+      expected_version: ${{ inputs.expected_version }}
+      package: ${{ inputs.package }}

   test:
     needs: build
.github/workflows/build_clio_docker_image.yml (vendored; 16 lines changed)

@@ -48,7 +48,7 @@ jobs:

       - name: Download Clio binary from artifact
         if: ${{ inputs.artifact_name != null }}
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v5
         with:
           name: ${{ inputs.artifact_name }}
           path: ./docker/clio/artifact/
@@ -73,7 +73,8 @@ jobs:
           elif [[ $artifact == *.tar.gz ]]; then
             tar -xvf $artifact
           fi
-          mv clio_server ../
+          chmod +x ./clio_server
+          mv ./clio_server ../
           cd ../
           rm -rf ./artifact

@@ -82,6 +83,11 @@ jobs:
         shell: bash
         run: strip ./docker/clio/clio_server

+      - name: Set GHCR_REPO
+        id: set-ghcr-repo
+        run: |
+          echo "GHCR_REPO=$(echo ghcr.io/${{ github.repository_owner }} | tr '[:upper:]' '[:lower:]')" >> ${GITHUB_OUTPUT}
+
       - name: Build Docker image
         uses: ./.github/actions/build_docker_image
         env:
@@ -90,11 +96,11 @@ jobs:
           DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
         with:
           images: |
-            ghcr.io/xrplf/clio
-            rippleci/clio
+            ghcr.io/${{ steps.set-ghcr-repo.outputs.GHCR_REPO }}/clio
+            ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio' || '' }}
           push_image: ${{ inputs.publish_image }}
           directory: docker/clio
           tags: ${{ inputs.tags }}
           platforms: linux/amd64
-          dockerhub_repo: rippleci/clio
+          dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio' || '' }}
           dockerhub_description: Clio is an XRP Ledger API server.
.github/workflows/build_impl.yml (vendored; 56 lines changed)

@@ -53,6 +53,17 @@ on:
         required: true
         type: boolean

+      expected_version:
+        description: Expected version of the clio_server binary
+        required: false
+        type: string
+        default: ""
+
+      package:
+        description: Whether to generate Debian package
+        required: false
+        type: boolean
+
     secrets:
       CODECOV_TOKEN:
         required: false
@@ -64,16 +75,20 @@ jobs:
     container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}

     steps:
-      - name: Clean workdir
+      - name: Cleanup workspace
         if: ${{ runner.os == 'macOS' }}
-        uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
+        uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f

       - uses: actions/checkout@v4
         with:
           fetch-depth: 0
+          # We need to fetch tags to have correct version in the release
+          # The workaround is based on https://github.com/actions/checkout/issues/1467
+          fetch-tags: true
+          ref: ${{ github.ref }}

       - name: Prepare runner
-        uses: ./.github/actions/prepare_runner
+        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
         with:
           disable_ccache: ${{ inputs.disable_cache }}

@@ -92,14 +107,21 @@ jobs:
           build_type: ${{ inputs.build_type }}
           code_coverage: ${{ inputs.code_coverage }}

-      - name: Run conan and cmake
-        uses: ./.github/actions/generate
+      - name: Run conan
+        uses: ./.github/actions/conan
+        with:
+          conan_profile: ${{ inputs.conan_profile }}
+          build_type: ${{ inputs.build_type }}
+
+      - name: Run CMake
+        uses: ./.github/actions/cmake
         with:
           conan_profile: ${{ inputs.conan_profile }}
           build_type: ${{ inputs.build_type }}
           code_coverage: ${{ inputs.code_coverage }}
           static: ${{ inputs.static }}
           time_trace: ${{ inputs.analyze_build_time }}
+          package: ${{ inputs.package }}

       - name: Build Clio
         uses: ./.github/actions/build_clio
@@ -147,19 +169,26 @@ jobs:
           path: build/clio_server

       - name: Upload clio_tests
-        if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
+        if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
         uses: actions/upload-artifact@v4
         with:
           name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
           path: build/clio_tests

       - name: Upload clio_integration_tests
-        if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
+        if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
         uses: actions/upload-artifact@v4
         with:
           name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
           path: build/clio_integration_tests

+      - name: Upload Clio Linux package
+        if: inputs.package
+        uses: actions/upload-artifact@v4
+        with:
+          name: clio_deb_package_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
+          path: build/*.deb
+
       - name: Save cache
         if: ${{ !inputs.disable_cache && github.ref == 'refs/heads/develop' }}
         uses: ./.github/actions/save_cache
@@ -174,7 +203,6 @@ jobs:

       # This is run as part of the build job, because it requires the following:
       # - source code
-      # - generated source code (Build.cpp)
       # - conan packages
       # - .gcno files in build directory
       #
@@ -183,6 +211,18 @@ jobs:
         if: ${{ inputs.code_coverage }}
         uses: ./.github/actions/code_coverage

+      - name: Verify expected version
+        if: ${{ inputs.expected_version != '' }}
+        shell: bash
+        run: |
+          set -e
+          EXPECTED_VERSION="clio-${{ inputs.expected_version }}"
+          actual_version=$(./build/clio_server --version)
+          if [[ "$actual_version" != "$EXPECTED_VERSION" ]]; then
+            echo "Expected version '$EXPECTED_VERSION', but got '$actual_version'"
+            exit 1
+          fi
+
       # `codecov/codecov-action` will rerun `gcov` if it's available and build directory is present
       # To prevent this from happening, we run this action in a separate workflow
       #
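The added `Verify expected version` step assumes `clio_server --version` prints exactly `clio-<version>`, so a tag build can be compared verbatim against the tag name. A standalone sketch of the check with a hypothetical version in place of `inputs.expected_version`:

```yaml
# Illustrative step; the "2.5.0" value stands in for inputs.expected_version.
- name: Verify expected version
  shell: bash
  run: |
    set -e
    EXPECTED_VERSION="clio-2.5.0"
    actual_version=$(./build/clio_server --version)   # expected to print e.g. "clio-2.5.0"
    if [[ "$actual_version" != "$EXPECTED_VERSION" ]]; then
      echo "Expected version '$EXPECTED_VERSION', but got '$actual_version'"
      exit 1
    fi
```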
.github/workflows/check_libxrpl.yml (vendored; 30 lines changed)

@@ -17,31 +17,36 @@ jobs:
     name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:latest
+      image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d

     steps:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0

+      - name: Prepare runner
+        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
+        with:
+          disable_ccache: true
+
       - name: Update libXRPL version requirement
         shell: bash
         run: |
-          sed -i.bak -E "s|'xrpl/[a-zA-Z0-9\\.\\-]+'|'xrpl/${{ github.event.client_payload.version }}'|g" conanfile.py
+          sed -i.bak -E "s|'xrpl/[a-zA-Z0-9\\.\\-]+'|'xrpl/${{ github.event.client_payload.conan_ref }}'|g" conanfile.py
           rm -f conanfile.py.bak

       - name: Update conan lockfile
         shell: bash
         run: |
-          conan lock create . -o '&:tests=True' -o '&:benchmark=True'
+          conan lock create . --profile:all ${{ env.CONAN_PROFILE }}

-      - name: Prepare runner
-        uses: ./.github/actions/prepare_runner
+      - name: Run conan
+        uses: ./.github/actions/conan
         with:
-          disable_ccache: true
+          conan_profile: ${{ env.CONAN_PROFILE }}

-      - name: Run conan and cmake
-        uses: ./.github/actions/generate
+      - name: Run CMake
+        uses: ./.github/actions/cmake
         with:
           conan_profile: ${{ env.CONAN_PROFILE }}

@@ -62,10 +67,10 @@ jobs:
     needs: build
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:latest
+      image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d

     steps:
-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v5
         with:
           name: clio_tests_check_libxrpl

@@ -95,6 +100,7 @@ jobs:
           labels: "compatibility,bug"
           title: "Proposed libXRPL check failed"
           body: >
-            Clio build or tests failed against `libXRPL ${{ github.event.client_payload.version }}`.
+            Clio build or tests failed against `libXRPL ${{ github.event.client_payload.conan_ref }}`.

-            Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
+            PR: ${{ github.event.client_payload.pr_url }}
+            Workflow run: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
.github/workflows/check_pr_title.yml (vendored; 2 lines changed)

@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: ytanikin/pr-conventional-commits@8267db1bacc237419f9ed0228bb9d94e94271a1d # v1.4.1
+      - uses: ytanikin/pr-conventional-commits@b72758283dcbee706975950e96bc4bf323a8d8c0 # v1.4.2
         with:
           task_types: '["build","feat","fix","docs","test","ci","style","refactor","perf","chore"]'
           add_label: false
.github/workflows/clang-tidy.yml (vendored; 13 lines changed)

@@ -24,7 +24,7 @@ jobs:
   clang_tidy:
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:latest
+      image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d

     permissions:
       contents: write
@@ -37,7 +37,7 @@ jobs:
           fetch-depth: 0

       - name: Prepare runner
-        uses: ./.github/actions/prepare_runner
+        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
         with:
           disable_ccache: true

@@ -48,8 +48,13 @@ jobs:
           conan_profile: ${{ env.CONAN_PROFILE }}
           ccache_dir: ${{ env.CCACHE_DIR }}

-      - name: Run conan and cmake
-        uses: ./.github/actions/generate
+      - name: Run conan
+        uses: ./.github/actions/conan
+        with:
+          conan_profile: ${{ env.CONAN_PROFILE }}
+
+      - name: Run CMake
+        uses: ./.github/actions/cmake
         with:
           conan_profile: ${{ env.CONAN_PROFILE }}
.github/workflows/docs.yml (vendored; 6 lines changed)

@@ -14,7 +14,7 @@ jobs:
   build:
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/xrplf/clio-ci:latest
+      image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d

     steps:
       - name: Checkout
@@ -23,7 +23,7 @@ jobs:
           lfs: true

       - name: Prepare runner
-        uses: ./.github/actions/prepare_runner
+        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
         with:
           disable_ccache: true

@@ -42,7 +42,7 @@ jobs:
         uses: actions/configure-pages@v5

       - name: Upload artifact
-        uses: actions/upload-pages-artifact@v3
+        uses: actions/upload-pages-artifact@v4
         with:
           path: build_docs/html
           name: docs-develop
.github/workflows/nightly.yml (vendored; 10 lines changed)

@@ -39,17 +39,17 @@ jobs:
             conan_profile: gcc
             build_type: Release
             static: true
-            container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
+            container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
           - os: heavy
             conan_profile: gcc
             build_type: Debug
             static: true
-            container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
+            container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
           - os: heavy
             conan_profile: gcc.ubsan
             build_type: Release
             static: false
-            container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
+            container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'

     uses: ./.github/workflows/build_and_test.yml
     with:
@@ -72,7 +72,7 @@ jobs:
         include:
           - os: heavy
             conan_profile: clang
-            container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
+            container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
             static: true
           - os: macos15
             conan_profile: apple-clang
@@ -100,8 +100,6 @@ jobs:
       title: "Clio development (nightly) build"
       version: nightly
       header: >
-        # Release notes
-
         > **Note:** Please remember that this is a development release and it is not recommended for production use.

         Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
.github/workflows/pre-commit-autoupdate.yml (vendored; 52 lines changed)

@@ -4,47 +4,19 @@ on:
   # every first day of the month
   schedule:
     - cron: "0 0 1 * *"
-  # on demand
+  pull_request:
+    branches: [release/*, develop]
+    paths:
+      - ".pre-commit-config.yaml"
   workflow_dispatch:

 jobs:
   auto-update:
-    runs-on: ubuntu-latest
-
-    permissions:
-      contents: write
-      pull-requests: write
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: actions/setup-python@v5
-        with:
-          python-version: 3.x
-
-      - run: pip install pre-commit
-      - run: pre-commit autoupdate --freeze
-      - run: pre-commit run --all-files || true
-
-      - uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0
-        if: github.event_name != 'pull_request'
-        with:
-          gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
-          passphrase: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
-          git_user_signingkey: true
-          git_commit_gpgsign: true
-
-      - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
-        if: always()
-        env:
-          GH_REPO: ${{ github.repository }}
-          GH_TOKEN: ${{ github.token }}
-        with:
-          commit-message: "style: Update pre-commit hooks"
-          committer: Clio CI <skuznetsov@ripple.com>
-          branch: update/pre-commit-hooks
-          branch-suffix: timestamp
-          delete-branch: true
-          title: "style: Update pre-commit hooks"
-          body: Update versions of pre-commit hooks to latest version.
-          reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
+    uses: XRPLF/actions/.github/workflows/pre-commit-autoupdate.yml@afbcbdafbe0ce5439492fb87eda6441371086386
+    with:
+      sign_commit: true
+      committer: "Clio CI <skuznetsov@ripple.com>"
+      reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
+    secrets:
+      GPG_PRIVATE_KEY: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
+      GPG_PASSPHRASE: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
.github/workflows/pre-commit.yml (vendored; 21 lines changed)

@@ -8,20 +8,7 @@ on:

 jobs:
   run-hooks:
-    runs-on: heavy
-    container:
-      image: ghcr.io/xrplf/clio-ci:latest
-
-    steps:
-      - name: Checkout Repo ⚡️
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Prepare runner
-        uses: ./.github/actions/prepare_runner
-        with:
-          disable_ccache: true
-
-      - name: Run pre-commit ✅
-        run: pre-commit run --all-files
+    uses: XRPLF/actions/.github/workflows/pre-commit.yml@afbcbdafbe0ce5439492fb87eda6441371086386
+    with:
+      runs_on: heavy
+      container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
.github/workflows/release.yml (vendored; 7 lines changed)

@@ -29,7 +29,7 @@ jobs:
           conan_profile: gcc
           build_type: Release
           static: true
-          container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
+          container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'

     uses: ./.github/workflows/build_and_test.yml
     with:
@@ -42,6 +42,7 @@ jobs:
       run_integration_tests: true
      upload_clio_server: true
      disable_cache: true
+      expected_version: ${{ github.event_name == 'push' && github.ref_name || '' }}

   release:
     needs: build-and-test
@@ -52,6 +53,6 @@ jobs:
       title: "${{ github.ref_name}}"
       version: "${{ github.ref_name }}"
       header: >
-        # Introducing Clio version ${{ github.ref_name }}
-      generate_changelog: true
+        ${{ contains(github.ref_name, '-') && '> **Note:** Please remember that this is a release candidate and it is not recommended for production use.' || '' }}
+      generate_changelog: ${{ !contains(github.ref_name, '-') }}
       draft: true
.github/workflows/release_impl.yml (vendored; 26 lines changed)

@@ -42,7 +42,7 @@ jobs:
   release:
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:latest
+      image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
     env:
       GH_REPO: ${{ github.repository }}
       GH_TOKEN: ${{ github.token }}
@@ -56,11 +56,11 @@ jobs:
           fetch-depth: 0

       - name: Prepare runner
-        uses: ./.github/actions/prepare_runner
+        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
         with:
           disable_ccache: true

-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v5
         with:
           path: release_artifacts
           pattern: clio_server_*
@@ -68,34 +68,24 @@ jobs:
       - name: Create release notes
         shell: bash
         run: |
-          printf '%s\n' "${{ inputs.header }}" > "${RUNNER_TEMP}/release_notes.md"
+          echo "# Release notes" > "${RUNNER_TEMP}/release_notes.md"
+          echo "" >> "${RUNNER_TEMP}/release_notes.md"
+          printf '%s\n' "${{ inputs.header }}" >> "${RUNNER_TEMP}/release_notes.md"

       - name: Generate changelog
         shell: bash
         if: ${{ inputs.generate_changelog }}
         run: |
-          LAST_TAG="$(gh release view --json tagName -q .tagName)"
+          LAST_TAG="$(gh release view --json tagName -q .tagName --repo XRPLF/clio)"
           LAST_TAG_COMMIT="$(git rev-parse $LAST_TAG)"
           BASE_COMMIT="$(git merge-base HEAD $LAST_TAG_COMMIT)"
-          git-cliff "${BASE_COMMIT}..HEAD" --ignore-tags "nightly|-b"
+          git-cliff "${BASE_COMMIT}..HEAD" --ignore-tags "nightly|-b|-rc"
           cat CHANGELOG.md >> "${RUNNER_TEMP}/release_notes.md"

       - name: Prepare release artifacts
         shell: bash
         run: .github/scripts/prepare-release-artifacts.sh release_artifacts

-      - name: Append sha256 checksums
-        shell: bash
-        working-directory: release_artifacts
-        run: |
-          {
-            echo '## SHA256 checksums'
-            echo
-            echo '```'
-            cat *.sha256sum
-            echo '```'
-          } >> "${RUNNER_TEMP}/release_notes.md"
-
       - name: Upload release notes
         uses: actions/upload-artifact@v4
         with:
.github/workflows/sanitizers.yml (vendored; 2 lines changed)

@@ -44,7 +44,7 @@ jobs:
     uses: ./.github/workflows/build_and_test.yml
     with:
       runs_on: heavy
-      container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
+      container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
       disable_cache: true
       conan_profile: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
       build_type: ${{ matrix.build_type }}
.github/workflows/test_impl.yml (vendored; 12 lines changed)

@@ -46,15 +46,15 @@ jobs:
       SANITIZER_IGNORE_ERRORS: ${{ endsWith(inputs.conan_profile, '.asan') || endsWith(inputs.conan_profile, '.tsan') }}

     steps:
-      - name: Clean workdir
+      - name: Cleanup workspace
         if: ${{ runner.os == 'macOS' }}
-        uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
+        uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f

       - uses: actions/checkout@v4
         with:
           fetch-depth: 0

-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v5
         with:
           name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}

@@ -120,9 +120,9 @@ jobs:
           --health-retries 5

     steps:
-      - name: Clean workdir
+      - name: Cleanup workspace
         if: ${{ runner.os == 'macOS' }}
-        uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
+        uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f

       - name: Spin up scylladb
         if: ${{ runner.os == 'macOS' }}
@@ -144,7 +144,7 @@ jobs:
           sleep 5
         done

-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v5
         with:
           name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
.github/workflows/update_docker_ci.yml (vendored, 64 lines changed)

@@ -30,8 +30,8 @@ concurrency:

 env:
   CLANG_MAJOR_VERSION: 19
-  GCC_MAJOR_VERSION: 14
-  GCC_VERSION: 14.3.0
+  GCC_MAJOR_VERSION: 15
+  GCC_VERSION: 15.2.0

 jobs:
   repo:
@@ -56,7 +56,7 @@ jobs:

      - name: Get changed files
        id: changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
        with:
          files: "docker/compilers/gcc/**"

@@ -69,7 +69,7 @@ jobs:
        with:
          images: |
            ${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc
-            rippleci/clio_gcc
+            ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_gcc' || '' }}
          push_image: ${{ github.event_name != 'pull_request' }}
          directory: docker/compilers/gcc
          tags: |
@@ -81,7 +81,7 @@ jobs:
          build_args: |
            GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
            GCC_VERSION=${{ env.GCC_VERSION }}
-          dockerhub_repo: rippleci/clio_gcc
+          dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_gcc' || '' }}
          dockerhub_description: GCC compiler for XRPLF/clio.

  gcc-arm64:
@@ -94,7 +94,7 @@ jobs:

      - name: Get changed files
        id: changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
        with:
          files: "docker/compilers/gcc/**"

@@ -107,7 +107,7 @@ jobs:
        with:
          images: |
            ${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc
-            rippleci/clio_gcc
+            ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_gcc' || '' }}
          push_image: ${{ github.event_name != 'pull_request' }}
          directory: docker/compilers/gcc
          tags: |
@@ -119,7 +119,7 @@ jobs:
          build_args: |
            GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
            GCC_VERSION=${{ env.GCC_VERSION }}
-          dockerhub_repo: rippleci/clio_gcc
+          dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_gcc' || '' }}
          dockerhub_description: GCC compiler for XRPLF/clio.

  gcc-merge:
@@ -132,7 +132,7 @@ jobs:

      - name: Get changed files
        id: changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
        with:
          files: "docker/compilers/gcc/**"

@@ -141,15 +141,15 @@ jobs:

      - name: Login to GitHub Container Registry
        if: github.event_name != 'pull_request'
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to DockerHub
-        if: github.event_name != 'pull_request'
-        uses: docker/login-action@v3
+        if: github.repository_owner == 'XRPLF' && github.event_name != 'pull_request'
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          username: ${{ secrets.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_PW }}
@@ -157,15 +157,21 @@ jobs:
      - name: Create and push multi-arch manifest
        if: github.event_name != 'pull_request' && steps.changed-files.outputs.any_changed == 'true'
        run: |
-          for image in ${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc rippleci/clio_gcc; do
+          push_image() {
+            image=$1
+
            docker buildx imagetools create \
              -t $image:latest \
              -t $image:${{ env.GCC_MAJOR_VERSION }} \
              -t $image:${{ env.GCC_VERSION }} \
              -t $image:${{ github.sha }} \
              $image:arm64-latest \
              $image:amd64-latest
-          done
+          }
+
+          push_image ${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc
+          if [[ ${{ github.repository_owner }} == 'XRPLF' ]]; then
+            push_image rippleci/clio_clang
+          fi

  clang:
    name: Build and push Clang docker image
@@ -177,7 +183,7 @@ jobs:

      - name: Get changed files
        id: changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
        with:
          files: "docker/compilers/clang/**"

@@ -190,7 +196,7 @@ jobs:
        with:
          images: |
            ${{ needs.repo.outputs.GHCR_REPO }}/clio-clang
-            rippleci/clio_clang
+            ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_clang' || '' }}
          push_image: ${{ github.event_name != 'pull_request' }}
          directory: docker/compilers/clang
          tags: |
@@ -200,7 +206,7 @@ jobs:
          platforms: linux/amd64,linux/arm64
          build_args: |
            CLANG_MAJOR_VERSION=${{ env.CLANG_MAJOR_VERSION }}
-          dockerhub_repo: rippleci/clio_clang
+          dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_clang' || '' }}
          dockerhub_description: Clang compiler for XRPLF/clio.

  tools-amd64:
@@ -213,7 +219,7 @@ jobs:

      - name: Get changed files
        id: changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
        with:
          files: "docker/tools/**"

@@ -244,7 +250,7 @@ jobs:

      - name: Get changed files
        id: changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
        with:
          files: "docker/tools/**"

@@ -275,7 +281,7 @@ jobs:

      - name: Get changed files
        id: changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
        with:
          files: "docker/tools/**"

@@ -284,7 +290,7 @@ jobs:

      - name: Login to GitHub Container Registry
        if: github.event_name != 'pull_request'
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -315,7 +321,7 @@ jobs:
        with:
          images: |
            ${{ needs.repo.outputs.GHCR_REPO }}/clio-ci
-            rippleci/clio_ci
+            ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_ci' || '' }}
          push_image: ${{ github.event_name != 'pull_request' }}
          directory: docker/ci
          tags: |
@@ -328,5 +334,5 @@ jobs:
          CLANG_MAJOR_VERSION=${{ env.CLANG_MAJOR_VERSION }}
          GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
          GCC_VERSION=${{ env.GCC_VERSION }}
-          dockerhub_repo: rippleci/clio_ci
+          dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_ci' || '' }}
          dockerhub_description: CI image for XRPLF/clio.
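
A note on the manifest step above: the `push_image` helper merges the per-architecture tags into one multi-arch manifest list. As a rough local sketch of the same operation (the image name here is a placeholder, not the real repository):

```sh
# Sketch: merge per-arch tags into one manifest list, then verify it.
# "ghcr.io/example/clio-gcc" is a hypothetical image name.
docker buildx imagetools create \
  -t ghcr.io/example/clio-gcc:latest \
  ghcr.io/example/clio-gcc:arm64-latest \
  ghcr.io/example/clio-gcc:amd64-latest

# Inspect the resulting manifest list and the platforms it covers.
docker buildx imagetools inspect ghcr.io/example/clio-gcc:latest
```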
.github/workflows/upload_conan_deps.yml (vendored, 28 lines changed)

@@ -10,15 +10,18 @@ on:
      required: false
      default: false
      type: boolean
+    force_upload:
+      description: "Force upload of all dependencies"
+      required: false
+      default: false
+      type: boolean
  pull_request:
    branches: [develop]
    paths:
      - .github/workflows/upload_conan_deps.yml

-      - .github/actions/generate/action.yml
-      - .github/actions/prepare_runner/action.yml
+      - .github/actions/conan/action.yml
      - ".github/scripts/conan/**"
-      - "!.github/scripts/conan/apple-clang-local.profile"

      - conanfile.py
      - conan.lock
@@ -27,10 +30,8 @@ on:
    paths:
      - .github/workflows/upload_conan_deps.yml

-      - .github/actions/generate/action.yml
-      - .github/actions/prepare_runner/action.yml
+      - .github/actions/conan/action.yml
      - ".github/scripts/conan/**"
-      - "!.github/scripts/conan/apple-clang-local.profile"

      - conanfile.py
      - conan.lock
@@ -59,6 +60,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
+      max-parallel: 10

    runs-on: ${{ matrix.os }}
    container: ${{ matrix.container != '' && fromJson(matrix.container) || null }}
@@ -70,7 +72,7 @@ jobs:
      - uses: actions/checkout@v4

      - name: Prepare runner
-        uses: ./.github/actions/prepare_runner
+        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
        with:
          disable_ccache: true

@@ -82,8 +84,8 @@ jobs:
      - name: Show conan profile
        run: conan profile show --profile:all ${{ env.CONAN_PROFILE }}

-      - name: Run conan and cmake
-        uses: ./.github/actions/generate
+      - name: Run conan
+        uses: ./.github/actions/conan
        with:
          conan_profile: ${{ env.CONAN_PROFILE }}
          # We check that everything builds fine from source on scheduled runs
@@ -92,9 +94,9 @@ jobs:
          build_type: ${{ matrix.build_type }}

      - name: Login to Conan
-        if: github.event_name != 'pull_request'
-        run: conan remote login -p ${{ secrets.CONAN_PASSWORD }} ripple ${{ secrets.CONAN_USERNAME }}
+        if: github.repository_owner == 'XRPLF' && github.event_name != 'pull_request'
+        run: conan remote login -p ${{ secrets.CONAN_PASSWORD }} xrplf ${{ secrets.CONAN_USERNAME }}

      - name: Upload Conan packages
-        if: github.event_name != 'pull_request' && github.event_name != 'schedule'
-        run: conan upload "*" -r=ripple --confirm
+        if: github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule'
+        run: conan upload "*" -r=xrplf --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
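
The GitHub expression in the upload step collapses to one of two commands at runtime. A sketch of what actually runs (the remote name `xrplf` comes from the login step above):

```sh
# Default run: upload only what the remote does not already have.
conan upload "*" -r=xrplf --confirm

# With the force_upload input set to true: re-upload even if the
# recipe/package revision already exists on the remote.
conan upload "*" -r=xrplf --confirm --force
```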
.github/workflows/upload_coverage_report.yml (vendored, 4 lines changed)

@@ -18,14 +18,14 @@ jobs:
          fetch-depth: 0

      - name: Download report artifact
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v5
        with:
          name: coverage-report.xml
          path: build

      - name: Upload coverage report
        if: ${{ hashFiles('build/coverage_report.xml') != '' }}
-        uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
+        uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
        with:
          files: build/coverage_report.xml
          fail_ci_if_error: true
.gitignore (vendored, 1 line changed)

@@ -9,4 +9,3 @@
 .sanitizer-report
 CMakeUserPresets.json
 config.json
-src/util/build/Build.cpp
@@ -16,7 +16,7 @@ exclude: ^(docs/doxygen-awesome-theme/|conan\.lock$)
 repos:
   # `pre-commit sample-config` default hooks
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: cef0300fd0fc4d2a87a85fa2093c6b283ea36f4b # frozen: v5.0.0
+    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
    hooks:
      - id: check-added-large-files
      - id: check-executables-have-shebangs
@@ -55,12 +55,6 @@ repos:
          --ignore-words=pre-commit-hooks/codespell_ignore.txt,
        ]

-  - repo: https://github.com/trufflesecurity/trufflehog
-    rev: 6641d4ba5b684fffe195b9820345de1bf19f3181 # frozen: v3.89.2
-    hooks:
-      - id: trufflehog
-        entry: trufflehog git file://. --since-commit HEAD --no-verification --fail
-
  # Running some C++ hooks before clang-format
  # to ensure that the style is consistent.
  - repo: local
@@ -86,7 +80,7 @@ repos:
        language: script

  - repo: https://github.com/pre-commit/mirrors-clang-format
-    rev: 6b9072cd80691b1b48d80046d884409fb1d962d1 # frozen: v20.1.7
+    rev: 86fdcc9bd34d6afbbd29358b97436c8ffe3aa3b2 # frozen: v21.1.0
    hooks:
      - id: clang-format
        args: [--style=file]
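
After `rev` bumps like the ones above, hooks get re-resolved the next time they run; a typical local refresh (assuming pre-commit is installed) looks like:

```sh
pre-commit clean            # drop cached hook environments built from the old revs
pre-commit run --all-files  # rebuild environments and run every hook once
```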
@@ -1,7 +1,5 @@
 cmake_minimum_required(VERSION 3.20)

-set(CMAKE_PROJECT_INCLUDE_BEFORE ${CMAKE_CURRENT_SOURCE_DIR}/cmake/ClioVersion.cmake)
-
 project(clio VERSION ${CLIO_VERSION} HOMEPAGE_URL "https://github.com/XRPLF/clio"
         DESCRIPTION "An XRP Ledger API Server"
 )
@@ -13,7 +11,7 @@ option(integration_tests "Build integration tests" FALSE)
 option(benchmark "Build benchmarks" FALSE)
 option(docs "Generate doxygen docs" FALSE)
 option(coverage "Build test coverage report" FALSE)
-option(packaging "Create distribution packages" FALSE)
+option(package "Create distribution packages" FALSE)
 option(lint "Run clang-tidy checks during compilation" FALSE)
 option(static "Statically linked Clio" FALSE)
 option(snapshot "Build snapshot tool" FALSE)
@@ -31,6 +29,7 @@ set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH})
 include(Ccache)
 include(CheckCXXCompilerFlag)
 include(ClangTidy)
+include(Linker)

 add_library(clio_options INTERFACE)
 target_compile_features(clio_options INTERFACE cxx_std_23) # Clio needs c++23 but deps can remain c++20 for now
@@ -40,11 +39,6 @@ if (verbose)
   set(CMAKE_VERBOSE_MAKEFILE TRUE)
 endif ()

-if (packaging)
-  add_definitions(-DPKG=1)
-  target_compile_definitions(clio_options INTERFACE PKG=1)
-endif ()
-
 # Clio tweaks and checks
 include(CheckCompiler)
 include(Settings)
@@ -58,6 +52,7 @@ include(deps/Threads)
 include(deps/libfmt)
 include(deps/cassandra)
 include(deps/libbacktrace)
+include(deps/spdlog)

 add_subdirectory(src)
 add_subdirectory(tests)
@@ -93,8 +88,8 @@ if (docs)
 endif ()

 include(install/install)
-if (packaging)
-  include(cmake/packaging.cmake) # This file exists only in build runner
+if (package)
+  include(ClioPackage)
 endif ()

 if (snapshot)
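
With the option renamed from `packaging` to `package` and the packaging logic now living in-tree as `ClioPackage`, a packaging build could be configured like this (a sketch; flags other than `-Dpackage` are illustrative):

```sh
cmake -DCMAKE_BUILD_TYPE=Release -Dpackage=TRUE ..
cmake --build . --parallel
cpack   # drives the DEB generator configured in cmake/pkg/deb.cmake
```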
@@ -7,10 +7,12 @@ target_sources(
     Playground.cpp
     # ExecutionContext
     util/async/ExecutionContextBenchmarks.cpp
+    # Logger
+    util/log/LoggerBenchmark.cpp
 )

 include(deps/gbench)

 target_include_directories(clio_benchmark PRIVATE .)
-target_link_libraries(clio_benchmark PUBLIC clio_etl benchmark::benchmark_main)
+target_link_libraries(clio_benchmark PUBLIC clio_util benchmark::benchmark_main spdlog::spdlog)
 set_target_properties(clio_benchmark PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})
benchmarks/util/log/LoggerBenchmark.cpp (new file, 149 lines)

@@ -0,0 +1,149 @@
+//------------------------------------------------------------------------------
+/*
+    This file is part of clio: https://github.com/XRPLF/clio
+    Copyright (c) 2025, the clio developers.
+
+    Permission to use, copy, modify, and distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+*/
+//==============================================================================
+
+#include "util/config/ConfigDefinition.hpp"
+#include "util/log/Logger.hpp"
+#include "util/prometheus/Prometheus.hpp"
+
+#include <benchmark/benchmark.h>
+#include <fmt/format.h>
+#include <spdlog/async.h>
+#include <spdlog/async_logger.h>
+#include <spdlog/spdlog.h>
+
+#include <barrier>
+#include <chrono>
+#include <cstddef>
+#include <filesystem>
+#include <memory>
+#include <string>
+#include <thread>
+#include <utility>
+#include <vector>
+
+using namespace util;
+
+static constexpr auto kLOG_FORMAT = "%Y-%m-%d %H:%M:%S.%f %^%3!l:%n%$ - %v";
+
+struct BenchmarkLoggingInitializer {
+    [[nodiscard]] static std::shared_ptr<spdlog::sinks::sink>
+    createFileSink(LogService::FileLoggingParams const& params)
+    {
+        return LogService::createFileSink(params, kLOG_FORMAT);
+    }
+
+    static Logger
+    getLogger(std::shared_ptr<spdlog::logger> logger)
+    {
+        return Logger(std::move(logger));
+    }
+};
+
+namespace {
+
+std::string
+uniqueLogDir()
+{
+    auto const epochTime = std::chrono::high_resolution_clock::now().time_since_epoch();
+    auto const tmpDir = std::filesystem::temp_directory_path();
+    std::string const dirName =
+        fmt::format("logs_{}", std::chrono::duration_cast<std::chrono::microseconds>(epochTime).count());
+    return tmpDir / "clio_benchmark" / dirName;
+}
+
+}  // anonymous namespace
+
+static void
+benchmarkConcurrentFileLogging(benchmark::State& state)
+{
+    auto const numThreads = static_cast<size_t>(state.range(0));
+    auto const messagesPerThread = static_cast<size_t>(state.range(1));
+
+    PrometheusService::init(config::getClioConfig());
+
+    auto const logDir = uniqueLogDir();
+    for (auto _ : state) {
+        state.PauseTiming();
+
+        std::filesystem::create_directories(logDir);
+        static constexpr size_t kQUEUE_SIZE = 8192;
+        static constexpr size_t kTHREAD_COUNT = 1;
+        spdlog::init_thread_pool(kQUEUE_SIZE, kTHREAD_COUNT);
+
+        auto fileSink = BenchmarkLoggingInitializer::createFileSink({
+            .logDir = logDir,
+            .rotationSizeMB = 5,
+            .dirMaxFiles = 25,
+        });
+
+        std::vector<std::thread> threads;
+        threads.reserve(numThreads);
+
+        std::chrono::high_resolution_clock::time_point start;
+        std::barrier barrier(numThreads, [&state, &start]() {
+            state.ResumeTiming();
+            start = std::chrono::high_resolution_clock::now();
+        });
+
+        for (size_t threadNum = 0; threadNum < numThreads; ++threadNum) {
+            threads.emplace_back([threadNum, messagesPerThread, fileSink, &barrier]() {
+                std::string const channel = fmt::format("Thread_{}", threadNum);
+                auto logger = std::make_shared<spdlog::async_logger>(
+                    channel, fileSink, spdlog::thread_pool(), spdlog::async_overflow_policy::block
+                );
+                spdlog::register_logger(logger);
+                Logger const threadLogger = BenchmarkLoggingInitializer::getLogger(std::move(logger));
+
+                barrier.arrive_and_wait();
+
+                for (size_t messageNum = 0; messageNum < messagesPerThread; ++messageNum) {
+                    LOG(threadLogger.info()) << "Test log message #" << messageNum;
+                }
+            });
+        }
+
+        for (auto& thread : threads) {
+            thread.join();
+        }
+        spdlog::shutdown();
+
+        auto const end = std::chrono::high_resolution_clock::now();
+        state.SetIterationTime(std::chrono::duration_cast<std::chrono::duration<double>>(end - start).count());
+
+        std::filesystem::remove_all(logDir);
+    }
+
+    auto const totalMessages = numThreads * messagesPerThread;
+    state.counters["TotalMessagesRate"] = benchmark::Counter(totalMessages, benchmark::Counter::kIsRate);
+    state.counters["Threads"] = numThreads;
+    state.counters["MessagesPerThread"] = messagesPerThread;
+}
+
+// One line of log message is around 110 bytes
+// So, 100K messages is around 10.5MB
+
+BENCHMARK(benchmarkConcurrentFileLogging)
+    ->ArgsProduct({
+        // Number of threads
+        {1, 2, 4, 8},
+        // Messages per thread
+        {10'000, 100'000, 500'000, 1'000'000, 10'000'000},
+    })
+    ->UseManualTime()
+    ->Unit(benchmark::kMillisecond);
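
Once `clio_benchmark` is built, the new logger benchmark can be run in isolation with standard Google Benchmark flags, for example:

```sh
# --benchmark_filter selects the benchmark by name; the custom counters
# (TotalMessagesRate, Threads, MessagesPerThread) print as columns with the tabular flag.
./clio_benchmark \
  --benchmark_filter=benchmarkConcurrentFileLogging \
  --benchmark_counters_tabular=true
```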
cmake/ClioPackage.cmake (new file, 8 lines)

@@ -0,0 +1,8 @@
+include("${CMAKE_CURRENT_LIST_DIR}/ClioVersion.cmake")
+
+set(CPACK_PACKAGING_INSTALL_PREFIX "/opt/clio")
+set(CPACK_PACKAGE_VERSION "${CLIO_VERSION}")
+set(CPACK_STRIP_FILES TRUE)
+
+include(pkg/deb)
+include(CPack)
@@ -1,7 +1,3 @@
-#[===================================================================[
-   write version to source
-#]===================================================================]
-
 find_package(Git REQUIRED)

 set(GIT_COMMAND describe --tags --exact-match)
@@ -10,15 +6,17 @@ execute_process(
   WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
   OUTPUT_VARIABLE TAG
   RESULT_VARIABLE RC
-  OUTPUT_STRIP_TRAILING_WHITESPACE
+  ERROR_VARIABLE ERR
+  OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_STRIP_TRAILING_WHITESPACE
 )

 if (RC EQUAL 0)
-  # if we are on a tag, use the tag name
+  message(STATUS "Found tag '${TAG}' in git. Will use it as Clio version")
   set(CLIO_VERSION "${TAG}")
   set(DOC_CLIO_VERSION "${TAG}")
 else ()
-  # if not, use YYYYMMDDHMS-<branch>-<git-rev>
+  message(STATUS "Error finding tag in git: ${ERR}")
+  message(STATUS "Will use 'YYYYMMDDHMS-<branch>-<git-rev>' as Clio version")
+
   set(GIT_COMMAND show -s --date=format:%Y%m%d%H%M%S --format=%cd)
   execute_process(
@@ -47,5 +45,3 @@ if (CMAKE_BUILD_TYPE MATCHES Debug)
 endif ()

 message(STATUS "Build version: ${CLIO_VERSION}")
-
-configure_file(${CMAKE_CURRENT_LIST_DIR}/Build.cpp.in ${CMAKE_CURRENT_LIST_DIR}/../src/util/build/Build.cpp)
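
The two git commands this version logic relies on can be tried directly in a checkout:

```sh
git describe --tags --exact-match                     # prints the tag if HEAD is tagged, fails otherwise
git show -s --date=format:%Y%m%d%H%M%S --format=%cd   # commit timestamp used in the fallback version
```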
cmake/Linker.cmake (new file, 11 lines)

@@ -0,0 +1,11 @@
+if (DEFINED CMAKE_LINKER_TYPE)
+  message(STATUS "Custom linker is already set: ${CMAKE_LINKER_TYPE}")
+  return()
+endif ()
+
+find_program(MOLD_PATH mold)
+
+if (MOLD_PATH AND CMAKE_SYSTEM_NAME STREQUAL "Linux")
+  message(STATUS "Using Mold linker: ${MOLD_PATH}")
+  set(CMAKE_LINKER_TYPE MOLD)
+endif ()
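
`CMAKE_LINKER_TYPE` is honored by CMake 3.29 and later; whether mold was picked shows up in the configure log, for example:

```sh
cmake -B build 2>&1 | grep -i mold   # expect "Using Mold linker: /usr/bin/mold" when available
```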
cmake/deps/spdlog.cmake (new file, 5 lines)

@@ -0,0 +1,5 @@
+find_package(spdlog REQUIRED)
+
+if (NOT TARGET spdlog::spdlog)
+  message(FATAL_ERROR "spdlog::spdlog target not found")
+endif ()
@@ -1,17 +0,0 @@
-[Unit]
-Description=Clio XRPL API server
-Documentation=https://github.com/XRPLF/clio.git
-
-After=network-online.target
-Wants=network-online.target
-
-[Service]
-Type=simple
-ExecStart=@CLIO_INSTALL_DIR@/bin/clio_server @CLIO_INSTALL_DIR@/etc/config.json
-Restart=on-failure
-User=clio
-Group=clio
-LimitNOFILE=65536
-
-[Install]
-WantedBy=multi-user.target
@@ -1,13 +1,13 @@
 set(CLIO_INSTALL_DIR "/opt/clio")
-set(CMAKE_INSTALL_PREFIX ${CLIO_INSTALL_DIR})
+set(CMAKE_INSTALL_PREFIX "${CLIO_INSTALL_DIR}" CACHE PATH "Install prefix" FORCE)

-install(TARGETS clio_server DESTINATION bin)
+set(CPACK_PACKAGING_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
+
+include(GNUInstallDirs)
+
+install(TARGETS clio_server DESTINATION "${CMAKE_INSTALL_BINDIR}")

 file(READ docs/examples/config/example-config.json config)
 string(REGEX REPLACE "./clio_log" "/var/log/clio/" config "${config}")
 file(WRITE ${CMAKE_BINARY_DIR}/install-config.json "${config}")
 install(FILES ${CMAKE_BINARY_DIR}/install-config.json DESTINATION etc RENAME config.json)
-
-configure_file("${CMAKE_SOURCE_DIR}/cmake/install/clio.service.in" "${CMAKE_BINARY_DIR}/clio.service")
-
-install(FILES "${CMAKE_BINARY_DIR}/clio.service" DESTINATION /lib/systemd/system)
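
With `GNUInstallDirs` in play, a staged install of the same layout can be sketched as follows (DESTDIR staging is standard CMake install behavior; the prefix itself is forced to /opt/clio above):

```sh
DESTDIR=/tmp/clio-stage cmake --install build
ls /tmp/clio-stage/opt/clio/bin/clio_server   # binary lands under the forced prefix
```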
cmake/pkg/deb.cmake (new file, 12 lines)

@@ -0,0 +1,12 @@
+set(CPACK_GENERATOR "DEB")
+set(CPACK_DEBIAN_PACKAGE_HOMEPAGE "https://github.com/XRPLF/clio")
+set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Ripple Labs Inc. <support@ripple.com>")
+
+set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT)
+
+set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
+
+set(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA ${CMAKE_SOURCE_DIR}/cmake/pkg/postinst)
+
+# We must replace "-" with "~" otherwise dpkg will sort "X.Y.Z-b1" as greater than "X.Y.Z"
+string(REPLACE "-" "~" git "${CPACK_PACKAGE_VERSION}")
cmake/pkg/postinst (new executable file, 46 lines)

@@ -0,0 +1,46 @@
+#!/bin/sh
+
+set -e
+
+USER_NAME=clio
+GROUP_NAME="${USER_NAME}"
+CLIO_EXECUTABLE="clio_server"
+CLIO_PREFIX="/opt/clio"
+CLIO_BIN="$CLIO_PREFIX/bin/${CLIO_EXECUTABLE}"
+CLIO_CONFIG="$CLIO_PREFIX/etc/config.json"
+
+case "$1" in
+    configure)
+        if ! id -u "$USER_NAME" >/dev/null 2>&1; then
+            # Users who should not have a home directory should have their home directory set to /nonexistent
+            # https://www.debian.org/doc/debian-policy/ch-opersys.html#non-existent-home-directories
+            useradd \
+                --system \
+                --home-dir /nonexistent \
+                --no-create-home \
+                --shell /usr/sbin/nologin \
+                --comment "system user for ${CLIO_EXECUTABLE}" \
+                --user-group \
+                ${USER_NAME}
+        fi
+
+        install -d -o "$USER_NAME" -g "$GROUP_NAME" /var/log/clio
+
+        if [ -f "$CLIO_CONFIG" ]; then
+            chown "$USER_NAME:$GROUP_NAME" "$CLIO_CONFIG"
+        fi
+
+        chown -R "$USER_NAME:$GROUP_NAME" "$CLIO_PREFIX"
+
+        ln -sf "$CLIO_BIN" "/usr/bin/${CLIO_EXECUTABLE}"
+
+        ;;
+    abort-upgrade|abort-remove|abort-deconfigure)
+        ;;
+    *)
+        echo "postinst called with unknown argument \`$1'" >&2
+        exit 1
+        ;;
+esac
+
+exit 0
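
A rough way to sanity-check what this postinst sets up after installing the resulting .deb (paths are those from the script above):

```sh
id clio                        # system user and group were created
ls -ld /var/log/clio           # log directory exists, owned by clio:clio
readlink /usr/bin/clio_server  # symlink points at /opt/clio/bin/clio_server
```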
conan.lock (74 lines changed)

@@ -1,56 +1,60 @@
 {
   "version": "0.5",
   "requires": [
-    "zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1752006674.465",
-    "xxhash/0.8.2#7856c968c985b2981b707ee8f2413b2b%1752006674.334",
-    "xrpl/2.5.0#7880d1696f11fceb1d498570f1a184c8%1752006708.218",
-    "sqlite3/3.47.0#7a0904fd061f5f8a2366c294f9387830%1752006674.338",
-    "soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1752006674.465",
-    "re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1752006674.077",
-    "rapidjson/cci.20220822#1b9d8c2256876a154172dc5cfbe447c6%1752006673.227",
-    "protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1752006673.172",
-    "openssl/1.1.1v#216374e4fb5b2e0f5ab1fb6f27b5b434%1752006673.069",
-    "nudb/2.0.8#63990d3e517038e04bf529eb8167f69f%1752006673.862",
-    "minizip/1.2.13#9e87d57804bd372d6d1e32b1871517a3%1752006672.983",
-    "lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1752006672.825",
-    "libuv/1.46.0#78565d142ac7102776256328a26cdf60%1752006672.827",
-    "libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1752006672.826",
-    "libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1752006672.822",
-    "libarchive/3.7.6#e0453864b2a4d225f06b3304903cb2b7%1752006672.917",
-    "http_parser/2.9.4#98d91690d6fd021e9e624218a85d9d97%1752006672.658",
-    "gtest/1.14.0#f8f0757a574a8dd747d16af62d6eb1b7%1752006671.555",
-    "grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1752006671.777",
-    "fmt/11.2.0#579bb2cdf4a7607621beea4eb4651e0f%1752006671.557",
-    "date/3.0.3#cf28fe9c0aab99fe12da08aa42df65e1%1752006671.553",
-    "cassandra-cpp-driver/2.17.0#e50919efac8418c26be6671fd702540a%1752006671.654",
-    "c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1752006671.554",
-    "bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1752006671.549",
-    "boost/1.83.0#5bcb2a14a35875e328bf312e080d3562%1752006671.557",
-    "benchmark/1.8.3#1a2ce62c99e2b3feaa57b1f0c15a8c46%1752006671.408",
-    "abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1752006671.555"
+    "zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
+    "xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
+    "xrpl/2.6.0#57b93b5a6c99dc8511fccb3bb5390352%1756820296.642",
+    "sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
+    "spdlog/1.15.3#3ca0e9e6b83af4d0151e26541d140c86%1754401846.61",
+    "soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
+    "re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
+    "rapidjson/cci.20220822#1b9d8c2256876a154172dc5cfbe447c6%1754325007.656",
+    "protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
+    "openssl/1.1.1w#a8f0792d7c5121b954578a7149d23e03%1756223730.729",
+    "nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
+    "minizip/1.2.13#9e87d57804bd372d6d1e32b1871517a3%1754325004.374",
+    "lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
+    "libuv/1.46.0#dc28c1f653fa197f00db5b577a6f6011%1754325003.592",
+    "libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
+    "libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
+    "libarchive/3.8.1#5cf685686322e906cb42706ab7e099a8%1756234256.696",
+    "http_parser/2.9.4#98d91690d6fd021e9e624218a85d9d97%1754325001.385",
+    "gtest/1.14.0#f8f0757a574a8dd747d16af62d6eb1b7%1754325000.842",
+    "grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1756234248.958",
+    "fmt/11.2.0#579bb2cdf4a7607621beea4eb4651e0f%1754324999.086",
+    "doctest/2.4.11#a4211dfc329a16ba9f280f9574025659%1756234220.819",
+    "date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
+    "cassandra-cpp-driver/2.17.0#e50919efac8418c26be6671fd702540a%1754324997.363",
+    "c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
+    "bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1756234261.716",
+    "boost/1.83.0#5d975011d65b51abb2d2f6eb8386b368%1754325043.336",
+    "benchmark/1.9.4#ce4403f7a24d3e1f907cd9da4b678be4%1754578869.672",
+    "abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1756234220.907"
   ],
   "build_requires": [
-    "zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1752006674.465",
-    "protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1752006673.172",
-    "protobuf/3.21.9#64ce20e1d9ea24f3d6c504015d5f6fa8%1752006673.173",
-    "cmake/3.31.7#57c3e118bcf267552c0ea3f8bee1e7d5%1752006671.64",
-    "b2/5.3.2#7b5fabfe7088ae933fb3e78302343ea0%1752006671.407"
+    "zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
+    "protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
+    "cmake/3.31.8#dde3bde00bb843687e55aea5afa0e220%1756234232.89",
+    "b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28"
   ],
   "python_requires": [],
   "overrides": {
     "boost/1.83.0": [
       null,
-      "boost/1.83.0#5bcb2a14a35875e328bf312e080d3562"
+      "boost/1.83.0"
     ],
-    "protobuf/3.21.9": [
+    "protobuf/3.21.12": [
       null,
       "protobuf/3.21.12"
     ],
+    "boost/1.86.0": [
+      "boost/1.83.0#5d975011d65b51abb2d2f6eb8386b368"
+    ],
     "lz4/1.9.4": [
       "lz4/1.10.0"
     ],
     "sqlite3/3.44.2": [
-      "sqlite3/3.47.0"
+      "sqlite3/3.49.1"
     ]
   },
   "config_requires": []
conanfile.py (41 lines changed)

@@ -9,19 +9,7 @@ class ClioConan(ConanFile):
     url = 'https://github.com/xrplf/clio'
     description = 'Clio RPC server'
     settings = 'os', 'compiler', 'build_type', 'arch'
-    options = {
-        'static': [True, False],  # static linkage
-        'verbose': [True, False],
-        'tests': [True, False],  # build unit tests; create `clio_tests` binary
-        'integration_tests': [True, False],  # build integration tests; create `clio_integration_tests` binary
-        'benchmark': [True, False],  # build benchmarks; create `clio_benchmarks` binary
-        'docs': [True, False],  # doxygen API docs; create custom target 'docs'
-        'packaging': [True, False],  # create distribution packages
-        'coverage': [True, False],  # build for test coverage report; create custom target `clio_tests-ccov`
-        'lint': [True, False],  # run clang-tidy checks during compilation
-        'snapshot': [True, False],  # build export/import snapshot tool
-        'time_trace': [True, False]  # build using -ftime-trace to create compiler trace reports
-    }
+    options = {}

     requires = [
         'boost/1.83.0',
@@ -29,25 +17,14 @@ class ClioConan(ConanFile):
         'fmt/11.2.0',
         'protobuf/3.21.12',
         'grpc/1.50.1',
-        'openssl/1.1.1v',
-        'xrpl/2.5.0',
+        'openssl/1.1.1w',
+        'xrpl/2.6.0',
         'zlib/1.3.1',
-        'libbacktrace/cci.20210118'
+        'libbacktrace/cci.20210118',
+        'spdlog/1.15.3',
     ]

     default_options = {
-        'static': False,
-        'verbose': False,
-        'tests': False,
-        'integration_tests': False,
-        'benchmark': False,
-        'packaging': False,
-        'coverage': False,
-        'lint': False,
-        'docs': False,
-        'snapshot': False,
-        'time_trace': False,
-
         'xrpl/*:tests': False,
         'xrpl/*:rocksdb': False,
         'cassandra-cpp-driver/*:shared': False,
@@ -68,10 +45,8 @@ class ClioConan(ConanFile):
     )

     def requirements(self):
-        if self.options.tests or self.options.integration_tests:
-            self.requires('gtest/1.14.0')
-        if self.options.benchmark:
-            self.requires('benchmark/1.8.3')
+        self.requires('gtest/1.14.0')
+        self.requires('benchmark/1.9.4')

     def configure(self):
         if self.settings.compiler == 'apple-clang':
@@ -87,8 +62,6 @@ class ClioConan(ConanFile):

     def generate(self):
         tc = CMakeToolchain(self)
-        for option_name, option_value in self.options.items():
-            tc.variables[option_name] = option_value
         tc.generate()

     def build(self):
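
Since the recipe no longer declares options or forwards them into the toolchain, feature toggles go to CMake directly instead of through `-o '&:...'` flags. A plausible flow (the `benchmark` option name is shown in the CMakeLists diff above; `tests` is assumed to be the matching option for `clio_tests`):

```sh
conan install . --output-folder build --build missing --settings build_type=Release
# Toggles that used to be conan options are now plain CMake cache variables.
cmake -B build -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Release -Dtests=TRUE -Dbenchmark=TRUE
cmake --build build --parallel
```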
@@ -20,43 +20,60 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 USER root
 WORKDIR /root

-ARG LLVM_TOOLS_VERSION=20
-
-# Add repositories
+# Install common tools and dependencies
 RUN apt-get update \
     && apt-get install -y --no-install-recommends --no-install-suggests \
     curl \
+    dpkg-dev \
+    file \
+    git \
+    git-lfs \
     gnupg \
+    graphviz \
+    jq \
+    # libgmp, libmpfr and libncurses are gdb dependencies
+    libgmp-dev \
+    libmpfr-dev \
+    libncurses-dev \
+    make \
+    ninja-build \
     wget \
+    zip \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Python tools
+ARG PYTHON_VERSION=3.13
+
+RUN add-apt-repository ppa:deadsnakes/ppa \
+    && apt-get update \
+    && apt-get install -y --no-install-recommends --no-install-suggests \
+    python${PYTHON_VERSION} \
+    python${PYTHON_VERSION}-venv \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* \
-    && echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
+    && curl -sS https://bootstrap.pypa.io/get-pip.py | python${PYTHON_VERSION}
+
+# Create a virtual environment for python tools
+RUN python${PYTHON_VERSION} -m venv /opt/venv
+ENV PATH="/opt/venv/bin:$PATH"
+
+RUN pip install -q --no-cache-dir \
+    cmake \
+    conan==2.20.1 \
+    gcovr \
+    pre-commit
+
+# Install LLVM tools
+ARG LLVM_TOOLS_VERSION=20
+
+RUN echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
     && wget --progress=dot:giga -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -

-# Install packages
 RUN apt-get update \
     && apt-get install -y --no-install-recommends --no-install-suggests \
     clang-tidy-${LLVM_TOOLS_VERSION} \
     clang-tools-${LLVM_TOOLS_VERSION} \
-    git \
-    git-lfs \
-    graphviz \
-    jq \
-    make \
-    ninja-build \
-    python3 \
-    python3-pip \
-    zip \
-    && pip3 install -q --upgrade --no-cache-dir pip \
-    && pip3 install -q --no-cache-dir \
-    # TODO: Remove this once we switch to newer Ubuntu base image
-    # lxml 6.0.0 is not compatible with our image
-    'lxml<6.0.0' \
-    \
-    cmake==3.31.6 \
-    conan==2.17.0 \
-    gcovr \
-    pre-commit \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
@@ -92,12 +109,13 @@ COPY --from=clio-tools \
     /usr/local/bin/ClangBuildAnalyzer \
     /usr/local/bin/git-cliff \
     /usr/local/bin/gh \
+    /usr/local/bin/gdb \
     /usr/local/bin/

 WORKDIR /root

 # Setup conan
-RUN conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev
+RUN conan remote add --index 0 xrplf https://conan.ripplex.io

 WORKDIR /root/.conan2
 COPY conan/global.conf ./global.conf
@@ -8,12 +8,14 @@ The image is based on Ubuntu 20.04 and contains:
 - ccache 4.11.3
 - Clang 19
 - ClangBuildAnalyzer 1.6.0
-- Conan 2.17.0
+- Conan 2.20.1
 - Doxygen 1.12
-- GCC 14.3.0
+- GCC 15.2.0
+- GDB 16.3
 - gh 2.74
 - git-cliff 2.9.1
 - mold 2.40.1
+- Python 3.13
 - and some other useful tools

 Conan is set up to build Clio without any additional steps.
@@ -4,8 +4,8 @@ build_type=Release
 compiler=gcc
 compiler.cppstd=20
 compiler.libcxx=libstdc++11
-compiler.version=14
+compiler.version=15
 os=Linux

 [conf]
-tools.build:compiler_executables={"c": "/usr/bin/gcc-14", "cpp": "/usr/bin/g++-14"}
+tools.build:compiler_executables={"c": "/usr/bin/gcc-15", "cpp": "/usr/bin/g++-15"}
@@ -1,18 +1,20 @@
 FROM ubuntu:22.04

+RUN apt-get update \
+    && apt-get install -y --no-install-recommends --no-install-suggests \
+    libatomic1 \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN groupadd -g 10001 clio \
+    && useradd -u 10000 -g 10001 -s /bin/bash clio
+
 COPY ./clio_server /opt/clio/bin/clio_server

 RUN ln -s /opt/clio/bin/clio_server /usr/local/bin/clio_server \
     && mkdir -p /opt/clio/etc/ \
     && mkdir -p /opt/clio/log/ \
-    && groupadd -g 10001 clio \
-    && useradd -u 10000 -g 10001 -s /bin/bash clio \
-    && chown clio:clio /opt/clio/log \
-    && apt-get update \
-    && apt-get install -y --no-install-recommends --no-install-suggests \
-    libatomic1 \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
+    && chown clio:clio /opt/clio/log

 USER clio
 ENTRYPOINT ["/opt/clio/bin/clio_server"]
@@ -13,8 +13,8 @@ Clio repository provides an [example](https://github.com/XRPLF/clio/blob/develop

 Config file recommendations:

-- Set `log_to_console` to `false` if you want to avoid logs being written to `stdout`.
-- Set `log_directory` to `/opt/clio/log` to store logs in a volume.
+- Set `log.enable_console` to `false` if you want to avoid logs being written to `stdout`.
+- Set `log.directory` to `/opt/clio/log` to store logs in a volume.

 ## Usage
@@ -18,7 +18,7 @@ RUN apt-get update \

 ARG CLANG_MAJOR_VERSION=invalid
 # Bump this version to force rebuild of the image
-ARG BUILD_VERSION=0
+ARG BUILD_VERSION=1

 RUN wget --progress=dot:giga https://apt.llvm.org/llvm.sh \
     && chmod +x llvm.sh \
@@ -8,7 +8,7 @@ ARG UBUNTU_VERSION

 ARG GCC_MAJOR_VERSION

-ARG BUILD_VERSION=0
+ARG BUILD_VERSION=1

 ARG DEBIAN_FRONTEND=noninteractive
 ARG TARGETARCH
@@ -1,4 +1,4 @@
-Package: gcc-14-ubuntu-UBUNTUVERSION
+Package: gcc-15-ubuntu-UBUNTUVERSION
 Version: VERSION
 Architecture: TARGETARCH
 Maintainer: Alex Kremer <akremer@ripple.com>
@@ -1,6 +1,6 @@
 services:
   clio_develop:
-    image: ghcr.io/xrplf/clio-ci:latest
+    image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
     volumes:
       - clio_develop_conan_data:/root/.conan2/p
       - clio_develop_ccache:/root/.ccache
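
Pinning `clio_develop` to a commit-SHA tag makes the dev container reproducible; refreshing it after the pin changes is the usual compose workflow:

```sh
docker compose pull clio_develop       # fetch the newly pinned image
docker compose run --rm clio_develop   # start a disposable dev shell on it
```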
@@ -8,19 +8,17 @@ ARG TARGETARCH

 SHELL ["/bin/bash", "-o", "pipefail", "-c"]

-ARG BUILD_VERSION=1
+ARG BUILD_VERSION=2

 RUN apt-get update \
     && apt-get install -y --no-install-recommends --no-install-suggests \
-    bison \
-    flex \
     ninja-build \
     python3 \
     python3-pip \
     software-properties-common \
     wget \
     && pip3 install -q --no-cache-dir \
-    cmake==3.31.6 \
+    cmake \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
@@ -46,6 +44,13 @@ RUN wget --progress=dot:giga "https://github.com/ccache/ccache/releases/download
     && ninja install \
     && rm -rf /tmp/* /var/tmp/*

+RUN apt-get update \
+    && apt-get install -y --no-install-recommends --no-install-suggests \
+    bison \
+    flex \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
 ARG DOXYGEN_VERSION=1.12.0
 RUN wget --progress=dot:giga "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
     && tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
@@ -78,4 +83,22 @@ RUN wget --progress=dot:giga "https://github.com/cli/cli/releases/download/v${GH
     && mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/local/bin/gh \
     && rm -rf /tmp/* /var/tmp/*

+RUN apt-get update \
+    && apt-get install -y --no-install-recommends --no-install-suggests \
+    libgmp-dev \
+    libmpfr-dev \
+    libncurses-dev \
+    make \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+ARG GDB_VERSION=16.3
+RUN wget --progress=dot:giga "https://sourceware.org/pub/gdb/releases/gdb-${GDB_VERSION}.tar.gz" \
+    && tar xf "gdb-${GDB_VERSION}.tar.gz" \
+    && cd "gdb-${GDB_VERSION}" \
+    && ./configure --prefix=/usr/local \
+    && make -j "$(nproc)" \
+    && make install-gdb \
+    && rm -rf /tmp/* /var/tmp/*
+
 WORKDIR /root
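
A quick check that the from-source gdb landed in the final image:

```sh
gdb --version | head -1   # should report GDB 16.3, matching GDB_VERSION above
```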
@@ -6,16 +6,22 @@
|
|||||||
## Minimum Requirements
|
## Minimum Requirements
|
||||||
|
|
||||||
- [Python 3.7](https://www.python.org/downloads/)
|
- [Python 3.7](https://www.python.org/downloads/)
|
||||||
- [Conan 2.17.0](https://conan.io/downloads.html)
|
- [Conan 2.20.1](https://conan.io/downloads.html)
|
||||||
- [CMake 3.20, <4.0](https://cmake.org/download/)
|
- [CMake 3.20](https://cmake.org/download/)
|
||||||
- [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html): needed for code coverage generation
|
- [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html): needed for code coverage generation
|
||||||
- [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often
|
- [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often
|
||||||
|
|
||||||
|
We use our Docker image `ghcr.io/XRPLF/clio-ci` to build `Clio`, see [Building Clio with Docker](#building-clio-with-docker).
|
||||||
|
You can find information about exact compiler versions and tools in the [image's README](https://github.com/XRPLF/clio/blob/develop/docker/ci/README.md).
|
||||||
|
|
||||||
|
The following compiler version are guaranteed to work.
|
||||||
|
Any compiler with lower version may not be able to build Clio:
|
||||||
|
|
||||||
| Compiler | Version |
|
| Compiler | Version |
|
||||||
| ----------- | ------- |
|
| ----------- | ------- |
|
||||||
| GCC | 12.3 |
|
| GCC | 15.2 |
|
||||||
| Clang | 16 |
|
| Clang | 19 |
|
||||||
| Apple Clang | 15 |
|
| Apple Clang | 17 |
|
||||||
|
|
||||||
### Conan Configuration
|
### Conan Configuration
|
||||||
|
|
||||||
@@ -84,7 +90,7 @@ core.upload:parallel={{os.cpu_count()}}
|
|||||||
Make sure artifactory is setup with Conan.
|
Make sure artifactory is setup with Conan.
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev
|
conan remote add --index 0 xrplf https://conan.ripplex.io
|
||||||
```
|
```
|
||||||
|
|
||||||
Now you should be able to download the prebuilt dependencies (including `xrpl` package) on supported platforms.
|
Now you should be able to download the prebuilt dependencies (including `xrpl` package) on supported platforms.
|
||||||
@@ -98,79 +104,100 @@ It is implicitly used when running `conan` commands, you don't need to specify i
|
|||||||
|
|
||||||
You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.
|
You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.
|
||||||
|
|
||||||
To do that, run the following command in the repository root:
|
> [!NOTE]
|
||||||
|
> Conan uses the local cache by default when creating a lockfile.
|
||||||
|
>
|
||||||
|
> To ensure that lockfile creation works the same way on all developer machines, you should clear the local cache before creating a new lockfile.
|
||||||
|
|
||||||
|
To create a new lockfile, run the following commands in the repository root:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
conan lock create . -o '&:tests=True' -o '&:benchmark=True'
|
conan remove '*' --confirm
|
||||||
|
rm conan.lock
|
||||||
|
# This ensures that the xrplf remote is the first to be consulted
|
||||||
|
conan remote add --force --index 0 xrplf https://conan.ripplex.io
|
||||||
|
conan lock create .
|
||||||
```
|
```
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> If some dependencies are exclusive to a particular OS, you may need to rerun the last command for them, adding `--profile:all <PROFILE>`.
|
||||||
|
|
||||||
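For example, a minimal sketch for a macOS-only dependency set, assuming a Conan profile named `apple-clang` exists on your machine (the profile name is hypothetical):

```bash
# Re-run lockfile creation for an OS-specific profile (profile name is an example)
conan lock create . --profile:all apple-clang
```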
## Building Clio
|
## Building Clio
|
||||||
|
|
||||||
Navigate to Clio's root directory and run:
|
1. Navigate to Clio's root directory and run:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
mkdir build && cd build
|
mkdir build && cd build
|
||||||
# You can also specify profile explicitly by adding `--profile:all <PROFILE_NAME>`
|
```
|
||||||
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True'
|
|
||||||
# You can also add -GNinja to use Ninja build system instead of Make
|
|
||||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
|
|
||||||
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
|
|
||||||
```
|
|
||||||
|
|
||||||
> [!TIP]
|
2. Install dependencies through Conan.
|
||||||
> You can omit the `-o '&:tests=True'` if you don't want to build `clio_tests`.
|
|
||||||
|
|
||||||
If successful, `conan install` will find the required packages and `cmake` will do the rest. You should see `clio_server` and `clio_tests` in the `build` directory (the current directory).
|
```sh
|
||||||
|
conan install .. --output-folder . --build missing --settings build_type=Release
|
||||||
|
```
|
||||||
|
|
||||||
> [!TIP]
|
> You can add `--profile:all <PROFILE_NAME>` to choose a specific Conan profile.
|
||||||
> To generate a Code Coverage report, include `-o '&:coverage=True'` in the `conan install` command above, along with `-o '&:tests=True'` to enable tests.
|
|
||||||
> After running the `cmake` commands, execute `make clio_tests-ccov`.
|
|
||||||
> The coverage report will be found at `clio_tests-llvm-cov/index.html`.
|
|
||||||
|
|
||||||
<!-- markdownlint-disable-line MD028 -->
|
3. Configure and generate build files with CMake.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
|
||||||
|
```
|
||||||
|
|
||||||
|
> You can add `-GNinja` to use the Ninja build system (instead of Make).
|
||||||
|
|
||||||
|
4. Now, you can build all targets or specific ones:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# builds all targets
|
||||||
|
cmake --build . --parallel 8
|
||||||
|
# builds only clio_server target
|
||||||
|
cmake --build . --parallel 8 --target clio_server
|
||||||
|
```
|
||||||
|
|
||||||
|
You should see `clio_server` and `clio_tests` in the current directory.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> If you've built Clio before and the build is now failing, it's likely due to updated dependencies. Try deleting the build folder and then rerunning the Conan and CMake commands mentioned above.
|
> If you've built Clio before and the build is now failing, it's likely due to updated dependencies. Try deleting the build folder and then rerunning the Conan and CMake commands mentioned above.
|
||||||
|
|
||||||
|
### CMake options
|
||||||
|
|
||||||
|
There are several CMake options you can use to customize the build:
|
||||||
|
|
||||||
|
| CMake Option | Default | CMake Target | Description |
|
||||||
|
| --------------------- | ------- | -------------------------------------------------------- | ------------------------------------- |
|
||||||
|
| `-Dcoverage` | OFF | `clio_tests-ccov` | Enables code coverage generation |
|
||||||
|
| `-Dtests` | OFF | `clio_tests` | Enables unit tests |
|
||||||
|
| `-Dintegration_tests` | OFF | `clio_integration_tests` | Enables integration tests |
|
||||||
|
| `-Dbenchmark` | OFF | `clio_benchmark` | Enables benchmark executable |
|
||||||
|
| `-Ddocs` | OFF | `docs` | Enables API documentation generation |
|
||||||
|
| `-Dlint` | OFF | See [#clang-tidy](#using-clang-tidy-for-static-analysis) | Enables `clang-tidy` static analysis |
|
||||||
|
| `-Dsan` | N/A | N/A | Enables Sanitizer (asan, tsan, ubsan) |
|
||||||
|
| `-Dpackage` | OFF | N/A | Creates a debian package |
|
||||||
|
|
||||||
### Generating API docs for Clio
|
### Generating API docs for Clio
|
||||||
|
|
||||||
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen 1.12.0 on your system.
|
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen 1.12.0 on your system.
|
||||||
|
|
||||||
To generate the API docs:
|
To generate the API docs, use the CMake option `-Ddocs=ON` as described above and build the `docs` target.
|
||||||
|
|
||||||
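A minimal sketch reusing the commands from the build steps above:

```sh
# Configure with docs enabled, then build only the docs target
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Ddocs=ON ..
cmake --build . --parallel 8 --target docs
```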
1. First, include `-o '&:docs=True'` in the conan install command. For example:
|
To view the generated files, go to `build/docs/html`.
|
||||||
|
Open the `index.html` file in your browser to see the documentation pages.
|
||||||
|
|
||||||
```sh
|

|
||||||
mkdir build && cd build
|
|
||||||
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True' -o '&:docs=True'
|
|
||||||
```
|
|
||||||
|
|
||||||
2. Once that has completed successfully, run the `cmake` command and add the `--target docs` option:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
|
|
||||||
cmake --build . --parallel 8 --target docs
|
|
||||||
```
|
|
||||||
|
|
||||||
3. Go to `build/docs/html` to view the generated files.
|
|
||||||
|
|
||||||
Open the `index.html` file in your browser to see the documentation pages.
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
## Building Clio with Docker
|
## Building Clio with Docker
|
||||||
|
|
||||||
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
|
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker run -it ghcr.io/xrplf/clio-ci:latest
|
docker run -it ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
|
||||||
git clone https://github.com/XRPLF/clio
|
git clone https://github.com/XRPLF/clio
|
||||||
mkdir build && cd build
|
cd clio
|
||||||
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True'
|
|
||||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
|
|
||||||
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Follow the same steps as in the [Building Clio](#building-clio) section. You can use `--profile:all gcc` or `--profile:all clang` with the `conan install` command to choose the desired compiler, as sketched below.
|
||||||
|
|
||||||
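A minimal sketch of the dependency step with an explicit profile:

```sh
# Same install command as in the build steps above, pinned to the clang profile
conan install .. --output-folder . --build missing --settings build_type=Release --profile:all clang
```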
## Developing against `rippled` in standalone mode
|
## Developing against `rippled` in standalone mode
|
||||||
|
|
||||||
If you wish to develop against a `rippled` instance running in standalone mode, there are a few quirks of both Clio and `rippled` that you need to keep in mind. You must:
|
If you wish to develop against a `rippled` instance running in standalone mode, there are a few quirks of both Clio and `rippled` that you need to keep in mind. You must:
|
||||||
@@ -223,10 +250,10 @@ Sometimes, during development, you need to build against a custom version of `li
|
|||||||
## Using `clang-tidy` for static analysis
|
## Using `clang-tidy` for static analysis
|
||||||
|
|
||||||
Clang-tidy can be run by CMake when building the project.
|
Clang-tidy can be run by CMake when building the project.
|
||||||
To achieve this, you just need to provide the option `-o '&:lint=True'` for the `conan install` command:
|
To achieve this, you just need to provide the option `-Dlint=ON` when generating CMake files:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True' -o '&:lint=True' --profile:all clang
|
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dlint=ON ..
|
||||||
```
|
```
|
||||||
|
|
||||||
By default, CMake will try to find `clang-tidy` automatically on your system.
|
By default, CMake will try to find `clang-tidy` automatically on your system.
|
||||||
|
|||||||
@@ -3,7 +3,9 @@
|
|||||||
This document provides a list of all available Clio configuration properties in detail.
|
This document provides a list of all available Clio configuration properties in detail.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> Dot notation in configuration key names represents nested fields. For example, **database.scylladb** refers to the _scylladb_ field inside the _database_ object. If a key name includes "[]", it indicates that the nested field is an array (e.g., etl_sources.[]).
|
> Dot notation in configuration key names represents nested fields.
|
||||||
|
> For example, **database.scylladb** refers to the _scylladb_ field inside the _database_ object.
|
||||||
|
> If a key name includes "[]", it indicates that the nested field is an array (e.g., etl_sources.[]).
|
||||||
|
|
||||||
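For instance, a minimal skeleton illustrating both notations (the nested objects are left empty for brevity):

```json
{
  "database": {
    "scylladb": {}
  },
  "etl_sources": [{}]
}
```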
## Configuration Details
|
## Configuration Details
|
||||||
|
|
||||||
@@ -87,6 +89,14 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||||
- **Description**: Represents the number of threads that will be used for database operations.
|
- **Description**: Represents the number of threads that will be used for database operations.
|
||||||
|
|
||||||
|
### database.cassandra.provider
|
||||||
|
|
||||||
|
- **Required**: True
|
||||||
|
- **Type**: string
|
||||||
|
- **Default value**: `cassandra`
|
||||||
|
- **Constraints**: The value must be one of the following: `cassandra`, `aws_keyspace`.
|
||||||
|
- **Description**: The specific database backend provider we are using.
|
||||||
|
|
||||||
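A minimal sketch of this setting in a config file (the value shown is one of the two allowed options):

```json
"database": {
  "cassandra": {
    "provider": "aws_keyspace"
  }
}
```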
### database.cassandra.core_connections_per_host
|
### database.cassandra.core_connections_per_host
|
||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
@@ -155,7 +165,7 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: boolean
|
- **Type**: boolean
|
||||||
- **Default value**: `True`
|
- **Default value**: `False`
|
||||||
- **Constraints**: None
|
- **Constraints**: None
|
||||||
- **Description**: If set to `True`, allows Clio to start without any ETL source.
|
- **Description**: If set to `True`, allows Clio to start without any ETL source.
|
||||||
|
|
||||||
@@ -327,11 +337,27 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||||
- **Description**: Maximum queue size for sending subscription data to clients. This queue buffers data when a client is slow to receive it, ensuring delivery once the client is ready.
|
- **Description**: Maximum queue size for sending subscription data to clients. This queue buffers data when a client is slow to receive it, ensuring delivery once the client is ready.
|
||||||
|
|
||||||
|
### server.proxy.ips.[]
|
||||||
|
|
||||||
|
- **Required**: True
|
||||||
|
- **Type**: string
|
||||||
|
- **Default value**: None
|
||||||
|
- **Constraints**: None
|
||||||
|
- **Description**: List of proxy IP addresses. When Clio receives a request from a proxy, it uses the `Forwarded` header value (if any) as the client IP. When this option is used together with `server.proxy.tokens`, Clio identifies a proxy by IP or by token.
|
||||||
|
|
||||||
|
### server.proxy.tokens.[]
|
||||||
|
|
||||||
|
- **Required**: True
|
||||||
|
- **Type**: string
|
||||||
|
- **Default value**: None
|
||||||
|
- **Constraints**: None
|
||||||
|
- **Description**: List of tokens identifying a request as coming from a proxy. The token should be provided in the `X-Proxy-Token` header, e.g. `X-Proxy-Token: <very_secret_token>`. When Clio receives a request from a proxy, it uses the `Forwarded` header value (if any) to get the client IP. When this option is used together with `server.proxy.ips`, Clio identifies a proxy by IP or by token.
|
||||||
|
|
||||||
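A minimal sketch combining both options (the IP and token values are placeholders):

```json
"server": {
  "proxy": {
    "ips": ["10.0.0.1"],
    "tokens": ["very_secret_token"]
  }
}
```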
### prometheus.enabled
|
### prometheus.enabled
|
||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: boolean
|
- **Type**: boolean
|
||||||
- **Default value**: `False`
|
- **Default value**: `True`
|
||||||
- **Constraints**: None
|
- **Constraints**: None
|
||||||
- **Description**: Enables or disables Prometheus metrics.
|
- **Description**: Enables or disables Prometheus metrics.
|
||||||
|
|
||||||
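A minimal sketch of this setting (enabling metrics):

```json
"prometheus": {
  "enabled": true
}
```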
@@ -339,7 +365,7 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: boolean
|
- **Type**: boolean
|
||||||
- **Default value**: `False`
|
- **Default value**: `True`
|
||||||
- **Constraints**: None
|
- **Constraints**: None
|
||||||
- **Description**: Enables or disables compression of Prometheus responses.
|
- **Description**: Enables or disables compression of Prometheus responses.
|
||||||
|
|
||||||
@@ -415,7 +441,7 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
- **Constraints**: The value must be one of the following: `sync`, `async`, `none`.
|
- **Constraints**: The value must be one of the following: `sync`, `async`, `none`.
|
||||||
- **Description**: The strategy used for Cache loading.
|
- **Description**: The strategy used for Cache loading.
|
||||||
|
|
||||||
### log_channels.[].channel
|
### log.channels.[].channel
|
||||||
|
|
||||||
- **Required**: False
|
- **Required**: False
|
||||||
- **Type**: string
|
- **Type**: string
|
||||||
@@ -423,39 +449,63 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
- **Constraints**: The value must be one of the following: `General`, `WebServer`, `Backend`, `RPC`, `ETL`, `Subscriptions`, `Performance`, `Migration`.
|
- **Constraints**: The value must be one of the following: `General`, `WebServer`, `Backend`, `RPC`, `ETL`, `Subscriptions`, `Performance`, `Migration`.
|
||||||
- **Description**: The name of the log channel.
|
- **Description**: The name of the log channel.
|
||||||
|
|
||||||
### log_channels.[].log_level
|
### log.channels.[].level
|
||||||
|
|
||||||
- **Required**: False
|
- **Required**: False
|
||||||
- **Type**: string
|
- **Type**: string
|
||||||
- **Default value**: None
|
- **Default value**: None
|
||||||
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`, `count`.
|
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`.
|
||||||
- **Description**: The log level for the specific log channel.
|
- **Description**: The log level for the specific log channel.
|
||||||
|
|
||||||
### log_level
|
### log.level
|
||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: string
|
- **Type**: string
|
||||||
- **Default value**: `info`
|
- **Default value**: `info`
|
||||||
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`, `count`.
|
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`.
|
||||||
- **Description**: The general logging level of Clio. This level is applied to all log channels that do not have an explicitly defined logging level.
|
- **Description**: The general logging level of Clio. This level is applied to all log channels that do not have an explicitly defined logging level.
|
||||||
|
|
||||||
### log_format
|
### log.format
|
||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: string
|
- **Type**: string
|
||||||
- **Default value**: `%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%`
|
- **Default value**: `%Y-%m-%d %H:%M:%S.%f %^%3!l:%n%$ - %v`
|
||||||
- **Constraints**: None
|
- **Constraints**: None
|
||||||
- **Description**: The format string for log messages. The format is described here: <https://www.boost.org/doc/libs/1_83_0/libs/log/doc/html/log/tutorial/formatters.html>.
|
- **Description**: The format string for log messages using spdlog format patterns.
|
||||||
|
|
||||||
### log_to_console
|
Each of the variables expands like so:
|
||||||
|
|
||||||
|
- `%Y-%m-%d %H:%M:%S.%f`: The full date and time of the log entry with microsecond precision
|
||||||
|
- `%^`: Start color range
|
||||||
|
- `%3!l`: The severity (aka log level) the entry was sent at, truncated to 3 characters
|
||||||
|
- `%n`: The logger name (channel) that this log entry was sent to
|
||||||
|
- `%$`: End color range
|
||||||
|
- `%v`: The actual log message
|
||||||
|
|
||||||
|
Some additional variables that might be useful:
|
||||||
|
|
||||||
|
- `%@`: A partial path to the C++ file and the line number in that file (`src/file/path:linenumber`)
|
||||||
|
- `%t`: The ID of the thread the log entry is written from
|
||||||
|
|
||||||
|
Documentation can be found at: <https://github.com/gabime/spdlog/wiki/Custom-formatting>.
|
||||||
|
|
||||||
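For example, a sketch of a custom format that adds the source-location and thread-ID variables listed above:

```json
"log": {
  "format": "%Y-%m-%d %H:%M:%S.%f %@ [%t] %^%3!l:%n%$ - %v"
}
```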
|
### log.is_async
|
||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: boolean
|
- **Type**: boolean
|
||||||
- **Default value**: `True`
|
- **Default value**: `True`
|
||||||
- **Constraints**: None
|
- **Constraints**: None
|
||||||
|
- **Description**: Whether spdlog is asynchronous or not.
|
||||||
|
|
||||||
|
### log.enable_console
|
||||||
|
|
||||||
|
- **Required**: True
|
||||||
|
- **Type**: boolean
|
||||||
|
- **Default value**: `False`
|
||||||
|
- **Constraints**: None
|
||||||
- **Description**: Enables or disables logging to the console.
|
- **Description**: Enables or disables logging to the console.
|
||||||
|
|
||||||
### log_directory
|
### log.directory
|
||||||
|
|
||||||
- **Required**: False
|
- **Required**: False
|
||||||
- **Type**: string
|
- **Type**: string
|
||||||
@@ -463,7 +513,7 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
- **Constraints**: None
|
- **Constraints**: None
|
||||||
- **Description**: The directory path for the log files.
|
- **Description**: The directory path for the log files.
|
||||||
|
|
||||||
### log_rotation_size
|
### log.rotation_size
|
||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: int
|
- **Type**: int
|
||||||
@@ -471,23 +521,15 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||||
- **Description**: The log rotation size in megabytes. When the log file reaches this particular size, a new log file starts.
|
- **Description**: The log rotation size in megabytes. When the log file reaches this particular size, a new log file starts.
|
||||||
|
|
||||||
### log_directory_max_size
|
### log.directory_max_files
|
||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: int
|
- **Type**: int
|
||||||
- **Default value**: `51200`
|
- **Default value**: `25`
|
||||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||||
- **Description**: The maximum size of the log directory in megabytes.
|
- **Description**: The maximum number of log files in the directory.
|
||||||
|
|
||||||
### log_rotation_hour_interval
|
### log.tag_style
|
||||||
|
|
||||||
- **Required**: True
|
|
||||||
- **Type**: int
|
|
||||||
- **Default value**: `12`
|
|
||||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
|
||||||
- **Description**: Represents the interval (in hours) for log rotation. If the current log file reaches this value in logging, a new log file starts.
|
|
||||||
|
|
||||||
### log_tag_style
|
|
||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: string
|
- **Type**: string
|
||||||
@@ -507,7 +549,7 @@ This document provides a list of all available Clio configuration properties in
|
|||||||
|
|
||||||
- **Required**: True
|
- **Required**: True
|
||||||
- **Type**: boolean
|
- **Type**: boolean
|
||||||
- **Default value**: `True`
|
- **Default value**: `False`
|
||||||
- **Constraints**: None
|
- **Constraints**: None
|
||||||
- **Description**: Indicates if the server is allowed to write data to the database.
|
- **Description**: Indicates if the server is allowed to write data to the database.
|
||||||
|
|
||||||
|
|||||||
@@ -88,13 +88,15 @@ Exactly equal password gains admin rights for the request or a websocket connect
|
|||||||
Clio can cache requests to ETL sources to reduce the load on the ETL source.
|
Clio can cache requests to ETL sources to reduce the load on the ETL source.
|
||||||
Only the following commands are cached: `server_info`, `server_state`, `server_definitions`, `fee`, `ledger_closed`.
|
Only the following commands are cached: `server_info`, `server_state`, `server_definitions`, `fee`, `ledger_closed`.
|
||||||
By default the forwarding cache is off.
|
By default the forwarding cache is off.
|
||||||
To enable the caching for a source, `forwarding_cache_timeout` value should be added to the configuration file, e.g.:
|
To enable caching for a source, add a `forwarding.cache_timeout` value to the configuration file, e.g.:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
"forwarding_cache_timeout": 0.250,
|
"forwarding": {
|
||||||
|
"cache_timeout": 0.250,
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
`forwarding_cache_timeout` defines for how long (in seconds) a cache entry will be valid after being placed into the cache.
|
`forwarding.cache_timeout` defines for how long (in seconds) a cache entry will be valid after being placed into the cache.
|
||||||
A zero value turns off the cache feature.
|
A zero value turns off the cache feature.
|
||||||
|
|
||||||
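For example, a sketch that explicitly disables the cache:

```json
"forwarding": {
  "cache_timeout": 0
}
```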
## Graceful shutdown (not fully implemented yet)
|
## Graceful shutdown (not fully implemented yet)
|
||||||
|
|||||||
@@ -31,7 +31,7 @@
|
|||||||
"etl_sources": [
|
"etl_sources": [
|
||||||
{
|
{
|
||||||
"ip": "127.0.0.1",
|
"ip": "127.0.0.1",
|
||||||
"ws_port": "6005",
|
"ws_port": "6006",
|
||||||
"grpc_port": "50051"
|
"grpc_port": "50051"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@@ -76,38 +76,60 @@
|
|||||||
"parallel_requests_limit": 10, // Optional parameter, used only if "processing_strategy" is "parallel". It limits the number of requests for one client connection processed in parallel. Infinite if not specified.
|
"parallel_requests_limit": 10, // Optional parameter, used only if "processing_strategy" is "parallel". It limits the number of requests for one client connection processed in parallel. Infinite if not specified.
|
||||||
// Max number of responses to queue up before being sent successfully. If a client's waiting queue is too long, the server will close the connection.
|
// Max number of responses to queue up before being sent successfully. If a client's waiting queue is too long, the server will close the connection.
|
||||||
"ws_max_sending_queue_size": 1500,
|
"ws_max_sending_queue_size": 1500,
|
||||||
"__ng_web_server": false // Use ng web server. This is a temporary setting which will be deleted after switching to ng web server
|
"__ng_web_server": false, // Use ng web server. This is a temporary setting which will be deleted after switching to ng web server
|
||||||
|
"proxy": {
|
||||||
|
"ips": [],
|
||||||
|
"tokens": []
|
||||||
|
}
|
||||||
},
|
},
|
||||||
// Time in seconds for graceful shutdown. Defaults to 10 seconds. Not fully implemented yet.
|
// Time in seconds for graceful shutdown. Defaults to 10 seconds. Not fully implemented yet.
|
||||||
"graceful_period": 10.0,
|
"graceful_period": 10.0,
|
||||||
// Overrides log level on a per logging channel.
|
"log": {
|
||||||
// Defaults to global "log_level" for each unspecified channel.
|
// Overrides log level on a per logging channel.
|
||||||
"log_channels": [
|
// Defaults to global "log.level" for each unspecified channel.
|
||||||
{
|
"channels": [
|
||||||
"channel": "Backend",
|
{
|
||||||
"log_level": "fatal"
|
"channel": "Backend",
|
||||||
},
|
"level": "fatal"
|
||||||
{
|
},
|
||||||
"channel": "WebServer",
|
{
|
||||||
"log_level": "info"
|
"channel": "WebServer",
|
||||||
},
|
"level": "info"
|
||||||
{
|
},
|
||||||
"channel": "Subscriptions",
|
{
|
||||||
"log_level": "info"
|
"channel": "Subscriptions",
|
||||||
},
|
"level": "info"
|
||||||
{
|
},
|
||||||
"channel": "RPC",
|
{
|
||||||
"log_level": "error"
|
"channel": "RPC",
|
||||||
},
|
"level": "error"
|
||||||
{
|
},
|
||||||
"channel": "ETL",
|
{
|
||||||
"log_level": "debug"
|
"channel": "ETL",
|
||||||
},
|
"level": "debug"
|
||||||
{
|
},
|
||||||
"channel": "Performance",
|
{
|
||||||
"log_level": "trace"
|
"channel": "Performance",
|
||||||
}
|
"level": "trace"
|
||||||
],
|
}
|
||||||
|
],
|
||||||
|
// The general logging level of Clio. This level is applied to all log channels that do not have an explicitly defined logging level.
|
||||||
|
"level": "info",
|
||||||
|
// Log format using spdlog format patterns (this is the default format)
|
||||||
|
"format": "%Y-%m-%d %H:%M:%S.%f %^%3!l:%n%$ - %v",
|
||||||
|
// Whether spdlog is asynchronous or not.
|
||||||
|
"is_async": true,
|
||||||
|
// Enables or disables logging to the console.
|
||||||
|
"enable_console": true,
|
||||||
|
// Clio logs to file in the specified directory only if "log.directory" is set
|
||||||
|
// "directory": "./clio_log",
|
||||||
|
// The log rotation size in megabytes. When the log file reaches this particular size, a new log file starts.
|
||||||
|
"rotation_size": 2048,
|
||||||
|
// The maximum number of log files in the directory.
|
||||||
|
"directory_max_files": 25,
|
||||||
|
// Log tags style to use
|
||||||
|
"tag_style": "uint"
|
||||||
|
},
|
||||||
"cache": {
|
"cache": {
|
||||||
// Configure this to use either "num_diffs", "num_cursors_from_diff", or "num_cursors_from_account". By default, Clio uses "num_diffs".
|
// Configure this to use either "num_diffs", "num_cursors_from_diff", or "num_cursors_from_account". By default, Clio uses "num_diffs".
|
||||||
"num_diffs": 32, // Generate the cursors from the latest ledger diff, then use the cursors to partition the ledger to load concurrently. The cursors number is affected by the busyness of the network.
|
"num_diffs": 32, // Generate the cursors from the latest ledger diff, then use the cursors to partition the ledger to load concurrently. The cursors number is affected by the busyness of the network.
|
||||||
@@ -121,16 +143,6 @@
|
|||||||
"enabled": true,
|
"enabled": true,
|
||||||
"compress_reply": true
|
"compress_reply": true
|
||||||
},
|
},
|
||||||
"log_level": "info",
|
|
||||||
// Log format (this is the default format)
|
|
||||||
"log_format": "%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%",
|
|
||||||
"log_to_console": true,
|
|
||||||
// Clio logs to file in the specified directory only if "log_directory" is set
|
|
||||||
// "log_directory": "./clio_log",
|
|
||||||
"log_rotation_size": 2048,
|
|
||||||
"log_directory_max_size": 51200,
|
|
||||||
"log_rotation_hour_interval": 12,
|
|
||||||
"log_tag_style": "uint",
|
|
||||||
"extractor_threads": 8,
|
"extractor_threads": 8,
|
||||||
"read_only": false,
|
"read_only": false,
|
||||||
// "start_sequence": [integer] the ledger index to start from,
|
// "start_sequence": [integer] the ledger index to start from,
|
||||||
|
|||||||
@@ -1,76 +0,0 @@
|
|||||||
# Logging
|
|
||||||
|
|
||||||
Clio provides several logging options, which all are configurable via the config file. These are detailed in the following sections.
|
|
||||||
|
|
||||||
## `log_level`
|
|
||||||
|
|
||||||
The minimum level of severity at which the log message will be outputted by default. Severity options are `trace`, `debug`, `info`, `warning`, `error`, `fatal`. Defaults to `info`.
|
|
||||||
|
|
||||||
## `log_format`
|
|
||||||
|
|
||||||
The format of log lines produced by Clio. Defaults to `"%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%"`.
|
|
||||||
|
|
||||||
Each of the variables expands like so:
|
|
||||||
|
|
||||||
- `TimeStamp`: The full date and time of the log entry
|
|
||||||
- `SourceLocation`: A partial path to the c++ file and the line number in said file (`source/file/path:linenumber`)
|
|
||||||
- `ThreadID`: The ID of the thread the log entry is written from
|
|
||||||
- `Channel`: The channel that this log entry was sent to
|
|
||||||
- `Severity`: The severity (aka log level) the entry was sent at
|
|
||||||
- `Message`: The actual log message
|
|
||||||
|
|
||||||
## `log_channels`
|
|
||||||
|
|
||||||
An array of JSON objects, each overriding properties for a logging `channel`.
|
|
||||||
|
|
||||||
> [!IMPORTANT]
|
|
||||||
> At the time of writing, only `log_level` can be overridden using this mechanism.
|
|
||||||
|
|
||||||
Each object is of this format:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"channel": "Backend",
|
|
||||||
"log_level": "fatal"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
If no override is present for a given channel, that channel will log at the severity specified by the global `log_level`.
|
|
||||||
|
|
||||||
The log channels that can be overridden are: `Backend`, `WebServer`, `Subscriptions`, `RPC`, `ETL` and `Performance`.
|
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> See [example-config.json](../docs/examples/config/example-config.json) for more details.
|
|
||||||
|
|
||||||
## `log_to_console`
|
|
||||||
|
|
||||||
Enable or disable log output to console. Options are `true`/`false`. This option defaults to `true`.
|
|
||||||
|
|
||||||
## `log_directory`
|
|
||||||
|
|
||||||
Path to the directory where log files are stored. If such directory doesn't exist, Clio will create it.
|
|
||||||
|
|
||||||
If the option is not specified, the logs are not written to a file.
|
|
||||||
|
|
||||||
## `log_rotation_size`
|
|
||||||
|
|
||||||
The max size of the log file in **megabytes** before it will rotate into a smaller file. Defaults to 2GB.
|
|
||||||
|
|
||||||
## `log_directory_max_size`
|
|
||||||
|
|
||||||
The max size of the log directory in **megabytes** before old log files will be deleted to free up space. Defaults to 50GB.
|
|
||||||
|
|
||||||
## `log_rotation_hour_interval`
|
|
||||||
|
|
||||||
The time interval in **hours** after the last log rotation to automatically rotate the current log file. Defaults to 12 hours.
|
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> Log rotation based on time occurs in conjunction with size-based log rotation. For example, if a size-based log rotation occurs, the timer for the time-based rotation will reset.
|
|
||||||
|
|
||||||
## `log_tag_style`
|
|
||||||
|
|
||||||
Tag implementation to use. Must be one of:
|
|
||||||
|
|
||||||
- `uint`: Lock free and threadsafe but outputs just a simple unsigned integer
|
|
||||||
- `uuid`: Threadsafe and outputs a UUID tag
|
|
||||||
- `none`: Doesn't use tagging at all
|
|
||||||
@@ -5,11 +5,14 @@ import re
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
PATTERN = r'R"JSON\((.*?)\)JSON"'
|
||||||
|
|
||||||
|
|
||||||
|
def use_uppercase(cpp_content: str) -> str:
|
||||||
|
return cpp_content.replace('R"json(', 'R"JSON(').replace(')json"', ')JSON"')
|
||||||
|
|
||||||
|
|
||||||
def fix_json_style(cpp_content: str) -> str:
|
def fix_json_style(cpp_content: str) -> str:
|
||||||
cpp_content = cpp_content.replace('R"json(', 'R"JSON(').replace(')json"', ')JSON"')
|
|
||||||
|
|
||||||
pattern = r'R"JSON\((.*?)\)JSON"'
|
|
||||||
|
|
||||||
def replace_json(match):
|
def replace_json(match):
|
||||||
raw_json = match.group(1)
|
raw_json = match.group(1)
|
||||||
|
|
||||||
@@ -29,12 +32,51 @@ def fix_json_style(cpp_content: str) -> str:
|
|||||||
raw_json = raw_json.replace(f'":{digit}', f'": {digit}')
|
raw_json = raw_json.replace(f'":{digit}', f'": {digit}')
|
||||||
return f'R"JSON({raw_json})JSON"'
|
return f'R"JSON({raw_json})JSON"'
|
||||||
|
|
||||||
return re.sub(pattern, replace_json, cpp_content, flags=re.DOTALL)
|
return re.sub(PATTERN, replace_json, cpp_content, flags=re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
|
def fix_colon_spacing(cpp_content: str) -> str:
|
||||||
|
def replace_json(match):
|
||||||
|
raw_json = match.group(1)
|
||||||
|
raw_json = re.sub(r'":\n\s*(\[|\{)', r'": \1', raw_json)
|
||||||
|
return f'R"JSON({raw_json})JSON"'
|
||||||
|
return re.sub(PATTERN, replace_json, cpp_content, flags=re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
|
def fix_indentation(cpp_content: str) -> str:
|
||||||
|
lines = cpp_content.splitlines()
|
||||||
|
|
||||||
|
def find_indentation(line: str) -> int:
|
||||||
|
return len(line) - len(line.lstrip())
|
||||||
|
|
||||||
|
for (line_num, (line, next_line)) in enumerate(zip(lines[:-1], lines[1:])):
|
||||||
|
if "JSON(" in line and ")JSON" not in line:
|
||||||
|
indent = find_indentation(line)
|
||||||
|
next_indent = find_indentation(next_line)
|
||||||
|
|
||||||
|
by_how_much = next_indent - (indent + 4)
|
||||||
|
if by_how_much != 0:
|
||||||
|
print(
|
||||||
|
f"Indentation error at line: {line_num + 2}: expected {indent + 4} spaces, found {next_indent} spaces"
|
||||||
|
)
|
||||||
|
|
||||||
|
for i in range(line_num + 1, len(lines)):
|
||||||
|
if ")JSON" in lines[i]:
|
||||||
|
lines[i] = " " * indent + lines[i].lstrip()
|
||||||
|
break
|
||||||
|
lines[i] = lines[i][by_how_much:] if by_how_much > 0 else " " * (-by_how_much) + lines[i]
|
||||||
|
|
||||||
|
return "\n".join(lines) + "\n"
|
||||||
|
|
||||||
|
|
||||||
def process_file(file_path: Path, dry_run: bool) -> bool:
|
def process_file(file_path: Path, dry_run: bool) -> bool:
|
||||||
content = file_path.read_text(encoding="utf-8")
|
content = file_path.read_text(encoding="utf-8")
|
||||||
new_content = fix_json_style(content)
|
|
||||||
|
new_content = content
|
||||||
|
new_content = use_uppercase(new_content)
|
||||||
|
new_content = fix_json_style(new_content)
|
||||||
|
new_content = fix_colon_spacing(new_content)
|
||||||
|
new_content = fix_indentation(new_content)
|
||||||
|
|
||||||
if new_content != content:
|
if new_content != content:
|
||||||
print(f"Processing file: {file_path}")
|
print(f"Processing file: {file_path}")
|
||||||
|
|||||||
@@ -23,6 +23,7 @@
|
|||||||
#include "util/build/Build.hpp"
|
#include "util/build/Build.hpp"
|
||||||
#include "util/config/ConfigDescription.hpp"
|
#include "util/config/ConfigDescription.hpp"
|
||||||
|
|
||||||
|
#include <boost/program_options/errors.hpp>
|
||||||
#include <boost/program_options/options_description.hpp>
|
#include <boost/program_options/options_description.hpp>
|
||||||
#include <boost/program_options/parsers.hpp>
|
#include <boost/program_options/parsers.hpp>
|
||||||
#include <boost/program_options/positional_options.hpp>
|
#include <boost/program_options/positional_options.hpp>
|
||||||
@@ -56,12 +57,22 @@ CliArgs::parse(int argc, char const* argv[])
|
|||||||
po::positional_options_description positional;
|
po::positional_options_description positional;
|
||||||
positional.add("conf", 1);
|
positional.add("conf", 1);
|
||||||
|
|
||||||
|
auto const printHelp = [&description]() {
|
||||||
|
std::cout << "Clio server " << util::build::getClioFullVersionString() << "\n\n" << description;
|
||||||
|
};
|
||||||
|
|
||||||
po::variables_map parsed;
|
po::variables_map parsed;
|
||||||
po::store(po::command_line_parser(argc, argv).options(description).positional(positional).run(), parsed);
|
try {
|
||||||
po::notify(parsed);
|
po::store(po::command_line_parser(argc, argv).options(description).positional(positional).run(), parsed);
|
||||||
|
po::notify(parsed);
|
||||||
|
} catch (po::error const& e) {
|
||||||
|
std::cerr << "Error: " << e.what() << std::endl << std::endl;
|
||||||
|
printHelp();
|
||||||
|
return Action{Action::Exit{EXIT_FAILURE}};
|
||||||
|
}
|
||||||
|
|
||||||
if (parsed.contains("help")) {
|
if (parsed.contains("help")) {
|
||||||
std::cout << "Clio server " << util::build::getClioFullVersionString() << "\n\n" << description;
|
printHelp();
|
||||||
return Action{Action::Exit{EXIT_SUCCESS}};
|
return Action{Action::Exit{EXIT_SUCCESS}};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -178,7 +178,9 @@ ClioApplication::run(bool const useNgWebServer)
|
|||||||
}
|
}
|
||||||
auto const adminVerifier = std::move(expectedAdminVerifier).value();
|
auto const adminVerifier = std::move(expectedAdminVerifier).value();
|
||||||
|
|
||||||
auto httpServer = web::ng::makeServer(config_, OnConnectCheck{dosGuard}, DisconnectHook{dosGuard}, ioc);
|
auto httpServer = web::ng::makeServer(
|
||||||
|
config_, OnConnectCheck{dosGuard}, IpChangeHook{dosGuard}, DisconnectHook{dosGuard}, ioc
|
||||||
|
);
|
||||||
|
|
||||||
if (not httpServer.has_value()) {
|
if (not httpServer.has_value()) {
|
||||||
LOG(util::LogService::error()) << "Error creating web server: " << httpServer.error();
|
LOG(util::LogService::error()) << "Error creating web server: " << httpServer.error();
|
||||||
|
|||||||
@@ -19,6 +19,8 @@
|
|||||||
|
|
||||||
#include "app/Stopper.hpp"
|
#include "app/Stopper.hpp"
|
||||||
|
|
||||||
|
#include "util/Spawn.hpp"
|
||||||
|
|
||||||
#include <boost/asio/spawn.hpp>
|
#include <boost/asio/spawn.hpp>
|
||||||
|
|
||||||
#include <functional>
|
#include <functional>
|
||||||
@@ -36,7 +38,7 @@ Stopper::~Stopper()
|
|||||||
void
|
void
|
||||||
Stopper::setOnStop(std::function<void(boost::asio::yield_context)> cb)
|
Stopper::setOnStop(std::function<void(boost::asio::yield_context)> cb)
|
||||||
{
|
{
|
||||||
boost::asio::spawn(ctx_, std::move(cb));
|
util::spawn(ctx_, std::move(cb));
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
|
|||||||
@@ -108,6 +108,8 @@ public:
|
|||||||
|
|
||||||
ioc.stop();
|
ioc.stop();
|
||||||
LOG(util::LogService::info()) << "io_context stopped";
|
LOG(util::LogService::info()) << "io_context stopped";
|
||||||
|
|
||||||
|
util::LogService::shutdown();
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -44,7 +44,7 @@ parseConfig(std::string_view configPath)
|
|||||||
std::cerr << "Error parsing json from config: " << configPath << "\n" << json.error().error << std::endl;
|
std::cerr << "Error parsing json from config: " << configPath << "\n" << json.error().error << std::endl;
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
auto const errors = gClioConfig.parse(json.value());
|
auto const errors = getClioConfig().parse(json.value());
|
||||||
if (errors.has_value()) {
|
if (errors.has_value()) {
|
||||||
for (auto const& err : errors.value()) {
|
for (auto const& err : errors.value()) {
|
||||||
std::cerr << "Issues found in provided config '" << configPath << "':\n";
|
std::cerr << "Issues found in provided config '" << configPath << "':\n";
|
||||||
|
|||||||
@@ -33,6 +33,7 @@
|
|||||||
|
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <optional>
|
#include <optional>
|
||||||
|
#include <string>
|
||||||
#include <utility>
|
#include <utility>
|
||||||
|
|
||||||
namespace app {
|
namespace app {
|
||||||
@@ -54,6 +55,17 @@ OnConnectCheck::operator()(web::ng::Connection const& connection)
|
|||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
IpChangeHook::IpChangeHook(web::dosguard::DOSGuardInterface& dosguard) : dosguard_(dosguard)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
IpChangeHook::operator()(std::string const& oldIp, std::string const& newIp)
|
||||||
|
{
|
||||||
|
dosguard_.get().decrement(oldIp);
|
||||||
|
dosguard_.get().increment(newIp);
|
||||||
|
}
|
||||||
|
|
||||||
DisconnectHook::DisconnectHook(web::dosguard::DOSGuardInterface& dosguard) : dosguard_{dosguard}
|
DisconnectHook::DisconnectHook(web::dosguard::DOSGuardInterface& dosguard) : dosguard_{dosguard}
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -36,6 +36,7 @@
|
|||||||
#include <exception>
|
#include <exception>
|
||||||
#include <functional>
|
#include <functional>
|
||||||
#include <memory>
|
#include <memory>
|
||||||
|
#include <string>
|
||||||
#include <utility>
|
#include <utility>
|
||||||
|
|
||||||
namespace app {
|
namespace app {
|
||||||
@@ -64,6 +65,31 @@ public:
|
|||||||
operator()(web::ng::Connection const& connection);
|
operator()(web::ng::Connection const& connection);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief A function object that is called when the IP of a connection changes (usually when a proxy is detected).
|
||||||
|
* This is used to update the DOS guard.
|
||||||
|
*/
|
||||||
|
class IpChangeHook {
|
||||||
|
std::reference_wrapper<web::dosguard::DOSGuardInterface> dosguard_;
|
||||||
|
|
||||||
|
public:
|
||||||
|
/**
|
||||||
|
* @brief Construct a new IpChangeHook object.
|
||||||
|
*
|
||||||
|
* @param dosguard The DOS guard to use.
|
||||||
|
*/
|
||||||
|
IpChangeHook(web::dosguard::DOSGuardInterface& dosguard);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief The call of the function object.
|
||||||
|
*
|
||||||
|
* @param oldIp The old IP of the connection.
|
||||||
|
* @param newIp The new IP of the connection.
|
||||||
|
*/
|
||||||
|
void
|
||||||
|
operator()(std::string const& oldIp, std::string const& newIp);
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief A function object to be called when a connection is disconnected.
|
* @brief A function object to be called when a connection is disconnected.
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -21,10 +21,16 @@
|
|||||||
|
|
||||||
#include "cluster/ClioNode.hpp"
|
#include "cluster/ClioNode.hpp"
|
||||||
#include "data/BackendInterface.hpp"
|
#include "data/BackendInterface.hpp"
|
||||||
|
#include "util/Assert.hpp"
|
||||||
|
#include "util/Spawn.hpp"
|
||||||
#include "util/log/Logger.hpp"
|
#include "util/log/Logger.hpp"
|
||||||
|
|
||||||
|
#include <boost/asio/bind_cancellation_slot.hpp>
|
||||||
|
#include <boost/asio/cancellation_type.hpp>
|
||||||
|
#include <boost/asio/error.hpp>
|
||||||
#include <boost/asio/spawn.hpp>
|
#include <boost/asio/spawn.hpp>
|
||||||
#include <boost/asio/steady_timer.hpp>
|
#include <boost/asio/steady_timer.hpp>
|
||||||
|
#include <boost/asio/use_future.hpp>
|
||||||
#include <boost/json/parse.hpp>
|
#include <boost/json/parse.hpp>
|
||||||
#include <boost/json/serialize.hpp>
|
#include <boost/json/serialize.hpp>
|
||||||
#include <boost/json/value.hpp>
|
#include <boost/json/value.hpp>
|
||||||
@@ -35,11 +41,16 @@
|
|||||||
|
|
||||||
#include <chrono>
|
#include <chrono>
|
||||||
#include <ctime>
|
#include <ctime>
|
||||||
|
#include <latch>
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <string>
|
#include <string>
|
||||||
#include <utility>
|
#include <utility>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
|
||||||
|
namespace {
|
||||||
|
constexpr auto kTOTAL_WORKERS = 2uz; // 1 reading and 1 writing worker (coroutines)
|
||||||
|
} // namespace
|
||||||
|
|
||||||
namespace cluster {
|
namespace cluster {
|
||||||
|
|
||||||
ClusterCommunicationService::ClusterCommunicationService(
|
ClusterCommunicationService::ClusterCommunicationService(
|
||||||
@@ -50,6 +61,7 @@ ClusterCommunicationService::ClusterCommunicationService(
|
|||||||
: backend_(std::move(backend))
|
: backend_(std::move(backend))
|
||||||
, readInterval_(readInterval)
|
, readInterval_(readInterval)
|
||||||
, writeInterval_(writeInterval)
|
, writeInterval_(writeInterval)
|
||||||
|
, finishedCountdown_(kTOTAL_WORKERS)
|
||||||
, selfData_{ClioNode{
|
, selfData_{ClioNode{
|
||||||
.uuid = std::make_shared<boost::uuids::uuid>(boost::uuids::random_generator{}()),
|
.uuid = std::make_shared<boost::uuids::uuid>(boost::uuids::random_generator{}()),
|
||||||
.updateTime = std::chrono::system_clock::time_point{}
|
.updateTime = std::chrono::system_clock::time_point{}
|
||||||
@@ -62,22 +74,42 @@ ClusterCommunicationService::ClusterCommunicationService(
|
|||||||
void
|
void
|
||||||
ClusterCommunicationService::run()
|
ClusterCommunicationService::run()
|
||||||
{
|
{
|
||||||
boost::asio::spawn(strand_, [this](boost::asio::yield_context yield) {
|
ASSERT(not running_ and not stopped_, "Can only be ran once");
|
||||||
|
running_ = true;
|
||||||
|
|
||||||
|
util::spawn(strand_, [this](boost::asio::yield_context yield) {
|
||||||
boost::asio::steady_timer timer(yield.get_executor());
|
boost::asio::steady_timer timer(yield.get_executor());
|
||||||
while (true) {
|
boost::system::error_code ec;
|
||||||
|
|
||||||
|
while (running_) {
|
||||||
timer.expires_after(readInterval_);
|
timer.expires_after(readInterval_);
|
||||||
timer.async_wait(yield);
|
auto token = cancelSignal_.slot();
|
||||||
|
timer.async_wait(boost::asio::bind_cancellation_slot(token, yield[ec]));
|
||||||
|
|
||||||
|
if (ec == boost::asio::error::operation_aborted or not running_)
|
||||||
|
break;
|
||||||
|
|
||||||
doRead(yield);
|
doRead(yield);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
finishedCountdown_.count_down(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
boost::asio::spawn(strand_, [this](boost::asio::yield_context yield) {
|
util::spawn(strand_, [this](boost::asio::yield_context yield) {
|
||||||
boost::asio::steady_timer timer(yield.get_executor());
|
boost::asio::steady_timer timer(yield.get_executor());
|
||||||
while (true) {
|
boost::system::error_code ec;
|
||||||
|
|
||||||
|
while (running_) {
|
||||||
doWrite();
|
doWrite();
|
||||||
timer.expires_after(writeInterval_);
|
timer.expires_after(writeInterval_);
|
||||||
timer.async_wait(yield);
|
auto token = cancelSignal_.slot();
|
||||||
|
timer.async_wait(boost::asio::bind_cancellation_slot(token, yield[ec]));
|
||||||
|
|
||||||
|
if (ec == boost::asio::error::operation_aborted or not running_)
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
finishedCountdown_.count_down(1);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -92,9 +124,19 @@ ClusterCommunicationService::stop()
|
|||||||
if (stopped_)
|
if (stopped_)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
ctx_.stop();
|
|
||||||
ctx_.join();
|
|
||||||
stopped_ = true;
|
stopped_ = true;
|
||||||
|
|
||||||
|
// for ASAN to see through concurrency correctly we need to exit all coroutines before joining the ctx
|
||||||
|
running_ = false;
|
||||||
|
|
||||||
|
// cancelSignal_ is not thread safe so we execute emit on the same strand
|
||||||
|
boost::asio::spawn(
|
||||||
|
strand_, [this](auto&&) { cancelSignal_.emit(boost::asio::cancellation_type::all); }, boost::asio::use_future
|
||||||
|
)
|
||||||
|
.wait();
|
||||||
|
finishedCountdown_.wait();
|
||||||
|
|
||||||
|
ctx_.join();
|
||||||
}
|
}
|
||||||
|
|
||||||
std::shared_ptr<boost::uuids::uuid>
|
std::shared_ptr<boost::uuids::uuid>
|
||||||
@@ -108,7 +150,7 @@ ClioNode
|
|||||||
ClusterCommunicationService::selfData() const
|
ClusterCommunicationService::selfData() const
|
||||||
{
|
{
|
||||||
ClioNode result{};
|
ClioNode result{};
|
||||||
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) { result = selfData_; });
|
util::spawn(strand_, [this, &result](boost::asio::yield_context) { result = selfData_; });
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -119,7 +161,7 @@ ClusterCommunicationService::clusterData() const
|
|||||||
return std::unexpected{"Service is not healthy"};
|
return std::unexpected{"Service is not healthy"};
|
||||||
}
|
}
|
||||||
std::vector<ClioNode> result;
|
std::vector<ClioNode> result;
|
||||||
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) {
|
util::spawn(strand_, [this, &result](boost::asio::yield_context) {
|
||||||
result = otherNodesData_;
|
result = otherNodesData_;
|
||||||
result.push_back(selfData_);
|
result.push_back(selfData_);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -27,12 +27,15 @@
|
|||||||
#include "util/prometheus/Gauge.hpp"
|
#include "util/prometheus/Gauge.hpp"
|
||||||
#include "util/prometheus/Prometheus.hpp"
|
#include "util/prometheus/Prometheus.hpp"
|
||||||
|
|
||||||
|
#include <boost/asio/cancellation_signal.hpp>
|
||||||
#include <boost/asio/spawn.hpp>
|
#include <boost/asio/spawn.hpp>
|
||||||
#include <boost/asio/strand.hpp>
|
#include <boost/asio/strand.hpp>
|
||||||
#include <boost/asio/thread_pool.hpp>
|
#include <boost/asio/thread_pool.hpp>
|
||||||
#include <boost/uuid/uuid.hpp>
|
#include <boost/uuid/uuid.hpp>
|
||||||
|
|
||||||
|
#include <atomic>
|
||||||
#include <chrono>
|
#include <chrono>
|
||||||
|
#include <latch>
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <string>
|
#include <string>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
@@ -65,11 +68,14 @@ class ClusterCommunicationService : public ClusterCommunicationServiceInterface
|
|||||||
std::chrono::steady_clock::duration readInterval_;
|
std::chrono::steady_clock::duration readInterval_;
|
||||||
std::chrono::steady_clock::duration writeInterval_;
|
std::chrono::steady_clock::duration writeInterval_;
|
||||||
|
|
||||||
|
boost::asio::cancellation_signal cancelSignal_;
|
||||||
|
std::latch finishedCountdown_;
|
||||||
|
std::atomic_bool running_ = false;
|
||||||
|
bool stopped_ = false;
|
||||||
|
|
||||||
ClioNode selfData_;
|
ClioNode selfData_;
|
||||||
std::vector<ClioNode> otherNodesData_;
|
std::vector<ClioNode> otherNodesData_;
|
||||||
|
|
||||||
bool stopped_ = false;
|
|
||||||
|
|
||||||
public:
|
public:
|
||||||
static constexpr std::chrono::milliseconds kDEFAULT_READ_INTERVAL{2100};
|
static constexpr std::chrono::milliseconds kDEFAULT_READ_INTERVAL{2100};
|
||||||
static constexpr std::chrono::milliseconds kDEFAULT_WRITE_INTERVAL{1200};
|
static constexpr std::chrono::milliseconds kDEFAULT_WRITE_INTERVAL{1200};
|
||||||
|
|||||||
@@ -68,7 +68,6 @@ struct Amendments {
|
|||||||
|
|
||||||
/** @cond */
|
/** @cond */
|
||||||
// NOLINTBEGIN(readability-identifier-naming)
|
// NOLINTBEGIN(readability-identifier-naming)
|
||||||
REGISTER(OwnerPaysFee);
|
|
||||||
REGISTER(Flow);
|
REGISTER(Flow);
|
||||||
REGISTER(FlowCross);
|
REGISTER(FlowCross);
|
||||||
REGISTER(fix1513);
|
REGISTER(fix1513);
|
||||||
@@ -145,6 +144,9 @@ struct Amendments {
|
|||||||
REGISTER(TokenEscrow);
|
REGISTER(TokenEscrow);
|
||||||
REGISTER(fixAMMv1_3);
|
REGISTER(fixAMMv1_3);
|
||||||
REGISTER(fixEnforceNFTokenTrustlineV2);
|
REGISTER(fixEnforceNFTokenTrustlineV2);
|
||||||
|
REGISTER(fixAMMClawbackRounding);
|
||||||
|
REGISTER(fixMPTDeliveredAmount);
|
||||||
|
REGISTER(fixPriceOracleOrder);
|
||||||
|
|
||||||
// Obsolete but supported by libxrpl
|
// Obsolete but supported by libxrpl
|
||||||
REGISTER(CryptoConditionsSuite);
|
REGISTER(CryptoConditionsSuite);
|
||||||
@@ -153,6 +155,7 @@ struct Amendments {
|
|||||||
REGISTER(fixNFTokenNegOffer);
|
REGISTER(fixNFTokenNegOffer);
|
||||||
|
|
||||||
// Retired amendments
|
// Retired amendments
|
||||||
|
REGISTER(OwnerPaysFee); // Removed in xrpl 2.6.0 (https://github.com/XRPLF/rippled/pull/5435)
|
||||||
REGISTER(MultiSign);
|
REGISTER(MultiSign);
|
||||||
REGISTER(TrustSetAuth);
|
REGISTER(TrustSetAuth);
|
||||||
REGISTER(FeeEscalation);
|
REGISTER(FeeEscalation);
|
||||||
|
|||||||
@@ -43,11 +43,6 @@
|
|||||||
#include <utility>
|
#include <utility>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
|
||||||
// local to compilation unit loggers
|
|
||||||
namespace {
|
|
||||||
util::Logger gLog{"Backend"};
|
|
||||||
} // namespace
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
  * @brief This namespace implements the data access layer and related components.
  *
@@ -58,10 +53,10 @@ namespace data {
 bool
 BackendInterface::finishWrites(std::uint32_t const ledgerSequence)
 {
-    LOG(gLog.debug()) << "Want finish writes for " << ledgerSequence;
+    LOG(log_.debug()) << "Want finish writes for " << ledgerSequence;
     auto commitRes = doFinishWrites();
     if (commitRes) {
-        LOG(gLog.debug()) << "Successfully committed. Updating range now to " << ledgerSequence;
+        LOG(log_.debug()) << "Successfully committed. Updating range now to " << ledgerSequence;
         updateRange(ledgerSequence);
     }
     return commitRes;
@@ -89,15 +84,15 @@ BackendInterface::fetchLedgerObject(
 {
     auto obj = cache_.get().get(key, sequence);
     if (obj) {
-        LOG(gLog.trace()) << "Cache hit - " << ripple::strHex(key);
+        LOG(log_.trace()) << "Cache hit - " << ripple::strHex(key);
         return obj;
     }

     auto dbObj = doFetchLedgerObject(key, sequence, yield);
     if (!dbObj) {
-        LOG(gLog.trace()) << "Missed cache and missed in db";
+        LOG(log_.trace()) << "Missed cache and missed in db";
     } else {
-        LOG(gLog.trace()) << "Missed cache but found in db";
+        LOG(log_.trace()) << "Missed cache but found in db";
     }
     return dbObj;
 }
@@ -111,7 +106,7 @@ BackendInterface::fetchLedgerObjectSeq(
 {
     auto seq = doFetchLedgerObjectSeq(key, sequence, yield);
     if (!seq)
-        LOG(gLog.trace()) << "Missed in db";
+        LOG(log_.trace()) << "Missed in db";
     return seq;
 }

@@ -133,7 +128,7 @@ BackendInterface::fetchLedgerObjects(
             misses.push_back(keys[i]);
         }
     }
-    LOG(gLog.trace()) << "Cache hits = " << keys.size() - misses.size() << " - cache misses = " << misses.size();
+    LOG(log_.trace()) << "Cache hits = " << keys.size() - misses.size() << " - cache misses = " << misses.size();

     if (!misses.empty()) {
         auto objs = doFetchLedgerObjects(misses, sequence, yield);
@@ -158,9 +153,9 @@ BackendInterface::fetchSuccessorKey(
 {
     auto succ = cache_.get().getSuccessor(key, ledgerSequence);
     if (succ) {
-        LOG(gLog.trace()) << "Cache hit - " << ripple::strHex(key);
+        LOG(log_.trace()) << "Cache hit - " << ripple::strHex(key);
     } else {
-        LOG(gLog.trace()) << "Cache miss - " << ripple::strHex(key);
+        LOG(log_.trace()) << "Cache miss - " << ripple::strHex(key);
     }
     return succ ? succ->key : doFetchSuccessorKey(key, ledgerSequence, yield);
 }
@@ -210,7 +205,7 @@ BackendInterface::fetchBookOffers(
             numSucc++;
             succMillis += getMillis(mid2 - mid1);
             if (!offerDir || offerDir->key >= bookEnd) {
-                LOG(gLog.trace()) << "offerDir.has_value() " << offerDir.has_value() << " breaking";
+                LOG(log_.trace()) << "offerDir.has_value() " << offerDir.has_value() << " breaking";
                 break;
             }
             uTipIndex = offerDir->key;
@@ -223,7 +218,7 @@ BackendInterface::fetchBookOffers(
             keys.insert(keys.end(), indexes.begin(), indexes.end());
             auto next = sle.getFieldU64(ripple::sfIndexNext);
             if (next == 0u) {
-                LOG(gLog.trace()) << "Next is empty. breaking";
+                LOG(log_.trace()) << "Next is empty. breaking";
                 break;
             }
             auto nextKey = ripple::keylet::page(uTipIndex, next);
@@ -238,13 +233,13 @@ BackendInterface::fetchBookOffers(
     auto mid = std::chrono::system_clock::now();
     auto objs = fetchLedgerObjects(keys, ledgerSequence, yield);
     for (size_t i = 0; i < keys.size() && i < limit; ++i) {
-        LOG(gLog.trace()) << "Key = " << ripple::strHex(keys[i]) << " blob = " << ripple::strHex(objs[i])
+        LOG(log_.trace()) << "Key = " << ripple::strHex(keys[i]) << " blob = " << ripple::strHex(objs[i])
                           << " ledgerSequence = " << ledgerSequence;
         ASSERT(!objs[i].empty(), "Ledger object can't be empty");
         page.offers.push_back({keys[i], objs[i]});
     }
     auto end = std::chrono::system_clock::now();
-    LOG(gLog.debug()) << "Fetching " << std::to_string(keys.size()) << " offers took "
+    LOG(log_.debug()) << "Fetching " << std::to_string(keys.size()) << " offers took "
                       << std::to_string(getMillis(mid - begin)) << " milliseconds. Fetching next dir took "
                       << std::to_string(succMillis) << " milliseconds. Fetched next dir " << std::to_string(numSucc)
                       << " times"
@@ -275,14 +270,17 @@ BackendInterface::updateRange(uint32_t newMax)
 {
     std::scoped_lock const lck(rngMtx_);

-    ASSERT(
-        !range_ || newMax >= range_->maxSequence,
-        "Range shouldn't exist yet or newMax should be greater. newMax = {}, range->maxSequence = {}",
-        newMax,
-        range_->maxSequence
-    );
+    if (range_.has_value() && newMax < range_->maxSequence) {
+        ASSERT(
+            false,
+            "Range shouldn't exist yet or newMax should be at least range->maxSequence. newMax = {}, "
+            "range->maxSequence = {}",
+            newMax,
+            range_->maxSequence
+        );
+    }

-    if (!range_) {
+    if (!range_.has_value()) {
         range_ = {.minSequence = newMax, .maxSequence = newMax};
     } else {
         range_->maxSequence = newMax;
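The updateRange rewrite above keeps the same invariant (the range either does not exist yet or only moves forward) but now dereferences range_ only inside a branch that has already checked has_value(). A minimal standalone sketch of why that ordering can matter, assuming an ASSERT-style macro that evaluates its message arguments eagerly (Clio's actual ASSERT may behave differently; this is illustrative only):

#include <cstdio>
#include <cstdlib>
#include <optional>

// Hypothetical macro: message arguments are evaluated even when `cond` holds.
#define EAGER_ASSERT(cond, a, b)                               \
    do {                                                       \
        unsigned const argA = (a), argB = (b);                 \
        if (!(cond)) {                                         \
            std::printf("assert failed: %u %u\n", argA, argB); \
            std::abort();                                      \
        }                                                      \
    } while (0)

int main() {
    std::optional<unsigned> range;  // empty, like range_ before the first write
    unsigned const newMax = 5;

    // Old shape: `*range` sits in the argument list, so an eager macro would
    // read the empty optional here (undefined behaviour):
    // EAGER_ASSERT(!range || newMax >= *range, newMax, *range);

    // New shape: the optional is only dereferenced once has_value() is known.
    if (range.has_value() && newMax < *range) {
        EAGER_ASSERT(false, newMax, *range);
    }
    std::puts("ok");
}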
@@ -338,13 +336,13 @@ BackendInterface::fetchLedgerPage(
         if (!objects[i].empty()) {
             page.objects.push_back({keys[i], std::move(objects[i])});
         } else if (!outOfOrder) {
-            LOG(gLog.error()) << "Deleted or non-existent object in successor table. key = " << ripple::strHex(keys[i])
+            LOG(log_.error()) << "Deleted or non-existent object in successor table. key = " << ripple::strHex(keys[i])
                               << " - seq = " << ledgerSequence;
             std::stringstream msg;
             for (size_t j = 0; j < objects.size(); ++j) {
                 msg << " - " << ripple::strHex(keys[j]);
             }
-            LOG(gLog.error()) << msg.str();
+            LOG(log_.error()) << msg.str();

             if (corruptionDetector_.has_value())
                 corruptionDetector_->onCorruptionDetected();
@@ -365,7 +363,7 @@ BackendInterface::fetchFees(std::uint32_t const seq, boost::asio::yield_context
     auto bytes = fetchLedgerObject(key, seq, yield);

     if (!bytes) {
-        LOG(gLog.error()) << "Could not find fees";
+        LOG(log_.error()) << "Could not find fees";
         return {};
     }

@@ -23,6 +23,7 @@
 #include "data/LedgerCacheInterface.hpp"
 #include "data/Types.hpp"
 #include "etl/CorruptionDetector.hpp"
+#include "util/Spawn.hpp"
 #include "util/log/Logger.hpp"

 #include <boost/asio/executor_work_guard.hpp>
@@ -108,14 +109,12 @@ synchronous(FnType&& func)
     using R = typename boost::result_of<FnType(boost::asio::yield_context)>::type;
     if constexpr (!std::is_same_v<R, void>) {
         R res;
-        boost::asio::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func, &res](auto yield) {
-            res = func(yield);
-        });
+        util::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func, &res](auto yield) { res = func(yield); });

         ctx.run();
         return res;
     } else {
-        boost::asio::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func](auto yield) { func(yield); });
+        util::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func](auto yield) { func(yield); });
         ctx.run();
     }
 }
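util::spawn replaces direct calls to boost::asio::spawn throughout this change set, but its body is not part of the diff. A plausible minimal sketch, assuming it simply forwards to boost::asio::spawn with a completion handler that rethrows escaped exceptions instead of silently dropping them (Boost >= 1.80 signature; names and behaviour here are an assumption, not Clio's actual implementation):

#include <boost/asio/spawn.hpp>
#include <exception>
#include <utility>

namespace util {

// Sketch only: forward to boost::asio::spawn and surface coroutine exceptions.
template <typename Executor, typename F>
void
spawn(Executor&& executor, F&& fn)
{
    boost::asio::spawn(
        std::forward<Executor>(executor),
        std::forward<F>(fn),
        [](std::exception_ptr e) {
            if (e)
                std::rethrow_exception(e);
        }
    );
}

}  // namespace util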
@@ -139,6 +138,7 @@ synchronousAndRetryOnTimeout(FnType&& func)
  */
 class BackendInterface {
 protected:
+    util::Logger log_{"Backend"};
     mutable std::shared_mutex rngMtx_;
     std::optional<LedgerRange> range_;
     std::reference_wrapper<LedgerCacheInterface> cache_;

@@ -225,8 +225,11 @@ public:
     {
         waitForWritesToFinish();

-        if (!range_) {
-            executor_.writeSync(schema_->updateLedgerRange, ledgerSequence_, false, ledgerSequence_);
+        // !range_.has_value() means the table 'ledger_range' is not populated; this would be the first write to the
+        // table. In this case, insert both the min_sequence and max_sequence rows into the table.
+        if (!range_.has_value()) {
+            executor_.writeSync(schema_->insertLedgerRange, false, ledgerSequence_);
+            executor_.writeSync(schema_->insertLedgerRange, true, ledgerSequence_);
         }

         if (not executeSyncUpdate(schema_->updateLedgerRange.bind(ledgerSequence_, true, ledgerSequence_ - 1))) {
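The first-write branch above assumes the ledger_range table holds exactly two rows keyed by is_latest (false = min_sequence, true = max_sequence). Later writes advance only the is_latest = true row through a conditional update (the IF sequence = ? lightweight transaction prepared further down), so two writers cannot both claim the same sequence. A toy in-memory stand-in for that conditional update (illustrative only, not Clio code):

#include <cstdint>
#include <iostream>
#include <optional>

struct LedgerRangeRow {
    std::optional<std::uint32_t> maxSequence;  // the is_latest = true row

    // Mirrors: UPDATE ledger_range SET sequence = newSeq
    //          WHERE is_latest = true IF sequence = newSeq - 1
    bool
    tryAdvance(std::uint32_t newSeq)
    {
        if (!maxSequence.has_value() || *maxSequence != newSeq - 1)
            return false;  // condition failed: another writer already advanced
        maxSequence = newSeq;
        return true;
    }
};

int main() {
    LedgerRangeRow row{.maxSequence = 41};
    std::cout << row.tryAdvance(42) << '\n';  // 1: 41 -> 42
    std::cout << row.tryAdvance(42) << '\n';  // 0: a second writer loses
}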
@@ -513,80 +516,14 @@ public:
         boost::asio::yield_context yield
     ) const override
     {
-        NFTsAndCursor ret;
-
-        Statement const idQueryStatement = [&taxon, &issuer, &cursorIn, &limit, this]() {
-            if (taxon.has_value()) {
-                auto r = schema_->selectNFTIDsByIssuerTaxon.bind(issuer);
-                r.bindAt(1, *taxon);
-                r.bindAt(2, cursorIn.value_or(ripple::uint256(0)));
-                r.bindAt(3, Limit{limit});
-                return r;
-            }
-
-            auto r = schema_->selectNFTIDsByIssuer.bind(issuer);
-            r.bindAt(
-                1,
-                std::make_tuple(
-                    cursorIn.has_value() ? ripple::nft::toUInt32(ripple::nft::getTaxon(*cursorIn)) : 0,
-                    cursorIn.value_or(ripple::uint256(0))
-                )
-            );
-            r.bindAt(2, Limit{limit});
-            return r;
-        }();
-
-        // Query for all the NFTs issued by the account, potentially filtered by the taxon
-        auto const res = executor_.read(yield, idQueryStatement);
-
-        auto const& idQueryResults = res.value();
-        if (not idQueryResults.hasRows()) {
-            LOG(log_.debug()) << "No rows returned";
-            return {};
-        }
-
         std::vector<ripple::uint256> nftIDs;
-        for (auto const [nftID] : extract<ripple::uint256>(idQueryResults))
-            nftIDs.push_back(nftID);
-
-        if (nftIDs.empty())
-            return ret;
-
-        if (nftIDs.size() == limit)
-            ret.cursor = nftIDs.back();
-
-        std::vector<Statement> selectNFTStatements;
-        selectNFTStatements.reserve(nftIDs.size());
-
-        std::transform(
-            std::cbegin(nftIDs), std::cend(nftIDs), std::back_inserter(selectNFTStatements), [&](auto const& nftID) {
-                return schema_->selectNFT.bind(nftID, ledgerSequence);
-            }
-        );
-
-        auto const nftInfos = executor_.readEach(yield, selectNFTStatements);
-
-        std::vector<Statement> selectNFTURIStatements;
-        selectNFTURIStatements.reserve(nftIDs.size());
-
-        std::transform(
-            std::cbegin(nftIDs), std::cend(nftIDs), std::back_inserter(selectNFTURIStatements), [&](auto const& nftID) {
-                return schema_->selectNFTURI.bind(nftID, ledgerSequence);
-            }
-        );
-
-        auto const nftUris = executor_.readEach(yield, selectNFTURIStatements);
-
-        for (auto i = 0u; i < nftIDs.size(); i++) {
-            if (auto const maybeRow = nftInfos[i].template get<uint32_t, ripple::AccountID, bool>(); maybeRow) {
-                auto [seq, owner, isBurned] = *maybeRow;
-                NFT nft(nftIDs[i], seq, owner, isBurned);
-                if (auto const maybeUri = nftUris[i].template get<ripple::Blob>(); maybeUri)
-                    nft.uri = *maybeUri;
-                ret.nfts.push_back(nft);
-            }
+        if (taxon.has_value()) {
+            nftIDs = fetchNFTIDsByTaxon(issuer, *taxon, limit, cursorIn, yield);
+        } else {
+            nftIDs = fetchNFTIDsWithoutTaxon(issuer, limit, cursorIn, yield);
         }
-        return ret;
+
+        return populateNFTsAndCreateCursor(nftIDs, ledgerSequence, limit, yield);
     }

     MPTHoldersAndCursor
@@ -803,8 +740,9 @@ public:
         std::optional<ripple::AccountID> lastItem;

         while (liveAccounts.size() < number) {
-            Statement const statement = lastItem ? schema_->selectAccountFromToken.bind(*lastItem, Limit{pageSize})
-                                                 : schema_->selectAccountFromBeginning.bind(Limit{pageSize});
+            Statement const statement = lastItem
+                ? schema_->selectAccountFromTokenScylla->bind(*lastItem, Limit{pageSize})
+                : schema_->selectAccountFromBeginningScylla->bind(Limit{pageSize});

             auto const res = executor_.read(yield, statement);
             if (res) {
@@ -1116,6 +1054,139 @@ private:

         return true;
     }

+    std::vector<ripple::uint256>
+    fetchNFTIDsByTaxon(
+        ripple::AccountID const& issuer,
+        std::uint32_t const taxon,
+        std::uint32_t const limit,
+        std::optional<ripple::uint256> const& cursorIn,
+        boost::asio::yield_context yield
+    ) const
+    {
+        std::vector<ripple::uint256> nftIDs;
+        Statement statement = schema_->selectNFTIDsByIssuerTaxon.bind(issuer);
+        statement.bindAt(1, taxon);
+        statement.bindAt(2, cursorIn.value_or(ripple::uint256(0)));
+        statement.bindAt(3, Limit{limit});
+
+        auto const res = executor_.read(yield, statement);
+        if (res && res.value().hasRows()) {
+            for (auto const [nftID] : extract<ripple::uint256>(res.value()))
+                nftIDs.push_back(nftID);
+        }
+        return nftIDs;
+    }
+
+    std::vector<ripple::uint256>
+    fetchNFTIDsWithoutTaxon(
+        ripple::AccountID const& issuer,
+        std::uint32_t const limit,
+        std::optional<ripple::uint256> const& cursorIn,
+        boost::asio::yield_context yield
+    ) const
+    {
+        std::vector<ripple::uint256> nftIDs;
+        if (settingsProvider_.getSettings().provider == "aws_keyspace") {
+            // --- Amazon Keyspaces Workflow ---
+            auto const startTaxon = cursorIn.has_value() ? ripple::nft::toUInt32(ripple::nft::getTaxon(*cursorIn)) : 0;
+            auto const startTokenID = cursorIn.value_or(ripple::uint256(0));
+
+            Statement firstQuery = schema_->selectNFTIDsByIssuerTaxon.bind(issuer);
+            firstQuery.bindAt(1, startTaxon);
+            firstQuery.bindAt(2, startTokenID);
+            firstQuery.bindAt(3, Limit{limit});
+
+            auto const firstRes = executor_.read(yield, firstQuery);
+            if (firstRes) {
+                for (auto const [nftID] : extract<ripple::uint256>(firstRes.value()))
+                    nftIDs.push_back(nftID);
+            }
+
+            if (nftIDs.size() < limit) {
+                auto const remainingLimit = limit - nftIDs.size();
+                Statement secondQuery = schema_->selectNFTsAfterTaxonKeyspaces->bind(issuer);
+                secondQuery.bindAt(1, startTaxon);
+                secondQuery.bindAt(2, Limit{remainingLimit});
+
+                auto const secondRes = executor_.read(yield, secondQuery);
+                if (secondRes) {
+                    for (auto const [nftID] : extract<ripple::uint256>(secondRes.value()))
+                        nftIDs.push_back(nftID);
+                }
+            }
+        } else if (settingsProvider_.getSettings().provider == "scylladb") {
+            auto r = schema_->selectNFTsByIssuerScylla->bind(issuer);
+            r.bindAt(
+                1,
+                std::make_tuple(
+                    cursorIn.has_value() ? ripple::nft::toUInt32(ripple::nft::getTaxon(*cursorIn)) : 0,
+                    cursorIn.value_or(ripple::uint256(0))
+                )
+            );
+            r.bindAt(2, Limit{limit});
+
+            auto const res = executor_.read(yield, r);
+            if (res && res.value().hasRows()) {
+                for (auto const [nftID] : extract<ripple::uint256>(res.value()))
+                    nftIDs.push_back(nftID);
+            }
+        }
+        return nftIDs;
+    }
+
+    /**
+     * @brief Takes a list of NFT IDs, fetches their full data, and assembles the final result with a cursor.
+     */
+    NFTsAndCursor
+    populateNFTsAndCreateCursor(
+        std::vector<ripple::uint256> const& nftIDs,
+        std::uint32_t const ledgerSequence,
+        std::uint32_t const limit,
+        boost::asio::yield_context yield
+    ) const
+    {
+        if (nftIDs.empty()) {
+            LOG(log_.debug()) << "No rows returned";
+            return {};
+        }
+
+        NFTsAndCursor ret;
+        if (nftIDs.size() == limit)
+            ret.cursor = nftIDs.back();
+
+        // Prepare and execute queries to fetch NFT info and URIs in parallel.
+        std::vector<Statement> selectNFTStatements;
+        selectNFTStatements.reserve(nftIDs.size());
+        std::transform(
+            std::cbegin(nftIDs), std::cend(nftIDs), std::back_inserter(selectNFTStatements), [&](auto const& nftID) {
+                return schema_->selectNFT.bind(nftID, ledgerSequence);
+            }
+        );
+
+        std::vector<Statement> selectNFTURIStatements;
+        selectNFTURIStatements.reserve(nftIDs.size());
+        std::transform(
+            std::cbegin(nftIDs), std::cend(nftIDs), std::back_inserter(selectNFTURIStatements), [&](auto const& nftID) {
+                return schema_->selectNFTURI.bind(nftID, ledgerSequence);
+            }
+        );
+
+        auto const nftInfos = executor_.readEach(yield, selectNFTStatements);
+        auto const nftUris = executor_.readEach(yield, selectNFTURIStatements);
+
+        // Combine the results into final NFT objects.
+        for (auto i = 0u; i < nftIDs.size(); ++i) {
+            if (auto const maybeRow = nftInfos[i].template get<uint32_t, ripple::AccountID, bool>(); maybeRow) {
+                auto [seq, owner, isBurned] = *maybeRow;
+                NFT nft(nftIDs[i], seq, owner, isBurned);
+                if (auto const maybeUri = nftUris[i].template get<ripple::Blob>(); maybeUri)
+                    nft.uri = *maybeUri;
+                ret.nfts.push_back(nft);
+            }
+        }
+        return ret;
+    }
 };

 using CassandraBackend = BasicCassandraBackend<SettingsProvider, impl::DefaultExecutionStrategy<>>;
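All three helpers implement keyset pagination: the cursor is the last (taxon, token_id) returned, each query resumes strictly after it, and a full page (nftIDs.size() == limit) signals that a cursor should be handed back to the caller. A standalone sketch of the same loop over plain integers (illustrative only, not Clio code):

#include <cstdint>
#include <iostream>
#include <optional>
#include <vector>

// Returns up to `limit` ids strictly greater than the cursor (ids are sorted).
std::vector<std::uint64_t>
fetchPage(std::vector<std::uint64_t> const& ids, std::optional<std::uint64_t> cursor, std::size_t limit)
{
    std::vector<std::uint64_t> page;
    for (auto id : ids) {
        if (cursor && id <= *cursor)
            continue;  // resume after the cursor, like (taxon, token_id) > ?
        page.push_back(id);
        if (page.size() == limit)
            break;
    }
    return page;
}

int main() {
    std::vector<std::uint64_t> const ids{1, 2, 3, 4, 5, 6, 7};
    std::optional<std::uint64_t> cursor;
    for (;;) {
        auto const page = fetchPage(ids, cursor, 3);
        for (auto id : page)
            std::cout << id << ' ';
        std::cout << '\n';
        if (page.size() < 3)
            break;             // short page: no more data, no cursor returned
        cursor = page.back();  // same rule as ret.cursor = nftIDs.back()
    }
}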
@@ -28,6 +28,7 @@

 #include <functional>
 #include <memory>
+#include <optional>
 #include <string>
 #include <string_view>
 #include <vector>
@@ -347,6 +348,86 @@ public:
     Statements(SettingsProviderType const& settingsProvider, Handle const& handle)
         : settingsProvider_{settingsProvider}, handle_{std::cref(handle)}
     {
+        // initialize scylladb supported queries
+        if (settingsProvider_.get().getSettings().provider == "scylladb") {
+            selectAccountFromBeginningScylla = [this]() {
+                return handle_.get().prepare(
+                    fmt::format(
+                        R"(
+                        SELECT account
+                          FROM {}
+                         WHERE token(account) > 0
+                           PER PARTITION LIMIT 1
+                         LIMIT ?
+                        )",
+                        qualifiedTableName(settingsProvider_.get(), "account_tx")
+                    )
+                );
+            }();
+
+            selectAccountFromTokenScylla = [this]() {
+                return handle_.get().prepare(
+                    fmt::format(
+                        R"(
+                        SELECT account
+                          FROM {}
+                         WHERE token(account) > token(?)
+                           PER PARTITION LIMIT 1
+                         LIMIT ?
+                        )",
+                        qualifiedTableName(settingsProvider_.get(), "account_tx")
+                    )
+                );
+            }();
+
+            selectNFTsByIssuerScylla = [this]() {
+                return handle_.get().prepare(
+                    fmt::format(
+                        R"(
+                        SELECT token_id
+                          FROM {}
+                         WHERE issuer = ?
+                           AND (taxon, token_id) > ?
+                         ORDER BY taxon ASC, token_id ASC
+                         LIMIT ?
+                        )",
+                        qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2")
+                    )
+                );
+            }();
+
+            updateLedgerRange = [this]() {
+                return handle_.get().prepare(
+                    fmt::format(
+                        R"(
+                        UPDATE {}
+                           SET sequence = ?
+                         WHERE is_latest = ?
+                            IF sequence IN (?, null)
+                        )",
+                        qualifiedTableName(settingsProvider_.get(), "ledger_range")
+                    )
+                );
+            }();
+
+            // AWS_keyspace supported queries
+        } else if (settingsProvider_.get().getSettings().provider == "aws_keyspace") {
+            selectNFTsAfterTaxonKeyspaces = [this]() {
+                return handle_.get().prepare(
+                    fmt::format(
+                        R"(
+                        SELECT token_id
+                          FROM {}
+                         WHERE issuer = ?
+                           AND taxon > ?
+                         ORDER BY taxon ASC, token_id ASC
+                         LIMIT ?
+                        )",
+                        qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2")
+                    )
+                );
+            }();
+        }
     }

     //
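PER PARTITION LIMIT 1 (one row per account partition) is why these statements are prepared only for the scylladb provider: Amazon Keyspaces does not support that clause, which is also why the Keyspaces branch gets a different NFT query instead. A standalone sketch of preparing and binding such a query with the raw DataStax C/C++ driver (Clio's Handle and PreparedStatement wrap these calls; the keyspace and table names here are illustrative assumptions):

#include <cassandra.h>

// Prepare the Scylla-only paging query and bind its LIMIT placeholder.
CassStatement*
prepareAccountScan(CassSession* session)
{
    char const* query =
        "SELECT account FROM clio.account_tx "
        "WHERE token(account) > 0 PER PARTITION LIMIT 1 LIMIT ?";

    CassFuture* future = cass_session_prepare(session, query);
    if (cass_future_error_code(future) != CASS_OK) {
        cass_future_free(future);
        return nullptr;  // e.g. Keyspaces rejecting PER PARTITION LIMIT
    }

    CassPrepared const* prepared = cass_future_get_prepared(future);
    cass_future_free(future);

    CassStatement* statement = cass_prepared_bind(prepared);
    cass_statement_bind_int32(statement, 0, 100);  // LIMIT ?
    cass_prepared_free(prepared);  // statement keeps its own reference
    return statement;  // execute via cass_session_execute(), then free
}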
@@ -526,6 +607,17 @@ public:
     // Update (and "delete") queries
     //

+    PreparedStatement insertLedgerRange = [this]() {
+        return handle_.get().prepare(
+            fmt::format(
+                R"(
+                INSERT INTO {} (is_latest, sequence) VALUES (?, ?) IF NOT EXISTS
+                )",
+                qualifiedTableName(settingsProvider_.get(), "ledger_range")
+            )
+        );
+    }();
+
     PreparedStatement updateLedgerRange = [this]() {
         return handle_.get().prepare(
             fmt::format(
@@ -533,7 +625,7 @@ public:
                 UPDATE {}
                    SET sequence = ?
                  WHERE is_latest = ?
-                    IF sequence IN (?, null)
+                    IF sequence = ?
                 )",
                 qualifiedTableName(settingsProvider_.get(), "ledger_range")
             )
@@ -654,6 +746,10 @@ public:
         );
     }();

+    /*
+    Currently, these two SELECT statements are not used.
+    If we ever use them, we will need to change the PER PARTITION LIMIT to support Keyspaces.
+
     PreparedStatement selectLedgerPageKeys = [this]() {
         return handle_.get().prepare(
             fmt::format(
@@ -687,6 +783,7 @@ public:
             )
         );
     }();
+    */

     PreparedStatement getToken = [this]() {
         return handle_.get().prepare(
@@ -717,36 +814,6 @@ public:
         );
     }();

-    PreparedStatement selectAccountFromBeginning = [this]() {
-        return handle_.get().prepare(
-            fmt::format(
-                R"(
-                SELECT account
-                  FROM {}
-                 WHERE token(account) > 0
-                   PER PARTITION LIMIT 1
-                 LIMIT ?
-                )",
-                qualifiedTableName(settingsProvider_.get(), "account_tx")
-            )
-        );
-    }();
-
-    PreparedStatement selectAccountFromToken = [this]() {
-        return handle_.get().prepare(
-            fmt::format(
-                R"(
-                SELECT account
-                  FROM {}
-                 WHERE token(account) > token(?)
-                   PER PARTITION LIMIT 1
-                 LIMIT ?
-                )",
-                qualifiedTableName(settingsProvider_.get(), "account_tx")
-            )
-        );
-    }();
-
     PreparedStatement selectAccountTxForward = [this]() {
         return handle_.get().prepare(
             fmt::format(
@@ -827,22 +894,6 @@ public:
         );
     }();

-    PreparedStatement selectNFTIDsByIssuer = [this]() {
-        return handle_.get().prepare(
-            fmt::format(
-                R"(
-                SELECT token_id
-                  FROM {}
-                 WHERE issuer = ?
-                   AND (taxon, token_id) > ?
-                 ORDER BY taxon ASC, token_id ASC
-                 LIMIT ?
-                )",
-                qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2")
-            )
-        );
-    }();
-
     PreparedStatement selectNFTIDsByIssuerTaxon = [this]() {
         return handle_.get().prepare(
             fmt::format(
@@ -953,6 +1004,15 @@ public:
             )
         );
     }();
+
+    // For ScyllaDB / Cassandra ONLY
+    std::optional<PreparedStatement> selectAccountFromBeginningScylla;
+    std::optional<PreparedStatement> selectAccountFromTokenScylla;
+    std::optional<PreparedStatement> selectNFTsByIssuerScylla;
+
+    // For AWS Keyspaces ONLY
+    // NOTE: AWS keyspace is not able to load cache with accounts
+    std::optional<PreparedStatement> selectNFTsAfterTaxonKeyspaces;
 };

 /**

@@ -97,6 +97,7 @@ SettingsProvider::parseSettings() const
     settings.coreConnectionsPerHost = config_.get<uint32_t>("core_connections_per_host");
     settings.queueSizeIO = config_.maybeValue<uint32_t>("queue_size_io");
     settings.writeBatchSize = config_.get<std::size_t>("write_batch_size");
+    settings.provider = config_.get<std::string>("provider");

     if (config_.getValueView("connect_timeout").hasValue()) {
         auto const connectTimeoutSecond = config_.get<uint32_t>("connect_timeout");
@@ -36,9 +36,18 @@ constexpr auto kBATCH_DELETER = [](CassBatch* ptr) { cass_batch_free(ptr); };

 namespace data::cassandra::impl {

-// TODO: Use an appropriate value instead of CASS_BATCH_TYPE_LOGGED for different use cases
+/*
+ * There are 2 main types of Cassandra statement batches:
+ * LOGGED: Ensures all updates in the batch succeed together, or none do.
+ *         Use this for critical, related changes (e.g., for the same user), but it is slower.
+ *
+ * UNLOGGED: For performance. Sends many separate updates in one network trip to be fast.
+ *           Use this for bulk-loading unrelated data, but know there's NO all-or-nothing guarantee.
+ *
+ * More info here: https://docs.datastax.com/en/developer/cpp-driver-dse/1.10/features/basics/batches/index.html
+ */
 Batch::Batch(std::vector<Statement> const& statements)
-    : ManagedObject{cass_batch_new(CASS_BATCH_TYPE_LOGGED), kBATCH_DELETER}
+    : ManagedObject{cass_batch_new(CASS_BATCH_TYPE_UNLOGGED), kBATCH_DELETER}
 {
     cass_batch_set_is_idempotent(*this, cass_true);

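Switching the constructor to CASS_BATCH_TYPE_UNLOGGED trades the atomicity of logged batches for throughput, which fits bulk ETL writes that touch unrelated partitions. A standalone driver-level sketch of the difference (only the type constant changes; illustrative, not Clio code):

#include <cassandra.h>

// Build a bulk-load batch: one network round trip, no cross-partition atomicity.
CassBatch*
makeBulkLoadBatch(CassStatement* first, CassStatement* second)
{
    CassBatch* batch = cass_batch_new(CASS_BATCH_TYPE_UNLOGGED);
    cass_batch_set_is_idempotent(batch, cass_true);  // safe to retry on timeout
    cass_batch_add_statement(batch, first);
    cass_batch_add_statement(batch, second);
    return batch;  // run with cass_session_execute_batch(), then cass_batch_free()
}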
@@ -60,6 +60,13 @@ Cluster::Cluster(Settings const& settings) : ManagedObject{cass_cluster_new(), k
     cass_cluster_set_connect_timeout(*this, settings.connectionTimeout.count());
     cass_cluster_set_request_timeout(*this, settings.requestTimeout.count());

+    // TODO: AWS keyspace reads should be local_one to save cost
+    if (settings.provider == "aws_keyspace") {
+        if (auto const rc = cass_cluster_set_consistency(*this, CASS_CONSISTENCY_LOCAL_QUORUM); rc != CASS_OK) {
+            throw std::runtime_error(fmt::format("Error setting cassandra consistency: {}", cass_error_desc(rc)));
+        }
+    }
+
     if (auto const rc = cass_cluster_set_core_connections_per_host(*this, settings.coreConnectionsPerHost);
         rc != CASS_OK) {
         throw std::runtime_error(fmt::format("Could not set core connections per host: {}", cass_error_desc(rc)));

@@ -45,6 +45,7 @@ struct Settings {
     static constexpr uint32_t kDEFAULT_MAX_WRITE_REQUESTS_OUTSTANDING = 10'000;
     static constexpr uint32_t kDEFAULT_MAX_READ_REQUESTS_OUTSTANDING = 100'000;
     static constexpr std::size_t kDEFAULT_BATCH_SIZE = 20;
+    static constexpr std::string kDEFAULT_PROVIDER = "cassandra";

     /**
      * @brief Represents the configuration of contact points for cassandra.
@@ -83,11 +84,14 @@ struct Settings {
     uint32_t maxReadRequestsOutstanding = kDEFAULT_MAX_READ_REQUESTS_OUTSTANDING;

     /** @brief The number of connection per host to always have active */
-    uint32_t coreConnectionsPerHost = 1u;
+    uint32_t coreConnectionsPerHost = 3u;

     /** @brief Size of batches when writing */
     std::size_t writeBatchSize = kDEFAULT_BATCH_SIZE;

+    /** @brief Provider to know if we are using scylladb or keyspace */
+    std::string provider = kDEFAULT_PROVIDER;
+
     /** @brief Size of the IO queue */
     std::optional<uint32_t> queueSizeIO = std::nullopt;  // NOLINT(readability-redundant-member-init)

@@ -30,8 +30,8 @@

 #include <boost/asio.hpp>
 #include <boost/asio/associated_executor.hpp>
+#include <boost/asio/executor_work_guard.hpp>
 #include <boost/asio/io_context.hpp>
-#include <boost/asio/io_service.hpp>
 #include <boost/asio/spawn.hpp>
 #include <boost/json/object.hpp>

@@ -79,7 +79,7 @@ class DefaultExecutionStrategy {
     std::condition_variable syncCv_;

     boost::asio::io_context ioc_;
-    std::optional<boost::asio::io_service::work> work_;
+    std::optional<boost::asio::executor_work_guard<boost::asio::io_context::executor_type>> work_;

     std::reference_wrapper<HandleType const> handle_;
     std::thread thread_;
@@ -107,7 +107,7 @@ public:
         : maxWriteRequestsOutstanding_{settings.maxWriteRequestsOutstanding}
         , maxReadRequestsOutstanding_{settings.maxReadRequestsOutstanding}
        , writeBatchSize_{settings.writeBatchSize}
-        , work_{ioc_}
+        , work_{boost::asio::make_work_guard(ioc_)}
         , handle_{std::cref(handle)}
         , thread_{[this]() { ioc_.run(); }}
         , counters_{std::move(counters)}
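io_service::work has long been deprecated in Boost.Asio; executor_work_guard is its modern replacement, and here it is what keeps the strategy's background ioc_.run() thread alive while no statements are in flight. A minimal standalone sketch of the pattern:

#include <boost/asio/executor_work_guard.hpp>
#include <boost/asio/io_context.hpp>
#include <boost/asio/post.hpp>
#include <iostream>
#include <thread>

int main() {
    boost::asio::io_context ioc;
    auto work = boost::asio::make_work_guard(ioc);  // keeps run() from returning
    std::thread runner{[&ioc] { ioc.run(); }};

    boost::asio::post(ioc, [] { std::cout << "task ran\n"; });

    work.reset();   // release the guard: run() exits once the queue drains
    runner.join();
}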
@@ -334,7 +334,7 @@ public:
         };

         auto res = boost::asio::async_compose<CompletionTokenType, void(ResultOrErrorType)>(
-            init, token, boost::asio::get_associated_executor(token)
+            std::move(init), token, boost::asio::get_associated_executor(token)
         );
         numReadRequestsOutstanding_ -= numStatements;

@@ -387,7 +387,7 @@ public:
         };

         auto res = boost::asio::async_compose<CompletionTokenType, void(ResultOrErrorType)>(
-            init, token, boost::asio::get_associated_executor(token)
+            std::move(init), token, boost::asio::get_associated_executor(token)
         );
         --numReadRequestsOutstanding_;

@@ -456,7 +456,7 @@ public:
         };

         boost::asio::async_compose<CompletionTokenType, void()>(
-            init, token, boost::asio::get_associated_executor(token)
+            std::move(init), token, boost::asio::get_associated_executor(token)
         );
         numReadRequestsOutstanding_ -= statements.size();

@@ -58,14 +58,14 @@ public:
     explicit Statement(std::string_view query, Args&&... args)
         : ManagedObject{cass_statement_new_n(query.data(), query.size(), sizeof...(args)), kDELETER}
     {
-        cass_statement_set_consistency(*this, CASS_CONSISTENCY_QUORUM);
+        cass_statement_set_consistency(*this, CASS_CONSISTENCY_LOCAL_QUORUM);
         cass_statement_set_is_idempotent(*this, cass_true);
         bind<Args...>(std::forward<Args>(args)...);
     }

     /* implicit */ Statement(CassStatement* ptr) : ManagedObject{ptr, kDELETER}
     {
-        cass_statement_set_consistency(*this, CASS_CONSISTENCY_QUORUM);
+        cass_statement_set_consistency(*this, CASS_CONSISTENCY_LOCAL_QUORUM);
         cass_statement_set_is_idempotent(*this, cass_true);
     }

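LOCAL_QUORUM restricts the quorum to the local datacenter. For a single-DC ScyllaDB or Cassandra cluster it behaves like plain QUORUM, and it is the write consistency Amazon Keyspaces accepts, so one constant can serve every provider; that is my reading of the change, since the diff itself does not state the motivation. Sketch of the per-statement call:

#include <cassandra.h>

// Create a statement with the new default consistency and idempotency flags.
CassStatement*
makeStatement(char const* query, size_t parameterCount)
{
    CassStatement* statement = cass_statement_new(query, parameterCount);
    cass_statement_set_consistency(statement, CASS_CONSISTENCY_LOCAL_QUORUM);
    cass_statement_set_is_idempotent(statement, cass_true);
    return statement;
}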
@@ -18,6 +18,7 @@
 //==============================================================================

 #include "data/DBHelpers.hpp"
+#include "util/Assert.hpp"

 #include <ripple/protocol/STBase.h>
 #include <ripple/protocol/STTx.h>
@@ -71,6 +72,9 @@ getMPTHolderFromTx(ripple::TxMeta const& txMeta, ripple::STTx const& sttx)
 std::optional<MPTHolderData>
 getMPTHolderFromObj(std::string const& key, std::string const& blob)
 {
+    // https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033-multi-purpose-tokens#2121-mptoken-ledger-identifier
+    ASSERT(key.size() == ripple::uint256::size(), "The size of the key is expected to fit uint256 exactly");
+
     ripple::STLedgerEntry const sle =
         ripple::STLedgerEntry(ripple::SerialIter{blob.data(), blob.size()}, ripple::uint256::fromVoid(key.data()));

@@ -23,6 +23,7 @@
 #include "feed/SubscriptionManagerInterface.hpp"
 #include "rpc/JS.hpp"
 #include "util/Retry.hpp"
+#include "util/Spawn.hpp"
 #include "util/log/Logger.hpp"
 #include "util/prometheus/Label.hpp"
 #include "util/prometheus/Prometheus.hpp"
@@ -157,7 +158,7 @@ SubscriptionSource::stop(boost::asio::yield_context yield)
 void
 SubscriptionSource::subscribe()
 {
-    boost::asio::spawn(strand_, [this, _ = boost::asio::make_work_guard(strand_)](boost::asio::yield_context yield) {
+    util::spawn(strand_, [this, _ = boost::asio::make_work_guard(strand_)](boost::asio::yield_context yield) {
         if (auto connection = wsConnectionBuilder_.connect(yield); connection) {
             wsConnection_ = std::move(connection).value();
         } else {

@@ -305,6 +305,8 @@ ETLService::startLoading(uint32_t seq)
 {
     ASSERT(not state_->isStrictReadonly, "This should only happen on writer nodes");
     taskMan_ = taskManagerProvider_->make(ctx_, *monitor_, seq, finishSequence_);
+
+    // FIXME: this legacy name "extractor_threads" is no longer accurate (we have coroutines now)
     taskMan_->run(config_.get().get<std::size_t>("extractor_threads"));
 }

@@ -26,6 +26,7 @@
 #include "etlng/SchedulerInterface.hpp"
 #include "etlng/impl/Monitor.hpp"
 #include "etlng/impl/TaskQueue.hpp"
+#include "util/Assert.hpp"
 #include "util/Constants.hpp"
 #include "util/LedgerUtils.hpp"
 #include "util/Profiler.hpp"
@@ -70,11 +71,10 @@ TaskManager::~TaskManager()
 void
 TaskManager::run(std::size_t numExtractors)
 {
-    LOG(log_.debug()) << "Starting task manager with " << numExtractors << " extractors...\n";
+    ASSERT(not running_, "TaskManager can only be started once");
+    running_ = true;

-    stop();
-    extractors_.clear();
-    loaders_.clear();
+    LOG(log_.debug()) << "Starting task manager with " << numExtractors << " extractors...\n";

     extractors_.reserve(numExtractors);
     for ([[maybe_unused]] auto _ : std::views::iota(0uz, numExtractors))
@@ -155,6 +155,11 @@ TaskManager::spawnLoader(TaskQueue& queue)
                 << " tps[" << txnCount / seconds << "], ops[" << objCount / seconds << "]";

             monitor_.get().notifySequenceLoaded(data->seq);
+        } else {
+            // TODO (https://github.com/XRPLF/clio/issues/1852) this is probably better done with a timeout (on
+            // coroutine) so that the thread itself is not blocked. for now this implies that the context
+            // (io_threads) needs at least 2 threads
+            queue.awaitTask();
         }
     }

@@ -174,11 +179,14 @@ TaskManager::wait()
 void
 TaskManager::stop()
 {
+    ASSERT(running_, "TaskManager is not running");
+
     for (auto& extractor : extractors_)
         extractor.abort();
     for (auto& loader : loaders_)
         loader.abort();

+    queue_.stop();
     wait();
 }

@@ -56,6 +56,7 @@ class TaskManager : public TaskManagerInterface {
     std::vector<util::async::AnyOperation<void>> extractors_;
     std::vector<util::async::AnyOperation<void>> loaders_;

+    std::atomic_bool running_ = false;
     util::Logger log_{"ETL"};

 public:
@@ -70,6 +71,13 @@ public:

     ~TaskManager() override;

+    TaskManager(TaskManager const&) = delete;
+    TaskManager(TaskManager&&) = delete;
+    TaskManager&
+    operator=(TaskManager const&) = delete;
+    TaskManager&
+    operator=(TaskManager&&) = delete;
+
     void
     run(std::size_t numExtractors) override;

@@ -20,10 +20,14 @@
 #pragma once

 #include "etlng/Models.hpp"
+#include "util/Assert.hpp"
 #include "util/Mutex.hpp"

+#include <atomic>
+#include <condition_variable>
 #include <cstddef>
 #include <cstdint>
+#include <mutex>
 #include <optional>
 #include <queue>
 #include <utility>
@@ -44,9 +48,6 @@ struct ReverseOrderComparator {
  * @note This may be a candidate for future improvements if performance proves to be poor (e.g. use a lock free queue)
  */
 class TaskQueue {
-    std::size_t limit_;
-    std::uint32_t increment_;
-
     struct Data {
         std::uint32_t expectedSequence;
         std::priority_queue<model::LedgerData, std::vector<model::LedgerData>, ReverseOrderComparator> forwardLoadQueue;
@@ -56,8 +57,13 @@ class TaskQueue {
         }
     };

+    std::size_t limit_;
+    std::uint32_t increment_;
     util::Mutex<Data> data_;

+    std::condition_variable cv_;
+    std::atomic_bool stopping_ = false;

 public:
     struct Settings {
         std::uint32_t startSeq = 0u;  // sequence to start from (for dequeue)
@@ -67,13 +73,19 @@ public:

     /**
      * @brief Construct a new priority queue
-     * @param limit The limit of items allowed simultaneously in the queue
+     * @param settings Settings for the queue, including starting sequence, increment value, and optional limit
+     * @note If limit is not set, the queue will have no limit
      */
     explicit TaskQueue(Settings settings)
         : limit_(settings.limit.value_or(0uz)), increment_(settings.increment), data_(settings.startSeq)
     {
     }

+    ~TaskQueue()
+    {
+        ASSERT(stopping_, "stop() must be called before destroying the TaskQueue");
+    }
+
     /**
      * @brief Enqueue a new item onto the queue if space is available
      * @note This function blocks until the item is attempted to be added to the queue
@@ -88,6 +100,8 @@ public:

         if (limit_ == 0uz or lock->forwardLoadQueue.size() < limit_) {
             lock->forwardLoadQueue.push(std::move(item));
+            cv_.notify_all();
+
             return true;
         }

@@ -125,6 +139,32 @@ public:
     {
         return data_.lock()->forwardLoadQueue.empty();
     }
+
+    /**
+     * @brief Awaits for the queue to become non-empty
+     * @note This function blocks until there is a task or the queue is being destroyed
+     */
+    void
+    awaitTask()
+    {
+        if (stopping_)
+            return;
+
+        auto lock = data_.lock<std::unique_lock>();
+        cv_.wait(lock, [&] { return stopping_ or not lock->forwardLoadQueue.empty(); });
+    }
+
+    /**
+     * @brief Notify the queue that it's no longer needed
+     * @note This must be called before the queue is destroyed
+     */
+    void
+    stop()
+    {
+        // unblock all waiters
+        stopping_ = true;
+        cv_.notify_all();
+    }
 };

 } // namespace etlng::impl
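The new awaitTask()/stop() pair is a standard condition-variable shutdown protocol: producers notify on push, stop() flips an atomic flag and wakes every waiter, and the destructor asserts the flag so a blocked loader can never outlive the queue. A standalone sketch of the same protocol (illustrative, simplified from TaskQueue):

#include <atomic>
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <queue>
#include <thread>

struct StoppableQueue {
    std::mutex m;
    std::condition_variable cv;
    std::queue<int> q;
    std::atomic_bool stopping{false};

    void push(int v) {
        {
            std::lock_guard lk(m);
            q.push(v);
        }
        cv.notify_all();  // same role as the cv_.notify_all() added to enqueue()
    }

    void awaitItem() {  // blocks until an item arrives or stop() is called
        std::unique_lock lk(m);
        cv.wait(lk, [&] { return stopping || !q.empty(); });
    }

    void stop() {  // must run before destruction, like TaskQueue::stop()
        stopping = true;
        cv.notify_all();
    }
};

int main() {
    StoppableQueue queue;
    std::thread consumer{[&] {
        queue.awaitItem();
        std::cout << "woke up\n";
    }};
    queue.push(42);  // queue.stop() would unblock the consumer the same way
    consumer.join();
    queue.stop();
}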
@@ -96,7 +96,7 @@ SubscriptionManager::forwardProposedTransaction(boost::json::object const& recei
 boost::json::object
 SubscriptionManager::subLedger(boost::asio::yield_context yield, SubscriberSharedPtr const& subscriber)
 {
-    return ledgerFeed_.sub(yield, backend_, subscriber);
+    return ledgerFeed_.sub(yield, backend_, subscriber, networkID_);
 }

 void
@@ -113,7 +113,7 @@ SubscriptionManager::pubLedger(
     std::uint32_t const txnCount
 )
 {
-    ledgerFeed_.pub(lgrInfo, fees, ledgerRange, txnCount);
+    ledgerFeed_.pub(lgrInfo, fees, ledgerRange, txnCount, networkID_);
 }

 void

@@ -44,7 +44,8 @@ LedgerFeed::makeLedgerPubMessage(
     ripple::LedgerHeader const& lgrInfo,
     ripple::Fees const& fees,
     std::string const& ledgerRange,
-    std::uint32_t const txnCount
+    uint32_t const txnCount,
+    uint32_t const networkID
 )
 {
     boost::json::object pubMsg;
@@ -57,6 +58,7 @@ LedgerFeed::makeLedgerPubMessage(
     pubMsg["reserve_inc"] = rpc::toBoostJson(fees.increment.jsonClipped());
     pubMsg["validated_ledgers"] = ledgerRange;
     pubMsg["txn_count"] = txnCount;
+    pubMsg["network_id"] = networkID;
     return pubMsg;
 }

@@ -64,7 +66,8 @@ boost::json::object
 LedgerFeed::sub(
     boost::asio::yield_context yield,
     std::shared_ptr<data::BackendInterface const> const& backend,
-    SubscriberSharedPtr const& subscriber
+    SubscriberSharedPtr const& subscriber,
+    uint32_t const networkID
 )
 {
     SingleFeedBase::sub(subscriber);
@@ -81,7 +84,7 @@ LedgerFeed::sub(

     auto const range = std::to_string(ledgerRange->minSequence) + "-" + std::to_string(ledgerRange->maxSequence);

-    auto pubMsg = makeLedgerPubMessage(*lgrInfo, *fees, range, 0);
+    auto pubMsg = makeLedgerPubMessage(*lgrInfo, *fees, range, 0, networkID);
     pubMsg.erase("txn_count");
     pubMsg.erase("type");

@@ -93,9 +96,10 @@ LedgerFeed::pub(
     ripple::LedgerHeader const& lgrInfo,
     ripple::Fees const& fees,
     std::string const& ledgerRange,
-    std::uint32_t const txnCount
+    uint32_t const txnCount,
+    uint32_t const networkID
 )
 {
-    SingleFeedBase::pub(boost::json::serialize(makeLedgerPubMessage(lgrInfo, fees, ledgerRange, txnCount)));
+    SingleFeedBase::pub(boost::json::serialize(makeLedgerPubMessage(lgrInfo, fees, ledgerRange, txnCount, networkID)));
 }
 } // namespace feed::impl
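After this change every ledgerClosed message carries the chain's network_id, letting subscribers that multiplex several networks tell the streams apart. A standalone sketch of the resulting message shape (field values are illustrative):

#include <boost/json.hpp>
#include <iostream>

int main() {
    boost::json::object pubMsg;
    pubMsg["type"] = "ledgerClosed";
    pubMsg["ledger_index"] = 2647935;
    pubMsg["fee_base"] = 10;
    pubMsg["reserve_base"] = 10000000;
    pubMsg["reserve_inc"] = 2000000;
    pubMsg["validated_ledgers"] = "2619127-2647935";
    pubMsg["txn_count"] = 0;
    pubMsg["network_id"] = 1;  // the field added by this change
    std::cout << boost::json::serialize(pubMsg) << '\n';
}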
|
|||||||
@@ -41,7 +41,8 @@ namespace feed::impl {
|
|||||||
* @brief Feed that publishes the ledger info.
|
* @brief Feed that publishes the ledger info.
|
||||||
* Example : {'type': 'ledgerClosed', 'ledger_index': 2647935, 'ledger_hash':
|
* Example : {'type': 'ledgerClosed', 'ledger_index': 2647935, 'ledger_hash':
|
||||||
* '5D022718CD782A82EE10D2147FD90B5F42F26A7E937C870B4FE3CF1086C916AE', 'ledger_time': 756395681, 'fee_base': 10,
|
* '5D022718CD782A82EE10D2147FD90B5F42F26A7E937C870B4FE3CF1086C916AE', 'ledger_time': 756395681, 'fee_base': 10,
|
||||||
* 'reserve_base': 10000000, 'reserve_inc': 2000000, 'validated_ledgers': '2619127-2647935', 'txn_count': 0}
|
* 'reserve_base': 10000000, 'reserve_inc': 2000000, 'validated_ledgers': '2619127-2647935', 'txn_count': 0,
|
||||||
|
* 'network_id': 1}
|
||||||
*/
|
*/
|
||||||
class LedgerFeed : public SingleFeedBase {
|
class LedgerFeed : public SingleFeedBase {
|
||||||
public:
|
public:
|
||||||
@@ -57,13 +58,15 @@ public:
|
|||||||
* @brief Subscribe the ledger feed.
|
* @brief Subscribe the ledger feed.
|
||||||
* @param yield The coroutine yield.
|
* @param yield The coroutine yield.
|
||||||
* @param backend The backend.
|
* @param backend The backend.
|
||||||
* @param subscriber
|
* @param subscriber The subscriber.
|
||||||
|
* @param networkID The network ID.
|
||||||
* @return The information of the latest ledger.
|
* @return The information of the latest ledger.
|
||||||
*/
|
*/
|
||||||
boost::json::object
|
boost::json::object
|
||||||
sub(boost::asio::yield_context yield,
|
sub(boost::asio::yield_context yield,
|
||||||
std::shared_ptr<data::BackendInterface const> const& backend,
|
std::shared_ptr<data::BackendInterface const> const& backend,
|
||||||
SubscriberSharedPtr const& subscriber);
|
SubscriberSharedPtr const& subscriber,
|
||||||
|
uint32_t networkID);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Publishes the ledger feed.
|
* @brief Publishes the ledger feed.
|
||||||
@@ -71,12 +74,14 @@ public:
|
|||||||
* @param fees The fees.
|
* @param fees The fees.
|
||||||
* @param ledgerRange The ledger range.
|
* @param ledgerRange The ledger range.
|
||||||
* @param txnCount The transaction count.
|
* @param txnCount The transaction count.
|
||||||
|
* @param networkID The network ID.
|
||||||
*/
|
*/
|
||||||
void
|
void
|
||||||
pub(ripple::LedgerHeader const& lgrInfo,
|
pub(ripple::LedgerHeader const& lgrInfo,
|
||||||
ripple::Fees const& fees,
|
ripple::Fees const& fees,
|
||||||
std::string const& ledgerRange,
|
std::string const& ledgerRange,
|
||||||
std::uint32_t txnCount);
|
uint32_t txnCount,
|
||||||
|
uint32_t networkID);
|
||||||
|
|
||||||
private:
|
private:
|
||||||
static boost::json::object
|
static boost::json::object
|
||||||
@@ -84,7 +89,8 @@ private:
|
|||||||
ripple::LedgerHeader const& lgrInfo,
|
ripple::LedgerHeader const& lgrInfo,
|
||||||
ripple::Fees const& fees,
|
ripple::Fees const& fees,
|
||||||
std::string const& ledgerRange,
|
std::string const& ledgerRange,
|
||||||
std::uint32_t txnCount
|
uint32_t txnCount,
|
||||||
|
uint32_t networkID
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
} // namespace feed::impl
|
} // namespace feed::impl
|
||||||
|
|||||||
@@ -33,10 +33,12 @@
|
|||||||
#include <boost/json/serialize.hpp>
|
#include <boost/json/serialize.hpp>
|
||||||
#include <xrpl/basics/chrono.h>
|
#include <xrpl/basics/chrono.h>
|
||||||
#include <xrpl/basics/strHex.h>
|
#include <xrpl/basics/strHex.h>
|
||||||
|
#include <xrpl/json/json_value.h>
|
||||||
#include <xrpl/protocol/AccountID.h>
|
#include <xrpl/protocol/AccountID.h>
|
||||||
#include <xrpl/protocol/Book.h>
|
#include <xrpl/protocol/Book.h>
|
||||||
#include <xrpl/protocol/LedgerFormats.h>
|
#include <xrpl/protocol/LedgerFormats.h>
|
||||||
#include <xrpl/protocol/LedgerHeader.h>
|
#include <xrpl/protocol/LedgerHeader.h>
|
||||||
|
#include <xrpl/protocol/NFTSyntheticSerializer.h>
|
||||||
#include <xrpl/protocol/SField.h>
|
#include <xrpl/protocol/SField.h>
|
||||||
#include <xrpl/protocol/STObject.h>
|
#include <xrpl/protocol/STObject.h>
|
||||||
#include <xrpl/protocol/TER.h>
|
#include <xrpl/protocol/TER.h>
|
||||||
@@ -204,8 +206,18 @@ TransactionFeed::pub(
     pubObj[txKey] = rpc::toJson(*tx);
     pubObj[JS(meta)] = rpc::toJson(*meta);
     rpc::insertDeliveredAmount(pubObj[JS(meta)].as_object(), tx, meta, txMeta.date);
-    rpc::insertDeliverMaxAlias(pubObj[txKey].as_object(), version);
-    rpc::insertMPTIssuanceID(pubObj[JS(meta)].as_object(), tx, meta);
+    auto& txnPubobj = pubObj[txKey].as_object();
+    rpc::insertDeliverMaxAlias(txnPubobj, version);
+
+    Json::Value nftJson;
+    ripple::RPC::insertNFTSyntheticInJson(nftJson, tx, *meta);
+    auto const nftBoostJson = rpc::toBoostJson(nftJson).as_object();
+    if (nftBoostJson.contains(JS(meta)) && nftBoostJson.at(JS(meta)).is_object()) {
+        auto& metaObjInPub = pubObj.at(JS(meta)).as_object();
+        for (auto const& [k, v] : nftBoostJson.at(JS(meta)).as_object())
+            metaObjInPub.insert_or_assign(k, v);
+    }
+
     auto const& metaObj = pubObj[JS(meta)];
     ASSERT(metaObj.is_object(), "meta must be an obj in rippled and clio");
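Note: the hunk above folds the NFT synthetic fields produced by ripple::RPC::insertNFTSyntheticInJson into the already-built meta object, overwriting any duplicate keys. The merge pattern in isolation (a sketch, not the repo's code; it relies on the same tuple-like key_value_pair bindings the diff itself uses):

    #include <boost/json.hpp>

    // Merge every key of src into dst; insert_or_assign overwrites keys
    // that already exist, exactly what the loop above does with the
    // synthetic NFT fields.
    void
    mergeInto(boost::json::object& dst, boost::json::object const& src)
    {
        for (auto const& [k, v] : src)
            dst.insert_or_assign(k, v);
    }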
@@ -22,6 +22,7 @@
 #include "app/VerifyConfig.hpp"
 #include "migration/MigrationApplication.hpp"
 #include "rpc/common/impl/HandlerProvider.hpp"
+#include "util/ScopeGuard.hpp"
 #include "util/TerminationHandler.hpp"
 #include "util/config/ConfigDefinition.hpp"
 #include "util/log/Logger.hpp"
@@ -33,18 +34,16 @@
 
 using namespace util::config;
 
+[[nodiscard]]
 int
-main(int argc, char const* argv[])
-try {
-    util::setTerminationHandler();
-
+runApp(int argc, char const* argv[])
+{
     auto const action = app::CliArgs::parse(argc, argv);
     return action.apply(
         [](app::CliArgs::Action::Exit const& exit) { return exit.exitCode; },
         [](app::CliArgs::Action::VerifyConfig const& verify) {
             if (app::parseConfig(verify.configPath)) {
-                std::cout << "Config " << verify.configPath << " is correct"
-                          << "\n";
+                std::cout << "Config " << verify.configPath << " is correct" << "\n";
                 return EXIT_SUCCESS;
             }
             return EXIT_FAILURE;
@@ -53,6 +52,7 @@ try {
             if (not app::parseConfig(run.configPath))
                 return EXIT_FAILURE;
 
+            ClioConfigDefinition const& gClioConfig = getClioConfig();
             PrometheusService::init(gClioConfig);
             if (auto const initSuccess = util::LogService::init(gClioConfig); not initSuccess) {
                 std::cerr << initSuccess.error() << std::endl;
@@ -65,18 +65,30 @@ try {
             if (not app::parseConfig(migrate.configPath))
                 return EXIT_FAILURE;
 
-            if (auto const initSuccess = util::LogService::init(gClioConfig); not initSuccess) {
+            if (auto const initSuccess = util::LogService::init(getClioConfig()); not initSuccess) {
                 std::cerr << initSuccess.error() << std::endl;
                 return EXIT_FAILURE;
             }
-            app::MigratorApplication migrator{gClioConfig, migrate.subCmd};
+            app::MigratorApplication migrator{getClioConfig(), migrate.subCmd};
             return migrator.run();
         }
     );
-} catch (std::exception const& e) {
-    LOG(util::LogService::fatal()) << "Exit on exception: " << e.what();
-    return EXIT_FAILURE;
-} catch (...) {
-    LOG(util::LogService::fatal()) << "Exit on exception: unknown";
-    return EXIT_FAILURE;
+}
+
+int
+main(int argc, char const* argv[])
+{
+    util::setTerminationHandler();
+
+    util::ScopeGuard const loggerShutdownGuard{[] { util::LogService::shutdown(); }};
+
+    try {
+        return runApp(argc, argv);
+    } catch (std::exception const& e) {
+        LOG(util::LogService::fatal()) << "Exit on exception: " << e.what();
+        return EXIT_FAILURE;
+    } catch (...) {
+        LOG(util::LogService::fatal()) << "Exit on exception: unknown";
+        return EXIT_FAILURE;
+    }
 }
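Note: the refactor above splits the old function-try-block main() into a [[nodiscard]] runApp() plus a thin main() that installs the termination handler, arms a ScopeGuard so util::LogService::shutdown() runs on every exit path, and converts escaping exceptions into EXIT_FAILURE. A minimal RAII guard in the same spirit (the real util::ScopeGuard in util/ScopeGuard.hpp may differ in detail):

    #include <utility>

    // Runs the stored callable when the guard leaves scope, whether by
    // normal return or stack unwinding - the property main() relies on
    // to shut down logging even when runApp() throws.
    template <typename Fn>
    class ScopeGuard {
        Fn fn_;

    public:
        explicit ScopeGuard(Fn fn) : fn_(std::move(fn)) {}
        ~ScopeGuard() { fn_(); }
        ScopeGuard(ScopeGuard const&) = delete;
        ScopeGuard& operator=(ScopeGuard const&) = delete;
    };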
@@ -101,11 +101,6 @@
 #include <utility>
 #include <vector>
 
-// local to compilation unit loggers
-namespace {
-util::Logger gLog{"RPC"};
-} // namespace
-
 namespace rpc {
 
 std::optional<AccountCursor>
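Note: the deletion above removes the file-scope gLog from the anonymous namespace; the following hunks in this file reintroduce loggers as function-local statics instead. A function-local static is initialized exactly once, thread-safely, on first call (C++11 "magic statics") and sidesteps cross-translation-unit static initialization order issues. A sketch with a stand-in Logger type:

    #include <iostream>
    #include <string>

    struct Logger {
        std::string channel;
    };

    void
    logSomething()
    {
        // Constructed on the first call only; initialization is
        // guaranteed thread-safe since C++11.
        static Logger const log{"RPC"};
        std::cout << log.channel << ": hello\n";
    }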
@@ -208,6 +203,8 @@ accountFromStringStrict(std::string const& account)
 std::pair<std::shared_ptr<ripple::STTx const>, std::shared_ptr<ripple::STObject const>>
 deserializeTxPlusMeta(data::TransactionAndMetadata const& blobs)
 {
+    static util::Logger const log{"RPC"}; // NOLINT(readability-identifier-naming)
+
     try {
         std::pair<std::shared_ptr<ripple::STTx const>, std::shared_ptr<ripple::STObject const>> result;
         {
@@ -224,9 +221,9 @@ deserializeTxPlusMeta(data::TransactionAndMetadata const& blobs)
             std::stringstream meta;
             std::ranges::copy(blobs.transaction, std::ostream_iterator<unsigned char>(txn));
             std::ranges::copy(blobs.metadata, std::ostream_iterator<unsigned char>(meta));
-            LOG(gLog.error()) << "Failed to deserialize transaction. txn = " << txn.str() << " - meta = " << meta.str()
+            LOG(log.error()) << "Failed to deserialize transaction. txn = " << txn.str() << " - meta = " << meta.str()
                               << " txn length = " << std::to_string(blobs.transaction.size())
                               << " meta length = " << std::to_string(blobs.metadata.size());
             throw e;
         }
     }
@@ -262,11 +259,10 @@ toExpandedJson(
         auto metaJson = toJson(*meta);
         insertDeliveredAmount(metaJson, txn, meta, blobs.date);
         insertDeliverMaxAlias(txnJson, apiVersion);
-        insertMPTIssuanceID(metaJson, txn, meta);
 
         if (nftEnabled == NFTokenjson::ENABLE) {
             Json::Value nftJson;
-            ripple::insertNFTSyntheticInJson(nftJson, txn, *meta);
+            ripple::RPC::insertNFTSyntheticInJson(nftJson, txn, *meta);
             // if there is no nft fields, the nftJson will be {"meta":null}
             auto const nftBoostJson = toBoostJson(nftJson).as_object();
             if (nftBoostJson.contains(JS(meta)) and nftBoostJson.at(JS(meta)).is_object()) {
@@ -321,67 +317,6 @@ insertDeliveredAmount(
     return false;
 }
 
-/**
- * @brief Get the delivered amount
- *
- * @param meta The metadata
- * @return The mpt_issuance_id or std::nullopt if not available
- */
-static std::optional<ripple::uint192>
-getMPTIssuanceID(std::shared_ptr<ripple::TxMeta const> const& meta)
-{
-    ripple::TxMeta const& transactionMeta = *meta;
-
-    for (ripple::STObject const& node : transactionMeta.getNodes()) {
-        if (node.getFieldU16(ripple::sfLedgerEntryType) != ripple::ltMPTOKEN_ISSUANCE ||
-            node.getFName() != ripple::sfCreatedNode)
-            continue;
-
-        auto const& mptNode = node.peekAtField(ripple::sfNewFields).downcast<ripple::STObject>();
-        return ripple::makeMptID(mptNode[ripple::sfSequence], mptNode[ripple::sfIssuer]);
-    }
-
-    return {};
-}
-
-/**
- * @brief Check if transaction has a new MPToken created
- *
- * @param txn The transaction
- * @param meta The metadata
- * @return true if the transaction can have a mpt_issuance_id
- */
-static bool
-canHaveMPTIssuanceID(std::shared_ptr<ripple::STTx const> const& txn, std::shared_ptr<ripple::TxMeta const> const& meta)
-{
-    if (txn->getTxnType() != ripple::ttMPTOKEN_ISSUANCE_CREATE)
-        return false;
-
-    if (meta->getResultTER() != ripple::tesSUCCESS)
-        return false;
-
-    return true;
-}
-
-bool
-insertMPTIssuanceID(
-    boost::json::object& metaJson,
-    std::shared_ptr<ripple::STTx const> const& txn,
-    std::shared_ptr<ripple::TxMeta const> const& meta
-)
-{
-    if (!canHaveMPTIssuanceID(txn, meta))
-        return false;
-
-    if (auto const id = getMPTIssuanceID(meta)) {
-        metaJson[JS(mpt_issuance_id)] = ripple::to_string(*id);
-        return true;
-    }
-
-    assert(false);
-    return false;
-}
-
 void
 insertDeliverMaxAlias(boost::json::object& txJson, std::uint32_t const apiVersion)
 {
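Note: the removed getMPTIssuanceID above rebuilt the issuance ID via ripple::makeMptID(sequence, issuer). For orientation, an MPTokenIssuanceID is, per the MPT design, the 32-bit creating-transaction sequence (big-endian) concatenated with the 160-bit issuer AccountID, giving 192 bits. An illustrative byte-level sketch under that assumption; ripple::makeMptID remains the authoritative implementation:

    #include <array>
    #include <cstdint>
    #include <cstring>

    // 4-byte big-endian sequence followed by the 20-byte issuer account
    // ID: 24 bytes / 192 bits total.
    std::array<std::uint8_t, 24>
    makeMptIdBytes(std::uint32_t sequence, std::array<std::uint8_t, 20> const& issuer)
    {
        std::array<std::uint8_t, 24> id{};
        for (int i = 0; i < 4; ++i)
            id[i] = static_cast<std::uint8_t>(sequence >> (8 * (3 - i)));
        std::memcpy(id.data() + 4, issuer.data(), issuer.size());
        return id;
    }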
@@ -806,7 +741,9 @@ traverseOwnedNodes(
     }
     auto end = std::chrono::system_clock::now();
 
-    LOG(gLog.debug()) << fmt::format(
+    static util::Logger const log{"RPC"}; // NOLINT(readability-identifier-naming)
+
+    LOG(log.debug()) << fmt::format(
         "Time loading owned directories: {} milliseconds, entries size: {}",
         std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count(),
         keys.size()
@@ -814,7 +751,7 @@ traverseOwnedNodes(
 
     auto [objects, timeDiff] = util::timed([&]() { return backend.fetchLedgerObjects(keys, sequence, yield); });
 
-    LOG(gLog.debug()) << "Time loading owned entries: " << timeDiff << " milliseconds";
+    LOG(log.debug()) << "Time loading owned entries: " << timeDiff << " milliseconds";
 
     for (auto i = 0u; i < objects.size(); ++i) {
         ripple::SerialIter it{objects[i].data(), objects[i].size()};
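Note: the hunk above keeps timing fetchLedgerObjects through util::timed. A minimal helper of the same shape, assuming the utility simply pairs a callable's result with its elapsed milliseconds (the real signature in clio may differ):

    #include <chrono>
    #include <utility>

    // Run fn and return {result, elapsed milliseconds}.
    template <typename Fn>
    auto
    timed(Fn&& fn)
    {
        auto const start = std::chrono::steady_clock::now();
        auto result = std::forward<Fn>(fn)();
        auto const end = std::chrono::steady_clock::now();
        auto const ms = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
        return std::make_pair(std::move(result), ms);
    }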
@@ -1302,7 +1239,8 @@ postProcessOrderBook(
 
             jsonOffers.push_back(offerJson);
         } catch (std::exception const& e) {
-            LOG(gLog.error()) << "caught exception: " << e.what();
+            util::Logger const log{"RPC"};
+            LOG(log.error()) << "caught exception: " << e.what();
         }
     }
     return jsonOffers;
@@ -199,21 +199,6 @@ insertDeliveredAmount(
     uint32_t date
 );
 
-/**
- * @brief Add "mpt_issuance_id" into MPTokenIssuanceCreate transaction json.
- *
- * @param metaJson The metadata json object to add "MPTokenIssuanceID"
- * @param txn The transaction object
- * @param meta The metadata object
- * @return true if the "mpt_issuance_id" is added to the metadata json object
- */
-bool
-insertMPTIssuanceID(
-    boost::json::object& metaJson,
-    std::shared_ptr<ripple::STTx const> const& txn,
-    std::shared_ptr<ripple::TxMeta const> const& meta
-);
-
 /**
  * @brief Convert STBase object to JSON
  *
@@ -19,7 +19,9 @@
 
 #pragma once
 
+#include "util/Assert.hpp"
 #include "util/Mutex.hpp"
+#include "util/Spawn.hpp"
 #include "util/config/ConfigDefinition.hpp"
 #include "util/log/Logger.hpp"
 #include "util/prometheus/Counter.hpp"
@@ -127,7 +129,7 @@ public:
 
         // Each time we enqueue a job, we want to post a symmetrical job that will dequeue and run the job at the front
         // of the job queue.
-        boost::asio::spawn(
+        util::spawn(
             ioc_,
             [this, func = std::forward<FnType>(func), start = std::chrono::system_clock::now()](auto yield) mutable {
                 auto const run = std::chrono::system_clock::now();
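Note: the hunk above swaps boost::asio::spawn for util::spawn, presumably a project-level wrapper (its definition is not part of the hunks shown here). For reference, the plain Boost API being wrapped, in self-contained form:

    #include <boost/asio/detached.hpp>
    #include <boost/asio/io_context.hpp>
    #include <boost/asio/spawn.hpp>
    #include <iostream>

    int
    main()
    {
        boost::asio::io_context ioc;
        // Post a stackful coroutine; the 'detached' token discards the
        // completion (including any exception_ptr), which is one
        // plausible motivation for a project wrapper around spawn.
        boost::asio::spawn(
            ioc, [](boost::asio::yield_context) { std::cout << "job ran\n"; }, boost::asio::detached
        );
        ioc.run();
    }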
@@ -69,9 +69,10 @@ concept SomeProcessor = (SomeRequirement<T> or SomeModifier<T>);
  * @brief A process function that expects both some Input and a Context.
  */
 template <typename T>
-concept SomeContextProcessWithInput = requires(T a, typename T::Input in, typename T::Output out, Context const& ctx) {
-    { a.process(in, ctx) } -> std::same_as<HandlerReturnType<decltype(out)>>;
-};
+concept SomeContextProcessWithInput =
+    requires(T a, typename T::Input const& in, typename T::Output out, Context const& ctx) {
+        { a.process(in, ctx) } -> std::same_as<HandlerReturnType<decltype(out)>>;
+    };
 
 /**
  * @brief A process function that expects no Input but does take a Context.
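Note: besides the reflow, the concept above now checks the call with Input taken by const reference, matching the handler-signature changes further down. A compact, self-contained analogue of the pattern:

    #include <concepts>

    struct Context {};

    // Satisfied only if T exposes T::Input and a process(Input const&,
    // Context const&) callable returning int (a stand-in here for the
    // real HandlerReturnType machinery).
    template <typename T>
    concept ContextProcessWithInput =
        requires(T a, typename T::Input const& in, Context const& ctx) {
            { a.process(in, ctx) } -> std::same_as<int>;
        };

    struct Handler {
        struct Input {};
        int
        process(Input const&, Context const&) const
        {
            return 0;
        }
    };
    static_assert(ContextProcessWithInput<Handler>);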
@@ -23,6 +23,7 @@
 #include "rpc/RPCHelpers.hpp"
 #include "rpc/common/Types.hpp"
 #include "util/AccountUtils.hpp"
+#include "util/LedgerUtils.hpp"
 #include "util/TimeUtils.hpp"
 
 #include <boost/json/object.hpp>
@@ -32,6 +33,7 @@
 #include <xrpl/basics/StringUtilities.h>
 #include <xrpl/basics/base_uint.h>
 #include <xrpl/protocol/AccountID.h>
+#include <xrpl/protocol/LedgerFormats.h>
 #include <xrpl/protocol/Protocol.h>
 #include <xrpl/protocol/UintTypes.h>
 
@@ -120,6 +122,18 @@ CustomValidator CustomValidators::ledgerIndexValidator =
         return MaybeError{};
     }};
 
+CustomValidator CustomValidators::ledgerTypeValidator =
+    CustomValidator{[](boost::json::value const& value, std::string_view key) -> MaybeError {
+        if (!value.is_string())
+            return Error{Status{RippledError::rpcINVALID_PARAMS, fmt::format("Invalid field '{}', not string.", key)}};
+
+        auto const type = util::LedgerTypes::getLedgerEntryTypeFromStr(boost::json::value_to<std::string>(value));
+        if (type == ripple::ltANY)
+            return Error{Status{RippledError::rpcINVALID_PARAMS, fmt::format("Invalid field '{}'.", key)}};
+
+        return MaybeError{};
+    }};
+
 CustomValidator CustomValidators::accountValidator =
     CustomValidator{[](boost::json::value const& value, std::string_view key) -> MaybeError {
         if (!value.is_string())
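Note: the validator added above maps the string to a ledger entry type and rejects ltANY, i.e. anything unrecognized. The shape of that pattern in isolation, with simplified stand-ins for clio's MaybeError/Status types:

    #include <boost/json.hpp>
    #include <optional>
    #include <string>

    // nullopt means "valid"; a string carries the error (simplified).
    using MaybeError = std::optional<std::string>;

    MaybeError
    validateLedgerType(boost::json::value const& value)
    {
        if (!value.is_string())
            return "not a string";
        // Illustrative subset of accepted names; the real validator
        // consults util::LedgerTypes.
        for (auto const* name : {"account", "offer", "ticket"})
            if (value.as_string() == name)
                return std::nullopt;
        return "unknown ledger entry type";
    }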
@@ -160,6 +174,19 @@ CustomValidator CustomValidators::accountMarkerValidator =
         return MaybeError{};
     }};
 
+CustomValidator CustomValidators::accountTypeValidator =
+    CustomValidator{[](boost::json::value const& value, std::string_view key) -> MaybeError {
+        if (!value.is_string())
+            return Error{Status{RippledError::rpcINVALID_PARAMS, fmt::format("Invalid field '{}', not string.", key)}};
+
+        auto const type =
+            util::LedgerTypes::getAccountOwnedLedgerTypeFromStr(boost::json::value_to<std::string>(value));
+        if (type == ripple::ltANY)
+            return Error{Status{RippledError::rpcINVALID_PARAMS, fmt::format("Invalid field '{}'.", key)}};
+
+        return MaybeError{};
+    }};
+
 CustomValidator CustomValidators::currencyValidator =
     CustomValidator{[](boost::json::value const& value, std::string_view key) -> MaybeError {
         if (!value.is_string())
@@ -486,6 +486,14 @@ struct CustomValidators final {
      */
     static CustomValidator ledgerIndexValidator;
 
+    /**
+     * @brief Provides a validator for ledger type.
+     *
+     * A type accepts canonical names of ledger entry types (case insensitive) or short names.
+     * Used by ledger_data.
+     */
+    static CustomValidator ledgerTypeValidator;
+
     /**
      * @brief Provides a commonly used validator for accounts.
      *
@@ -508,6 +516,14 @@ struct CustomValidators final {
      */
     static CustomValidator accountMarkerValidator;
 
+    /**
+     * @brief Provides a validator for account type.
+     *
+     * A type accepts canonical names of owned ledger entry types (case insensitive) or short names.
+     * Used by account_objects.
+     */
+    static CustomValidator accountTypeValidator;
+
     /**
      * @brief Provides a commonly used validator for uint160(AccountID) hex string.
      *
@@ -77,7 +77,7 @@ toIso8601(ripple::NetClock::time_point tp)
 namespace rpc {
 
 AMMInfoHandler::Result
-AMMInfoHandler::process(AMMInfoHandler::Input input, Context const& ctx) const
+AMMInfoHandler::process(AMMInfoHandler::Input const& input, Context const& ctx) const
 {
     using namespace ripple;
 
@@ -111,7 +111,7 @@ public:
      * @return The result of the operation
      */
     Result
-    process(Input input, Context const& ctx) const;
+    process(Input const& input, Context const& ctx) const;
 
 private:
     /**
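Note: both AMMInfo hunks above change process() to take Input by const reference. The by-value signature copies the whole input struct (strings, optionals, and the like) on every call; the reference form is a read-only view with no copy. Illustrative only; the members below are hypothetical:

    #include <string>
    #include <vector>

    struct Input {
        std::string ammAccount;          // hypothetical members,
        std::vector<std::string> assets; // for illustration only
    };

    // before: int process(Input input);        // copies Input per call
    // after:  int process(Input const& input); // no copy, read-only
    int
    process(Input const& input)
    {
        return static_cast<int>(input.assets.size());
    }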
Some files were not shown because too many files have changed in this diff.