Compare commits


5 Commits

Author            SHA1        Message                              Date
Sergey Kuznetsov  7558dfc7f5  chore: Commits for 2.5.0 (#2352)     2025-07-22 11:43:18 +01:00
Sergey Kuznetsov  d83be17ded  chore: Commits for 2.5.0 (#2330)     2025-07-14 15:55:34 +01:00
Sergey Kuznetsov  f04d2a97ec  chore: Commits for 2.5.0 (#2268)     2025-06-30 11:32:26 +01:00
Sergey Kuznetsov  b8b82e5dd9  chore: Commits for 2.5.0-b3 (#2197)  2025-06-09 11:48:12 +01:00
Sergey Kuznetsov  764601e7fc  chore: Commits for 2.5.0-b2 (#2146)  2025-05-20 16:53:06 +01:00
353 changed files with 6805 additions and 8995 deletions


@@ -16,7 +16,6 @@ coverage:
#
# More info: https://github.com/XRPLF/clio/pull/2066
ignore:
- "benchmarks"
- "tests"
- "src/data/cassandra/"
- "src/data/CassandraBackend.hpp"


@@ -24,7 +24,6 @@ inputs:
dockerhub_repo:
description: DockerHub repository name
required: false
default: ""
dockerhub_description:
description: Short description of the image
required: false
@@ -34,14 +33,14 @@ runs:
steps:
- name: Login to DockerHub
if: ${{ inputs.push_image == 'true' && inputs.dockerhub_repo != '' }}
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_PW }}
- name: Login to GitHub Container Registry
if: ${{ inputs.push_image == 'true' }}
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -52,7 +51,7 @@ runs:
cache-image: false
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
- uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
id: meta
with:
images: ${{ inputs.images }}


@@ -1,73 +0,0 @@
name: Run CMake
description: Run CMake to generate build files
inputs:
build_dir:
description: Build directory
required: false
default: "build"
conan_profile:
description: Conan profile name
required: true
build_type:
description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
required: true
default: "Release"
integration_tests:
description: Whether to generate target integration tests
required: true
default: "true"
benchmark:
description: Whether to generate targets for benchmarks
required: true
default: "true"
code_coverage:
description: Whether to enable code coverage
required: true
default: "false"
static:
description: Whether Clio is to be statically linked
required: true
default: "false"
time_trace:
description: Whether to enable compiler trace reports
required: true
default: "false"
package:
description: Whether to generate Debian package
required: true
default: "false"
runs:
using: composite
steps:
- name: Run cmake
shell: bash
env:
BUILD_TYPE: "${{ inputs.build_type }}"
SANITIZER_OPTION: |-
${{ endsWith(inputs.conan_profile, '.asan') && '-Dsan=address' ||
endsWith(inputs.conan_profile, '.tsan') && '-Dsan=thread' ||
endsWith(inputs.conan_profile, '.ubsan') && '-Dsan=undefined' ||
'' }}
INTEGRATION_TESTS: "${{ inputs.integration_tests == 'true' && 'ON' || 'OFF' }}"
BENCHMARK: "${{ inputs.benchmark == 'true' && 'ON' || 'OFF' }}"
COVERAGE: "${{ inputs.code_coverage == 'true' && 'ON' || 'OFF' }}"
STATIC: "${{ inputs.static == 'true' && 'ON' || 'OFF' }}"
TIME_TRACE: "${{ inputs.time_trace == 'true' && 'ON' || 'OFF' }}"
PACKAGE: "${{ inputs.package == 'true' && 'ON' || 'OFF' }}"
run: |
cmake \
-B ${{inputs.build_dir}} \
-S . \
-G Ninja \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
"${SANITIZER_OPTION}" \
-Dtests=ON \
-Dintegration_tests="${INTEGRATION_TESTS}" \
-Dbenchmark="${BENCHMARK}" \
-Dcoverage="${COVERAGE}" \
-Dstatic="${STATIC}" \
-Dtime_trace="${TIME_TRACE}" \
-Dpackage="${PACKAGE}"
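The SANITIZER_OPTION expression above chains GitHub's `&&`/`||` operators to map the Conan profile suffix onto a -Dsan flag. A rough shell equivalent of that mapping (a local sketch, not a script from this repo):

case "$CONAN_PROFILE" in
  *.asan)  SANITIZER_OPTION='-Dsan=address' ;;    # AddressSanitizer
  *.tsan)  SANITIZER_OPTION='-Dsan=thread' ;;     # ThreadSanitizer
  *.ubsan) SANITIZER_OPTION='-Dsan=undefined' ;;  # UndefinedBehaviorSanitizer
  *)       SANITIZER_OPTION='' ;;                 # no sanitizer
esac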


@@ -15,7 +15,6 @@ runs:
shell: bash
run: |
gcovr \
-e benchmarks \
-e tests \
-e src/data/cassandra \
-e src/data/CassandraBackend.hpp \
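The gcovr exclusions mirror the codecov `ignore` list above, so local and CI reports agree. A minimal local run under the same assumptions (a coverage-enabled build already executed; the output path is an assumption):

gcovr \
  -e tests \
  -e src/data/cassandra \
  -e src/data/CassandraBackend.hpp \
  --xml -o build/coverage_report.xml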


@@ -1,38 +0,0 @@
name: Run Conan
description: Run conan to install dependencies
inputs:
build_dir:
description: Build directory
required: false
default: "build"
conan_profile:
description: Conan profile name
required: true
force_conan_source_build:
description: Whether conan should build all dependencies from source
required: true
default: "false"
build_type:
description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
required: true
default: "Release"
runs:
using: composite
steps:
- name: Create build directory
shell: bash
run: mkdir -p "${{ inputs.build_dir }}"
- name: Run conan
shell: bash
env:
CONAN_BUILD_OPTION: "${{ inputs.force_conan_source_build == 'true' && '*' || 'missing' }}"
run: |
conan \
install . \
-of build \
-b "$CONAN_BUILD_OPTION" \
-s "build_type=${{ inputs.build_type }}" \
--profile:all "${{ inputs.conan_profile }}"
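Outside CI this step reduces to a single command; a sketch, with `gcc` standing in for whichever profile exists under $CONAN_HOME/profiles:

mkdir -p build
conan install . -of build -b missing -s build_type=Release --profile:all gcc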

.github/actions/generate/action.yml

@@ -0,0 +1,79 @@
name: Run conan and cmake
description: Run conan and cmake
inputs:
conan_profile:
description: Conan profile name
required: true
force_conan_source_build:
description: Whether conan should build all dependencies from source
required: true
default: "false"
build_type:
description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
required: true
default: "Release"
build_integration_tests:
description: Whether to build integration tests
required: true
default: "true"
code_coverage:
description: Whether conan's coverage option should be on or not
required: true
default: "false"
static:
description: Whether Clio is to be statically linked
required: true
default: "false"
time_trace:
description: Whether to enable compiler trace reports
required: true
default: "false"
runs:
using: composite
steps:
- name: Create build directory
shell: bash
run: mkdir -p build
- name: Run conan
shell: bash
env:
CONAN_BUILD_OPTION: "${{ inputs.force_conan_source_build == 'true' && '*' || 'missing' }}"
CODE_COVERAGE: "${{ inputs.code_coverage == 'true' && 'True' || 'False' }}"
STATIC_OPTION: "${{ inputs.static == 'true' && 'True' || 'False' }}"
INTEGRATION_TESTS_OPTION: "${{ inputs.build_integration_tests == 'true' && 'True' || 'False' }}"
TIME_TRACE: "${{ inputs.time_trace == 'true' && 'True' || 'False' }}"
run: |
cd build
conan \
install .. \
-of . \
-b "$CONAN_BUILD_OPTION" \
-s "build_type=${{ inputs.build_type }}" \
-o "&:static=${STATIC_OPTION}" \
-o "&:tests=True" \
-o "&:integration_tests=${INTEGRATION_TESTS_OPTION}" \
-o "&:lint=False" \
-o "&:coverage=${CODE_COVERAGE}" \
-o "&:time_trace=${TIME_TRACE}" \
--profile:all "${{ inputs.conan_profile }}"
- name: Run cmake
shell: bash
env:
BUILD_TYPE: "${{ inputs.build_type }}"
SANITIZER_OPTION: |-
${{ endsWith(inputs.conan_profile, '.asan') && '-Dsan=address' ||
endsWith(inputs.conan_profile, '.tsan') && '-Dsan=thread' ||
endsWith(inputs.conan_profile, '.ubsan') && '-Dsan=undefined' ||
'' }}
run: |
cd build
cmake \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
"${SANITIZER_OPTION}" \
.. \
-G Ninja
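The `&:` prefix scopes each `-o` flag to the consumer recipe itself (Clio) rather than to its dependencies, so these toggles do not force third-party rebuilds. A minimal local equivalent (profile name is an assumption):

cd build
conan install .. -of . -b missing -o '&:tests=True' --profile:all gcc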


@@ -0,0 +1,77 @@
name: Prepare runner
description: Install packages, set environment variables, create directories
inputs:
disable_ccache:
description: Whether ccache should be disabled
required: true
runs:
using: composite
steps:
- name: Install packages on mac
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
brew install --quiet \
bison \
ca-certificates \
ccache \
clang-build-analyzer \
conan \
gh \
jq \
llvm@14 \
ninja \
pkg-config
echo "/opt/homebrew/opt/conan@2/bin" >> $GITHUB_PATH
- name: Install CMake 3.31.6 on mac
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
# Uninstall any existing cmake
brew uninstall --formula cmake --ignore-dependencies || true
# Download specific cmake formula
FORMULA_URL="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e46db74e74a8c1650b38b1da222284ce1ec5ce/Formula/c/cmake.rb"
FORMULA_EXPECTED_SHA256="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"
mkdir -p /tmp/homebrew-formula
curl -s -L "$FORMULA_URL" -o /tmp/homebrew-formula/cmake.rb
echo "$FORMULA_EXPECTED_SHA256 /tmp/homebrew-formula/cmake.rb" | shasum -a 256 -c
# Install cmake from the specific formula with force flag
brew install --formula --quiet --force /tmp/homebrew-formula/cmake.rb
- name: Fix git permissions on Linux
if: ${{ runner.os == 'Linux' }}
shell: bash
run: git config --global --add safe.directory "$PWD"
- name: Set env variables for macOS
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
echo "CCACHE_DIR=${{ github.workspace }}/.ccache" >> $GITHUB_ENV
echo "CONAN_HOME=${{ github.workspace }}/.conan2" >> $GITHUB_ENV
- name: Set env variables for Linux
if: ${{ runner.os == 'Linux' }}
shell: bash
run: |
echo "CCACHE_DIR=/root/.ccache" >> $GITHUB_ENV
echo "CONAN_HOME=/root/.conan2" >> $GITHUB_ENV
- name: Set CCACHE_DISABLE=1
if: ${{ inputs.disable_ccache == 'true' }}
shell: bash
run: |
echo "CCACHE_DISABLE=1" >> $GITHUB_ENV
- name: Create directories
shell: bash
run: |
mkdir -p "$CCACHE_DIR"
mkdir -p "$CONAN_HOME"


@@ -39,19 +39,6 @@ updates:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/cmake/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/code_coverage/
schedule:
@@ -66,7 +53,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/conan/
directory: .github/actions/create_issue/
schedule:
interval: weekly
day: monday
@@ -79,7 +66,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/create_issue/
directory: .github/actions/generate/
schedule:
interval: weekly
day: monday
@@ -117,6 +104,19 @@ updates:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/prepare_runner/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/restore_cache/
schedule:


@@ -0,0 +1,8 @@
[settings]
arch={{detect_api.detect_arch()}}
build_type=Release
compiler=apple-clang
compiler.cppstd=20
compiler.libcxx=libc++
compiler.version=16
os=Macos
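Conan 2 renders profiles through Jinja, so `{{detect_api.detect_arch()}}` resolves to the host architecture when the profile is loaded. The rendered result can be checked with (the local filename is an assumption):

conan profile show --profile:all ./apple-clang-ci.profile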


@@ -3,7 +3,7 @@ import itertools
import json
LINUX_OS = ["heavy", "heavy-arm64"]
LINUX_CONTAINERS = ['{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }']
LINUX_CONTAINERS = ['{ "image": "ghcr.io/xrplf/clio-ci:latest" }']
LINUX_COMPILERS = ["gcc", "clang"]
MACOS_OS = ["macos15"]


@@ -8,11 +8,10 @@ REPO_DIR="$(cd "$CURRENT_DIR/../../../" && pwd)"
CONAN_DIR="${CONAN_HOME:-$HOME/.conan2}"
PROFILES_DIR="$CONAN_DIR/profiles"
# When developers' compilers are updated, these profiles might be different
if [[ -z "$CI" ]]; then
APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-17.profile"
APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-local.profile"
else
APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-17.profile"
APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-ci.profile"
fi
GCC_PROFILE="$REPO_DIR/docker/ci/conan/gcc.profile"
@@ -22,7 +21,7 @@ SANITIZER_TEMPLATE_FILE="$REPO_DIR/docker/ci/conan/sanitizer_template.profile"
rm -rf "$CONAN_DIR"
conan remote add --index 0 xrplf https://conan.ripplex.io
conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev
cp "$REPO_DIR/docker/ci/conan/global.conf" "$CONAN_DIR/global.conf"


@@ -28,9 +28,8 @@ on:
workflow_dispatch:
concurrency:
# Develop branch: Each run gets unique group (using run_number) for parallel execution
# Other branches: Shared group with cancel-in-progress to stop old runs when new commits are pushed
group: ${{ github.workflow }}-${{ github.ref }}-${{ github.ref == 'refs/heads/develop' && github.run_number || 'branch' }}
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
@@ -43,10 +42,7 @@ jobs:
os: [heavy]
conan_profile: [gcc, clang]
build_type: [Release, Debug]
container:
[
'{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }',
]
container: ['{ "image": "ghcr.io/xrplf/clio-ci:latest" }']
static: [true]
include:
@@ -73,7 +69,7 @@ jobs:
uses: ./.github/workflows/build_impl.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
conan_profile: gcc
build_type: Debug
disable_cache: false
@@ -85,34 +81,17 @@ jobs:
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
package:
name: Build packages
uses: ./.github/workflows/build_impl.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
conan_profile: gcc
build_type: Release
disable_cache: false
code_coverage: false
static: true
upload_clio_server: false
package: true
targets: package
analyze_build_time: false
check_config:
name: Check Config Description
needs: build-and-test
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:latest
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v4
with:
name: clio_server_Linux_Release_gcc


@@ -57,18 +57,6 @@ on:
type: string
default: all
expected_version:
description: Expected version of the clio_server binary
required: false
type: string
default: ""
package:
description: Whether to generate Debian package
required: false
type: boolean
default: false
jobs:
build:
uses: ./.github/workflows/build_impl.yml
@@ -83,8 +71,6 @@ jobs:
upload_clio_server: ${{ inputs.upload_clio_server }}
targets: ${{ inputs.targets }}
analyze_build_time: false
expected_version: ${{ inputs.expected_version }}
package: ${{ inputs.package }}
test:
needs: build


@@ -48,7 +48,7 @@ jobs:
- name: Download Clio binary from artifact
if: ${{ inputs.artifact_name != null }}
uses: actions/download-artifact@v5
uses: actions/download-artifact@v4
with:
name: ${{ inputs.artifact_name }}
path: ./docker/clio/artifact/
@@ -73,8 +73,7 @@ jobs:
elif [[ $artifact == *.tar.gz ]]; then
tar -xvf $artifact
fi
chmod +x ./clio_server
mv ./clio_server ../
mv clio_server ../
cd ../
rm -rf ./artifact
@@ -83,11 +82,6 @@ jobs:
shell: bash
run: strip ./docker/clio/clio_server
- name: Set GHCR_REPO
id: set-ghcr-repo
run: |
echo "GHCR_REPO=$(echo ghcr.io/${{ github.repository_owner }} | tr '[:upper:]' '[:lower:]')" >> ${GITHUB_OUTPUT}
- name: Build Docker image
uses: ./.github/actions/build_docker_image
env:
@@ -96,11 +90,11 @@ jobs:
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
with:
images: |
ghcr.io/${{ steps.set-ghcr-repo.outputs.GHCR_REPO }}/clio
${{ github.repository_owner == 'XRPLF' && 'rippleci/clio' || '' }}
ghcr.io/xrplf/clio
rippleci/clio
push_image: ${{ inputs.publish_image }}
directory: docker/clio
tags: ${{ inputs.tags }}
platforms: linux/amd64
dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio' || '' }}
dockerhub_repo: rippleci/clio
dockerhub_description: Clio is an XRP Ledger API server.


@@ -53,17 +53,6 @@ on:
required: true
type: boolean
expected_version:
description: Expected version of the clio_server binary
required: false
type: string
default: ""
package:
description: Whether to generate Debian package
required: false
type: boolean
secrets:
CODECOV_TOKEN:
required: false
@@ -75,20 +64,16 @@ jobs:
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
steps:
- name: Cleanup workspace
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
# We need to fetch tags to have correct version in the release
# The workaround is based on https://github.com/actions/checkout/issues/1467
fetch-tags: true
ref: ${{ github.ref }}
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
uses: ./.github/actions/prepare_runner
with:
disable_ccache: ${{ inputs.disable_cache }}
@@ -107,21 +92,14 @@ jobs:
build_type: ${{ inputs.build_type }}
code_coverage: ${{ inputs.code_coverage }}
- name: Run conan
uses: ./.github/actions/conan
with:
conan_profile: ${{ inputs.conan_profile }}
build_type: ${{ inputs.build_type }}
- name: Run CMake
uses: ./.github/actions/cmake
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ inputs.conan_profile }}
build_type: ${{ inputs.build_type }}
code_coverage: ${{ inputs.code_coverage }}
static: ${{ inputs.static }}
time_trace: ${{ inputs.analyze_build_time }}
package: ${{ inputs.package }}
- name: Build Clio
uses: ./.github/actions/build_clio
@@ -169,26 +147,19 @@ jobs:
path: build/clio_server
- name: Upload clio_tests
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
uses: actions/upload-artifact@v4
with:
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build/clio_tests
- name: Upload clio_integration_tests
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
uses: actions/upload-artifact@v4
with:
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build/clio_integration_tests
- name: Upload Clio Linux package
if: inputs.package
uses: actions/upload-artifact@v4
with:
name: clio_deb_package_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build/*.deb
- name: Save cache
if: ${{ !inputs.disable_cache && github.ref == 'refs/heads/develop' }}
uses: ./.github/actions/save_cache
@@ -203,6 +174,7 @@ jobs:
# This is run as part of the build job, because it requires the following:
# - source code
# - generated source code (Build.cpp)
# - conan packages
# - .gcno files in build directory
#
@@ -211,18 +183,6 @@ jobs:
if: ${{ inputs.code_coverage }}
uses: ./.github/actions/code_coverage
- name: Verify expected version
if: ${{ inputs.expected_version != '' }}
shell: bash
run: |
set -e
EXPECTED_VERSION="clio-${{ inputs.expected_version }}"
actual_version=$(./build/clio_server --version)
if [[ "$actual_version" != "$EXPECTED_VERSION" ]]; then
echo "Expected version '$EXPECTED_VERSION', but got '$actual_version'"
exit 1
fi
# `codecov/codecov-action` will rerun `gcov` if it's available and build directory is present
# To prevent this from happening, we run this action in a separate workflow
#


@@ -17,36 +17,31 @@ jobs:
name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true
- name: Update libXRPL version requirement
shell: bash
run: |
sed -i.bak -E "s|'xrpl/[a-zA-Z0-9\\.\\-]+'|'xrpl/${{ github.event.client_payload.conan_ref }}'|g" conanfile.py
sed -i.bak -E "s|'xrpl/[a-zA-Z0-9\\.\\-]+'|'xrpl/${{ github.event.client_payload.version }}'|g" conanfile.py
rm -f conanfile.py.bak
- name: Update conan lockfile
shell: bash
run: |
conan lock create . --profile:all ${{ env.CONAN_PROFILE }}
conan lock create . -o '&:tests=True' -o '&:benchmark=True'
- name: Run conan
uses: ./.github/actions/conan
- name: Prepare runner
uses: ./.github/actions/prepare_runner
with:
conan_profile: ${{ env.CONAN_PROFILE }}
disable_ccache: true
- name: Run CMake
uses: ./.github/actions/cmake
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ env.CONAN_PROFILE }}
@@ -67,10 +62,10 @@ jobs:
needs: build
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:latest
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v4
with:
name: clio_tests_check_libxrpl
@@ -100,7 +95,6 @@ jobs:
labels: "compatibility,bug"
title: "Proposed libXRPL check failed"
body: >
Clio build or tests failed against `libXRPL ${{ github.event.client_payload.conan_ref }}`.
Clio build or tests failed against `libXRPL ${{ github.event.client_payload.version }}`.
PR: ${{ github.event.client_payload.pr_url }}
Workflow run: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/


@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: ytanikin/pr-conventional-commits@b72758283dcbee706975950e96bc4bf323a8d8c0 # v1.4.2
- uses: ytanikin/pr-conventional-commits@8267db1bacc237419f9ed0228bb9d94e94271a1d # v1.4.1
with:
task_types: '["build","feat","fix","docs","test","ci","style","refactor","perf","chore"]'
add_label: false


@@ -24,7 +24,7 @@ jobs:
clang_tidy:
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:latest
permissions:
contents: write
@@ -37,7 +37,7 @@ jobs:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
@@ -48,13 +48,8 @@ jobs:
conan_profile: ${{ env.CONAN_PROFILE }}
ccache_dir: ${{ env.CCACHE_DIR }}
- name: Run conan
uses: ./.github/actions/conan
with:
conan_profile: ${{ env.CONAN_PROFILE }}
- name: Run CMake
uses: ./.github/actions/cmake
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ env.CONAN_PROFILE }}


@@ -14,7 +14,7 @@ jobs:
build:
runs-on: ubuntu-latest
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:latest
steps:
- name: Checkout
@@ -23,7 +23,7 @@ jobs:
lfs: true
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
@@ -42,7 +42,7 @@ jobs:
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v4
uses: actions/upload-pages-artifact@v3
with:
path: build_docs/html
name: docs-develop


@@ -39,17 +39,17 @@ jobs:
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
- os: heavy
conan_profile: gcc
build_type: Debug
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
- os: heavy
conan_profile: gcc.ubsan
build_type: Release
static: false
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
uses: ./.github/workflows/build_and_test.yml
with:
@@ -72,7 +72,7 @@ jobs:
include:
- os: heavy
conan_profile: clang
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
static: true
- os: macos15
conan_profile: apple-clang
@@ -100,6 +100,8 @@ jobs:
title: "Clio development (nightly) build"
version: nightly
header: >
# Release notes
> **Note:** Please remember that this is a development release and it is not recommended for production use.
Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>


@@ -4,19 +4,47 @@ on:
# every first day of the month
schedule:
- cron: "0 0 1 * *"
pull_request:
branches: [release/*, develop]
paths:
- ".pre-commit-config.yaml"
# on demand
workflow_dispatch:
jobs:
auto-update:
uses: XRPLF/actions/.github/workflows/pre-commit-autoupdate.yml@afbcbdafbe0ce5439492fb87eda6441371086386
with:
sign_commit: true
committer: "Clio CI <skuznetsov@ripple.com>"
reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
secrets:
GPG_PRIVATE_KEY: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
GPG_PASSPHRASE: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.x
- run: pip install pre-commit
- run: pre-commit autoupdate --freeze
- run: pre-commit run --all-files || true
- uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0
if: github.event_name != 'pull_request'
with:
gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
git_user_signingkey: true
git_commit_gpgsign: true
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
if: always()
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
with:
commit-message: "style: Update pre-commit hooks"
committer: Clio CI <skuznetsov@ripple.com>
branch: update/pre-commit-hooks
branch-suffix: timestamp
delete-branch: true
title: "style: Update pre-commit hooks"
body: Update versions of pre-commit hooks to latest version.
reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"


@@ -8,7 +8,20 @@ on:
jobs:
run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@afbcbdafbe0ce5439492fb87eda6441371086386
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:latest
steps:
- name: Checkout Repo ⚡️
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
- name: Run pre-commit ✅
run: pre-commit run --all-files


@@ -29,7 +29,7 @@ jobs:
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
uses: ./.github/workflows/build_and_test.yml
with:
@@ -42,7 +42,6 @@ jobs:
run_integration_tests: true
upload_clio_server: true
disable_cache: true
expected_version: ${{ github.event_name == 'push' && github.ref_name || '' }}
release:
needs: build-and-test
@@ -53,6 +52,6 @@ jobs:
title: "${{ github.ref_name}}"
version: "${{ github.ref_name }}"
header: >
${{ contains(github.ref_name, '-') && '> **Note:** Please remember that this is a release candidate and it is not recommended for production use.' || '' }}
generate_changelog: ${{ !contains(github.ref_name, '-') }}
# Introducing Clio version ${{ github.ref_name }}
generate_changelog: true
draft: true


@@ -42,7 +42,7 @@ jobs:
release:
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:latest
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
@@ -56,11 +56,11 @@ jobs:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v4
with:
path: release_artifacts
pattern: clio_server_*
@@ -68,24 +68,34 @@ jobs:
- name: Create release notes
shell: bash
run: |
echo "# Release notes" > "${RUNNER_TEMP}/release_notes.md"
echo "" >> "${RUNNER_TEMP}/release_notes.md"
printf '%s\n' "${{ inputs.header }}" >> "${RUNNER_TEMP}/release_notes.md"
printf '%s\n' "${{ inputs.header }}" > "${RUNNER_TEMP}/release_notes.md"
- name: Generate changelog
shell: bash
if: ${{ inputs.generate_changelog }}
run: |
LAST_TAG="$(gh release view --json tagName -q .tagName --repo XRPLF/clio)"
LAST_TAG="$(gh release view --json tagName -q .tagName)"
LAST_TAG_COMMIT="$(git rev-parse $LAST_TAG)"
BASE_COMMIT="$(git merge-base HEAD $LAST_TAG_COMMIT)"
git-cliff "${BASE_COMMIT}..HEAD" --ignore-tags "nightly|-b|-rc"
git-cliff "${BASE_COMMIT}..HEAD" --ignore-tags "nightly|-b"
cat CHANGELOG.md >> "${RUNNER_TEMP}/release_notes.md"
- name: Prepare release artifacts
shell: bash
run: .github/scripts/prepare-release-artifacts.sh release_artifacts
- name: Append sha256 checksums
shell: bash
working-directory: release_artifacts
run: |
{
echo '## SHA256 checksums'
echo
echo '```'
cat *.sha256sum
echo '```'
} >> "${RUNNER_TEMP}/release_notes.md"
- name: Upload release notes
uses: actions/upload-artifact@v4
with:
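Since the checksums are both embedded in the release notes and shipped as *.sha256sum files, a downloader can verify an artifact offline; for example (the exact .sha256sum naming comes from prepare-release-artifacts.sh and is an assumption here):

shasum -a 256 -c clio_server_Linux_Release_gcc.sha256sum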


@@ -44,7 +44,7 @@ jobs:
uses: ./.github/workflows/build_and_test.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
disable_cache: true
conan_profile: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
build_type: ${{ matrix.build_type }}


@@ -46,15 +46,15 @@ jobs:
SANITIZER_IGNORE_ERRORS: ${{ endsWith(inputs.conan_profile, '.asan') || endsWith(inputs.conan_profile, '.tsan') }}
steps:
- name: Cleanup workspace
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v4
with:
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
@@ -120,9 +120,9 @@ jobs:
--health-retries 5
steps:
- name: Cleanup workspace
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
- name: Spin up scylladb
if: ${{ runner.os == 'macOS' }}
@@ -144,7 +144,7 @@ jobs:
sleep 5
done
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v4
with:
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}


@@ -30,8 +30,8 @@ concurrency:
env:
CLANG_MAJOR_VERSION: 19
GCC_MAJOR_VERSION: 15
GCC_VERSION: 15.2.0
GCC_MAJOR_VERSION: 14
GCC_VERSION: 14.3.0
jobs:
repo:
@@ -56,7 +56,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/gcc/**"
@@ -69,7 +69,7 @@ jobs:
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc
${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_gcc' || '' }}
rippleci/clio_gcc
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/compilers/gcc
tags: |
@@ -81,7 +81,7 @@ jobs:
build_args: |
GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
GCC_VERSION=${{ env.GCC_VERSION }}
dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_gcc' || '' }}
dockerhub_repo: rippleci/clio_gcc
dockerhub_description: GCC compiler for XRPLF/clio.
gcc-arm64:
@@ -94,7 +94,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/gcc/**"
@@ -107,7 +107,7 @@ jobs:
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc
${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_gcc' || '' }}
rippleci/clio_gcc
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/compilers/gcc
tags: |
@@ -119,7 +119,7 @@ jobs:
build_args: |
GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
GCC_VERSION=${{ env.GCC_VERSION }}
dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_gcc' || '' }}
dockerhub_repo: rippleci/clio_gcc
dockerhub_description: GCC compiler for XRPLF/clio.
gcc-merge:
@@ -132,7 +132,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/gcc/**"
@@ -141,15 +141,15 @@ jobs:
- name: Login to GitHub Container Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub
if: github.repository_owner == 'XRPLF' && github.event_name != 'pull_request'
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_PW }}
@@ -157,21 +157,15 @@ jobs:
- name: Create and push multi-arch manifest
if: github.event_name != 'pull_request' && steps.changed-files.outputs.any_changed == 'true'
run: |
push_image() {
image=$1
for image in ${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc rippleci/clio_gcc; do
docker buildx imagetools create \
-t $image:latest \
-t $image:${{ env.GCC_MAJOR_VERSION }} \
-t $image:${{ env.GCC_VERSION }} \
-t $image:${{ github.sha }} \
$image:arm64-latest \
$image:amd64-latest
}
push_image ${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc
if [[ ${{ github.repository_owner }} == 'XRPLF' ]]; then
push_image rippleci/clio_clang
fi
-t $image:latest \
-t $image:${{ env.GCC_MAJOR_VERSION }} \
-t $image:${{ env.GCC_VERSION }} \
-t $image:${{ github.sha }} \
$image:arm64-latest \
$image:amd64-latest
done
clang:
name: Build and push Clang docker image
@@ -183,7 +177,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/clang/**"
@@ -196,7 +190,7 @@ jobs:
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-clang
${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_clang' || '' }}
rippleci/clio_clang
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/compilers/clang
tags: |
@@ -206,7 +200,7 @@ jobs:
platforms: linux/amd64,linux/arm64
build_args: |
CLANG_MAJOR_VERSION=${{ env.CLANG_MAJOR_VERSION }}
dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_clang' || '' }}
dockerhub_repo: rippleci/clio_clang
dockerhub_description: Clang compiler for XRPLF/clio.
tools-amd64:
@@ -219,7 +213,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/tools/**"
@@ -250,7 +244,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/tools/**"
@@ -281,7 +275,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/tools/**"
@@ -290,7 +284,7 @@ jobs:
- name: Login to GitHub Container Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -321,7 +315,7 @@ jobs:
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-ci
${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_ci' || '' }}
rippleci/clio_ci
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/ci
tags: |
@@ -334,5 +328,5 @@ jobs:
CLANG_MAJOR_VERSION=${{ env.CLANG_MAJOR_VERSION }}
GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
GCC_VERSION=${{ env.GCC_VERSION }}
dockerhub_repo: ${{ github.repository_owner == 'XRPLF' && 'rippleci/clio_ci' || '' }}
dockerhub_repo: rippleci/clio_ci
dockerhub_description: CI image for XRPLF/clio.


@@ -10,18 +10,15 @@ on:
required: false
default: false
type: boolean
force_upload:
description: "Force upload of all dependencies"
required: false
default: false
type: boolean
pull_request:
branches: [develop]
paths:
- .github/workflows/upload_conan_deps.yml
- .github/actions/conan/action.yml
- .github/actions/generate/action.yml
- .github/actions/prepare_runner/action.yml
- ".github/scripts/conan/**"
- "!.github/scripts/conan/apple-clang-local.profile"
- conanfile.py
- conan.lock
@@ -30,8 +27,10 @@ on:
paths:
- .github/workflows/upload_conan_deps.yml
- .github/actions/conan/action.yml
- .github/actions/generate/action.yml
- .github/actions/prepare_runner/action.yml
- ".github/scripts/conan/**"
- "!.github/scripts/conan/apple-clang-local.profile"
- conanfile.py
- conan.lock
@@ -60,7 +59,6 @@ jobs:
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
runs-on: ${{ matrix.os }}
container: ${{ matrix.container != '' && fromJson(matrix.container) || null }}
@@ -72,7 +70,7 @@ jobs:
- uses: actions/checkout@v4
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
@@ -84,8 +82,8 @@ jobs:
- name: Show conan profile
run: conan profile show --profile:all ${{ env.CONAN_PROFILE }}
- name: Run conan
uses: ./.github/actions/conan
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ env.CONAN_PROFILE }}
# We check that everything builds fine from source on scheduled runs
@@ -94,9 +92,9 @@ jobs:
build_type: ${{ matrix.build_type }}
- name: Login to Conan
if: github.repository_owner == 'XRPLF' && github.event_name != 'pull_request'
run: conan remote login -p ${{ secrets.CONAN_PASSWORD }} xrplf ${{ secrets.CONAN_USERNAME }}
if: github.event_name != 'pull_request'
run: conan remote login -p ${{ secrets.CONAN_PASSWORD }} ripple ${{ secrets.CONAN_USERNAME }}
- name: Upload Conan packages
if: github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule'
run: conan upload "*" -r=xrplf --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
if: github.event_name != 'pull_request' && github.event_name != 'schedule'
run: conan upload "*" -r=ripple --confirm


@@ -18,14 +18,14 @@ jobs:
fetch-depth: 0
- name: Download report artifact
uses: actions/download-artifact@v5
uses: actions/download-artifact@v4
with:
name: coverage-report.xml
path: build
- name: Upload coverage report
if: ${{ hashFiles('build/coverage_report.xml') != '' }}
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
files: build/coverage_report.xml
fail_ci_if_error: true

.gitignore

@@ -9,3 +9,4 @@
.sanitizer-report
CMakeUserPresets.json
config.json
src/util/build/Build.cpp


@@ -16,7 +16,7 @@ exclude: ^(docs/doxygen-awesome-theme/|conan\.lock$)
repos:
# `pre-commit sample-config` default hooks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
rev: cef0300fd0fc4d2a87a85fa2093c6b283ea36f4b # frozen: v5.0.0
hooks:
- id: check-added-large-files
- id: check-executables-have-shebangs
@@ -55,6 +55,12 @@ repos:
--ignore-words=pre-commit-hooks/codespell_ignore.txt,
]
- repo: https://github.com/trufflesecurity/trufflehog
rev: 6641d4ba5b684fffe195b9820345de1bf19f3181 # frozen: v3.89.2
hooks:
- id: trufflehog
entry: trufflehog git file://. --since-commit HEAD --no-verification --fail
# Running some C++ hooks before clang-format
# to ensure that the style is consistent.
- repo: local
@@ -80,7 +86,7 @@ repos:
language: script
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: 86fdcc9bd34d6afbbd29358b97436c8ffe3aa3b2 # frozen: v21.1.0
rev: 6b9072cd80691b1b48d80046d884409fb1d962d1 # frozen: v20.1.7
hooks:
- id: clang-format
args: [--style=file]
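Hooks pinned by commit SHA (`rev: <sha> # frozen: vX.Y.Z`) are exactly what the autoupdate workflow earlier refreshes via `pre-commit autoupdate --freeze`. The newly added secret scanner can be exercised on its own with:

pre-commit run trufflehog --all-files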


@@ -1,5 +1,7 @@
cmake_minimum_required(VERSION 3.20)
set(CMAKE_PROJECT_INCLUDE_BEFORE ${CMAKE_CURRENT_SOURCE_DIR}/cmake/ClioVersion.cmake)
project(clio VERSION ${CLIO_VERSION} HOMEPAGE_URL "https://github.com/XRPLF/clio"
DESCRIPTION "An XRP Ledger API Server"
)
@@ -11,7 +13,7 @@ option(integration_tests "Build integration tests" FALSE)
option(benchmark "Build benchmarks" FALSE)
option(docs "Generate doxygen docs" FALSE)
option(coverage "Build test coverage report" FALSE)
option(package "Create distribution packages" FALSE)
option(packaging "Create distribution packages" FALSE)
option(lint "Run clang-tidy checks during compilation" FALSE)
option(static "Statically linked Clio" FALSE)
option(snapshot "Build snapshot tool" FALSE)
@@ -29,7 +31,6 @@ set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH})
include(Ccache)
include(CheckCXXCompilerFlag)
include(ClangTidy)
include(Linker)
add_library(clio_options INTERFACE)
target_compile_features(clio_options INTERFACE cxx_std_23) # Clio needs c++23 but deps can remain c++20 for now
@@ -39,6 +40,11 @@ if (verbose)
set(CMAKE_VERBOSE_MAKEFILE TRUE)
endif ()
if (packaging)
add_definitions(-DPKG=1)
target_compile_definitions(clio_options INTERFACE PKG=1)
endif ()
# Clio tweaks and checks
include(CheckCompiler)
include(Settings)
@@ -52,7 +58,6 @@ include(deps/Threads)
include(deps/libfmt)
include(deps/cassandra)
include(deps/libbacktrace)
include(deps/spdlog)
add_subdirectory(src)
add_subdirectory(tests)
@@ -88,8 +93,8 @@ if (docs)
endif ()
include(install/install)
if (package)
include(ClioPackage)
if (packaging)
include(cmake/packaging.cmake) # This file exists only in build runner
endif ()
if (snapshot)

View File

@@ -7,12 +7,10 @@ target_sources(
Playground.cpp
# ExecutionContext
util/async/ExecutionContextBenchmarks.cpp
# Logger
util/log/LoggerBenchmark.cpp
)
include(deps/gbench)
target_include_directories(clio_benchmark PRIVATE .)
target_link_libraries(clio_benchmark PUBLIC clio_util benchmark::benchmark_main spdlog::spdlog)
target_link_libraries(clio_benchmark PUBLIC clio_etl benchmark::benchmark_main)
set_target_properties(clio_benchmark PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})


@@ -1,149 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include "util/config/ConfigDefinition.hpp"
#include "util/log/Logger.hpp"
#include "util/prometheus/Prometheus.hpp"
#include <benchmark/benchmark.h>
#include <fmt/format.h>
#include <spdlog/async.h>
#include <spdlog/async_logger.h>
#include <spdlog/spdlog.h>
#include <barrier>
#include <chrono>
#include <cstddef>
#include <filesystem>
#include <memory>
#include <string>
#include <thread>
#include <utility>
#include <vector>
using namespace util;
static constexpr auto kLOG_FORMAT = "%Y-%m-%d %H:%M:%S.%f %^%3!l:%n%$ - %v";
struct BenchmarkLoggingInitializer {
[[nodiscard]] static std::shared_ptr<spdlog::sinks::sink>
createFileSink(LogService::FileLoggingParams const& params)
{
return LogService::createFileSink(params, kLOG_FORMAT);
}
static Logger
getLogger(std::shared_ptr<spdlog::logger> logger)
{
return Logger(std::move(logger));
}
};
namespace {
std::string
uniqueLogDir()
{
auto const epochTime = std::chrono::high_resolution_clock::now().time_since_epoch();
auto const tmpDir = std::filesystem::temp_directory_path();
std::string const dirName =
fmt::format("logs_{}", std::chrono::duration_cast<std::chrono::microseconds>(epochTime).count());
return tmpDir / "clio_benchmark" / dirName;
}
} // anonymous namespace
static void
benchmarkConcurrentFileLogging(benchmark::State& state)
{
auto const numThreads = static_cast<size_t>(state.range(0));
auto const messagesPerThread = static_cast<size_t>(state.range(1));
PrometheusService::init(config::getClioConfig());
auto const logDir = uniqueLogDir();
for (auto _ : state) {
state.PauseTiming();
std::filesystem::create_directories(logDir);
static constexpr size_t kQUEUE_SIZE = 8192;
static constexpr size_t kTHREAD_COUNT = 1;
spdlog::init_thread_pool(kQUEUE_SIZE, kTHREAD_COUNT);
auto fileSink = BenchmarkLoggingInitializer::createFileSink({
.logDir = logDir,
.rotationSizeMB = 5,
.dirMaxFiles = 25,
});
std::vector<std::thread> threads;
threads.reserve(numThreads);
std::chrono::high_resolution_clock::time_point start;
std::barrier barrier(numThreads, [&state, &start]() {
state.ResumeTiming();
start = std::chrono::high_resolution_clock::now();
});
for (size_t threadNum = 0; threadNum < numThreads; ++threadNum) {
threads.emplace_back([threadNum, messagesPerThread, fileSink, &barrier]() {
std::string const channel = fmt::format("Thread_{}", threadNum);
auto logger = std::make_shared<spdlog::async_logger>(
channel, fileSink, spdlog::thread_pool(), spdlog::async_overflow_policy::block
);
spdlog::register_logger(logger);
Logger const threadLogger = BenchmarkLoggingInitializer::getLogger(std::move(logger));
barrier.arrive_and_wait();
for (size_t messageNum = 0; messageNum < messagesPerThread; ++messageNum) {
LOG(threadLogger.info()) << "Test log message #" << messageNum;
}
});
}
for (auto& thread : threads) {
thread.join();
}
spdlog::shutdown();
auto const end = std::chrono::high_resolution_clock::now();
state.SetIterationTime(std::chrono::duration_cast<std::chrono::duration<double>>(end - start).count());
std::filesystem::remove_all(logDir);
}
auto const totalMessages = numThreads * messagesPerThread;
state.counters["TotalMessagesRate"] = benchmark::Counter(totalMessages, benchmark::Counter::kIsRate);
state.counters["Threads"] = numThreads;
state.counters["MessagesPerThread"] = messagesPerThread;
}
// One line of log message is around 110 bytes
// So, 100K messages is around 10.5MB
BENCHMARK(benchmarkConcurrentFileLogging)
->ArgsProduct({
// Number of threads
{1, 2, 4, 8},
// Messages per thread
{10'000, 100'000, 500'000, 1'000'000, 10'000'000},
})
->UseManualTime()
->Unit(benchmark::kMillisecond);
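Like any Google Benchmark binary, the removed logger benchmark could be run selectively; a typical invocation against the clio_benchmark target defined above:

./build/clio_benchmark \
  --benchmark_filter=benchmarkConcurrentFileLogging \
  --benchmark_min_time=1x   # run each case once; flag available in benchmark >= 1.8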


@@ -23,22 +23,19 @@
namespace util::build {
#ifndef CLIO_VERSION
#error "CLIO_VERSION must be defined"
#endif
static constexpr char kVERSION_STRING[] = CLIO_VERSION;
static constexpr char versionString[] = "@CLIO_VERSION@"; // NOLINT(readability-identifier-naming)
std::string const&
getClioVersionString()
{
static std::string const value = kVERSION_STRING; // NOLINT(readability-identifier-naming)
static std::string const value = versionString; // NOLINT(readability-identifier-naming)
return value;
}
std::string const&
getClioFullVersionString()
{
static std::string const value = "clio-" + getClioVersionString(); // NOLINT(readability-identifier-naming)
static std::string const value = "clio-" + getClioVersionString(); // NOLINT(readability-identifier-naming)
return value;
}


@@ -1,8 +0,0 @@
include("${CMAKE_CURRENT_LIST_DIR}/ClioVersion.cmake")
set(CPACK_PACKAGING_INSTALL_PREFIX "/opt/clio")
set(CPACK_PACKAGE_VERSION "${CLIO_VERSION}")
set(CPACK_STRIP_FILES TRUE)
include(pkg/deb)
include(CPack)


@@ -1,3 +1,7 @@
#[===================================================================[
write version to source
#]===================================================================]
find_package(Git REQUIRED)
set(GIT_COMMAND describe --tags --exact-match)
@@ -6,17 +10,15 @@ execute_process(
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE TAG
RESULT_VARIABLE RC
ERROR_VARIABLE ERR
OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_STRIP_TRAILING_WHITESPACE
OUTPUT_STRIP_TRAILING_WHITESPACE
)
if (RC EQUAL 0)
message(STATUS "Found tag '${TAG}' in git. Will use it as Clio version")
# if we are on a tag, use the tag name
set(CLIO_VERSION "${TAG}")
set(DOC_CLIO_VERSION "${TAG}")
else ()
message(STATUS "Error finding tag in git: ${ERR}")
message(STATUS "Will use 'YYYYMMDDHMS-<branch>-<git-rev>' as Clio version")
# if not, use YYYYMMDDHMS-<branch>-<git-rev>
set(GIT_COMMAND show -s --date=format:%Y%m%d%H%M%S --format=%cd)
execute_process(
@@ -45,3 +47,5 @@ if (CMAKE_BUILD_TYPE MATCHES Debug)
endif ()
message(STATUS "Build version: ${CLIO_VERSION}")
configure_file(${CMAKE_CURRENT_LIST_DIR}/Build.cpp.in ${CMAKE_CURRENT_LIST_DIR}/../src/util/build/Build.cpp)
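The same derivation can be reproduced in a shell; a sketch of the CMake logic above, where the branch/rev formatting is an assumption based on the 'YYYYMMDDHMS-<branch>-<git-rev>' comment:

if TAG=$(git describe --tags --exact-match 2>/dev/null); then
  CLIO_VERSION="$TAG"   # on a tagged commit, the tag is the version
else
  CLIO_VERSION="$(git show -s --date=format:%Y%m%d%H%M%S --format=%cd)-$(git branch --show-current)-$(git rev-parse --short HEAD)"
fi
echo "$CLIO_VERSION"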


@@ -1,11 +0,0 @@
if (DEFINED CMAKE_LINKER_TYPE)
message(STATUS "Custom linker is already set: ${CMAKE_LINKER_TYPE}")
return()
endif ()
find_program(MOLD_PATH mold)
if (MOLD_PATH AND CMAKE_SYSTEM_NAME STREQUAL "Linux")
message(STATUS "Using Mold linker: ${MOLD_PATH}")
set(CMAKE_LINKER_TYPE MOLD)
endif ()
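CMAKE_LINKER_TYPE is honored by CMake 3.29 and newer, so the removed module amounted to an automatic form of the manual switch (shown for illustration):

cmake -B build -S . -G Ninja -DCMAKE_LINKER_TYPE=MOLD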


@@ -1,5 +0,0 @@
find_package(spdlog REQUIRED)
if (NOT TARGET spdlog::spdlog)
message(FATAL_ERROR "spdlog::spdlog target not found")
endif ()


@@ -0,0 +1,17 @@
[Unit]
Description=Clio XRPL API server
Documentation=https://github.com/XRPLF/clio.git
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
ExecStart=@CLIO_INSTALL_DIR@/bin/clio_server @CLIO_INSTALL_DIR@/etc/config.json
Restart=on-failure
User=clio
Group=clio
LimitNOFILE=65536
[Install]
WantedBy=multi-user.target
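After installation (configure_file fills in @CLIO_INSTALL_DIR@, and the install rules below place the unit in /lib/systemd/system), the service is managed as usual:

sudo systemctl daemon-reload
sudo systemctl enable --now clio   # unit name from clio.service
journalctl -u clio -f              # follow server logs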


@@ -1,13 +1,13 @@
set(CLIO_INSTALL_DIR "/opt/clio")
set(CMAKE_INSTALL_PREFIX "${CLIO_INSTALL_DIR}" CACHE PATH "Install prefix" FORCE)
set(CMAKE_INSTALL_PREFIX ${CLIO_INSTALL_DIR})
set(CPACK_PACKAGING_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
include(GNUInstallDirs)
install(TARGETS clio_server DESTINATION "${CMAKE_INSTALL_BINDIR}")
install(TARGETS clio_server DESTINATION bin)
file(READ docs/examples/config/example-config.json config)
string(REGEX REPLACE "./clio_log" "/var/log/clio/" config "${config}")
file(WRITE ${CMAKE_BINARY_DIR}/install-config.json "${config}")
install(FILES ${CMAKE_BINARY_DIR}/install-config.json DESTINATION etc RENAME config.json)
configure_file("${CMAKE_SOURCE_DIR}/cmake/install/clio.service.in" "${CMAKE_BINARY_DIR}/clio.service")
install(FILES "${CMAKE_BINARY_DIR}/clio.service" DESTINATION /lib/systemd/system)


@@ -1,12 +0,0 @@
set(CPACK_GENERATOR "DEB")
set(CPACK_DEBIAN_PACKAGE_HOMEPAGE "https://github.com/XRPLF/clio")
set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Ripple Labs Inc. <support@ripple.com>")
set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT)
set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
set(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA ${CMAKE_SOURCE_DIR}/cmake/pkg/postinst)
# We must replace "-" with "~" otherwise dpkg will sort "X.Y.Z-b1" as greater than "X.Y.Z"
string(REPLACE "-" "~" git "${CPACK_PACKAGE_VERSION}")


@@ -1,46 +0,0 @@
#!/bin/sh
set -e
USER_NAME=clio
GROUP_NAME="${USER_NAME}"
CLIO_EXECUTABLE="clio_server"
CLIO_PREFIX="/opt/clio"
CLIO_BIN="$CLIO_PREFIX/bin/${CLIO_EXECUTABLE}"
CLIO_CONFIG="$CLIO_PREFIX/etc/config.json"
case "$1" in
configure)
if ! id -u "$USER_NAME" >/dev/null 2>&1; then
# Users who should not have a home directory should have their home directory set to /nonexistent
# https://www.debian.org/doc/debian-policy/ch-opersys.html#non-existent-home-directories
useradd \
--system \
--home-dir /nonexistent \
--no-create-home \
--shell /usr/sbin/nologin \
--comment "system user for ${CLIO_EXECUTABLE}" \
--user-group \
${USER_NAME}
fi
install -d -o "$USER_NAME" -g "$GROUP_NAME" /var/log/clio
if [ -f "$CLIO_CONFIG" ]; then
chown "$USER_NAME:$GROUP_NAME" "$CLIO_CONFIG"
fi
chown -R "$USER_NAME:$GROUP_NAME" "$CLIO_PREFIX"
ln -sf "$CLIO_BIN" "/usr/bin/${CLIO_EXECUTABLE}"
;;
abort-upgrade|abort-remove|abort-deconfigure)
;;
*)
echo "postinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
exit 0
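dpkg runs this script automatically with the `configure` argument; exercising a locally built package end to end might look like (the .deb filename is hypothetical):

sudo dpkg -i build/clio_2.5.0_amd64.deb   # triggers postinst configure
id clio                                   # system user created above
ls -l /usr/bin/clio_server                # symlink installed by the script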


@@ -1,60 +1,56 @@
{
"version": "0.5",
"requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
"xrpl/2.6.0#57b93b5a6c99dc8511fccb3bb5390352%1756820296.642",
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
"spdlog/1.15.3#3ca0e9e6b83af4d0151e26541d140c86%1754401846.61",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
"rapidjson/cci.20220822#1b9d8c2256876a154172dc5cfbe447c6%1754325007.656",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
"openssl/1.1.1w#a8f0792d7c5121b954578a7149d23e03%1756223730.729",
"nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
"minizip/1.2.13#9e87d57804bd372d6d1e32b1871517a3%1754325004.374",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
"libuv/1.46.0#dc28c1f653fa197f00db5b577a6f6011%1754325003.592",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
"libarchive/3.8.1#5cf685686322e906cb42706ab7e099a8%1756234256.696",
"http_parser/2.9.4#98d91690d6fd021e9e624218a85d9d97%1754325001.385",
"gtest/1.14.0#f8f0757a574a8dd747d16af62d6eb1b7%1754325000.842",
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1756234248.958",
"fmt/11.2.0#579bb2cdf4a7607621beea4eb4651e0f%1754324999.086",
"doctest/2.4.11#a4211dfc329a16ba9f280f9574025659%1756234220.819",
"date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
"cassandra-cpp-driver/2.17.0#e50919efac8418c26be6671fd702540a%1754324997.363",
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
"bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1756234261.716",
"boost/1.83.0#5d975011d65b51abb2d2f6eb8386b368%1754325043.336",
"benchmark/1.9.4#ce4403f7a24d3e1f907cd9da4b678be4%1754578869.672",
"abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1756234220.907"
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1752006674.465",
"xxhash/0.8.2#7856c968c985b2981b707ee8f2413b2b%1752006674.334",
"xrpl/2.5.0#7880d1696f11fceb1d498570f1a184c8%1752006708.218",
"sqlite3/3.47.0#7a0904fd061f5f8a2366c294f9387830%1752006674.338",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1752006674.465",
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1752006674.077",
"rapidjson/cci.20220822#1b9d8c2256876a154172dc5cfbe447c6%1752006673.227",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1752006673.172",
"openssl/1.1.1v#216374e4fb5b2e0f5ab1fb6f27b5b434%1752006673.069",
"nudb/2.0.8#63990d3e517038e04bf529eb8167f69f%1752006673.862",
"minizip/1.2.13#9e87d57804bd372d6d1e32b1871517a3%1752006672.983",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1752006672.825",
"libuv/1.46.0#78565d142ac7102776256328a26cdf60%1752006672.827",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1752006672.826",
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1752006672.822",
"libarchive/3.7.6#e0453864b2a4d225f06b3304903cb2b7%1752006672.917",
"http_parser/2.9.4#98d91690d6fd021e9e624218a85d9d97%1752006672.658",
"gtest/1.14.0#f8f0757a574a8dd747d16af62d6eb1b7%1752006671.555",
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1752006671.777",
"fmt/11.2.0#579bb2cdf4a7607621beea4eb4651e0f%1752006671.557",
"date/3.0.3#cf28fe9c0aab99fe12da08aa42df65e1%1752006671.553",
"cassandra-cpp-driver/2.17.0#e50919efac8418c26be6671fd702540a%1752006671.654",
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1752006671.554",
"bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1752006671.549",
"boost/1.83.0#5bcb2a14a35875e328bf312e080d3562%1752006671.557",
"benchmark/1.8.3#1a2ce62c99e2b3feaa57b1f0c15a8c46%1752006671.408",
"abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1752006671.555"
],
"build_requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
"cmake/3.31.8#dde3bde00bb843687e55aea5afa0e220%1756234232.89",
"b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28"
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1752006674.465",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1752006673.172",
"protobuf/3.21.9#64ce20e1d9ea24f3d6c504015d5f6fa8%1752006673.173",
"cmake/3.31.7#57c3e118bcf267552c0ea3f8bee1e7d5%1752006671.64",
"b2/5.3.2#7b5fabfe7088ae933fb3e78302343ea0%1752006671.407"
],
"python_requires": [],
"overrides": {
"boost/1.83.0": [
null,
"boost/1.83.0"
"boost/1.83.0#5bcb2a14a35875e328bf312e080d3562"
],
"protobuf/3.21.12": [
"protobuf/3.21.9": [
null,
"protobuf/3.21.12"
],
"boost/1.86.0": [
"boost/1.83.0#5d975011d65b51abb2d2f6eb8386b368"
],
"lz4/1.9.4": [
"lz4/1.10.0"
],
"sqlite3/3.44.2": [
"sqlite3/3.49.1"
"sqlite3/3.47.0"
]
},
"config_requires": []

View File

@@ -9,7 +9,19 @@ class ClioConan(ConanFile):
url = 'https://github.com/xrplf/clio'
description = 'Clio RPC server'
settings = 'os', 'compiler', 'build_type', 'arch'
options = {}
options = {
'static': [True, False], # static linkage
'verbose': [True, False],
'tests': [True, False], # build unit tests; create `clio_tests` binary
'integration_tests': [True, False], # build integration tests; create `clio_integration_tests` binary
'benchmark': [True, False], # build benchmarks; create `clio_benchmarks` binary
'docs': [True, False], # doxygen API docs; create custom target 'docs'
'packaging': [True, False], # create distribution packages
'coverage': [True, False], # build for test coverage report; create custom target `clio_tests-ccov`
'lint': [True, False], # run clang-tidy checks during compilation
'snapshot': [True, False], # build export/import snapshot tool
'time_trace': [True, False] # build using -ftime-trace to create compiler trace reports
}
requires = [
'boost/1.83.0',
@@ -17,14 +29,25 @@ class ClioConan(ConanFile):
'fmt/11.2.0',
'protobuf/3.21.12',
'grpc/1.50.1',
'openssl/1.1.1w',
'xrpl/2.6.0',
'openssl/1.1.1v',
'xrpl/2.5.0',
'zlib/1.3.1',
'libbacktrace/cci.20210118',
'spdlog/1.15.3',
'libbacktrace/cci.20210118'
]
default_options = {
'static': False,
'verbose': False,
'tests': False,
'integration_tests': False,
'benchmark': False,
'packaging': False,
'coverage': False,
'lint': False,
'docs': False,
'snapshot': False,
'time_trace': False,
'xrpl/*:tests': False,
'xrpl/*:rocksdb': False,
'cassandra-cpp-driver/*:shared': False,
@@ -45,8 +68,10 @@ class ClioConan(ConanFile):
)
def requirements(self):
self.requires('gtest/1.14.0')
self.requires('benchmark/1.9.4')
if self.options.tests or self.options.integration_tests:
self.requires('gtest/1.14.0')
if self.options.benchmark:
self.requires('benchmark/1.8.3')
def configure(self):
if self.settings.compiler == 'apple-clang':
@@ -62,6 +87,8 @@ class ClioConan(ConanFile):
def generate(self):
tc = CMakeToolchain(self)
for option_name, option_value in self.options.items():
tc.variables[option_name] = option_value
tc.generate()
def build(self):

View File

@@ -20,60 +20,43 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
USER root
WORKDIR /root
# Install common tools and dependencies
ARG LLVM_TOOLS_VERSION=20
# Add repositories
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
curl \
dpkg-dev \
file \
git \
git-lfs \
gnupg \
graphviz \
jq \
# libgmp, libmpfr and libncurses are gdb dependencies
libgmp-dev \
libmpfr-dev \
libncurses-dev \
make \
ninja-build \
wget \
zip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install Python tools
ARG PYTHON_VERSION=3.13
RUN add-apt-repository ppa:deadsnakes/ppa \
&& apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
python${PYTHON_VERSION} \
python${PYTHON_VERSION}-venv \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl -sS https://bootstrap.pypa.io/get-pip.py | python${PYTHON_VERSION}
# Create a virtual environment for python tools
RUN python${PYTHON_VERSION} -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"
RUN pip install -q --no-cache-dir \
cmake \
conan==2.20.1 \
gcovr \
pre-commit
# Install LLVM tools
ARG LLVM_TOOLS_VERSION=20
RUN echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
&& echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
&& wget --progress=dot:giga -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
# Install packages
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
clang-tidy-${LLVM_TOOLS_VERSION} \
clang-tools-${LLVM_TOOLS_VERSION} \
git \
git-lfs \
graphviz \
jq \
make \
ninja-build \
python3 \
python3-pip \
zip \
&& pip3 install -q --upgrade --no-cache-dir pip \
&& pip3 install -q --no-cache-dir \
# TODO: Remove this once we switch to newer Ubuntu base image
# lxml 6.0.0 is not compatible with our image
'lxml<6.0.0' \
\
cmake==3.31.6 \
conan==2.17.0 \
gcovr \
pre-commit \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
@@ -109,13 +92,12 @@ COPY --from=clio-tools \
/usr/local/bin/ClangBuildAnalyzer \
/usr/local/bin/git-cliff \
/usr/local/bin/gh \
/usr/local/bin/gdb \
/usr/local/bin/
WORKDIR /root
# Setup conan
RUN conan remote add --index 0 xrplf https://conan.ripplex.io
RUN conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev
WORKDIR /root/.conan2
COPY conan/global.conf ./global.conf

View File

@@ -8,14 +8,12 @@ The image is based on Ubuntu 20.04 and contains:
- ccache 4.11.3
- Clang 19
- ClangBuildAnalyzer 1.6.0
- Conan 2.20.1
- Conan 2.17.0
- Doxygen 1.12
- GCC 15.2.0
- GDB 16.3
- GCC 14.3.0
- gh 2.74
- git-cliff 2.9.1
- mold 2.40.1
- Python 3.13
- and some other useful tools
Conan is set up to build Clio without any additional steps.

View File

@@ -4,8 +4,8 @@ build_type=Release
compiler=gcc
compiler.cppstd=20
compiler.libcxx=libstdc++11
compiler.version=15
compiler.version=14
os=Linux
[conf]
tools.build:compiler_executables={"c": "/usr/bin/gcc-15", "cpp": "/usr/bin/g++-15"}
tools.build:compiler_executables={"c": "/usr/bin/gcc-14", "cpp": "/usr/bin/g++-14"}

View File

@@ -1,20 +1,18 @@
FROM ubuntu:22.04
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
libatomic1 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -g 10001 clio \
&& useradd -u 10000 -g 10001 -s /bin/bash clio
COPY ./clio_server /opt/clio/bin/clio_server
RUN ln -s /opt/clio/bin/clio_server /usr/local/bin/clio_server \
&& mkdir -p /opt/clio/etc/ \
&& mkdir -p /opt/clio/log/ \
&& chown clio:clio /opt/clio/log
&& groupadd -g 10001 clio \
&& useradd -u 10000 -g 10001 -s /bin/bash clio \
&& chown clio:clio /opt/clio/log \
&& apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
libatomic1 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
USER clio
ENTRYPOINT ["/opt/clio/bin/clio_server"]

View File

@@ -13,8 +13,8 @@ Clio repository provides an [example](https://github.com/XRPLF/clio/blob/develop
Config file recommendations:
- Set `log.enable_console` to `false` if you want to avoid logs being written to `stdout`.
- Set `log.directory` to `/opt/clio/log` to store logs in a volume.
- Set `log_to_console` to `false` if you want to avoid logs being written to `stdout`.
- Set `log_directory` to `/opt/clio/log` to store logs in a volume.
## Usage

View File

@@ -18,7 +18,7 @@ RUN apt-get update \
ARG CLANG_MAJOR_VERSION=invalid
# Bump this version to force rebuild of the image
ARG BUILD_VERSION=1
ARG BUILD_VERSION=0
RUN wget --progress=dot:giga https://apt.llvm.org/llvm.sh \
&& chmod +x llvm.sh \

View File

@@ -8,7 +8,7 @@ ARG UBUNTU_VERSION
ARG GCC_MAJOR_VERSION
ARG BUILD_VERSION=1
ARG BUILD_VERSION=0
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH

View File

@@ -1,4 +1,4 @@
Package: gcc-15-ubuntu-UBUNTUVERSION
Package: gcc-14-ubuntu-UBUNTUVERSION
Version: VERSION
Architecture: TARGETARCH
Maintainer: Alex Kremer <akremer@ripple.com>

View File

@@ -1,6 +1,6 @@
services:
clio_develop:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:latest
volumes:
- clio_develop_conan_data:/root/.conan2/p
- clio_develop_ccache:/root/.ccache

View File

@@ -8,17 +8,19 @@ ARG TARGETARCH
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ARG BUILD_VERSION=2
ARG BUILD_VERSION=1
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
bison \
flex \
ninja-build \
python3 \
python3-pip \
software-properties-common \
wget \
&& pip3 install -q --no-cache-dir \
cmake \
cmake==3.31.6 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
@@ -44,13 +46,6 @@ RUN wget --progress=dot:giga "https://github.com/ccache/ccache/releases/download
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
bison \
flex \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ARG DOXYGEN_VERSION=1.12.0
RUN wget --progress=dot:giga "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
@@ -83,22 +78,4 @@ RUN wget --progress=dot:giga "https://github.com/cli/cli/releases/download/v${GH
&& mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/local/bin/gh \
&& rm -rf /tmp/* /var/tmp/*
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
libgmp-dev \
libmpfr-dev \
libncurses-dev \
make \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ARG GDB_VERSION=16.3
RUN wget --progress=dot:giga "https://sourceware.org/pub/gdb/releases/gdb-${GDB_VERSION}.tar.gz" \
&& tar xf "gdb-${GDB_VERSION}.tar.gz" \
&& cd "gdb-${GDB_VERSION}" \
&& ./configure --prefix=/usr/local \
&& make -j "$(nproc)" \
&& make install-gdb \
&& rm -rf /tmp/* /var/tmp/*
WORKDIR /root

View File

@@ -6,22 +6,16 @@
## Minimum Requirements
- [Python 3.7](https://www.python.org/downloads/)
- [Conan 2.20.1](https://conan.io/downloads.html)
- [CMake 3.20](https://cmake.org/download/)
- [Conan 2.17.0](https://conan.io/downloads.html)
- [CMake 3.20, <4.0](https://cmake.org/download/)
- [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html): needed for code coverage generation
- [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often
We use our Docker image `ghcr.io/XRPLF/clio-ci` to build `Clio`, see [Building Clio with Docker](#building-clio-with-docker).
You can find information about exact compiler versions and tools in the [image's README](https://github.com/XRPLF/clio/blob/develop/docker/ci/README.md).
The following compiler versions are guaranteed to work.
Any compiler with a lower version may not be able to build Clio:
| Compiler | Version |
| ----------- | ------- |
| GCC | 15.2 |
| Clang | 19 |
| Apple Clang | 17 |
| GCC | 12.3 |
| Clang | 16 |
| Apple Clang | 15 |
### Conan Configuration
@@ -90,7 +84,7 @@ core.upload:parallel={{os.cpu_count()}}
Make sure Artifactory is set up with Conan.
```sh
conan remote add --index 0 xrplf https://conan.ripplex.io
conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev
```
Now you should be able to download the prebuilt dependencies (including `xrpl` package) on supported platforms.
@@ -104,100 +98,79 @@ It is implicitly used when running `conan` commands, you don't need to specify i
You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.
> [!NOTE]
> Conan uses local cache by default when creating a lockfile.
>
> To ensure that lockfile creation works the same way on all developer machines, you should clear the local cache before creating a new lockfile.
To create a new lockfile, run the following commands in the repository root:
To do that, run the following command in the repository root:
```bash
conan remove '*' --confirm
rm conan.lock
# This ensures that the xrplf remote is the first to be consulted
conan remote add --force --index 0 xrplf https://conan.ripplex.io
conan lock create .
conan lock create . -o '&:tests=True' -o '&:benchmark=True'
```
> [!NOTE]
> If some dependencies are exclusive to a particular OS, you may need to rerun the last command for them, adding `--profile:all <PROFILE>`.
## Building Clio
1. Navigate to Clio's root directory and run:
Navigate to Clio's root directory and run:
```sh
mkdir build && cd build
```
```sh
mkdir build && cd build
# You can also specify profile explicitly by adding `--profile:all <PROFILE_NAME>`
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True'
# You can also add -GNinja to use Ninja build system instead of Make
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```
2. Install dependencies through conan.
> [!TIP]
> You can omit the `-o '&:tests=True'` if you don't want to build `clio_tests`.
```sh
conan install .. --output-folder . --build missing --settings build_type=Release
```
If successful, `conan install` will find the required packages and `cmake` will do the rest. You should see `clio_server` and `clio_tests` in the `build` directory (the current directory).
> You can add `--profile:all <PROFILE_NAME>` to choose a specific conan profile.
> [!TIP]
> To generate a Code Coverage report, include `-o '&:coverage=True'` in the `conan install` command above, along with `-o '&:tests=True'` to enable tests.
> After running the `cmake` commands, execute `make clio_tests-ccov`.
> The coverage report will be found at `clio_tests-llvm-cov/index.html`.
3. Configure and generate build files with CMake.
```sh
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
```
> You can add `-GNinja` to use the Ninja build system (instead of Make).
4. Now, you can build all targets or specific ones:
```sh
# builds all targets
cmake --build . --parallel 8
# builds only clio_server target
cmake --build . --parallel 8 --target clio_server
```
You should see `clio_server` and `clio_tests` in the current directory.
<!-- markdownlint-disable-line MD028 -->
> [!NOTE]
> If you've built Clio before and the build is now failing, it's likely due to updated dependencies. Try deleting the build folder and then rerunning the Conan and CMake commands mentioned above.
### CMake options
There are several CMake options you can use to customize the build:
| CMake Option | Default | CMake Target | Description |
| --------------------- | ------- | -------------------------------------------------------- | ------------------------------------- |
| `-Dcoverage` | OFF | `clio_tests-ccov` | Enables code coverage generation |
| `-Dtests` | OFF | `clio_tests` | Enables unit tests |
| `-Dintegration_tests` | OFF | `clio_integration_tests` | Enables integration tests |
| `-Dbenchmark` | OFF | `clio_benchmark` | Enables benchmark executable |
| `-Ddocs` | OFF | `docs` | Enables API documentation generation |
| `-Dlint` | OFF | See [#clang-tidy](#using-clang-tidy-for-static-analysis) | Enables `clang-tidy` static analysis |
| `-Dsan` | N/A | N/A | Enables Sanitizer (asan, tsan, ubsan) |
| `-Dpackage` | OFF | N/A | Creates a debian package |
### Generating API docs for Clio
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen 1.12.0 on your system.
To generate the API docs, please use CMake option `-Ddocs=ON` as described above and build the `docs` target.
To generate the API docs:
To view the generated files, go to `build/docs/html`.
Open the `index.html` file in your browser to see the documentation pages.
1. First, include `-o '&:docs=True'` in the conan install command. For example:
![API index page](./img/doxygen-docs-output.png "API index page")
```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True' -o '&:docs=True'
```
2. Once that has completed successfully, run the `cmake` command and add the `--target docs` option:
```sh
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 --target docs
```
3. Go to `build/docs/html` to view the generated files.
Open the `index.html` file in your browser to see the documentation pages.
![API index page](./img/doxygen-docs-output.png "API index page")
## Building Clio with Docker
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
```sh
docker run -it ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
docker run -it ghcr.io/xrplf/clio-ci:latest
git clone https://github.com/XRPLF/clio
cd clio
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True'
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```
Follow the same steps in the [Building Clio](#building-clio) section. You can use `--profile:all gcc` or `--profile:all clang` with the `conan install` command to choose the desired compiler.
## Developing against `rippled` in standalone mode
If you wish to develop against a `rippled` instance running in standalone mode there are a few quirks of both Clio and `rippled` that you need to keep in mind. You must:
@@ -250,10 +223,10 @@ Sometimes, during development, you need to build against a custom version of `li
## Using `clang-tidy` for static analysis
Clang-tidy can be run by CMake when building the project.
To achieve this, you just need to provide the option `-Dlint=ON` when generating CMake files:
To achieve this, you just need to provide the option `-o '&:lint=True'` for the `conan install` command:
```sh
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dlint=ON ..
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True' -o '&:lint=True' --profile:all clang
```
By default, CMake tries to find `clang-tidy` automatically on your system.

View File

@@ -3,9 +3,7 @@
This document provides a list of all available Clio configuration properties in detail.
> [!NOTE]
> Dot notation in configuration key names represents nested fields.
> For example, **database.scylladb** refers to the _scylladb_ field inside the _database_ object.
> If a key name includes "[]", it indicates that the nested field is an array (e.g., etl_sources.[]).
> Dot notation in configuration key names represents nested fields. For example, **database.scylladb** refers to the _scylladb_ field inside the _database_ object. If a key name includes "[]", it indicates that the nested field is an array (e.g., etl_sources.[]).
## Configuration Details
@@ -157,7 +155,7 @@ This document provides a list of all available Clio configuration properties in
- **Required**: True
- **Type**: boolean
- **Default value**: `False`
- **Default value**: `True`
- **Constraints**: None
- **Description**: If set to `True`, allows Clio to start without any ETL source.
@@ -329,27 +327,11 @@ This document provides a list of all available Clio configuration properties in
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: Maximum queue size for sending subscription data to clients. This queue buffers data when a client is slow to receive it, ensuring delivery once the client is ready.
### server.proxy.ips.[]
- **Required**: True
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: List of proxy IP addresses. When Clio receives a request from a proxy, it uses the `Forwarded` header value (if any) as the client IP. When this option is used together with `server.proxy.tokens`, Clio identifies a proxy by IP or by token.
### server.proxy.tokens.[]
- **Required**: True
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: List of tokens used to identify a request as coming from a proxy. The token should be provided in the `X-Proxy-Token` header, e.g. `X-Proxy-Token: <very_secret_token>`. When Clio receives a request from a proxy, it uses the `Forwarded` header value (if any) to get the client IP. When this option is used together with `server.proxy.ips`, Clio identifies a proxy by IP or by token.
### prometheus.enabled
- **Required**: True
- **Type**: boolean
- **Default value**: `True`
- **Default value**: `False`
- **Constraints**: None
- **Description**: Enables or disables Prometheus metrics.
@@ -357,7 +339,7 @@ This document provides a list of all available Clio configuration properties in
- **Required**: True
- **Type**: boolean
- **Default value**: `True`
- **Default value**: `False`
- **Constraints**: None
- **Description**: Enables or disables compression of Prometheus responses.
@@ -433,7 +415,7 @@ This document provides a list of all available Clio configuration properties in
- **Constraints**: The value must be one of the following: `sync`, `async`, `none`.
- **Description**: The strategy used for Cache loading.
### log.channels.[].channel
### log_channels.[].channel
- **Required**: False
- **Type**: string
@@ -441,63 +423,39 @@ This document provides a list of all available Clio configuration properties in
- **Constraints**: The value must be one of the following: `General`, `WebServer`, `Backend`, `RPC`, `ETL`, `Subscriptions`, `Performance`, `Migration`.
- **Description**: The name of the log channel.
### log.channels.[].level
### log_channels.[].log_level
- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`.
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`, `count`.
- **Description**: The log level for the specific log channel.
### log.level
### log_level
- **Required**: True
- **Type**: string
- **Default value**: `info`
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`.
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`, `count`.
- **Description**: The general logging level of Clio. This level is applied to all log channels that do not have an explicitly defined logging level.
### log.format
### log_format
- **Required**: True
- **Type**: string
- **Default value**: `%Y-%m-%d %H:%M:%S.%f %^%3!l:%n%$ - %v`
- **Default value**: `%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%`
- **Constraints**: None
- **Description**: The format string for log messages using spdlog format patterns.
- **Description**: The format string for log messages. The format is described here: <https://www.boost.org/doc/libs/1_83_0/libs/log/doc/html/log/tutorial/formatters.html>.
Each of the variables expands like so:
- `%Y-%m-%d %H:%M:%S.%f`: The full date and time of the log entry with microsecond precision
- `%^`: Start color range
- `%3!l`: The severity (aka log level) the entry was sent at, truncated to 3 characters
- `%n`: The logger name (channel) that this log entry was sent to
- `%$`: End color range
- `%v`: The actual log message
Some additional variables that might be useful:
- `%@`: A partial path to the C++ file and the line number in that file (`src/file/path:linenumber`)
- `%t`: The ID of the thread the log entry is written from
Documentation can be found at: <https://github.com/gabime/spdlog/wiki/Custom-formatting>.
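For illustration, a minimal sketch that applies the same default pattern using plain spdlog (not Clio's logging wrappers; the message text is made up):
```cpp
#include <spdlog/spdlog.h>

int main()
{
    // Clio's default pattern: timestamp with microseconds, colored 3-letter
    // level and channel name, then the message itself.
    spdlog::set_pattern("%Y-%m-%d %H:%M:%S.%f %^%3!l:%n%$ - %v");
    spdlog::info("cache loaded");  // e.g. "2025-07-22 11:43:18.123456 inf: - cache loaded"
}
```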
### log.is_async
### log_to_console
- **Required**: True
- **Type**: boolean
- **Default value**: `True`
- **Constraints**: None
- **Description**: Whether spdlog is asynchronous or not.
### log.enable_console
- **Required**: True
- **Type**: boolean
- **Default value**: `False`
- **Constraints**: None
- **Description**: Enables or disables logging to the console.
### log.directory
### log_directory
- **Required**: False
- **Type**: string
@@ -505,7 +463,7 @@ Documentation can be found at: <https://github.com/gabime/spdlog/wiki/Custom-for
- **Constraints**: None
- **Description**: The directory path for the log files.
### log.rotation_size
### log_rotation_size
- **Required**: True
- **Type**: int
@@ -513,15 +471,23 @@ Documentation can be found at: <https://github.com/gabime/spdlog/wiki/Custom-for
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The log rotation size in megabytes. When the log file reaches this particular size, a new log file starts.
### log.directory_max_files
### log_directory_max_size
- **Required**: True
- **Type**: int
- **Default value**: `25`
- **Default value**: `51200`
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The maximum number of log files in the directory.
- **Description**: The maximum size of the log directory in megabytes.
### log.tag_style
### log_rotation_hour_interval
- **Required**: True
- **Type**: int
- **Default value**: `12`
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: Represents the interval (in hours) for log rotation. If the current log file reaches this value in logging, a new log file starts.
### log_tag_style
- **Required**: True
- **Type**: string
@@ -541,7 +507,7 @@ Documentation can be found at: <https://github.com/gabime/spdlog/wiki/Custom-for
- **Required**: True
- **Type**: boolean
- **Default value**: `False`
- **Default value**: `True`
- **Constraints**: None
- **Description**: Indicates if the server is allowed to write data to the database.

View File

@@ -88,15 +88,13 @@ Exactly equal password gains admin rights for the request or a websocket connect
Clio can cache requests to ETL sources to reduce the load on the ETL source.
Only the following commands are cached: `server_info`, `server_state`, `server_definitions`, `fee`, `ledger_closed`.
By default the forwarding cache is off.
To enable caching for a source, add the `forwarding.cache_timeout` value to the configuration file, e.g.:
To enable caching for a source, add the `forwarding_cache_timeout` value to the configuration file, e.g.:
```json
"forwarding": {
"cache_timeout": 0.250,
}
"forwarding_cache_timeout": 0.250,
```
`forwarding.cache_timeout` defines for how long (in seconds) a cache entry will be valid after being placed into the cache.
`forwarding_cache_timeout` defines for how long (in seconds) a cache entry will be valid after being placed into the cache.
A zero value turns off the cache feature.
## Graceful shutdown (not fully implemented yet)

View File

@@ -31,7 +31,7 @@
"etl_sources": [
{
"ip": "127.0.0.1",
"ws_port": "6006",
"ws_port": "6005",
"grpc_port": "50051"
}
],
@@ -76,60 +76,38 @@
"parallel_requests_limit": 10, // Optional parameter, used only if "processing_strategy" is "parallel". It limits the number of requests for one client connection processed in parallel. Infinite if not specified.
// Max number of responses to queue up before sent successfully. If a client's waiting queue is too long, the server will close the connection.
"ws_max_sending_queue_size": 1500,
"__ng_web_server": false, // Use ng web server. This is a temporary setting which will be deleted after switching to ng web server
"proxy": {
"ips": [],
"tokens": []
}
"__ng_web_server": false // Use ng web server. This is a temporary setting which will be deleted after switching to ng web server
},
// Time in seconds for graceful shutdown. Defaults to 10 seconds. Not fully implemented yet.
"graceful_period": 10.0,
"log": {
// Overrides log level on a per logging channel.
// Defaults to global "log.level" for each unspecified channel.
"channels": [
{
"channel": "Backend",
"level": "fatal"
},
{
"channel": "WebServer",
"level": "info"
},
{
"channel": "Subscriptions",
"level": "info"
},
{
"channel": "RPC",
"level": "error"
},
{
"channel": "ETL",
"level": "debug"
},
{
"channel": "Performance",
"level": "trace"
}
],
// The general logging level of Clio. This level is applied to all log channels that do not have an explicitly defined logging level.
"level": "info",
// Log format using spdlog format patterns (this is the default format)
"format": "%Y-%m-%d %H:%M:%S.%f %^%3!l:%n%$ - %v",
// Whether spdlog is asynchronous or not.
"is_async": true,
// Enables or disables logging to the console.
"enable_console": true,
// Clio logs to file in the specified directory only if "log.directory" is set
// "directory": "./clio_log",
// The log rotation size in megabytes. When the log file reaches this particular size, a new log file starts.
"rotation_size": 2048,
// The maximum number of log files in the directory.
"directory_max_files": 25,
// Log tags style to use
"tag_style": "uint"
},
// Overrides log level on a per logging channel.
// Defaults to global "log_level" for each unspecified channel.
"log_channels": [
{
"channel": "Backend",
"log_level": "fatal"
},
{
"channel": "WebServer",
"log_level": "info"
},
{
"channel": "Subscriptions",
"log_level": "info"
},
{
"channel": "RPC",
"log_level": "error"
},
{
"channel": "ETL",
"log_level": "debug"
},
{
"channel": "Performance",
"log_level": "trace"
}
],
"cache": {
// Configure this to use either "num_diffs", "num_cursors_from_diff", or "num_cursors_from_account". By default, Clio uses "num_diffs".
"num_diffs": 32, // Generate the cursors from the latest ledger diff, then use the cursors to partition the ledger to load concurrently. The cursors number is affected by the busyness of the network.
@@ -143,6 +121,16 @@
"enabled": true,
"compress_reply": true
},
"log_level": "info",
// Log format (this is the default format)
"log_format": "%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%",
"log_to_console": true,
// Clio logs to file in the specified directory only if "log_directory" is set
// "log_directory": "./clio_log",
"log_rotation_size": 2048,
"log_directory_max_size": 51200,
"log_rotation_hour_interval": 12,
"log_tag_style": "uint",
"extractor_threads": 8,
"read_only": false,
// "start_sequence": [integer] the ledger index to start from,

docs/logging.md Normal file
View File

@@ -0,0 +1,76 @@
# Logging
Clio provides several logging options, all of which are configurable via the config file. These are detailed in the following sections.
## `log_level`
The minimum severity level at which log messages are output by default. Severity options are `trace`, `debug`, `info`, `warning`, `error`, `fatal`. Defaults to `info`.
## `log_format`
The format of log lines produced by Clio. Defaults to `"%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%"`.
Each of the variables expands like so:
- `TimeStamp`: The full date and time of the log entry
- `SourceLocation`: A partial path to the C++ file and the line number in that file (`source/file/path:linenumber`)
- `ThreadID`: The ID of the thread the log entry is written from
- `Channel`: The channel that this log entry was sent to
- `Severity`: The severity (aka log level) the entry was sent at
- `Message`: The actual log message
## `log_channels`
An array of JSON objects, each overriding properties for a logging `channel`.
> [!IMPORTANT]
> At the time of writing, only `log_level` can be overridden using this mechanism.
Each object is of this format:
```json
{
"channel": "Backend",
"log_level": "fatal"
}
```
If no override is present for a given channel, that channel will log at the severity specified by the global `log_level`.
The log channels that can be overridden are: `Backend`, `WebServer`, `Subscriptions`, `RPC`, `ETL` and `Performance`.
> [!NOTE]
> See [example-config.json](../docs/examples/config/example-config.json) for more details.
## `log_to_console`
Enable or disable log output to console. Options are `true`/`false`. This option defaults to `true`.
## `log_directory`
Path to the directory where log files are stored. If the directory doesn't exist, Clio creates it.
If the option is not specified, logs are not written to a file.
## `log_rotation_size`
The maximum size of the log file in **megabytes** before it is rotated into a new file. Defaults to 2GB.
## `log_directory_max_size`
The maximum size of the log directory in **megabytes** before old log files are deleted to free up space. Defaults to 50GB.
## `log_rotation_hour_interval`
The time interval in **hours** after the last log rotation to automatically rotate the current log file. Defaults to 12 hours.
> [!NOTE]
> Log rotation based on time occurs in conjunction with size-based log rotation. For example, if a size-based log rotation occurs, the timer for the time-based rotation will reset.
## `log_tag_style`
Tag implementation to use. Must be one of:
- `uint`: Lock-free and thread-safe, but outputs just a simple unsigned integer
- `uuid`: Thread-safe and outputs a UUID tag
- `none`: Doesn't use tagging at all

View File

@@ -5,14 +5,11 @@ import re
from pathlib import Path
PATTERN = r'R"JSON\((.*?)\)JSON"'
def use_uppercase(cpp_content: str) -> str:
return cpp_content.replace('R"json(', 'R"JSON(').replace(')json"', ')JSON"')
def fix_json_style(cpp_content: str) -> str:
cpp_content = cpp_content.replace('R"json(', 'R"JSON(').replace(')json"', ')JSON"')
pattern = r'R"JSON\((.*?)\)JSON"'
def replace_json(match):
raw_json = match.group(1)
@@ -32,51 +29,12 @@ def fix_json_style(cpp_content: str) -> str:
raw_json = raw_json.replace(f'":{digit}', f'": {digit}')
return f'R"JSON({raw_json})JSON"'
return re.sub(PATTERN, replace_json, cpp_content, flags=re.DOTALL)
def fix_colon_spacing(cpp_content: str) -> str:
def replace_json(match):
raw_json = match.group(1)
raw_json = re.sub(r'":\n\s*(\[|\{)', r'": \1', raw_json)
return f'R"JSON({raw_json})JSON"'
return re.sub(PATTERN, replace_json, cpp_content, flags=re.DOTALL)
def fix_indentation(cpp_content: str) -> str:
lines = cpp_content.splitlines()
def find_indentation(line: str) -> int:
return len(line) - len(line.lstrip())
for (line_num, (line, next_line)) in enumerate(zip(lines[:-1], lines[1:])):
if "JSON(" in line and ")JSON" not in line:
indent = find_indentation(line)
next_indent = find_indentation(next_line)
by_how_much = next_indent - (indent + 4)
if by_how_much != 0:
print(
f"Indentation error at line: {line_num + 2}: expected {indent + 4} spaces, found {next_indent} spaces"
)
for i in range(line_num + 1, len(lines)):
if ")JSON" in lines[i]:
lines[i] = " " * indent + lines[i].lstrip()
break
lines[i] = lines[i][by_how_much:] if by_how_much > 0 else " " * (-by_how_much) + lines[i]
return "\n".join(lines) + "\n"
return re.sub(pattern, replace_json, cpp_content, flags=re.DOTALL)
def process_file(file_path: Path, dry_run: bool) -> bool:
content = file_path.read_text(encoding="utf-8")
new_content = content
new_content = use_uppercase(new_content)
new_content = fix_json_style(new_content)
new_content = fix_colon_spacing(new_content)
new_content = fix_indentation(new_content)
new_content = fix_json_style(content)
if new_content != content:
print(f"Processing file: {file_path}")

View File

@@ -23,7 +23,6 @@
#include "util/build/Build.hpp"
#include "util/config/ConfigDescription.hpp"
#include <boost/program_options/errors.hpp>
#include <boost/program_options/options_description.hpp>
#include <boost/program_options/parsers.hpp>
#include <boost/program_options/positional_options.hpp>
@@ -57,22 +56,12 @@ CliArgs::parse(int argc, char const* argv[])
po::positional_options_description positional;
positional.add("conf", 1);
auto const printHelp = [&description]() {
std::cout << "Clio server " << util::build::getClioFullVersionString() << "\n\n" << description;
};
po::variables_map parsed;
try {
po::store(po::command_line_parser(argc, argv).options(description).positional(positional).run(), parsed);
po::notify(parsed);
} catch (po::error const& e) {
std::cerr << "Error: " << e.what() << std::endl << std::endl;
printHelp();
return Action{Action::Exit{EXIT_FAILURE}};
}
po::store(po::command_line_parser(argc, argv).options(description).positional(positional).run(), parsed);
po::notify(parsed);
if (parsed.contains("help")) {
printHelp();
std::cout << "Clio server " << util::build::getClioFullVersionString() << "\n\n" << description;
return Action{Action::Exit{EXIT_SUCCESS}};
}
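A self-contained sketch of the same error-handling pattern, using a made-up option set (not Clio's actual CLI definition):
```cpp
#include <boost/program_options.hpp>

#include <cstdlib>
#include <iostream>
#include <string>

namespace po = boost::program_options;

int main(int argc, char const* argv[])
{
    po::options_description description("Options");
    description.add_options()
        ("help,h", "print help message and exit")
        ("conf", po::value<std::string>(), "configuration file");

    po::positional_options_description positional;
    positional.add("conf", 1);

    po::variables_map parsed;
    try {
        po::store(po::command_line_parser(argc, argv).options(description).positional(positional).run(), parsed);
        po::notify(parsed);
    } catch (po::error const& e) {
        // Unknown flags or malformed values now produce a readable error plus
        // the usage text instead of an unhandled exception.
        std::cerr << "Error: " << e.what() << "\n\n" << description;
        return EXIT_FAILURE;
    }

    if (parsed.contains("help")) {
        std::cout << description;
        return EXIT_SUCCESS;
    }
    return EXIT_SUCCESS;
}
```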

View File

@@ -178,9 +178,7 @@ ClioApplication::run(bool const useNgWebServer)
}
auto const adminVerifier = std::move(expectedAdminVerifier).value();
auto httpServer = web::ng::makeServer(
config_, OnConnectCheck{dosGuard}, IpChangeHook{dosGuard}, DisconnectHook{dosGuard}, ioc
);
auto httpServer = web::ng::makeServer(config_, OnConnectCheck{dosGuard}, DisconnectHook{dosGuard}, ioc);
if (not httpServer.has_value()) {
LOG(util::LogService::error()) << "Error creating web server: " << httpServer.error();

View File

@@ -19,8 +19,6 @@
#include "app/Stopper.hpp"
#include "util/Spawn.hpp"
#include <boost/asio/spawn.hpp>
#include <functional>
@@ -38,7 +36,7 @@ Stopper::~Stopper()
void
Stopper::setOnStop(std::function<void(boost::asio::yield_context)> cb)
{
util::spawn(ctx_, std::move(cb));
boost::asio::spawn(ctx_, std::move(cb));
}
void

View File

@@ -108,8 +108,6 @@ public:
ioc.stop();
LOG(util::LogService::info()) << "io_context stopped";
util::LogService::shutdown();
};
}
};

View File

@@ -44,7 +44,7 @@ parseConfig(std::string_view configPath)
std::cerr << "Error parsing json from config: " << configPath << "\n" << json.error().error << std::endl;
return false;
}
auto const errors = getClioConfig().parse(json.value());
auto const errors = gClioConfig.parse(json.value());
if (errors.has_value()) {
for (auto const& err : errors.value()) {
std::cerr << "Issues found in provided config '" << configPath << "':\n";

View File

@@ -33,7 +33,6 @@
#include <memory>
#include <optional>
#include <string>
#include <utility>
namespace app {
@@ -55,17 +54,6 @@ OnConnectCheck::operator()(web::ng::Connection const& connection)
return {};
}
IpChangeHook::IpChangeHook(web::dosguard::DOSGuardInterface& dosguard) : dosguard_(dosguard)
{
}
void
IpChangeHook::operator()(std::string const& oldIp, std::string const& newIp)
{
dosguard_.get().decrement(oldIp);
dosguard_.get().increment(newIp);
}
DisconnectHook::DisconnectHook(web::dosguard::DOSGuardInterface& dosguard) : dosguard_{dosguard}
{
}

View File

@@ -36,7 +36,6 @@
#include <exception>
#include <functional>
#include <memory>
#include <string>
#include <utility>
namespace app {
@@ -65,31 +64,6 @@ public:
operator()(web::ng::Connection const& connection);
};
/**
* @brief A function object that is called when the IP of a connection changes (usually if proxy detected).
* This is used to update the DOS guard.
*/
class IpChangeHook {
std::reference_wrapper<web::dosguard::DOSGuardInterface> dosguard_;
public:
/**
* @brief Construct a new IpChangeHook object.
*
* @param dosguard The DOS guard to use.
*/
IpChangeHook(web::dosguard::DOSGuardInterface& dosguard);
/**
* @brief The call of the function object.
*
* @param oldIp The old IP of the connection.
* @param newIp The new IP of the connection.
*/
void
operator()(std::string const& oldIp, std::string const& newIp);
};
/**
* @brief A function object to be called when a connection is disconnected.
*/

View File

@@ -21,16 +21,10 @@
#include "cluster/ClioNode.hpp"
#include "data/BackendInterface.hpp"
#include "util/Assert.hpp"
#include "util/Spawn.hpp"
#include "util/log/Logger.hpp"
#include <boost/asio/bind_cancellation_slot.hpp>
#include <boost/asio/cancellation_type.hpp>
#include <boost/asio/error.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/asio/steady_timer.hpp>
#include <boost/asio/use_future.hpp>
#include <boost/json/parse.hpp>
#include <boost/json/serialize.hpp>
#include <boost/json/value.hpp>
@@ -41,16 +35,11 @@
#include <chrono>
#include <ctime>
#include <latch>
#include <memory>
#include <string>
#include <utility>
#include <vector>
namespace {
constexpr auto kTOTAL_WORKERS = 2uz; // 1 reading and 1 writing worker (coroutines)
} // namespace
namespace cluster {
ClusterCommunicationService::ClusterCommunicationService(
@@ -61,7 +50,6 @@ ClusterCommunicationService::ClusterCommunicationService(
: backend_(std::move(backend))
, readInterval_(readInterval)
, writeInterval_(writeInterval)
, finishedCountdown_(kTOTAL_WORKERS)
, selfData_{ClioNode{
.uuid = std::make_shared<boost::uuids::uuid>(boost::uuids::random_generator{}()),
.updateTime = std::chrono::system_clock::time_point{}
@@ -74,42 +62,22 @@ ClusterCommunicationService::ClusterCommunicationService(
void
ClusterCommunicationService::run()
{
ASSERT(not running_ and not stopped_, "Can only be run once");
running_ = true;
util::spawn(strand_, [this](boost::asio::yield_context yield) {
boost::asio::spawn(strand_, [this](boost::asio::yield_context yield) {
boost::asio::steady_timer timer(yield.get_executor());
boost::system::error_code ec;
while (running_) {
while (true) {
timer.expires_after(readInterval_);
auto token = cancelSignal_.slot();
timer.async_wait(boost::asio::bind_cancellation_slot(token, yield[ec]));
if (ec == boost::asio::error::operation_aborted or not running_)
break;
timer.async_wait(yield);
doRead(yield);
}
finishedCountdown_.count_down(1);
});
util::spawn(strand_, [this](boost::asio::yield_context yield) {
boost::asio::spawn(strand_, [this](boost::asio::yield_context yield) {
boost::asio::steady_timer timer(yield.get_executor());
boost::system::error_code ec;
while (running_) {
while (true) {
doWrite();
timer.expires_after(writeInterval_);
auto token = cancelSignal_.slot();
timer.async_wait(boost::asio::bind_cancellation_slot(token, yield[ec]));
if (ec == boost::asio::error::operation_aborted or not running_)
break;
timer.async_wait(yield);
}
finishedCountdown_.count_down(1);
});
}
@@ -124,19 +92,9 @@ ClusterCommunicationService::stop()
if (stopped_)
return;
stopped_ = true;
// for ASAN to see through concurrency correctly we need to exit all coroutines before joining the ctx
running_ = false;
// cancelSignal_ is not thread safe so we execute emit on the same strand
boost::asio::spawn(
strand_, [this](auto&&) { cancelSignal_.emit(boost::asio::cancellation_type::all); }, boost::asio::use_future
)
.wait();
finishedCountdown_.wait();
ctx_.stop();
ctx_.join();
stopped_ = true;
}
std::shared_ptr<boost::uuids::uuid>
@@ -150,7 +108,7 @@ ClioNode
ClusterCommunicationService::selfData() const
{
ClioNode result{};
util::spawn(strand_, [this, &result](boost::asio::yield_context) { result = selfData_; });
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) { result = selfData_; });
return result;
}
@@ -161,7 +119,7 @@ ClusterCommunicationService::clusterData() const
return std::unexpected{"Service is not healthy"};
}
std::vector<ClioNode> result;
util::spawn(strand_, [this, &result](boost::asio::yield_context) {
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) {
result = otherNodesData_;
result.push_back(selfData_);
});
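The shutdown choreography above is easier to see in isolation. A minimal sketch of the same pattern (simplified names, single worker; assumes Boost.Asio with stackful coroutines):
```cpp
#include <boost/asio.hpp>
#include <boost/asio/spawn.hpp>

#include <atomic>
#include <chrono>
#include <iostream>
#include <latch>

int main()
{
    boost::asio::io_context ctx;
    boost::asio::cancellation_signal cancelSignal;
    std::atomic_bool running = true;
    std::latch finished{1};

    boost::asio::spawn(ctx, [&](boost::asio::yield_context yield) {
        boost::asio::steady_timer timer(yield.get_executor());
        boost::system::error_code ec;
        while (running) {
            timer.expires_after(std::chrono::milliseconds{100});
            // Binding the cancellation slot lets stop() interrupt the wait immediately.
            timer.async_wait(boost::asio::bind_cancellation_slot(cancelSignal.slot(), yield[ec]));
            if (ec == boost::asio::error::operation_aborted or not running)
                break;
            std::cout << "periodic work\n";
        }
        finished.count_down();  // signal that the coroutine has fully exited
    });

    // Simulated stop(): flip the flag, cancel the pending wait, await the worker.
    boost::asio::steady_timer stopTimer(ctx, std::chrono::milliseconds{350});
    stopTimer.async_wait([&](auto) {
        running = false;
        cancelSignal.emit(boost::asio::cancellation_type::all);
    });

    ctx.run();
    finished.wait();
}
```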

View File

@@ -27,15 +27,12 @@
#include "util/prometheus/Gauge.hpp"
#include "util/prometheus/Prometheus.hpp"
#include <boost/asio/cancellation_signal.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/asio/strand.hpp>
#include <boost/asio/thread_pool.hpp>
#include <boost/uuid/uuid.hpp>
#include <atomic>
#include <chrono>
#include <latch>
#include <memory>
#include <string>
#include <vector>
@@ -68,14 +65,11 @@ class ClusterCommunicationService : public ClusterCommunicationServiceInterface
std::chrono::steady_clock::duration readInterval_;
std::chrono::steady_clock::duration writeInterval_;
boost::asio::cancellation_signal cancelSignal_;
std::latch finishedCountdown_;
std::atomic_bool running_ = false;
bool stopped_ = false;
ClioNode selfData_;
std::vector<ClioNode> otherNodesData_;
bool stopped_ = false;
public:
static constexpr std::chrono::milliseconds kDEFAULT_READ_INTERVAL{2100};
static constexpr std::chrono::milliseconds kDEFAULT_WRITE_INTERVAL{1200};

View File

@@ -68,6 +68,7 @@ struct Amendments {
/** @cond */
// NOLINTBEGIN(readability-identifier-naming)
REGISTER(OwnerPaysFee);
REGISTER(Flow);
REGISTER(FlowCross);
REGISTER(fix1513);
@@ -144,9 +145,6 @@ struct Amendments {
REGISTER(TokenEscrow);
REGISTER(fixAMMv1_3);
REGISTER(fixEnforceNFTokenTrustlineV2);
REGISTER(fixAMMClawbackRounding);
REGISTER(fixMPTDeliveredAmount);
REGISTER(fixPriceOracleOrder);
// Obsolete but supported by libxrpl
REGISTER(CryptoConditionsSuite);
@@ -155,7 +153,6 @@ struct Amendments {
REGISTER(fixNFTokenNegOffer);
// Retired amendments
REGISTER(OwnerPaysFee); // Removed in xrpl 2.6.0 (https://github.com/XRPLF/rippled/pull/5435)
REGISTER(MultiSign);
REGISTER(TrustSetAuth);
REGISTER(FeeEscalation);

View File

@@ -43,6 +43,11 @@
#include <utility>
#include <vector>
// local to compilation unit loggers
namespace {
util::Logger gLog{"Backend"};
} // namespace
/**
* @brief This namespace implements the data access layer and related components.
*
@@ -53,10 +58,10 @@ namespace data {
bool
BackendInterface::finishWrites(std::uint32_t const ledgerSequence)
{
LOG(log_.debug()) << "Want finish writes for " << ledgerSequence;
LOG(gLog.debug()) << "Want finish writes for " << ledgerSequence;
auto commitRes = doFinishWrites();
if (commitRes) {
LOG(log_.debug()) << "Successfully committed. Updating range now to " << ledgerSequence;
LOG(gLog.debug()) << "Successfully committed. Updating range now to " << ledgerSequence;
updateRange(ledgerSequence);
}
return commitRes;
@@ -84,15 +89,15 @@ BackendInterface::fetchLedgerObject(
{
auto obj = cache_.get().get(key, sequence);
if (obj) {
LOG(log_.trace()) << "Cache hit - " << ripple::strHex(key);
LOG(gLog.trace()) << "Cache hit - " << ripple::strHex(key);
return obj;
}
auto dbObj = doFetchLedgerObject(key, sequence, yield);
if (!dbObj) {
LOG(log_.trace()) << "Missed cache and missed in db";
LOG(gLog.trace()) << "Missed cache and missed in db";
} else {
LOG(log_.trace()) << "Missed cache but found in db";
LOG(gLog.trace()) << "Missed cache but found in db";
}
return dbObj;
}
@@ -106,7 +111,7 @@ BackendInterface::fetchLedgerObjectSeq(
{
auto seq = doFetchLedgerObjectSeq(key, sequence, yield);
if (!seq)
LOG(log_.trace()) << "Missed in db";
LOG(gLog.trace()) << "Missed in db";
return seq;
}
@@ -128,7 +133,7 @@ BackendInterface::fetchLedgerObjects(
misses.push_back(keys[i]);
}
}
LOG(log_.trace()) << "Cache hits = " << keys.size() - misses.size() << " - cache misses = " << misses.size();
LOG(gLog.trace()) << "Cache hits = " << keys.size() - misses.size() << " - cache misses = " << misses.size();
if (!misses.empty()) {
auto objs = doFetchLedgerObjects(misses, sequence, yield);
@@ -153,9 +158,9 @@ BackendInterface::fetchSuccessorKey(
{
auto succ = cache_.get().getSuccessor(key, ledgerSequence);
if (succ) {
LOG(log_.trace()) << "Cache hit - " << ripple::strHex(key);
LOG(gLog.trace()) << "Cache hit - " << ripple::strHex(key);
} else {
LOG(log_.trace()) << "Cache miss - " << ripple::strHex(key);
LOG(gLog.trace()) << "Cache miss - " << ripple::strHex(key);
}
return succ ? succ->key : doFetchSuccessorKey(key, ledgerSequence, yield);
}
@@ -205,7 +210,7 @@ BackendInterface::fetchBookOffers(
numSucc++;
succMillis += getMillis(mid2 - mid1);
if (!offerDir || offerDir->key >= bookEnd) {
LOG(log_.trace()) << "offerDir.has_value() " << offerDir.has_value() << " breaking";
LOG(gLog.trace()) << "offerDir.has_value() " << offerDir.has_value() << " breaking";
break;
}
uTipIndex = offerDir->key;
@@ -218,7 +223,7 @@ BackendInterface::fetchBookOffers(
keys.insert(keys.end(), indexes.begin(), indexes.end());
auto next = sle.getFieldU64(ripple::sfIndexNext);
if (next == 0u) {
LOG(log_.trace()) << "Next is empty. breaking";
LOG(gLog.trace()) << "Next is empty. breaking";
break;
}
auto nextKey = ripple::keylet::page(uTipIndex, next);
@@ -233,13 +238,13 @@ BackendInterface::fetchBookOffers(
auto mid = std::chrono::system_clock::now();
auto objs = fetchLedgerObjects(keys, ledgerSequence, yield);
for (size_t i = 0; i < keys.size() && i < limit; ++i) {
LOG(log_.trace()) << "Key = " << ripple::strHex(keys[i]) << " blob = " << ripple::strHex(objs[i])
LOG(gLog.trace()) << "Key = " << ripple::strHex(keys[i]) << " blob = " << ripple::strHex(objs[i])
<< " ledgerSequence = " << ledgerSequence;
ASSERT(!objs[i].empty(), "Ledger object can't be empty");
page.offers.push_back({keys[i], objs[i]});
}
auto end = std::chrono::system_clock::now();
LOG(log_.debug()) << "Fetching " << std::to_string(keys.size()) << " offers took "
LOG(gLog.debug()) << "Fetching " << std::to_string(keys.size()) << " offers took "
<< std::to_string(getMillis(mid - begin)) << " milliseconds. Fetching next dir took "
<< std::to_string(succMillis) << " milliseconds. Fetched next dir " << std::to_string(numSucc)
<< " times"
@@ -270,17 +275,14 @@ BackendInterface::updateRange(uint32_t newMax)
{
std::scoped_lock const lck(rngMtx_);
if (range_.has_value() && newMax < range_->maxSequence) {
ASSERT(
false,
"Range shouldn't exist yet or newMax should be at least range->maxSequence. newMax = {}, "
"range->maxSequence = {}",
newMax,
range_->maxSequence
);
}
ASSERT(
!range_ || newMax >= range_->maxSequence,
"Range shouldn't exist yet or newMax should be greater. newMax = {}, range->maxSequence = {}",
newMax,
range_->maxSequence
);
if (!range_.has_value()) {
if (!range_) {
range_ = {.minSequence = newMax, .maxSequence = newMax};
} else {
range_->maxSequence = newMax;
@@ -336,13 +338,13 @@ BackendInterface::fetchLedgerPage(
if (!objects[i].empty()) {
page.objects.push_back({keys[i], std::move(objects[i])});
} else if (!outOfOrder) {
LOG(log_.error()) << "Deleted or non-existent object in successor table. key = " << ripple::strHex(keys[i])
LOG(gLog.error()) << "Deleted or non-existent object in successor table. key = " << ripple::strHex(keys[i])
<< " - seq = " << ledgerSequence;
std::stringstream msg;
for (size_t j = 0; j < objects.size(); ++j) {
msg << " - " << ripple::strHex(keys[j]);
}
LOG(log_.error()) << msg.str();
LOG(gLog.error()) << msg.str();
if (corruptionDetector_.has_value())
corruptionDetector_->onCorruptionDetected();
@@ -363,7 +365,7 @@ BackendInterface::fetchFees(std::uint32_t const seq, boost::asio::yield_context
auto bytes = fetchLedgerObject(key, seq, yield);
if (!bytes) {
LOG(log_.error()) << "Could not find fees";
LOG(gLog.error()) << "Could not find fees";
return {};
}

View File

@@ -23,7 +23,6 @@
#include "data/LedgerCacheInterface.hpp"
#include "data/Types.hpp"
#include "etl/CorruptionDetector.hpp"
#include "util/Spawn.hpp"
#include "util/log/Logger.hpp"
#include <boost/asio/executor_work_guard.hpp>
@@ -109,12 +108,14 @@ synchronous(FnType&& func)
using R = typename boost::result_of<FnType(boost::asio::yield_context)>::type;
if constexpr (!std::is_same_v<R, void>) {
R res;
util::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func, &res](auto yield) { res = func(yield); });
boost::asio::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func, &res](auto yield) {
res = func(yield);
});
ctx.run();
return res;
} else {
util::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func](auto yield) { func(yield); });
boost::asio::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func](auto yield) { func(yield); });
ctx.run();
}
}
@@ -138,7 +139,6 @@ synchronousAndRetryOnTimeout(FnType&& func)
*/
class BackendInterface {
protected:
util::Logger log_{"Backend"};
mutable std::shared_mutex rngMtx_;
std::optional<LedgerRange> range_;
std::reference_wrapper<LedgerCacheInterface> cache_;
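For reference, a stripped-down illustration of the work-guard idiom `synchronous` relies on (simplified, not the real helper; assumes the callable returns a default-constructible value):
```cpp
#include <boost/asio.hpp>
#include <boost/asio/spawn.hpp>

#include <iostream>
#include <utility>

// Run a coroutine to completion on a temporary io_context and return its result.
// The work guard captured by the coroutine keeps ctx.run() alive until the
// coroutine itself finishes, even across suspension points.
template <typename Fn>
auto
runSynchronously(Fn&& fn)
{
    boost::asio::io_context ctx;
    using Result = decltype(fn(std::declval<boost::asio::yield_context>()));
    Result res{};
    boost::asio::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &fn, &res](auto yield) { res = fn(yield); });
    ctx.run();
    return res;
}

int main()
{
    auto const answer = runSynchronously([](boost::asio::yield_context) { return 42; });
    std::cout << answer << "\n";  // prints 42
}
```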

View File

@@ -30,8 +30,8 @@
#include <boost/asio.hpp>
#include <boost/asio/associated_executor.hpp>
#include <boost/asio/executor_work_guard.hpp>
#include <boost/asio/io_context.hpp>
#include <boost/asio/io_service.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/json/object.hpp>
@@ -79,7 +79,7 @@ class DefaultExecutionStrategy {
std::condition_variable syncCv_;
boost::asio::io_context ioc_;
std::optional<boost::asio::executor_work_guard<boost::asio::io_context::executor_type>> work_;
std::optional<boost::asio::io_service::work> work_;
std::reference_wrapper<HandleType const> handle_;
std::thread thread_;
@@ -107,7 +107,7 @@ public:
: maxWriteRequestsOutstanding_{settings.maxWriteRequestsOutstanding}
, maxReadRequestsOutstanding_{settings.maxReadRequestsOutstanding}
, writeBatchSize_{settings.writeBatchSize}
, work_{boost::asio::make_work_guard(ioc_)}
, work_{ioc_}
, handle_{std::cref(handle)}
, thread_{[this]() { ioc_.run(); }}
, counters_{std::move(counters)}
@@ -334,7 +334,7 @@ public:
};
auto res = boost::asio::async_compose<CompletionTokenType, void(ResultOrErrorType)>(
std::move(init), token, boost::asio::get_associated_executor(token)
init, token, boost::asio::get_associated_executor(token)
);
numReadRequestsOutstanding_ -= numStatements;
@@ -387,7 +387,7 @@ public:
};
auto res = boost::asio::async_compose<CompletionTokenType, void(ResultOrErrorType)>(
std::move(init), token, boost::asio::get_associated_executor(token)
init, token, boost::asio::get_associated_executor(token)
);
--numReadRequestsOutstanding_;
@@ -456,7 +456,7 @@ public:
};
boost::asio::async_compose<CompletionTokenType, void()>(
std::move(init), token, boost::asio::get_associated_executor(token)
init, token, boost::asio::get_associated_executor(token)
);
numReadRequestsOutstanding_ -= statements.size();

View File

@@ -18,7 +18,6 @@
//==============================================================================
#include "data/DBHelpers.hpp"
#include "util/Assert.hpp"
#include <ripple/protocol/STBase.h>
#include <ripple/protocol/STTx.h>
@@ -72,9 +71,6 @@ getMPTHolderFromTx(ripple::TxMeta const& txMeta, ripple::STTx const& sttx)
std::optional<MPTHolderData>
getMPTHolderFromObj(std::string const& key, std::string const& blob)
{
// https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033-multi-purpose-tokens#2121-mptoken-ledger-identifier
ASSERT(key.size() == ripple::uint256::size(), "The size of the key is expected to fit uint256 exactly");
ripple::STLedgerEntry const sle =
ripple::STLedgerEntry(ripple::SerialIter{blob.data(), blob.size()}, ripple::uint256::fromVoid(key.data()));

View File

@@ -23,7 +23,6 @@
#include "feed/SubscriptionManagerInterface.hpp"
#include "rpc/JS.hpp"
#include "util/Retry.hpp"
#include "util/Spawn.hpp"
#include "util/log/Logger.hpp"
#include "util/prometheus/Label.hpp"
#include "util/prometheus/Prometheus.hpp"
@@ -158,7 +157,7 @@ SubscriptionSource::stop(boost::asio::yield_context yield)
void
SubscriptionSource::subscribe()
{
util::spawn(strand_, [this, _ = boost::asio::make_work_guard(strand_)](boost::asio::yield_context yield) {
boost::asio::spawn(strand_, [this, _ = boost::asio::make_work_guard(strand_)](boost::asio::yield_context yield) {
if (auto connection = wsConnectionBuilder_.connect(yield); connection) {
wsConnection_ = std::move(connection).value();
} else {

View File

@@ -305,8 +305,6 @@ ETLService::startLoading(uint32_t seq)
{
ASSERT(not state_->isStrictReadonly, "This should only happen on writer nodes");
taskMan_ = taskManagerProvider_->make(ctx_, *monitor_, seq, finishSequence_);
// FIXME: this legacy name "extractor_threads" is no longer accurate (we have coroutines now)
taskMan_->run(config_.get().get<std::size_t>("extractor_threads"));
}

View File

@@ -26,7 +26,6 @@
#include "etlng/SchedulerInterface.hpp"
#include "etlng/impl/Monitor.hpp"
#include "etlng/impl/TaskQueue.hpp"
#include "util/Assert.hpp"
#include "util/Constants.hpp"
#include "util/LedgerUtils.hpp"
#include "util/Profiler.hpp"
@@ -71,11 +70,12 @@ TaskManager::~TaskManager()
void
TaskManager::run(std::size_t numExtractors)
{
ASSERT(not running_, "TaskManager can only be started once");
running_ = true;
LOG(log_.debug()) << "Starting task manager with " << numExtractors << " extractors...\n";
stop();
extractors_.clear();
loaders_.clear();
extractors_.reserve(numExtractors);
for ([[maybe_unused]] auto _ : std::views::iota(0uz, numExtractors))
extractors_.push_back(spawnExtractor(queue_));
@@ -155,11 +155,6 @@ TaskManager::spawnLoader(TaskQueue& queue)
<< " tps[" << txnCount / seconds << "], ops[" << objCount / seconds << "]";
monitor_.get().notifySequenceLoaded(data->seq);
} else {
// TODO (https://github.com/XRPLF/clio/issues/1852) this is probably better done with a timeout (on
// coroutine) so that the thread itself is not blocked. for now this implies that the context
// (io_threads) needs at least 2 threads
queue.awaitTask();
}
}
@@ -179,14 +174,11 @@ TaskManager::wait()
void
TaskManager::stop()
{
ASSERT(running_, "TaskManager is not running");
for (auto& extractor : extractors_)
extractor.abort();
for (auto& loader : loaders_)
loader.abort();
queue_.stop();
wait();
}
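
A standalone sketch of the spawn-N-workers loop in run() above; std::jthread stands in for util::async::AnyOperation, and the worker body is invented:

#include <cstddef>
#include <ranges>
#include <thread>
#include <vector>

int
main()
{
    std::size_t const numExtractors = 4;

    std::vector<std::jthread> extractors;
    extractors.reserve(numExtractors);
    for ([[maybe_unused]] auto _ : std::views::iota(0uz, numExtractors))
        extractors.emplace_back([] { /* pull one task at a time from a queue */ });
}  // jthreads join on destruction, playing the role of wait()
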

View File

@@ -56,7 +56,6 @@ class TaskManager : public TaskManagerInterface {
std::vector<util::async::AnyOperation<void>> extractors_;
std::vector<util::async::AnyOperation<void>> loaders_;
std::atomic_bool running_ = false;
util::Logger log_{"ETL"};
public:
@@ -71,13 +70,6 @@ public:
~TaskManager() override;
TaskManager(TaskManager const&) = delete;
TaskManager(TaskManager&&) = delete;
TaskManager&
operator=(TaskManager const&) = delete;
TaskManager&
operator=(TaskManager&&) = delete;
void
run(std::size_t numExtractors) override;

View File

@@ -20,14 +20,10 @@
#pragma once
#include "etlng/Models.hpp"
#include "util/Assert.hpp"
#include "util/Mutex.hpp"
#include <atomic>
#include <condition_variable>
#include <cstddef>
#include <cstdint>
#include <mutex>
#include <optional>
#include <queue>
#include <utility>
@@ -48,6 +44,9 @@ struct ReverseOrderComparator {
* @note This may be a candidate for future improvements if performance proves to be poor (e.g. use a lock free queue)
*/
class TaskQueue {
std::size_t limit_;
std::uint32_t increment_;
struct Data {
std::uint32_t expectedSequence;
std::priority_queue<model::LedgerData, std::vector<model::LedgerData>, ReverseOrderComparator> forwardLoadQueue;
@@ -57,13 +56,8 @@ class TaskQueue {
}
};
std::size_t limit_;
std::uint32_t increment_;
util::Mutex<Data> data_;
std::condition_variable cv_;
std::atomic_bool stopping_ = false;
public:
struct Settings {
std::uint32_t startSeq = 0u; // sequence to start from (for dequeue)
@@ -73,19 +67,13 @@ public:
/**
* @brief Construct a new priority queue
* @param settings Settings for the queue, including starting sequence, increment value, and optional limit
* @note If limit is not set, the queue will have no limit
* @param limit The limit of items allowed simultaneously in the queue
*/
explicit TaskQueue(Settings settings)
: limit_(settings.limit.value_or(0uz)), increment_(settings.increment), data_(settings.startSeq)
{
}
~TaskQueue()
{
ASSERT(stopping_, "stop() must be called before destroying the TaskQueue");
}
/**
* @brief Enqueue a new item onto the queue if space is available
* @note This function blocks until the item is attempted to be added to the queue
@@ -100,8 +88,6 @@ public:
if (limit_ == 0uz or lock->forwardLoadQueue.size() < limit_) {
lock->forwardLoadQueue.push(std::move(item));
cv_.notify_all();
return true;
}
@@ -139,32 +125,6 @@ public:
{
return data_.lock()->forwardLoadQueue.empty();
}
/**
* @brief Waits for the queue to become non-empty
* @note This function blocks until there is a task or the queue is being destroyed
*/
void
awaitTask()
{
if (stopping_)
return;
auto lock = data_.lock<std::unique_lock>();
cv_.wait(lock, [&] { return stopping_ or not lock->forwardLoadQueue.empty(); });
}
/**
* @brief Notify the queue that it's no longer needed
* @note This must be called before the queue is destroyed
*/
void
stop()
{
// unblock all waiters
stopping_ = true;
cv_.notify_all();
}
};
} // namespace etlng::impl
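
The idea behind TaskQueue, reduced to a standalone toy: items arrive out of order, and consumers pop them strictly in ascending sequence order. Item and OrderedQueue are simplified stand-ins, not Clio types:

#include <condition_variable>
#include <cstdint>
#include <mutex>
#include <queue>
#include <vector>

struct Item {
    std::uint32_t seq;
};

struct BySeqDescending {
    bool
    operator()(Item const& a, Item const& b) const
    {
        return a.seq > b.seq;  // min-heap on sequence, like ReverseOrderComparator
    }
};

class OrderedQueue {
    std::uint32_t expected_;
    std::priority_queue<Item, std::vector<Item>, BySeqDescending> queue_;
    std::mutex mtx_;
    std::condition_variable cv_;

public:
    explicit OrderedQueue(std::uint32_t start) : expected_(start)
    {
    }

    void
    push(Item item)
    {
        {
            std::lock_guard const lock{mtx_};
            queue_.push(item);
        }
        cv_.notify_all();
    }

    Item
    popNext()  // blocks until the item with seq == expected_ is available
    {
        std::unique_lock lock{mtx_};
        cv_.wait(lock, [&] { return !queue_.empty() && queue_.top().seq == expected_; });
        Item const out = queue_.top();
        queue_.pop();
        ++expected_;
        return out;
    }
};

int
main()
{
    OrderedQueue q{10};
    q.push({.seq = 12});
    q.push({.seq = 10});
    q.push({.seq = 11});
    for (int i = 0; i < 3; ++i)
        q.popNext();  // yields 10, 11, 12 regardless of arrival order
}
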

View File

@@ -96,7 +96,7 @@ SubscriptionManager::forwardProposedTransaction(boost::json::object const& recei
boost::json::object
SubscriptionManager::subLedger(boost::asio::yield_context yield, SubscriberSharedPtr const& subscriber)
{
return ledgerFeed_.sub(yield, backend_, subscriber, networkID_);
return ledgerFeed_.sub(yield, backend_, subscriber);
}
void
@@ -113,7 +113,7 @@ SubscriptionManager::pubLedger(
std::uint32_t const txnCount
)
{
ledgerFeed_.pub(lgrInfo, fees, ledgerRange, txnCount, networkID_);
ledgerFeed_.pub(lgrInfo, fees, ledgerRange, txnCount);
}
void

View File

@@ -44,8 +44,7 @@ LedgerFeed::makeLedgerPubMessage(
ripple::LedgerHeader const& lgrInfo,
ripple::Fees const& fees,
std::string const& ledgerRange,
uint32_t const txnCount,
uint32_t const networkID
std::uint32_t const txnCount
)
{
boost::json::object pubMsg;
@@ -58,7 +57,6 @@ LedgerFeed::makeLedgerPubMessage(
pubMsg["reserve_inc"] = rpc::toBoostJson(fees.increment.jsonClipped());
pubMsg["validated_ledgers"] = ledgerRange;
pubMsg["txn_count"] = txnCount;
pubMsg["network_id"] = networkID;
return pubMsg;
}
@@ -66,8 +64,7 @@ boost::json::object
LedgerFeed::sub(
boost::asio::yield_context yield,
std::shared_ptr<data::BackendInterface const> const& backend,
SubscriberSharedPtr const& subscriber,
uint32_t const networkID
SubscriberSharedPtr const& subscriber
)
{
SingleFeedBase::sub(subscriber);
@@ -84,7 +81,7 @@ LedgerFeed::sub(
auto const range = std::to_string(ledgerRange->minSequence) + "-" + std::to_string(ledgerRange->maxSequence);
auto pubMsg = makeLedgerPubMessage(*lgrInfo, *fees, range, 0, networkID);
auto pubMsg = makeLedgerPubMessage(*lgrInfo, *fees, range, 0);
pubMsg.erase("txn_count");
pubMsg.erase("type");
@@ -96,10 +93,9 @@ LedgerFeed::pub(
ripple::LedgerHeader const& lgrInfo,
ripple::Fees const& fees,
std::string const& ledgerRange,
uint32_t const txnCount,
uint32_t const networkID
std::uint32_t const txnCount
)
{
SingleFeedBase::pub(boost::json::serialize(makeLedgerPubMessage(lgrInfo, fees, ledgerRange, txnCount, networkID)));
SingleFeedBase::pub(boost::json::serialize(makeLedgerPubMessage(lgrInfo, fees, ledgerRange, txnCount)));
}
} // namespace feed::impl

View File

@@ -41,8 +41,7 @@ namespace feed::impl {
* @brief Feed that publishes the ledger info.
* Example : {'type': 'ledgerClosed', 'ledger_index': 2647935, 'ledger_hash':
* '5D022718CD782A82EE10D2147FD90B5F42F26A7E937C870B4FE3CF1086C916AE', 'ledger_time': 756395681, 'fee_base': 10,
* 'reserve_base': 10000000, 'reserve_inc': 2000000, 'validated_ledgers': '2619127-2647935', 'txn_count': 0,
* 'network_id': 1}
* 'reserve_base': 10000000, 'reserve_inc': 2000000, 'validated_ledgers': '2619127-2647935', 'txn_count': 0}
*/
class LedgerFeed : public SingleFeedBase {
public:
@@ -58,15 +57,13 @@ public:
* @brief Subscribe the ledger feed.
* @param yield The coroutine yield.
* @param backend The backend.
* @param subscriber The subscriber.
* @param networkID The network ID.
* @param subscriber
* @return The information of the latest ledger.
*/
boost::json::object
sub(boost::asio::yield_context yield,
std::shared_ptr<data::BackendInterface const> const& backend,
SubscriberSharedPtr const& subscriber,
uint32_t networkID);
SubscriberSharedPtr const& subscriber);
/**
* @brief Publishes the ledger feed.
@@ -74,14 +71,12 @@ public:
* @param fees The fees.
* @param ledgerRange The ledger range.
* @param txnCount The transaction count.
* @param networkID The network ID.
*/
void
pub(ripple::LedgerHeader const& lgrInfo,
ripple::Fees const& fees,
std::string const& ledgerRange,
uint32_t txnCount,
uint32_t networkID);
std::uint32_t txnCount);
private:
static boost::json::object
@@ -89,8 +84,7 @@ private:
ripple::LedgerHeader const& lgrInfo,
ripple::Fees const& fees,
std::string const& ledgerRange,
uint32_t txnCount,
uint32_t networkID
std::uint32_t txnCount
);
};
} // namespace feed::impl
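
The ledgerClosed message documented above, assembled standalone with Boost.JSON; the field values are taken from the example in the doc comment:

#include <boost/json.hpp>
#include <iostream>

int
main()
{
    boost::json::object pubMsg;
    pubMsg["type"] = "ledgerClosed";
    pubMsg["ledger_index"] = 2647935;
    pubMsg["ledger_hash"] = "5D022718CD782A82EE10D2147FD90B5F42F26A7E937C870B4FE3CF1086C916AE";
    pubMsg["ledger_time"] = 756395681;
    pubMsg["fee_base"] = 10;
    pubMsg["reserve_base"] = 10000000;
    pubMsg["reserve_inc"] = 2000000;
    pubMsg["validated_ledgers"] = "2619127-2647935";
    pubMsg["txn_count"] = 0;
    std::cout << boost::json::serialize(pubMsg) << "\n";
}
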

View File

@@ -33,12 +33,10 @@
#include <boost/json/serialize.hpp>
#include <xrpl/basics/chrono.h>
#include <xrpl/basics/strHex.h>
#include <xrpl/json/json_value.h>
#include <xrpl/protocol/AccountID.h>
#include <xrpl/protocol/Book.h>
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/LedgerHeader.h>
#include <xrpl/protocol/NFTSyntheticSerializer.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STObject.h>
#include <xrpl/protocol/TER.h>
@@ -206,19 +204,8 @@ TransactionFeed::pub(
pubObj[txKey] = rpc::toJson(*tx);
pubObj[JS(meta)] = rpc::toJson(*meta);
rpc::insertDeliveredAmount(pubObj[JS(meta)].as_object(), tx, meta, txMeta.date);
auto& txnPubobj = pubObj[txKey].as_object();
rpc::insertDeliverMaxAlias(txnPubobj, version);
rpc::insertMPTIssuanceID(txnPubobj, meta);
Json::Value nftJson;
ripple::RPC::insertNFTSyntheticInJson(nftJson, tx, *meta);
auto const nftBoostJson = rpc::toBoostJson(nftJson).as_object();
if (nftBoostJson.contains(JS(meta)) && nftBoostJson.at(JS(meta)).is_object()) {
auto& metaObjInPub = pubObj.at(JS(meta)).as_object();
for (auto const& [k, v] : nftBoostJson.at(JS(meta)).as_object())
metaObjInPub.insert_or_assign(k, v);
}
rpc::insertDeliverMaxAlias(pubObj[txKey].as_object(), version);
rpc::insertMPTIssuanceID(pubObj[JS(meta)].as_object(), tx, meta);
auto const& metaObj = pubObj[JS(meta)];
ASSERT(metaObj.is_object(), "meta must be an obj in rippled and clio");

View File

@@ -22,7 +22,6 @@
#include "app/VerifyConfig.hpp"
#include "migration/MigrationApplication.hpp"
#include "rpc/common/impl/HandlerProvider.hpp"
#include "util/ScopeGuard.hpp"
#include "util/TerminationHandler.hpp"
#include "util/config/ConfigDefinition.hpp"
#include "util/log/Logger.hpp"
@@ -34,16 +33,18 @@
using namespace util::config;
[[nodiscard]]
int
runApp(int argc, char const* argv[])
{
main(int argc, char const* argv[])
try {
util::setTerminationHandler();
auto const action = app::CliArgs::parse(argc, argv);
return action.apply(
[](app::CliArgs::Action::Exit const& exit) { return exit.exitCode; },
[](app::CliArgs::Action::VerifyConfig const& verify) {
if (app::parseConfig(verify.configPath)) {
std::cout << "Config " << verify.configPath << " is correct" << "\n";
std::cout << "Config " << verify.configPath << " is correct"
<< "\n";
return EXIT_SUCCESS;
}
return EXIT_FAILURE;
@@ -52,7 +53,6 @@ runApp(int argc, char const* argv[])
if (not app::parseConfig(run.configPath))
return EXIT_FAILURE;
ClioConfigDefinition const& gClioConfig = getClioConfig();
PrometheusService::init(gClioConfig);
if (auto const initSuccess = util::LogService::init(gClioConfig); not initSuccess) {
std::cerr << initSuccess.error() << std::endl;
@@ -65,30 +65,18 @@ runApp(int argc, char const* argv[])
if (not app::parseConfig(migrate.configPath))
return EXIT_FAILURE;
if (auto const initSuccess = util::LogService::init(getClioConfig()); not initSuccess) {
if (auto const initSuccess = util::LogService::init(gClioConfig); not initSuccess) {
std::cerr << initSuccess.error() << std::endl;
return EXIT_FAILURE;
}
app::MigratorApplication migrator{getClioConfig(), migrate.subCmd};
app::MigratorApplication migrator{gClioConfig, migrate.subCmd};
return migrator.run();
}
);
}
int
main(int argc, char const* argv[])
{
util::setTerminationHandler();
util::ScopeGuard const loggerShutdownGuard{[] { util::LogService::shutdown(); }};
try {
return runApp(argc, argv);
} catch (std::exception const& e) {
LOG(util::LogService::fatal()) << "Exit on exception: " << e.what();
return EXIT_FAILURE;
} catch (...) {
LOG(util::LogService::fatal()) << "Exit on exception: unknown";
return EXIT_FAILURE;
}
} catch (std::exception const& e) {
LOG(util::LogService::fatal()) << "Exit on exception: " << e.what();
return EXIT_FAILURE;
} catch (...) {
LOG(util::LogService::fatal()) << "Exit on exception: unknown";
return EXIT_FAILURE;
}
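
One side of this hunk uses a function-try-block on main(); a minimal standalone example of that shape:

#include <iostream>
#include <stdexcept>

int
main(int argc, char const* argv[])
try {
    if (argc < 2)
        throw std::runtime_error("missing argument");
    std::cout << "running with " << argv[1] << "\n";
    return 0;
} catch (std::exception const& e) {
    std::cerr << "Exit on exception: " << e.what() << "\n";
    return 1;
} catch (...) {
    std::cerr << "Exit on exception: unknown\n";
    return 1;
}
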

View File

@@ -101,6 +101,11 @@
#include <utility>
#include <vector>
// local to compilation unit loggers
namespace {
util::Logger gLog{"RPC"};
} // namespace
namespace rpc {
std::optional<AccountCursor>
@@ -203,8 +208,6 @@ accountFromStringStrict(std::string const& account)
std::pair<std::shared_ptr<ripple::STTx const>, std::shared_ptr<ripple::STObject const>>
deserializeTxPlusMeta(data::TransactionAndMetadata const& blobs)
{
static util::Logger const log{"RPC"}; // NOLINT(readability-identifier-naming)
try {
std::pair<std::shared_ptr<ripple::STTx const>, std::shared_ptr<ripple::STObject const>> result;
{
@@ -221,9 +224,9 @@ deserializeTxPlusMeta(data::TransactionAndMetadata const& blobs)
std::stringstream meta;
std::ranges::copy(blobs.transaction, std::ostream_iterator<unsigned char>(txn));
std::ranges::copy(blobs.metadata, std::ostream_iterator<unsigned char>(meta));
LOG(log.error()) << "Failed to deserialize transaction. txn = " << txn.str() << " - meta = " << meta.str()
<< " txn length = " << std::to_string(blobs.transaction.size())
<< " meta length = " << std::to_string(blobs.metadata.size());
LOG(gLog.error()) << "Failed to deserialize transaction. txn = " << txn.str() << " - meta = " << meta.str()
<< " txn length = " << std::to_string(blobs.transaction.size())
<< " meta length = " << std::to_string(blobs.metadata.size());
throw e;
}
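
The file-local logger pattern this hunk trades against function-local statics, as a standalone sketch; Logger here is a stand-in for util::Logger:

#include <iostream>
#include <string>

struct Logger {
    std::string tag;
    void
    error(std::string const& msg) const
    {
        std::cerr << tag << ": " << msg << "\n";
    }
};

namespace {
Logger const gLog{"RPC"};  // one instance per translation unit, shared below
}  // namespace

void
deserializeSomething()
{
    // the alternative: a function-local static, constructed on first call
    static Logger const log{"RPC"};
    log.error("function-local static variant");
    gLog.error("file-scope variant");
}

int
main()
{
    deserializeSomething();
}
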
}
@@ -259,11 +262,11 @@ toExpandedJson(
auto metaJson = toJson(*meta);
insertDeliveredAmount(metaJson, txn, meta, blobs.date);
insertDeliverMaxAlias(txnJson, apiVersion);
insertMPTIssuanceID(txnJson, meta);
insertMPTIssuanceID(metaJson, txn, meta);
if (nftEnabled == NFTokenjson::ENABLE) {
Json::Value nftJson;
ripple::RPC::insertNFTSyntheticInJson(nftJson, txn, *meta);
ripple::insertNFTSyntheticInJson(nftJson, txn, *meta);
// if there is no nft fields, the nftJson will be {"meta":null}
auto const nftBoostJson = toBoostJson(nftJson).as_object();
if (nftBoostJson.contains(JS(meta)) and nftBoostJson.at(JS(meta)).is_object()) {
@@ -344,15 +347,14 @@ getMPTIssuanceID(std::shared_ptr<ripple::TxMeta const> const& meta)
/**
* @brief Check if transaction has a new MPToken created
*
* @param txnJson The transaction Json
* @param txn The transaction
* @param meta The metadata
* @return true if the transaction can have an mpt_issuance_id
*/
static bool
canHaveMPTIssuanceID(boost::json::object const& txnJson, std::shared_ptr<ripple::TxMeta const> const& meta)
canHaveMPTIssuanceID(std::shared_ptr<ripple::STTx const> const& txn, std::shared_ptr<ripple::TxMeta const> const& meta)
{
if (txnJson.at(JS(TransactionType)).is_string() and
not boost::iequals(txnJson.at(JS(TransactionType)).as_string(), JS(MPTokenIssuanceCreate)))
if (txn->getTxnType() != ripple::ttMPTOKEN_ISSUANCE_CREATE)
return false;
if (meta->getResultTER() != ripple::tesSUCCESS)
@@ -362,13 +364,17 @@ canHaveMPTIssuanceID(boost::json::object const& txnJson, std::shared_ptr<ripple:
}
bool
insertMPTIssuanceID(boost::json::object& txnJson, std::shared_ptr<ripple::TxMeta const> const& meta)
insertMPTIssuanceID(
boost::json::object& metaJson,
std::shared_ptr<ripple::STTx const> const& txn,
std::shared_ptr<ripple::TxMeta const> const& meta
)
{
if (!canHaveMPTIssuanceID(txnJson, meta))
if (!canHaveMPTIssuanceID(txn, meta))
return false;
if (auto const id = getMPTIssuanceID(meta)) {
txnJson[JS(mpt_issuance_id)] = ripple::to_string(*id);
metaJson[JS(mpt_issuance_id)] = ripple::to_string(*id);
return true;
}
@@ -800,9 +806,7 @@ traverseOwnedNodes(
}
auto end = std::chrono::system_clock::now();
static util::Logger const log{"RPC"}; // NOLINT(readability-identifier-naming)
LOG(log.debug()) << fmt::format(
LOG(gLog.debug()) << fmt::format(
"Time loading owned directories: {} milliseconds, entries size: {}",
std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count(),
keys.size()
@@ -810,7 +814,7 @@ traverseOwnedNodes(
auto [objects, timeDiff] = util::timed([&]() { return backend.fetchLedgerObjects(keys, sequence, yield); });
LOG(log.debug()) << "Time loading owned entries: " << timeDiff << " milliseconds";
LOG(gLog.debug()) << "Time loading owned entries: " << timeDiff << " milliseconds";
for (auto i = 0u; i < objects.size(); ++i) {
ripple::SerialIter it{objects[i].data(), objects[i].size()};
@@ -1298,8 +1302,7 @@ postProcessOrderBook(
jsonOffers.push_back(offerJson);
} catch (std::exception const& e) {
util::Logger const log{"RPC"};
LOG(log.error()) << "caught exception: " << e.what();
LOG(gLog.error()) << "caught exception: " << e.what();
}
}
return jsonOffers;
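
The two MPTokenIssuanceCreate detection shapes the canHaveMPTIssuanceID hunk above trades between, sketched standalone; TxType is an invented stand-in for the real transaction type enum:

#include <boost/algorithm/string/predicate.hpp>
#include <boost/json.hpp>
#include <string>

enum class TxType { MPTokenIssuanceCreate, Payment };

// shape one: sniff the JSON TransactionType string, case-insensitively
bool
viaJson(boost::json::object const& txnJson)
{
    if (!txnJson.at("TransactionType").is_string())
        return false;
    auto const& s = txnJson.at("TransactionType").as_string();
    return boost::iequals(std::string(s.c_str()), "MPTokenIssuanceCreate");
}

// shape two: ask the typed transaction object directly
bool
viaEnum(TxType type)
{
    return type == TxType::MPTokenIssuanceCreate;
}

int
main()
{
    boost::json::object txn;
    txn["TransactionType"] = "MPTokenIssuanceCreate";
    return viaJson(txn) && viaEnum(TxType::MPTokenIssuanceCreate) ? 0 : 1;
}
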

View File

@@ -202,12 +202,17 @@ insertDeliveredAmount(
/**
* @brief Add "mpt_issuance_id" into MPTokenIssuanceCreate transaction json.
*
* @param txnJson The transaction Json object
* @param metaJson The metadata json object to add "MPTokenIssuanceID" to
* @param txn The transaction object
* @param meta The metadata object
* @return true if the "mpt_issuance_id" is added to the metadata json object
*/
bool
insertMPTIssuanceID(boost::json::object& txnJson, std::shared_ptr<ripple::TxMeta const> const& meta);
insertMPTIssuanceID(
boost::json::object& metaJson,
std::shared_ptr<ripple::STTx const> const& txn,
std::shared_ptr<ripple::TxMeta const> const& meta
);
/**
* @brief Convert STBase object to JSON

View File

@@ -19,9 +19,7 @@
#pragma once
#include "util/Assert.hpp"
#include "util/Mutex.hpp"
#include "util/Spawn.hpp"
#include "util/config/ConfigDefinition.hpp"
#include "util/log/Logger.hpp"
#include "util/prometheus/Counter.hpp"
@@ -129,7 +127,7 @@ public:
// Each time we enqueue a job, we want to post a symmetrical job that will dequeue and run the job at the front
// of the job queue.
util::spawn(
boost::asio::spawn(
ioc_,
[this, func = std::forward<FnType>(func), start = std::chrono::system_clock::now()](auto yield) mutable {
auto const run = std::chrono::system_clock::now();

View File

@@ -69,10 +69,9 @@ concept SomeProcessor = (SomeRequirement<T> or SomeModifier<T>);
* @brief A process function that expects both some Input and a Context.
*/
template <typename T>
concept SomeContextProcessWithInput =
requires(T a, typename T::Input const& in, typename T::Output out, Context const& ctx) {
{ a.process(in, ctx) } -> std::same_as<HandlerReturnType<decltype(out)>>;
};
concept SomeContextProcessWithInput = requires(T a, typename T::Input in, typename T::Output out, Context const& ctx) {
{ a.process(in, ctx) } -> std::same_as<HandlerReturnType<decltype(out)>>;
};
/**
* @brief A process function that expects no Input but does take a Context.
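
A self-contained sketch of the requires-expression shape adjusted above; Context, HandlerReturnType, and Handler are simplified stand-ins:

#include <concepts>

struct Context {};

template <typename T>
using HandlerReturnType = T;  // stand-in for the real expected wrapper

template <typename T>
concept SomeContextProcessWithInput =
    requires(T a, typename T::Input const& in, typename T::Output out, Context const& ctx) {
        { a.process(in, ctx) } -> std::same_as<HandlerReturnType<decltype(out)>>;
    };

struct Handler {
    struct Input {};
    struct Output {};
    Output
    process(Input const&, Context const&) const
    {
        return {};
    }
};

static_assert(SomeContextProcessWithInput<Handler>);
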

View File

@@ -23,7 +23,6 @@
#include "rpc/RPCHelpers.hpp"
#include "rpc/common/Types.hpp"
#include "util/AccountUtils.hpp"
#include "util/LedgerUtils.hpp"
#include "util/TimeUtils.hpp"
#include <boost/json/object.hpp>
@@ -33,7 +32,6 @@
#include <xrpl/basics/StringUtilities.h>
#include <xrpl/basics/base_uint.h>
#include <xrpl/protocol/AccountID.h>
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/Protocol.h>
#include <xrpl/protocol/UintTypes.h>
@@ -122,18 +120,6 @@ CustomValidator CustomValidators::ledgerIndexValidator =
return MaybeError{};
}};
CustomValidator CustomValidators::ledgerTypeValidator =
CustomValidator{[](boost::json::value const& value, std::string_view key) -> MaybeError {
if (!value.is_string())
return Error{Status{RippledError::rpcINVALID_PARAMS, fmt::format("Invalid field '{}', not string.", key)}};
auto const type = util::LedgerTypes::getLedgerEntryTypeFromStr(boost::json::value_to<std::string>(value));
if (type == ripple::ltANY)
return Error{Status{RippledError::rpcINVALID_PARAMS, fmt::format("Invalid field '{}'.", key)}};
return MaybeError{};
}};
CustomValidator CustomValidators::accountValidator =
CustomValidator{[](boost::json::value const& value, std::string_view key) -> MaybeError {
if (!value.is_string())
@@ -174,19 +160,6 @@ CustomValidator CustomValidators::accountMarkerValidator =
return MaybeError{};
}};
CustomValidator CustomValidators::accountTypeValidator =
CustomValidator{[](boost::json::value const& value, std::string_view key) -> MaybeError {
if (!value.is_string())
return Error{Status{RippledError::rpcINVALID_PARAMS, fmt::format("Invalid field '{}', not string.", key)}};
auto const type =
util::LedgerTypes::getAccountOwnedLedgerTypeFromStr(boost::json::value_to<std::string>(value));
if (type == ripple::ltANY)
return Error{Status{RippledError::rpcINVALID_PARAMS, fmt::format("Invalid field '{}'.", key)}};
return MaybeError{};
}};
CustomValidator CustomValidators::currencyValidator =
CustomValidator{[](boost::json::value const& value, std::string_view key) -> MaybeError {
if (!value.is_string())
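
The validator-lambda pattern used throughout this file, reduced to a standalone sketch; MaybeError is simplified here to an optional error string:

#include <fmt/format.h>

#include <boost/json.hpp>

#include <iostream>
#include <optional>
#include <string>
#include <string_view>

using MaybeError = std::optional<std::string>;  // nullopt means "valid"

auto const typeValidator = [](boost::json::value const& value, std::string_view key) -> MaybeError {
    if (!value.is_string())
        return fmt::format("Invalid field '{}', not string.", key);
    // the real validators then map the string to a ledger entry type and
    // reject names that resolve to ripple::ltANY
    return std::nullopt;
};

int
main()
{
    boost::json::value const v = 42;
    if (auto const err = typeValidator(v, "type"))
        std::cout << *err << "\n";  // Invalid field 'type', not string.
}
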

View File

@@ -486,14 +486,6 @@ struct CustomValidators final {
*/
static CustomValidator ledgerIndexValidator;
/**
* @brief Provides a validator for ledger type.
*
* A type accepts canonical names of ledger entry types (case insensitive) or short names.
* Used by ledger_data.
*/
static CustomValidator ledgerTypeValidator;
/**
* @brief Provides a commonly used validator for accounts.
*
@@ -516,14 +508,6 @@ struct CustomValidators final {
*/
static CustomValidator accountMarkerValidator;
/**
* @brief Provides a validator for account type.
*
* A type accepts canonical names of owned ledger entry types (case insensitive) or short names.
* Used by account_objects.
*/
static CustomValidator accountTypeValidator;
/**
* @brief Provides a commonly used validator for uint160(AccountID) hex string.
*

View File

@@ -77,7 +77,7 @@ toIso8601(ripple::NetClock::time_point tp)
namespace rpc {
AMMInfoHandler::Result
AMMInfoHandler::process(AMMInfoHandler::Input const& input, Context const& ctx) const
AMMInfoHandler::process(AMMInfoHandler::Input input, Context const& ctx) const
{
using namespace ripple;
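
A toy illustration of the signature change repeated across the handler files below: taking Input by value instead of by const reference lets the implementation move out of it (types invented):

#include <string>
#include <utility>

struct Input {
    std::string marker;
};

std::string
processByConstRef(Input const& in)
{
    return in.marker;  // must copy
}

std::string
processByValue(Input in)
{
    return std::move(in.marker);  // may move
}

int
main()
{
    Input in{.marker = "abc"};
    auto const a = processByConstRef(in);
    auto const b = processByValue(std::move(in));
    return a.size() == b.size() ? 0 : 1;
}
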

View File

@@ -111,7 +111,7 @@ public:
* @return The result of the operation
*/
Result
process(Input const& input, Context const& ctx) const;
process(Input input, Context const& ctx) const;
private:
/**

View File

@@ -81,7 +81,7 @@ AccountChannelsHandler::addChannel(std::vector<ChannelResponse>& jsonChannels, r
}
AccountChannelsHandler::Result
AccountChannelsHandler::process(AccountChannelsHandler::Input const& input, Context const& ctx) const
AccountChannelsHandler::process(AccountChannelsHandler::Input input, Context const& ctx) const
{
auto const range = sharedPtrBackend_->fetchLedgerRange();
ASSERT(range.has_value(), "AccountChannel's ledger range must be available");

View File

@@ -145,7 +145,7 @@ public:
* @return The result of the operation
*/
Result
process(Input const& input, Context const& ctx) const;
process(Input input, Context const& ctx) const;
private:
static void

View File

@@ -43,7 +43,7 @@
namespace rpc {
AccountCurrenciesHandler::Result
AccountCurrenciesHandler::process(AccountCurrenciesHandler::Input const& input, Context const& ctx) const
AccountCurrenciesHandler::process(AccountCurrenciesHandler::Input input, Context const& ctx) const
{
auto const range = sharedPtrBackend_->fetchLedgerRange();
ASSERT(range.has_value(), "AccountCurrencies' ledger range must be available");

View File

@@ -110,7 +110,7 @@ public:
* @return The result of the operation
*/
Result
process(Input const& input, Context const& ctx) const;
process(Input input, Context const& ctx) const;
private:
/**

View File

@@ -43,7 +43,6 @@
#include <algorithm>
#include <iterator>
#include <optional>
#include <string>
#include <string_view>
#include <utility>
@@ -51,7 +50,7 @@
namespace rpc {
AccountInfoHandler::Result
AccountInfoHandler::process(AccountInfoHandler::Input const& input, Context const& ctx) const
AccountInfoHandler::process(AccountInfoHandler::Input input, Context const& ctx) const
{
using namespace data;
@@ -89,18 +88,6 @@ AccountInfoHandler::process(AccountInfoHandler::Input const& input, Context cons
auto const isDisallowIncomingEnabled = isEnabled(Amendments::DisallowIncoming);
auto const isClawbackEnabled = isEnabled(Amendments::Clawback);
auto const isTokenEscrowEnabled = isEnabled(Amendments::TokenEscrow);
Output out{
.ledgerIndex = lgrInfo.seq,
.ledgerHash = ripple::strHex(lgrInfo.hash),
.accountData = sle,
.isDisallowIncomingEnabled = isDisallowIncomingEnabled,
.isClawbackEnabled = isClawbackEnabled,
.isTokenEscrowEnabled = isTokenEscrowEnabled,
.apiVersion = ctx.apiVersion,
.signerLists = std::nullopt
};
// Return SignerList(s) if that is requested.
if (input.signerLists) {
@@ -111,6 +98,7 @@ AccountInfoHandler::process(AccountInfoHandler::Input const& input, Context cons
// This code will need to be revisited if in the future we
// support multiple SignerLists on one account.
auto const signers = sharedPtrBackend_->fetchLedgerObject(signersKey.key, lgrInfo.seq, ctx.yield);
std::vector<ripple::STLedgerEntry> signerList;
if (signers) {
ripple::STLedgerEntry const sleSigners{
@@ -120,11 +108,23 @@ AccountInfoHandler::process(AccountInfoHandler::Input const& input, Context cons
if (!signersKey.check(sleSigners))
return Error{Status{RippledError::rpcDB_DESERIALIZATION}};
out.signerLists = std::vector<ripple::STLedgerEntry>{sleSigners};
signerList.push_back(sleSigners);
}
return Output(
lgrInfo.seq,
ripple::strHex(lgrInfo.hash),
sle,
isDisallowIncomingEnabled,
isClawbackEnabled,
ctx.apiVersion,
signerList
);
}
return out;
return Output(
lgrInfo.seq, ripple::strHex(lgrInfo.hash), sle, isDisallowIncomingEnabled, isClawbackEnabled, ctx.apiVersion
);
}
void
@@ -159,11 +159,9 @@ tag_invoke(boost::json::value_from_tag, boost::json::value& jv, AccountInfoHandl
lsFlags.insert(lsFlags.end(), disallowIncomingFlags.begin(), disallowIncomingFlags.end());
}
if (output.isClawbackEnabled)
if (output.isClawbackEnabled) {
lsFlags.emplace_back("allowTrustLineClawback", ripple::lsfAllowTrustLineClawback);
if (output.isTokenEscrowEnabled)
lsFlags.emplace_back("allowTrustLineLocking", ripple::lsfAllowTrustLineLocking);
}
boost::json::object acctFlags;
for (auto const& lsf : lsFlags)

View File

@@ -61,11 +61,40 @@ public:
ripple::STLedgerEntry accountData;
bool isDisallowIncomingEnabled = false;
bool isClawbackEnabled = false;
bool isTokenEscrowEnabled = false;
uint32_t apiVersion;
std::optional<std::vector<ripple::STLedgerEntry>> signerLists;
// validated should be sent via framework
bool validated = true;
/**
* @brief Construct a new Output object
*
* @param ledgerId The ledger index
* @param ledgerHash The ledger hash
* @param sle The account data
* @param isDisallowIncomingEnabled Whether disallow incoming is enabled
* @param isClawbackEnabled Whether clawback is enabled
* @param version The API version
* @param signerLists The signer lists
*/
Output(
uint32_t ledgerId,
std::string ledgerHash,
ripple::STLedgerEntry sle,
bool isDisallowIncomingEnabled,
bool isClawbackEnabled,
uint32_t version,
std::optional<std::vector<ripple::STLedgerEntry>> signerLists = std::nullopt
)
: ledgerIndex(ledgerId)
, ledgerHash(std::move(ledgerHash))
, accountData(std::move(sle))
, isDisallowIncomingEnabled(isDisallowIncomingEnabled)
, isClawbackEnabled(isClawbackEnabled)
, apiVersion(version)
, signerLists(std::move(signerLists))
{
}
};
/**
@@ -130,7 +159,7 @@ public:
* @return The result of the operation
*/
Result
process(Input const& input, Context const& ctx) const;
process(Input input, Context const& ctx) const;
private:
/**

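A toy illustration of the two Output initialization styles this file's hunks switch between, designated initializers versus an explicit constructor (the struct is invented):

#include <cstdint>
#include <optional>
#include <string>
#include <vector>

struct Output {
    std::uint32_t ledgerIndex;
    std::string ledgerHash;
    bool isClawbackEnabled = false;
    std::optional<std::vector<int>> signerLists;
};

int
main()
{
    // aggregate with designated initializers (C++20), as on one side of the hunk
    Output const viaDesignated{
        .ledgerIndex = 1, .ledgerHash = "AB", .isClawbackEnabled = true, .signerLists = std::nullopt
    };

    // the other side adds an explicit constructor and writes Output(1, "AB", true):
    // same data, more boilerplate, but terser call sites when most fields are
    // passed every time
    return viaDesignated.ledgerIndex == 1 ? 0 : 1;
}
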
Some files were not shown because too many files have changed in this diff.