Merge remote-tracking branch 'origin/develop' into release/2.6.0

Alex Kremer
2025-10-08 13:52:49 +01:00
59 changed files with 346 additions and 288 deletions

View File

@@ -6,7 +6,7 @@ inputs:
description: Space-separated build target names
default: all
subtract_threads:
description: An option for the action get_number_of_threads. See get_number_of_threads
description: An option for the action get-threads-number.
required: true
default: "0"
@@ -14,7 +14,7 @@ runs:
using: composite
steps:
- name: Get number of threads
uses: ./.github/actions/get_number_of_threads
uses: ./.github/actions/get-threads-number
id: number_of_threads
with:
subtract_threads: ${{ inputs.subtract_threads }}

View File

@@ -34,14 +34,14 @@ runs:
steps:
- name: Login to DockerHub
if: ${{ inputs.push_image == 'true' && inputs.dockerhub_repo != '' }}
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_PW }}
- name: Login to GitHub Container Registry
if: ${{ inputs.push_image == 'true' }}
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

View File

@@ -27,7 +27,7 @@ runs:
steps:
- name: Find common commit
id: git_common_ancestor
uses: ./.github/actions/git_common_ancestor
uses: ./.github/actions/git-common-ancestor
- name: Restore ccache cache
uses: actions/cache/restore@v4

View File

@@ -28,7 +28,7 @@ runs:
steps:
- name: Find common commit
id: git_common_ancestor
uses: ./.github/actions/git_common_ancestor
uses: ./.github/actions/git-common-ancestor
- name: Save ccache cache
if: ${{ inputs.ccache_cache_hit != 'true' || inputs.ccache_cache_miss_rate == '100.0' }}

View File

@@ -14,7 +14,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/build_clio/
directory: .github/actions/build-clio/
schedule:
interval: weekly
day: monday
@@ -27,7 +27,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/build_docker_image/
directory: .github/actions/build-docker-image/
schedule:
interval: weekly
day: monday
@@ -53,7 +53,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/code_coverage/
directory: .github/actions/code-coverage/
schedule:
interval: weekly
day: monday
@@ -79,7 +79,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/create_issue/
directory: .github/actions/create-issue/
schedule:
interval: weekly
day: monday
@@ -92,7 +92,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/get_number_of_threads/
directory: .github/actions/get-threads-number/
schedule:
interval: weekly
day: monday
@@ -105,7 +105,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/git_common_ancestor/
directory: .github/actions/git-common-ancestor/
schedule:
interval: weekly
day: monday
@@ -118,7 +118,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/restore_cache/
directory: .github/actions/restore-cache/
schedule:
interval: weekly
day: monday
@@ -131,7 +131,7 @@ updates:
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/save_cache/
directory: .github/actions/save-cache/
schedule:
interval: weekly
day: monday

View File

@@ -3,7 +3,9 @@ import itertools
import json
LINUX_OS = ["heavy", "heavy-arm64"]
LINUX_CONTAINERS = ['{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }']
LINUX_CONTAINERS = [
'{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
]
LINUX_COMPILERS = ["gcc", "clang"]
MACOS_OS = ["macos15"]

View File

@@ -89,7 +89,7 @@ jobs:
echo "GHCR_REPO=$(echo ghcr.io/${{ github.repository_owner }} | tr '[:upper:]' '[:lower:]')" >> ${GITHUB_OUTPUT}
- name: Build Docker image
uses: ./.github/actions/build_docker_image
uses: ./.github/actions/build-docker-image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}

View File

@@ -8,14 +8,14 @@ on:
paths:
- .github/workflows/build.yml
- .github/workflows/build_and_test.yml
- .github/workflows/build_impl.yml
- .github/workflows/test_impl.yml
- .github/workflows/upload_coverage_report.yml
- .github/workflows/reusable-build-test.yml
- .github/workflows/reusable-build.yml
- .github/workflows/reusable-test.yml
- .github/workflows/reusable-upload-coverage-report.yml
- ".github/actions/**"
- "!.github/actions/build_docker_image/**"
- "!.github/actions/create_issue/**"
- "!.github/actions/build-docker-image/**"
- "!.github/actions/create-issue/**"
- CMakeLists.txt
- conanfile.py
@@ -45,7 +45,7 @@ jobs:
build_type: [Release, Debug]
container:
[
'{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }',
'{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }',
]
static: [true]
@@ -56,7 +56,7 @@ jobs:
container: ""
static: false
uses: ./.github/workflows/build_and_test.yml
uses: ./.github/workflows/reusable-build-test.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
@@ -72,10 +72,10 @@ jobs:
code_coverage:
name: Run Code Coverage
uses: ./.github/workflows/build_impl.yml
uses: ./.github/workflows/reusable-build.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
conan_profile: gcc
build_type: Debug
download_ccache: true
@@ -91,10 +91,10 @@ jobs:
package:
name: Build packages
uses: ./.github/workflows/build_impl.yml
uses: ./.github/workflows/reusable-build.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
conan_profile: gcc
build_type: Release
download_ccache: true
@@ -111,7 +111,7 @@ jobs:
needs: build-and-test
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
steps:
- uses: actions/checkout@v4

View File

@@ -17,7 +17,7 @@ jobs:
name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
steps:
- uses: actions/checkout@v4
@@ -51,7 +51,7 @@ jobs:
conan_profile: ${{ env.CONAN_PROFILE }}
- name: Build Clio
uses: ./.github/actions/build_clio
uses: ./.github/actions/build-clio
- name: Strip tests
run: strip build/clio_tests
@@ -67,7 +67,7 @@ jobs:
needs: build
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
steps:
- uses: actions/download-artifact@v5
@@ -93,7 +93,7 @@ jobs:
- uses: actions/checkout@v4
- name: Create an issue
uses: ./.github/actions/create_issue
uses: ./.github/actions/create-issue
env:
GH_TOKEN: ${{ github.token }}
with:

View File

@@ -15,3 +15,10 @@ jobs:
task_types: '["build","feat","fix","docs","test","ci","style","refactor","perf","chore"]'
add_label: false
custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'
- name: Check if message starts with upper-case letter
run: |
if [[ ! "${{ github.event.pull_request.title }}" =~ ^[a-z]+:\ [\[A-Z] ]]; then
echo "Error: PR title must start with an upper-case letter."
exit 1
fi

View File

@@ -27,7 +27,7 @@ jobs:
if: github.event_name != 'push' || contains(github.event.head_commit.message, 'clang-tidy auto fixes')
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
permissions:
contents: write
@@ -45,7 +45,7 @@ jobs:
disable_ccache: true
- name: Restore cache
uses: ./.github/actions/restore_cache
uses: ./.github/actions/restore-cache
id: restore_cache
with:
conan_profile: ${{ env.CONAN_PROFILE }}
@@ -62,7 +62,7 @@ jobs:
conan_profile: ${{ env.CONAN_PROFILE }}
- name: Get number of threads
uses: ./.github/actions/get_number_of_threads
uses: ./.github/actions/get-threads-number
id: number_of_threads
- name: Run clang-tidy
@@ -90,7 +90,7 @@ jobs:
- name: Create an issue
if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
id: create_issue
uses: ./.github/actions/create_issue
uses: ./.github/actions/create-issue
env:
GH_TOKEN: ${{ github.token }}
with:

View File

@@ -14,7 +14,7 @@ jobs:
build:
runs-on: ubuntu-latest
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
steps:
- name: Checkout

View File

@@ -8,14 +8,14 @@ on:
paths:
- .github/workflows/nightly.yml
- .github/workflows/release_impl.yml
- .github/workflows/build_and_test.yml
- .github/workflows/build_impl.yml
- .github/workflows/test_impl.yml
- .github/workflows/build_clio_docker_image.yml
- .github/workflows/reusable-release.yml
- .github/workflows/reusable-build-test.yml
- .github/workflows/reusable-build.yml
- .github/workflows/reusable-test.yml
- .github/workflows/build-clio-docker-image.yml
- ".github/actions/**"
- "!.github/actions/code_coverage/**"
- "!.github/actions/code-coverage/**"
- .github/scripts/prepare-release-artifacts.sh
concurrency:
@@ -39,19 +39,19 @@ jobs:
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
- os: heavy
conan_profile: gcc
build_type: Debug
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
- os: heavy
conan_profile: gcc.ubsan
build_type: Release
static: false
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
uses: ./.github/workflows/build_and_test.yml
uses: ./.github/workflows/reusable-build-test.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
@@ -73,13 +73,13 @@ jobs:
include:
- os: heavy
conan_profile: clang
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
static: true
- os: macos15
conan_profile: apple-clang
container: ""
static: false
uses: ./.github/workflows/build_impl.yml
uses: ./.github/workflows/reusable-build.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
@@ -95,7 +95,7 @@ jobs:
nightly_release:
needs: build-and-test
uses: ./.github/workflows/release_impl.yml
uses: ./.github/workflows/reusable-release.yml
with:
overwrite_release: true
prerelease: true
@@ -109,7 +109,7 @@ jobs:
draft: false
build_and_publish_docker_image:
uses: ./.github/workflows/build_clio_docker_image.yml
uses: ./.github/workflows/build-clio-docker-image.yml
needs: build-and-test
secrets: inherit
with:
@@ -133,7 +133,7 @@ jobs:
- uses: actions/checkout@v4
- name: Create an issue
uses: ./.github/actions/create_issue
uses: ./.github/actions/create-issue
env:
GH_TOKEN: ${{ github.token }}
with:

View File

@@ -11,4 +11,4 @@ jobs:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@afbcbdafbe0ce5439492fb87eda6441371086386
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-pre-commit:213752862ca95ecadeb59a6176c3db91a7864b3e" }'

View File

@@ -29,9 +29,9 @@ jobs:
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
uses: ./.github/workflows/build_and_test.yml
uses: ./.github/workflows/reusable-build-test.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
@@ -47,7 +47,7 @@ jobs:
release:
needs: build-and-test
uses: ./.github/workflows/release_impl.yml
uses: ./.github/workflows/reusable-release.yml
with:
overwrite_release: false
prerelease: ${{ contains(github.ref_name, '-') }}

View File

@@ -77,7 +77,7 @@ on:
jobs:
build:
uses: ./.github/workflows/build_impl.yml
uses: ./.github/workflows/reusable-build.yml
with:
runs_on: ${{ inputs.runs_on }}
container: ${{ inputs.container }}
@@ -95,7 +95,7 @@ jobs:
test:
needs: build
uses: ./.github/workflows/test_impl.yml
uses: ./.github/workflows/reusable-test.yml
with:
runs_on: ${{ inputs.runs_on }}
container: ${{ inputs.container }}

View File

@@ -106,7 +106,7 @@ jobs:
- name: Restore cache
if: ${{ inputs.download_ccache }}
uses: ./.github/actions/restore_cache
uses: ./.github/actions/restore-cache
id: restore_cache
with:
conan_profile: ${{ inputs.conan_profile }}
@@ -131,7 +131,7 @@ jobs:
package: ${{ inputs.package }}
- name: Build Clio
uses: ./.github/actions/build_clio
uses: ./.github/actions/build-clio
with:
targets: ${{ inputs.targets }}
@@ -198,7 +198,7 @@ jobs:
- name: Save cache
if: ${{ inputs.upload_ccache && github.ref == 'refs/heads/develop' }}
uses: ./.github/actions/save_cache
uses: ./.github/actions/save-cache
with:
conan_profile: ${{ inputs.conan_profile }}
ccache_dir: ${{ env.CCACHE_DIR }}
@@ -216,7 +216,7 @@ jobs:
# It's all available in the build job, but not in the test job
- name: Run code coverage
if: ${{ inputs.code_coverage }}
uses: ./.github/actions/code_coverage
uses: ./.github/actions/code-coverage
- name: Verify expected version
if: ${{ inputs.expected_version != '' }}
@@ -238,6 +238,6 @@ jobs:
if: ${{ inputs.code_coverage }}
name: Codecov
needs: build
uses: ./.github/workflows/upload_coverage_report.yml
uses: ./.github/workflows/reusable-upload-coverage-report.yml
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -42,7 +42,7 @@ jobs:
release:
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}

View File

@@ -91,7 +91,7 @@ jobs:
- name: Create an issue
if: ${{ false && env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true' }}
uses: ./.github/actions/create_issue
uses: ./.github/actions/create-issue
env:
GH_TOKEN: ${{ github.token }}
with:

View File

@@ -1,7 +1,6 @@
name: Upload report
on:
workflow_dispatch:
workflow_call:
secrets:
CODECOV_TOKEN:

View File

@@ -8,13 +8,13 @@ on:
paths:
- .github/workflows/sanitizers.yml
- .github/workflows/build_and_test.yml
- .github/workflows/build_impl.yml
- .github/workflows/test_impl.yml
- .github/workflows/reusable-build-test.yml
- .github/workflows/reusable-build.yml
- .github/workflows/reusable-test.yml
- ".github/actions/**"
- "!.github/actions/build_docker_image/**"
- "!.github/actions/create_issue/**"
- "!.github/actions/build-docker-image/**"
- "!.github/actions/create-issue/**"
- .github/scripts/execute-tests-under-sanitizer
- CMakeLists.txt
@@ -41,10 +41,10 @@ jobs:
sanitizer_ext: [.asan, .tsan, .ubsan]
build_type: [Release, Debug]
uses: ./.github/workflows/build_and_test.yml
uses: ./.github/workflows/reusable-build-test.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
download_ccache: false
upload_ccache: false
conan_profile: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}

View File

@@ -3,23 +3,23 @@ name: Update CI docker image
on:
pull_request:
paths:
- .github/workflows/update_docker_ci.yml
- .github/workflows/update-docker-ci.yml
- ".github/actions/build_docker_image/**"
- ".github/actions/build-docker-image/**"
- "docker/ci/**"
- "docker/compilers/**"
- "docker/tools/**"
- "docker/**"
- "!docker/clio/**"
- "!docker/develop/**"
push:
branches: [develop]
paths:
- .github/workflows/update_docker_ci.yml
- .github/workflows/update-docker-ci.yml
- ".github/actions/build_docker_image/**"
- ".github/actions/build-docker-image/**"
- "docker/ci/**"
- "docker/compilers/**"
- "docker/tools/**"
- "docker/**"
- "!docker/clio/**"
- "!docker/develop/**"
workflow_dispatch:
concurrency:
@@ -60,7 +60,7 @@ jobs:
with:
files: "docker/compilers/gcc/**"
- uses: ./.github/actions/build_docker_image
- uses: ./.github/actions/build-docker-image
if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -94,11 +94,11 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/gcc/**"
- uses: ./.github/actions/build_docker_image
- uses: ./.github/actions/build-docker-image
if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
- name: Login to GitHub Container Registry
if: ${{ github.event_name != 'pull_request' }}
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -149,7 +149,7 @@ jobs:
- name: Login to DockerHub
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' }}
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_PW }}
@@ -187,7 +187,7 @@ jobs:
with:
files: "docker/compilers/clang/**"
- uses: ./.github/actions/build_docker_image
- uses: ./.github/actions/build-docker-image
if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -223,7 +223,7 @@ jobs:
with:
files: "docker/tools/**"
- uses: ./.github/actions/build_docker_image
- uses: ./.github/actions/build-docker-image
if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -250,11 +250,11 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/tools/**"
- uses: ./.github/actions/build_docker_image
- uses: ./.github/actions/build-docker-image
if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -290,7 +290,7 @@ jobs:
- name: Login to GitHub Container Registry
if: ${{ github.event_name != 'pull_request' }}
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -306,6 +306,28 @@ jobs:
$image:arm64-latest \
$image:amd64-latest
pre-commit:
name: Build and push pre-commit docker image
runs-on: heavy
needs: [repo, tools-merge]
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/build-docker-image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-pre-commit
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/pre-commit
tags: |
type=raw,value=latest
type=raw,value=${{ github.sha }}
platforms: linux/amd64,linux/arm64
build_args: |
GHCR_REPO=${{ needs.repo.outputs.GHCR_REPO }}
ci:
name: Build and push CI docker image
runs-on: heavy
@@ -313,7 +335,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/build_docker_image
- uses: ./.github/actions/build-docker-image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}

View File

@@ -18,7 +18,7 @@ on:
pull_request:
branches: [develop]
paths:
- .github/workflows/upload_conan_deps.yml
- .github/workflows/upload-conan-deps.yml
- .github/actions/conan/action.yml
- ".github/scripts/conan/**"
@@ -28,7 +28,7 @@ on:
push:
branches: [develop]
paths:
- .github/workflows/upload_conan_deps.yml
- .github/workflows/upload-conan-deps.yml
- .github/actions/conan/action.yml
- ".github/scripts/conan/**"

View File

@@ -43,7 +43,7 @@ repos:
# hadolint-docker is a special hook that runs hadolint in a Docker container
# Docker is not installed in the environment where pre-commit is run
stages: [manual]
entry: hadolint/hadolint:v2.14 hadolint
entry: hadolint/hadolint:v2.14.0 hadolint
- repo: https://github.com/codespell-project/codespell
rev: 63c8f8312b7559622c0d82815639671ae42132ac # frozen: v2.4.1

View File

@@ -43,26 +43,20 @@ RUN apt-get update \
&& rm -rf /var/lib/apt/lists/*
# Install Python tools
ARG PYTHON_VERSION=3.13
RUN add-apt-repository ppa:deadsnakes/ppa \
&& apt-get update \
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
python${PYTHON_VERSION} \
python${PYTHON_VERSION}-venv \
python3 \
python3-pip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl -sS https://bootstrap.pypa.io/get-pip.py | python${PYTHON_VERSION}
# Create a virtual environment for python tools
RUN python${PYTHON_VERSION} -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"
&& rm -rf /var/lib/apt/lists/*
RUN pip install -q --no-cache-dir \
# TODO: Remove this once we switch to newer Ubuntu base image
# lxml 6.0.0 is not compatible with our image
'lxml<6.0.0' \
cmake \
conan==2.20.1 \
gcovr \
pre-commit
gcovr
# Install LLVM tools
ARG LLVM_TOOLS_VERSION=20

View File

@@ -9,7 +9,7 @@ The image is based on Ubuntu 20.04 and contains:
- Clang 19
- ClangBuildAnalyzer 1.6.0
- Conan 2.20.1
- Doxygen 1.12
- Doxygen 1.14
- GCC 15.2.0
- GDB 16.3
- gh 2.74

View File

@@ -1,6 +1,6 @@
services:
clio_develop:
image: ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
volumes:
- clio_develop_conan_data:/root/.conan2/p
- clio_develop_ccache:/root/.ccache

View File

@@ -0,0 +1,37 @@
ARG GHCR_REPO=invalid
FROM ${GHCR_REPO}/clio-tools:latest AS clio-tools
# We're using Ubuntu 24.04 to have a more recent version of Python
FROM ubuntu:24.04
ARG DEBIAN_FRONTEND=noninteractive
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# hadolint ignore=DL3002
USER root
WORKDIR /root
# Install common tools and dependencies
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
curl \
git \
software-properties-common \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install Python tools
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
python3 \
python3-pip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN pip install -q --no-cache-dir --break-system-packages \
pre-commit
COPY --from=clio-tools \
/usr/local/bin/doxygen \
/usr/local/bin/

View File

@@ -51,7 +51,7 @@ RUN apt-get update \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ARG DOXYGEN_VERSION=1.12.0
ARG DOXYGEN_VERSION=1.14.0
RUN wget --progress=dot:giga "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& cd "doxygen-${DOXYGEN_VERSION}" \

View File

@@ -15,6 +15,7 @@ EXTRACT_ANON_NSPACES = NO
SORT_MEMBERS_CTORS_1ST = YES
INPUT = ${SOURCE}/src
USE_MDFILE_AS_MAINPAGE = ${SOURCE}/src/README.md
EXCLUDE_SYMBOLS = ${EXCLUDES}
RECURSIVE = YES
HAVE_DOT = ${USE_DOT}

View File

@@ -177,7 +177,7 @@ There are several CMake options you can use to customize the build:
### Generating API docs for Clio
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen 1.12.0 on your system.
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen 1.14.0 on your system.
To generate the API docs, please use CMake option `-Ddocs=ON` as described above and build the `docs` target.
@@ -191,7 +191,7 @@ Open the `index.html` file in your browser to see the documentation pages.
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
```sh
docker run -it ghcr.io/xrplf/clio-ci:384e79cd32f5f6c0ab9be3a1122ead41c5a7e67d
docker run -it ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
git clone https://github.com/XRPLF/clio
cd clio
```

View File

@@ -77,7 +77,7 @@ It's possible to configure `minimum`, `maximum` and `default` version like so:
All of the above are optional.
Clio will fallback to hardcoded defaults when these values are not specified in the config file, or if the configured values are outside of the minimum and maximum supported versions hardcoded in [src/rpc/common/APIVersion.h](../src/rpc/common/APIVersion.hpp).
Clio will fallback to hardcoded defaults when these values are not specified in the config file, or if the configured values are outside of the minimum and maximum supported versions hardcoded in [src/rpc/common/APIVersion.hpp](../src/rpc/common/APIVersion.hpp).
> [!TIP]
> See the [example-config.json](../docs/examples/config/example-config.json) for more details.

View File

@@ -36,19 +36,19 @@ EOF
exit 0
fi
# Check version of doxygen is at least 1.12
# Check version of doxygen is at least 1.14
version=$($DOXYGEN --version | grep -o '[0-9\.]*')
if [[ "1.12.0" > "$version" ]]; then
if [[ "1.14.0" > "$version" ]]; then
# No hard error if doxygen version is not the one we want - let CI deal with it
cat <<EOF
ERROR
-----------------------------------------------------------------------------
A minimum of version 1.12 of `which doxygen` is required.
Your version is $version. Please upgrade it for next time.
A minimum of version 1.14 of `which doxygen` is required.
Your version is $version. Please upgrade it.
Your changes may fail to pass CI once pushed.
Your changes may fail CI checks.
-----------------------------------------------------------------------------
EOF

src/README.md (new file, 20 lines)
View File

@@ -0,0 +1,20 @@
# Clio API server
## Introduction
Clio is an XRP Ledger API server optimized for RPC calls over WebSocket or JSON-RPC.
It stores validated historical ledger and transaction data in a more space efficient format, and uses up to 4 times
less space than [rippled](https://github.com/XRPLF/rippled).
Clio can be configured to store data in [Apache Cassandra](https://cassandra.apache.org/_/index.html) or
[ScyllaDB](https://www.scylladb.com/), enabling scalable read throughput. Multiple Clio nodes can share
access to the same dataset, which allows for a highly available cluster of Clio nodes without the need for redundant
data storage or computation.
## Develop
As you prepare to develop code for Clio, please be sure you are aware of our current
[Contribution guidelines](https://github.com/XRPLF/clio/blob/develop/CONTRIBUTING.md).
Read about @ref "rpc" carefully to know more about writing your own handlers for Clio.

View File

@@ -134,7 +134,7 @@ public:
auto const startTaxon = cursorIn.has_value() ? ripple::nft::toUInt32(ripple::nft::getTaxon(*cursorIn)) : 0;
auto const startTokenID = cursorIn.value_or(ripple::uint256(0));
Statement firstQuery = schema_->selectNFTIDsByIssuerTaxon.bind(issuer);
Statement const firstQuery = schema_->selectNFTIDsByIssuerTaxon.bind(issuer);
firstQuery.bindAt(1, startTaxon);
firstQuery.bindAt(2, startTokenID);
firstQuery.bindAt(3, Limit{limit});
@@ -147,7 +147,7 @@ public:
if (nftIDs.size() < limit) {
auto const remainingLimit = limit - nftIDs.size();
Statement secondQuery = schema_->selectNFTsAfterTaxonKeyspaces.bind(issuer);
Statement const secondQuery = schema_->selectNFTsAfterTaxonKeyspaces.bind(issuer);
secondQuery.bindAt(1, startTaxon);
secondQuery.bindAt(2, Limit{remainingLimit});
@@ -197,7 +197,7 @@ private:
) const
{
std::vector<ripple::uint256> nftIDs;
Statement statement = schema_->selectNFTIDsByIssuerTaxon.bind(issuer);
Statement const statement = schema_->selectNFTIDsByIssuerTaxon.bind(issuer);
statement.bindAt(1, taxon);
statement.bindAt(2, cursorIn.value_or(ripple::uint256(0)));
statement.bindAt(3, Limit{limit});

View File

@@ -1,8 +1,10 @@
# Backend
# Backend
@page "backend" Backend
The backend of Clio is responsible for handling the proper reading and writing of past ledger data from and to a given database. Currently, Cassandra and ScyllaDB are the only supported databases that are production-ready.
To support additional database types, you can create new classes that implement the virtual methods in [BackendInterface.h](https://github.com/XRPLF/clio/blob/develop/src/data/BackendInterface.hpp). Then, leveraging the Factory Object Design Pattern, modify [BackendFactory.h](https://github.com/XRPLF/clio/blob/develop/src/data/BackendFactory.hpp) with logic that returns the new database interface if the relevant `type` is provided in Clio's configuration file.
To support additional database types, you can create new classes that implement the virtual methods in [BackendInterface.hpp](https://github.com/XRPLF/clio/blob/develop/src/data/BackendInterface.hpp). Then, leveraging the Factory Object Design Pattern, modify [BackendFactory.hpp](https://github.com/XRPLF/clio/blob/develop/src/data/BackendFactory.hpp) with logic that returns the new database interface if the relevant `type` is provided in Clio's configuration file.
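
As an illustration of the factory wiring described above, here is a minimal sketch (hypothetical names such as `MyNewBackend` and `makeBackend`; not Clio's actual factory code) of dispatching on the configured database `type`:

```cpp
// Hedged sketch: how a new backend type could be returned from a factory
// that inspects the "type" value from the configuration file.
#include <memory>
#include <stdexcept>
#include <string>

struct BackendInterface {
    virtual ~BackendInterface() = default;
    // ... virtual fetch/write methods to implement for the new database
};
struct CassandraBackend : BackendInterface { /* existing production backend */ };
struct MyNewBackend : BackendInterface { /* implements the same virtual methods */ };

inline std::unique_ptr<BackendInterface>
makeBackend(std::string const& type)
{
    if (type == "cassandra")
        return std::make_unique<CassandraBackend>();
    if (type == "mynewdb")  // hypothetical value for the newly added database
        return std::make_unique<MyNewBackend>();
    throw std::runtime_error("Unknown database type: " + type);
}
```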
## Data Model

View File

@@ -146,9 +146,6 @@ public:
*/
CassandraBackendFamily(CassandraBackendFamily&&) = delete;
/**
* @copydoc BackendInterface::fetchAccountTransactions
*/
TransactionsAndCursor
fetchAccountTransactions(
ripple::AccountID const& account,
@@ -217,18 +214,12 @@ public:
return {txns, {}};
}
/**
* @copydoc BackendInterface::waitForWritesToFinish
*/
void
waitForWritesToFinish() override
{
executor_.sync();
}
/**
* @copydoc BackendInterface::writeLedger
*/
void
writeLedger(ripple::LedgerHeader const& ledgerHeader, std::string&& blob) override
{
@@ -239,9 +230,6 @@ public:
ledgerSequence_ = ledgerHeader.seq;
}
/**
* @copydoc BackendInterface::fetchLatestLedgerSequence
*/
std::optional<std::uint32_t>
fetchLatestLedgerSequence(boost::asio::yield_context yield) const override
{
@@ -262,9 +250,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::fetchLedgerBySequence
*/
std::optional<ripple::LedgerHeader>
fetchLedgerBySequence(std::uint32_t const sequence, boost::asio::yield_context yield) const override
{
@@ -292,9 +277,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::fetchLedgerByHash
*/
std::optional<ripple::LedgerHeader>
fetchLedgerByHash(ripple::uint256 const& hash, boost::asio::yield_context yield) const override
{
@@ -315,9 +297,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::hardFetchLedgerRange(boost::asio::yield_context) const
*/
std::optional<LedgerRange>
hardFetchLedgerRange(boost::asio::yield_context yield) const override
{
@@ -356,9 +335,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::fetchAllTransactionsInLedger
*/
std::vector<TransactionAndMetadata>
fetchAllTransactionsInLedger(std::uint32_t const ledgerSequence, boost::asio::yield_context yield) const override
{
@@ -366,9 +342,6 @@ public:
return fetchTransactions(hashes, yield);
}
/**
* @copydoc BackendInterface::fetchAllTransactionHashesInLedger
*/
std::vector<ripple::uint256>
fetchAllTransactionHashesInLedger(
std::uint32_t const ledgerSequence,
@@ -402,9 +375,6 @@ public:
return hashes;
}
/**
* @copydoc BackendInterface::fetchNFT
*/
std::optional<NFT>
fetchNFT(
ripple::uint256 const& tokenID,
@@ -444,9 +414,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::fetchNFTTransactions
*/
TransactionsAndCursor
fetchNFTTransactions(
ripple::uint256 const& tokenID,
@@ -518,9 +485,6 @@ public:
return {txns, {}};
}
/**
* @copydoc BackendInterface::fetchMPTHolders
*/
MPTHoldersAndCursor
fetchMPTHolders(
ripple::uint192 const& mptID,
@@ -560,9 +524,6 @@ public:
return {mptObjects, {}};
}
/**
* @copydoc BackendInterface::doFetchLedgerObject
*/
std::optional<Blob>
doFetchLedgerObject(
ripple::uint256 const& key,
@@ -585,9 +546,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::doFetchLedgerObjectSeq
*/
std::optional<std::uint32_t>
doFetchLedgerObjectSeq(
ripple::uint256 const& key,
@@ -609,9 +567,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::fetchTransaction
*/
std::optional<TransactionAndMetadata>
fetchTransaction(ripple::uint256 const& hash, boost::asio::yield_context yield) const override
{
@@ -629,9 +584,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::doFetchSuccessorKey
*/
std::optional<ripple::uint256>
doFetchSuccessorKey(
ripple::uint256 key,
@@ -654,9 +606,6 @@ public:
return std::nullopt;
}
/**
* @copydoc BackendInterface::fetchTransactions
*/
std::vector<TransactionAndMetadata>
fetchTransactions(std::vector<ripple::uint256> const& hashes, boost::asio::yield_context yield) const override
{
@@ -698,9 +647,6 @@ public:
return results;
}
/**
* @copydoc BackendInterface::doFetchLedgerObjects
*/
std::vector<Blob>
doFetchLedgerObjects(
std::vector<ripple::uint256> const& keys,
@@ -741,9 +687,6 @@ public:
return results;
}
/**
* @copydoc BackendInterface::fetchLedgerDiff
*/
std::vector<LedgerObject>
fetchLedgerDiff(std::uint32_t const ledgerSequence, boost::asio::yield_context yield) const override
{
@@ -789,9 +732,6 @@ public:
return results;
}
/**
* @copydoc BackendInterface::fetchMigratorStatus
*/
std::optional<std::string>
fetchMigratorStatus(std::string const& migratorName, boost::asio::yield_context yield) const override
{
@@ -812,9 +752,6 @@ public:
return {};
}
/**
* @copydoc BackendInterface::fetchClioNodesData
*/
std::expected<std::vector<std::pair<boost::uuids::uuid, std::string>>, std::string>
fetchClioNodesData(boost::asio::yield_context yield) const override
{
@@ -831,9 +768,6 @@ public:
return result;
}
/**
* @copydoc BackendInterface::doWriteLedgerObject
*/
void
doWriteLedgerObject(std::string&& key, std::uint32_t const seq, std::string&& blob) override
{
@@ -845,9 +779,6 @@ public:
executor_.write(schema_->insertObject, std::move(key), seq, std::move(blob));
}
/**
* @copydoc BackendInterface::writeSuccessor
*/
void
writeSuccessor(std::string&& key, std::uint32_t const seq, std::string&& successor) override
{
@@ -859,9 +790,6 @@ public:
executor_.write(schema_->insertSuccessor, std::move(key), seq, std::move(successor));
}
/**
* @copydoc BackendInterface::writeAccountTransactions
*/
void
writeAccountTransactions(std::vector<AccountTransactionsData> data) override
{
@@ -881,9 +809,6 @@ public:
executor_.write(std::move(statements));
}
/**
* @copydoc BackendInterface::writeAccountTransaction
*/
void
writeAccountTransaction(AccountTransactionsData record) override
{
@@ -901,9 +826,6 @@ public:
executor_.write(std::move(statements));
}
/**
* @copydoc BackendInterface::writeNFTTransactions
*/
void
writeNFTTransactions(std::vector<NFTTransactionsData> const& data) override
{
@@ -919,9 +841,6 @@ public:
executor_.write(std::move(statements));
}
/**
* @copydoc BackendInterface::writeTransaction
*/
void
writeTransaction(
std::string&& hash,
@@ -939,9 +858,6 @@ public:
);
}
/**
* @copydoc BackendInterface::writeNFTs
*/
void
writeNFTs(std::vector<NFTsData> const& data) override
{
@@ -980,9 +896,6 @@ public:
executor_.writeEach(std::move(statements));
}
/**
* @copydoc BackendInterface::writeNFTs
*/
void
writeMPTHolders(std::vector<MPTHolderData> const& data) override
{
@@ -994,9 +907,6 @@ public:
executor_.write(std::move(statements));
}
/**
* @copydoc BackendInterface::startWrites
*/
void
startWrites() const override
{
@@ -1004,9 +914,6 @@ public:
// probably was used in PG to start a transaction or smth.
}
/**
* @copydoc BackendInterface::writeMigratorStatus
*/
void
writeMigratorStatus(std::string const& migratorName, std::string const& status) override
{
@@ -1015,27 +922,18 @@ public:
);
}
/**
* @copydoc BackendInterface::writeNodeMessage
*/
void
writeNodeMessage(boost::uuids::uuid const& uuid, std::string message) override
{
executor_.writeSync(schema_->updateClioNodeMessage, data::cassandra::Text{std::move(message)}, uuid);
}
/**
* @copydoc BackendInterface::isTooBusy
*/
bool
isTooBusy() const override
{
return executor_.isTooBusy();
}
/**
* @copydoc BackendInterface::stats
*/
boost::json::object
stats() const override
{

View File

@@ -1,5 +1,7 @@
# ETL subsystem
@page "etl" ETL subsystem
A single Clio node has one or more ETL sources specified in the config file. Clio subscribes to the `ledgers` stream of each of the ETL sources. The stream sends a message whenever a new ledger is validated.
Upon receiving a message on the stream, Clio fetches the data associated with the newly validated ledger from one of the ETL sources. The fetch is performed via a gRPC request called `GetLedger`. This request returns the ledger header, transactions and metadata blobs, and every ledger object added/modified/deleted as part of this ledger. The ETL subsystem then writes all of this data to the databases, and moves on to the next ledger.
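
For illustration, a minimal sketch of the `GetLedger` fetch described above, mirroring the request fields and client-side deadline that this commit adds to `GrpcSource::fetchLedger` (the standalone helper name `fetchOneLedger` and the 30-second value are illustrative, not Clio's actual API):

```cpp
// Hedged sketch: issue a GetLedger request for one validated ledger,
// with a deadline so the call cannot block indefinitely.
#include <chrono>
#include <cstdint>
#include <utility>

#include <grpcpp/grpcpp.h>
#include <org/xrpl/rpc/v1/get_ledger.pb.h>
#include <org/xrpl/rpc/v1/xrp_ledger.grpc.pb.h>

std::pair<grpc::Status, org::xrpl::rpc::v1::GetLedgerResponse>
fetchOneLedger(org::xrpl::rpc::v1::XRPLedgerAPIService::Stub& stub, uint32_t sequence)
{
    org::xrpl::rpc::v1::GetLedgerRequest request;
    request.mutable_ledger()->set_sequence(sequence);
    request.set_transactions(true);  // include transaction and metadata blobs
    request.set_expand(true);        // expand them rather than returning hashes only

    grpc::ClientContext context;
    context.set_deadline(std::chrono::system_clock::now() + std::chrono::seconds(30));

    org::xrpl::rpc::v1::GetLedgerResponse response;
    auto const status = stub.GetLedger(&context, request, &response);
    return {status, response};
}
```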

View File

@@ -27,6 +27,7 @@
#include <boost/asio/io_context.hpp>
#include <boost/asio/ip/tcp.hpp>
#include <fmt/format.h>
#include <grpc/grpc.h>
#include <grpcpp/client_context.h>
#include <grpcpp/security/credentials.h>
#include <grpcpp/support/channel_arguments.h>
@@ -34,6 +35,7 @@
#include <org/xrpl/rpc/v1/get_ledger.pb.h>
#include <org/xrpl/rpc/v1/xrp_ledger.grpc.pb.h>
#include <chrono>
#include <cstddef>
#include <cstdint>
#include <exception>
@@ -52,17 +54,25 @@ GrpcSource::GrpcSource(std::string const& ip, std::string const& grpcPort, std::
try {
boost::asio::io_context ctx;
boost::asio::ip::tcp::resolver resolver{ctx};
auto const resolverResult = resolver.resolve(ip, grpcPort);
if (resolverResult.empty()) {
if (resolverResult.empty())
throw std::runtime_error("Failed to resolve " + ip + ":" + grpcPort);
}
std::stringstream ss;
ss << resolverResult.begin()->endpoint();
grpc::ChannelArguments chArgs;
chArgs.SetMaxReceiveMessageSize(-1);
chArgs.SetInt(GRPC_ARG_KEEPALIVE_TIME_MS, kKEEPALIVE_PING_INTERVAL_MS);
chArgs.SetInt(GRPC_ARG_KEEPALIVE_TIMEOUT_MS, kKEEPALIVE_TIMEOUT_MS);
chArgs.SetInt(GRPC_ARG_KEEPALIVE_PERMIT_WITHOUT_CALLS, static_cast<int>(kKEEPALIVE_PERMIT_WITHOUT_CALLS));
chArgs.SetInt(GRPC_ARG_HTTP2_MAX_PINGS_WITHOUT_DATA, kMAX_PINGS_WITHOUT_DATA);
stub_ = org::xrpl::rpc::v1::XRPLedgerAPIService::NewStub(
grpc::CreateCustomChannel(ss.str(), grpc::InsecureChannelCredentials(), chArgs)
);
LOG(log_.debug()) << "Made stub for remote.";
} catch (std::exception const& e) {
LOG(log_.warn()) << "Exception while creating stub: " << e.what() << ".";
@@ -76,10 +86,11 @@ GrpcSource::fetchLedger(uint32_t sequence, bool getObjects, bool getObjectNeighb
if (!stub_)
return {{grpc::StatusCode::INTERNAL, "No Stub"}, response};
// Ledger header with txns and metadata
org::xrpl::rpc::v1::GetLedgerRequest request;
grpc::ClientContext context;
context.set_deadline(std::chrono::system_clock::now() + kDEADLINE); // Prevent indefinite blocking
request.mutable_ledger()->set_sequence(sequence);
request.set_transactions(true);
request.set_expand(true);

View File

@@ -26,6 +26,7 @@
#include <org/xrpl/rpc/v1/get_ledger.pb.h>
#include <xrpl/proto/org/xrpl/rpc/v1/xrp_ledger.grpc.pb.h>
#include <chrono>
#include <cstdint>
#include <memory>
#include <string>
@@ -38,6 +39,12 @@ class GrpcSource {
std::unique_ptr<org::xrpl::rpc::v1::XRPLedgerAPIService::Stub> stub_;
std::shared_ptr<BackendInterface> backend_;
static constexpr auto kKEEPALIVE_PING_INTERVAL_MS = 10000;
static constexpr auto kKEEPALIVE_TIMEOUT_MS = 5000;
static constexpr auto kKEEPALIVE_PERMIT_WITHOUT_CALLS = true; // Allow keepalive pings when no calls
static constexpr auto kMAX_PINGS_WITHOUT_DATA = 0; // No limit
static constexpr auto kDEADLINE = std::chrono::seconds(30);
public:
GrpcSource(std::string const& ip, std::string const& grpcPort, std::shared_ptr<BackendInterface> backend);

View File

@@ -32,6 +32,12 @@ struct AmendmentBlockHandlerInterface {
*/
virtual void
notifyAmendmentBlocked() = 0;
/**
* @brief Stop the block handler from repeatedly executing
*/
virtual void
stop() = 0;
};
} // namespace etlng

View File

@@ -25,6 +25,7 @@
#include <chrono>
#include <functional>
#include <optional>
#include <utility>
namespace etlng::impl {
@@ -45,6 +46,11 @@ AmendmentBlockHandler::AmendmentBlockHandler(
{
}
AmendmentBlockHandler::~AmendmentBlockHandler()
{
stop();
}
void
AmendmentBlockHandler::notifyAmendmentBlocked()
{
@@ -53,4 +59,13 @@ AmendmentBlockHandler::notifyAmendmentBlocked()
operation_.emplace(ctx_.executeRepeatedly(interval_, action_));
}
void
AmendmentBlockHandler::stop()
{
if (operation_.has_value()) {
operation_->abort();
operation_.reset();
}
}
} // namespace etlng::impl

View File

@@ -56,11 +56,10 @@ public:
ActionType action = kDEFAULT_AMENDMENT_BLOCK_ACTION
);
~AmendmentBlockHandler() override
{
if (operation_.has_value())
operation_.value().abort();
}
~AmendmentBlockHandler() override;
void
stop() override;
void
notifyAmendmentBlocked() override;

View File

@@ -28,6 +28,7 @@
#include <boost/asio/spawn.hpp>
#include <fmt/format.h>
#include <grpc/grpc.h>
#include <grpcpp/client_context.h>
#include <grpcpp/security/credentials.h>
#include <grpcpp/support/channel_arguments.h>
@@ -36,6 +37,7 @@
#include <org/xrpl/rpc/v1/xrp_ledger.grpc.pb.h>
#include <atomic>
#include <chrono>
#include <cstddef>
#include <cstdint>
#include <exception>
@@ -63,13 +65,18 @@ resolve(std::string const& ip, std::string const& port)
namespace etlng::impl {
GrpcSource::GrpcSource(std::string const& ip, std::string const& grpcPort)
GrpcSource::GrpcSource(std::string const& ip, std::string const& grpcPort, std::chrono::system_clock::duration deadline)
: log_(fmt::format("ETL_Grpc[{}:{}]", ip, grpcPort))
, initialLoadShouldStop_(std::make_unique<std::atomic_bool>(false))
, deadline_{deadline}
{
try {
grpc::ChannelArguments chArgs;
chArgs.SetMaxReceiveMessageSize(-1);
chArgs.SetInt(GRPC_ARG_KEEPALIVE_TIME_MS, kKEEPALIVE_PING_INTERVAL_MS);
chArgs.SetInt(GRPC_ARG_KEEPALIVE_TIMEOUT_MS, kKEEPALIVE_TIMEOUT_MS);
chArgs.SetInt(GRPC_ARG_KEEPALIVE_PERMIT_WITHOUT_CALLS, static_cast<int>(kKEEPALIVE_PERMIT_WITHOUT_CALLS));
chArgs.SetInt(GRPC_ARG_HTTP2_MAX_PINGS_WITHOUT_DATA, kMAX_PINGS_WITHOUT_DATA);
stub_ = org::xrpl::rpc::v1::XRPLedgerAPIService::NewStub(
grpc::CreateCustomChannel(resolve(ip, grpcPort), grpc::InsecureChannelCredentials(), chArgs)
@@ -88,10 +95,11 @@ GrpcSource::fetchLedger(uint32_t sequence, bool getObjects, bool getObjectNeighb
if (!stub_)
return {{grpc::StatusCode::INTERNAL, "No Stub"}, response};
// Ledger header with txns and metadata
org::xrpl::rpc::v1::GetLedgerRequest request;
grpc::ClientContext context;
context.set_deadline(std::chrono::system_clock::now() + deadline_); // Prevent indefinite blocking
request.mutable_ledger()->set_sequence(sequence);
request.set_transactions(true);
request.set_expand(true);

View File

@@ -29,6 +29,7 @@
#include <xrpl/proto/org/xrpl/rpc/v1/xrp_ledger.grpc.pb.h>
#include <atomic>
#include <chrono>
#include <cstdint>
#include <memory>
#include <string>
@@ -40,9 +41,20 @@ class GrpcSource {
util::Logger log_;
std::unique_ptr<org::xrpl::rpc::v1::XRPLedgerAPIService::Stub> stub_;
std::unique_ptr<std::atomic_bool> initialLoadShouldStop_;
std::chrono::system_clock::duration deadline_;
static constexpr auto kKEEPALIVE_PING_INTERVAL_MS = 10000;
static constexpr auto kKEEPALIVE_TIMEOUT_MS = 5000;
static constexpr auto kKEEPALIVE_PERMIT_WITHOUT_CALLS = true; // Allow keepalive pings when no calls
static constexpr auto kMAX_PINGS_WITHOUT_DATA = 0; // No limit
static constexpr auto kDEADLINE = std::chrono::seconds(30);
public:
GrpcSource(std::string const& ip, std::string const& grpcPort);
GrpcSource(
std::string const& ip,
std::string const& grpcPort,
std::chrono::system_clock::duration deadline = kDEADLINE
);
/**
* @brief Fetch data for a specific ledger.

View File

@@ -1,42 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2023, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
/**
* @mainpage Clio API server
*
* @section intro Introduction
*
* Clio is an XRP Ledger API server optimized for RPC calls over WebSocket or JSON-RPC.
*
* It stores validated historical ledger and transaction data in a more space efficient format, and uses up to 4 times
* less space than <A HREF="https://github.com/XRPLF/rippled">rippled</A>.
*
* Clio can be configured to store data in <A HREF="https://cassandra.apache.org/_/index.html">Apache Cassandra</A> or
* <A HREF="https://www.scylladb.com/">ScyllaDB</A>, enabling scalable read throughput. Multiple Clio nodes can share
* access to the same dataset, which allows for a highly available cluster of Clio nodes without the need for redundant
* data storage or computation.
*
* @section Develop
*
* As you prepare to develop code for Clio, please be sure you are aware of our current
* <A HREF="https://github.com/XRPLF/clio/blob/develop/CONTRIBUTING.md">Contribution guidelines</A>.
*
* Read [rpc/README.md](../rpc/README.md) carefully to know more about writing your own handlers for
* Clio.
*/

View File

@@ -1,5 +1,7 @@
# Clio Migration
@page "migration" Clio Migration
Clio maintains the off-chain data of XRPL and multiple indexes tables to powering complex queries. To simplify the creation of index tables, this migration framework handles the process of database change and facilitates the migration of historical data seamlessly.
## Command Line Usage

View File

@@ -1,4 +1,6 @@
# RPC subsystem
# RPC subsystem
@page "rpc" RPC subsystem
The RPC subsystem is where the common framework for handling incoming JSON requests is implemented.

View File

@@ -1,5 +1,7 @@
# Async framework
@page "async" Async framework
## Introduction
Clio uses threads intensively. Multiple parts of Clio were/are implemented by running a `std::thread` with some sort of loop inside. Every time this pattern is reimplemented in a slightly different way. State is managed using asynchronous queues, atomic flags, mutexes and other low level primitives.

View File

@@ -1,5 +1,7 @@
# Web server subsystem
@page "web" Web server subsystem
This folder contains all of the classes for running the web server.
The web server subsystem:

View File

@@ -25,4 +25,5 @@
struct MockAmendmentBlockHandler : etlng::AmendmentBlockHandlerInterface {
MOCK_METHOD(void, notifyAmendmentBlocked, (), (override));
MOCK_METHOD(void, stop, (), (override));
};

View File

@@ -32,7 +32,9 @@
#include <org/xrpl/rpc/v1/get_ledger_entry.pb.h>
#include <xrpl/proto/org/xrpl/rpc/v1/xrp_ledger.grpc.pb.h>
#include <chrono>
#include <memory>
#include <optional>
#include <string>
#include <thread>
@@ -90,8 +92,7 @@ struct WithMockXrpLedgerAPIService : virtual ::testing::Test {
~WithMockXrpLedgerAPIService() override
{
server_->Shutdown();
serverThread_.join();
shutdown();
}
int
@@ -99,6 +100,19 @@ struct WithMockXrpLedgerAPIService : virtual ::testing::Test {
{
return port_;
}
void
shutdown(std::optional<std::chrono::system_clock::duration> deadline = std::nullopt)
{
if (deadline.has_value()) {
server_->Shutdown(std::chrono::system_clock::now() + *deadline);
} else {
server_->Shutdown();
}
if (serverThread_.joinable())
serverThread_.join();
}
MockXrpLedgerAPIService mockXrpLedgerAPIService;
private:

View File

@@ -36,7 +36,7 @@ struct AmendmentBlockHandlerTest : util::prometheus::WithPrometheus, SyncAsioCon
etl::SystemState state;
};
TEST_F(AmendmentBlockHandlerTest, CallTonotifyAmendmentBlockedSetsStateAndRepeatedlyCallsAction)
TEST_F(AmendmentBlockHandlerTest, CallToNotifyAmendmentBlockedSetsStateAndRepeatedlyCallsAction)
{
AmendmentBlockHandler handler{ctx_, state, std::chrono::nanoseconds{1}, actionMock.AsStdFunction()};

View File

@@ -40,7 +40,7 @@ protected:
util::async::CoroExecutionContext ctx_;
};
TEST_F(AmendmentBlockHandlerNgTests, CallTonotifyAmendmentBlockedSetsStateAndRepeatedlyCallsAction)
TEST_F(AmendmentBlockHandlerNgTests, CallToNotifyAmendmentBlockedSetsStateAndRepeatedlyCallsAction)
{
static constexpr auto kMAX_ITERATIONS = 10uz;
etlng::impl::AmendmentBlockHandler handler{ctx_, state_, std::chrono::nanoseconds{1}, actionMock_.AsStdFunction()};
@@ -55,6 +55,7 @@ TEST_F(AmendmentBlockHandlerNgTests, CallTonotifyAmendmentBlockedSetsStateAndRep
handler.notifyAmendmentBlocked();
stop.acquire(); // wait for the counter to reach over kMAX_ITERATIONS
handler.stop();
EXPECT_TRUE(state_.isAmendmentBlocked);
}

View File

@@ -41,15 +41,18 @@
#include <xrpl/basics/strHex.h>
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <future>
#include <map>
#include <memory>
#include <mutex>
#include <optional>
#include <queue>
#include <semaphore>
#include <string>
#include <vector>
@@ -357,3 +360,34 @@ TEST_F(GrpcSourceStopTests, LoadInitialLedgerStopsWhenRequested)
ASSERT_FALSE(res.has_value());
EXPECT_EQ(res.error(), etlng::InitialLedgerLoadError::Cancelled);
}
TEST_F(GrpcSourceNgTests, DeadlineIsHandledCorrectly)
{
static constexpr auto kDEADLINE = std::chrono::milliseconds{5};
uint32_t const sequence = 123u;
bool const getObjects = true;
bool const getObjectNeighbors = false;
std::binary_semaphore sem(0);
auto grpcSource =
std::make_unique<etlng::impl::GrpcSource>("localhost", std::to_string(getXRPLMockPort()), kDEADLINE);
EXPECT_CALL(mockXrpLedgerAPIService, GetLedger)
.WillOnce([&](grpc::ServerContext*,
org::xrpl::rpc::v1::GetLedgerRequest const*,
org::xrpl::rpc::v1::GetLedgerResponse*) {
// wait for main thread to discard us and fail the test if unsuccessful within expected timeframe
[&] { ASSERT_TRUE(sem.try_acquire_for(std::chrono::milliseconds{50})); }();
return grpc::Status{};
});
auto const [status, response] = grpcSource->fetchLedger(sequence, getObjects, getObjectNeighbors);
ASSERT_FALSE(status.ok()); // timed out after kDEADLINE
sem.release(); // we don't need to hold GetLedger thread any longer
grpcSource.reset();
shutdown(std::chrono::milliseconds{10});
}