Mirror of <https://github.com/XRPLF/clio.git>, synced 2026-01-21 15:15:29 +00:00.

Compare commits (138 commits)
Commits included in this comparison (abbreviated SHA1s; author, date, and message columns were not captured):

a72e5a180f, 0ebbaaadef, b983aea15d, 63e7f9a72b, eebee4d671, a6d5f94470, 2b473c8613, 6f6d8cdf25,
232838862a, cd93b2a469, 95712c22b1, 14342e087c, 159024898e, c6be761f33, f33f15c02d, e733fadb45,
a7ac7b54a8, 88866ea6fd, bb39bce40b, bb3159bda0, c0c5c14791, b0abe14057, c9df784c4e, a9787b131e,
9f76eabf0a, 79c08fc735, 2c9c5634ad, 850333528c, 8da4194fe2, 4dece23ede, 2327e81b0b, 5269ea0223,
89fbcbf66a, 4b731a92ae, 7600e740a0, db9a460867, d5b0329e70, 612434677a, 5a5a79fe30, b1a49fdaab,
f451996944, 488bb05d22, f2c4275f61, e9b98cf5b3, 3aa1854129, f2f5a6ab19, 1469d4b198, 06ea05891d,
c7c270cc03, c1f2f5b100, bea0b51c8b, 69b8e5bd06, 33dc4ad95a, 13cbb405c7, 8a37a2e083, f8b6c98219,
92883bf012, 88881e95dd, 94e70e4026, b534570cdd, 56fbfc63c2, 80978657c0, 067449c3f8, 946976546a,
73e90b0a3f, 7681c58a3a, 391e7b07ab, 4eadaa85fa, 1b1a46c429, 89707d9668, ae260d1229, 058c05cfb6,
b2a7d185cb, 9ea61ba6b9, 19157dec74, 42a6f516dc, 2cd8226a11, e3170203de, 8b280e7742, 7ed30bc40d,
ac608004bc, 6ab92ca0a6, 77387d8f9f, b62cfe949f, 56f074e6ee, f0becbbec3, 2075171ca5, 3a4249dcc3,
8742dcab3d, 1ef7ec3464, 20e7e275cf, addb17ae7d, 346c9f9bdf, c6308ce036, d023ed2be2, 6236941140,
59b7b249ff, 893daab8f8, be9f0615fa, 093606106c, 224e835e7c, 138a2d3440, c0eedd273d, a5b1dcfe55,
c973e99f4b, 51dbd09ef6, 1ecc6a6040, 1d3e34b392, 2f8a704071, fcc5a5425e, 316126746b, 6d79dd6b2b,
d6ab2cc1e4, 13baa42993, b485fdc18d, 7e4e12385f, c117f470f2, 30e88fe72c, cecf082952, d5b95c2e61,
8375eb1766, be6aaffa7a, 104ef6a9dc, eed757e0c4, 3b61a85ba0, 7c8152d76f, 0425d34b55, 8c8a7ff3b8,
16493abd0d, 3dd72d94e1, 5e914abf29, 9603968808, 0124c06a53, 1bfdd0dd89, f41d574204, d0ec60381b,
0b19a42a96, 030f4f1b22
@@ -49,6 +49,7 @@ IndentFunctionDeclarationAfterType: false
 IndentWidth: 4
 IndentWrappedFunctionNames: false
 IndentRequiresClause: true
+InsertNewlineAtEOF: true
 RequiresClausePosition: OwnLine
 KeepEmptyLinesAtTheStartOfBlocks: false
 MaxEmptyLinesToKeep: 1
@@ -54,7 +54,7 @@ format:
   _help_max_pargs_hwrap:
   - If a positional argument group contains more than this many
   - arguments, then force it to a vertical layout.
-  max_pargs_hwrap: 6
+  max_pargs_hwrap: 5
   _help_max_rows_cmdline:
   - If a cmdline positional group consumes more than this many
   - lines without nesting, then invalidate the layout (and nest)
.github/actions/build-clio/action.yml (vendored, 18 changed lines)

@@ -5,25 +5,27 @@ inputs:
   targets:
     description: Space-separated build target names
     default: all
-  subtract_threads:
-    description: An option for the action get-threads-number.
+  nproc_subtract:
+    description: The number of processors to subtract when calculating parallelism.
     required: true
     default: "0"

 runs:
   using: composite
   steps:
-    - name: Get number of threads
-      uses: ./.github/actions/get-threads-number
-      id: number_of_threads
+    - name: Get number of processors
+      uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf
+      id: nproc
       with:
-        subtract_threads: ${{ inputs.subtract_threads }}
+        subtract: ${{ inputs.nproc_subtract }}

     - name: Build targets
       shell: bash
+      env:
+        CMAKE_TARGETS: ${{ inputs.targets }}
       run: |
         cd build
         cmake \
           --build . \
-          --parallel "${{ steps.number_of_threads.outputs.threads_number }}" \
-          --target ${{ inputs.targets }}
+          --parallel "${{ steps.nproc.outputs.nproc }}" \
+          --target ${CMAKE_TARGETS}
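The `get-nproc` action referenced above replaces the repository's own `get-threads-number` action, which is deleted later in this comparison. As a rough sketch of the arithmetic involved (the exact behaviour of `XRPLF/actions/get-nproc` is not shown in this diff, so the clamping below simply mirrors the removed action):

```bash
#!/usr/bin/env bash
# Sketch only: mirrors the removed get-threads-number logic shown further down.
# "subtract" plays the role of the nproc_subtract input of the build-clio action.
subtract="${1:-0}"

if [[ "$(uname)" == "Darwin" ]]; then
    total="$(sysctl -n hw.logicalcpu)"
else
    total="$(nproc)"
fi

jobs="$((total - subtract))"
echo "jobs=$(( jobs > 1 ? jobs : 1 ))"   # never drop below one parallel job
```

Note that the removed action additionally reserved two cores before applying the subtraction; whether the new action does the same is not visible here.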
@@ -47,12 +47,12 @@ runs:
           username: ${{ github.repository_owner }}
           password: ${{ env.GITHUB_TOKEN }}

-      - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+      - uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
        with:
          cache-image: false
-      - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+      - uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

-      - uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+      - uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
        id: meta
        with:
          images: ${{ inputs.images }}
.github/actions/cache-key/action.yml (new file, vendored, 41 lines)

@@ -0,0 +1,41 @@
+name: Cache key
+description: Generate cache key for ccache
+
+inputs:
+  conan_profile:
+    description: Conan profile name
+    required: true
+  build_type:
+    description: Current build type (e.g. Release, Debug)
+    required: true
+    default: Release
+  code_coverage:
+    description: Whether code coverage is on
+    required: true
+    default: "false"
+
+outputs:
+  key:
+    description: Generated cache key for ccache
+    value: ${{ steps.key_without_commit.outputs.key }}-${{ steps.git_common_ancestor.outputs.commit }}
+  restore_keys:
+    description: Cache restore keys for fallback
+    value: ${{ steps.key_without_commit.outputs.key }}
+
+runs:
+  using: composite
+  steps:
+    - name: Find common commit
+      id: git_common_ancestor
+      uses: ./.github/actions/git-common-ancestor
+
+    - name: Set cache key without commit
+      id: key_without_commit
+      shell: bash
+      env:
+        RUNNER_OS: ${{ runner.os }}
+        BUILD_TYPE: ${{ inputs.build_type }}
+        CODE_COVERAGE: ${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}
+        CONAN_PROFILE: ${{ inputs.conan_profile }}
+      run: |
+        echo "key=clio-ccache-${RUNNER_OS}-${BUILD_TYPE}${CODE_COVERAGE}-${CONAN_PROFILE}-develop" >> "${GITHUB_OUTPUT}"
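To make the relationship between the two outputs concrete, here is a small illustration with hypothetical inputs (the commit value is a placeholder, not taken from this diff):

```bash
#!/usr/bin/env bash
# Hypothetical inputs, for illustration only.
RUNNER_OS="Linux"
BUILD_TYPE="Release"
CODE_COVERAGE="-code_coverage"   # empty string when code coverage is off
CONAN_PROFILE="gcc"
COMMIT="0123456789ab"            # placeholder for the git-common-ancestor output

restore_key="clio-ccache-${RUNNER_OS}-${BUILD_TYPE}${CODE_COVERAGE}-${CONAN_PROFILE}-develop"
key="${restore_key}-${COMMIT}"

echo "$restore_key"   # clio-ccache-Linux-Release-code_coverage-gcc-develop
echo "$key"           # clio-ccache-Linux-Release-code_coverage-gcc-develop-0123456789ab
```

The exact cache key is pinned to a specific common-ancestor commit, while the restore key acts as a prefix fallback when no exact match exists.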
.github/actions/cmake/action.yml (vendored, 20 changed lines)

@@ -37,6 +37,10 @@ inputs:
     description: Whether to generate Debian package
     required: true
     default: "false"
+  version:
+    description: Version of the clio_server binary
+    required: false
+    default: ""

 runs:
   using: composite
@@ -44,6 +48,7 @@ runs:
     - name: Run cmake
       shell: bash
       env:
+        BUILD_DIR: "${{ inputs.build_dir }}"
         BUILD_TYPE: "${{ inputs.build_type }}"
         SANITIZER_OPTION: |-
           ${{ endsWith(inputs.conan_profile, '.asan') && '-Dsan=address' ||
@@ -56,9 +61,22 @@ runs:
         STATIC: "${{ inputs.static == 'true' && 'ON' || 'OFF' }}"
         TIME_TRACE: "${{ inputs.time_trace == 'true' && 'ON' || 'OFF' }}"
         PACKAGE: "${{ inputs.package == 'true' && 'ON' || 'OFF' }}"
+        # GitHub creates a merge commit for a PR
+        # https://www.kenmuse.com/blog/the-many-shas-of-a-github-pull-request/
+        #
+        # We:
+        # - explicitly provide branch name
+        # - use `github.head_ref` to get the SHA of last commit in the PR branch
+        #
+        # This way it works both for PRs and pushes to branches.
+        GITHUB_BRANCH_NAME: "${{ github.head_ref || github.ref_name }}"
+        GITHUB_HEAD_SHA: "${{ github.event.pull_request.head.sha || github.sha }}"
+        #
+        # If tag is being pushed, or it's a nightly release, we use that version.
+        FORCE_CLIO_VERSION: ${{ inputs.version }}
       run: |
         cmake \
-          -B ${{inputs.build_dir}} \
+          -B "${BUILD_DIR}" \
           -S . \
           -G Ninja \
           -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
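The comment block introduced above relies on standard GitHub Actions fallback behaviour: on pull_request events `github.head_ref` and `github.event.pull_request.head.sha` are populated, while on plain pushes they are empty and the expressions fall back to `github.ref_name` and `github.sha`. A hypothetical illustration of what the two environment variables end up holding:

```bash
# Illustration only; the branch and SHA values are made up.
#
# pull_request event:
#   GITHUB_BRANCH_NAME = "my-feature-branch"        # from github.head_ref
#   GITHUB_HEAD_SHA    = "<last commit on the PR branch>"
#
# push to a branch or tag:
#   GITHUB_BRANCH_NAME = "develop"                   # falls back to github.ref_name
#   GITHUB_HEAD_SHA    = "<pushed commit>"           # falls back to github.sha
```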
.github/actions/code-coverage/action.yml (vendored, 2 changed lines)

@@ -24,7 +24,7 @@ runs:
           -j8 --exclude-throw-branches

     - name: Archive coverage report
-      uses: actions/upload-artifact@v4
+      uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
       with:
         name: coverage-report.xml
         path: build/coverage_report.xml
.github/actions/conan/action.yml (vendored, 15 changed lines)

@@ -21,18 +21,17 @@ inputs:
 runs:
   using: composite
   steps:
-    - name: Create build directory
-      shell: bash
-      run: mkdir -p "${{ inputs.build_dir }}"
-
     - name: Run conan
       shell: bash
       env:
+        BUILD_DIR: "${{ inputs.build_dir }}"
         CONAN_BUILD_OPTION: "${{ inputs.force_conan_source_build == 'true' && '*' || 'missing' }}"
+        BUILD_TYPE: "${{ inputs.build_type }}"
+        CONAN_PROFILE: "${{ inputs.conan_profile }}"
       run: |
         conan \
           install . \
-          -of build \
-          -b "$CONAN_BUILD_OPTION" \
-          -s "build_type=${{ inputs.build_type }}" \
-          --profile:all "${{ inputs.conan_profile }}"
+          -of "${BUILD_DIR}" \
+          -b "${CONAN_BUILD_OPTION}" \
+          -s "build_type=${BUILD_TYPE}" \
+          --profile:all "${CONAN_PROFILE}"
.github/actions/create-issue/action.yml (vendored, 13 changed lines)

@@ -28,12 +28,17 @@ runs:
     - name: Create an issue
       id: create_issue
       shell: bash
+      env:
+        ISSUE_BODY: ${{ inputs.body }}
+        ISSUE_ASSIGNEES: ${{ inputs.assignees }}
+        ISSUE_LABELS: ${{ inputs.labels }}
+        ISSUE_TITLE: ${{ inputs.title }}
       run: |
-        echo -e '${{ inputs.body }}' > issue.md
+        echo -e "${ISSUE_BODY}" > issue.md
         gh issue create \
-          --assignee '${{ inputs.assignees }}' \
-          --label '${{ inputs.labels }}' \
-          --title '${{ inputs.title }}' \
+          --assignee "${ISSUE_ASSIGNEES}" \
+          --label "${ISSUE_LABELS}" \
+          --title "${ISSUE_TITLE}" \
           --body-file ./issue.md \
           > create_issue.log
         created_issue="$(sed 's|.*/||' create_issue.log)"
.github/actions/get-threads-number/action.yml (vendored, deleted, 36 lines)

@@ -1,36 +0,0 @@
-name: Get number of threads
-description: Determines number of threads to use on macOS and Linux
-
-inputs:
-  subtract_threads:
-    description: How many threads to subtract from the calculated number
-    required: true
-    default: "0"
-outputs:
-  threads_number:
-    description: Number of threads to use
-    value: ${{ steps.number_of_threads_export.outputs.num }}
-
-runs:
-  using: composite
-  steps:
-    - name: Get number of threads on mac
-      id: mac_threads
-      if: ${{ runner.os == 'macOS' }}
-      shell: bash
-      run: echo "num=$(($(sysctl -n hw.logicalcpu) - 2))" >> $GITHUB_OUTPUT
-
-    - name: Get number of threads on Linux
-      id: linux_threads
-      if: ${{ runner.os == 'Linux' }}
-      shell: bash
-      run: echo "num=$(($(nproc) - 2))" >> $GITHUB_OUTPUT
-
-    - name: Shift and export number of threads
-      id: number_of_threads_export
-      shell: bash
-      run: |
-        num_of_threads="${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}"
-        shift_by="${{ inputs.subtract_threads }}"
-        shifted="$((num_of_threads - shift_by))"
-        echo "num=$(( shifted > 1 ? shifted : 1 ))" >> $GITHUB_OUTPUT
.github/actions/restore-cache/action.yml (vendored, deleted, 38 lines)

@@ -1,38 +0,0 @@
-name: Restore cache
-description: Find and restores ccache cache
-
-inputs:
-  conan_profile:
-    description: Conan profile name
-    required: true
-  ccache_dir:
-    description: Path to .ccache directory
-    required: true
-  build_type:
-    description: Current build type (e.g. Release, Debug)
-    required: true
-    default: Release
-  code_coverage:
-    description: Whether code coverage is on
-    required: true
-    default: "false"
-
-outputs:
-  ccache_cache_hit:
-    description: True if ccache cache has been downloaded
-    value: ${{ steps.ccache_cache.outputs.cache-hit }}
-
-runs:
-  using: composite
-  steps:
-    - name: Find common commit
-      id: git_common_ancestor
-      uses: ./.github/actions/git-common-ancestor
-
-    - name: Restore ccache cache
-      uses: actions/cache/restore@v4
-      id: ccache_cache
-      if: ${{ env.CCACHE_DISABLE != '1' }}
-      with:
-        path: ${{ inputs.ccache_dir }}
-        key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-${{ inputs.conan_profile }}-develop-${{ steps.git_common_ancestor.outputs.commit }}
.github/actions/save-cache/action.yml (vendored, deleted, 38 lines)

@@ -1,38 +0,0 @@
-name: Save cache
-description: Save ccache cache for develop branch
-
-inputs:
-  conan_profile:
-    description: Conan profile name
-    required: true
-  ccache_dir:
-    description: Path to .ccache directory
-    required: true
-  build_type:
-    description: Current build type (e.g. Release, Debug)
-    required: true
-    default: Release
-  code_coverage:
-    description: Whether code coverage is on
-    required: true
-    default: "false"
-
-  ccache_cache_hit:
-    description: Whether ccache cache has been downloaded
-    required: true
-  ccache_cache_miss_rate:
-    description: How many ccache cache misses happened
-
-runs:
-  using: composite
-  steps:
-    - name: Find common commit
-      id: git_common_ancestor
-      uses: ./.github/actions/git-common-ancestor
-
-    - name: Save ccache cache
-      if: ${{ inputs.ccache_cache_hit != 'true' || inputs.ccache_cache_miss_rate == '100.0' }}
-      uses: actions/cache/save@v4
-      with:
-        path: ${{ inputs.ccache_dir }}
-        key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-${{ inputs.conan_profile }}-develop-${{ steps.git_common_ancestor.outputs.commit }}
.github/dependabot.yml (vendored, 28 changed lines)

@@ -91,19 +91,6 @@ updates:
       prefix: "ci: [DEPENDABOT] "
     target-branch: develop

-  - package-ecosystem: github-actions
-    directory: .github/actions/get-threads-number/
-    schedule:
-      interval: weekly
-      day: monday
-      time: "04:00"
-      timezone: Etc/GMT
-    reviewers:
-      - XRPLF/clio-dev-team
-    commit-message:
-      prefix: "ci: [DEPENDABOT] "
-    target-branch: develop
-
   - package-ecosystem: github-actions
     directory: .github/actions/git-common-ancestor/
     schedule:
@@ -118,20 +105,7 @@ updates:
     target-branch: develop

   - package-ecosystem: github-actions
-    directory: .github/actions/restore-cache/
-    schedule:
-      interval: weekly
-      day: monday
-      time: "04:00"
-      timezone: Etc/GMT
-    reviewers:
-      - XRPLF/clio-dev-team
-    commit-message:
-      prefix: "ci: [DEPENDABOT] "
-    target-branch: develop
-
-  - package-ecosystem: github-actions
-    directory: .github/actions/save-cache/
+    directory: .github/actions/cache-key/
     schedule:
       interval: weekly
       day: monday
.github/scripts/conan/apple-clang-17.profile (vendored, 2 changed lines)

@@ -4,7 +4,7 @@ build_type=Release
 compiler=apple-clang
 compiler.cppstd=20
 compiler.libcxx=libc++
-compiler.version=17
+compiler.version=17.0
 os=Macos

 [conf]
.github/scripts/conan/generate_matrix.py (vendored, 2 changed lines)

@@ -4,7 +4,7 @@ import json

 LINUX_OS = ["heavy", "heavy-arm64"]
 LINUX_CONTAINERS = [
-    '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+    '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'
 ]
 LINUX_COMPILERS = ["gcc", "clang"]

.github/scripts/conan/init.sh (vendored, 4 changed lines)

@@ -40,9 +40,9 @@ mkdir -p "$PROFILES_DIR"

 if [[ "$(uname)" == "Darwin" ]]; then
     create_profile_with_sanitizers "apple-clang" "$APPLE_CLANG_PROFILE"
-    echo "include(apple-clang)" > "$PROFILES_DIR/default"
+    echo "include(apple-clang)" >"$PROFILES_DIR/default"
 else
     create_profile_with_sanitizers "clang" "$CLANG_PROFILE"
     create_profile_with_sanitizers "gcc" "$GCC_PROFILE"
-    echo "include(gcc)" > "$PROFILES_DIR/default"
+    echo "include(gcc)" >"$PROFILES_DIR/default"
 fi
.github/scripts/conan/regenerate_lockfile.sh (new executable file, vendored, 25 lines)

@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+set -ex
+
+TEMP_DIR=$(mktemp -d)
+trap "rm -rf $TEMP_DIR" EXIT
+
+echo "Using temporary CONAN_HOME: $TEMP_DIR"
+
+# We use a temporary Conan home to avoid polluting the user's existing Conan
+# configuration and to not use local cache (which leads to non-reproducible lockfiles).
+export CONAN_HOME="$TEMP_DIR"
+
+# Ensure that the xrplf remote is the first to be consulted, so any recipes we
+# patched are used. We also add it there to not created huge diff when the
+# official Conan Center Index is updated.
+conan remote add --force --index 0 xrplf https://conan.ripplex.io
+
+# Delete any existing lockfile.
+rm -f conan.lock
+
+# Create a new lockfile that is compatible with macOS.
+# It should also work on Linux.
+conan lock create . \
+    --profile:all=.github/scripts/conan/apple-clang-17.profile
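One plausible way to use the new script locally (this invocation is an illustration; only the script path itself comes from the diff):

```bash
# Run from the repository root; the script regenerates conan.lock in an isolated CONAN_HOME.
./.github/scripts/conan/regenerate_lockfile.sh

# Review the regenerated lockfile before committing it.
git diff conan.lock
```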
@@ -22,8 +22,8 @@ fi
 TEST_BINARY=$1

 if [[ ! -f "$TEST_BINARY" ]]; then
     echo "Test binary not found: $TEST_BINARY"
     exit 1
 fi

 TESTS=$($TEST_BINARY --gtest_list_tests | awk '/^ / {print suite $1} !/^ / {suite=$1}')
@@ -31,15 +31,16 @@ TESTS=$($TEST_BINARY --gtest_list_tests | awk '/^ / {print suite $1} !/^ / {suite=$1}')
 OUTPUT_DIR="./.sanitizer-report"
 mkdir -p "$OUTPUT_DIR"

-for TEST in $TESTS; do
-    OUTPUT_FILE="$OUTPUT_DIR/${TEST//\//_}"
-    export TSAN_OPTIONS="log_path=\"$OUTPUT_FILE\" die_after_fork=0"
-    export ASAN_OPTIONS="log_path=\"$OUTPUT_FILE\""
-    export UBSAN_OPTIONS="log_path=\"$OUTPUT_FILE\""
-    export MallocNanoZone='0' # for MacOSX
-    $TEST_BINARY --gtest_filter="$TEST" > /dev/null 2>&1
+export TSAN_OPTIONS="die_after_fork=0"
+export MallocNanoZone='0' # for MacOSX
+
+for TEST in $TESTS; do
+    OUTPUT_FILE="$OUTPUT_DIR/${TEST//\//_}.log"
+    $TEST_BINARY --gtest_filter="$TEST" >"$OUTPUT_FILE" 2>&1

-    if [ $? -ne 0 ]; then
-        echo "'$TEST' failed a sanitizer check."
-    fi
+    if [ $? -ne 0 ]; then
+        echo "'$TEST' failed a sanitizer check."
+    else
+        rm "$OUTPUT_FILE"
+    fi
 done
.github/scripts/prepare-release-artifacts.sh (vendored, 2 changed lines)

@@ -20,5 +20,5 @@ for artifact_name in $(ls); do
     rm "${artifact_name}/${BINARY_NAME}"
     rm -r "${artifact_name}"

-    sha256sum "./${artifact_name}.zip" > "./${artifact_name}.zip.sha256sum"
+    sha256sum "./${artifact_name}.zip" >"./${artifact_name}.zip.sha256sum"
 done
.github/workflows/build-clio-docker-image.yml (vendored, 18 changed lines)

@@ -38,32 +38,37 @@ on:
         description: Whether to strip clio binary
         default: true

+defaults:
+  run:
+    shell: bash
+
 jobs:
   build_and_publish_image:
     name: Build and publish image
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Download Clio binary from artifact
         if: ${{ inputs.artifact_name != null }}
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
         with:
           name: ${{ inputs.artifact_name }}
           path: ./docker/clio/artifact/

       - name: Download Clio binary from url
         if: ${{ inputs.clio_server_binary_url != null }}
-        shell: bash
+        env:
+          BINARY_URL: ${{ inputs.clio_server_binary_url }}
+          BINARY_SHA256: ${{ inputs.binary_sha256 }}
         run: |
-          wget "${{inputs.clio_server_binary_url}}" -P ./docker/clio/artifact/
-          if [ "$(sha256sum ./docker/clio/clio_server | awk '{print $1}')" != "${{inputs.binary_sha256}}" ]; then
+          wget "${BINARY_URL}" -P ./docker/clio/artifact/
+          if [ "$(sha256sum ./docker/clio/clio_server | awk '{print $1}')" != "${BINARY_SHA256}" ]; then
             echo "Binary sha256 sum doesn't match"
             exit 1
           fi
       - name: Unpack binary
-        shell: bash
         run: |
           sudo apt update && sudo apt install -y tar unzip
           cd docker/clio/artifact
@@ -80,7 +85,6 @@ jobs:

       - name: Strip binary
         if: ${{ inputs.strip_binary }}
-        shell: bash
         run: strip ./docker/clio/clio_server

       - name: Set GHCR_REPO
.github/workflows/build.yml (vendored, 36 changed lines)

@@ -23,6 +23,7 @@ on:
       - "cmake/**"
       - "src/**"
       - "tests/**"
+      - "benchmarks/**"

       - docs/config-description.md
   workflow_dispatch:
@@ -33,6 +34,10 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}-${{ github.ref == 'refs/heads/develop' && github.run_number || 'branch' }}
   cancel-in-progress: true

+defaults:
+  run:
+    shell: bash
+
 jobs:
   build-and-test:
     name: Build and Test
@@ -45,7 +50,7 @@ jobs:
         build_type: [Release, Debug]
         container:
           [
-            '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }',
+            '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }',
           ]
         static: [true]

@@ -75,11 +80,11 @@ jobs:
     uses: ./.github/workflows/reusable-build.yml
     with:
       runs_on: heavy
-      container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+      container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'
       conan_profile: gcc
       build_type: Debug
       download_ccache: true
-      upload_ccache: false
+      upload_ccache: true
       code_coverage: true
       static: true
       upload_clio_server: false
@@ -88,40 +93,21 @@ jobs:
     secrets:
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

-  package:
-    name: Build packages
-
-    uses: ./.github/workflows/reusable-build.yml
-    with:
-      runs_on: heavy
-      container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
-      conan_profile: gcc
-      build_type: Release
-      download_ccache: true
-      upload_ccache: false
-      code_coverage: false
-      static: true
-      upload_clio_server: false
-      package: true
-      targets: package
-      analyze_build_time: false
-
   check_config:
     name: Check Config Description
     needs: build-and-test
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
+      image: ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

-      - uses: actions/download-artifact@v5
+      - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
         with:
           name: clio_server_Linux_Release_gcc

       - name: Compare Config Description
-        shell: bash
         run: |
           repoConfigFile=docs/config-description.md
           configDescriptionFile=config_description_new.md
.github/workflows/check-libxrpl.yml (vendored, 22 changed lines)

@@ -12,31 +12,33 @@ concurrency:
 env:
   CONAN_PROFILE: gcc

+defaults:
+  run:
+    shell: bash
+
 jobs:
   build:
     name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
+      image: ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           fetch-depth: 0

       - name: Prepare runner
-        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
+        uses: XRPLF/actions/prepare-runner@e8d2d2a546a03e1d161dca52890705f3bc641215
         with:
-          disable_ccache: true
+          enable_ccache: false

       - name: Update libXRPL version requirement
-        shell: bash
         run: |
           sed -i.bak -E "s|'xrpl/[a-zA-Z0-9\\.\\-]+'|'xrpl/${{ github.event.client_payload.conan_ref }}'|g" conanfile.py
           rm -f conanfile.py.bak

       - name: Update conan lockfile
-        shell: bash
         run: |
           conan lock create . --profile:all ${{ env.CONAN_PROFILE }}

@@ -57,7 +59,7 @@ jobs:
         run: strip build/clio_tests

       - name: Upload clio_tests
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
         with:
           name: clio_tests_check_libxrpl
           path: build/clio_tests
@@ -67,10 +69,10 @@ jobs:
     needs: build
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
+      image: ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696

     steps:
-      - uses: actions/download-artifact@v5
+      - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
         with:
           name: clio_tests_check_libxrpl

@@ -90,7 +92,7 @@ jobs:
       issues: write

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Create an issue
         uses: ./.github/actions/create-issue
.github/workflows/check-pr-title.yml (vendored, 10 changed lines)

@@ -5,20 +5,26 @@ on:
     types: [opened, edited, reopened, synchronize]
     branches: [develop]

+defaults:
+  run:
+    shell: bash
+
 jobs:
   check_title:
     runs-on: ubuntu-latest

     steps:
-      - uses: ytanikin/pr-conventional-commits@b72758283dcbee706975950e96bc4bf323a8d8c0 # v1.4.2
+      - uses: ytanikin/pr-conventional-commits@fda730cb152c05a849d6d84325e50c6182d9d1e9 # 1.5.1
        with:
          task_types: '["build","feat","fix","docs","test","ci","style","refactor","perf","chore"]'
          add_label: false
          custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'

      - name: Check if message starts with upper-case letter
+        env:
+          PR_TITLE: ${{ github.event.pull_request.title }}
        run: |
-          if [[ ! "${{ github.event.pull_request.title }}" =~ ^[a-z]+:\ [\[A-Z] ]]; then
+          if [[ ! "${PR_TITLE}" =~ ^[a-z]+:\ [\[A-Z] ]]; then
            echo "Error: PR title must start with an upper-case letter."
            exit 1
          fi
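For reference, the `^[a-z]+:\ [\[A-Z]` pattern used above requires a lower-case conventional-commit type, a colon and a space, and then either an upper-case letter or an opening bracket. A small, self-contained sketch with hypothetical titles:

```bash
#!/usr/bin/env bash
# Illustration of the title check; the example titles are hypothetical.
check_title() {
    if [[ "$1" =~ ^[a-z]+:\ [\[A-Z] ]]; then
        echo "accepted: $1"
    else
        echo "rejected: $1"
    fi
}

check_title "feat: Add Debian package build"      # accepted
check_title "ci: [DEPENDABOT] Bump actions"       # accepted (bracket allowed)
check_title "feat: add debian package build"      # rejected: body starts lower-case
```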
.github/workflows/clang-tidy.yml (vendored, 64 changed lines)

@@ -22,12 +22,16 @@ env:
   CONAN_PROFILE: clang
   LLVM_TOOLS_VERSION: 20

+defaults:
+  run:
+    shell: bash
+
 jobs:
   clang_tidy:
     if: github.event_name != 'push' || contains(github.event.head_commit.message, 'clang-tidy auto fixes')
     runs-on: heavy
     container:
-      image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
+      image: ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696

     permissions:
       contents: write
@@ -35,21 +39,14 @@ jobs:
       pull-requests: write

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           fetch-depth: 0

       - name: Prepare runner
-        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
+        uses: XRPLF/actions/prepare-runner@e8d2d2a546a03e1d161dca52890705f3bc641215
         with:
-          disable_ccache: true
+          enable_ccache: false

-      - name: Restore cache
-        uses: ./.github/actions/restore-cache
-        id: restore_cache
-        with:
-          conan_profile: ${{ env.CONAN_PROFILE }}
-          ccache_dir: ${{ env.CCACHE_DIR }}
-
       - name: Run conan
         uses: ./.github/actions/conan
@@ -61,34 +58,34 @@ jobs:
         with:
           conan_profile: ${{ env.CONAN_PROFILE }}

-      - name: Get number of threads
-        uses: ./.github/actions/get-threads-number
-        id: number_of_threads
+      - name: Get number of processors
+        uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf
+        id: nproc

-      - name: Run clang-tidy
+      - name: Run clang-tidy (several times)
         continue-on-error: true
-        shell: bash
-        id: run_clang_tidy
+        id: clang_tidy
         run: |
-          run-clang-tidy-${{ env.LLVM_TOOLS_VERSION }} -p build -j "${{ steps.number_of_threads.outputs.threads_number }}" -fix -quiet 1>output.txt
+          # We run clang-tidy several times, because some fixes may enable new fixes in subsequent runs.
+          CLANG_TIDY_COMMAND="run-clang-tidy-${{ env.LLVM_TOOLS_VERSION }} -p build -j ${{ steps.nproc.outputs.nproc }} -fix -quiet"
+          ${CLANG_TIDY_COMMAND} ||
+          ${CLANG_TIDY_COMMAND} ||
+          ${CLANG_TIDY_COMMAND}
+
+      - name: Check for changes
+        id: files_changed
+        continue-on-error: true
+        run: |
+          git diff --exit-code

       - name: Fix local includes and clang-format style
-        if: ${{ steps.run_clang_tidy.outcome != 'success' }}
-        shell: bash
+        if: ${{ steps.files_changed.outcome != 'success' }}
         run: |
           pre-commit run --all-files fix-local-includes || true
           pre-commit run --all-files clang-format || true

-      - name: Print issues found
-        if: ${{ steps.run_clang_tidy.outcome != 'success' }}
-        shell: bash
-        run: |
-          sed -i '/error\||/!d' ./output.txt
-          cat output.txt
-          rm output.txt
-
       - name: Create an issue
-        if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
+        if: ${{ (steps.clang_tidy.outcome != 'success' || steps.files_changed.outcome != 'success') && github.event_name != 'pull_request' }}
         id: create_issue
         uses: ./.github/actions/create-issue
         env:
@@ -101,7 +98,7 @@ jobs:
           List of the issues found: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/

       - uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0
-        if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
+        if: ${{ steps.files_changed.outcome != 'success' && github.event_name != 'pull_request' }}
         with:
           gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
           passphrase: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
@@ -109,8 +106,8 @@ jobs:
           git_commit_gpgsign: true

       - name: Create PR with fixes
-        if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        if: ${{ steps.files_changed.outcome != 'success' && github.event_name != 'pull_request' }}
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
         env:
           GH_REPO: ${{ github.repository }}
           GH_TOKEN: ${{ github.token }}
@@ -125,6 +122,5 @@ jobs:
           reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"

       - name: Fail the job
-        if: ${{ steps.run_clang_tidy.outcome != 'success' }}
-        shell: bash
+        if: ${{ steps.clang_tidy.outcome != 'success' || steps.files_changed.outcome != 'success' }}
         run: exit 1
.github/workflows/docs.yml (vendored, 18 changed lines)

@@ -10,22 +10,26 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

+defaults:
+  run:
+    shell: bash
+
 jobs:
   build:
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
+      image: ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696

     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           lfs: true

       - name: Prepare runner
-        uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
+        uses: XRPLF/actions/prepare-runner@e8d2d2a546a03e1d161dca52890705f3bc641215
         with:
-          disable_ccache: true
+          enable_ccache: false

       - name: Create build directory
         run: mkdir build_docs
@@ -39,10 +43,10 @@ jobs:
         run: cmake --build . --target docs

       - name: Setup Pages
-        uses: actions/configure-pages@v5
+        uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5.0.0

       - name: Upload artifact
-        uses: actions/upload-pages-artifact@v4
+        uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4.0.0
         with:
           path: build_docs/html
           name: docs-develop
@@ -62,6 +66,6 @@ jobs:
     steps:
       - name: Deploy to GitHub Pages
         id: deployment
-        uses: actions/deploy-pages@v4
+        uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5
         with:
           artifact_name: docs-develop
.github/workflows/nightly.yml (vendored, 59 changed lines)

@@ -23,9 +23,25 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

+defaults:
+  run:
+    shell: bash
+
 jobs:
+  get_date:
+    name: Get Date
+    runs-on: ubuntu-latest
+    outputs:
+      date: ${{ steps.get_date.outputs.date }}
+    steps:
+      - name: Get current date
+        id: get_date
+        run: |
+          echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
+
   build-and-test:
     name: Build and Test
+    needs: get_date

     strategy:
       fail-fast: false
@@ -39,17 +55,17 @@ jobs:
           conan_profile: gcc
           build_type: Release
           static: true
-          container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+          container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'
         - os: heavy
           conan_profile: gcc
           build_type: Debug
           static: true
-          container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+          container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'
         - os: heavy
           conan_profile: gcc.ubsan
           build_type: Release
           static: false
-          container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+          container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'

     uses: ./.github/workflows/reusable-build-test.yml
     with:
@@ -63,9 +79,31 @@ jobs:
       upload_clio_server: true
       download_ccache: false
       upload_ccache: false
+      version: nightly-${{ needs.get_date.outputs.date }}

+  package:
+    name: Build debian package
+    needs: get_date
+
+    uses: ./.github/workflows/reusable-build.yml
+    with:
+      runs_on: heavy
+      container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'
+      conan_profile: gcc
+      build_type: Release
+      download_ccache: false
+      upload_ccache: false
+      code_coverage: false
+      static: true
+      upload_clio_server: false
+      package: true
+      version: nightly-${{ needs.get_date.outputs.date }}
+      targets: package
+      analyze_build_time: false
+
   analyze_build_time:
     name: Analyze Build Time
+    needs: get_date

     strategy:
       fail-fast: false
@@ -73,7 +111,7 @@ jobs:
       include:
         - os: heavy
           conan_profile: clang
-          container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+          container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'
           static: true
         - os: macos15
           conan_profile: apple-clang
@@ -92,19 +130,20 @@ jobs:
       upload_clio_server: false
       targets: all
       analyze_build_time: true
+      version: nightly-${{ needs.get_date.outputs.date }}

   nightly_release:
-    needs: build-and-test
+    needs: [build-and-test, package, get_date]
     uses: ./.github/workflows/reusable-release.yml
     with:
-      overwrite_release: true
+      delete_pattern: "nightly-*"
       prerelease: true
-      title: "Clio development (nightly) build"
-      version: nightly
+      title: "Clio development build (nightly-${{ needs.get_date.outputs.date }})"
+      version: nightly-${{ needs.get_date.outputs.date }}
       header: >
         > **Note:** Please remember that this is a development release and it is not recommended for production use.

-        Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
+        Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly-${{ needs.get_date.outputs.date }}>
       generate_changelog: false
       draft: false

@@ -130,7 +169,7 @@ jobs:
       issues: write

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Create an issue
         uses: ./.github/actions/create-issue
.github/workflows/pre-commit-autoupdate.yml (vendored, 4 changed lines)

@@ -1,8 +1,8 @@
 name: Pre-commit auto-update

 on:
-  # every first day of the month
   schedule:
+    # every first day of the month
     - cron: "0 0 1 * *"
   pull_request:
     branches: [release/*, develop]
@@ -12,7 +12,7 @@ on:

 jobs:
   auto-update:
-    uses: XRPLF/actions/.github/workflows/pre-commit-autoupdate.yml@afbcbdafbe0ce5439492fb87eda6441371086386
+    uses: XRPLF/actions/.github/workflows/pre-commit-autoupdate.yml@ad4ab1ae5a54a4bab0e87294c31fc0729f788b2b
     with:
       sign_commit: true
       committer: "Clio CI <skuznetsov@ripple.com>"
.github/workflows/pre-commit.yml (vendored, 4 changed lines)

@@ -8,7 +8,7 @@ on:

 jobs:
   run-hooks:
-    uses: XRPLF/actions/.github/workflows/pre-commit.yml@a8d7472b450eb53a1e5228f64552e5974457a21a
+    uses: XRPLF/actions/.github/workflows/pre-commit.yml@01163508e81d7dd63d4601d4090b297a260b18c2
     with:
       runs_on: heavy
-      container: '{ "image": "ghcr.io/xrplf/clio-pre-commit:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+      container: '{ "image": "ghcr.io/xrplf/clio-pre-commit:14342e087ceb8b593027198bf9ef06a43833c696" }'
.github/workflows/release.yml (vendored, 27 changed lines)

@@ -29,7 +29,7 @@ jobs:
           conan_profile: gcc
           build_type: Release
           static: true
-          container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+          container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'

     uses: ./.github/workflows/reusable-build-test.yml
     with:
@@ -43,13 +43,32 @@ jobs:
       upload_clio_server: true
       download_ccache: false
       upload_ccache: false
-      expected_version: ${{ github.event_name == 'push' && github.ref_name || '' }}
+      version: ${{ github.event_name == 'push' && github.ref_name || '' }}
+
+  package:
+    name: Build debian package
+
+    uses: ./.github/workflows/reusable-build.yml
+    with:
+      runs_on: heavy
+      container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'
+      conan_profile: gcc
+      build_type: Release
+      download_ccache: false
+      upload_ccache: false
+      code_coverage: false
+      static: true
+      upload_clio_server: false
+      package: true
+      version: ${{ github.event_name == 'push' && github.ref_name || '' }}
+      targets: package
+      analyze_build_time: false

   release:
-    needs: build-and-test
+    needs: [build-and-test, package]
     uses: ./.github/workflows/reusable-release.yml
     with:
-      overwrite_release: false
+      delete_pattern: ""
       prerelease: ${{ contains(github.ref_name, '-') }}
       title: "${{ github.ref_name }}"
       version: "${{ github.ref_name }}"
14
.github/workflows/reusable-build-test.yml
vendored
14
.github/workflows/reusable-build-test.yml
vendored
@@ -63,18 +63,18 @@ on:
|
|||||||
type: string
|
type: string
|
||||||
default: all
|
default: all
|
||||||
|
|
||||||
expected_version:
|
|
||||||
description: Expected version of the clio_server binary
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: ""
|
|
||||||
|
|
||||||
package:
|
package:
|
||||||
description: Whether to generate Debian package
|
description: Whether to generate Debian package
|
||||||
required: false
|
required: false
|
||||||
type: boolean
|
type: boolean
|
||||||
default: false
|
default: false
|
||||||
|
|
||||||
|
version:
|
||||||
|
description: Version of the clio_server binary
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ""
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
uses: ./.github/workflows/reusable-build.yml
|
uses: ./.github/workflows/reusable-build.yml
|
||||||
@@ -90,8 +90,8 @@ jobs:
|
|||||||
upload_clio_server: ${{ inputs.upload_clio_server }}
|
upload_clio_server: ${{ inputs.upload_clio_server }}
|
||||||
targets: ${{ inputs.targets }}
|
targets: ${{ inputs.targets }}
|
||||||
analyze_build_time: false
|
analyze_build_time: false
|
||||||
expected_version: ${{ inputs.expected_version }}
|
|
||||||
package: ${{ inputs.package }}
|
package: ${{ inputs.package }}
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
|
||||||
test:
|
test:
|
||||||
needs: build
|
needs: build
|
||||||
|
.github/workflows/reusable-build.yml (107 lines changed, vendored)
@@ -60,21 +60,25 @@ on:
|
|||||||
required: true
|
required: true
|
||||||
type: boolean
|
type: boolean
|
||||||
|
|
||||||
expected_version:
|
|
||||||
description: Expected version of the clio_server binary
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: ""
|
|
||||||
|
|
||||||
package:
|
package:
|
||||||
description: Whether to generate Debian package
|
description: Whether to generate Debian package
|
||||||
required: false
|
required: false
|
||||||
type: boolean
|
type: boolean
|
||||||
|
|
||||||
|
version:
|
||||||
|
description: Version of the clio_server binary
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ""
|
||||||
|
|
||||||
secrets:
|
secrets:
|
||||||
CODECOV_TOKEN:
|
CODECOV_TOKEN:
|
||||||
required: false
|
required: false
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
name: Build
|
name: Build
|
||||||
@@ -84,36 +88,38 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Cleanup workspace
|
- name: Cleanup workspace
|
||||||
if: ${{ runner.os == 'macOS' }}
|
if: ${{ runner.os == 'macOS' }}
|
||||||
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
|
uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
# We need to fetch tags to have correct version in the release
|
|
||||||
# The workaround is based on https://github.com/actions/checkout/issues/1467
|
|
||||||
fetch-tags: true
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
|
|
||||||
- name: Prepare runner
|
- name: Prepare runner
|
||||||
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
|
uses: XRPLF/actions/prepare-runner@e8d2d2a546a03e1d161dca52890705f3bc641215
|
||||||
with:
|
with:
|
||||||
disable_ccache: ${{ !inputs.download_ccache }}
|
enable_ccache: ${{ inputs.download_ccache }}
|
||||||
|
|
||||||
- name: Setup conan on macOS
|
- name: Setup conan on macOS
|
||||||
if: ${{ runner.os == 'macOS' }}
|
if: ${{ runner.os == 'macOS' }}
|
||||||
shell: bash
|
|
||||||
run: ./.github/scripts/conan/init.sh
|
run: ./.github/scripts/conan/init.sh
|
||||||
|
|
||||||
- name: Restore cache
|
- name: Generate cache key
|
||||||
if: ${{ inputs.download_ccache }}
|
uses: ./.github/actions/cache-key
|
||||||
uses: ./.github/actions/restore-cache
|
id: cache_key
|
||||||
id: restore_cache
|
|
||||||
with:
|
with:
|
||||||
conan_profile: ${{ inputs.conan_profile }}
|
conan_profile: ${{ inputs.conan_profile }}
|
||||||
ccache_dir: ${{ env.CCACHE_DIR }}
|
|
||||||
build_type: ${{ inputs.build_type }}
|
build_type: ${{ inputs.build_type }}
|
||||||
code_coverage: ${{ inputs.code_coverage }}
|
code_coverage: ${{ inputs.code_coverage }}
|
||||||
|
|
||||||
|
- name: Restore ccache cache
|
||||||
|
if: ${{ inputs.download_ccache && github.ref != 'refs/heads/develop' }}
|
||||||
|
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||||
|
with:
|
||||||
|
path: ${{ env.CCACHE_DIR }}
|
||||||
|
key: ${{ steps.cache_key.outputs.key }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ steps.cache_key.outputs.restore_keys }}
|
||||||
|
|
||||||
- name: Run conan
|
- name: Run conan
|
||||||
uses: ./.github/actions/conan
|
uses: ./.github/actions/conan
|
||||||
with:
|
with:
|
||||||
@@ -129,6 +135,7 @@ jobs:
|
|||||||
static: ${{ inputs.static }}
|
static: ${{ inputs.static }}
|
||||||
time_trace: ${{ inputs.analyze_build_time }}
|
time_trace: ${{ inputs.analyze_build_time }}
|
||||||
package: ${{ inputs.package }}
|
package: ${{ inputs.package }}
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
|
||||||
- name: Build Clio
|
- name: Build Clio
|
||||||
uses: ./.github/actions/build-clio
|
uses: ./.github/actions/build-clio
|
||||||
@@ -141,24 +148,26 @@ jobs:
|
|||||||
ClangBuildAnalyzer --all build/ build_time_report.bin
|
ClangBuildAnalyzer --all build/ build_time_report.bin
|
||||||
ClangBuildAnalyzer --analyze build_time_report.bin > build_time_report.txt
|
ClangBuildAnalyzer --analyze build_time_report.bin > build_time_report.txt
|
||||||
cat build_time_report.txt
|
cat build_time_report.txt
|
||||||
shell: bash
|
|
||||||
|
|
||||||
- name: Upload build time analyze report
|
- name: Upload build time analyze report
|
||||||
if: ${{ inputs.analyze_build_time }}
|
if: ${{ inputs.analyze_build_time }}
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||||
with:
|
with:
|
||||||
name: build_time_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
name: build_time_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||||
path: build_time_report.txt
|
path: build_time_report.txt
|
||||||
|
|
||||||
- name: Show ccache's statistics
|
- name: Show ccache's statistics and zero it
|
||||||
if: ${{ inputs.download_ccache }}
|
if: ${{ inputs.download_ccache }}
|
||||||
shell: bash
|
|
||||||
id: ccache_stats
|
|
||||||
run: |
|
run: |
|
||||||
ccache -s > /tmp/ccache.stats
|
ccache --show-stats -vv
|
||||||
miss_rate=$(cat /tmp/ccache.stats | grep 'Misses' | head -n1 | sed 's/.*(\(.*\)%).*/\1/')
|
ccache --zero-stats
|
||||||
echo "miss_rate=${miss_rate}" >> $GITHUB_OUTPUT
|
|
||||||
cat /tmp/ccache.stats
|
- name: Save ccache cache
|
||||||
|
if: ${{ inputs.upload_ccache && github.ref == 'refs/heads/develop' }}
|
||||||
|
uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||||
|
with:
|
||||||
|
path: ${{ env.CCACHE_DIR }}
|
||||||
|
key: ${{ steps.cache_key.outputs.key }}
|
||||||
|
|
||||||
- name: Strip unit_tests
|
- name: Strip unit_tests
|
||||||
if: ${{ !endsWith(inputs.conan_profile, 'san') && !inputs.code_coverage && !inputs.analyze_build_time }}
|
if: ${{ !endsWith(inputs.conan_profile, 'san') && !inputs.code_coverage && !inputs.analyze_build_time }}
|
||||||
@@ -170,44 +179,32 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload clio_server
|
- name: Upload clio_server
|
||||||
if: ${{ inputs.upload_clio_server && !inputs.code_coverage && !inputs.analyze_build_time }}
|
if: ${{ inputs.upload_clio_server && !inputs.code_coverage && !inputs.analyze_build_time }}
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||||
with:
|
with:
|
||||||
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||||
path: build/clio_server
|
path: build/clio_server
|
||||||
|
|
||||||
- name: Upload clio_tests
|
- name: Upload clio_tests
|
||||||
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
|
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||||
with:
|
with:
|
||||||
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||||
path: build/clio_tests
|
path: build/clio_tests
|
||||||
|
|
||||||
- name: Upload clio_integration_tests
|
- name: Upload clio_integration_tests
|
||||||
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
|
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||||
with:
|
with:
|
||||||
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||||
path: build/clio_integration_tests
|
path: build/clio_integration_tests
|
||||||
|
|
||||||
- name: Upload Clio Linux package
|
- name: Upload Clio Linux package
|
||||||
if: ${{ inputs.package }}
|
if: ${{ inputs.package }}
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||||
with:
|
with:
|
||||||
name: clio_deb_package_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
name: clio_deb_package_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||||
path: build/*.deb
|
path: build/*.deb
|
||||||
|
|
||||||
- name: Save cache
|
|
||||||
if: ${{ inputs.upload_ccache && github.ref == 'refs/heads/develop' }}
|
|
||||||
uses: ./.github/actions/save-cache
|
|
||||||
with:
|
|
||||||
conan_profile: ${{ inputs.conan_profile }}
|
|
||||||
ccache_dir: ${{ env.CCACHE_DIR }}
|
|
||||||
build_type: ${{ inputs.build_type }}
|
|
||||||
code_coverage: ${{ inputs.code_coverage }}
|
|
||||||
|
|
||||||
ccache_cache_hit: ${{ steps.restore_cache.outputs.ccache_cache_hit }}
|
|
||||||
ccache_cache_miss_rate: ${{ steps.ccache_stats.outputs.miss_rate }}
|
|
||||||
|
|
||||||
# This is run as part of the build job, because it requires the following:
|
# This is run as part of the build job, because it requires the following:
|
||||||
# - source code
|
# - source code
|
||||||
# - conan packages
|
# - conan packages
|
||||||
@@ -218,15 +215,21 @@ jobs:
|
|||||||
if: ${{ inputs.code_coverage }}
|
if: ${{ inputs.code_coverage }}
|
||||||
uses: ./.github/actions/code-coverage
|
uses: ./.github/actions/code-coverage
|
||||||
|
|
||||||
- name: Verify expected version
|
- name: Verify version is expected
|
||||||
if: ${{ inputs.expected_version != '' }}
|
if: ${{ inputs.version != '' }}
|
||||||
shell: bash
|
env:
|
||||||
|
INPUT_VERSION: ${{ inputs.version }}
|
||||||
|
BUILD_TYPE: ${{ inputs.build_type }}
|
||||||
run: |
|
run: |
|
||||||
set -e
|
set -e
|
||||||
EXPECTED_VERSION="clio-${{ inputs.expected_version }}"
|
EXPECTED_VERSION="clio-${INPUT_VERSION}"
|
||||||
actual_version=$(./build/clio_server --version)
|
if [[ "${BUILD_TYPE}" == "Debug" ]]; then
|
||||||
if [[ "$actual_version" != "$EXPECTED_VERSION" ]]; then
|
EXPECTED_VERSION="${EXPECTED_VERSION}+DEBUG"
|
||||||
echo "Expected version '$EXPECTED_VERSION', but got '$actual_version'"
|
fi
|
||||||
|
|
||||||
|
actual_version=$(./build/clio_server --version | head -n 1)
|
||||||
|
if [[ "${actual_version}" != "${EXPECTED_VERSION}" ]]; then
|
||||||
|
echo "Expected version '${EXPECTED_VERSION}', but got '${actual_version}'"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
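For reference, the version check introduced above can be re-run outside CI. A minimal sketch, assuming a locally built build/clio_server and the same clio-<version> naming (with a +DEBUG suffix for Debug builds) that the workflow expects; the tag value is hypothetical:

#!/usr/bin/env bash
# Illustrative re-run of the workflow's "Verify version is expected" step.
set -e

INPUT_VERSION="2.6.0" # hypothetical value; the workflow passes inputs.version
BUILD_TYPE="Release"  # or "Debug"

EXPECTED_VERSION="clio-${INPUT_VERSION}"
if [[ "${BUILD_TYPE}" == "Debug" ]]; then
    EXPECTED_VERSION="${EXPECTED_VERSION}+DEBUG"
fi

actual_version=$(./build/clio_server --version | head -n 1)
if [[ "${actual_version}" != "${EXPECTED_VERSION}" ]]; then
    echo "Expected version '${EXPECTED_VERSION}', but got '${actual_version}'"
    exit 1
fi
echo "Version OK: ${actual_version}"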
.github/workflows/reusable-release.yml (75 lines changed, vendored)
@@ -3,10 +3,10 @@ name: Make release
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
inputs:
|
inputs:
|
||||||
overwrite_release:
|
delete_pattern:
|
||||||
description: "Overwrite the current release and tag"
|
description: "Pattern to delete previous releases"
|
||||||
required: true
|
required: true
|
||||||
type: boolean
|
type: string
|
||||||
|
|
||||||
prerelease:
|
prerelease:
|
||||||
description: "Create a prerelease"
|
description: "Create a prerelease"
|
||||||
@@ -38,11 +38,15 @@ on:
|
|||||||
required: true
|
required: true
|
||||||
type: boolean
|
type: boolean
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
release:
|
release:
|
||||||
runs-on: heavy
|
runs-on: heavy
|
||||||
container:
|
container:
|
||||||
image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
|
image: ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696
|
||||||
env:
|
env:
|
||||||
GH_REPO: ${{ github.repository }}
|
GH_REPO: ${{ github.repository }}
|
||||||
GH_TOKEN: ${{ github.token }}
|
GH_TOKEN: ${{ github.token }}
|
||||||
@@ -51,62 +55,75 @@ jobs:
|
|||||||
contents: write
|
contents: write
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Prepare runner
|
- name: Prepare runner
|
||||||
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
|
uses: XRPLF/actions/prepare-runner@e8d2d2a546a03e1d161dca52890705f3bc641215
|
||||||
with:
|
with:
|
||||||
disable_ccache: true
|
enable_ccache: false
|
||||||
|
|
||||||
- uses: actions/download-artifact@v5
|
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||||
with:
|
with:
|
||||||
path: release_artifacts
|
path: release_artifacts
|
||||||
pattern: clio_server_*
|
pattern: clio_server_*
|
||||||
|
|
||||||
|
- name: Prepare release artifacts
|
||||||
|
run: .github/scripts/prepare-release-artifacts.sh release_artifacts
|
||||||
|
|
||||||
|
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||||
|
with:
|
||||||
|
path: release_artifacts
|
||||||
|
pattern: clio_deb_package_*
|
||||||
|
|
||||||
- name: Create release notes
|
- name: Create release notes
|
||||||
shell: bash
|
env:
|
||||||
|
RELEASE_HEADER: ${{ inputs.header }}
|
||||||
run: |
|
run: |
|
||||||
echo "# Release notes" > "${RUNNER_TEMP}/release_notes.md"
|
echo "# Release notes" > "${RUNNER_TEMP}/release_notes.md"
|
||||||
echo "" >> "${RUNNER_TEMP}/release_notes.md"
|
echo "" >> "${RUNNER_TEMP}/release_notes.md"
|
||||||
printf '%s\n' "${{ inputs.header }}" >> "${RUNNER_TEMP}/release_notes.md"
|
printf '%s\n' "${RELEASE_HEADER}" >> "${RUNNER_TEMP}/release_notes.md"
|
||||||
|
|
||||||
- name: Generate changelog
|
- name: Generate changelog
|
||||||
shell: bash
|
|
||||||
if: ${{ inputs.generate_changelog }}
|
if: ${{ inputs.generate_changelog }}
|
||||||
run: |
|
run: |
|
||||||
LAST_TAG="$(gh release view --json tagName -q .tagName --repo XRPLF/clio)"
|
LAST_TAG="$(gh release view --json tagName -q .tagName --repo XRPLF/clio)"
|
||||||
LAST_TAG_COMMIT="$(git rev-parse $LAST_TAG)"
|
LAST_TAG_COMMIT="$(git rev-parse $LAST_TAG)"
|
||||||
BASE_COMMIT="$(git merge-base HEAD $LAST_TAG_COMMIT)"
|
BASE_COMMIT="$(git merge-base HEAD $LAST_TAG_COMMIT)"
|
||||||
git-cliff "${BASE_COMMIT}..HEAD" --ignore-tags "nightly|-b|-rc"
|
git-cliff "${BASE_COMMIT}..HEAD" --ignore-tags "nightly|-b|-rc" >> "${RUNNER_TEMP}/release_notes.md"
|
||||||
cat CHANGELOG.md >> "${RUNNER_TEMP}/release_notes.md"
|
|
||||||
|
|
||||||
- name: Prepare release artifacts
|
|
||||||
shell: bash
|
|
||||||
run: .github/scripts/prepare-release-artifacts.sh release_artifacts
|
|
||||||
|
|
||||||
- name: Upload release notes
|
- name: Upload release notes
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||||
with:
|
with:
|
||||||
name: release_notes_${{ inputs.version }}
|
name: release_notes_${{ inputs.version }}
|
||||||
path: "${RUNNER_TEMP}/release_notes.md"
|
path: "${RUNNER_TEMP}/release_notes.md"
|
||||||
|
|
||||||
- name: Remove current release and tag
|
- name: Remove previous release with a pattern
|
||||||
if: ${{ github.event_name != 'pull_request' && inputs.overwrite_release }}
|
if: ${{ github.event_name != 'pull_request' && inputs.delete_pattern != '' }}
|
||||||
shell: bash
|
env:
|
||||||
|
DELETE_PATTERN: ${{ inputs.delete_pattern }}
|
||||||
run: |
|
run: |
|
||||||
gh release delete ${{ inputs.version }} --yes || true
|
RELEASES_TO_DELETE=$(gh release list --limit 50 --repo "${GH_REPO}" | grep -E "${DELETE_PATTERN}" | awk -F'\t' '{print $3}' || true)
|
||||||
git push origin :${{ inputs.version }} || true
|
if [ -n "$RELEASES_TO_DELETE" ]; then
|
||||||
|
for RELEASE in $RELEASES_TO_DELETE; do
|
||||||
|
echo "Deleting release: $RELEASE"
|
||||||
|
gh release delete "$RELEASE" --repo "${GH_REPO}" --yes --cleanup-tag
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
- name: Publish release
|
- name: Publish release
|
||||||
if: ${{ github.event_name != 'pull_request' }}
|
if: ${{ github.event_name != 'pull_request' }}
|
||||||
shell: bash
|
env:
|
||||||
|
RELEASE_VERSION: ${{ inputs.version }}
|
||||||
|
PRERELEASE_OPTION: ${{ inputs.prerelease && '--prerelease' || '' }}
|
||||||
|
RELEASE_TITLE: ${{ inputs.title }}
|
||||||
|
DRAFT_OPTION: ${{ inputs.draft && '--draft' || '' }}
|
||||||
run: |
|
run: |
|
||||||
gh release create "${{ inputs.version }}" \
|
gh release create "${RELEASE_VERSION}" \
|
||||||
${{ inputs.prerelease && '--prerelease' || '' }} \
|
${PRERELEASE_OPTION} \
|
||||||
--title "${{ inputs.title }}" \
|
--title "${RELEASE_TITLE}" \
|
||||||
--target "${GITHUB_SHA}" \
|
--target "${GITHUB_SHA}" \
|
||||||
${{ inputs.draft && '--draft' || '' }} \
|
${DRAFT_OPTION} \
|
||||||
--notes-file "${RUNNER_TEMP}/release_notes.md" \
|
--notes-file "${RUNNER_TEMP}/release_notes.md" \
|
||||||
./release_artifacts/clio_server*
|
./release_artifacts/clio_*
|
||||||
|
.github/workflows/reusable-test.yml (40 lines changed, vendored)
@@ -33,6 +33,10 @@ on:
|
|||||||
required: true
|
required: true
|
||||||
type: boolean
|
type: boolean
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
unit_tests:
|
unit_tests:
|
||||||
name: Unit testing
|
name: Unit testing
|
||||||
@@ -43,23 +47,22 @@ jobs:
|
|||||||
|
|
||||||
env:
|
env:
|
||||||
# TODO: remove completely when we have fixed all currently existing issues with sanitizers
|
# TODO: remove completely when we have fixed all currently existing issues with sanitizers
|
||||||
SANITIZER_IGNORE_ERRORS: ${{ endsWith(inputs.conan_profile, '.tsan') || inputs.conan_profile == 'clang.asan' || (inputs.conan_profile == 'gcc.asan' && inputs.build_type == 'Release') }}
|
SANITIZER_IGNORE_ERRORS: ${{ endsWith(inputs.conan_profile, '.tsan') }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Cleanup workspace
|
- name: Cleanup workspace
|
||||||
if: ${{ runner.os == 'macOS' }}
|
if: ${{ runner.os == 'macOS' }}
|
||||||
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
|
uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: actions/download-artifact@v5
|
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||||
with:
|
with:
|
||||||
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||||
|
|
||||||
- name: Make clio_tests executable
|
- name: Make clio_tests executable
|
||||||
shell: bash
|
|
||||||
run: chmod +x ./clio_tests
|
run: chmod +x ./clio_tests
|
||||||
|
|
||||||
- name: Run clio_tests (regular)
|
- name: Run clio_tests (regular)
|
||||||
@@ -68,11 +71,10 @@ jobs:
|
|||||||
|
|
||||||
- name: Run clio_tests (sanitizer errors ignored)
|
- name: Run clio_tests (sanitizer errors ignored)
|
||||||
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' }}
|
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' }}
|
||||||
run: ./.github/scripts/execute-tests-under-sanitizer ./clio_tests
|
run: ./.github/scripts/execute-tests-under-sanitizer.sh ./clio_tests
|
||||||
|
|
||||||
- name: Check for sanitizer report
|
- name: Check for sanitizer report
|
||||||
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' }}
|
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' }}
|
||||||
shell: bash
|
|
||||||
id: check_report
|
id: check_report
|
||||||
run: |
|
run: |
|
||||||
if ls .sanitizer-report/* 1> /dev/null 2>&1; then
|
if ls .sanitizer-report/* 1> /dev/null 2>&1; then
|
||||||
@@ -83,7 +85,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload sanitizer report
|
- name: Upload sanitizer report
|
||||||
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true' }}
|
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true' }}
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||||
with:
|
with:
|
||||||
name: sanitizer_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
name: sanitizer_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||||
path: .sanitizer-report/*
|
path: .sanitizer-report/*
|
||||||
@@ -122,13 +124,19 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Cleanup workspace
|
- name: Cleanup workspace
|
||||||
if: ${{ runner.os == 'macOS' }}
|
if: ${{ runner.os == 'macOS' }}
|
||||||
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
|
uses: XRPLF/actions/cleanup-workspace@cf0433aa74563aead044a1e395610c96d65a37cf
|
||||||
|
|
||||||
- name: Spin up scylladb
|
- name: Delete and start colima (macOS)
|
||||||
|
# This is a temporary workaround for colima issues on macOS runners
|
||||||
if: ${{ runner.os == 'macOS' }}
|
if: ${{ runner.os == 'macOS' }}
|
||||||
timeout-minutes: 3
|
|
||||||
run: |
|
run: |
|
||||||
docker rm --force scylladb || true
|
colima delete --force
|
||||||
|
colima start
|
||||||
|
|
||||||
|
- name: Spin up scylladb (macOS)
|
||||||
|
if: ${{ runner.os == 'macOS' }}
|
||||||
|
timeout-minutes: 1
|
||||||
|
run: |
|
||||||
docker run \
|
docker run \
|
||||||
--detach \
|
--detach \
|
||||||
--name scylladb \
|
--name scylladb \
|
||||||
@@ -140,11 +148,15 @@ jobs:
|
|||||||
--memory 16G \
|
--memory 16G \
|
||||||
scylladb/scylla
|
scylladb/scylla
|
||||||
|
|
||||||
|
- name: Wait for scylladb container to be healthy (macOS)
|
||||||
|
if: ${{ runner.os == 'macOS' }}
|
||||||
|
timeout-minutes: 1
|
||||||
|
run: |
|
||||||
until [ "$(docker inspect -f '{{.State.Health.Status}}' scylladb)" == "healthy" ]; do
|
until [ "$(docker inspect -f '{{.State.Health.Status}}' scylladb)" == "healthy" ]; do
|
||||||
sleep 5
|
sleep 1
|
||||||
done
|
done
|
||||||
|
|
||||||
- uses: actions/download-artifact@v5
|
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||||
with:
|
with:
|
||||||
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||||
|
|
||||||
|
|||||||
@@ -6,25 +6,29 @@ on:
|
|||||||
CODECOV_TOKEN:
|
CODECOV_TOKEN:
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
upload_report:
|
upload_report:
|
||||||
name: Upload report
|
name: Upload report
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Download report artifact
|
- name: Download report artifact
|
||||||
uses: actions/download-artifact@v5
|
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||||
with:
|
with:
|
||||||
name: coverage-report.xml
|
name: coverage-report.xml
|
||||||
path: build
|
path: build
|
||||||
|
|
||||||
- name: Upload coverage report
|
- name: Upload coverage report
|
||||||
if: ${{ hashFiles('build/coverage_report.xml') != '' }}
|
if: ${{ hashFiles('build/coverage_report.xml') != '' }}
|
||||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||||
with:
|
with:
|
||||||
files: build/coverage_report.xml
|
files: build/coverage_report.xml
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
|
.github/workflows/sanitizers.yml (7 lines changed, vendored)
@@ -15,7 +15,7 @@ on:
       - ".github/actions/**"
       - "!.github/actions/build-docker-image/**"
       - "!.github/actions/create-issue/**"
-      - .github/scripts/execute-tests-under-sanitizer
+      - .github/scripts/execute-tests-under-sanitizer.sh

       - CMakeLists.txt
       - conanfile.py
@@ -44,14 +44,13 @@ jobs:
    uses: ./.github/workflows/reusable-build-test.yml
    with:
      runs_on: heavy
-     container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
+     container: '{ "image": "ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696" }'
      download_ccache: false
      upload_ccache: false
      conan_profile: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
      build_type: ${{ matrix.build_type }}
      static: false
-     # Currently, both gcc.tsan and clang.tsan unit tests hang
-     run_unit_tests: ${{ matrix.sanitizer_ext != '.tsan' }}
+     run_unit_tests: true
      run_integration_tests: false
      upload_clio_server: false
      targets: clio_tests clio_integration_tests
.github/workflows/update-docker-ci.yml (40 lines changed, vendored)
@@ -33,6 +33,10 @@ env:
|
|||||||
GCC_MAJOR_VERSION: 15
|
GCC_MAJOR_VERSION: 15
|
||||||
GCC_VERSION: 15.2.0
|
GCC_VERSION: 15.2.0
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
repo:
|
repo:
|
||||||
name: Calculate repo name
|
name: Calculate repo name
|
||||||
@@ -52,11 +56,11 @@ jobs:
|
|||||||
needs: repo
|
needs: repo
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # v47.0.1
|
||||||
with:
|
with:
|
||||||
files: "docker/compilers/gcc/**"
|
files: "docker/compilers/gcc/**"
|
||||||
|
|
||||||
@@ -90,11 +94,11 @@ jobs:
|
|||||||
needs: repo
|
needs: repo
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # v47.0.1
|
||||||
with:
|
with:
|
||||||
files: "docker/compilers/gcc/**"
|
files: "docker/compilers/gcc/**"
|
||||||
|
|
||||||
@@ -128,16 +132,16 @@ jobs:
|
|||||||
needs: [repo, gcc-amd64, gcc-arm64]
|
needs: [repo, gcc-amd64, gcc-arm64]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # v47.0.1
|
||||||
with:
|
with:
|
||||||
files: "docker/compilers/gcc/**"
|
files: "docker/compilers/gcc/**"
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
if: ${{ github.event_name != 'pull_request' }}
|
if: ${{ github.event_name != 'pull_request' }}
|
||||||
@@ -179,11 +183,11 @@ jobs:
|
|||||||
needs: repo
|
needs: repo
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # v47.0.1
|
||||||
with:
|
with:
|
||||||
files: "docker/compilers/clang/**"
|
files: "docker/compilers/clang/**"
|
||||||
|
|
||||||
@@ -215,11 +219,11 @@ jobs:
|
|||||||
needs: [repo, gcc-merge]
|
needs: [repo, gcc-merge]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # v47.0.1
|
||||||
with:
|
with:
|
||||||
files: "docker/tools/**"
|
files: "docker/tools/**"
|
||||||
|
|
||||||
@@ -246,11 +250,11 @@ jobs:
|
|||||||
needs: [repo, gcc-merge]
|
needs: [repo, gcc-merge]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # v47.0.1
|
||||||
with:
|
with:
|
||||||
files: "docker/tools/**"
|
files: "docker/tools/**"
|
||||||
|
|
||||||
@@ -277,16 +281,16 @@ jobs:
|
|||||||
needs: [repo, tools-amd64, tools-arm64]
|
needs: [repo, tools-amd64, tools-arm64]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # v47.0.1
|
||||||
with:
|
with:
|
||||||
files: "docker/tools/**"
|
files: "docker/tools/**"
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
if: ${{ github.event_name != 'pull_request' }}
|
if: ${{ github.event_name != 'pull_request' }}
|
||||||
@@ -312,7 +316,7 @@ jobs:
|
|||||||
needs: [repo, tools-merge]
|
needs: [repo, tools-merge]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
- uses: ./.github/actions/build-docker-image
|
- uses: ./.github/actions/build-docker-image
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
@@ -334,7 +338,7 @@ jobs:
|
|||||||
needs: [repo, gcc-merge, clang, tools-merge]
|
needs: [repo, gcc-merge, clang, tools-merge]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
- uses: ./.github/actions/build-docker-image
|
- uses: ./.github/actions/build-docker-image
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
.github/workflows/upload-conan-deps.yml (19 lines changed, vendored)
@@ -22,6 +22,7 @@ on:
|
|||||||
|
|
||||||
- .github/actions/conan/action.yml
|
- .github/actions/conan/action.yml
|
||||||
- ".github/scripts/conan/**"
|
- ".github/scripts/conan/**"
|
||||||
|
- "!.github/scripts/conan/regenerate_lockfile.sh"
|
||||||
|
|
||||||
- conanfile.py
|
- conanfile.py
|
||||||
- conan.lock
|
- conan.lock
|
||||||
@@ -32,6 +33,7 @@ on:
|
|||||||
|
|
||||||
- .github/actions/conan/action.yml
|
- .github/actions/conan/action.yml
|
||||||
- ".github/scripts/conan/**"
|
- ".github/scripts/conan/**"
|
||||||
|
- "!.github/scripts/conan/regenerate_lockfile.sh"
|
||||||
|
|
||||||
- conanfile.py
|
- conanfile.py
|
||||||
- conan.lock
|
- conan.lock
|
||||||
@@ -40,13 +42,17 @@ concurrency:
|
|||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
generate-matrix:
|
generate-matrix:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Calculate conan matrix
|
- name: Calculate conan matrix
|
||||||
id: set-matrix
|
id: set-matrix
|
||||||
@@ -69,16 +75,15 @@ jobs:
|
|||||||
CONAN_PROFILE: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
|
CONAN_PROFILE: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
|
|
||||||
- name: Prepare runner
|
- name: Prepare runner
|
||||||
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
|
uses: XRPLF/actions/prepare-runner@e8d2d2a546a03e1d161dca52890705f3bc641215
|
||||||
with:
|
with:
|
||||||
disable_ccache: true
|
enable_ccache: false
|
||||||
|
|
||||||
- name: Setup conan on macOS
|
- name: Setup conan on macOS
|
||||||
if: ${{ runner.os == 'macOS' }}
|
if: ${{ runner.os == 'macOS' }}
|
||||||
shell: bash
|
|
||||||
run: ./.github/scripts/conan/init.sh
|
run: ./.github/scripts/conan/init.sh
|
||||||
|
|
||||||
- name: Show conan profile
|
- name: Show conan profile
|
||||||
@@ -99,4 +104,6 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload Conan packages
|
- name: Upload Conan packages
|
||||||
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule' }}
|
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule' }}
|
||||||
run: conan upload "*" -r=xrplf --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
|
env:
|
||||||
|
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
|
||||||
|
run: conan upload "*" -r=xrplf --confirm ${FORCE_OPTION}
|
||||||
|
.gitignore (1 line changed, vendored)
@@ -4,6 +4,7 @@
 .build
 .cache
 .vscode
+.zed
 .python-version
 .DS_Store
 .sanitizer-report
|||||||
@@ -11,7 +11,10 @@
|
|||||||
#
|
#
|
||||||
# See https://pre-commit.com for more information
|
# See https://pre-commit.com for more information
|
||||||
# See https://pre-commit.com/hooks.html for more hooks
|
# See https://pre-commit.com/hooks.html for more hooks
|
||||||
exclude: ^(docs/doxygen-awesome-theme/|conan\.lock$)
|
exclude: |
|
||||||
|
(?x)^(
|
||||||
|
docs/doxygen-awesome-theme/.*
|
||||||
|
)$
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
# `pre-commit sample-config` default hooks
|
# `pre-commit sample-config` default hooks
|
||||||
@@ -26,12 +29,12 @@ repos:
|
|||||||
|
|
||||||
# Autoformat: YAML, JSON, Markdown, etc.
|
# Autoformat: YAML, JSON, Markdown, etc.
|
||||||
- repo: https://github.com/rbubley/mirrors-prettier
|
- repo: https://github.com/rbubley/mirrors-prettier
|
||||||
rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
|
rev: 14abee445aea04b39069c19b4bd54efff6775819 # frozen: v3.7.4
|
||||||
hooks:
|
hooks:
|
||||||
- id: prettier
|
- id: prettier
|
||||||
|
|
||||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||||
rev: 192ad822316c3a22fb3d3cc8aa6eafa0b8488360 # frozen: v0.45.0
|
rev: 76b3d32d3f4b965e1d6425253c59407420ae2c43 # frozen: v0.47.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: markdownlint-fix
|
- id: markdownlint-fix
|
||||||
exclude: LICENSE.md
|
exclude: LICENSE.md
|
||||||
@@ -55,6 +58,17 @@ repos:
|
|||||||
--ignore-words=pre-commit-hooks/codespell_ignore.txt,
|
--ignore-words=pre-commit-hooks/codespell_ignore.txt,
|
||||||
]
|
]
|
||||||
|
|
||||||
|
- repo: https://github.com/psf/black-pre-commit-mirror
|
||||||
|
rev: 831207fd435b47aeffdf6af853097e64322b4d44 # frozen: 25.12.0
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
|
||||||
|
- repo: https://github.com/scop/pre-commit-shfmt
|
||||||
|
rev: 2a30809d16bc7a60d9b97353c797f42b510d3368 # frozen: v3.12.0-2
|
||||||
|
hooks:
|
||||||
|
- id: shfmt
|
||||||
|
args: ["-i", "4", "--write"]
|
||||||
|
|
||||||
# Running some C++ hooks before clang-format
|
# Running some C++ hooks before clang-format
|
||||||
# to ensure that the style is consistent.
|
# to ensure that the style is consistent.
|
||||||
- repo: local
|
- repo: local
|
||||||
@@ -80,7 +94,7 @@ repos:
|
|||||||
language: script
|
language: script
|
||||||
|
|
||||||
- repo: https://github.com/pre-commit/mirrors-clang-format
|
- repo: https://github.com/pre-commit/mirrors-clang-format
|
||||||
rev: 719856d56a62953b8d2839fb9e851f25c3cfeef8 # frozen: v21.1.2
|
rev: 75ca4ad908dc4a99f57921f29b7e6c1521e10b26 # frozen: v21.1.8
|
||||||
hooks:
|
hooks:
|
||||||
- id: clang-format
|
- id: clang-format
|
||||||
args: [--style=file]
|
args: [--style=file]
|
||||||
|
|||||||
@@ -75,10 +75,6 @@ if (san)
|
|||||||
endif ()
|
endif ()
|
||||||
target_compile_options(clio_options INTERFACE ${SAN_OPTIMIZATION_FLAG} ${SAN_FLAG} -fno-omit-frame-pointer)
|
target_compile_options(clio_options INTERFACE ${SAN_OPTIMIZATION_FLAG} ${SAN_FLAG} -fno-omit-frame-pointer)
|
||||||
|
|
||||||
target_compile_definitions(
|
|
||||||
clio_options INTERFACE $<$<STREQUAL:${san},address>:SANITIZER=ASAN> $<$<STREQUAL:${san},thread>:SANITIZER=TSAN>
|
|
||||||
$<$<STREQUAL:${san},memory>:SANITIZER=MSAN> $<$<STREQUAL:${san},undefined>:SANITIZER=UBSAN>
|
|
||||||
)
|
|
||||||
target_link_libraries(clio_options INTERFACE ${SAN_FLAG} ${SAN_LIB})
|
target_link_libraries(clio_options INTERFACE ${SAN_FLAG} ${SAN_LIB})
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
@@ -180,6 +180,7 @@ Existing maintainers can resign, or be subject to a vote for removal at the behe
 - [kuznetsss](https://github.com/kuznetsss) (Ripple)
 - [legleux](https://github.com/legleux) (Ripple)
 - [PeterChen13579](https://github.com/PeterChen13579) (Ripple)
+- [mathbunnyru](https://github.com/mathbunnyru) (Ripple)

 ### Honorable ex-Maintainers

@@ -34,7 +34,6 @@ Below are some useful docs to learn more about Clio.

 - [How to configure Clio and rippled](./docs/configure-clio.md)
 - [How to run Clio](./docs/run-clio.md)
-- [Logging](./docs/logging.md)
 - [Troubleshooting guide](./docs/trouble_shooting.md)

 **General reference material:**
@@ -9,10 +9,12 @@ target_sources(
     util/async/ExecutionContextBenchmarks.cpp
     # Logger
     util/log/LoggerBenchmark.cpp
+    # WorkQueue
+    rpc/WorkQueueBenchmarks.cpp
 )

 include(deps/gbench)

 target_include_directories(clio_benchmark PRIVATE .)
-target_link_libraries(clio_benchmark PUBLIC clio_util benchmark::benchmark_main spdlog::spdlog)
+target_link_libraries(clio_benchmark PRIVATE clio_rpc clio_util benchmark::benchmark_main spdlog::spdlog)
 set_target_properties(clio_benchmark PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})
benchmarks/rpc/WorkQueueBenchmarks.cpp (145 lines, new file)
@@ -0,0 +1,145 @@
|
|||||||
|
//------------------------------------------------------------------------------
|
||||||
|
/*
|
||||||
|
This file is part of clio: https://github.com/XRPLF/clio
|
||||||
|
Copyright (c) 2025, the clio developers.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
//==============================================================================
|
||||||
|
|
||||||
|
#include "rpc/WorkQueue.hpp"
|
||||||
|
#include "util/Assert.hpp"
|
||||||
|
#include "util/config/Array.hpp"
|
||||||
|
#include "util/config/ConfigConstraints.hpp"
|
||||||
|
#include "util/config/ConfigDefinition.hpp"
|
||||||
|
#include "util/config/ConfigValue.hpp"
|
||||||
|
#include "util/config/Types.hpp"
|
||||||
|
#include "util/log/Logger.hpp"
|
||||||
|
#include "util/prometheus/Prometheus.hpp"
|
||||||
|
|
||||||
|
#include <benchmark/benchmark.h>
|
||||||
|
#include <boost/asio/steady_timer.hpp>
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
#include <atomic>
|
||||||
|
#include <cassert>
|
||||||
|
#include <chrono>
|
||||||
|
#include <cstddef>
|
||||||
|
#include <cstdint>
|
||||||
|
#include <mutex>
|
||||||
|
#include <thread>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
using namespace rpc;
|
||||||
|
using namespace util::config;
|
||||||
|
|
||||||
|
namespace {
|
||||||
|
|
||||||
|
auto const kCONFIG = ClioConfigDefinition{
|
||||||
|
{"prometheus.compress_reply", ConfigValue{ConfigType::Boolean}.defaultValue(true)},
|
||||||
|
{"prometheus.enabled", ConfigValue{ConfigType::Boolean}.defaultValue(true)},
|
||||||
|
{"log.channels.[].channel", Array{ConfigValue{ConfigType::String}}},
|
||||||
|
{"log.channels.[].level", Array{ConfigValue{ConfigType::String}}},
|
||||||
|
{"log.level", ConfigValue{ConfigType::String}.defaultValue("info")},
|
||||||
|
{"log.format", ConfigValue{ConfigType::String}.defaultValue(R"(%Y-%m-%d %H:%M:%S.%f %^%3!l:%n%$ - %v)")},
|
||||||
|
{"log.is_async", ConfigValue{ConfigType::Boolean}.defaultValue(false)},
|
||||||
|
{"log.enable_console", ConfigValue{ConfigType::Boolean}.defaultValue(false)},
|
||||||
|
{"log.directory", ConfigValue{ConfigType::String}.optional()},
|
||||||
|
{"log.rotation_size", ConfigValue{ConfigType::Integer}.defaultValue(2048).withConstraint(gValidateUint32)},
|
||||||
|
{"log.directory_max_files", ConfigValue{ConfigType::Integer}.defaultValue(25).withConstraint(gValidateUint32)},
|
||||||
|
{"log.tag_style", ConfigValue{ConfigType::String}.defaultValue("none")},
|
||||||
|
};
|
||||||
|
|
||||||
|
// this should be a fixture but it did not work with Args very well
|
||||||
|
void
|
||||||
|
init()
|
||||||
|
{
|
||||||
|
static std::once_flag kONCE;
|
||||||
|
std::call_once(kONCE, [] {
|
||||||
|
PrometheusService::init(kCONFIG);
|
||||||
|
(void)util::LogService::init(kCONFIG);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace
|
||||||
|
|
||||||
|
static void
|
||||||
|
benchmarkWorkQueue(benchmark::State& state)
|
||||||
|
{
|
||||||
|
init();
|
||||||
|
|
||||||
|
auto const wqThreads = static_cast<uint32_t>(state.range(0));
|
||||||
|
auto const maxQueueSize = static_cast<uint32_t>(state.range(1));
|
||||||
|
auto const clientThreads = static_cast<uint32_t>(state.range(2));
|
||||||
|
auto const itemsPerClient = static_cast<uint32_t>(state.range(3));
|
||||||
|
auto const clientProcessingMs = static_cast<uint32_t>(state.range(4));
|
||||||
|
|
||||||
|
for (auto _ : state) {
|
||||||
|
std::atomic_size_t totalExecuted = 0uz;
|
||||||
|
std::atomic_size_t totalQueued = 0uz;
|
||||||
|
|
||||||
|
state.PauseTiming();
|
||||||
|
WorkQueue queue(wqThreads, maxQueueSize);
|
||||||
|
state.ResumeTiming();
|
||||||
|
|
||||||
|
std::vector<std::thread> threads;
|
||||||
|
threads.reserve(clientThreads);
|
||||||
|
|
||||||
|
for (auto t = 0uz; t < clientThreads; ++t) {
|
||||||
|
threads.emplace_back([&] {
|
||||||
|
for (auto i = 0uz; i < itemsPerClient; ++i) {
|
||||||
|
totalQueued += static_cast<std::size_t>(queue.postCoro(
|
||||||
|
[&clientProcessingMs, &totalExecuted](auto yield) {
|
||||||
|
++totalExecuted;
|
||||||
|
|
||||||
|
boost::asio::steady_timer timer(
|
||||||
|
yield.get_executor(), std::chrono::milliseconds{clientProcessingMs}
|
||||||
|
);
|
||||||
|
timer.async_wait(yield);
|
||||||
|
|
||||||
|
std::this_thread::sleep_for(std::chrono::microseconds{10});
|
||||||
|
},
|
||||||
|
/* isWhiteListed = */ false
|
||||||
|
));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (auto& t : threads)
|
||||||
|
t.join();
|
||||||
|
|
||||||
|
queue.stop();
|
||||||
|
|
||||||
|
ASSERT(totalExecuted == totalQueued, "Totals don't match");
|
||||||
|
ASSERT(totalQueued <= itemsPerClient * clientThreads, "Queued more than requested");
|
||||||
|
|
||||||
|
if (maxQueueSize == 0) {
|
||||||
|
ASSERT(totalQueued == itemsPerClient * clientThreads, "Queued exactly the expected amount");
|
||||||
|
} else {
|
||||||
|
ASSERT(totalQueued >= std::min(maxQueueSize, itemsPerClient * clientThreads), "Queued less than expected");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Usage example:
|
||||||
|
/*
|
||||||
|
./clio_benchmark \
|
||||||
|
--benchmark_repetitions=10 \
|
||||||
|
--benchmark_display_aggregates_only=true \
|
||||||
|
--benchmark_min_time=1x \
|
||||||
|
--benchmark_filter="WorkQueue"
|
||||||
|
*/
|
||||||
|
// TODO: figure out what happens on 1 thread
|
||||||
|
BENCHMARK(benchmarkWorkQueue)
|
||||||
|
->ArgsProduct({{2, 4, 8, 16}, {0, 5'000}, {4, 8, 16}, {1'000, 10'000}, {10, 100, 250}})
|
||||||
|
->Unit(benchmark::kMillisecond);
|
||||||
@@ -49,8 +49,6 @@ postprocessors = [
 ]
 # render body even when there are no releases to process
 # render_always = true
-# output file path
-output = "CHANGELOG.md"

 [git]
 # parse the commits based on https://www.conventionalcommits.org
|
|||||||
find_package(Git REQUIRED)
|
find_package(Git REQUIRED)
|
||||||
|
|
||||||
set(GIT_COMMAND describe --tags --exact-match)
|
if (DEFINED ENV{GITHUB_BRANCH_NAME})
|
||||||
execute_process(
|
set(GIT_BUILD_BRANCH $ENV{GITHUB_BRANCH_NAME})
|
||||||
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND}
|
set(GIT_COMMIT_HASH $ENV{GITHUB_HEAD_SHA})
|
||||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
|
||||||
OUTPUT_VARIABLE TAG
|
|
||||||
RESULT_VARIABLE RC
|
|
||||||
ERROR_VARIABLE ERR
|
|
||||||
OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_STRIP_TRAILING_WHITESPACE
|
|
||||||
)
|
|
||||||
|
|
||||||
if (RC EQUAL 0)
|
|
||||||
message(STATUS "Found tag '${TAG}' in git. Will use it as Clio version")
|
|
||||||
set(CLIO_VERSION "${TAG}")
|
|
||||||
set(DOC_CLIO_VERSION "${TAG}")
|
|
||||||
else ()
|
else ()
|
||||||
message(STATUS "Error finding tag in git: ${ERR}")
|
|
||||||
message(STATUS "Will use 'YYYYMMDDHMS-<branch>-<git-rev>' as Clio version")
|
|
||||||
|
|
||||||
set(GIT_COMMAND show -s --date=format:%Y%m%d%H%M%S --format=%cd)
|
|
||||||
execute_process(
|
|
||||||
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE DATE
|
|
||||||
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY
|
|
||||||
)
|
|
||||||
|
|
||||||
set(GIT_COMMAND branch --show-current)
|
set(GIT_COMMAND branch --show-current)
|
||||||
execute_process(
|
execute_process(
|
||||||
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE BRANCH
|
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE GIT_BUILD_BRANCH
|
||||||
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY
|
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY
|
||||||
)
|
)
|
||||||
|
|
||||||
set(GIT_COMMAND rev-parse --short HEAD)
|
set(GIT_COMMAND rev-parse HEAD)
|
||||||
execute_process(
|
execute_process(
|
||||||
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE REV
|
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE GIT_COMMIT_HASH
|
||||||
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY
|
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY
|
||||||
)
|
)
|
||||||
|
endif ()
|
||||||
|
|
||||||
set(CLIO_VERSION "${DATE}-${BRANCH}-${REV}")
|
execute_process(
|
||||||
|
COMMAND date +%Y%m%d%H%M%S WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE BUILD_DATE
|
||||||
|
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY
|
||||||
|
)
|
||||||
|
|
||||||
|
message(STATUS "Git branch: ${GIT_BUILD_BRANCH}")
|
||||||
|
message(STATUS "Git commit hash: ${GIT_COMMIT_HASH}")
|
||||||
|
message(STATUS "Build date: ${BUILD_DATE}")
|
||||||
|
|
||||||
|
if (DEFINED ENV{FORCE_CLIO_VERSION} AND NOT "$ENV{FORCE_CLIO_VERSION}" STREQUAL "")
|
||||||
|
message(STATUS "Using explicitly provided '${FORCE_CLIO_VERSION}' as Clio version")
|
||||||
|
|
||||||
|
set(CLIO_VERSION "$ENV{FORCE_CLIO_VERSION}")
|
||||||
|
set(DOC_CLIO_VERSION "$ENV{FORCE_CLIO_VERSION}")
|
||||||
|
else ()
|
||||||
|
message(STATUS "Using 'YYYYMMDDHMS-<branch>-<git short rev>' as Clio version")
|
||||||
|
|
||||||
|
string(SUBSTRING ${GIT_COMMIT_HASH} 0 7 GIT_COMMIT_HASH_SHORT)
|
||||||
|
|
||||||
|
set(CLIO_VERSION "${BUILD_DATE}-${GIT_BUILD_BRANCH}-${GIT_COMMIT_HASH_SHORT}")
|
||||||
set(DOC_CLIO_VERSION "develop")
|
set(DOC_CLIO_VERSION "develop")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
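For reference, a minimal sketch of how the rewritten version logic can be driven during configuration; the build directory name and the branch, commit, and version values are hypothetical, while the environment variable names come from the diff above:

# Illustrative only: the three ways Version.cmake now derives CLIO_VERSION.
cmake -B build   # default: <build date>-<git branch>-<short commit hash>

# CI can inject branch and commit instead of querying git:
GITHUB_BRANCH_NAME=develop GITHUB_HEAD_SHA=0123456789abcdef0123456789abcdef01234567 cmake -B build

# A release build can pin the version explicitly:
FORCE_CLIO_VERSION=2.6.0 cmake -B build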
cmake/install/clio.service.in (17 lines, new file)
@@ -0,0 +1,17 @@
+[Unit]
+Description=Clio XRPL API server
+Documentation=https://github.com/XRPLF/clio.git
+
+After=network-online.target
+Wants=network-online.target
+
+[Service]
+Type=simple
+ExecStart=@CLIO_INSTALL_DIR@/bin/clio_server @CLIO_INSTALL_DIR@/etc/config.json
+Restart=on-failure
+User=clio
+Group=clio
+LimitNOFILE=65536
+
+[Install]
+WantedBy=multi-user.target
||||||
@@ -11,3 +11,6 @@ file(READ docs/examples/config/example-config.json config)
 string(REGEX REPLACE "./clio_log" "/var/log/clio/" config "${config}")
 file(WRITE ${CMAKE_BINARY_DIR}/install-config.json "${config}")
 install(FILES ${CMAKE_BINARY_DIR}/install-config.json DESTINATION etc RENAME config.json)
+
+configure_file("${CMAKE_SOURCE_DIR}/cmake/install/clio.service.in" "${CMAKE_BINARY_DIR}/clio.service")
+install(FILES "${CMAKE_BINARY_DIR}/clio.service" DESTINATION /lib/systemd/system)
|
|||||||
@@ -10,37 +10,36 @@ CLIO_BIN="$CLIO_PREFIX/bin/${CLIO_EXECUTABLE}"
|
|||||||
CLIO_CONFIG="$CLIO_PREFIX/etc/config.json"
|
CLIO_CONFIG="$CLIO_PREFIX/etc/config.json"
|
||||||
|
|
||||||
case "$1" in
|
case "$1" in
|
||||||
configure)
|
configure)
|
||||||
if ! id -u "$USER_NAME" >/dev/null 2>&1; then
|
if ! id -u "$USER_NAME" >/dev/null 2>&1; then
|
||||||
# Users who should not have a home directory should have their home directory set to /nonexistent
|
# Users who should not have a home directory should have their home directory set to /nonexistent
|
||||||
# https://www.debian.org/doc/debian-policy/ch-opersys.html#non-existent-home-directories
|
# https://www.debian.org/doc/debian-policy/ch-opersys.html#non-existent-home-directories
|
||||||
useradd \
|
useradd \
|
||||||
--system \
|
--system \
|
||||||
--home-dir /nonexistent \
|
--home-dir /nonexistent \
|
||||||
--no-create-home \
|
--no-create-home \
|
||||||
--shell /usr/sbin/nologin \
|
--shell /usr/sbin/nologin \
|
||||||
--comment "system user for ${CLIO_EXECUTABLE}" \
|
--comment "system user for ${CLIO_EXECUTABLE}" \
|
||||||
--user-group \
|
--user-group \
|
||||||
${USER_NAME}
|
${USER_NAME}
|
||||||
fi
|
fi
|
||||||
|
|
||||||
install -d -o "$USER_NAME" -g "$GROUP_NAME" /var/log/clio
|
install -d -o "$USER_NAME" -g "$GROUP_NAME" /var/log/clio
|
||||||
|
|
||||||
if [ -f "$CLIO_CONFIG" ]; then
|
if [ -f "$CLIO_CONFIG" ]; then
|
||||||
chown "$USER_NAME:$GROUP_NAME" "$CLIO_CONFIG"
|
chown "$USER_NAME:$GROUP_NAME" "$CLIO_CONFIG"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
chown -R "$USER_NAME:$GROUP_NAME" "$CLIO_PREFIX"
|
chown -R "$USER_NAME:$GROUP_NAME" "$CLIO_PREFIX"
|
||||||
|
|
||||||
ln -sf "$CLIO_BIN" "/usr/bin/${CLIO_EXECUTABLE}"
|
ln -sf "$CLIO_BIN" "/usr/bin/${CLIO_EXECUTABLE}"
|
||||||
|
|
||||||
;;
|
;;
|
||||||
abort-upgrade|abort-remove|abort-deconfigure)
|
abort-upgrade | abort-remove | abort-deconfigure) ;;
|
||||||
;;
|
*)
|
||||||
*)
|
echo "postinst called with unknown argument \`$1'" >&2
|
||||||
echo "postinst called with unknown argument \`$1'" >&2
|
exit 1
|
||||||
exit 1
|
;;
|
||||||
;;
|
|
||||||
esac
|
esac
|
||||||
|
|
||||||
exit 0
|
exit 0
|
||||||
|
conan.lock (55 lines changed)
@@ -1,51 +1,52 @@
|
|||||||
{
|
{
|
||||||
"version": "0.5",
|
"version": "0.5",
|
||||||
"requires": [
|
"requires": [
|
||||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1765850150.075",
|
||||||
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
|
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1765850149.987",
|
||||||
"xrpl/2.6.1#973af2bf9631f239941dd9f5a100bb84%1759275059.342",
|
"xrpl/3.0.0#534d3f65a336109eee929b88962bae4e%1765375071.547",
|
||||||
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
|
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1765850149.926",
|
||||||
"spdlog/1.15.3#3ca0e9e6b83af4d0151e26541d140c86%1754401846.61",
|
"spdlog/1.17.0#bcbaaf7147bda6ad24ffbd1ac3d7142c%1767636069.964",
|
||||||
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
|
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1765850149.46",
|
||||||
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
|
"re2/20230301#ca3b241baec15bd31ea9187150e0b333%1765850148.103",
|
||||||
"rapidjson/cci.20220822#1b9d8c2256876a154172dc5cfbe447c6%1754325007.656",
|
"rapidjson/cci.20220822#1b9d8c2256876a154172dc5cfbe447c6%1754325007.656",
|
||||||
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
|
"protobuf/3.21.12#44ee56c0a6eea0c19aeeaca680370b88%1764175361.456",
|
||||||
"openssl/1.1.1w#a8f0792d7c5121b954578a7149d23e03%1756223730.729",
|
"openssl/1.1.1w#a8f0792d7c5121b954578a7149d23e03%1756223730.729",
|
||||||
"nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
|
"nudb/2.0.9#fb8dfd1a5557f5e0528114c2da17721e%1765850143.957",
|
||||||
"minizip/1.2.13#9e87d57804bd372d6d1e32b1871517a3%1754325004.374",
|
"minizip/1.2.13#9e87d57804bd372d6d1e32b1871517a3%1754325004.374",
|
||||||
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
|
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1765850143.914",
|
||||||
"libuv/1.46.0#dc28c1f653fa197f00db5b577a6f6011%1754325003.592",
|
"libuv/1.46.0#dc28c1f653fa197f00db5b577a6f6011%1754325003.592",
|
||||||
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
|
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1765842973.492",
|
||||||
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
|
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1765842973.03",
|
||||||
"libarchive/3.8.1#5cf685686322e906cb42706ab7e099a8%1756234256.696",
|
"libarchive/3.8.1#ffee18995c706e02bf96e7a2f7042e0d%1765850144.736",
|
||||||
"http_parser/2.9.4#98d91690d6fd021e9e624218a85d9d97%1754325001.385",
|
"http_parser/2.9.4#98d91690d6fd021e9e624218a85d9d97%1754325001.385",
|
||||||
"gtest/1.14.0#f8f0757a574a8dd747d16af62d6eb1b7%1754325000.842",
|
"gtest/1.17.0#5224b3b3ff3b4ce1133cbdd27d53ee7d%1755784855.585",
|
||||||
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1756234248.958",
|
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1756234248.958",
|
||||||
"fmt/11.2.0#579bb2cdf4a7607621beea4eb4651e0f%1754324999.086",
|
"fmt/12.1.0#50abab23274d56bb8f42c94b3b9a40c7%1763984116.926",
|
||||||
"doctest/2.4.11#a4211dfc329a16ba9f280f9574025659%1756234220.819",
|
"doctest/2.4.11#a4211dfc329a16ba9f280f9574025659%1756234220.819",
|
||||||
"date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
|
"date/3.0.4#862e11e80030356b53c2c38599ceb32b%1765850143.772",
|
||||||
"cassandra-cpp-driver/2.17.0#e50919efac8418c26be6671fd702540a%1754324997.363",
|
"cassandra-cpp-driver/2.17.0#bd3934138689482102c265d01288a316%1764175359.611",
|
||||||
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
|
"c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1765850144.336",
|
||||||
"bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1756234261.716",
|
"bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1765850143.837",
|
||||||
"boost/1.83.0#5d975011d65b51abb2d2f6eb8386b368%1754325043.336",
|
"boost/1.83.0#91d8b1572534d2c334d6790e3c34d0c1%1764175359.61",
|
||||||
"benchmark/1.9.4#ce4403f7a24d3e1f907cd9da4b678be4%1754578869.672",
|
"benchmark/1.9.4#ce4403f7a24d3e1f907cd9da4b678be4%1754578869.672",
|
||||||
"abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1756234220.907"
|
"abseil/20230802.1#90ba607d4ee8fb5fb157c3db540671fc%1764175359.429"
|
||||||
],
|
],
|
||||||
"build_requires": [
|
"build_requires": [
|
||||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1765850150.075",
|
||||||
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
|
"protobuf/3.21.12#44ee56c0a6eea0c19aeeaca680370b88%1764175361.456",
|
||||||
"cmake/3.31.8#dde3bde00bb843687e55aea5afa0e220%1756234232.89",
|
"cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1765850153.937",
|
||||||
"b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28"
|
"cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1765850153.479",
|
||||||
|
"b2/5.3.3#107c15377719889654eb9a162a673975%1765850144.355"
|
||||||
],
|
],
|
||||||
"python_requires": [],
|
"python_requires": [],
|
||||||
"overrides": {
|
"overrides": {
|
||||||
"boost/1.83.0": [
|
"boost/1.83.0": [
|
||||||
null,
|
null,
|
||||||
"boost/1.83.0#5d975011d65b51abb2d2f6eb8386b368"
|
"boost/1.83.0#91d8b1572534d2c334d6790e3c34d0c1"
|
||||||
],
|
],
|
||||||
"protobuf/3.21.12": [
|
"protobuf/3.21.12": [
|
||||||
null,
|
null,
|
||||||
"protobuf/3.21.12"
|
"protobuf/3.21.12#44ee56c0a6eea0c19aeeaca680370b88"
|
||||||
],
|
],
|
||||||
"lz4/1.9.4": [
|
"lz4/1.9.4": [
|
||||||
"lz4/1.10.0"
|
"lz4/1.10.0"
|
||||||
|
|||||||
conanfile.py
@@ -3,62 +3,60 @@ from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
 
 
 class ClioConan(ConanFile):
-name = 'clio'
+name = "clio"
-license = 'ISC'
+license = "ISC"
-author = 'Alex Kremer <akremer@ripple.com>, John Freeman <jfreeman@ripple.com>, Ayaz Salikhov <asalikhov@ripple.com>'
+author = "Alex Kremer <akremer@ripple.com>, John Freeman <jfreeman@ripple.com>, Ayaz Salikhov <asalikhov@ripple.com>"
-url = 'https://github.com/xrplf/clio'
+url = "https://github.com/xrplf/clio"
-description = 'Clio RPC server'
+description = "Clio RPC server"
-settings = 'os', 'compiler', 'build_type', 'arch'
+settings = "os", "compiler", "build_type", "arch"
 options = {}
 
 requires = [
-'boost/1.83.0',
+"boost/1.83.0",
-'cassandra-cpp-driver/2.17.0',
+"cassandra-cpp-driver/2.17.0",
-'fmt/11.2.0',
+"fmt/12.1.0",
-'protobuf/3.21.12',
+"grpc/1.50.1",
-'grpc/1.50.1',
+"libbacktrace/cci.20210118",
-'openssl/1.1.1w',
+"openssl/1.1.1w",
-'xrpl/2.6.1',
+"protobuf/3.21.12",
-'zlib/1.3.1',
+"spdlog/1.17.0",
-'libbacktrace/cci.20210118',
+"xrpl/3.0.0",
-'spdlog/1.15.3',
+"zlib/1.3.1",
 ]
 
 default_options = {
-'xrpl/*:tests': False,
+"cassandra-cpp-driver/*:shared": False,
-'xrpl/*:rocksdb': False,
+"date/*:header_only": True,
-'cassandra-cpp-driver/*:shared': False,
+"grpc/*:secure": True,
-'date/*:header_only': True,
+"grpc/*:shared": False,
-'grpc/*:shared': False,
+"gtest/*:no_main": True,
-'grpc/*:secure': True,
+"libpq/*:shared": False,
-'libpq/*:shared': False,
+"lz4/*:shared": False,
-'lz4/*:shared': False,
+"openssl/*:shared": False,
-'openssl/*:shared': False,
+"protobuf/*:shared": False,
-'protobuf/*:shared': False,
+"protobuf/*:with_zlib": True,
-'protobuf/*:with_zlib': True,
+"snappy/*:shared": False,
-'snappy/*:shared': False,
+"xrpl/*:rocksdb": False,
-'gtest/*:no_main': True,
+"xrpl/*:tests": False,
 }
 
-exports_sources = (
-'CMakeLists.txt', 'cmake/*', 'src/*'
-)
+exports_sources = ("CMakeLists.txt", "cmake/*", "src/*")
 
 def requirements(self):
-self.requires('gtest/1.14.0')
+self.requires("gtest/1.17.0")
-self.requires('benchmark/1.9.4')
+self.requires("benchmark/1.9.4")
 
 def configure(self):
-if self.settings.compiler == 'apple-clang':
+if self.settings.compiler == "apple-clang":
-self.options['boost'].visibility = 'global'
+self.options["boost"].visibility = "global"
 
 def layout(self):
 cmake_layout(self)
 # Fix this setting to follow the default introduced in Conan 1.48
 # to align with our build instructions.
-self.folders.generators = 'build/generators'
+self.folders.generators = "build/generators"
 
-generators = 'CMakeDeps'
+generators = "CMakeDeps"
 
 def generate(self):
 tc = CMakeToolchain(self)
@@ -36,7 +36,6 @@ RUN apt-get update \
 libmpfr-dev \
 libncurses-dev \
 make \
-ninja-build \
 wget \
 zip \
 && apt-get clean \
@@ -55,8 +54,11 @@ RUN pip install -q --no-cache-dir \
 # lxml 6.0.0 is not compatible with our image
 'lxml<6.0.0' \
 cmake \
-conan==2.20.1 \
+conan==2.24.0 \
-gcovr
+gcovr \
+# We're adding pre-commit to this image as well,
+# because clang-tidy workflow requires it
+pre-commit
 
 # Install LLVM tools
 ARG LLVM_TOOLS_VERSION=20
@@ -104,6 +106,7 @@ COPY --from=clio-tools \
 /usr/local/bin/git-cliff \
 /usr/local/bin/gh \
 /usr/local/bin/gdb \
+/usr/local/bin/ninja \
 /usr/local/bin/
 
 WORKDIR /root
@@ -5,17 +5,18 @@ It is used in [Clio Github Actions](https://github.com/XRPLF/clio/actions) but c
 
 The image is based on Ubuntu 20.04 and contains:
 
-- ccache 4.11.3
+- ccache 4.12.2
 - Clang 19
 - ClangBuildAnalyzer 1.6.0
-- Conan 2.20.1
+- Conan 2.24.0
-- Doxygen 1.14
+- Doxygen 1.16.1
 - GCC 15.2.0
-- GDB 16.3
+- GDB 17.1
-- gh 2.74
+- gh 2.83.2
-- git-cliff 2.9.1
+- git-cliff 2.11.0
-- mold 2.40.1
+- mold 2.40.4
-- Python 3.13
+- Ninja 1.13.2
+- Python 3.8
 - and some other useful tools
 
 Conan is set up to build Clio without any additional steps.
@@ -3,6 +3,13 @@
 {% set sanitizer_opt_map = {"asan": "address", "tsan": "thread", "ubsan": "undefined"} %}
 {% set sanitizer = sanitizer_opt_map[sani] %}
 
+{% set sanitizer_b2_flags_map = {
+"address": "context-impl=ucontext address-sanitizer=norecover",
+"thread": "context-impl=ucontext thread-sanitizer=norecover",
+"undefined": "undefined-sanitizer=norecover"
+} %}
+{% set sanitizer_b2_flags_str = sanitizer_b2_flags_map[sanitizer] %}
+
 {% set sanitizer_build_flags_str = "-fsanitize=" ~ sanitizer ~ " -g -O1 -fno-omit-frame-pointer" %}
 {% set sanitizer_build_flags = sanitizer_build_flags_str.split(' ') %}
 {% set sanitizer_link_flags_str = "-fsanitize=" ~ sanitizer %}
@@ -11,7 +18,8 @@
 include({{ compiler }})
 
 [options]
-boost/*:extra_b2_flags="cxxflags=\"{{ sanitizer_build_flags_str }}\" linkflags=\"{{ sanitizer_link_flags_str }}\""
+boost/*:extra_b2_flags="{{ sanitizer_b2_flags_str }}"
+boost/*:without_context=False
 boost/*:without_stacktrace=True
 
 [conf]
@@ -20,4 +28,10 @@ tools.build:cxxflags+={{ sanitizer_build_flags }}
 tools.build:exelinkflags+={{ sanitizer_link_flags }}
 tools.build:sharedlinkflags+={{ sanitizer_link_flags }}
 
-tools.info.package_id:confs+=["tools.build:cflags", "tools.build:cxxflags", "tools.build:exelinkflags", "tools.build:sharedlinkflags"]
+{% if sanitizer == "address" %}
+tools.build:defines+=["BOOST_USE_ASAN", "BOOST_USE_UCONTEXT"]
+{% elif sanitizer == "thread" %}
+tools.build:defines+=["BOOST_USE_TSAN", "BOOST_USE_UCONTEXT"]
+{% endif %}
+
+tools.info.package_id:confs+=["tools.build:cflags", "tools.build:cxxflags", "tools.build:exelinkflags", "tools.build:sharedlinkflags", "tools.build:defines"]
@@ -8,7 +8,7 @@ ARG UBUNTU_VERSION
 
 ARG GCC_MAJOR_VERSION
 
-ARG BUILD_VERSION=1
+ARG BUILD_VERSION=0
 
 ARG DEBIAN_FRONTEND=noninteractive
 ARG TARGETARCH
@@ -34,6 +34,7 @@ RUN wget --progress=dot:giga https://gcc.gnu.org/pub/gcc/releases/gcc-$GCC_VERSI
 WORKDIR /gcc-$GCC_VERSION
 RUN ./contrib/download_prerequisites
 
+# hadolint ignore=DL3059
 RUN mkdir /gcc-build
 WORKDIR /gcc-build
 RUN /gcc-$GCC_VERSION/configure \
@@ -1,6 +1,6 @@
 services:
 clio_develop:
-image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
+image: ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696
 volumes:
 - clio_develop_conan_data:/root/.conan2/p
 - clio_develop_ccache:/root/.ccache
@@ -2,7 +2,7 @@
 
 script_dir=$(dirname $0)
 
-pushd $script_dir > /dev/null
+pushd $script_dir >/dev/null
 
 function start_container {
 if [ -z "$(docker ps -q -f name=clio_develop)" ]; then
@@ -41,21 +41,26 @@ EOF
 }
 
 case $1 in
-
-h|--help)
+-h | --help)
-print_help ;;
+print_help
+;;
 
--t|--terminal)
+-t | --terminal)
-open_terminal ;;
+open_terminal
+;;
 
--s|--stop)
+-s | --stop)
-stop_container ;;
+stop_container
+;;
 
 -*)
 echo "Unknown option: $1"
-print_help ;;
+print_help
+;;
 
 *)
-run "$@" ;;
+run "$@"
+;;
 esac
 
-popd > /dev/null
+popd >/dev/null
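As an illustrative aside (not part of the patch): the case statement above implies the following usage of the develop helper script. The script path `./docker/develop.sh` and the final pass-through example are assumptions for illustration only.

```bash
./docker/develop.sh --help            # print usage
./docker/develop.sh -t                # open a terminal in the clio_develop container
./docker/develop.sh -s                # stop the clio_develop container
./docker/develop.sh cmake --version   # any other arguments are run inside the container
```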
@@ -8,11 +8,10 @@ ARG TARGETARCH
 
 SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 
-ARG BUILD_VERSION=2
+ARG BUILD_VERSION=0
 
 RUN apt-get update \
 && apt-get install -y --no-install-recommends --no-install-suggests \
-ninja-build \
 python3 \
 python3-pip \
 software-properties-common \
@@ -24,7 +23,16 @@ RUN apt-get update \
 
 WORKDIR /tmp
 
-ARG MOLD_VERSION=2.40.1
+ARG NINJA_VERSION=1.13.2
+
+RUN wget --progress=dot:giga "https://github.com/ninja-build/ninja/archive/refs/tags/v${NINJA_VERSION}.tar.gz" \
+&& tar xf "v${NINJA_VERSION}.tar.gz" \
+&& cd "ninja-${NINJA_VERSION}" \
+&& ./configure.py --bootstrap \
+&& mv ninja /usr/local/bin/ninja \
+&& rm -rf /tmp/* /var/tmp/*
+
+ARG MOLD_VERSION=2.40.4
 RUN wget --progress=dot:giga "https://github.com/rui314/mold/archive/refs/tags/v${MOLD_VERSION}.tar.gz" \
 && tar xf "v${MOLD_VERSION}.tar.gz" \
 && cd "mold-${MOLD_VERSION}" \
@@ -34,7 +42,7 @@ RUN wget --progress=dot:giga "https://github.com/rui314/mold/archive/refs/tags/v
 && ninja install \
 && rm -rf /tmp/* /var/tmp/*
 
-ARG CCACHE_VERSION=4.11.3
+ARG CCACHE_VERSION=4.12.2
 RUN wget --progress=dot:giga "https://github.com/ccache/ccache/releases/download/v${CCACHE_VERSION}/ccache-${CCACHE_VERSION}.tar.gz" \
 && tar xf "ccache-${CCACHE_VERSION}.tar.gz" \
 && cd "ccache-${CCACHE_VERSION}" \
@@ -51,7 +59,7 @@ RUN apt-get update \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*
 
-ARG DOXYGEN_VERSION=1.14.0
+ARG DOXYGEN_VERSION=1.16.1
 RUN wget --progress=dot:giga "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
 && tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
 && cd "doxygen-${DOXYGEN_VERSION}" \
@@ -71,13 +79,13 @@ RUN wget --progress=dot:giga "https://github.com/aras-p/ClangBuildAnalyzer/archi
 && ninja install \
 && rm -rf /tmp/* /var/tmp/*
 
-ARG GIT_CLIFF_VERSION=2.9.1
+ARG GIT_CLIFF_VERSION=2.11.0
 RUN wget --progress=dot:giga "https://github.com/orhun/git-cliff/releases/download/v${GIT_CLIFF_VERSION}/git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz" \
 && tar xf git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz \
 && mv git-cliff-${GIT_CLIFF_VERSION}/git-cliff /usr/local/bin/git-cliff \
 && rm -rf /tmp/* /var/tmp/*
 
-ARG GH_VERSION=2.74.0
+ARG GH_VERSION=2.83.2
 RUN wget --progress=dot:giga "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz" \
 && tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
 && mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/local/bin/gh \
@@ -92,7 +100,7 @@ RUN apt-get update \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*
 
-ARG GDB_VERSION=16.3
+ARG GDB_VERSION=17.1
 RUN wget --progress=dot:giga "https://sourceware.org/pub/gdb/releases/gdb-${GDB_VERSION}.tar.gz" \
 && tar xf "gdb-${GDB_VERSION}.tar.gz" \
 && cd "gdb-${GDB_VERSION}" \
@@ -97,30 +97,14 @@ Now you should be able to download the prebuilt dependencies (including `xrpl` p
 
 #### Conan lockfile
 
-To achieve reproducible dependencies, we use [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html).
+To achieve reproducible dependencies, we use a [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html).
 
 The `conan.lock` file in the repository contains a "snapshot" of the current dependencies.
 It is implicitly used when running `conan` commands, you don't need to specify it.
 
 You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.
 
-> [!NOTE]
-> Conan uses local cache by default when creating a lockfile.
->
-> To ensure, that lockfile creation works the same way on all developer machines, you should clear the local cache before creating a new lockfile.
-
-To create a new lockfile, run the following commands in the repository root:
-
-```bash
-conan remove '*' --confirm
-rm conan.lock
-# This ensure that xrplf remote is the first to be consulted
-conan remote add --force --index 0 xrplf https://conan.ripplex.io
-conan lock create .
-```
-
-> [!NOTE]
-> If some dependencies are exclusive for some OS, you may need to run the last command for them adding `--profile:all <PROFILE>`.
+To update a lockfile, run from the repository root: ./.github/scripts/conan/regenerate_lockfile.sh
 
 ## Building Clio
 
@@ -191,7 +175,7 @@ Open the `index.html` file in your browser to see the documentation pages.
 It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
 
 ```sh
-docker run -it ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
+docker run -it ghcr.io/xrplf/clio-ci:14342e087ceb8b593027198bf9ef06a43833c696
 git clone https://github.com/XRPLF/clio
 cd clio
 ```
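As an illustrative aside (not part of the patch): the documentation hunk above replaces the manual lockfile commands with a call to ./.github/scripts/conan/regenerate_lockfile.sh. A rough sketch of the equivalent steps, taken from the commands removed in that hunk, is shown below; the actual script may differ.

```bash
# Clear the local Conan cache so lockfile creation behaves the same on every machine.
conan remove '*' --confirm
rm conan.lock

# Make sure the xrplf remote is consulted first, then regenerate the lockfile.
conan remote add --force --index 0 xrplf https://conan.ripplex.io
conan lock create .
```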
@@ -293,7 +293,7 @@ This document provides a list of all available Clio configuration properties in
 
 - **Required**: True
 - **Type**: int
-- **Default value**: `1`
+- **Default value**: `1000`
 - **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
 - **Description**: The maximum size of the server's request queue. If set to `0`, this means there is no queue size limit.
 
@@ -391,7 +391,7 @@ This document provides a list of all available Clio configuration properties in
 - **Type**: double
 - **Default value**: `10`
 - **Constraints**: The value must be a positive double number.
-- **Description**: The number of milliseconds the server waits to shutdown gracefully. If Clio does not shutdown gracefully after the specified value, it will be killed instead.
+- **Description**: The number of seconds the server waits to shutdown gracefully. If Clio does not shutdown gracefully after the specified value, it will be killed instead.
 
 ### cache.num_diffs
 
@@ -441,6 +441,30 @@ This document provides a list of all available Clio configuration properties in
 - **Constraints**: The value must be one of the following: `sync`, `async`, `none`.
 - **Description**: The strategy used for Cache loading.
 
+### cache.file.path
+
+- **Required**: False
+- **Type**: string
+- **Default value**: None
+- **Constraints**: None
+- **Description**: The path to a file where cache will be saved to on shutdown and loaded from on startup. If the file couldn't be read Clio will load cache as usual (from DB or from rippled).
+
+### cache.file.max_sequence_age
+
+- **Required**: True
+- **Type**: int
+- **Default value**: `5000`
+- **Constraints**: None
+- **Description**: Max allowed difference between the latest sequence in DB and in cache file. If the cache file is too old (contains too low latest sequence) Clio will reject using it.
+
+### cache.file.async_save
+
+- **Required**: True
+- **Type**: boolean
+- **Default value**: `False`
+- **Constraints**: None
+- **Description**: When false, Clio waits for cache saving to finish before shutting down. When true, cache saving runs in parallel with other shutdown operations.
+
 ### log.channels.[].channel
 
 - **Required**: False
@@ -61,7 +61,7 @@
 "ip": "0.0.0.0",
 "port": 51233,
 // Max number of requests to queue up before rejecting further requests.
-// Defaults to 0, which disables the limit.
+// Defaults to 1000 (use 0 to make the queue unbound).
 "max_queue_size": 500,
 // If request contains header with authorization, Clio will check if it matches the prefix 'Password ' + this value's sha256 hash
 // If matches, the request will be considered as admin request
@@ -137,7 +137,11 @@
 // "num_cursors_from_account": 3200, // Read the cursors from the account table until we have enough cursors to partition the ledger to load concurrently.
 "num_markers": 48, // The number of markers is the number of coroutines to load the cache concurrently.
 "page_fetch_size": 512, // The number of rows to load for each page.
-"load": "async" // "sync" to load cache synchronously or "async" to load cache asynchronously or "none"/"no" to turn off the cache.
+"load": "async", // "sync" to load cache synchronously or "async" to load cache asynchronously or "none"/"no" to turn off the cache.
+"file": {
+"path": "./cache.bin",
+"max_sequence_age": 5000
+}
 },
 "prometheus": {
 "enabled": true,
@@ -45,7 +45,7 @@ if [[ "1.14.0" > "$version" ]]; then
 
 ERROR
 -----------------------------------------------------------------------------
-A minimum of version 1.14 of `which doxygen` is required.
+A minimum of version 1.14 of $(which doxygen) is required.
 Your version is $version. Please upgrade it.
 
 Your changes may fail CI checks.
@@ -55,26 +55,26 @@ EOF
 exit 0
 fi
 
-mkdir -p ${DOCDIR} > /dev/null 2>&1
+mkdir -p ${DOCDIR} >/dev/null 2>&1
-pushd ${DOCDIR} > /dev/null 2>&1
+pushd ${DOCDIR} >/dev/null 2>&1
 
-cat ${ROOT}/docs/Doxyfile | \
+cat ${ROOT}/docs/Doxyfile |
 sed \
 -e "s/\${LINT}/YES/" \
 -e "s/\${WARN_AS_ERROR}/NO/" \
 -e "s!\${SOURCE}!${ROOT}!" \
 -e "s/\${USE_DOT}/NO/" \
-
-e "s/\${EXCLUDES}/impl/" \
+-e "s/\${EXCLUDES}/impl/" |
-| ${DOXYGEN} - 2> ${TMPFILE} 1> /dev/null
+${DOXYGEN} - 2>${TMPFILE} 1>/dev/null
 
 # We don't want to check for default values and typedefs as well as for member variables
-OUT=$(cat ${TMPFILE} \
+OUT=$(cat ${TMPFILE} |
-| grep -v "=default" \
+grep -v "=default" |
-| grep -v "\(variable\)" \
+grep -v "\(variable\)" |
-| grep -v "\(typedef\)")
+grep -v "\(typedef\)")
 
-rm -rf ${TMPFILE} > /dev/null 2>&1
+rm -rf ${TMPFILE} >/dev/null 2>&1
-popd > /dev/null 2>&1
+popd >/dev/null 2>&1
 
 if [[ ! -z "$OUT" ]]; then
 cat <<EOF
@@ -23,10 +23,10 @@ fix_includes() {
 file_path_fixed="${file_path}.tmp.fixed"
 
 # Make all includes to be <...> style
-sed -E 's|#include "(.*)"|#include <\1>|g' "$file_path" > "$file_path_all_global"
+sed -E 's|#include "(.*)"|#include <\1>|g' "$file_path" >"$file_path_all_global"
 
 # Make local includes to be "..." style
-sed -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g" "$file_path_all_global" > "$file_path_fixed"
+sed -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g" "$file_path_all_global" >"$file_path_fixed"
 rm "$file_path_all_global"
 
 # Check if the temporary file is different from the original file
@@ -4,7 +4,6 @@ import argparse
 import re
 from pathlib import Path
 
-
 PATTERN = r'R"JSON\((.*?)\)JSON"'
 
 
@@ -40,16 +39,22 @@ def fix_colon_spacing(cpp_content: str) -> str:
 raw_json = match.group(1)
 raw_json = re.sub(r'":\n\s*(\[|\{)', r'": \1', raw_json)
 return f'R"JSON({raw_json})JSON"'
 
 return re.sub(PATTERN, replace_json, cpp_content, flags=re.DOTALL)
 
 
 def fix_indentation(cpp_content: str) -> str:
+if "JSON(" not in cpp_content:
+return cpp_content
+
 lines = cpp_content.splitlines()
 
+ends_with_newline = cpp_content.endswith("\n")
+
 def find_indentation(line: str) -> int:
 return len(line) - len(line.lstrip())
 
-for (line_num, (line, next_line)) in enumerate(zip(lines[:-1], lines[1:])):
+for line_num, (line, next_line) in enumerate(zip(lines[:-1], lines[1:])):
 if "JSON(" in line and ")JSON" not in line:
 indent = find_indentation(line)
 next_indent = find_indentation(next_line)
@@ -64,9 +69,17 @@ def fix_indentation(cpp_content: str) -> str:
 if ")JSON" in lines[i]:
 lines[i] = " " * indent + lines[i].lstrip()
 break
-lines[i] = lines[i][by_how_much:] if by_how_much > 0 else " " * (-by_how_much) + lines[i]
+lines[i] = (
+lines[i][by_how_much:]
+if by_how_much > 0
+else " " * (-by_how_much) + lines[i]
+)
 
-return "\n".join(lines) + "\n"
+result = "\n".join(lines)
+
+if ends_with_newline:
+result += "\n"
+return result
 
 
 def process_file(file_path: Path, dry_run: bool) -> bool:
@@ -4,7 +4,7 @@
 #
 set -e -o pipefail
 
-if ! command -v gofmt &> /dev/null ; then
+if ! command -v gofmt &>/dev/null; then
 echo "gofmt not installed or available in the PATH" >&2
 exit 1
 fi
@@ -1,5 +1,4 @@
-#!/bin/sh
+#!/bin/bash
 
-
 # git for-each-ref refs/tags # see which tags are annotated and which are lightweight. Annotated tags are "tag" objects.
 # # Set these so your commits and tags are always signed
@@ -7,7 +6,7 @@
 # git config tag.gpgsign true
 
 verify_commit_signed() {
-if git verify-commit HEAD &> /dev/null; then
+if git verify-commit HEAD &>/dev/null; then
 :
 # echo "HEAD commit seems signed..."
 else
@@ -17,7 +16,7 @@ verify_commit_signed() {
 }
 
 verify_tag() {
-if git describe --exact-match --tags HEAD &> /dev/null; then
+if git describe --exact-match --tags HEAD &>/dev/null; then
 : # You might be ok to push
 # echo "Tag is annotated."
 return 0
@@ -28,7 +27,7 @@ verify_tag() {
 }
 
 verify_tag_signed() {
-if git verify-tag "$version" &> /dev/null ; then
+if git verify-tag "$version" &>/dev/null; then
 : # ok, I guess we'll let you push
 # echo "Tag appears signed"
 return 0
@@ -40,11 +39,11 @@ verify_tag_signed() {
 }
 
 # Check some things if we're pushing a branch called "release/"
-if echo "$PRE_COMMIT_REMOTE_BRANCH" | grep ^refs\/heads\/release\/ &> /dev/null ; then
+if echo "$PRE_COMMIT_REMOTE_BRANCH" | grep ^refs\/heads\/release\/ &>/dev/null; then
 version=$(git tag --points-at HEAD)
 echo "Looks like you're trying to push a $version release..."
 echo "Making sure you've signed and tagged it."
-if verify_commit_signed && verify_tag && verify_tag_signed ; then
+if verify_commit_signed && verify_tag && verify_tag_signed; then
 : # Ok, I guess you can push
 else
 exit 1
@@ -2,7 +2,6 @@ add_subdirectory(util)
 add_subdirectory(data)
 add_subdirectory(cluster)
 add_subdirectory(etl)
-add_subdirectory(etlng)
 add_subdirectory(feed)
 add_subdirectory(rpc)
 add_subdirectory(web)
@@ -5,10 +5,9 @@ target_link_libraries(
 clio_app
 PUBLIC clio_cluster
 clio_etl
-clio_etlng
 clio_feed
-clio_web
-clio_rpc
 clio_migration
+clio_rpc
+clio_web
 PRIVATE Boost::program_options
 )
@@ -77,7 +77,10 @@ CliArgs::parse(int argc, char const* argv[])
 }
 
 if (parsed.contains("version")) {
-std::cout << util::build::getClioFullVersionString() << '\n';
+std::cout << util::build::getClioFullVersionString() << '\n'
+<< "Git commit hash: " << util::build::getGitCommitHash() << '\n'
+<< "Git build branch: " << util::build::getGitBuildBranch() << '\n'
+<< "Build date: " << util::build::getBuildDate() << '\n';
 return Action{Action::Exit{EXIT_SUCCESS}};
 }
 
@@ -25,11 +25,12 @@
 #include "data/AmendmentCenter.hpp"
 #include "data/BackendFactory.hpp"
 #include "data/LedgerCache.hpp"
+#include "data/LedgerCacheSaver.hpp"
 #include "etl/ETLService.hpp"
 #include "etl/LoadBalancer.hpp"
 #include "etl/NetworkValidatedLedgers.hpp"
-#include "etlng/LoadBalancer.hpp"
+#include "etl/SystemState.hpp"
-#include "etlng/LoadBalancerInterface.hpp"
+#include "etl/WriterState.hpp"
 #include "feed/SubscriptionManager.hpp"
 #include "migration/MigrationInspectorFactory.hpp"
 #include "rpc/Counters.hpp"
@@ -57,6 +58,7 @@
 #include <cstdlib>
 #include <memory>
 #include <optional>
+#include <string>
 #include <thread>
 #include <utility>
 #include <vector>
@@ -91,6 +93,7 @@ ClioApplication::ClioApplication(util::config::ClioConfigDefinition const& confi
 {
 LOG(util::LogService::info()) << "Clio version: " << util::build::getClioFullVersionString();
 signalsHandler_.subscribeToStop([this]() { appStopper_.stop(); });
+appStopper_.setOnComplete([this]() { signalsHandler_.notifyGracefulShutdownComplete(); });
 }
 
 int
@@ -99,25 +102,32 @@ ClioApplication::run(bool const useNgWebServer)
 auto const threads = config_.get<uint16_t>("io_threads");
 LOG(util::LogService::info()) << "Number of io threads = " << threads;
 
+// Similarly we need a context to run ETL on
+// In the future we can remove the raw ioc and use ctx instead
+// This context should be above ioc because its reference is getting into tasks inside ioc
+util::async::CoroExecutionContext ctx{threads};
+
 // IO context to handle all incoming requests, as well as other things.
 // This is not the only io context in the application.
 boost::asio::io_context ioc{threads};
 
-// Similarly we need a context to run ETLng on
-// In the future we can remove the raw ioc and use ctx instead
-util::async::CoroExecutionContext ctx{threads};
-
 // Rate limiter, to prevent abuse
 auto whitelistHandler = web::dosguard::WhitelistHandler{config_};
 auto const dosguardWeights = web::dosguard::Weights::make(config_);
 auto dosGuard = web::dosguard::DOSGuard{config_, whitelistHandler, dosguardWeights};
 auto sweepHandler = web::dosguard::IntervalSweepHandler{config_, ioc, dosGuard};
 
 auto cache = data::LedgerCache{};
+auto cacheSaver = data::LedgerCacheSaver{config_, cache};
 
 // Interface to the database
 auto backend = data::makeBackend(config_, cache);
 
-cluster::ClusterCommunicationService clusterCommunicationService{backend};
+auto systemState = etl::SystemState::makeSystemState(config_);
+
+cluster::ClusterCommunicationService clusterCommunicationService{
+backend, std::make_unique<etl::WriterState>(systemState)
+};
 clusterCommunicationService.run();
 
 auto const amendmentCenter = std::make_shared<data::AmendmentCenter const>(backend);
@@ -142,20 +152,14 @@ ClioApplication::run(bool const useNgWebServer)
 // ETL uses the balancer to extract data.
 // The server uses the balancer to forward RPCs to a rippled node.
 // The balancer itself publishes to streams (transactions_proposed and accounts_proposed)
-auto balancer = [&] -> std::shared_ptr<etlng::LoadBalancerInterface> {
-if (config_.get<bool>("__ng_etl")) {
-return etlng::LoadBalancer::makeLoadBalancer(
-config_, ioc, backend, subscriptions, std::make_unique<util::MTRandomGenerator>(), ledgers
-);
-}
-
-return etl::LoadBalancer::makeLoadBalancer(
-config_, ioc, backend, subscriptions, std::make_unique<util::MTRandomGenerator>(), ledgers
-);
-}();
+auto balancer = etl::LoadBalancer::makeLoadBalancer(
+config_, ioc, backend, subscriptions, std::make_unique<util::MTRandomGenerator>(), ledgers
+);
 
 // ETL is responsible for writing and publishing to streams. In read-only mode, ETL only publishes
-auto etl = etl::ETLService::makeETLService(config_, ioc, ctx, backend, subscriptions, balancer, ledgers);
+auto etl = etl::ETLService::makeETLService(
+config_, std::move(systemState), ctx, backend, subscriptions, balancer, ledgers
+);
 
 auto workQueue = rpc::WorkQueue::makeWorkQueue(config_);
 auto counters = rpc::Counters::makeCounters(workQueue);
@@ -187,7 +191,7 @@ ClioApplication::run(bool const useNgWebServer)
 return EXIT_FAILURE;
 }
 
-httpServer->onGet("/metrics", MetricsHandler{adminVerifier});
+httpServer->onGet("/metrics", MetricsHandler{adminVerifier, workQueue});
 httpServer->onGet("/health", HealthCheckHandler{});
 httpServer->onGet("/cache_state", CacheStateHandler{cache});
 auto requestHandler = RequestHandler{adminVerifier, handler};
@@ -201,7 +205,16 @@ ClioApplication::run(bool const useNgWebServer)
 }
 
 appStopper_.setOnStop(
-Stopper::makeOnStopCallback(httpServer.value(), *balancer, *etl, *subscriptions, *backend, ioc)
+Stopper::makeOnStopCallback(
+httpServer.value(),
+*balancer,
+*etl,
+*subscriptions,
+*backend,
+cacheSaver,
+clusterCommunicationService,
+ioc
+)
 );
 
 // Blocks until stopped.
@@ -216,6 +229,11 @@ ClioApplication::run(bool const useNgWebServer)
 auto handler = std::make_shared<web::RPCServerHandler<RPCEngineType>>(config_, backend, rpcEngine, etl, dosGuard);
 
 auto const httpServer = web::makeHttpServer(config_, ioc, dosGuard, handler, cache);
+appStopper_.setOnStop(
+Stopper::makeOnStopCallback(
+*httpServer, *balancer, *etl, *subscriptions, *backend, cacheSaver, clusterCommunicationService, ioc
+)
+);
 
 // Blocks until stopped.
 // When stopped, shared_ptrs fall out of scope
@@ -38,7 +38,18 @@ Stopper::~Stopper()
 void
 Stopper::setOnStop(std::function<void(boost::asio::yield_context)> cb)
 {
-util::spawn(ctx_, std::move(cb));
+util::spawn(ctx_, [this, cb = std::move(cb)](auto yield) {
+cb(yield);
+
+if (onCompleteCallback_)
+onCompleteCallback_();
+});
+}
+
+void
+Stopper::setOnComplete(std::function<void()> cb)
+{
+onCompleteCallback_ = std::move(cb);
 }
 
 void
@@ -19,13 +19,15 @@
 
 #pragma once
 
+#include "cluster/Concepts.hpp"
 #include "data/BackendInterface.hpp"
-#include "etlng/ETLServiceInterface.hpp"
+#include "data/LedgerCacheSaver.hpp"
-#include "etlng/LoadBalancerInterface.hpp"
+#include "etl/ETLServiceInterface.hpp"
+#include "etl/LoadBalancerInterface.hpp"
 #include "feed/SubscriptionManagerInterface.hpp"
 #include "util/CoroutineGroup.hpp"
 #include "util/log/Logger.hpp"
-#include "web/ng/Server.hpp"
+#include "web/interface/Concepts.hpp"
 
 #include <boost/asio/executor_work_guard.hpp>
 #include <boost/asio/io_context.hpp>
@@ -42,6 +44,7 @@ namespace app {
 class Stopper {
 boost::asio::io_context ctx_;
 std::thread worker_;
+std::function<void()> onCompleteCallback_;
 
 public:
 /**
@@ -57,6 +60,14 @@ public:
 void
 setOnStop(std::function<void(boost::asio::yield_context)> cb);
 
+/**
+* @brief Set the callback to be called when graceful shutdown completes.
+*
+* @param cb The callback to be called when shutdown completes.
+*/
+void
+setOnComplete(std::function<void()> cb);
+
 /**
 * @brief Stop the application and run the shutdown tasks.
 */
@@ -71,21 +82,30 @@ public:
 * @param etl The ETL service to stop.
 * @param subscriptions The subscription manager to stop.
 * @param backend The backend to stop.
+* @param cacheSaver The ledger cache saver
+* @param clusterCommunicationService The cluster communication service to stop.
 * @param ioc The io_context to stop.
 * @return The callback to be called on application stop.
 */
-template <web::ng::SomeServer ServerType>
+template <
+web::SomeServer ServerType,
+data::SomeLedgerCacheSaver LedgerCacheSaverType,
+cluster::SomeClusterCommunicationService ClusterCommunicationServiceType>
 static std::function<void(boost::asio::yield_context)>
 makeOnStopCallback(
 ServerType& server,
-etlng::LoadBalancerInterface& balancer,
+etl::LoadBalancerInterface& balancer,
-etlng::ETLServiceInterface& etl,
+etl::ETLServiceInterface& etl,
 feed::SubscriptionManagerInterface& subscriptions,
 data::BackendInterface& backend,
+LedgerCacheSaverType& cacheSaver,
+ClusterCommunicationServiceType& clusterCommunicationService,
 boost::asio::io_context& ioc
 )
 {
 return [&](boost::asio::yield_context yield) {
+cacheSaver.save();
+
 util::CoroutineGroup coroutineGroup{yield};
 coroutineGroup.spawn(yield, [&server](auto innerYield) {
 server.stop(innerYield);
@@ -97,6 +117,8 @@ public:
 });
 coroutineGroup.asyncWait(yield);
 
+clusterCommunicationService.stop();
+
 etl.stop();
 LOG(util::LogService::info()) << "ETL stopped";
 
@@ -106,6 +128,8 @@ public:
 backend.waitForWritesToFinish();
 LOG(util::LogService::info()) << "Backend writes finished";
 
+cacheSaver.waitToFinish();
+
 ioc.stop();
 LOG(util::LogService::info()) << "io_context stopped";
 
@@ -19,7 +19,10 @@
 
 #include "app/WebHandlers.hpp"
 
+#include "rpc/Errors.hpp"
+#include "rpc/WorkQueue.hpp"
 #include "util/Assert.hpp"
+#include "util/CoroutineGroup.hpp"
 #include "util/prometheus/Http.hpp"
 #include "web/AdminVerificationStrategy.hpp"
 #include "web/SubscriptionContextInterface.hpp"
@@ -31,6 +34,7 @@
 #include <boost/asio/spawn.hpp>
 #include <boost/beast/http/status.hpp>
 
+#include <functional>
 #include <memory>
 #include <optional>
 #include <string>
@@ -76,8 +80,8 @@ DisconnectHook::operator()(web::ng::Connection const& connection)
 dosguard_.get().decrement(connection.ip());
 }
 
-MetricsHandler::MetricsHandler(std::shared_ptr<web::AdminVerificationStrategy> adminVerifier)
+MetricsHandler::MetricsHandler(std::shared_ptr<web::AdminVerificationStrategy> adminVerifier, rpc::WorkQueue& workQueue)
-: adminVerifier_{std::move(adminVerifier)}
+: adminVerifier_{std::move(adminVerifier)}, workQueue_{std::ref(workQueue)}
 {
 }
 
@@ -86,19 +90,45 @@ MetricsHandler::operator()(
 web::ng::Request const& request,
 web::ng::ConnectionMetadata& connectionMetadata,
 web::SubscriptionContextPtr,
-boost::asio::yield_context
+boost::asio::yield_context yield
 )
 {
-auto const maybeHttpRequest = request.asHttpRequest();
-ASSERT(maybeHttpRequest.has_value(), "Got not a http request in Get");
-auto const& httpRequest = maybeHttpRequest->get();
-
-// FIXME(#1702): Using veb server thread to handle prometheus request. Better to post on work queue.
-auto maybeResponse = util::prometheus::handlePrometheusRequest(
-httpRequest, adminVerifier_->isAdmin(httpRequest, connectionMetadata.ip())
+std::optional<web::ng::Response> response;
+util::CoroutineGroup coroutineGroup{yield, 1};
+auto const onTaskComplete = coroutineGroup.registerForeign(yield);
+ASSERT(onTaskComplete.has_value(), "Coroutine group can't be full");
+
+bool const postSuccessful = workQueue_.get().postCoro(
+[this, &request, &response, &onTaskComplete = onTaskComplete.value(), &connectionMetadata](
+boost::asio::yield_context
+) mutable {
+auto const maybeHttpRequest = request.asHttpRequest();
+ASSERT(maybeHttpRequest.has_value(), "Got not a http request in Get");
+auto const& httpRequest = maybeHttpRequest->get();
+
+auto maybeResponse = util::prometheus::handlePrometheusRequest(
+httpRequest, adminVerifier_->isAdmin(httpRequest, connectionMetadata.ip())
+);
+ASSERT(maybeResponse.has_value(), "Got unexpected request for Prometheus");
+response = web::ng::Response{std::move(maybeResponse).value(), request};
+// notify the coroutine group that the foreign task is done
+onTaskComplete();
+},
+/* isWhiteListed= */ true,
+rpc::WorkQueue::Priority::High
 );
-ASSERT(maybeResponse.has_value(), "Got unexpected request for Prometheus");
-return web::ng::Response{std::move(maybeResponse).value(), request};
+
+if (!postSuccessful) {
+return web::ng::Response{
+boost::beast::http::status::too_many_requests, rpc::makeError(rpc::RippledError::rpcTOO_BUSY), request
+};
+}
+
+// Put the coroutine to sleep until the foreign task is done
+coroutineGroup.asyncWait(yield);
+ASSERT(response.has_value(), "Woke up coroutine without setting response");
+
+return std::move(response).value();
 }
 
 web::ng::Response
@@ -21,6 +21,7 @@
|
|||||||
|
|
||||||
#include "data/LedgerCacheInterface.hpp"
|
#include "data/LedgerCacheInterface.hpp"
|
||||||
#include "rpc/Errors.hpp"
|
#include "rpc/Errors.hpp"
|
||||||
|
#include "rpc/WorkQueue.hpp"
|
||||||
#include "util/log/Logger.hpp"
|
#include "util/log/Logger.hpp"
|
||||||
#include "web/AdminVerificationStrategy.hpp"
|
#include "web/AdminVerificationStrategy.hpp"
|
||||||
#include "web/SubscriptionContextInterface.hpp"
|
#include "web/SubscriptionContextInterface.hpp"
|
||||||
@@ -119,20 +120,23 @@ public:
|
|||||||
*/
|
*/
|
||||||
class MetricsHandler {
|
class MetricsHandler {
|
||||||
std::shared_ptr<web::AdminVerificationStrategy> adminVerifier_;
|
std::shared_ptr<web::AdminVerificationStrategy> adminVerifier_;
|
||||||
|
std::reference_wrapper<rpc::WorkQueue> workQueue_;
|
||||||
|
|
||||||
public:
|
public:
|
||||||
/**
|
/**
|
||||||
* @brief Construct a new MetricsHandler object
|
* @brief Construct a new MetricsHandler object
|
||||||
*
|
*
|
||||||
* @param adminVerifier The AdminVerificationStrategy to use for verifying the connection for admin access.
|
* @param adminVerifier The AdminVerificationStrategy to use for verifying the connection for admin access.
|
||||||
|
* @param workQueue The WorkQueue to use for handling the request.
|
||||||
*/
|
*/
|
||||||
MetricsHandler(std::shared_ptr<web::AdminVerificationStrategy> adminVerifier);
|
MetricsHandler(std::shared_ptr<web::AdminVerificationStrategy> adminVerifier, rpc::WorkQueue& workQueue);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief The call of the function object.
|
* @brief The call of the function object.
|
||||||
*
|
*
|
||||||
* @param request The request to handle.
|
* @param request The request to handle.
|
||||||
* @param connectionMetadata The connection metadata.
|
* @param connectionMetadata The connection metadata.
|
||||||
|
* @param yield The yield context.
|
||||||
* @return The response to the request.
|
* @return The response to the request.
|
||||||
*/
|
*/
|
||||||
web::ng::Response
|
web::ng::Response
|
||||||
@@ -140,7 +144,7 @@ public:
|
|||||||
web::ng::Request const& request,
|
web::ng::Request const& request,
|
||||||
web::ng::ConnectionMetadata& connectionMetadata,
|
web::ng::ConnectionMetadata& connectionMetadata,
|
||||||
web::SubscriptionContextPtr,
|
web::SubscriptionContextPtr,
|
||||||
boost::asio::yield_context
|
boost::asio::yield_context yield
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
141
src/cluster/Backend.cpp
Normal file
141
src/cluster/Backend.cpp
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
//------------------------------------------------------------------------------
|
||||||
|
/*
|
||||||
|
This file is part of clio: https://github.com/XRPLF/clio
|
||||||
|
Copyright (c) 2025, the clio developers.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
//==============================================================================
|
||||||
|
|
||||||
|
#include "cluster/Backend.hpp"
|
||||||
|
|
||||||
|
#include "cluster/ClioNode.hpp"
|
||||||
|
#include "data/BackendInterface.hpp"
|
||||||
|
#include "etl/WriterState.hpp"
|
||||||
|
|
||||||
|
#include <boost/asio/bind_cancellation_slot.hpp>
|
||||||
|
#include <boost/asio/cancellation_type.hpp>
|
||||||
|
#include <boost/asio/error.hpp>
|
||||||
|
#include <boost/asio/execution_context.hpp>
|
||||||
|
#include <boost/asio/executor.hpp>
|
||||||
|
#include <boost/asio/spawn.hpp>
|
||||||
|
#include <boost/asio/steady_timer.hpp>
|
||||||
|
#include <boost/asio/thread_pool.hpp>
|
||||||
|
#include <boost/asio/use_future.hpp>
|
||||||
|
#include <boost/json/parse.hpp>
|
||||||
|
#include <boost/json/serialize.hpp>
|
||||||
|
#include <boost/json/value.hpp>
|
||||||
|
#include <boost/json/value_from.hpp>
|
||||||
|
#include <boost/json/value_to.hpp>
|
||||||
|
#include <boost/uuid/random_generator.hpp>
|
||||||
|
#include <boost/uuid/uuid.hpp>
|
||||||
|
#include <fmt/format.h>
|
||||||
|
|
||||||
|
#include <chrono>
|
||||||
|
#include <memory>
|
||||||
|
#include <utility>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
namespace cluster {
|
||||||
|
|
||||||
|
Backend::Backend(
|
||||||
|
boost::asio::thread_pool& ctx,
|
||||||
|
std::shared_ptr<data::BackendInterface> backend,
|
||||||
|
std::unique_ptr<etl::WriterStateInterface const> writerState,
|
||||||
|
std::chrono::steady_clock::duration readInterval,
|
||||||
|
std::chrono::steady_clock::duration writeInterval
|
||||||
|
)
|
||||||
|
: backend_(std::move(backend))
|
||||||
|
, writerState_(std::move(writerState))
|
||||||
|
, readerTask_(readInterval, ctx)
|
||||||
|
, writerTask_(writeInterval, ctx)
|
||||||
|
, selfUuid_(std::make_shared<boost::uuids::uuid>(boost::uuids::random_generator{}()))
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
Backend::run()
|
||||||
|
{
|
||||||
|
readerTask_.run([this](boost::asio::yield_context yield) {
|
||||||
|
auto clusterData = doRead(yield);
|
||||||
|
onNewState_(selfUuid_, std::make_shared<ClusterData>(std::move(clusterData)));
|
||||||
|
});
|
||||||
|
|
||||||
|
writerTask_.run([this]() { doWrite(); });
|
||||||
|
}
|
||||||
|
|
||||||
|
Backend::~Backend()
|
||||||
|
{
|
||||||
|
stop();
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
Backend::stop()
|
||||||
|
{
|
||||||
|
readerTask_.stop();
|
||||||
|
writerTask_.stop();
|
||||||
|
}
|
||||||
|
|
||||||
|
ClioNode::CUuid
|
||||||
|
Backend::selfId() const
|
||||||
|
{
|
||||||
|
return selfUuid_;
|
||||||
|
}
|
||||||
|
|
||||||
|
Backend::ClusterData
|
||||||
|
Backend::doRead(boost::asio::yield_context yield)
|
||||||
|
{
|
||||||
|
BackendInterface::ClioNodesDataFetchResult expectedResult;
|
||||||
|
try {
|
||||||
|
expectedResult = backend_->fetchClioNodesData(yield);
|
||||||
|
} catch (...) {
|
||||||
|
expectedResult = std::unexpected{"Failed to fetch Clio nodes data"};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!expectedResult.has_value()) {
|
||||||
|
return std::unexpected{std::move(expectedResult).error()};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<ClioNode> otherNodesData;
|
||||||
|
for (auto const& [uuid, nodeDataStr] : expectedResult.value()) {
|
||||||
|
if (uuid == *selfUuid_) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
boost::system::error_code errorCode;
|
||||||
|
auto const json = boost::json::parse(nodeDataStr, errorCode);
|
||||||
|
if (errorCode.failed()) {
|
||||||
|
return std::unexpected{fmt::format("Error parsing json from DB: {}", nodeDataStr)};
|
||||||
|
}
|
||||||
|
|
||||||
|
auto expectedNodeData = boost::json::try_value_to<ClioNode>(json);
|
||||||
|
if (expectedNodeData.has_error()) {
|
||||||
|
return std::unexpected{fmt::format("Error converting json to ClioNode: {}", nodeDataStr)};
|
||||||
|
}
|
||||||
|
*expectedNodeData->uuid = uuid;
|
||||||
|
otherNodesData.push_back(std::move(expectedNodeData).value());
|
||||||
|
}
|
||||||
|
otherNodesData.push_back(ClioNode::from(selfUuid_, *writerState_));
|
||||||
|
return otherNodesData;
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
Backend::doWrite()
|
||||||
|
{
|
||||||
|
auto const selfData = ClioNode::from(selfUuid_, *writerState_);
|
||||||
|
boost::json::value jsonValue{};
|
||||||
|
boost::json::value_from(selfData, jsonValue);
|
||||||
|
backend_->writeNodeMessage(*selfData.uuid, boost::json::serialize(jsonValue.as_object()));
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace cluster
|
||||||
147
src/cluster/Backend.hpp
Normal file
147
src/cluster/Backend.hpp
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
//------------------------------------------------------------------------------
|
||||||
|
/*
|
||||||
|
This file is part of clio: https://github.com/XRPLF/clio
|
||||||
|
Copyright (c) 2025, the clio developers.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
//==============================================================================
|
||||||
|
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "cluster/ClioNode.hpp"
|
||||||
|
#include "cluster/impl/RepeatedTask.hpp"
|
||||||
|
#include "data/BackendInterface.hpp"
|
||||||
|
#include "etl/WriterState.hpp"
|
||||||
|
#include "util/log/Logger.hpp"
|
||||||
|
|
||||||
|
#include <boost/asio/any_io_executor.hpp>
|
||||||
|
#include <boost/asio/cancellation_signal.hpp>
|
||||||
|
#include <boost/asio/execution_context.hpp>
|
||||||
|
#include <boost/asio/executor.hpp>
|
||||||
|
#include <boost/asio/spawn.hpp>
|
||||||
|
#include <boost/asio/strand.hpp>
|
||||||
|
#include <boost/asio/thread_pool.hpp>
|
||||||
|
#include <boost/signals2/connection.hpp>
|
||||||
|
#include <boost/signals2/signal.hpp>
|
||||||
|
#include <boost/signals2/variadic_signal.hpp>
|
||||||
|
#include <boost/uuid/uuid.hpp>
|
||||||
|
|
||||||
|
#include <chrono>
|
||||||
|
#include <concepts>
|
||||||
|
#include <memory>
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
namespace cluster {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Backend communication handler for cluster state synchronization.
|
||||||
|
*
|
||||||
|
* This class manages reading and writing cluster state information to/from the backend database.
|
||||||
|
* It periodically reads the state of other nodes in the cluster and writes the current node's state,
|
||||||
|
* enabling cluster-wide coordination and awareness.
|
||||||
|
*/
|
||||||
|
class Backend {
|
||||||
|
public:
|
||||||
|
/** @brief Type representing cluster data result - either a vector of nodes or an error message */
|
||||||
|
using ClusterData = std::expected<std::vector<ClioNode>, std::string>;
|
||||||
|
|
||||||
|
private:
|
||||||
|
util::Logger log_{"ClusterCommunication"};
|
||||||
|
|
||||||
|
std::shared_ptr<data::BackendInterface> backend_;
|
||||||
|
std::unique_ptr<etl::WriterStateInterface const> writerState_;
|
||||||
|
|
||||||
|
impl::RepeatedTask<boost::asio::thread_pool> readerTask_;
|
||||||
|
impl::RepeatedTask<boost::asio::thread_pool> writerTask_;
|
||||||
|
|
||||||
|
ClioNode::Uuid selfUuid_;
|
||||||
|
|
||||||
|
boost::signals2::signal<void(ClioNode::CUuid, std::shared_ptr<ClusterData const>)> onNewState_;
|
||||||
|
|
||||||
|
public:
|
||||||
|
/**
|
||||||
|
* @brief Construct a Backend communication handler.
|
||||||
|
*
|
||||||
|
* @param ctx The execution context for asynchronous operations
|
||||||
|
* @param backend Interface to the backend database
|
||||||
|
* @param writerState State indicating whether this node is writing to the database
|
||||||
|
* @param readInterval How often to read cluster state from the backend
|
||||||
|
* @param writeInterval How often to write this node's state to the backend
|
||||||
|
*/
|
||||||
|
Backend(
|
||||||
|
boost::asio::thread_pool& ctx,
|
||||||
|
std::shared_ptr<data::BackendInterface> backend,
|
||||||
|
std::unique_ptr<etl::WriterStateInterface const> writerState,
|
||||||
|
std::chrono::steady_clock::duration readInterval,
|
||||||
|
std::chrono::steady_clock::duration writeInterval
|
||||||
|
);
|
||||||
|
|
||||||
|
~Backend();
|
||||||
|
|
||||||
|
Backend(Backend&&) = delete;
|
||||||
|
Backend&
|
||||||
|
operator=(Backend&&) = delete;
|
||||||
|
Backend(Backend const&) = delete;
|
||||||
|
Backend&
|
||||||
|
operator=(Backend const&) = delete;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Start the backend read and write tasks.
|
||||||
|
*
|
||||||
|
* Begins periodic reading of cluster state from the backend and writing of this node's state.
|
||||||
|
*/
|
||||||
|
void
|
||||||
|
run();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Stop the backend read and write tasks.
|
||||||
|
*
|
||||||
|
* Stops all periodic tasks and waits for them to complete.
|
||||||
|
*/
|
||||||
|
void
|
||||||
|
stop();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Subscribe to new cluster state notifications.
|
||||||
|
*
|
||||||
|
* @tparam S Callable type accepting (ClioNode::cUUID, ClusterData)
|
||||||
|
* @param s Subscriber callback to be invoked when new cluster state is available
|
||||||
|
* @return A connection object that can be used to unsubscribe
|
||||||
|
*/
|
||||||
|
template <typename S>
|
||||||
|
requires std::invocable<S, ClioNode::CUuid, std::shared_ptr<ClusterData const>>
|
||||||
|
boost::signals2::connection
|
||||||
|
subscribeToNewState(S&& s)
|
||||||
|
{
|
||||||
|
return onNewState_.connect(s);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Get the UUID of this node in the cluster.
|
||||||
|
*
|
||||||
|
* @return The UUID of this node.
|
||||||
|
*/
|
||||||
|
ClioNode::CUuid
|
||||||
|
selfId() const;
|
||||||
|
|
||||||
|
private:
|
||||||
|
ClusterData
|
||||||
|
doRead(boost::asio::yield_context yield);
|
||||||
|
|
||||||
|
void
|
||||||
|
doWrite();
|
||||||
|
};
|
||||||
|
|
||||||
|
} // namespace cluster
|
||||||
@@ -1,5 +1,7 @@
|
|||||||
add_library(clio_cluster)
|
add_library(clio_cluster)
|
||||||
|
|
||||||
target_sources(clio_cluster PRIVATE ClioNode.cpp ClusterCommunicationService.cpp)
|
target_sources(
|
||||||
|
clio_cluster PRIVATE Backend.cpp ClioNode.cpp ClusterCommunicationService.cpp Metrics.cpp WriterDecider.cpp
|
||||||
|
)
|
||||||
|
|
||||||
target_link_libraries(clio_cluster PRIVATE clio_util clio_data)
|
target_link_libraries(clio_cluster PRIVATE clio_util clio_data)
|
||||||
|
|||||||
@@ -19,6 +19,7 @@
|
|||||||
|
|
||||||
#include "cluster/ClioNode.hpp"
|
#include "cluster/ClioNode.hpp"
|
||||||
|
|
||||||
|
#include "etl/WriterState.hpp"
|
||||||
#include "util/TimeUtils.hpp"
|
#include "util/TimeUtils.hpp"
|
||||||
|
|
||||||
#include <boost/json/conversion.hpp>
|
#include <boost/json/conversion.hpp>
|
||||||
@@ -26,39 +27,72 @@
|
|||||||
#include <boost/json/value.hpp>
|
#include <boost/json/value.hpp>
|
||||||
#include <boost/uuid/uuid.hpp>
|
#include <boost/uuid/uuid.hpp>
|
||||||
|
|
||||||
|
#include <chrono>
|
||||||
|
#include <cstdint>
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <stdexcept>
|
#include <stdexcept>
|
||||||
#include <string>
|
#include <string>
|
||||||
#include <string_view>
|
#include <string_view>
|
||||||
|
#include <utility>
|
||||||
|
|
||||||
namespace cluster {
|
namespace cluster {
|
||||||
|
|
||||||
namespace {
|
namespace {
|
||||||
|
|
||||||
struct Fields {
|
struct JsonFields {
|
||||||
static constexpr std::string_view const kUPDATE_TIME = "update_time";
|
static constexpr std::string_view const kUPDATE_TIME = "update_time";
|
||||||
|
static constexpr std::string_view const kDB_ROLE = "db_role";
|
||||||
};
|
};
|
||||||
|
|
||||||
} // namespace
|
} // namespace
|
||||||
|
|
||||||
|
ClioNode
|
||||||
|
ClioNode::from(ClioNode::Uuid uuid, etl::WriterStateInterface const& writerState)
|
||||||
|
{
|
||||||
|
auto const dbRole = [&writerState]() {
|
||||||
|
if (writerState.isReadOnly()) {
|
||||||
|
return ClioNode::DbRole::ReadOnly;
|
||||||
|
}
|
||||||
|
if (writerState.isFallback()) {
|
||||||
|
return ClioNode::DbRole::Fallback;
|
||||||
|
}
|
||||||
|
if (writerState.isLoadingCache()) {
|
||||||
|
return ClioNode::DbRole::LoadingCache;
|
||||||
|
}
|
||||||
|
|
||||||
|
return writerState.isWriting() ? ClioNode::DbRole::Writer : ClioNode::DbRole::NotWriter;
|
||||||
|
}();
|
||||||
|
return ClioNode{.uuid = std::move(uuid), .updateTime = std::chrono::system_clock::now(), .dbRole = dbRole};
|
||||||
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, ClioNode const& node)
|
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, ClioNode const& node)
|
||||||
{
|
{
|
||||||
jv = {
|
jv = {
|
||||||
{Fields::kUPDATE_TIME, util::systemTpToUtcStr(node.updateTime, ClioNode::kTIME_FORMAT)},
|
{JsonFields::kUPDATE_TIME, util::systemTpToUtcStr(node.updateTime, ClioNode::kTIME_FORMAT)},
|
||||||
|
{JsonFields::kDB_ROLE, static_cast<int64_t>(node.dbRole)}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
ClioNode
|
ClioNode
|
||||||
tag_invoke(boost::json::value_to_tag<ClioNode>, boost::json::value const& jv)
|
tag_invoke(boost::json::value_to_tag<ClioNode>, boost::json::value const& jv)
|
||||||
{
|
{
|
||||||
auto const& updateTimeStr = jv.as_object().at(Fields::kUPDATE_TIME).as_string();
|
auto const& updateTimeStr = jv.as_object().at(JsonFields::kUPDATE_TIME).as_string();
|
||||||
auto const updateTime = util::systemTpFromUtcStr(std::string(updateTimeStr), ClioNode::kTIME_FORMAT);
|
auto const updateTime = util::systemTpFromUtcStr(std::string(updateTimeStr), ClioNode::kTIME_FORMAT);
|
||||||
if (!updateTime.has_value()) {
|
if (!updateTime.has_value()) {
|
||||||
throw std::runtime_error("Failed to parse update time");
|
throw std::runtime_error("Failed to parse update time");
|
||||||
}
|
}
|
||||||
|
|
||||||
return ClioNode{.uuid = std::make_shared<boost::uuids::uuid>(), .updateTime = updateTime.value()};
|
auto const dbRoleValue = jv.as_object().at(JsonFields::kDB_ROLE).as_int64();
|
||||||
|
if (dbRoleValue > static_cast<int64_t>(ClioNode::DbRole::MAX))
|
||||||
|
throw std::runtime_error("Invalid db_role value");
|
||||||
|
|
||||||
|
return ClioNode{
|
||||||
|
// Json data doesn't contain uuid so leaving it empty here. It will be filled outside of this parsing
|
||||||
|
.uuid = std::make_shared<boost::uuids::uuid>(),
|
||||||
|
.updateTime = updateTime.value(),
|
||||||
|
.dbRole = static_cast<ClioNode::DbRole>(dbRoleValue)
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
} // namespace cluster
|
} // namespace cluster
|
||||||
|
|||||||
@@ -19,6 +19,8 @@
|
|||||||
|
|
||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
|
#include "etl/WriterState.hpp"
|
||||||
|
|
||||||
#include <boost/json/conversion.hpp>
|
#include <boost/json/conversion.hpp>
|
||||||
#include <boost/json/value.hpp>
|
#include <boost/json/value.hpp>
|
||||||
#include <boost/uuid/uuid.hpp>
|
#include <boost/uuid/uuid.hpp>
|
||||||
@@ -37,16 +39,37 @@ struct ClioNode {
|
|||||||
*/
|
*/
|
||||||
static constexpr char const* kTIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ";
|
static constexpr char const* kTIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ";
|
||||||
|
|
||||||
// enum class WriterRole {
|
/**
|
||||||
// ReadOnly,
|
* @brief Database role of a node in the cluster.
|
||||||
// NotWriter,
|
*
|
||||||
// Writer
|
* Roles are used to coordinate which node writes to the database:
|
||||||
// };
|
* - ReadOnly: Node is configured to never write (strict read-only mode)
|
||||||
|
* - NotWriter: Node can write but is currently not the designated writer
|
||||||
|
* - Writer: Node is actively writing to the database
|
||||||
|
* - Fallback: Node is using the fallback writer decision mechanism
|
||||||
|
*
|
||||||
|
* When any node in the cluster is in Fallback mode, the entire cluster switches
|
||||||
|
* from the cluster communication mechanism to the slower but more reliable
|
||||||
|
* database-based conflict detection mechanism.
|
||||||
|
*/
|
||||||
|
enum class DbRole { ReadOnly = 0, LoadingCache = 1, NotWriter = 2, Writer = 3, Fallback = 4, MAX = 4 };
|
||||||
|
|
||||||
std::shared_ptr<boost::uuids::uuid> uuid; ///< The UUID of the node.
|
using Uuid = std::shared_ptr<boost::uuids::uuid>;
|
||||||
|
using CUuid = std::shared_ptr<boost::uuids::uuid const>;
|
||||||
|
|
||||||
|
Uuid uuid; ///< The UUID of the node.
|
||||||
std::chrono::system_clock::time_point updateTime; ///< The time the data about the node was last updated.
|
std::chrono::system_clock::time_point updateTime; ///< The time the data about the node was last updated.
|
||||||
|
DbRole dbRole; ///< The database role of the node
|
||||||
|
|
||||||
// WriterRole writerRole;
|
/**
|
||||||
|
* @brief Create a ClioNode from writer state.
|
||||||
|
*
|
||||||
|
* @param uuid The UUID of the node
|
||||||
|
* @param writerState The writer state to determine the node's database role
|
||||||
|
* @return A ClioNode with the current time and role derived from writerState
|
||||||
|
*/
|
||||||
|
static ClioNode
|
||||||
|
from(Uuid uuid, etl::WriterStateInterface const& writerState);
|
||||||
};
|
};
|
||||||
|
|
||||||
void
|
void
|
||||||
|
|||||||
@@ -19,11 +19,8 @@
|
|||||||
|
|
||||||
#include "cluster/ClusterCommunicationService.hpp"
|
#include "cluster/ClusterCommunicationService.hpp"
|
||||||
|
|
||||||
#include "cluster/ClioNode.hpp"
|
|
||||||
#include "data/BackendInterface.hpp"
|
#include "data/BackendInterface.hpp"
|
||||||
#include "util/Assert.hpp"
|
#include "etl/WriterState.hpp"
|
||||||
#include "util/Spawn.hpp"
|
|
||||||
#include "util/log/Logger.hpp"
|
|
||||||
|
|
||||||
#include <boost/asio/bind_cancellation_slot.hpp>
|
#include <boost/asio/bind_cancellation_slot.hpp>
|
||||||
#include <boost/asio/cancellation_type.hpp>
|
#include <boost/asio/cancellation_type.hpp>
|
||||||
@@ -41,76 +38,32 @@
|
|||||||
|
|
||||||
#include <chrono>
|
#include <chrono>
|
||||||
#include <ctime>
|
#include <ctime>
|
||||||
#include <latch>
|
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <string>
|
|
||||||
#include <utility>
|
#include <utility>
|
||||||
#include <vector>
|
|
||||||
|
|
||||||
namespace {
|
|
||||||
constexpr auto kTOTAL_WORKERS = 2uz; // 1 reading and 1 writing worker (coroutines)
|
|
||||||
} // namespace
|
|
||||||
|
|
||||||
namespace cluster {
|
namespace cluster {
|
||||||
|
|
||||||
ClusterCommunicationService::ClusterCommunicationService(
|
ClusterCommunicationService::ClusterCommunicationService(
|
||||||
std::shared_ptr<data::BackendInterface> backend,
|
std::shared_ptr<data::BackendInterface> backend,
|
||||||
|
std::unique_ptr<etl::WriterStateInterface> writerState,
|
||||||
std::chrono::steady_clock::duration readInterval,
|
std::chrono::steady_clock::duration readInterval,
|
||||||
std::chrono::steady_clock::duration writeInterval
|
std::chrono::steady_clock::duration writeInterval
|
||||||
)
|
)
|
||||||
: backend_(std::move(backend))
|
: backend_(ctx_, std::move(backend), writerState->clone(), readInterval, writeInterval)
|
||||||
, readInterval_(readInterval)
|
, writerDecider_(ctx_, std::move(writerState))
|
||||||
, writeInterval_(writeInterval)
|
|
||||||
, finishedCountdown_(kTOTAL_WORKERS)
|
|
||||||
, selfData_{ClioNode{
|
|
||||||
.uuid = std::make_shared<boost::uuids::uuid>(boost::uuids::random_generator{}()),
|
|
||||||
.updateTime = std::chrono::system_clock::time_point{}
|
|
||||||
}}
|
|
||||||
{
|
{
|
||||||
nodesInClusterMetric_.set(1); // The node always sees itself
|
|
||||||
isHealthy_ = true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
ClusterCommunicationService::run()
|
ClusterCommunicationService::run()
|
||||||
{
|
{
|
||||||
ASSERT(not running_ and not stopped_, "Can only be ran once");
|
backend_.subscribeToNewState([this](auto&&... args) {
|
||||||
running_ = true;
|
metrics_.onNewState(std::forward<decltype(args)>(args)...);
|
||||||
|
|
||||||
util::spawn(strand_, [this](boost::asio::yield_context yield) {
|
|
||||||
boost::asio::steady_timer timer(yield.get_executor());
|
|
||||||
boost::system::error_code ec;
|
|
||||||
|
|
||||||
while (running_) {
|
|
||||||
timer.expires_after(readInterval_);
|
|
||||||
auto token = cancelSignal_.slot();
|
|
||||||
timer.async_wait(boost::asio::bind_cancellation_slot(token, yield[ec]));
|
|
||||||
|
|
||||||
if (ec == boost::asio::error::operation_aborted or not running_)
|
|
||||||
break;
|
|
||||||
|
|
||||||
doRead(yield);
|
|
||||||
}
|
|
||||||
|
|
||||||
finishedCountdown_.count_down(1);
|
|
||||||
});
|
});
|
||||||
|
backend_.subscribeToNewState([this](auto&&... args) {
|
||||||
util::spawn(strand_, [this](boost::asio::yield_context yield) {
|
writerDecider_.onNewState(std::forward<decltype(args)>(args)...);
|
||||||
boost::asio::steady_timer timer(yield.get_executor());
|
|
||||||
boost::system::error_code ec;
|
|
||||||
|
|
||||||
while (running_) {
|
|
||||||
doWrite();
|
|
||||||
timer.expires_after(writeInterval_);
|
|
||||||
auto token = cancelSignal_.slot();
|
|
||||||
timer.async_wait(boost::asio::bind_cancellation_slot(token, yield[ec]));
|
|
||||||
|
|
||||||
if (ec == boost::asio::error::operation_aborted or not running_)
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
finishedCountdown_.count_down(1);
|
|
||||||
});
|
});
|
||||||
|
backend_.run();
|
||||||
}
|
}
|
||||||
|
|
||||||
ClusterCommunicationService::~ClusterCommunicationService()
|
ClusterCommunicationService::~ClusterCommunicationService()
|
||||||
@@ -121,107 +74,7 @@ ClusterCommunicationService::~ClusterCommunicationService()
|
|||||||
void
|
void
|
||||||
ClusterCommunicationService::stop()
|
ClusterCommunicationService::stop()
|
||||||
{
|
{
|
||||||
if (stopped_)
|
backend_.stop();
|
||||||
return;
|
|
||||||
|
|
||||||
stopped_ = true;
|
|
||||||
|
|
||||||
// for ASAN to see through concurrency correctly we need to exit all coroutines before joining the ctx
|
|
||||||
running_ = false;
|
|
||||||
|
|
||||||
// cancelSignal_ is not thread safe so we execute emit on the same strand
|
|
||||||
boost::asio::spawn(
|
|
||||||
strand_, [this](auto&&) { cancelSignal_.emit(boost::asio::cancellation_type::all); }, boost::asio::use_future
|
|
||||||
)
|
|
||||||
.wait();
|
|
||||||
finishedCountdown_.wait();
|
|
||||||
|
|
||||||
ctx_.join();
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<boost::uuids::uuid>
|
|
||||||
ClusterCommunicationService::selfUuid() const
|
|
||||||
{
|
|
||||||
// Uuid never changes so it is safe to copy it without using strand_
|
|
||||||
return selfData_.uuid;
|
|
||||||
}
|
|
||||||
|
|
||||||
ClioNode
|
|
||||||
ClusterCommunicationService::selfData() const
|
|
||||||
{
|
|
||||||
ClioNode result{};
|
|
||||||
util::spawn(strand_, [this, &result](boost::asio::yield_context) { result = selfData_; });
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::expected<std::vector<ClioNode>, std::string>
|
|
||||||
ClusterCommunicationService::clusterData() const
|
|
||||||
{
|
|
||||||
if (not isHealthy_) {
|
|
||||||
return std::unexpected{"Service is not healthy"};
|
|
||||||
}
|
|
||||||
std::vector<ClioNode> result;
|
|
||||||
util::spawn(strand_, [this, &result](boost::asio::yield_context) {
|
|
||||||
result = otherNodesData_;
|
|
||||||
result.push_back(selfData_);
|
|
||||||
});
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
void
|
|
||||||
ClusterCommunicationService::doRead(boost::asio::yield_context yield)
|
|
||||||
{
|
|
||||||
otherNodesData_.clear();
|
|
||||||
|
|
||||||
BackendInterface::ClioNodesDataFetchResult expectedResult;
|
|
||||||
try {
|
|
||||||
expectedResult = backend_->fetchClioNodesData(yield);
|
|
||||||
} catch (...) {
|
|
||||||
expectedResult = std::unexpected{"Failed to fecth Clio nodes data"};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!expectedResult.has_value()) {
|
|
||||||
LOG(log_.error()) << "Failed to fetch nodes data";
|
|
||||||
isHealthy_ = false;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a new vector here to not have partially parsed data in otherNodesData_
|
|
||||||
std::vector<ClioNode> otherNodesData;
|
|
||||||
for (auto const& [uuid, nodeDataStr] : expectedResult.value()) {
|
|
||||||
if (uuid == *selfData_.uuid) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
boost::system::error_code errorCode;
|
|
||||||
auto const json = boost::json::parse(nodeDataStr, errorCode);
|
|
||||||
if (errorCode.failed()) {
|
|
||||||
LOG(log_.error()) << "Error parsing json from DB: " << nodeDataStr;
|
|
||||||
isHealthy_ = false;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
auto expectedNodeData = boost::json::try_value_to<ClioNode>(json);
|
|
||||||
if (expectedNodeData.has_error()) {
|
|
||||||
LOG(log_.error()) << "Error converting json to ClioNode: " << json;
|
|
||||||
isHealthy_ = false;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
*expectedNodeData->uuid = uuid;
|
|
||||||
otherNodesData.push_back(std::move(expectedNodeData).value());
|
|
||||||
}
|
|
||||||
otherNodesData_ = std::move(otherNodesData);
|
|
||||||
nodesInClusterMetric_.set(otherNodesData_.size() + 1);
|
|
||||||
isHealthy_ = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
void
|
|
||||||
ClusterCommunicationService::doWrite()
|
|
||||||
{
|
|
||||||
selfData_.updateTime = std::chrono::system_clock::now();
|
|
||||||
boost::json::value jsonValue{};
|
|
||||||
boost::json::value_from(selfData_, jsonValue);
|
|
||||||
backend_->writeNodeMessage(*selfData_.uuid, boost::json::serialize(jsonValue.as_object()));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
} // namespace cluster
|
} // namespace cluster
|
||||||
|
|||||||
@@ -19,13 +19,12 @@
|
|||||||
|
|
||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include "cluster/ClioNode.hpp"
|
#include "cluster/Backend.hpp"
|
||||||
#include "cluster/ClusterCommunicationServiceInterface.hpp"
|
#include "cluster/Concepts.hpp"
|
||||||
|
#include "cluster/Metrics.hpp"
|
||||||
|
#include "cluster/WriterDecider.hpp"
|
||||||
#include "data/BackendInterface.hpp"
|
#include "data/BackendInterface.hpp"
|
||||||
#include "util/log/Logger.hpp"
|
#include "etl/WriterState.hpp"
|
||||||
#include "util/prometheus/Bool.hpp"
|
|
||||||
#include "util/prometheus/Gauge.hpp"
|
|
||||||
#include "util/prometheus/Prometheus.hpp"
|
|
||||||
|
|
||||||
#include <boost/asio/cancellation_signal.hpp>
|
#include <boost/asio/cancellation_signal.hpp>
|
||||||
#include <boost/asio/spawn.hpp>
|
#include <boost/asio/spawn.hpp>
|
||||||
@@ -33,67 +32,49 @@
|
|||||||
#include <boost/asio/thread_pool.hpp>
|
#include <boost/asio/thread_pool.hpp>
|
||||||
#include <boost/uuid/uuid.hpp>
|
#include <boost/uuid/uuid.hpp>
|
||||||
|
|
||||||
#include <atomic>
|
|
||||||
#include <chrono>
|
#include <chrono>
|
||||||
#include <latch>
|
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <string>
|
|
||||||
#include <vector>
|
|
||||||
|
|
||||||
namespace cluster {
|
namespace cluster {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Service to post and read messages to/from the cluster. It uses a backend to communicate with the cluster.
|
* @brief Service to post and read messages to/from the cluster. It uses a backend to communicate with the cluster.
|
||||||
*/
|
*/
|
||||||
class ClusterCommunicationService : public ClusterCommunicationServiceInterface {
|
class ClusterCommunicationService : public ClusterCommunicationServiceTag {
|
||||||
util::prometheus::GaugeInt& nodesInClusterMetric_ = PrometheusService::gaugeInt(
|
|
||||||
"cluster_nodes_total_number",
|
|
||||||
{},
|
|
||||||
"Total number of nodes this node can detect in the cluster."
|
|
||||||
);
|
|
||||||
util::prometheus::Bool isHealthy_ = PrometheusService::boolMetric(
|
|
||||||
"cluster_communication_is_healthy",
|
|
||||||
{},
|
|
||||||
"Whether cluster communication service is operating healthy (1 - healthy, 0 - we have a problem)"
|
|
||||||
);
|
|
||||||
|
|
||||||
// TODO: Use util::async::CoroExecutionContext after https://github.com/XRPLF/clio/issues/1973 is implemented
|
// TODO: Use util::async::CoroExecutionContext after https://github.com/XRPLF/clio/issues/1973 is implemented
|
||||||
boost::asio::thread_pool ctx_{1};
|
boost::asio::thread_pool ctx_{1};
|
||||||
boost::asio::strand<boost::asio::thread_pool::executor_type> strand_ = boost::asio::make_strand(ctx_);
|
Backend backend_;
|
||||||
|
Metrics metrics_;
|
||||||
util::Logger log_{"ClusterCommunication"};
|
WriterDecider writerDecider_;
|
||||||
|
|
||||||
std::shared_ptr<data::BackendInterface> backend_;
|
|
||||||
|
|
||||||
std::chrono::steady_clock::duration readInterval_;
|
|
||||||
std::chrono::steady_clock::duration writeInterval_;
|
|
||||||
|
|
||||||
boost::asio::cancellation_signal cancelSignal_;
|
|
||||||
std::latch finishedCountdown_;
|
|
||||||
std::atomic_bool running_ = false;
|
|
||||||
bool stopped_ = false;
|
|
||||||
|
|
||||||
ClioNode selfData_;
|
|
||||||
std::vector<ClioNode> otherNodesData_;
|
|
||||||
|
|
||||||
public:
|
public:
|
||||||
static constexpr std::chrono::milliseconds kDEFAULT_READ_INTERVAL{2100};
|
static constexpr std::chrono::milliseconds kDEFAULT_READ_INTERVAL{1000};
|
||||||
static constexpr std::chrono::milliseconds kDEFAULT_WRITE_INTERVAL{1200};
|
static constexpr std::chrono::milliseconds kDEFAULT_WRITE_INTERVAL{1000};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Construct a new Cluster Communication Service object.
|
* @brief Construct a new Cluster Communication Service object.
|
||||||
*
|
*
|
||||||
* @param backend The backend to use for communication.
|
* @param backend The backend to use for communication.
|
||||||
|
* @param writerState The state showing whether clio is writing to the database.
|
||||||
* @param readInterval The interval to read messages from the cluster.
|
* @param readInterval The interval to read messages from the cluster.
|
||||||
* @param writeInterval The interval to write messages to the cluster.
|
* @param writeInterval The interval to write messages to the cluster.
|
||||||
*/
|
*/
|
||||||
ClusterCommunicationService(
|
ClusterCommunicationService(
|
||||||
std::shared_ptr<data::BackendInterface> backend,
|
std::shared_ptr<data::BackendInterface> backend,
|
||||||
|
std::unique_ptr<etl::WriterStateInterface> writerState,
|
||||||
std::chrono::steady_clock::duration readInterval = kDEFAULT_READ_INTERVAL,
|
std::chrono::steady_clock::duration readInterval = kDEFAULT_READ_INTERVAL,
|
||||||
std::chrono::steady_clock::duration writeInterval = kDEFAULT_WRITE_INTERVAL
|
std::chrono::steady_clock::duration writeInterval = kDEFAULT_WRITE_INTERVAL
|
||||||
);
|
);
|
||||||
|
|
||||||
~ClusterCommunicationService() override;
|
~ClusterCommunicationService() override;
|
||||||
|
|
||||||
|
ClusterCommunicationService(ClusterCommunicationService&&) = delete;
|
||||||
|
ClusterCommunicationService(ClusterCommunicationService const&) = delete;
|
||||||
|
ClusterCommunicationService&
|
||||||
|
operator=(ClusterCommunicationService&&) = delete;
|
||||||
|
ClusterCommunicationService&
|
||||||
|
operator=(ClusterCommunicationService const&) = delete;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Start the service.
|
* @brief Start the service.
|
||||||
*/
|
*/
|
||||||
@@ -105,44 +86,6 @@ public:
|
|||||||
*/
|
*/
|
||||||
void
|
void
|
||||||
stop();
|
stop();
|
||||||
|
|
||||||
ClusterCommunicationService(ClusterCommunicationService&&) = delete;
|
|
||||||
ClusterCommunicationService(ClusterCommunicationService const&) = delete;
|
|
||||||
ClusterCommunicationService&
|
|
||||||
operator=(ClusterCommunicationService&&) = delete;
|
|
||||||
ClusterCommunicationService&
|
|
||||||
operator=(ClusterCommunicationService const&) = delete;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @brief Get the UUID of the current node.
|
|
||||||
*
|
|
||||||
* @return The UUID of the current node.
|
|
||||||
*/
|
|
||||||
std::shared_ptr<boost::uuids::uuid>
|
|
||||||
selfUuid() const;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @brief Get the data of the current node.
|
|
||||||
*
|
|
||||||
* @return The data of the current node.
|
|
||||||
*/
|
|
||||||
ClioNode
|
|
||||||
selfData() const override;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @brief Get the data of all nodes in the cluster (including self).
|
|
||||||
*
|
|
||||||
* @return The data of all nodes in the cluster or error if the service is not healthy.
|
|
||||||
*/
|
|
||||||
std::expected<std::vector<ClioNode>, std::string>
|
|
||||||
clusterData() const override;
|
|
||||||
|
|
||||||
private:
|
|
||||||
void
|
|
||||||
doRead(boost::asio::yield_context yield);
|
|
||||||
|
|
||||||
void
|
|
||||||
doWrite();
|
|
||||||
};
|
};
|
||||||
|
|
||||||
} // namespace cluster
|
} // namespace cluster
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
//------------------------------------------------------------------------------
|
//------------------------------------------------------------------------------
|
||||||
/*
|
/*
|
||||||
This file is part of clio: https://github.com/XRPLF/clio
|
This file is part of clio: https://github.com/XRPLF/clio
|
||||||
Copyright (c) 2023, the clio developers.
|
Copyright (c) 2025, the clio developers.
|
||||||
|
|
||||||
Permission to use, copy, modify, and distribute this software for any
|
Permission to use, copy, modify, and distribute this software for any
|
||||||
purpose with or without fee is hereby granted, provided that the above
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
@@ -19,24 +19,21 @@
|
|||||||
|
|
||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include "etl/impl/LedgerLoader.hpp"
|
#include <concepts>
|
||||||
#include "util/FakeFetchResponse.hpp"
|
|
||||||
|
|
||||||
#include <gmock/gmock.h>
|
namespace cluster {
|
||||||
#include <xrpl/protocol/LedgerHeader.h>
|
|
||||||
|
|
||||||
#include <cstdint>
|
/**
|
||||||
#include <optional>
|
* @brief Tag type for cluster communication service implementations.
|
||||||
|
*
|
||||||
struct MockLedgerLoader {
|
* This tag is used to identify types that implement cluster communication functionality.
|
||||||
using GetLedgerResponseType = FakeFetchResponse;
|
* Types should inherit from this tag to be recognized as cluster communication services.
|
||||||
using RawLedgerObjectType = FakeLedgerObject;
|
*/
|
||||||
|
struct ClusterCommunicationServiceTag {
|
||||||
MOCK_METHOD(
|
virtual ~ClusterCommunicationServiceTag() = default;
|
||||||
FormattedTransactionsData,
|
|
||||||
insertTransactions,
|
|
||||||
(ripple::LedgerHeader const&, GetLedgerResponseType& data),
|
|
||||||
()
|
|
||||||
);
|
|
||||||
MOCK_METHOD(std::optional<ripple::LedgerHeader>, loadInitialLedger, (uint32_t sequence), ());
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
concept SomeClusterCommunicationService = std::derived_from<T, ClusterCommunicationServiceTag>;
|
||||||
|
|
||||||
|
} // namespace cluster
|
||||||
@@ -17,38 +17,31 @@
|
|||||||
*/
|
*/
|
||||||
//==============================================================================
|
//==============================================================================
|
||||||
|
|
||||||
#pragma once
|
#include "cluster/Metrics.hpp"
|
||||||
|
|
||||||
|
#include "cluster/Backend.hpp"
|
||||||
#include "cluster/ClioNode.hpp"
|
#include "cluster/ClioNode.hpp"
|
||||||
|
|
||||||
#include <expected>
|
#include <memory>
|
||||||
#include <string>
|
|
||||||
#include <vector>
|
|
||||||
|
|
||||||
namespace cluster {
|
namespace cluster {
|
||||||
|
|
||||||
/**
|
Metrics::Metrics()
|
||||||
* @brief Interface for the cluster communication service.
|
{
|
||||||
*/
|
nodesInClusterMetric_.set(1); // The node always sees itself
|
||||||
class ClusterCommunicationServiceInterface {
|
isHealthy_ = true;
|
||||||
public:
|
}
|
||||||
virtual ~ClusterCommunicationServiceInterface() = default;
|
|
||||||
|
|
||||||
/**
|
void
|
||||||
* @brief Get the data of the current node.
|
Metrics::onNewState(ClioNode::CUuid, std::shared_ptr<Backend::ClusterData const> clusterData)
|
||||||
*
|
{
|
||||||
* @return The data of the current node.
|
if (clusterData->has_value()) {
|
||||||
*/
|
isHealthy_ = true;
|
||||||
[[nodiscard]] virtual ClioNode
|
nodesInClusterMetric_.set(clusterData->value().size());
|
||||||
selfData() const = 0;
|
} else {
|
||||||
|
isHealthy_ = false;
|
||||||
/**
|
nodesInClusterMetric_.set(1);
|
||||||
* @brief Get the data of all nodes in the cluster (including self).
|
}
|
||||||
*
|
}
|
||||||
* @return The data of all nodes in the cluster or error if the service is not healthy.
|
|
||||||
*/
|
|
||||||
[[nodiscard]] virtual std::expected<std::vector<ClioNode>, std::string>
|
|
||||||
clusterData() const = 0;
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace cluster
|
} // namespace cluster
|
||||||
76
src/cluster/Metrics.hpp
Normal file
76
src/cluster/Metrics.hpp
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
//------------------------------------------------------------------------------
|
||||||
|
/*
|
||||||
|
This file is part of clio: https://github.com/XRPLF/clio
|
||||||
|
Copyright (c) 2025, the clio developers.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
//==============================================================================
|
||||||
|
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "cluster/Backend.hpp"
|
||||||
|
#include "cluster/ClioNode.hpp"
|
||||||
|
#include "util/prometheus/Bool.hpp"
|
||||||
|
#include "util/prometheus/Gauge.hpp"
|
||||||
|
#include "util/prometheus/Prometheus.hpp"
|
||||||
|
|
||||||
|
#include <memory>
|
||||||
|
|
||||||
|
namespace cluster {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Manages Prometheus metrics for cluster communication and node tracking.
|
||||||
|
*
|
||||||
|
* This class tracks cluster-related metrics including:
|
||||||
|
* - Total number of nodes detected in the cluster
|
||||||
|
* - Health status of cluster communication
|
||||||
|
*/
|
||||||
|
class Metrics {
|
||||||
|
/** @brief Gauge tracking the total number of nodes visible in the cluster */
|
||||||
|
util::prometheus::GaugeInt& nodesInClusterMetric_ = PrometheusService::gaugeInt(
|
||||||
|
"cluster_nodes_total_number",
|
||||||
|
{},
|
||||||
|
"Total number of nodes this node can detect in the cluster."
|
||||||
|
);
|
||||||
|
|
||||||
|
/** @brief Boolean metric indicating whether cluster communication is healthy */
|
||||||
|
util::prometheus::Bool isHealthy_ = PrometheusService::boolMetric(
|
||||||
|
"cluster_communication_is_healthy",
|
||||||
|
{},
|
||||||
|
"Whether cluster communication service is operating healthy (1 - healthy, 0 - we have a problem)"
|
||||||
|
);
|
||||||
|
|
||||||
|
public:
|
||||||
|
/**
|
||||||
|
* @brief Constructs a Metrics instance and initializes metrics.
|
||||||
|
*
|
||||||
|
* Sets the initial node count to 1 (self) and marks communication as healthy.
|
||||||
|
*/
|
||||||
|
Metrics();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Updates metrics based on new cluster state.
|
||||||
|
*
|
||||||
|
* This callback is invoked when cluster state changes. It updates:
|
||||||
|
* - Health status based on whether cluster data is available
|
||||||
|
* - Node count to reflect the current cluster size
|
||||||
|
*
|
||||||
|
* @param uuid The UUID of the node (unused in current implementation)
|
||||||
|
* @param clusterData Shared pointer to the current cluster data; may be empty if communication failed
|
||||||
|
*/
|
||||||
|
void
|
||||||
|
onNewState(ClioNode::CUuid uuid, std::shared_ptr<Backend::ClusterData const> clusterData);
|
||||||
|
};
|
||||||
|
|
||||||
|
} // namespace cluster
|
||||||
98
src/cluster/WriterDecider.cpp
Normal file
98
src/cluster/WriterDecider.cpp
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
//------------------------------------------------------------------------------
|
||||||
|
/*
|
||||||
|
This file is part of clio: https://github.com/XRPLF/clio
|
||||||
|
Copyright (c) 2025, the clio developers.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
//==============================================================================
|
||||||
|
|
||||||
|
#include "cluster/WriterDecider.hpp"
|
||||||
|
|
||||||
|
#include "cluster/Backend.hpp"
|
||||||
|
#include "cluster/ClioNode.hpp"
|
||||||
|
#include "etl/WriterState.hpp"
|
||||||
|
#include "util/Assert.hpp"
|
||||||
|
#include "util/Spawn.hpp"
|
||||||
|
|
||||||
|
#include <boost/asio/thread_pool.hpp>
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
#include <memory>
|
||||||
|
#include <utility>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
namespace cluster {
|
||||||
|
|
||||||
|
WriterDecider::WriterDecider(boost::asio::thread_pool& ctx, std::unique_ptr<etl::WriterStateInterface> writerState)
|
||||||
|
: ctx_(ctx), writerState_(std::move(writerState))
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
WriterDecider::onNewState(ClioNode::CUuid selfId, std::shared_ptr<Backend::ClusterData const> clusterData)
|
||||||
|
{
|
||||||
|
if (not clusterData->has_value())
|
||||||
|
return;
|
||||||
|
|
||||||
|
util::spawn(
|
||||||
|
ctx_,
|
||||||
|
[writerState = writerState_->clone(),
|
||||||
|
selfId = std::move(selfId),
|
||||||
|
clusterData = clusterData->value()](auto&&) mutable {
|
||||||
|
auto const selfData =
|
||||||
|
std::ranges::find_if(clusterData, [&selfId](ClioNode const& node) { return node.uuid == selfId; });
|
||||||
|
ASSERT(selfData != clusterData.end(), "Self data should always be in the cluster data");
|
||||||
|
|
||||||
|
if (selfData->dbRole == ClioNode::DbRole::Fallback) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (selfData->dbRole == ClioNode::DbRole::ReadOnly) {
|
||||||
|
writerState->giveUpWriting();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If any node in the cluster is in Fallback mode, the entire cluster must switch
|
||||||
|
// to the fallback writer decision mechanism for consistency
|
||||||
|
if (std::ranges::any_of(clusterData, [](ClioNode const& node) {
|
||||||
|
return node.dbRole == ClioNode::DbRole::Fallback;
|
||||||
|
})) {
|
||||||
|
writerState->setWriterDecidingFallback();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// We are not ReadOnly and there is no Fallback in the cluster
|
||||||
|
std::ranges::sort(clusterData, [](ClioNode const& lhs, ClioNode const& rhs) {
|
||||||
|
return *lhs.uuid < *rhs.uuid;
|
||||||
|
});
|
||||||
|
|
||||||
|
auto const it = std::ranges::find_if(clusterData, [](ClioNode const& node) {
|
||||||
|
return node.dbRole == ClioNode::DbRole::NotWriter or node.dbRole == ClioNode::DbRole::Writer;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (it == clusterData.end()) {
|
||||||
|
// No writer nodes in the cluster yet
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (*it->uuid == *selfId) {
|
||||||
|
writerState->startWriting();
|
||||||
|
} else {
|
||||||
|
writerState->giveUpWriting();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace cluster
|
||||||
75
src/cluster/WriterDecider.hpp
Normal file
75
src/cluster/WriterDecider.hpp
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
//------------------------------------------------------------------------------
|
||||||
|
/*
|
||||||
|
This file is part of clio: https://github.com/XRPLF/clio
|
||||||
|
Copyright (c) 2025, the clio developers.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
//==============================================================================
|
||||||
|
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "cluster/Backend.hpp"
|
||||||
|
#include "cluster/ClioNode.hpp"
|
||||||
|
#include "etl/WriterState.hpp"
|
||||||
|
|
||||||
|
#include <boost/asio/thread_pool.hpp>
|
||||||
|
|
||||||
|
#include <memory>
|
||||||
|
|
||||||
|
namespace cluster {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Decides which node in the cluster should be the writer based on cluster state.
|
||||||
|
*
|
||||||
|
* This class monitors cluster state changes and determines whether the current node
|
||||||
|
* should act as the writer to the database. The decision is made by:
|
||||||
|
* 1. Sorting all nodes by UUID for deterministic ordering
|
||||||
|
* 2. Selecting the first node that is allowed to write (not ReadOnly)
|
||||||
|
* 3. Activating writing on this node if it's the current node, otherwise deactivating
|
||||||
|
*
|
||||||
|
* This ensures only one node in the cluster actively writes to the database at a time.
|
||||||
|
*/
|
||||||
|
class WriterDecider {
|
||||||
|
/** @brief Thread pool for spawning asynchronous tasks */
|
||||||
|
boost::asio::thread_pool& ctx_;
|
||||||
|
|
||||||
|
/** @brief Interface for controlling the writer state of this node */
|
||||||
|
std::unique_ptr<etl::WriterStateInterface> writerState_;
|
||||||
|
|
||||||
|
public:
|
||||||
|
/**
|
||||||
|
* @brief Constructs a WriterDecider.
|
||||||
|
*
|
||||||
|
* @param ctx Thread pool for executing asynchronous operations
|
||||||
|
* @param writerState Writer state interface for controlling write operations
|
||||||
|
*/
|
||||||
|
WriterDecider(boost::asio::thread_pool& ctx, std::unique_ptr<etl::WriterStateInterface> writerState);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Handles cluster state changes and decides whether this node should be the writer.
|
||||||
|
*
|
||||||
|
* This method is called when cluster state changes. It asynchronously:
|
||||||
|
* - Sorts all nodes by UUID to establish a deterministic order
|
||||||
|
* - Identifies the first node allowed to write (not ReadOnly)
|
||||||
|
* - Activates writing if this node is selected, otherwise deactivates writing
|
||||||
|
* - Logs a warning if no nodes in the cluster are allowed to write
|
||||||
|
*
|
||||||
|
* @param selfId The UUID of the current node
|
||||||
|
* @param clusterData Shared pointer to current cluster data; may be empty if communication failed
|
||||||
|
*/
|
||||||
|
void
|
||||||
|
onNewState(ClioNode::CUuid selfId, std::shared_ptr<Backend::ClusterData const> clusterData);
|
||||||
|
};
|
||||||
|
|
||||||
|
} // namespace cluster
|
||||||
src/cluster/impl/RepeatedTask.hpp (new file, 104 lines)
@@ -0,0 +1,104 @@
//------------------------------------------------------------------------------
/*
    This file is part of clio: https://github.com/XRPLF/clio
    Copyright (c) 2025, the clio developers.

    Permission to use, copy, modify, and distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#pragma once

#include "util/Assert.hpp"
#include "util/Spawn.hpp"

#include <boost/asio/bind_cancellation_slot.hpp>
#include <boost/asio/cancellation_signal.hpp>
#include <boost/asio/cancellation_type.hpp>
#include <boost/asio/error.hpp>
#include <boost/asio/executor.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/asio/steady_timer.hpp>
#include <boost/asio/strand.hpp>
#include <boost/asio/use_future.hpp>

#include <atomic>
#include <chrono>
#include <concepts>
#include <semaphore>

namespace cluster::impl {

// TODO: Try to replace util::Repeat by this. https://github.com/XRPLF/clio/issues/2926
template <typename Context>
class RepeatedTask {
    std::chrono::steady_clock::duration interval_;
    boost::asio::strand<typename Context::executor_type> strand_;

    enum class State { Running, Stopped };
    std::atomic<State> state_ = State::Stopped;

    std::binary_semaphore semaphore_{0};
    boost::asio::steady_timer timer_;

public:
    RepeatedTask(std::chrono::steady_clock::duration interval, Context& ctx)
        : interval_(interval), strand_(boost::asio::make_strand(ctx)), timer_(strand_)
    {
    }

    ~RepeatedTask()
    {
        stop();
    }

    template <typename Fn>
        requires std::invocable<Fn, boost::asio::yield_context> or std::invocable<Fn>
    void
    run(Fn&& f)
    {
        ASSERT(state_ == State::Stopped, "Can only be run once");
        state_ = State::Running;
        util::spawn(strand_, [this, f = std::forward<Fn>(f)](boost::asio::yield_context yield) {
            boost::system::error_code ec;

            while (state_ == State::Running) {
                timer_.expires_after(interval_);
                timer_.async_wait(yield[ec]);

                if (ec or state_ != State::Running)
                    break;

                if constexpr (std::invocable<decltype(f), boost::asio::yield_context>) {
                    f(yield);
                } else {
                    f();
                }
            }

            semaphore_.release();
        });
    }

    void
    stop()
    {
        if (auto expected = State::Running; not state_.compare_exchange_strong(expected, State::Stopped))
            return;  // Already stopped or not started

        boost::asio::spawn(strand_, [this](auto&&) { timer_.cancel(); }, boost::asio::use_future).wait();
        semaphore_.acquire();
    }
};

} // namespace cluster::impl
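A possible way to drive RepeatedTask, sketched under the assumption that the header above is built inside the clio tree (where util/Spawn.hpp and util/Assert.hpp are available); the one-second interval and the printed message are illustrative only.

#include "cluster/impl/RepeatedTask.hpp"

#include <boost/asio/thread_pool.hpp>

#include <chrono>
#include <iostream>
#include <thread>

int
main()
{
    boost::asio::thread_pool pool{2};

    // Fires roughly once per second on a strand of the pool until stop() is called.
    cluster::impl::RepeatedTask<boost::asio::thread_pool> task{std::chrono::seconds{1}, pool};
    task.run([] { std::cout << "tick\n"; });

    std::this_thread::sleep_for(std::chrono::seconds{3});
    task.stop();  // blocks until the repeating coroutine has released the semaphore
    pool.join();
}

Note that run() may be called only once per instance; the ASSERT in the header enforces that.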
@@ -146,9 +146,12 @@ AmendmentCenter::isEnabled(AmendmentKey const& key, uint32_t seq) const
 bool
 AmendmentCenter::isEnabled(boost::asio::yield_context yield, AmendmentKey const& key, uint32_t seq) const
 {
-    if (auto const listAmendments = fetchAmendmentsList(yield, seq); listAmendments)
-        return lookupAmendment(all_, *listAmendments, key);
+    try {
+        if (auto const listAmendments = fetchAmendmentsList(yield, seq); listAmendments)
+            return lookupAmendment(all_, *listAmendments, key);
+    } catch (std::runtime_error const&) {
+        return false;  // Some old ledgers do not contain the Amendments ledger object, so do the best we can for now
+    }
     return false;
 }
@@ -157,13 +160,19 @@ AmendmentCenter::isEnabled(boost::asio::yield_context yield, std::vector<Amendme
 {
     namespace rg = std::ranges;

-    if (auto const listAmendments = fetchAmendmentsList(yield, seq); listAmendments) {
-        std::vector<bool> out;
-        rg::transform(keys, std::back_inserter(out), [this, &listAmendments](auto const& key) {
-            return lookupAmendment(all_, *listAmendments, key);
-        });
-        return out;
+    try {
+        if (auto const listAmendments = fetchAmendmentsList(yield, seq); listAmendments) {
+            std::vector<bool> out;
+            rg::transform(keys, std::back_inserter(out), [this, &listAmendments](auto const& key) {
+                return lookupAmendment(all_, *listAmendments, key);
+            });
+
+            return out;
+        }
+    } catch (std::runtime_error const&) {
+        return std::vector<bool>(
+            keys.size(), false
+        );  // Some old ledgers do not contain the Amendments ledger object, so do the best we can for now
     }

     return std::vector<bool>(keys.size(), false);
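The intent of both hunks above is the same: when fetching the amendments list throws std::runtime_error because the ledger predates the Amendments ledger object, report every amendment as disabled instead of failing the request. A minimal standalone sketch of that fallback shape (fetchList and the feature names are hypothetical, not clio APIs):

#include <algorithm>
#include <optional>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical fetcher: returns the enabled-amendments list, or throws when the
// ledger is so old that the Amendments ledger object does not exist at all.
std::optional<std::vector<std::string>>
fetchList(bool ledgerTooOld)
{
    if (ledgerTooOld)
        throw std::runtime_error{"no Amendments ledger object"};
    return std::vector<std::string>{"featureA", "featureB"};
}

std::vector<bool>
areEnabled(std::vector<std::string> const& keys, bool ledgerTooOld)
{
    try {
        if (auto const list = fetchList(ledgerTooOld); list) {
            std::vector<bool> out;
            for (auto const& key : keys)
                out.push_back(std::find(list->begin(), list->end(), key) != list->end());
            return out;
        }
    } catch (std::runtime_error const&) {
        // Degrade gracefully: treat every amendment as disabled for this ledger.
    }
    return std::vector<bool>(keys.size(), false);
}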
@@ -147,6 +147,12 @@ struct Amendments {
     REGISTER(fixAMMClawbackRounding);
     REGISTER(fixMPTDeliveredAmount);
     REGISTER(fixPriceOracleOrder);
+    REGISTER(DynamicMPT);
+    REGISTER(fixDelegateV1_1);
+    REGISTER(fixDirectoryLimit);
+    REGISTER(fixIncludeKeyletFields);
+    REGISTER(fixTokenEscrowV1);
+    REGISTER(LendingProtocol);

     // Obsolete but supported by libxrpl
     REGISTER(CryptoConditionsSuite);
@@ -270,7 +270,7 @@ BackendInterface::updateRange(uint32_t newMax)
 {
     std::scoped_lock const lck(rngMtx_);

-    if (range_.has_value() && newMax < range_->maxSequence) {
+    if (range_.has_value() and newMax < range_->maxSequence) {
         ASSERT(
             false,
             "Range shouldn't exist yet or newMax should be at least range->maxSequence. newMax = {}, "
@@ -280,11 +280,14 @@ BackendInterface::updateRange(uint32_t newMax)
         );
     }

-    if (!range_.has_value()) {
-        range_ = {.minSequence = newMax, .maxSequence = newMax};
-    } else {
-        range_->maxSequence = newMax;
-    }
+    updateRangeImpl(newMax);
+}
+
+void
+BackendInterface::forceUpdateRange(uint32_t newMax)
+{
+    std::scoped_lock const lck(rngMtx_);
+    updateRangeImpl(newMax);
 }

 void
@@ -410,4 +413,14 @@ BackendInterface::fetchFees(std::uint32_t const seq, boost::asio::yield_context
     return fees;
 }
+
+void
+BackendInterface::updateRangeImpl(uint32_t newMax)
+{
+    if (!range_.has_value()) {
+        range_ = {.minSequence = newMax, .maxSequence = newMax};
+    } else {
+        range_->maxSequence = newMax;
+    }
+}

 } // namespace data
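The refactor consolidates the range bookkeeping into updateRangeImpl: the first recorded sequence pins both ends of the range, later calls only advance maxSequence, and forceUpdateRange reuses the same helper while skipping the monotonicity ASSERT of updateRange. A tiny standalone sketch of that bookkeeping, using a simplified Range struct instead of clio's LedgerRange:

#include <cassert>
#include <cstdint>
#include <optional>

struct Range {
    std::uint32_t minSequence = 0;
    std::uint32_t maxSequence = 0;
};

// Mirrors updateRangeImpl: the first call pins both ends, later calls move only the top.
void
updateRangeImpl(std::optional<Range>& range, std::uint32_t newMax)
{
    if (!range.has_value()) {
        range = Range{.minSequence = newMax, .maxSequence = newMax};
    } else {
        range->maxSequence = newMax;
    }
}

int
main()
{
    std::optional<Range> range;

    updateRangeImpl(range, 5);  // first call: {5, 5}
    assert(range->minSequence == 5 && range->maxSequence == 5);

    updateRangeImpl(range, 9);  // later call: {5, 9}
    assert(range->minSequence == 5 && range->maxSequence == 9);
}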
@@ -249,6 +249,15 @@ public:
     void
     updateRange(uint32_t newMax);

+    /**
+     * @brief Updates the range of sequences that are stored in the DB without any checks
+     * @note In most cases you should use updateRange() instead
+     *
+     * @param newMax The new maximum sequence available
+     */
+    void
+    forceUpdateRange(uint32_t newMax);
+
     /**
      * @brief Sets the range of sequences that are stored in the DB.
      *
@@ -776,6 +785,9 @@ private:
      */
     virtual bool
     doFinishWrites() = 0;
+
+    void
+    updateRangeImpl(uint32_t newMax);
 };

 } // namespace data
@@ -5,6 +5,7 @@ target_sources(
     BackendCounters.cpp
     BackendInterface.cpp
     LedgerCache.cpp
+    LedgerCacheSaver.cpp
     LedgerHeaderCache.cpp
     cassandra/impl/Future.cpp
     cassandra/impl/Cluster.cpp
@@ -14,6 +15,9 @@ target_sources(
     cassandra/impl/SslContext.cpp
     cassandra/Handle.cpp
     cassandra/SettingsProvider.cpp
+    impl/InputFile.cpp
+    impl/LedgerCacheFile.cpp
+    impl/OutputFile.cpp
 )

 target_link_libraries(clio_data PUBLIC cassandra-cpp-driver::cassandra-cpp-driver clio_util)
@@ -20,16 +20,22 @@
 #include "data/LedgerCache.hpp"

 #include "data/Types.hpp"
-#include "etlng/Models.hpp"
+#include "data/impl/LedgerCacheFile.hpp"
+#include "etl/Models.hpp"
 #include "util/Assert.hpp"

 #include <xrpl/basics/base_uint.h>

 #include <cstddef>
 #include <cstdint>
+#include <cstdlib>
+#include <cstring>
+#include <map>
 #include <mutex>
 #include <optional>
 #include <shared_mutex>
+#include <string>
+#include <utility>
 #include <vector>

 namespace data {
@@ -89,7 +95,7 @@ LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool is
 }

 void
-LedgerCache::update(std::vector<etlng::model::Object> const& objs, uint32_t seq)
+LedgerCache::update(std::vector<etl::model::Object> const& objs, uint32_t seq)
 {
     if (disabled_)
         return;
@@ -251,4 +257,34 @@ LedgerCache::getSuccessorHitRate() const
     return static_cast<float>(successorHitCounter_.get().value()) / successorReqCounter_.get().value();
 }
+
+std::expected<void, std::string>
+LedgerCache::saveToFile(std::string const& path) const
+{
+    if (not isFull()) {
+        return std::unexpected{"Ledger cache is not full"};
+    }
+
+    impl::LedgerCacheFile file{path};
+    std::shared_lock const lock{mtx_};
+    impl::LedgerCacheFile::DataView const data{.latestSeq = latestSeq_, .map = map_, .deleted = deleted_};
+    return file.write(data);
+}
+
+std::expected<void, std::string>
+LedgerCache::loadFromFile(std::string const& path, uint32_t minLatestSequence)
+{
+    impl::LedgerCacheFile file{path};
+    auto data = file.read(minLatestSequence);
+    if (not data.has_value()) {
+        return std::unexpected(std::move(data).error());
+    }
+    auto [latestSeq, map, deleted] = std::move(data).value();
+    std::unique_lock const lock{mtx_};
+    latestSeq_ = latestSeq;
+    map_ = std::move(map);
+    deleted_ = std::move(deleted);
+    full_ = true;
+    return {};
+}

 } // namespace data
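A sketch of how the new persistence entry points might be driven by a caller that already holds LedgerCache instances as defined above; the file path and the minimum sequence are illustrative values, not clio defaults.

#include "data/LedgerCache.hpp"

#include <iostream>
#include <string>

void
persistAndRestore(data::LedgerCache const& fullCache, data::LedgerCache& freshCache)
{
    std::string const path = "/tmp/clio-ledger-cache.bin";  // illustrative path

    // Saving requires the cache to be full; failures come back through std::expected.
    if (auto const saved = fullCache.saveToFile(path); not saved.has_value()) {
        std::cerr << "save failed: " << saved.error() << '\n';
        return;
    }

    // Loading rejects files whose latest sequence is below the given minimum.
    if (auto const loaded = freshCache.loadFromFile(path, /* minLatestSequence = */ 90'000'000);
        not loaded.has_value()) {
        std::cerr << "load failed: " << loaded.error() << '\n';
    }
}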
@@ -21,7 +21,7 @@

 #include "data/LedgerCacheInterface.hpp"
 #include "data/Types.hpp"
-#include "etlng/Models.hpp"
+#include "etl/Models.hpp"
 #include "util/prometheus/Bool.hpp"
 #include "util/prometheus/Counter.hpp"
 #include "util/prometheus/Label.hpp"
@@ -37,6 +37,7 @@
 #include <map>
 #include <optional>
 #include <shared_mutex>
+#include <string>
 #include <unordered_set>
 #include <vector>

@@ -46,11 +47,16 @@ namespace data {
  * @brief Cache for an entire ledger.
  */
 class LedgerCache : public LedgerCacheInterface {
+public:
+    /** @brief An entry of the cache */
     struct CacheEntry {
         uint32_t seq = 0;
         Blob blob;
     };
+
+    using CacheMap = std::map<ripple::uint256, CacheEntry>;
+
+private:
     // counters for fetchLedgerObject(s) hit rate
     std::reference_wrapper<util::prometheus::CounterInt> objectReqCounter_{PrometheusService::counterInt(
         "ledger_cache_counter_total_number",
@@ -73,8 +79,8 @@ class LedgerCache : public LedgerCacheInterface {
         util::prometheus::Labels({{"type", "cache_hit"}, {"fetch", "successor_key"}})
     )};

-    std::map<ripple::uint256, CacheEntry> map_;
-    std::map<ripple::uint256, CacheEntry> deleted_;
+    CacheMap map_;
+    CacheMap deleted_;

     mutable std::shared_mutex mtx_;
     std::condition_variable_any cv_;
@@ -98,7 +104,7 @@ public:
     update(std::vector<LedgerObject> const& objs, uint32_t seq, bool isBackground) override;

     void
-    update(std::vector<etlng::model::Object> const& objs, uint32_t seq) override;
+    update(std::vector<etl::model::Object> const& objs, uint32_t seq) override;

     std::optional<Blob>
     get(ripple::uint256 const& key, uint32_t seq) const override;
@@ -138,6 +144,12 @@ public:

     void
     waitUntilCacheContainsSeq(uint32_t seq) override;
+
+    std::expected<void, std::string>
+    saveToFile(std::string const& path) const override;
+
+    std::expected<void, std::string>
+    loadFromFile(std::string const& path, uint32_t minLatestSequence) override;
 };

 } // namespace data
@@ -20,14 +20,16 @@
 #pragma once

 #include "data/Types.hpp"
-#include "etlng/Models.hpp"
+#include "etl/Models.hpp"

 #include <xrpl/basics/base_uint.h>
 #include <xrpl/basics/hardened_hash.h>

 #include <cstddef>
 #include <cstdint>
+#include <expected>
 #include <optional>
+#include <string>
 #include <vector>

 namespace data {
@@ -63,7 +65,7 @@ public:
      * @param seq The sequence to update cache for
      */
     virtual void
-    update(std::vector<etlng::model::Object> const& objs, uint32_t seq) = 0;
+    update(std::vector<etl::model::Object> const& objs, uint32_t seq) = 0;

     /**
      * @brief Fetch a cached object by its key and sequence number.
@@ -168,6 +170,27 @@ public:
      */
     virtual void
     waitUntilCacheContainsSeq(uint32_t seq) = 0;
+
+    /**
+     * @brief Save the cache to file
+     * @note This operation takes about 7 seconds and it keeps a shared lock on mtx_
+     *
+     * @param path The file path to save the cache to
+     * @return An error as a string if any
+     */
+    [[nodiscard]] virtual std::expected<void, std::string>
+    saveToFile(std::string const& path) const = 0;
+
+    /**
+     * @brief Load the cache from file
+     * @note This operation takes about 7 seconds and it keeps mtx_ exclusively locked
+     *
+     * @param path The file path to load data from
+     * @param minLatestSequence The minimum allowed value of the latestLedgerSequence in the cache file
+     * @return An error as a string if any
+     */
+    [[nodiscard]] virtual std::expected<void, std::string>
+    loadFromFile(std::string const& path, uint32_t minLatestSequence) = 0;
 };

 } // namespace data
src/data/LedgerCacheSaver.cpp (new file, 75 lines)
@@ -0,0 +1,75 @@
//------------------------------------------------------------------------------
/*
    This file is part of clio: https://github.com/XRPLF/clio
    Copyright (c) 2025, the clio developers.

    Permission to use, copy, modify, and distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#include "data/LedgerCacheSaver.hpp"

#include "data/LedgerCacheInterface.hpp"
#include "util/Assert.hpp"
#include "util/Profiler.hpp"
#include "util/log/Logger.hpp"

#include <string>
#include <thread>

namespace data {

LedgerCacheSaver::LedgerCacheSaver(util::config::ClioConfigDefinition const& config, LedgerCacheInterface const& cache)
    : cacheFilePath_(config.maybeValue<std::string>("cache.file.path"))
    , cache_(cache)
    , isAsync_(config.get<bool>("cache.file.async_save"))
{
}

LedgerCacheSaver::~LedgerCacheSaver()
{
    waitToFinish();
}

void
LedgerCacheSaver::save()
{
    ASSERT(not savingThread_.has_value(), "Multiple save() calls are not allowed");
    savingThread_ = std::thread([this]() {
        if (not cacheFilePath_.has_value()) {
            return;
        }

        LOG(util::LogService::info()) << "Saving ledger cache to " << *cacheFilePath_;
        if (auto const [success, durationMs] = util::timed([&]() { return cache_.get().saveToFile(*cacheFilePath_); });
            success.has_value()) {
            LOG(util::LogService::info()) << "Successfully saved ledger cache in " << durationMs << " ms";
        } else {
            LOG(util::LogService::error()) << "Error saving LedgerCache to file: " << success.error();
        }
    });
    if (not isAsync_) {
        waitToFinish();
    }
}

void
LedgerCacheSaver::waitToFinish()
{
    if (savingThread_.has_value() and savingThread_->joinable()) {
        savingThread_->join();
    }
    savingThread_.reset();
}

} // namespace data
src/data/LedgerCacheSaver.hpp (new file, 94 lines)
@@ -0,0 +1,94 @@
//------------------------------------------------------------------------------
/*
    This file is part of clio: https://github.com/XRPLF/clio
    Copyright (c) 2025, the clio developers.

    Permission to use, copy, modify, and distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#pragma once

#include "data/LedgerCacheInterface.hpp"
#include "util/config/ConfigDefinition.hpp"

#include <concepts>
#include <functional>
#include <optional>
#include <string>
#include <thread>

namespace data {

/**
 * @brief A concept for a class that can save ledger cache asynchronously.
 *
 * This concept defines the interface requirements for any type that manages
 * asynchronous saving of ledger cache to persistent storage.
 */
template <typename T>
concept SomeLedgerCacheSaver = requires(T a) {
    { a.save() } -> std::same_as<void>;
    { a.waitToFinish() } -> std::same_as<void>;
};

/**
 * @brief Manages asynchronous saving of ledger cache to a file.
 *
 * This class provides functionality to save the ledger cache to a file in a separate thread,
 * allowing the main application to continue without blocking. The file path is configured
 * through the application's configuration system.
 */
class LedgerCacheSaver {
    std::optional<std::string> cacheFilePath_;
    std::reference_wrapper<LedgerCacheInterface const> cache_;
    std::optional<std::thread> savingThread_;
    bool isAsync_;

public:
    /**
     * @brief Constructs a LedgerCacheSaver instance.
     *
     * @param config The configuration object containing the cache file path setting
     * @param cache Reference to the ledger cache interface to be saved
     */
    LedgerCacheSaver(util::config::ClioConfigDefinition const& config, LedgerCacheInterface const& cache);

    /**
     * @brief Destructor that ensures the saving thread is properly joined.
     *
     * Waits for any ongoing save operation to complete before destruction.
     */
    ~LedgerCacheSaver();

    /**
     * @brief Initiates an asynchronous save operation of the ledger cache.
     *
     * Spawns a new thread that saves the ledger cache to the configured file path.
     * If no file path is configured, the operation is skipped. Logs the progress
     * and result of the save operation.
     */
    void
    save();

    /**
     * @brief Waits for the saving thread to complete.
     *
     * Blocks until the saving operation finishes if a thread is currently active.
     * Safe to call multiple times or when no save operation is in progress.
     */
    void
    waitToFinish();
};

} // namespace data
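How the saver is meant to be wired up according to the documentation above, sketched with the config keys that the constructor in LedgerCacheSaver.cpp actually reads ("cache.file.path" and "cache.file.async_save"); the surrounding shutdown hook is assumed, not part of clio.

#include "data/LedgerCache.hpp"
#include "data/LedgerCacheSaver.hpp"
#include "util/config/ConfigDefinition.hpp"

void
onShutdown(util::config::ClioConfigDefinition const& config, data::LedgerCache const& cache)
{
    // The constructor reads "cache.file.path" and "cache.file.async_save" from the config.
    data::LedgerCacheSaver saver{config, cache};

    saver.save();          // spawns the saving thread; a no-op if no path is configured
    saver.waitToFinish();  // optional here, since the destructor joins the thread as well
}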
@@ -247,6 +247,9 @@ struct MPTHoldersAndCursor {
 struct LedgerRange {
     std::uint32_t minSequence = 0;
     std::uint32_t maxSequence = 0;
+
+    bool
+    operator==(LedgerRange const&) const = default;
 };

 /**
src/data/impl/InputFile.cpp (new file, 58 lines)
@@ -0,0 +1,58 @@
//------------------------------------------------------------------------------
/*
    This file is part of clio: https://github.com/XRPLF/clio
    Copyright (c) 2025, the clio developers.

    Permission to use, copy, modify, and distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#include "data/impl/InputFile.hpp"

#include <xrpl/basics/base_uint.h>

#include <cstddef>
#include <cstring>
#include <ios>
#include <iosfwd>
#include <string>
#include <utility>

namespace data::impl {

InputFile::InputFile(std::string const& path) : file_(path, std::ios::binary | std::ios::in)
{
}

bool
InputFile::isOpen() const
{
    return file_.is_open();
}

bool
InputFile::readRaw(char* data, size_t size)
{
    file_.read(data, size);
    shasum_.update(data, size);
    return not file_.fail();
}

ripple::uint256
InputFile::hash() const
{
    auto sum = shasum_;
    return std::move(sum).finalize();
}

} // namespace data::impl
src/data/impl/InputFile.hpp (new file, 57 lines)
@@ -0,0 +1,57 @@
//------------------------------------------------------------------------------
/*
    This file is part of clio: https://github.com/XRPLF/clio
    Copyright (c) 2025, the clio developers.

    Permission to use, copy, modify, and distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#pragma once

#include "util/Shasum.hpp"

#include <xrpl/basics/base_uint.h>

#include <cstddef>
#include <cstring>
#include <fstream>
#include <iosfwd>
#include <string>

namespace data::impl {

class InputFile {
    std::ifstream file_;
    util::Sha256sum shasum_;

public:
    InputFile(std::string const& path);

    bool
    isOpen() const;

    template <typename T>
    bool
    read(T& t)
    {
        return readRaw(reinterpret_cast<char*>(&t), sizeof(T));
    }

    bool
    readRaw(char* data, size_t size);

    ripple::uint256
    hash() const;
};

} // namespace data::impl
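A sketch of reading a small fixed-size record through InputFile as declared above; the Header struct and the integrity check at the end are hypothetical illustrations, not clio code (the real consumer is presumably LedgerCacheFile).

#include "data/impl/InputFile.hpp"

#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical fixed-size record used only for this illustration.
struct Header {
    std::uint32_t version;
    std::uint32_t latestSeq;
};

void
readHeader(std::string const& path)
{
    data::impl::InputFile in{path};
    if (not in.isOpen()) {
        std::cerr << "cannot open " << path << '\n';
        return;
    }

    Header header{};
    if (not in.read(header)) {  // read<T>() forwards to readRaw() and feeds the running checksum
        std::cerr << "short read\n";
        return;
    }

    std::cout << "version " << header.version << ", latestSeq " << header.latestSeq << '\n';

    auto const checksum = in.hash();  // SHA-256 of everything read so far
    (void)checksum;                   // e.g. compare against a digest stored at the end of the file
}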
Some files were not shown because too many files have changed in this diff.