Compare commits

1 commit

Author: godexsoft
SHA1: 038b2d2f35
Message: [CI] clang-tidy auto fixes
Date: 2025-05-07 09:37:45 +00:00
538 changed files with 11596 additions and 20193 deletions

@@ -25,5 +25,5 @@ runs:
  cd build
  cmake \
  --build . \
- --parallel "${{ steps.number_of_threads.outputs.threads_number }}" \
+ --parallel ${{ steps.number_of_threads.outputs.threads_number }} \
  --target ${{ inputs.targets }}

@@ -5,6 +5,9 @@ inputs:
  images:
  description: Name of the images to use as a base name
  required: true
+ dockerhub_repo:
+ description: DockerHub repository name
+ required: true
  push_image:
  description: Whether to push the image to the registry (true/false)
  required: true
@@ -17,22 +20,15 @@ inputs:
  platforms:
  description: Platforms to build the image for (e.g. linux/amd64,linux/arm64)
  required: true
- build_args:
- description: List of build-time variables
- required: false
- dockerhub_repo:
- description: DockerHub repository name
- required: false
- dockerhub_description:
+ description:
  description: Short description of the image
- required: false
+ required: true
  runs:
  using: composite
  steps:
  - name: Login to DockerHub
- if: ${{ inputs.push_image == 'true' && inputs.dockerhub_repo != '' }}
+ if: ${{ inputs.push_image == 'true' }}
  uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
  with:
  username: ${{ env.DOCKERHUB_USER }}
@@ -49,7 +45,7 @@ runs:
  - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
  with:
  cache-image: false
- - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+ - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
  - uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
  id: meta
@@ -58,10 +54,19 @@ runs:
  tags: ${{ inputs.tags }}
  - name: Build and push
- uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+ uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
  with:
  context: ${{ inputs.directory }}
  platforms: ${{ inputs.platforms }}
  push: ${{ inputs.push_image == 'true' }}
  tags: ${{ steps.meta.outputs.tags }}
- build-args: ${{ inputs.build_args }}
+ - name: Update DockerHub description
+ if: ${{ inputs.push_image == 'true' }}
+ uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
+ with:
+ username: ${{ env.DOCKERHUB_USER }}
+ password: ${{ env.DOCKERHUB_PW }}
+ repository: ${{ inputs.dockerhub_repo }}
+ short-description: ${{ inputs.description }}
+ readme-filepath: ${{ inputs.directory }}/README.md

@@ -15,7 +15,7 @@ inputs:
  assignees:
  description: Comma-separated list of assignees
  required: true
- default: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
+ default: "godexsoft,kuznetsss,PeterChen13579"
  outputs:
  created_issue_id:
@@ -36,6 +36,6 @@ runs:
  --title '${{ inputs.title }}' \
  --body-file ./issue.md \
  > create_issue.log
- created_issue="$(sed 's|.*/||' create_issue.log)"
+ created_issue=$(cat create_issue.log | sed 's|.*/||')
  echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
  rm create_issue.log issue.md

@@ -5,8 +5,8 @@ inputs:
  conan_profile:
  description: Conan profile name
  required: true
- force_conan_source_build:
- description: Whether conan should build all dependencies from source
+ conan_cache_hit:
+ description: Whether conan cache has been downloaded
  required: true
  default: "false"
  build_type:
@@ -25,6 +25,15 @@ inputs:
  description: Whether Clio is to be statically linked
  required: true
  default: "false"
+ sanitizer:
+ description: Sanitizer to use
+ required: true
+ default: "false"
+ choices:
+ - "false"
+ - "tsan"
+ - "asan"
+ - "ubsan"
  time_trace:
  description: Whether to enable compiler trace reports
  required: true
@@ -40,7 +49,7 @@ runs:
  - name: Run conan
  shell: bash
  env:
- CONAN_BUILD_OPTION: "${{ inputs.force_conan_source_build == 'true' && '*' || 'missing' }}"
+ BUILD_OPTION: "${{ inputs.conan_cache_hit == 'true' && 'missing' || '' }}"
  CODE_COVERAGE: "${{ inputs.code_coverage == 'true' && 'True' || 'False' }}"
  STATIC_OPTION: "${{ inputs.static == 'true' && 'True' || 'False' }}"
  INTEGRATION_TESTS_OPTION: "${{ inputs.build_integration_tests == 'true' && 'True' || 'False' }}"
@@ -50,30 +59,30 @@ runs:
  conan \
  install .. \
  -of . \
- -b "$CONAN_BUILD_OPTION" \
- -s "build_type=${{ inputs.build_type }}" \
- -o "&:static=${STATIC_OPTION}" \
- -o "&:tests=True" \
- -o "&:integration_tests=${INTEGRATION_TESTS_OPTION}" \
- -o "&:lint=False" \
- -o "&:coverage=${CODE_COVERAGE}" \
- -o "&:time_trace=${TIME_TRACE}" \
- --profile:all "${{ inputs.conan_profile }}"
+ -b $BUILD_OPTION \
+ -s build_type=${{ inputs.build_type }} \
+ -o clio:static="${STATIC_OPTION}" \
+ -o clio:tests=True \
+ -o clio:integration_tests="${INTEGRATION_TESTS_OPTION}" \
+ -o clio:lint=False \
+ -o clio:coverage="${CODE_COVERAGE}" \
+ -o clio:time_trace="${TIME_TRACE}" \
+ --profile ${{ inputs.conan_profile }}
  - name: Run cmake
  shell: bash
  env:
  BUILD_TYPE: "${{ inputs.build_type }}"
- SANITIZER_OPTION: |-
- ${{ endsWith(inputs.conan_profile, '.asan') && '-Dsan=address' ||
- endsWith(inputs.conan_profile, '.tsan') && '-Dsan=thread' ||
- endsWith(inputs.conan_profile, '.ubsan') && '-Dsan=undefined' ||
+ SANITIZER_OPTION: |
+ ${{ inputs.sanitizer == 'tsan' && '-Dsan=thread' ||
+ inputs.sanitizer == 'ubsan' && '-Dsan=undefined' ||
+ inputs.sanitizer == 'asan' && '-Dsan=address' ||
  '' }}
  run: |
  cd build
  cmake \
  -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
  -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
- "${SANITIZER_OPTION}" \
+ ${SANITIZER_OPTION} \
  .. \
  -G Ninja

@@ -30,7 +30,7 @@ runs:
  id: number_of_threads_export
  shell: bash
  run: |
- num_of_threads="${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}"
- shift_by="${{ inputs.subtract_threads }}"
- shifted="$((num_of_threads - shift_by))"
+ num_of_threads=${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}
+ shift_by=${{ inputs.subtract_threads }}
+ shifted=$((num_of_threads - shift_by))
  echo "num=$(( shifted > 1 ? shifted : 1 ))" >> $GITHUB_OUTPUT

@@ -13,4 +13,4 @@ runs:
  id: find_common_ancestor
  shell: bash
  run: |
- echo "commit=\"$(git merge-base --fork-point origin/develop)\"" >> $GITHUB_OUTPUT
+ echo "commit=$(git merge-base --fork-point origin/develop)" >> $GITHUB_OUTPUT

@@ -13,56 +13,56 @@ runs:
  if: ${{ runner.os == 'macOS' }}
  shell: bash
  run: |
- brew install --quiet \
+ brew install \
  bison \
  ca-certificates \
  ccache \
  clang-build-analyzer \
- conan \
+ conan@1 \
  gh \
  jq \
  llvm@14 \
  ninja \
  pkg-config
- echo "/opt/homebrew/opt/conan@2/bin" >> $GITHUB_PATH
+ echo "/opt/homebrew/opt/conan@1/bin" >> $GITHUB_PATH
  - name: Install CMake 3.31.6 on mac
  if: ${{ runner.os == 'macOS' }}
  shell: bash
  run: |
  # Uninstall any existing cmake
- brew uninstall --formula cmake --ignore-dependencies || true
+ brew uninstall cmake --ignore-dependencies || true
  # Download specific cmake formula
  FORMULA_URL="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e46db74e74a8c1650b38b1da222284ce1ec5ce/Formula/c/cmake.rb"
  FORMULA_EXPECTED_SHA256="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"
  mkdir -p /tmp/homebrew-formula
- curl -s -L "$FORMULA_URL" -o /tmp/homebrew-formula/cmake.rb
+ curl -s -L $FORMULA_URL -o /tmp/homebrew-formula/cmake.rb
  echo "$FORMULA_EXPECTED_SHA256 /tmp/homebrew-formula/cmake.rb" | shasum -a 256 -c
  # Install cmake from the specific formula with force flag
- brew install --formula --quiet --force /tmp/homebrew-formula/cmake.rb
+ brew install --formula --force /tmp/homebrew-formula/cmake.rb
  - name: Fix git permissions on Linux
  if: ${{ runner.os == 'Linux' }}
  shell: bash
- run: git config --global --add safe.directory "$PWD"
+ run: git config --global --add safe.directory $PWD
  - name: Set env variables for macOS
  if: ${{ runner.os == 'macOS' }}
  shell: bash
  run: |
  echo "CCACHE_DIR=${{ github.workspace }}/.ccache" >> $GITHUB_ENV
- echo "CONAN_HOME=${{ github.workspace }}/.conan2" >> $GITHUB_ENV
+ echo "CONAN_USER_HOME=${{ github.workspace }}" >> $GITHUB_ENV
  - name: Set env variables for Linux
  if: ${{ runner.os == 'Linux' }}
  shell: bash
  run: |
  echo "CCACHE_DIR=/root/.ccache" >> $GITHUB_ENV
- echo "CONAN_HOME=/root/.conan2" >> $GITHUB_ENV
+ echo "CONAN_USER_HOME=/root/" >> $GITHUB_ENV
  - name: Set CCACHE_DISABLE=1
  if: ${{ inputs.disable_ccache == 'true' }}
@@ -73,5 +73,5 @@ runs:
  - name: Create directories
  shell: bash
  run: |
- mkdir -p "$CCACHE_DIR"
- mkdir -p "$CONAN_HOME"
+ mkdir -p $CCACHE_DIR
+ mkdir -p $CONAN_USER_HOME/.conan

@@ -1,7 +1,10 @@
  name: Restore cache
- description: Find and restores ccache cache
+ description: Find and restores conan and ccache cache
  inputs:
+ conan_dir:
+ description: Path to .conan directory
+ required: true
  conan_profile:
  description: Conan profile name
  required: true
@@ -16,8 +19,13 @@ inputs:
  description: Whether code coverage is on
  required: true
  default: "false"
  outputs:
+ conan_hash:
+ description: Hash to use as a part of conan cache key
+ value: ${{ steps.conan_hash.outputs.hash }}
+ conan_cache_hit:
+ description: True if conan cache has been downloaded
+ value: ${{ steps.conan_cache.outputs.cache-hit }}
  ccache_cache_hit:
  description: True if ccache cache has been downloaded
  value: ${{ steps.ccache_cache.outputs.cache-hit }}
@@ -29,6 +37,24 @@ runs:
  id: git_common_ancestor
  uses: ./.github/actions/git_common_ancestor
+ - name: Calculate conan hash
+ id: conan_hash
+ shell: bash
+ run: |
+ conan info . -j info.json -o clio:tests=True
+ packages_info=$(cat info.json | jq '.[] | "\(.display_name): \(.id)"' | grep -v 'clio')
+ echo "$packages_info"
+ hash=$(echo "$packages_info" | shasum -a 256 | cut -d ' ' -f 1)
+ rm info.json
+ echo "hash=$hash" >> $GITHUB_OUTPUT
+ - name: Restore conan cache
+ uses: actions/cache/restore@v4
+ id: conan_cache
+ with:
+ path: ${{ inputs.conan_dir }}/data
+ key: clio-conan_data-${{ runner.os }}-${{ inputs.build_type }}-${{ inputs.conan_profile }}-develop-${{ steps.conan_hash.outputs.hash }}
  - name: Restore ccache cache
  uses: actions/cache/restore@v4
  id: ccache_cache

@@ -1,13 +1,27 @@
  name: Save cache
- description: Save ccache cache for develop branch
+ description: Save conan and ccache cache for develop branch
  inputs:
+ conan_dir:
+ description: Path to .conan directory
+ required: true
  conan_profile:
  description: Conan profile name
  required: true
+ conan_hash:
+ description: Hash to use as a part of conan cache key
+ required: true
+ conan_cache_hit:
+ description: Whether conan cache has been downloaded
+ required: true
  ccache_dir:
  description: Path to .ccache directory
  required: true
+ ccache_cache_hit:
+ description: Whether conan cache has been downloaded
+ required: true
+ ccache_cache_miss_rate:
+ description: How many cache misses happened
  build_type:
  description: Current build type (e.g. Release, Debug)
  required: true
@@ -17,12 +31,6 @@ inputs:
  required: true
  default: "false"
- ccache_cache_hit:
- description: Whether ccache cache has been downloaded
- required: true
- ccache_cache_miss_rate:
- description: How many ccache cache misses happened
  runs:
  using: composite
  steps:
@@ -30,6 +38,19 @@ runs:
  id: git_common_ancestor
  uses: ./.github/actions/git_common_ancestor
+ - name: Cleanup conan directory from extra data
+ if: ${{ inputs.conan_cache_hit != 'true' }}
+ shell: bash
+ run: |
+ conan remove "*" -s -b -f
+ - name: Save conan cache
+ if: ${{ inputs.conan_cache_hit != 'true' }}
+ uses: actions/cache/save@v4
+ with:
+ path: ${{ inputs.conan_dir }}/data
+ key: clio-conan_data-${{ runner.os }}-${{ inputs.build_type }}-${{ inputs.conan_profile }}-develop-${{ inputs.conan_hash }}
  - name: Save ccache cache
  if: ${{ inputs.ccache_cache_hit != 'true' || inputs.ccache_cache_miss_rate == '100.0' }}
  uses: actions/cache/save@v4

.github/actions/setup_conan/action.yml (new file)

@@ -0,0 +1,33 @@
+ name: Setup conan
+ description: Setup conan profile and artifactory
+ inputs:
+ conan_profile:
+ description: Conan profile name
+ required: true
+ runs:
+ using: composite
+ steps:
+ - name: Create conan profile on macOS
+ if: ${{ runner.os == 'macOS' }}
+ shell: bash
+ env:
+ CONAN_PROFILE: ${{ inputs.conan_profile }}
+ run: |
+ echo "Creating $CONAN_PROFILE conan profile"
+ conan profile new $CONAN_PROFILE --detect --force
+ conan profile update settings.compiler.libcxx=libc++ $CONAN_PROFILE
+ conan profile update settings.compiler.cppstd=20 $CONAN_PROFILE
+ conan profile update env.CXXFLAGS=-DBOOST_ASIO_DISABLE_CONCEPTS $CONAN_PROFILE
+ conan profile update "conf.tools.build:cxxflags+=[\"-DBOOST_ASIO_DISABLE_CONCEPTS\"]" $CONAN_PROFILE
+ - name: Add conan-non-prod artifactory
+ shell: bash
+ run: |
+ if [[ -z $(conan remote list | grep conan-non-prod) ]]; then
+ echo "Adding conan-non-prod"
+ conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
+ else
+ echo "Conan-non-prod is available"
+ fi

@@ -142,3 +142,16 @@ updates:
  commit-message:
  prefix: "ci: [DEPENDABOT] "
  target-branch: develop
+ - package-ecosystem: github-actions
+ directory: .github/actions/setup_conan/
+ schedule:
+ interval: weekly
+ day: monday
+ time: "04:00"
+ timezone: Etc/GMT
+ reviewers:
+ - XRPLF/clio-dev-team
+ commit-message:
+ prefix: "ci: [DEPENDABOT] "
+ target-branch: develop

@@ -1,8 +0,0 @@
- [settings]
- arch={{detect_api.detect_arch()}}
- build_type=Release
- compiler=apple-clang
- compiler.cppstd=20
- compiler.libcxx=libc++
- compiler.version=16
- os=Macos

@@ -1,11 +0,0 @@
- [settings]
- arch={{detect_api.detect_arch()}}
- build_type=Release
- compiler=apple-clang
- compiler.cppstd=20
- compiler.libcxx=libc++
- compiler.version=17
- os=Macos
- [conf]
- grpc/1.50.1:tools.build:cxxflags+=["-Wno-missing-template-arg-list-after-template-kw"]

@@ -1,39 +0,0 @@
- #!/usr/bin/env python3
- import itertools
- import json
- LINUX_OS = ["heavy", "heavy-arm64"]
- LINUX_CONTAINERS = ['{ "image": "ghcr.io/xrplf/clio-ci:latest" }']
- LINUX_COMPILERS = ["gcc", "clang"]
- MACOS_OS = ["macos15"]
- MACOS_CONTAINERS = [""]
- MACOS_COMPILERS = ["apple-clang"]
- BUILD_TYPES = ["Release", "Debug"]
- SANITIZER_EXT = [".asan", ".tsan", ".ubsan", ""]
- def generate_matrix():
- configurations = []
- for os, container, compiler in itertools.chain(
- itertools.product(LINUX_OS, LINUX_CONTAINERS, LINUX_COMPILERS),
- itertools.product(MACOS_OS, MACOS_CONTAINERS, MACOS_COMPILERS),
- ):
- for sanitizer_ext, build_type in itertools.product(SANITIZER_EXT, BUILD_TYPES):
- configurations.append(
- {
- "os": os,
- "container": container,
- "compiler": compiler,
- "sanitizer_ext": sanitizer_ext,
- "build_type": build_type,
- }
- )
- return {"include": configurations}
- if __name__ == "__main__":
- print(f"matrix={json.dumps(generate_matrix())}")

@@ -1,47 +0,0 @@
- #!/bin/bash
- set -ex
- CURRENT_DIR="$(cd "$(dirname "$0")" && pwd)"
- REPO_DIR="$(cd "$CURRENT_DIR/../../../" && pwd)"
- CONAN_DIR="${CONAN_HOME:-$HOME/.conan2}"
- PROFILES_DIR="$CONAN_DIR/profiles"
- if [[ -z "$CI" ]]; then
- APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-local.profile"
- else
- APPLE_CLANG_PROFILE="$CURRENT_DIR/apple-clang-ci.profile"
- fi
- GCC_PROFILE="$REPO_DIR/docker/ci/conan/gcc.profile"
- CLANG_PROFILE="$REPO_DIR/docker/ci/conan/clang.profile"
- SANITIZER_TEMPLATE_FILE="$REPO_DIR/docker/ci/conan/sanitizer_template.profile"
- rm -rf "$CONAN_DIR"
- conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev
- cp "$REPO_DIR/docker/ci/conan/global.conf" "$CONAN_DIR/global.conf"
- create_profile_with_sanitizers() {
- profile_name="$1"
- profile_source="$2"
- cp "$profile_source" "$PROFILES_DIR/$profile_name"
- cp "$SANITIZER_TEMPLATE_FILE" "$PROFILES_DIR/$profile_name.asan"
- cp "$SANITIZER_TEMPLATE_FILE" "$PROFILES_DIR/$profile_name.tsan"
- cp "$SANITIZER_TEMPLATE_FILE" "$PROFILES_DIR/$profile_name.ubsan"
- }
- mkdir -p "$PROFILES_DIR"
- if [[ "$(uname)" == "Darwin" ]]; then
- create_profile_with_sanitizers "apple-clang" "$APPLE_CLANG_PROFILE"
- echo "include(apple-clang)" > "$PROFILES_DIR/default"
- else
- create_profile_with_sanitizers "clang" "$CLANG_PROFILE"
- create_profile_with_sanitizers "gcc" "$GCC_PROFILE"
- echo "include(gcc)" > "$PROFILES_DIR/default"
- fi

@@ -1,24 +0,0 @@
- #!/bin/bash
- set -ex -o pipefail
- BINARY_NAME="clio_server"
- ARTIFACTS_DIR="$1"
- if [ -z "${ARTIFACTS_DIR}" ]; then
- echo "Usage: $0 <artifacts_directory>"
- exit 1
- fi
- cd "${ARTIFACTS_DIR}" || exit 1
- for artifact_name in $(ls); do
- pushd "${artifact_name}" || exit 1
- zip -r "../${artifact_name}.zip" ./${BINARY_NAME}
- popd || exit 1
- rm "${artifact_name}/${BINARY_NAME}"
- rm -r "${artifact_name}"
- sha256sum "./${artifact_name}.zip" > "./${artifact_name}.zip.sha256sum"
- done

.github/scripts/update-libxrpl-version (new executable file)

@@ -0,0 +1,28 @@
+ #!/bin/bash
+ # Note: This script is intended to be run from the root of the repository.
+ #
+ # This script modifies conanfile.py such that the specified version of libXRPL is used.
+ if [[ -z "$1" ]]; then
+ cat <<EOF
+ ERROR
+ -----------------------------------------------------------------------------
+ Version should be passed as first argument to the script.
+ -----------------------------------------------------------------------------
+ EOF
+ exit 1
+ fi
+ VERSION=$1
+ GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false)
+ echo "+ Updating required libXRPL version to $VERSION"
+ if [[ "$GNU_SED" == "false" ]]; then
+ sed -i '' -E "s|'xrpl/[a-zA-Z0-9\\.\\-]+'|'xrpl/$VERSION'|g" conanfile.py
+ else
+ sed -i -E "s|'xrpl/[a-zA-Z0-9\\.\\-]+'|'xrpl/$VERSION'|g" conanfile.py
+ fi

@@ -2,29 +2,9 @@ name: Build
  on:
  push:
- branches: [release/*, develop]
+ branches: [master, release/*, develop]
  pull_request:
- branches: [release/*, develop]
- paths:
- - .github/workflows/build.yml
- - .github/workflows/build_and_test.yml
- - .github/workflows/build_impl.yml
- - .github/workflows/test_impl.yml
- - .github/workflows/upload_coverage_report.yml
- - ".github/actions/**"
- - "!.github/actions/build_docker_image/**"
- - "!.github/actions/create_issue/**"
- - CMakeLists.txt
- - conanfile.py
- - conan.lock
- - "cmake/**"
- - "src/**"
- - "tests/**"
- - docs/config-description.md
+ branches: [master, release/*, develop]
  workflow_dispatch:
  concurrency:
@@ -47,7 +27,7 @@ jobs:
  include:
  - os: macos15
- conan_profile: apple-clang
+ conan_profile: default_apple_clang
  build_type: Release
  container: ""
  static: false
@@ -77,6 +57,7 @@ jobs:
  static: true
  upload_clio_server: false
  targets: all
+ sanitizer: "false"
  analyze_build_time: false
  secrets:
  CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -99,9 +80,23 @@ jobs:
  shell: bash
  run: |
  repoConfigFile=docs/config-description.md
- configDescriptionFile=config_description_new.md
+ if ! [ -f ${repoConfigFile} ]; then
+ echo "Config Description markdown file is missing in docs folder"
+ exit 1
+ fi
  chmod +x ./clio_server
- ./clio_server -d "${configDescriptionFile}"
- diff -u "${repoConfigFile}" "${configDescriptionFile}"
+ configDescriptionFile=config_description_new.md
+ ./clio_server -d ${configDescriptionFile}
+ configDescriptionHash=$(sha256sum ${configDescriptionFile} | cut -d' ' -f1)
+ repoConfigHash=$(sha256sum ${repoConfigFile} | cut -d' ' -f1)
+ if [ ${configDescriptionHash} != ${repoConfigHash} ]; then
+ echo "Markdown file is not up to date"
+ diff -u "${repoConfigFile}" "${configDescriptionFile}"
+ rm -f ${configDescriptionFile}
+ exit 1
+ fi
+ rm -f ${configDescriptionFile}
+ exit 0

@@ -24,7 +24,7 @@ on:
  type: string
  disable_cache:
- description: Whether ccache should be disabled
+ description: Whether ccache and conan cache should be disabled
  required: false
  type: boolean
  default: false
@@ -57,6 +57,12 @@ on:
  type: string
  default: all
+ sanitizer:
+ description: Sanitizer to use
+ required: false
+ type: string
+ default: "false"
  jobs:
  build:
  uses: ./.github/workflows/build_impl.yml
@@ -70,6 +76,7 @@ jobs:
  static: ${{ inputs.static }}
  upload_clio_server: ${{ inputs.upload_clio_server }}
  targets: ${{ inputs.targets }}
+ sanitizer: ${{ inputs.sanitizer }}
  analyze_build_time: false
  test:
@@ -82,3 +89,4 @@ jobs:
  build_type: ${{ inputs.build_type }}
  run_unit_tests: ${{ inputs.run_unit_tests }}
  run_integration_tests: ${{ inputs.run_integration_tests }}
+ sanitizer: ${{ inputs.sanitizer }}

@@ -57,7 +57,7 @@ jobs:
  if: ${{ inputs.clio_server_binary_url != null }}
  shell: bash
  run: |
- wget "${{inputs.clio_server_binary_url}}" -P ./docker/clio/artifact/
+ wget ${{inputs.clio_server_binary_url}} -P ./docker/clio/artifact/
  if [ "$(sha256sum ./docker/clio/clio_server | awk '{print $1}')" != "${{inputs.binary_sha256}}" ]; then
  echo "Binary sha256 sum doesn't match"
  exit 1
@@ -85,16 +85,16 @@ jobs:
  - name: Build Docker image
  uses: ./.github/actions/build_docker_image
  env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
  DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
  images: |
- ghcr.io/xrplf/clio
  rippleci/clio
+ ghcr.io/xrplf/clio
+ dockerhub_repo: rippleci/clio
  push_image: ${{ inputs.publish_image }}
  directory: docker/clio
  tags: ${{ inputs.tags }}
  platforms: linux/amd64
- dockerhub_repo: rippleci/clio
- dockerhub_description: Clio is an XRP Ledger API server.
+ description: Clio is an XRP Ledger API server.

@@ -24,7 +24,7 @@ on:
  type: string
  disable_cache:
- description: Whether ccache should be disabled
+ description: Whether ccache and conan cache should be disabled
  required: false
  type: boolean
@@ -48,6 +48,11 @@ on:
  required: true
  type: string
+ sanitizer:
+ description: Sanitizer to use
+ required: true
+ type: string
  analyze_build_time:
  description: Whether to enable build time analysis
  required: true
@@ -59,7 +64,7 @@ on:
  jobs:
  build:
- name: Build
+ name: Build ${{ inputs.container != '' && 'in container' || 'natively' }}
  runs-on: ${{ inputs.runs_on }}
  container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
@@ -77,16 +82,17 @@ jobs:
  with:
  disable_ccache: ${{ inputs.disable_cache }}
- - name: Setup conan on macOS
- if: runner.os == 'macOS'
- shell: bash
- run: ./.github/scripts/conan/init.sh
+ - name: Setup conan
+ uses: ./.github/actions/setup_conan
+ with:
+ conan_profile: ${{ inputs.conan_profile }}
  - name: Restore cache
  if: ${{ !inputs.disable_cache }}
  uses: ./.github/actions/restore_cache
  id: restore_cache
  with:
+ conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
  conan_profile: ${{ inputs.conan_profile }}
  ccache_dir: ${{ env.CCACHE_DIR }}
  build_type: ${{ inputs.build_type }}
@@ -96,9 +102,11 @@ jobs:
  uses: ./.github/actions/generate
  with:
  conan_profile: ${{ inputs.conan_profile }}
+ conan_cache_hit: ${{ !inputs.disable_cache && steps.restore_cache.outputs.conan_cache_hit }}
  build_type: ${{ inputs.build_type }}
  code_coverage: ${{ inputs.code_coverage }}
  static: ${{ inputs.static }}
+ sanitizer: ${{ inputs.sanitizer }}
  time_trace: ${{ inputs.analyze_build_time }}
  - name: Build Clio
@@ -132,11 +140,11 @@ jobs:
  cat /tmp/ccache.stats
  - name: Strip unit_tests
- if: ${{ !endsWith(inputs.conan_profile, 'san') && !inputs.code_coverage && !inputs.analyze_build_time }}
+ if: inputs.sanitizer == 'false' && !inputs.code_coverage && !inputs.analyze_build_time
  run: strip build/clio_tests
  - name: Strip integration_tests
- if: ${{ !endsWith(inputs.conan_profile, 'san') && !inputs.code_coverage && !inputs.analyze_build_time }}
+ if: inputs.sanitizer == 'false' && !inputs.code_coverage && !inputs.analyze_build_time
  run: strip build/clio_integration_tests
  - name: Upload clio_server
@@ -164,13 +172,15 @@ jobs:
  if: ${{ !inputs.disable_cache && github.ref == 'refs/heads/develop' }}
  uses: ./.github/actions/save_cache
  with:
- conan_profile: ${{ inputs.conan_profile }}
+ conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
+ conan_hash: ${{ steps.restore_cache.outputs.conan_hash }}
+ conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
  ccache_dir: ${{ env.CCACHE_DIR }}
- build_type: ${{ inputs.build_type }}
- code_coverage: ${{ inputs.code_coverage }}
  ccache_cache_hit: ${{ steps.restore_cache.outputs.ccache_cache_hit }}
  ccache_cache_miss_rate: ${{ steps.ccache_stats.outputs.miss_rate }}
+ build_type: ${{ inputs.build_type }}
+ code_coverage: ${{ inputs.code_coverage }}
+ conan_profile: ${{ inputs.conan_profile }}
  # This is run as part of the build job, because it requires the following:
  # - source code

@@ -15,7 +15,7 @@ env:
  jobs:
  build:
  name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
- runs-on: heavy
+ runs-on: [self-hosted, heavy]
  container:
  image: ghcr.io/xrplf/clio-ci:latest
@@ -27,23 +27,24 @@ jobs:
  - name: Update libXRPL version requirement
  shell: bash
  run: |
- sed -i.bak -E "s|'xrpl/[a-zA-Z0-9\\.\\-]+'|'xrpl/${{ github.event.client_payload.version }}'|g" conanfile.py
- rm -f conanfile.py.bak
- - name: Update conan lockfile
- shell: bash
- run: |
- conan lock create . -o '&:tests=True' -o '&:benchmark=True'
+ ./.github/scripts/update-libxrpl-version ${{ github.event.client_payload.version }}
  - name: Prepare runner
  uses: ./.github/actions/prepare_runner
  with:
  disable_ccache: true
+ - name: Setup conan
+ uses: ./.github/actions/setup_conan
+ with:
+ conan_profile: ${{ env.CONAN_PROFILE }}
  - name: Run conan and cmake
  uses: ./.github/actions/generate
  with:
  conan_profile: ${{ env.CONAN_PROFILE }}
+ conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
+ build_type: Release
  - name: Build Clio
  uses: ./.github/actions/build_clio
@@ -60,7 +61,7 @@ jobs:
  run_tests:
  name: Run tests
  needs: build
- runs-on: heavy
+ runs-on: [self-hosted, heavy]
  container:
  image: ghcr.io/xrplf/clio-ci:latest

@@ -7,9 +7,8 @@ on:
  pull_request:
  branches: [develop]
  paths:
- - .github/workflows/clang-tidy.yml
  - .clang_tidy
+ - .github/workflows/clang-tidy.yml
  concurrency:
  # Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
@@ -18,7 +17,6 @@ concurrency:
  env:
  CONAN_PROFILE: clang
- LLVM_TOOLS_VERSION: 20
  jobs:
  clang_tidy:
@@ -41,17 +39,25 @@ jobs:
  with:
  disable_ccache: true
+ - name: Setup conan
+ uses: ./.github/actions/setup_conan
+ with:
+ conan_profile: ${{ env.CONAN_PROFILE }}
  - name: Restore cache
  uses: ./.github/actions/restore_cache
  id: restore_cache
  with:
- conan_profile: ${{ env.CONAN_PROFILE }}
+ conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
  ccache_dir: ${{ env.CCACHE_DIR }}
+ conan_profile: ${{ env.CONAN_PROFILE }}
  - name: Run conan and cmake
  uses: ./.github/actions/generate
  with:
  conan_profile: ${{ env.CONAN_PROFILE }}
+ conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
+ build_type: Release
  - name: Get number of threads
  uses: ./.github/actions/get_number_of_threads
@@ -62,14 +68,13 @@ jobs:
  shell: bash
  id: run_clang_tidy
  run: |
- run-clang-tidy-${{ env.LLVM_TOOLS_VERSION }} -p build -j "${{ steps.number_of_threads.outputs.threads_number }}" -fix -quiet 1>output.txt
+ run-clang-tidy-19 -p build -j ${{ steps.number_of_threads.outputs.threads_number }} -fix -quiet 1>output.txt
- - name: Fix local includes and clang-format style
+ - name: Fix local includes
  if: ${{ steps.run_clang_tidy.outcome != 'success' }}
- continue-on-error: true
  shell: bash
- run: |
- pre-commit run --all-files fix-local-includes || true
- pre-commit run --all-files clang-format || true
+ run: pre-commit run --all-files fix-local-includes
  - name: Print issues found
  if: ${{ steps.run_clang_tidy.outcome != 'success' }}
@@ -80,7 +85,7 @@ jobs:
  rm output.txt
  - name: Create an issue
- if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
+ if: ${{ steps.run_clang_tidy.outcome != 'success' }}
  id: create_issue
  uses: ./.github/actions/create_issue
  env:
@@ -93,7 +98,7 @@ jobs:
  List of the issues found: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
  - uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0
- if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
+ if: ${{ steps.run_clang_tidy.outcome != 'success' }}
  with:
  gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
  passphrase: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
@@ -101,7 +106,7 @@ jobs:
  git_commit_gpgsign: true
  - name: Create PR with fixes
- if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
+ if: ${{ steps.run_clang_tidy.outcome != 'success' }}
  uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
  env:
  GH_REPO: ${{ github.repository }}
@@ -114,7 +119,7 @@ jobs:
  delete-branch: true
  title: "style: clang-tidy auto fixes"
  body: "Fixes #${{ steps.create_issue.outputs.created_issue_id }}. Please review and commit clang-tidy fixes."
- reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
+ reviewers: "godexsoft,kuznetsss,PeterChen13579"
  - name: Fail the job
  if: ${{ steps.run_clang_tidy.outcome != 'success' }}

@@ -18,8 +18,8 @@ jobs:
  id: check
  shell: bash
  run: |
- passed=$(if [[ "$(git log -1 --pretty=format:%s | grep 'style: clang-tidy auto fixes')" ]]; then echo 'true' ; else echo 'false' ; fi)
- echo "passed=\"$passed\"" >> $GITHUB_OUTPUT
+ passed=$(if [[ $(git log -1 --pretty=format:%s | grep 'style: clang-tidy auto fixes') ]]; then echo 'true' ; else echo 'false' ; fi)
+ echo "passed=$passed" >> $GITHUB_OUTPUT
  - name: Run clang-tidy workflow
  if: ${{ contains(steps.check.outputs.passed, 'true') }}

@@ -5,14 +5,23 @@ on:
  branches: [develop]
  workflow_dispatch:
+ permissions:
+ contents: read
+ pages: write
+ id-token: write
  concurrency:
  # Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
  jobs:
- build:
+ deploy:
+ environment:
+ name: github-pages
+ url: ${{ steps.deployment.outputs.page_url }}
  runs-on: ubuntu-latest
+ continue-on-error: true
  container:
  image: ghcr.io/xrplf/clio-ci:latest
@@ -22,21 +31,10 @@ jobs:
  with:
  lfs: true
- - name: Prepare runner
- uses: ./.github/actions/prepare_runner
- with:
- disable_ccache: true
- - name: Create build directory
- run: mkdir build_docs
- - name: Configure CMake
- working-directory: build_docs
- run: cmake ../docs
- - name: Build
- working-directory: build_docs
- run: cmake --build . --target docs
+ - name: Build docs
+ run: |
+ mkdir -p build_docs && cd build_docs
+ cmake ../docs && cmake --build . --target docs
  - name: Setup Pages
  uses: actions/configure-pages@v5
@@ -47,19 +45,6 @@ jobs:
  path: build_docs/html
  name: docs-develop
- deploy:
- needs: build
- permissions:
- pages: write
- id-token: write
- environment:
- name: github-pages
- url: ${{ steps.deployment.outputs.page_url }}
- runs-on: ubuntu-latest
- steps:
  - name: Deploy to GitHub Pages
  id: deployment
  uses: actions/deploy-pages@v4

@@ -6,17 +6,8 @@ on:
  workflow_dispatch:
  pull_request:
  paths:
- - .github/workflows/nightly.yml
- - .github/workflows/release_impl.yml
- - .github/workflows/build_and_test.yml
- - .github/workflows/build_impl.yml
- - .github/workflows/test_impl.yml
- - .github/workflows/build_clio_docker_image.yml
- - ".github/actions/**"
- - "!.github/actions/code_coverage/**"
- - .github/scripts/prepare-release-artifacts.sh
+ - ".github/workflows/nightly.yml"
+ - ".github/workflows/build_clio_docker_image.yml"
  concurrency:
  # Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
@@ -32,7 +23,7 @@ jobs:
  matrix:
  include:
  - os: macos15
- conan_profile: apple-clang
+ conan_profile: default_apple_clang
  build_type: Release
  static: false
  - os: heavy
@@ -45,11 +36,6 @@ jobs:
  build_type: Debug
  static: true
  container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
- - os: heavy
- conan_profile: gcc.ubsan
- build_type: Release
- static: false
- container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
  uses: ./.github/workflows/build_and_test.yml
  with:
@@ -59,7 +45,7 @@ jobs:
  build_type: ${{ matrix.build_type }}
  static: ${{ matrix.static }}
  run_unit_tests: true
- run_integration_tests: true
+ run_integration_tests: ${{ matrix.os != 'macos15' }}
  upload_clio_server: true
  disable_cache: true
@@ -70,12 +56,15 @@ jobs:
  fail-fast: false
  matrix:
  include:
- - os: heavy
- conan_profile: clang
- container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
- static: true
+ # TODO: Enable when we have at least ubuntu 22.04
+ # as ClangBuildAnalyzer requires relatively modern glibc
+ #
+ # - os: heavy
+ # conan_profile: clang
+ # container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
+ # static: true
  - os: macos15
- conan_profile: apple-clang
+ conan_profile: default_apple_clang
  container: ""
  static: false
  uses: ./.github/workflows/build_impl.yml
@@ -89,6 +78,7 @@ jobs:
  static: ${{ matrix.static }}
  upload_clio_server: false
  targets: all
+ sanitizer: "false"
  analyze_build_time: true
  nightly_release:
@@ -96,17 +86,9 @@ jobs:
  uses: ./.github/workflows/release_impl.yml
  with:
  overwrite_release: true
- prerelease: true
  title: "Clio development (nightly) build"
  version: nightly
- header: >
- # Release notes
- > **Note:** Please remember that this is a development release and it is not recommended for production use.
- Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
- generate_changelog: false
- draft: false
+ notes_header_file: nightly_notes.md
  build_and_publish_docker_image:
  uses: ./.github/workflows/build_clio_docker_image.yml

.github/workflows/nightly_notes.md (new file)

@@ -0,0 +1,7 @@
+ # Release notes
+ > **Note:** Please remember that this is a development release and it is not recommended for production use.
+ Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
+ ## SHA256 checksums

@@ -23,16 +23,8 @@ jobs:
  python-version: 3.x
  - run: pip install pre-commit
- - run: pre-commit autoupdate --freeze
- - run: pre-commit run --all-files || true
+ - run: pre-commit autoupdate
+ - run: pre-commit run --all-files
- - uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0
- if: github.event_name != 'pull_request'
- with:
- gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
- passphrase: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
- git_user_signingkey: true
- git_commit_gpgsign: true
  - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
  if: always()
@@ -40,11 +32,8 @@ jobs:
  GH_REPO: ${{ github.repository }}
  GH_TOKEN: ${{ github.token }}
  with:
- commit-message: "style: Update pre-commit hooks"
- committer: Clio CI <skuznetsov@ripple.com>
  branch: update/pre-commit-hooks
- branch-suffix: timestamp
- delete-branch: true
- title: "style: Update pre-commit hooks"
+ title: Update pre-commit hooks
+ commit-message: "style: update pre-commit hooks"
  body: Update versions of pre-commit hooks to latest version.
  reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"

@@ -3,7 +3,8 @@ name: Run pre-commit hooks
  on:
  pull_request:
  push:
- branches: [develop]
+ branches:
+ - develop
  workflow_dispatch:
  jobs:
View File

@@ -1,57 +0,0 @@
name: Create release
on:
push:
tags:
- "*.*.*"
pull_request:
paths:
- .github/workflows/release.yml
concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build-and-test:
name: Build and Test
strategy:
fail-fast: false
matrix:
include:
- os: macos15
conan_profile: apple-clang
build_type: Release
static: false
- os: heavy
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
uses: ./.github/workflows/build_and_test.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
conan_profile: ${{ matrix.conan_profile }}
build_type: ${{ matrix.build_type }}
static: ${{ matrix.static }}
run_unit_tests: true
run_integration_tests: true
upload_clio_server: true
disable_cache: true
release:
needs: build-and-test
uses: ./.github/workflows/release_impl.yml
with:
overwrite_release: false
prerelease: ${{ contains(github.ref_name, '-') }}
title: "${{ github.ref_name}}"
version: "${{ github.ref_name }}"
header: >
# Introducing Clio version ${{ github.ref_name }}
generate_changelog: true
draft: true

@@ -8,11 +8,6 @@ on:
  required: true
  type: boolean
- prerelease:
- description: "Create a prerelease"
- required: true
- type: boolean
  title:
  description: "Release title"
  required: true
@@ -23,26 +18,14 @@ on:
  required: true
  type: string
- header:
- description: "Release notes header"
+ notes_header_file:
+ description: "Release notes header file"
  required: true
  type: string
- generate_changelog:
- description: "Generate changelog"
- required: true
- type: boolean
- draft:
- description: "Create a draft release"
- required: true
- type: boolean
  jobs:
  release:
- runs-on: heavy
- container:
- image: ghcr.io/xrplf/clio-ci:latest
+ runs-on: ubuntu-latest
  env:
  GH_REPO: ${{ github.repository }}
  GH_TOKEN: ${{ github.token }}
@@ -52,55 +35,29 @@ jobs:
  steps:
  - uses: actions/checkout@v4
- with:
- fetch-depth: 0
- - name: Prepare runner
- uses: ./.github/actions/prepare_runner
- with:
- disable_ccache: true
  - uses: actions/download-artifact@v4
  with:
  path: release_artifacts
  pattern: clio_server_*
- - name: Create release notes
- shell: bash
- run: |
- printf '%s\n' "${{ inputs.header }}" > "${RUNNER_TEMP}/release_notes.md"
- - name: Generate changelog
- shell: bash
- if: ${{ inputs.generate_changelog }}
- run: |
- LAST_TAG="$(gh release view --json tagName -q .tagName)"
- LAST_TAG_COMMIT="$(git rev-parse $LAST_TAG)"
- BASE_COMMIT="$(git merge-base HEAD $LAST_TAG_COMMIT)"
- git-cliff "${BASE_COMMIT}..HEAD" --ignore-tags "nightly|-b"
- cat CHANGELOG.md >> "${RUNNER_TEMP}/release_notes.md"
- - name: Prepare release artifacts
- shell: bash
- run: .github/scripts/prepare-release-artifacts.sh release_artifacts
- - name: Append sha256 checksums
+ - name: Prepare files
  shell: bash
  working-directory: release_artifacts
  run: |
- {
- echo '## SHA256 checksums'
- echo
- echo '```'
- cat *.sha256sum
- echo '```'
- } >> "${RUNNER_TEMP}/release_notes.md"
- - name: Upload release notes
- uses: actions/upload-artifact@v4
- with:
- name: release_notes_${{ inputs.version }}
- path: "${RUNNER_TEMP}/release_notes.md"
+ cp ${{ github.workspace }}/.github/workflows/${{ inputs.notes_header_file }} "${RUNNER_TEMP}/release_notes.md"
+ echo '' >> "${RUNNER_TEMP}/release_notes.md"
+ echo '```' >> "${RUNNER_TEMP}/release_notes.md"
+ for d in $(ls); do
+ archive_name=$(ls $d)
+ mv ${d}/${archive_name} ./
+ rm -r $d
+ sha256sum ./$archive_name > ./${archive_name}.sha256sum
+ cat ./$archive_name.sha256sum >> "${RUNNER_TEMP}/release_notes.md"
+ done
+ echo '```' >> "${RUNNER_TEMP}/release_notes.md"
  - name: Remove current release and tag
  if: ${{ github.event_name != 'pull_request' && inputs.overwrite_release }}
@@ -113,10 +70,9 @@ jobs:
  if: ${{ github.event_name != 'pull_request' }}
  shell: bash
  run: |
- gh release create "${{ inputs.version }}" \
- ${{ inputs.prerelease && '--prerelease' || '' }} \
+ gh release create ${{ inputs.version }} \
+ ${{ inputs.overwrite_release && '--prerelease' || '' }} \
  --title "${{ inputs.title }}" \
- --target "${GITHUB_SHA}" \
- ${{ inputs.draft && '--draft' || '' }} \
+ --target $GITHUB_SHA \
  --notes-file "${RUNNER_TEMP}/release_notes.md" \
  ./release_artifacts/clio_server*

@@ -6,24 +6,7 @@ on:
  workflow_dispatch:
  pull_request:
  paths:
- - .github/workflows/sanitizers.yml
- - .github/workflows/build_and_test.yml
- - .github/workflows/build_impl.yml
- - .github/workflows/test_impl.yml
- - ".github/actions/**"
- - "!.github/actions/build_docker_image/**"
- - "!.github/actions/create_issue/**"
- - .github/scripts/execute-tests-under-sanitizer
- - CMakeLists.txt
- - conanfile.py
- - conan.lock
- - "cmake/**"
- # We don't run sanitizer on code change, because it takes too long
- # - "src/**"
- # - "tests/**"
+ - ".github/workflows/sanitizers.yml"
  concurrency:
  # Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
@@ -37,20 +20,24 @@ jobs:
  strategy:
  fail-fast: false
  matrix:
- compiler: [gcc, clang]
- sanitizer_ext: [.asan, .tsan, .ubsan]
- build_type: [Release, Debug]
+ include:
+ - sanitizer: tsan
+ compiler: gcc
+ - sanitizer: asan
+ compiler: gcc
+ - sanitizer: ubsan
+ compiler: gcc
  uses: ./.github/workflows/build_and_test.yml
  with:
  runs_on: heavy
  container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
  disable_cache: true
- conan_profile: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
- build_type: ${{ matrix.build_type }}
+ conan_profile: ${{ matrix.compiler }}.${{ matrix.sanitizer }}
+ build_type: Release
  static: false
- # Currently, both gcc.tsan and clang.tsan unit tests hang
- run_unit_tests: ${{ matrix.sanitizer_ext != '.tsan' }}
+ run_unit_tests: true
  run_integration_tests: false
  upload_clio_server: false
  targets: clio_tests clio_integration_tests
+ sanitizer: ${{ matrix.sanitizer }}

@@ -33,18 +33,19 @@ on:
  required: true
  type: boolean
+ sanitizer:
+ description: Sanitizer to use
+ required: true
+ type: string
  jobs:
  unit_tests:
- name: Unit testing
+ name: Unit testing ${{ inputs.container != '' && 'in container' || 'natively' }}
  runs-on: ${{ inputs.runs_on }}
  container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
  if: inputs.run_unit_tests
- env:
- # TODO: remove completely when we have fixed all currently existing issues with sanitizers
- SANITIZER_IGNORE_ERRORS: ${{ endsWith(inputs.conan_profile, '.asan') || endsWith(inputs.conan_profile, '.tsan') }}
  steps:
  - name: Clean workdir
  if: ${{ runner.os == 'macOS' }}
@@ -63,15 +64,15 @@ jobs:
  run: chmod +x ./clio_tests
  - name: Run clio_tests (regular)
- if: env.SANITIZER_IGNORE_ERRORS == 'false'
+ if: inputs.sanitizer == 'false'
  run: ./clio_tests
- - name: Run clio_tests (sanitizer errors ignored)
- if: env.SANITIZER_IGNORE_ERRORS == 'true'
+ - name: Run clio_tests (sanitizer)
+ if: inputs.sanitizer != 'false'
  run: ./.github/scripts/execute-tests-under-sanitizer ./clio_tests
  - name: Check for sanitizer report
- if: env.SANITIZER_IGNORE_ERRORS == 'true'
+ if: inputs.sanitizer != 'false'
  shell: bash
  id: check_report
  run: |
@@ -82,15 +83,16 @@ jobs:
  fi
  - name: Upload sanitizer report
- if: env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true'
+ if: inputs.sanitizer != 'false' && steps.check_report.outputs.found_report == 'true'
  uses: actions/upload-artifact@v4
  with:
- name: sanitizer_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
+ name: ${{ inputs.conan_profile }}_report
  path: .sanitizer-report/*
  include-hidden-files: true
+ # TODO: enable when we have fixed all currently existing issues from sanitizers
  - name: Create an issue
- if: false && env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true'
+ if: false && inputs.sanitizer != 'false' && steps.check_report.outputs.found_report == 'true'
  uses: ./.github/actions/create_issue
  env:
  GH_TOKEN: ${{ github.token }}
@@ -104,7 +106,7 @@ jobs:
  Reports are available as artifacts.
  integration_tests:
- name: Integration testing
+ name: Integration testing ${{ inputs.container != '' && 'in container' || 'natively' }}
  runs-on: ${{ inputs.runs_on }}
  container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
@@ -112,7 +114,7 @@ jobs:
  services:
  scylladb:
- image: ${{ inputs.container != '' && 'scylladb/scylla' || '' }}
+ image: "scylladb/scylla"
  options: >-
  --health-cmd "cqlsh -e 'describe cluster'"
  --health-interval 10s
@@ -124,37 +126,13 @@ jobs:
  if: ${{ runner.os == 'macOS' }}
  uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
- - name: Spin up scylladb
- if: ${{ runner.os == 'macOS' }}
- timeout-minutes: 3
- run: |
- docker rm --force scylladb || true
- docker run \
- --detach \
- --name scylladb \
- --health-cmd "cqlsh -e 'describe cluster'" \
- --health-interval 10s \
- --health-timeout 5s \
- --health-retries 5 \
- --publish 9042:9042 \
- --memory 16G \
- scylladb/scylla
- until [ "$(docker inspect -f '{{.State.Health.Status}}' scylladb)" == "healthy" ]; do
- sleep 5
- done
  - uses: actions/download-artifact@v4
  with:
  name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
+ # To be enabled back once docker in mac runner arrives
+ # https://github.com/XRPLF/clio/issues/1400
  - name: Run clio_integration_tests
  run: |
  chmod +x ./clio_integration_tests
- ./clio_integration_tests ${{ runner.os != 'macOS' && '--backend_host=scylladb' || '' }}
+ ./clio_integration_tests --backend_host=scylladb
- - name: Show docker logs and stop scylladb
- if: ${{ always() && runner.os == 'macOS' }}
- run: |
- docker logs scylladb
- docker rm --force scylladb || true


@@ -3,330 +3,48 @@ name: Update CI docker image
on: on:
pull_request: pull_request:
paths: paths:
- .github/workflows/update_docker_ci.yml
- ".github/actions/build_docker_image/**"
- "docker/ci/**" - "docker/ci/**"
- "docker/compilers/**" - "docker/compilers/**"
- "docker/tools/**" - .github/workflows/update_docker_ci.yml
- ".github/actions/build_docker_image/**"
push: push:
branches: [develop] branches: [develop]
paths: paths:
- .github/workflows/update_docker_ci.yml # CI image must update when either its Dockerfile changes
# or any compilers changed and were pushed by hand
- ".github/actions/build_docker_image/**"
- "docker/ci/**" - "docker/ci/**"
- "docker/compilers/**" - "docker/compilers/**"
- "docker/tools/**" - .github/workflows/update_docker_ci.yml
- ".github/actions/build_docker_image/**"
workflow_dispatch: workflow_dispatch:
concurrency: concurrency:
# Only matches runs for the current workflow - matches against branch & tags # Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
# We want to execute all builds sequentially in develop cancel-in-progress: true
cancel-in-progress: false
env:
CLANG_MAJOR_VERSION: 19
GCC_MAJOR_VERSION: 14
GCC_VERSION: 14.3.0
jobs: jobs:
repo: build_and_push:
name: Calculate repo name name: Build and push docker image
runs-on: ubuntu-latest runs-on: [self-hosted, heavy]
outputs:
GHCR_REPO: ${{ steps.set-ghcr-repo.outputs.GHCR_REPO }}
steps:
- name: Set GHCR_REPO
id: set-ghcr-repo
run: |
echo "GHCR_REPO=$(echo ghcr.io/${{ github.repository_owner }} | tr '[:upper:]' '[:lower:]')" >> ${GITHUB_OUTPUT}
gcc-amd64:
name: Build and push GCC docker image (amd64)
runs-on: heavy
needs: repo
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/gcc/**"
- uses: ./.github/actions/build_docker_image - uses: ./.github/actions/build_docker_image
if: steps.changed-files.outputs.any_changed == 'true'
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }} DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc
rippleci/clio_gcc
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/compilers/gcc
tags: |
type=raw,value=amd64-latest
type=raw,value=amd64-${{ env.GCC_MAJOR_VERSION }}
type=raw,value=amd64-${{ env.GCC_VERSION }}
type=raw,value=amd64-${{ github.sha }}
platforms: linux/amd64
build_args: |
GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
GCC_VERSION=${{ env.GCC_VERSION }}
dockerhub_repo: rippleci/clio_gcc
dockerhub_description: GCC compiler for XRPLF/clio.
gcc-arm64:
name: Build and push GCC docker image (arm64)
runs-on: heavy-arm64
needs: repo
steps:
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/gcc/**"
- uses: ./.github/actions/build_docker_image
if: steps.changed-files.outputs.any_changed == 'true'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc
rippleci/clio_gcc
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/compilers/gcc
tags: |
type=raw,value=arm64-latest
type=raw,value=arm64-${{ env.GCC_MAJOR_VERSION }}
type=raw,value=arm64-${{ env.GCC_VERSION }}
type=raw,value=arm64-${{ github.sha }}
platforms: linux/arm64
build_args: |
GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
GCC_VERSION=${{ env.GCC_VERSION }}
dockerhub_repo: rippleci/clio_gcc
dockerhub_description: GCC compiler for XRPLF/clio.
gcc-merge:
name: Merge and push multi-arch GCC docker image
runs-on: heavy
needs: [repo, gcc-amd64, gcc-arm64]
steps:
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/gcc/**"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_PW }}
- name: Create and push multi-arch manifest
if: github.event_name != 'pull_request' && steps.changed-files.outputs.any_changed == 'true'
run: |
for image in ${{ needs.repo.outputs.GHCR_REPO }}/clio-gcc rippleci/clio_gcc; do
docker buildx imagetools create \
-t $image:latest \
-t $image:${{ env.GCC_MAJOR_VERSION }} \
-t $image:${{ env.GCC_VERSION }} \
-t $image:${{ github.sha }} \
$image:arm64-latest \
$image:amd64-latest
done
clang:
name: Build and push Clang docker image
runs-on: heavy
needs: repo
steps:
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/compilers/clang/**"
- uses: ./.github/actions/build_docker_image
if: steps.changed-files.outputs.any_changed == 'true'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-clang
rippleci/clio_clang
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/compilers/clang
tags: |
type=raw,value=latest
type=raw,value=${{ env.CLANG_MAJOR_VERSION }}
type=raw,value=${{ github.sha }}
platforms: linux/amd64,linux/arm64
build_args: |
CLANG_MAJOR_VERSION=${{ env.CLANG_MAJOR_VERSION }}
dockerhub_repo: rippleci/clio_clang
dockerhub_description: Clang compiler for XRPLF/clio.
tools-amd64:
name: Build and push tools docker image (amd64)
runs-on: heavy
needs: [repo, gcc-merge]
steps:
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/tools/**"
- uses: ./.github/actions/build_docker_image
if: steps.changed-files.outputs.any_changed == 'true'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with: with:
images: | images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-tools
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/tools
tags: |
type=raw,value=amd64-latest
type=raw,value=amd64-${{ github.sha }}
platforms: linux/amd64
build_args: |
GHCR_REPO=${{ needs.repo.outputs.GHCR_REPO }}
GCC_VERSION=${{ env.GCC_VERSION }}
tools-arm64:
name: Build and push tools docker image (arm64)
runs-on: heavy-arm64
needs: [repo, gcc-merge]
steps:
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/tools/**"
- uses: ./.github/actions/build_docker_image
if: steps.changed-files.outputs.any_changed == 'true'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-tools
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/tools
tags: |
type=raw,value=arm64-latest
type=raw,value=arm64-${{ github.sha }}
platforms: linux/arm64
build_args: |
GHCR_REPO=${{ needs.repo.outputs.GHCR_REPO }}
GCC_VERSION=${{ env.GCC_VERSION }}
tools-merge:
name: Merge and push multi-arch tools docker image
runs-on: heavy
needs: [repo, tools-amd64, tools-arm64]
steps:
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: "docker/tools/**"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Create and push multi-arch manifest
if: github.event_name != 'pull_request' && steps.changed-files.outputs.any_changed == 'true'
run: |
image=${{ needs.repo.outputs.GHCR_REPO }}/clio-tools
docker buildx imagetools create \
-t $image:latest \
-t $image:${{ github.sha }} \
$image:arm64-latest \
$image:amd64-latest
ci:
name: Build and push CI docker image
runs-on: heavy
needs: [repo, gcc-merge, clang, tools-merge]
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/build_docker_image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
with:
images: |
${{ needs.repo.outputs.GHCR_REPO }}/clio-ci
rippleci/clio_ci rippleci/clio_ci
ghcr.io/xrplf/clio-ci
dockerhub_repo: rippleci/clio_ci
push_image: ${{ github.event_name != 'pull_request' }} push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/ci directory: docker/ci
tags: | tags: |
type=raw,value=latest type=raw,value=latest
type=raw,value=gcc_${{ env.GCC_MAJOR_VERSION }}_clang_${{ env.CLANG_MAJOR_VERSION }} type=raw,value=gcc_12_clang_16
type=raw,value=${{ github.sha }} type=raw,value=${{ github.sha }}
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
build_args: | description: CI image for XRPLF/clio.
GHCR_REPO=${{ needs.repo.outputs.GHCR_REPO }}
CLANG_MAJOR_VERSION=${{ env.CLANG_MAJOR_VERSION }}
GCC_MAJOR_VERSION=${{ env.GCC_MAJOR_VERSION }}
GCC_VERSION=${{ env.GCC_VERSION }}
dockerhub_repo: rippleci/clio_ci
dockerhub_description: CI image for XRPLF/clio.


@@ -1,100 +0,0 @@
name: Upload Conan Dependencies
on:
schedule:
- cron: "0 9 * * 1-5"
workflow_dispatch:
inputs:
force_source_build:
description: "Force source build of all dependencies"
required: false
default: false
type: boolean
pull_request:
branches: [develop]
paths:
- .github/workflows/upload_conan_deps.yml
- .github/actions/generate/action.yml
- .github/actions/prepare_runner/action.yml
- ".github/scripts/conan/**"
- "!.github/scripts/conan/apple-clang-local.profile"
- conanfile.py
- conan.lock
push:
branches: [develop]
paths:
- .github/workflows/upload_conan_deps.yml
- .github/actions/generate/action.yml
- .github/actions/prepare_runner/action.yml
- ".github/scripts/conan/**"
- "!.github/scripts/conan/apple-clang-local.profile"
- conanfile.py
- conan.lock
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
generate-matrix:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- uses: actions/checkout@v4
- name: Calculate conan matrix
id: set-matrix
run: .github/scripts/conan/generate_matrix.py >> "${GITHUB_OUTPUT}"
upload-conan-deps:
name: Build ${{ matrix.compiler }}${{ matrix.sanitizer_ext }} ${{ matrix.build_type }}
needs: generate-matrix
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
runs-on: ${{ matrix.os }}
container: ${{ matrix.container != '' && fromJson(matrix.container) || null }}
env:
CONAN_PROFILE: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
steps:
- uses: actions/checkout@v4
- name: Prepare runner
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
- name: Setup conan on macOS
if: runner.os == 'macOS'
shell: bash
run: ./.github/scripts/conan/init.sh
- name: Show conan profile
run: conan profile show --profile:all ${{ env.CONAN_PROFILE }}
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ env.CONAN_PROFILE }}
# We check that everything builds fine from source on scheduled runs
# But we do build and upload packages with build=missing by default
force_conan_source_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
build_type: ${{ matrix.build_type }}
- name: Login to Conan
if: github.event_name != 'pull_request'
run: conan remote login -p ${{ secrets.CONAN_PASSWORD }} ripple ${{ secrets.CONAN_USERNAME }}
- name: Upload Conan packages
if: github.event_name != 'pull_request' && github.event_name != 'schedule'
run: conan upload "*" -r=ripple --confirm


@@ -25,7 +25,7 @@ jobs:
- name: Upload coverage report - name: Upload coverage report
if: ${{ hashFiles('build/coverage_report.xml') != '' }} if: ${{ hashFiles('build/coverage_report.xml') != '' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
with: with:
files: build/coverage_report.xml files: build/coverage_report.xml
fail_ci_if_error: true fail_ci_if_error: true


@@ -1,6 +0,0 @@
---
ignored:
- DL3003
- DL3007
- DL3008
- DL3013


@@ -7,93 +7,50 @@
# Common tasks # Common tasks
# #
# - Run on all files: pre-commit run --all-files # - Run on all files: pre-commit run --all-files
# - Register git hooks: pre-commit install --hook-type pre-commit --hook-type pre-push # - Register git hooks: pre-commit install --install-hooks
# #
# See https://pre-commit.com for more information # See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks # See https://pre-commit.com/hooks.html for more hooks
exclude: ^(docs/doxygen-awesome-theme/|conan\.lock$)
repos: repos:
# `pre-commit sample-config` default hooks # `pre-commit sample-config` default hooks
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: cef0300fd0fc4d2a87a85fa2093c6b283ea36f4b # frozen: v5.0.0 rev: v5.0.0
hooks: hooks:
- id: check-added-large-files - id: check-added-large-files
- id: check-executables-have-shebangs - id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable - id: check-shebang-scripts-are-executable
- id: end-of-file-fixer - id: end-of-file-fixer
exclude: ^docs/doxygen-awesome-theme/
- id: trailing-whitespace - id: trailing-whitespace
exclude: ^docs/doxygen-awesome-theme/
# Autoformat: YAML, JSON, Markdown, etc. # Autoformat: YAML, JSON, Markdown, etc.
- repo: https://github.com/rbubley/mirrors-prettier - repo: https://github.com/rbubley/mirrors-prettier
rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2 rev: v3.5.3
hooks: hooks:
- id: prettier - id: prettier
exclude: ^docs/doxygen-awesome-theme/
- repo: https://github.com/igorshubovych/markdownlint-cli - repo: https://github.com/igorshubovych/markdownlint-cli
rev: 192ad822316c3a22fb3d3cc8aa6eafa0b8488360 # frozen: v0.45.0 rev: v0.44.0
hooks: hooks:
- id: markdownlint-fix - id: markdownlint-fix
exclude: LICENSE.md exclude: LICENSE.md
- repo: https://github.com/hadolint/hadolint - repo: https://github.com/crate-ci/typos
rev: c3dc18df7a501f02a560a2cc7ba3c69a85ca01d3 # frozen: v2.13.1-beta rev: v1.31.2
hooks: hooks:
- id: hadolint-docker - id: typos
# hadolint-docker is a special hook that runs hadolint in a Docker container
# Docker is not installed in the environment where pre-commit is run
stages: [manual]
entry: hadolint/hadolint:v2.12.1-beta hadolint
- repo: https://github.com/codespell-project/codespell
rev: 63c8f8312b7559622c0d82815639671ae42132ac # frozen: v2.4.1
hooks:
- id: codespell
args:
[
--write-changes,
--ignore-words=pre-commit-hooks/codespell_ignore.txt,
]
- repo: https://github.com/trufflesecurity/trufflehog
rev: 6641d4ba5b684fffe195b9820345de1bf19f3181 # frozen: v3.89.2
hooks:
- id: trufflehog
entry: trufflehog git file://. --since-commit HEAD --no-verification --fail
# Running some C++ hooks before clang-format
# to ensure that the style is consistent.
- repo: local
hooks:
- id: json-in-cpp
name: Fix JSON style in C++
entry: pre-commit-hooks/json_in_cpp.py
types: [c++]
language: python
exclude: |
(?x)^(
tests/unit/etl/SubscriptionSourceTests.cpp|
tests/unit/web/ServerTests.cpp|
tests/unit/web/impl/ErrorHandlingTests.cpp|
tests/unit/web/ng/ServerTests.cpp|
tests/unit/web/ng/impl/ErrorHandlingTests.cpp
)$
- id: fix-local-includes
name: Fix Local Includes
entry: pre-commit-hooks/fix-local-includes.sh
types: [c++]
language: script
- repo: https://github.com/pre-commit/mirrors-clang-format - repo: https://github.com/pre-commit/mirrors-clang-format
rev: 6b9072cd80691b1b48d80046d884409fb1d962d1 # frozen: v20.1.7 rev: v19.1.7
hooks: hooks:
- id: clang-format - id: clang-format
args: [--style=file] args: [--style=file]
types: [c++] types: [c++]
- repo: https://github.com/cheshirekow/cmake-format-precommit - repo: https://github.com/cheshirekow/cmake-format-precommit
rev: e2c2116d86a80e72e7146a06e68b7c228afc6319 # frozen: v0.6.13 rev: v0.6.13
hooks: hooks:
- id: cmake-format - id: cmake-format
additional_dependencies: [PyYAML] additional_dependencies: [PyYAML]
@@ -118,12 +75,44 @@ repos:
name: Check Doxygen Documentation name: Check Doxygen Documentation
entry: pre-commit-hooks/check-doxygen-docs.sh entry: pre-commit-hooks/check-doxygen-docs.sh
types: [text] types: [text]
language: script language: system
pass_filenames: false
- id: fix-local-includes
name: Fix Local Includes
entry: pre-commit-hooks/fix-local-includes.sh
types: [c++]
language: system
pass_filenames: false pass_filenames: false
- id: verify-commits - id: verify-commits
name: Verify Commits name: Verify Commits
entry: pre-commit-hooks/verify-commits.sh entry: pre-commit-hooks/verify-commits.sh
always_run: true types: [text]
stages: [pre-push] language: system
language: script
pass_filenames: false pass_filenames: false
- repo: local
hooks:
- id: lfs-post-checkout
name: LFS Post Checkout
entry: pre-commit-hooks/lfs/post-checkout
types: [text]
stages: [post-checkout]
language: system
- id: lfs-post-commit
name: LFS Post Commit
entry: pre-commit-hooks/lfs/post-commit
types: [text]
stages: [post-commit]
language: system
- id: lfs-post-merge
name: LFS Post Merge
entry: pre-commit-hooks/lfs/post-merge
types: [text]
stages: [post-merge]
language: system
- id: lfs-pre-push
name: LFS Pre Push
entry: pre-commit-hooks/lfs/pre-push
types: [text]
stages: [pre-push]
language: system


@@ -69,17 +69,15 @@ endif ()
# Enable selected sanitizer if enabled via `san` # Enable selected sanitizer if enabled via `san`
if (san) if (san)
set(SUPPORTED_SANITIZERS "address" "thread" "memory" "undefined") set(SUPPORTED_SANITIZERS "address" "thread" "memory" "undefined")
if (NOT san IN_LIST SUPPORTED_SANITIZERS) list(FIND SUPPORTED_SANITIZERS "${san}" INDEX)
if (INDEX EQUAL -1)
message(FATAL_ERROR "Error: Unsupported sanitizer '${san}'. Supported values are: ${SUPPORTED_SANITIZERS}.") message(FATAL_ERROR "Error: Unsupported sanitizer '${san}'. Supported values are: ${SUPPORTED_SANITIZERS}.")
endif () endif ()
# Sanitizers recommend minimum of -O1 for reasonable performance so we enable it for debug builds target_compile_options(
set(SAN_OPTIMIZATION_FLAG "") clio_options INTERFACE # Sanitizers recommend minimum of -O1 for reasonable performance
if (CMAKE_BUILD_TYPE STREQUAL "Debug") $<$<CONFIG:Debug>:-O1> ${SAN_FLAG} -fno-omit-frame-pointer
set(SAN_OPTIMIZATION_FLAG -O1) )
endif ()
target_compile_options(clio_options INTERFACE ${SAN_OPTIMIZATION_FLAG} ${SAN_FLAG} -fno-omit-frame-pointer)
target_compile_definitions( target_compile_definitions(
clio_options INTERFACE $<$<STREQUAL:${san},address>:SANITIZER=ASAN> $<$<STREQUAL:${san},thread>:SANITIZER=TSAN> clio_options INTERFACE $<$<STREQUAL:${san},address>:SANITIZER=ASAN> $<$<STREQUAL:${san},thread>:SANITIZER=TSAN>
$<$<STREQUAL:${san},memory>:SANITIZER=MSAN> $<$<STREQUAL:${san},undefined>:SANITIZER=UBSAN> $<$<STREQUAL:${san},memory>:SANITIZER=MSAN> $<$<STREQUAL:${san},undefined>:SANITIZER=UBSAN>


@@ -17,16 +17,6 @@ To contribute, please:
> **Note:** Please read the [Style guide](#style-guide). > **Note:** Please read the [Style guide](#style-guide).
### `git lfs` hooks
Install `git lfs` hooks using the following command:
```bash
git lfs install
```
> **Note:** You need to install Git LFS hooks before installing `pre-commit` hooks.
### `pre-commit` hooks ### `pre-commit` hooks
To ensure code quality and style, we use [`pre-commit`](https://pre-commit.com/). To ensure code quality and style, we use [`pre-commit`](https://pre-commit.com/).
@@ -35,7 +25,7 @@ Run the following command to enable `pre-commit` hooks that help with Clio devel
```bash ```bash
pip3 install pre-commit pip3 install pre-commit
pre-commit install --hook-type pre-commit --hook-type pre-push pre-commit install
``` ```
`pre-commit` takes care of running each tool in [`.pre-commit-config.yaml`](https://github.com/XRPLF/clio/blob/develop/.pre-commit-config.yaml) in a separate environment. `pre-commit` takes care of running each tool in [`.pre-commit-config.yaml`](https://github.com/XRPLF/clio/blob/develop/.pre-commit-config.yaml) in a separate environment.
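A quick usage sketch assembled from the commands shown in this diff (the hook-type flags follow the newer side of the change):

```bash
# Install pre-commit, register both pre-commit and pre-push hooks, then run all hooks once
pip3 install pre-commit
pre-commit install --hook-type pre-commit --hook-type pre-push
pre-commit run --all-files
```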

_typos.toml Normal file

@@ -0,0 +1,22 @@
[default]
# This allows to ignore account ids in tests and private keys
# More info: https://github.com/crate-ci/typos/issues/415
extend-ignore-re = [
"[a-z-A-Z0-9]{33}",
"[a-z-A-Z0-9]{34}",
"[a-z-A-Z0-9]{64}",
]
[default.extend-identifiers]
# (S)tring
tring = "tring"
trings = "trings"
ASSERTs = "ASSERTs"
EXCLUDEs = "EXCLUDEs"
ser = "ser"
[default.extend-words]
strat = "strat"
datas = "datas"
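For illustration only, a hedged invocation of the `typos` checker against this configuration (the target paths are assumptions):

```bash
# typos picks up _typos.toml from the repository root; --config makes it explicit
typos --config _typos.toml src/ tests/
```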


@@ -191,9 +191,8 @@ generateData()
constexpr auto kTOTAL = 10'000; constexpr auto kTOTAL = 10'000;
std::vector<uint64_t> data; std::vector<uint64_t> data;
data.reserve(kTOTAL); data.reserve(kTOTAL);
util::MTRandomGenerator randomGenerator;
for (auto i = 0; i < kTOTAL; ++i) for (auto i = 0; i < kTOTAL; ++i)
data.push_back(randomGenerator.uniform(1, 100'000'000)); data.push_back(util::Random::uniform(1, 100'000'000));
return data; return data;
} }


@@ -8,24 +8,15 @@
[changelog] [changelog]
# template for the changelog header # template for the changelog header
header = """ header = """
# Changelog\n
All notable changes to this project will be documented in this file.\n
""" """
# template for the changelog body # template for the changelog body
# https://keats.github.io/tera/docs/#introduction # https://keats.github.io/tera/docs/#introduction
body = """ body = """
{% if version %}\ {% if version %}\
Version {{ version | trim_start_matches(pat="v") }} of Clio, an XRP Ledger API server optimized for HTTP and WebSocket API calls, is now available.
{% else %}\
Clio, an XRP Ledger API server optimized for HTTP and WebSocket API calls, is under active development.
{% endif %}\
<!-- Please, remove one of the 2 following lines -->
This release adds new features and bug fixes.
This release adds bug fixes.
\
{% if version %}
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %} {% else %}\
## [unreleased] ## [unreleased]
{% endif %}\ {% endif %}\
{% for group, commits in commits | filter(attribute="merge_commit", value=false) | group_by(attribute="group") %} {% for group, commits in commits | filter(attribute="merge_commit", value=false) | group_by(attribute="group") %}
@@ -33,7 +24,7 @@ This release adds bug fixes.
{% for commit in commits %} {% for commit in commits %}
- {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
{% if commit.breaking %}[**breaking**] {% endif %}\ {% if commit.breaking %}[**breaking**] {% endif %}\
{{ commit.message | upper_first }}{% if commit.remote.username %} by @{{ commit.remote.username }}{% endif %}\ {{ commit.message | upper_first }} {% if commit.remote.username %}by @{{ commit.remote.username }}{% endif %}\
{% endfor %} {% endfor %}
{% endfor %}\n {% endfor %}\n
""" """


@@ -8,7 +8,7 @@ if (lint)
endif () endif ()
message(STATUS "Using clang-tidy from CLIO_CLANG_TIDY_BIN") message(STATUS "Using clang-tidy from CLIO_CLANG_TIDY_BIN")
else () else ()
find_program(_CLANG_TIDY_BIN NAMES "clang-tidy-20" "clang-tidy" REQUIRED) find_program(_CLANG_TIDY_BIN NAMES "clang-tidy-19" "clang-tidy" REQUIRED)
endif () endif ()
if (NOT _CLANG_TIDY_BIN) if (NOT _CLANG_TIDY_BIN)


@@ -4,42 +4,39 @@
find_package(Git REQUIRED) find_package(Git REQUIRED)
set(GIT_COMMAND describe --tags --exact-match) set(GIT_COMMAND rev-parse --short HEAD)
execute_process( execute_process(
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE REV
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE TAG
RESULT_VARIABLE RC
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_STRIP_TRAILING_WHITESPACE
) )
if (RC EQUAL 0) set(GIT_COMMAND branch --show-current)
# if we are on a tag, use the tag name execute_process(
set(CLIO_VERSION "${TAG}") COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE BRANCH
set(DOC_CLIO_VERSION "${TAG}") OUTPUT_STRIP_TRAILING_WHITESPACE
else () )
# if not, use YYYYMMDDHMS-<branch>-<git-rev>
if (BRANCH STREQUAL "")
set(BRANCH "dev")
endif ()
if (NOT (BRANCH MATCHES master OR BRANCH MATCHES release/*)) # for develop and any other branch name
# YYYYMMDDHMS-<branch>-<git-rev>
set(GIT_COMMAND show -s --date=format:%Y%m%d%H%M%S --format=%cd) set(GIT_COMMAND show -s --date=format:%Y%m%d%H%M%S --format=%cd)
execute_process( execute_process(
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE DATE COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} OUTPUT_VARIABLE DATE
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY OUTPUT_STRIP_TRAILING_WHITESPACE
) )
set(GIT_COMMAND branch --show-current)
execute_process(
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE BRANCH
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY
)
set(GIT_COMMAND rev-parse --short HEAD)
execute_process(
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE REV
OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY
)
set(CLIO_VERSION "${DATE}-${BRANCH}-${REV}") set(CLIO_VERSION "${DATE}-${BRANCH}-${REV}")
set(DOC_CLIO_VERSION "develop") set(DOC_CLIO_VERSION "develop")
else ()
set(GIT_COMMAND describe --tags)
execute_process(
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE CLIO_TAG_VERSION
OUTPUT_STRIP_TRAILING_WHITESPACE
)
set(CLIO_VERSION "${CLIO_TAG_VERSION}")
set(DOC_CLIO_VERSION "${CLIO_TAG_VERSION}")
endif () endif ()
if (CMAKE_BUILD_TYPE MATCHES Debug) if (CMAKE_BUILD_TYPE MATCHES Debug)
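A hedged shell sketch of the versioning scheme implemented above: when HEAD sits exactly on a tag, the tag is used verbatim; otherwise a `YYYYMMDDHMS-<branch>-<git-rev>` string is assembled (the commands mirror the CMake calls; the fallback to `dev` matches the empty-branch case):

```bash
if TAG=$(git describe --tags --exact-match 2>/dev/null); then
  CLIO_VERSION="$TAG"
else
  DATE=$(git show -s --date=format:%Y%m%d%H%M%S --format=%cd)
  BRANCH=$(git branch --show-current)
  REV=$(git rev-parse --short HEAD)
  CLIO_VERSION="${DATE}-${BRANCH:-dev}-${REV}"
fi
echo "$CLIO_VERSION"
```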


@@ -1,22 +1,21 @@
set(COMPILER_FLAGS set(COMPILER_FLAGS
-pedantic
-Wall -Wall
-Wcast-align -Wcast-align
-Wdouble-promotion -Wdouble-promotion
-Werror
-Wextra -Wextra
-Werror
-Wformat=2 -Wformat=2
-Wimplicit-fallthrough -Wimplicit-fallthrough
-Wmisleading-indentation -Wmisleading-indentation
-Wno-dangling-else
-Wno-deprecated-declarations
-Wno-narrowing -Wno-narrowing
-Wno-deprecated-declarations
-Wno-dangling-else
-Wno-unused-but-set-variable -Wno-unused-but-set-variable
-Wnon-virtual-dtor -Wnon-virtual-dtor
-Wnull-dereference -Wnull-dereference
-Wold-style-cast -Wold-style-cast
-pedantic
-Wpedantic -Wpedantic
-Wunreachable-code
-Wunused -Wunused
# FIXME: The following bunch are needed for gcc12 atm. # FIXME: The following bunch are needed for gcc12 atm.
-Wno-missing-requires -Wno-missing-requires
@@ -27,7 +26,7 @@ set(COMPILER_FLAGS
# TODO: Address these and others in https://github.com/XRPLF/clio/issues/1273 # TODO: Address these and others in https://github.com/XRPLF/clio/issues/1273
) )
# TODO: re-enable when we change CI #884 if (is_gcc AND NOT lint) list(APPEND COMPILER_FLAGS -Wduplicated-branches # TODO: reenable when we change CI #884 if (is_gcc AND NOT lint) list(APPEND COMPILER_FLAGS -Wduplicated-branches
# -Wduplicated-cond -Wlogical-op -Wuseless-cast ) endif () # -Wduplicated-cond -Wlogical-op -Wuseless-cast ) endif ()
if (is_clang) if (is_clang)


@@ -1,57 +0,0 @@
{
"version": "0.5",
"requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1752006674.465",
"xxhash/0.8.2#7856c968c985b2981b707ee8f2413b2b%1752006674.334",
"xrpl/2.5.0#7880d1696f11fceb1d498570f1a184c8%1752006708.218",
"sqlite3/3.47.0#7a0904fd061f5f8a2366c294f9387830%1752006674.338",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1752006674.465",
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1752006674.077",
"rapidjson/cci.20220822#1b9d8c2256876a154172dc5cfbe447c6%1752006673.227",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1752006673.172",
"openssl/1.1.1v#216374e4fb5b2e0f5ab1fb6f27b5b434%1752006673.069",
"nudb/2.0.8#63990d3e517038e04bf529eb8167f69f%1752006673.862",
"minizip/1.2.13#9e87d57804bd372d6d1e32b1871517a3%1752006672.983",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1752006672.825",
"libuv/1.46.0#78565d142ac7102776256328a26cdf60%1752006672.827",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1752006672.826",
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1752006672.822",
"libarchive/3.7.6#e0453864b2a4d225f06b3304903cb2b7%1752006672.917",
"http_parser/2.9.4#98d91690d6fd021e9e624218a85d9d97%1752006672.658",
"gtest/1.14.0#f8f0757a574a8dd747d16af62d6eb1b7%1752006671.555",
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1752006671.777",
"fmt/11.2.0#579bb2cdf4a7607621beea4eb4651e0f%1752006671.557",
"date/3.0.3#cf28fe9c0aab99fe12da08aa42df65e1%1752006671.553",
"cassandra-cpp-driver/2.17.0#e50919efac8418c26be6671fd702540a%1752006671.654",
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1752006671.554",
"bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1752006671.549",
"boost/1.83.0#5bcb2a14a35875e328bf312e080d3562%1752006671.557",
"benchmark/1.8.3#1a2ce62c99e2b3feaa57b1f0c15a8c46%1752006671.408",
"abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1752006671.555"
],
"build_requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1752006674.465",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1752006673.172",
"protobuf/3.21.9#64ce20e1d9ea24f3d6c504015d5f6fa8%1752006673.173",
"cmake/3.31.7#57c3e118bcf267552c0ea3f8bee1e7d5%1752006671.64",
"b2/5.3.2#7b5fabfe7088ae933fb3e78302343ea0%1752006671.407"
],
"python_requires": [],
"overrides": {
"boost/1.83.0": [
null,
"boost/1.83.0#5bcb2a14a35875e328bf312e080d3562"
],
"protobuf/3.21.9": [
null,
"protobuf/3.21.12"
],
"lz4/1.9.4": [
"lz4/1.10.0"
],
"sqlite3/3.44.2": [
"sqlite3/3.47.0"
]
},
"config_requires": []
}


@@ -2,15 +2,16 @@ from conan import ConanFile
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
class ClioConan(ConanFile): class Clio(ConanFile):
name = 'clio' name = 'clio'
license = 'ISC' license = 'ISC'
author = 'Alex Kremer <akremer@ripple.com>, John Freeman <jfreeman@ripple.com>, Ayaz Salikhov <asalikhov@ripple.com>' author = 'Alex Kremer <akremer@ripple.com>, John Freeman <jfreeman@ripple.com>'
url = 'https://github.com/xrplf/clio' url = 'https://github.com/xrplf/clio'
description = 'Clio RPC server' description = 'Clio RPC server'
settings = 'os', 'compiler', 'build_type', 'arch' settings = 'os', 'compiler', 'build_type', 'arch'
options = { options = {
'static': [True, False], # static linkage 'static': [True, False], # static linkage
'fPIC': [True, False], # unused?
'verbose': [True, False], 'verbose': [True, False],
'tests': [True, False], # build unit tests; create `clio_tests` binary 'tests': [True, False], # build unit tests; create `clio_tests` binary
'integration_tests': [True, False], # build integration tests; create `clio_integration_tests` binary 'integration_tests': [True, False], # build integration tests; create `clio_integration_tests` binary
@@ -26,17 +27,18 @@ class ClioConan(ConanFile):
requires = [ requires = [
'boost/1.83.0', 'boost/1.83.0',
'cassandra-cpp-driver/2.17.0', 'cassandra-cpp-driver/2.17.0',
'fmt/11.2.0', 'fmt/10.1.1',
'protobuf/3.21.12', 'protobuf/3.21.9',
'grpc/1.50.1', 'grpc/1.50.1',
'openssl/1.1.1v', 'openssl/1.1.1v',
'xrpl/2.5.0', 'xrpl/2.4.0',
'zlib/1.3.1', 'zlib/1.3.1',
'libbacktrace/cci.20210118' 'libbacktrace/cci.20210118'
] ]
default_options = { default_options = {
'static': False, 'static': False,
'fPIC': True,
'verbose': False, 'verbose': False,
'tests': False, 'tests': False,
'integration_tests': False, 'integration_tests': False,
@@ -87,8 +89,17 @@ class ClioConan(ConanFile):
def generate(self): def generate(self):
tc = CMakeToolchain(self) tc = CMakeToolchain(self)
for option_name, option_value in self.options.items(): tc.variables['verbose'] = self.options.verbose
tc.variables[option_name] = option_value tc.variables['static'] = self.options.static
tc.variables['tests'] = self.options.tests
tc.variables['integration_tests'] = self.options.integration_tests
tc.variables['coverage'] = self.options.coverage
tc.variables['lint'] = self.options.lint
tc.variables['docs'] = self.options.docs
tc.variables['packaging'] = self.options.packaging
tc.variables['benchmark'] = self.options.benchmark
tc.variables['snapshot'] = self.options.snapshot
tc.variables['time_trace'] = self.options.time_trace
tc.generate() tc.generate()
def build(self): def build(self):


@@ -1,117 +1,113 @@
ARG GHCR_REPO=invalid FROM rippleci/clio_clang:16
ARG CLANG_MAJOR_VERSION=invalid
ARG GCC_VERSION=invalid
FROM ${GHCR_REPO}/clio-gcc:${GCC_VERSION} AS clio-gcc
FROM ${GHCR_REPO}/clio-tools:latest AS clio-tools
FROM ${GHCR_REPO}/clio-clang:${CLANG_MAJOR_VERSION}
ARG DEBIAN_FRONTEND=noninteractive ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH
SHELL ["/bin/bash", "-o", "pipefail", "-c"] SHELL ["/bin/bash", "-c"]
USER root
WORKDIR /root
ENV CCACHE_VERSION=4.10.2 \
LLVM_TOOLS_VERSION=19 \
GH_VERSION=2.40.0 \
DOXYGEN_VERSION=1.12.0 \
CLANG_BUILD_ANALYZER_VERSION=1.6.0
# Add repositories
RUN apt-get -qq update \
&& apt-get -qq install -y --no-install-recommends --no-install-suggests gnupg wget curl software-properties-common \
&& echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
&& wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
# Install packages
RUN apt update -qq \
&& apt install -y --no-install-recommends --no-install-suggests python3 python3-pip git git-lfs make ninja-build flex bison jq graphviz \
clang-tidy-${LLVM_TOOLS_VERSION} clang-tools-${LLVM_TOOLS_VERSION} \
&& pip3 install -q --upgrade --no-cache-dir pip && pip3 install -q --no-cache-dir conan==1.62 gcovr cmake==3.31.6 pre-commit \
&& apt-get clean && apt remove -y software-properties-common
# Install gcc-12 and make ldconfig aware of the new libstdc++ location (for gcc)
# Note: Clang is using libc++ instead
COPY --from=rippleci/clio_gcc:12.3.0 /gcc12.deb /
RUN apt update && apt-get install -y binutils libc6-dev \
&& dpkg -i /gcc12.deb \
&& rm -rf /gcc12.deb \
&& ldconfig
# Rewire to use gcc-12 as default compiler
RUN update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 100 \
&& update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-12 100 \
&& update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 100 \
&& update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-12 100 \
&& update-alternatives --install /usr/bin/gcov gcov /usr/bin/gcov-12 100 \
&& update-alternatives --install /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-12 100 \
&& update-alternatives --install /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-12 100
WORKDIR /tmp
# Install ccache from source
RUN wget "https://github.com/ccache/ccache/releases/download/v${CCACHE_VERSION}/ccache-${CCACHE_VERSION}.tar.gz" \
&& tar xf "ccache-${CCACHE_VERSION}.tar.gz" \
&& cd "ccache-${CCACHE_VERSION}" \
&& mkdir build && cd build \
&& cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \
&& cmake --build . --target install \
&& rm -rf /tmp/* /var/tmp/*
# Install doxygen from source
RUN wget "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& cd "doxygen-${DOXYGEN_VERSION}" \
&& mkdir build && cd build \
&& cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \
&& cmake --build . --target install \
&& rm -rf /tmp/* /var/tmp/*
# Install ClangBuildAnalyzer
RUN wget "https://github.com/aras-p/ClangBuildAnalyzer/releases/download/v${CLANG_BUILD_ANALYZER_VERSION}/ClangBuildAnalyzer-linux" \
&& chmod +x ClangBuildAnalyzer-linux \
&& mv ClangBuildAnalyzer-linux /usr/bin/ClangBuildAnalyzer \
&& rm -rf /tmp/* /var/tmp/*
# Install gh
RUN wget "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz" \
&& tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
&& mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/bin/gh \
&& rm -rf /tmp/* /var/tmp/*
WORKDIR /root
# Using root by default is not very secure but github checkout action doesn't work with any other user # Using root by default is not very secure but github checkout action doesn't work with any other user
# https://github.com/actions/checkout/issues/956 # https://github.com/actions/checkout/issues/956
# And Github Actions doc recommends using root # And Github Actions doc recommends using root
# https://docs.github.com/en/actions/sharing-automations/creating-actions/dockerfile-support-for-github-actions#user # https://docs.github.com/en/actions/sharing-automations/creating-actions/dockerfile-support-for-github-actions#user
# hadolint ignore=DL3002
USER root
WORKDIR /root
ARG LLVM_TOOLS_VERSION=20
# Add repositories
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
curl \
gnupg \
wget \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
&& wget --progress=dot:giga -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
# Install packages
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
clang-tidy-${LLVM_TOOLS_VERSION} \
clang-tools-${LLVM_TOOLS_VERSION} \
git \
git-lfs \
graphviz \
jq \
make \
ninja-build \
python3 \
python3-pip \
zip \
&& pip3 install -q --upgrade --no-cache-dir pip \
&& pip3 install -q --no-cache-dir \
# TODO: Remove this once we switch to newer Ubuntu base image
# lxml 6.0.0 is not compatible with our image
'lxml<6.0.0' \
\
cmake==3.31.6 \
conan==2.17.0 \
gcovr \
pre-commit \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ARG GCC_MAJOR_VERSION=invalid
# Install custom-built gcc and make ldconfig aware of the new libstdc++ location (for gcc)
# Note: Clang is using libc++ instead
COPY --from=clio-gcc /gcc${GCC_MAJOR_VERSION}.deb /
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
binutils \
libc6-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& dpkg -i /gcc${GCC_MAJOR_VERSION}.deb \
&& rm -rf /gcc${GCC_MAJOR_VERSION}.deb \
&& ldconfig
# Rewire to use our custom-built gcc as default compiler
RUN update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov gcov /usr/bin/gcov-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-${GCC_MAJOR_VERSION} 100
COPY --from=clio-tools \
/usr/local/bin/mold \
/usr/local/bin/ld.mold \
/usr/local/bin/ccache \
/usr/local/bin/doxygen \
/usr/local/bin/ClangBuildAnalyzer \
/usr/local/bin/git-cliff \
/usr/local/bin/gh \
/usr/local/bin/
WORKDIR /root
# Setup conan # Setup conan
RUN conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev RUN conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
WORKDIR /root/.conan2 # Note: intentionally leaving cppstd=20
COPY conan/global.conf ./global.conf RUN conan profile new gcc --detect \
&& conan profile update settings.compiler=gcc gcc \
&& conan profile update settings.compiler.version=12 gcc \
&& conan profile update settings.compiler.cppstd=20 gcc \
&& conan profile update settings.compiler.libcxx=libstdc++11 gcc \
&& conan profile update env.CC=/usr/bin/gcc-12 gcc \
&& conan profile update env.CXX=/usr/bin/g++-12 gcc \
&& conan profile update "conf.tools.build:compiler_executables={\"c\": \"/usr/bin/gcc-12\", \"cpp\": \"/usr/bin/g++-12\"}" gcc
WORKDIR /root/.conan2/profiles RUN conan profile new clang --detect \
&& conan profile update settings.compiler=clang clang \
&& conan profile update settings.compiler.version=16 clang \
&& conan profile update settings.compiler.cppstd=20 clang \
&& conan profile update settings.compiler.libcxx=libc++ clang \
&& conan profile update env.CC=/usr/bin/clang-16 clang \
&& conan profile update env.CXX=/usr/bin/clang++-16 clang \
&& conan profile update env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS" clang \
&& conan profile update "conf.tools.build:compiler_executables={\"c\": \"/usr/bin/clang-16\", \"cpp\": \"/usr/bin/clang++-16\"}" clang
COPY conan/clang.profile ./clang RUN echo "include(gcc)" >> .conan/profiles/default
COPY conan/sanitizer_template.profile ./clang.asan
COPY conan/sanitizer_template.profile ./clang.tsan
COPY conan/sanitizer_template.profile ./clang.ubsan
COPY conan/gcc.profile ./gcc COPY conan/gcc.asan /root/.conan/profiles
COPY conan/sanitizer_template.profile ./gcc.asan COPY conan/gcc.tsan /root/.conan/profiles
COPY conan/sanitizer_template.profile ./gcc.tsan COPY conan/gcc.ubsan /root/.conan/profiles
COPY conan/sanitizer_template.profile ./gcc.ubsan COPY conan/clang.asan /root/.conan/profiles
COPY conan/clang.tsan /root/.conan/profiles
WORKDIR /root COPY conan/clang.ubsan /root/.conan/profiles


@@ -5,18 +5,13 @@ It is used in [Clio Github Actions](https://github.com/XRPLF/clio/actions) but c
The image is based on Ubuntu 20.04 and contains: The image is based on Ubuntu 20.04 and contains:
- ccache 4.11.3 - clang 16.0.6
- Clang 19 - gcc 12.3
- ClangBuildAnalyzer 1.6.0 - doxygen 1.12
- Conan 2.17.0 - gh 2.40
- Doxygen 1.12 - ccache 4.10.2
- GCC 14.3.0 - conan 1.62
- gh 2.74
- git-cliff 2.9.1
- mold 2.40.1
- and some other useful tools - and some other useful tools
Conan is set up to build Clio without any additional steps. Conan is set up to build Clio without any additional steps. There are two preset conan profiles: `clang` and `gcc` to use corresponding compiler. By default conan is setup to use `gcc`.
There are two preset conan profiles: `clang` and `gcc` to use corresponding compiler. Sanitizer builds for `ASAN`, `TSAN` and `UBSAN` are enabled via conan profiles for each of the supported compilers. These can be selected using the following pattern (all lowercase): `[compiler].[sanitizer]` (e.g. `--profile gcc.tsan`).
`ASan`, `TSan` and `UBSan` sanitizer builds are enabled via conan profiles for each of the supported compilers.
These can be selected using the following pattern (all lowercase): `[compiler].[sanitizer]` (e.g. `--profile:all gcc.tsan`).
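A minimal usage sketch of the profile naming described above (the output folder and `--build missing` are illustrative assumptions):

```bash
# Resolve Clio's dependencies inside the CI image using the gcc ThreadSanitizer profile
conan install . --profile:all gcc.tsan --build missing --output-folder build
```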


@@ -0,0 +1,9 @@
include(clang)
[options]
boost:extra_b2_flags="cxxflags=\"-fsanitize=address\" linkflags=\"-fsanitize=address\""
boost:without_stacktrace=True
[env]
CFLAGS="-fsanitize=address"
CXXFLAGS="-fsanitize=address"
LDFLAGS="-fsanitize=address"


@@ -1,12 +0,0 @@
[settings]
arch={{detect_api.detect_arch()}}
build_type=Release
compiler=clang
compiler.cppstd=20
compiler.libcxx=libc++
compiler.version=19
os=Linux
[conf]
tools.build:compiler_executables={"c": "/usr/bin/clang-19", "cpp": "/usr/bin/clang++-19"}
grpc/1.50.1:tools.build:cxxflags+=["-Wno-missing-template-arg-list-after-template-kw"]


@@ -0,0 +1,9 @@
include(clang)
[options]
boost:extra_b2_flags="cxxflags=\"-fsanitize=thread\" linkflags=\"-fsanitize=thread\""
boost:without_stacktrace=True
[env]
CFLAGS="-fsanitize=thread"
CXXFLAGS="-fsanitize=thread"
LDFLAGS="-fsanitize=thread"


@@ -0,0 +1,9 @@
include(clang)
[options]
boost:extra_b2_flags="cxxflags=\"-fsanitize=undefined\" linkflags=\"-fsanitize=undefined\""
boost:without_stacktrace=True
[env]
CFLAGS="-fsanitize=undefined"
CXXFLAGS="-fsanitize=undefined"
LDFLAGS="-fsanitize=undefined"

docker/ci/conan/gcc.asan Normal file

@@ -0,0 +1,9 @@
include(gcc)
[options]
boost:extra_b2_flags="cxxflags=\"-fsanitize=address\" linkflags=\"-fsanitize=address\""
boost:without_stacktrace=True
[env]
CFLAGS="-fsanitize=address"
CXXFLAGS="-fsanitize=address"
LDFLAGS="-fsanitize=address"


@@ -1,11 +0,0 @@
[settings]
arch={{detect_api.detect_arch()}}
build_type=Release
compiler=gcc
compiler.cppstd=20
compiler.libcxx=libstdc++11
compiler.version=14
os=Linux
[conf]
tools.build:compiler_executables={"c": "/usr/bin/gcc-14", "cpp": "/usr/bin/g++-14"}

docker/ci/conan/gcc.tsan Normal file

@@ -0,0 +1,9 @@
include(gcc)
[options]
boost:extra_b2_flags="cxxflags=\"-fsanitize=thread\" linkflags=\"-fsanitize=thread\""
boost:without_stacktrace=True
[env]
CFLAGS="-fsanitize=thread"
CXXFLAGS="-fsanitize=thread"
LDFLAGS="-fsanitize=thread"


@@ -0,0 +1,9 @@
include(gcc)
[options]
boost:extra_b2_flags="cxxflags=\"-fsanitize=undefined\" linkflags=\"-fsanitize=undefined\""
boost:without_stacktrace=True
[env]
CFLAGS="-fsanitize=undefined"
CXXFLAGS="-fsanitize=undefined"
LDFLAGS="-fsanitize=undefined"


@@ -1,2 +0,0 @@
core.download:parallel={{os.cpu_count()}}
core.upload:parallel={{os.cpu_count()}}


@@ -1,23 +0,0 @@
{% set compiler, sani = profile_name.split('.') %}
{% set sanitizer_opt_map = {"asan": "address", "tsan": "thread", "ubsan": "undefined"} %}
{% set sanitizer = sanitizer_opt_map[sani] %}
{% set sanitizer_build_flags_str = "-fsanitize=" ~ sanitizer ~ " -g -O1 -fno-omit-frame-pointer" %}
{% set sanitizer_build_flags = sanitizer_build_flags_str.split(' ') %}
{% set sanitizer_link_flags_str = "-fsanitize=" ~ sanitizer %}
{% set sanitizer_link_flags = sanitizer_link_flags_str.split(' ') %}
include({{ compiler }})
[options]
boost/*:extra_b2_flags="cxxflags=\"{{ sanitizer_build_flags_str }}\" linkflags=\"{{ sanitizer_link_flags_str }}\""
boost/*:without_stacktrace=True
[conf]
tools.build:cflags+={{ sanitizer_build_flags }}
tools.build:cxxflags+={{ sanitizer_build_flags }}
tools.build:exelinkflags+={{ sanitizer_link_flags }}
tools.build:sharedlinkflags+={{ sanitizer_link_flags }}
tools.info.package_id:confs+=["tools.build:cflags", "tools.build:cxxflags", "tools.build:exelinkflags", "tools.build:sharedlinkflags"]
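Because the template keys off `profile_name`, installing it under a concrete name such as `gcc.asan` or `clang.ubsan` produces the corresponding profile; one hedged way to inspect the rendered result (mirroring the `conan profile show` step used elsewhere in this diff):

```bash
# Show the fully rendered settings and sanitizer flags for the clang UBSan profile
conan profile show --profile:all clang.ubsan
```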


@@ -2,17 +2,14 @@ FROM ubuntu:22.04
COPY ./clio_server /opt/clio/bin/clio_server COPY ./clio_server /opt/clio/bin/clio_server
RUN ln -s /opt/clio/bin/clio_server /usr/local/bin/clio_server \ RUN ln -s /opt/clio/bin/clio_server /usr/local/bin/clio_server && \
&& mkdir -p /opt/clio/etc/ \ mkdir -p /opt/clio/etc/ && \
&& mkdir -p /opt/clio/log/ \ mkdir -p /opt/clio/log/ && \
&& groupadd -g 10001 clio \ groupadd -g 10001 clio && \
&& useradd -u 10000 -g 10001 -s /bin/bash clio \ useradd -u 10000 -g 10001 -s /bin/bash clio && \
&& chown clio:clio /opt/clio/log \ chown clio:clio /opt/clio/log && \
&& apt-get update \ apt update && \
&& apt-get install -y --no-install-recommends --no-install-suggests \ apt install -y libatomic1
libatomic1 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
USER clio USER clio
ENTRYPOINT ["/opt/clio/bin/clio_server"] ENTRYPOINT ["/opt/clio/bin/clio_server"]


@@ -0,0 +1,19 @@
FROM ubuntu:focal
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH
SHELL ["/bin/bash", "-c"]
USER root
WORKDIR /root
ENV CLANG_VERSION=16
RUN apt update -qq \
&& apt install -qq -y --no-install-recommends --no-install-suggests \
wget software-properties-common gnupg
RUN wget https://apt.llvm.org/llvm.sh \
&& chmod +x llvm.sh \
&& ./llvm.sh ${CLANG_VERSION} \
&& rm -rf llvm.sh \
&& apt-get install -y libc++-16-dev libc++abi-16-dev


@@ -1,32 +0,0 @@
FROM ubuntu:20.04
ARG DEBIAN_FRONTEND=noninteractive
SHELL ["/bin/bash", "-c"]
# hadolint ignore=DL3002
USER root
WORKDIR /root
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
wget \
software-properties-common \
gnupg \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ARG CLANG_MAJOR_VERSION=invalid
# Bump this version to force rebuild of the image
ARG BUILD_VERSION=0
RUN wget --progress=dot:giga https://apt.llvm.org/llvm.sh \
&& chmod +x llvm.sh \
&& ./llvm.sh ${CLANG_MAJOR_VERSION} \
&& rm -rf llvm.sh \
&& apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
libc++-${CLANG_MAJOR_VERSION}-dev \
libc++abi-${CLANG_MAJOR_VERSION}-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*


@@ -1,3 +0,0 @@
# Clang compiler
This image contains clang compiler to build <https://github.com/XRPLF/clio>.


@@ -0,0 +1,74 @@
FROM ubuntu:focal as build
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH
ARG UBUNTU_VERSION=20.04
ARG GCC_VERSION=12.3.0
ARG BUILD_VERSION=1
RUN apt update && apt install -y wget build-essential file flex libz-dev libzstd-dev
RUN wget https://gcc.gnu.org/pub/gcc/releases/gcc-$GCC_VERSION/gcc-$GCC_VERSION.tar.gz \
&& tar xf gcc-$GCC_VERSION.tar.gz \
&& cd /gcc-$GCC_VERSION && ./contrib/download_prerequisites
RUN mkdir /${TARGETARCH}-gcc-12
WORKDIR /${TARGETARCH}-gcc-12
RUN /gcc-$GCC_VERSION/configure \
--with-pkgversion="clio-build-$BUILD_VERSION https://github.com/XRPLF/clio" \
--enable-languages=c,c++ \
--prefix=/usr \
--with-gcc-major-version-only \
--program-suffix=-12 \
--enable-shared \
--enable-linker-build-id \
--libexecdir=/usr/lib \
--without-included-gettext \
--enable-threads=posix \
--libdir=/usr/lib \
--disable-nls \
--enable-clocale=gnu \
--enable-libstdcxx-backtrace=yes \
--enable-libstdcxx-debug \
--enable-libstdcxx-time=yes \
--with-default-libstdcxx-abi=new \
--enable-gnu-unique-object \
--disable-vtable-verify \
--enable-plugin \
--enable-default-pie \
--with-system-zlib \
--enable-libphobos-checking=release \
--with-target-system-zlib=auto \
--disable-werror \
--enable-cet \
--disable-multilib \
--without-cuda-driver \
--enable-checking=release \
&& make -j`nproc` \
&& make install-strip DESTDIR=/gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION \
&& mkdir -p /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/share/gdb/auto-load/usr/lib64 \
&& mv /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/lib64/libstdc++.so.6.0.30-gdb.py /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/share/gdb/auto-load/usr/lib64/libstdc++.so.6.0.30-gdb.py
# Generate deb
WORKDIR /
COPY control.m4 /
COPY ld.so.conf /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/etc/ld.so.conf.d/1-gcc-12.conf
RUN mkdir /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/DEBIAN \
&& m4 -P -DUBUNTU_VERSION=$UBUNTU_VERSION -DVERSION=$GCC_VERSION-$BUILD_VERSION -DTARGETARCH=$TARGETARCH control.m4 > /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/DEBIAN/control \
&& dpkg-deb --build --root-owner-group /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION /gcc12.deb
# Create final image
FROM ubuntu:focal as gcc
COPY --from=build /gcc12.deb /
# Make gcc-12 available but also leave gcc12.deb for others to copy if needed
RUN apt update && apt-get install -y binutils libc6-dev \
&& dpkg -i /gcc12.deb
RUN update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 100 \
&& update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-12 100 \
&& update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 100 \
&& update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-12 100 \
&& update-alternatives --install /usr/bin/gcov gcov /usr/bin/gcov-12 100 \
&& update-alternatives --install /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-12 100 \
&& update-alternatives --install /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-12 100


@@ -0,0 +1,6 @@
Package: gcc-12-ubuntu-UBUNTUVERSION
Version: VERSION
Architecture: TARGETARCH
Maintainer: Alex Kremer <akremer@ripple.com>
Description: Gcc VERSION build for ubuntu UBUNTUVERSION
Depends: binutils, libc6-dev


@@ -1,119 +0,0 @@
ARG UBUNTU_VERSION=20.04
ARG GCC_MAJOR_VERSION=invalid
FROM ubuntu:$UBUNTU_VERSION AS build
ARG UBUNTU_VERSION
ARG GCC_MAJOR_VERSION
ARG BUILD_VERSION=0
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
build-essential \
file \
flex \
libz-dev \
libzstd-dev \
software-properties-common \
wget \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ARG GCC_VERSION
WORKDIR /
RUN wget --progress=dot:giga https://gcc.gnu.org/pub/gcc/releases/gcc-$GCC_VERSION/gcc-$GCC_VERSION.tar.gz \
&& tar xf gcc-$GCC_VERSION.tar.gz
WORKDIR /gcc-$GCC_VERSION
RUN ./contrib/download_prerequisites
RUN mkdir /gcc-build
WORKDIR /gcc-build
RUN /gcc-$GCC_VERSION/configure \
--with-pkgversion="clio-build-$BUILD_VERSION https://github.com/XRPLF/clio" \
--enable-languages=c,c++ \
--prefix=/usr \
--with-gcc-major-version-only \
--program-suffix=-${GCC_MAJOR_VERSION} \
--enable-shared \
--enable-linker-build-id \
--libexecdir=/usr/lib \
--without-included-gettext \
--enable-threads=posix \
--libdir=/usr/lib \
--disable-nls \
--enable-clocale=gnu \
--enable-libstdcxx-backtrace=yes \
--enable-libstdcxx-debug \
--enable-libstdcxx-time=yes \
--with-default-libstdcxx-abi=new \
--enable-gnu-unique-object \
--disable-vtable-verify \
--enable-plugin \
--enable-default-pie \
--with-system-zlib \
--enable-libphobos-checking=release \
--with-target-system-zlib=auto \
--disable-werror \
--enable-cet \
--disable-multilib \
--without-cuda-driver \
--enable-checking=release
RUN make -j "$(nproc)"
RUN make install-strip DESTDIR=/gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION
RUN export GDB_AUTOLOAD_DIR="/gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/share/gdb/auto-load/usr/lib64" \
&& mkdir -p "$GDB_AUTOLOAD_DIR" \
&& mv \
/gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/lib64/libstdc++.so.*-gdb.py \
$GDB_AUTOLOAD_DIR/
# Generate deb
WORKDIR /
COPY control.m4 /
COPY ld.so.conf /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/etc/ld.so.conf.d/1-gcc-${GCC_MAJOR_VERSION}.conf
RUN mkdir /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/DEBIAN \
&& m4 \
-P \
-DUBUNTU_VERSION=$UBUNTU_VERSION \
-DVERSION=$GCC_VERSION-$BUILD_VERSION \
-DTARGETARCH=$TARGETARCH \
control.m4 > /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/DEBIAN/control \
&& dpkg-deb \
--build \
--root-owner-group \
/gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION \
/gcc${GCC_MAJOR_VERSION}.deb
# Create final image
FROM ubuntu:$UBUNTU_VERSION
ARG GCC_MAJOR_VERSION
COPY --from=build /gcc${GCC_MAJOR_VERSION}.deb /
# Install gcc-${GCC_MAJOR_VERSION}, but also leave gcc${GCC_MAJOR_VERSION}.deb for others to copy if needed
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
binutils \
libc6-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& dpkg -i /gcc${GCC_MAJOR_VERSION}.deb
RUN update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov gcov /usr/bin/gcov-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-${GCC_MAJOR_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-${GCC_MAJOR_VERSION} 100


@@ -1,3 +0,0 @@
# GCC compiler
This image contains GCC compiler to build <https://github.com/XRPLF/clio>.


@@ -1,7 +0,0 @@
Package: gcc-14-ubuntu-UBUNTUVERSION
Version: VERSION
Architecture: TARGETARCH
Maintainer: Alex Kremer <akremer@ripple.com>
Uploaders: Ayaz Salikhov <asalikhov@ripple.com>
Description: GCC VERSION build for ubuntu UBUNTUVERSION
Depends: binutils, libc6-dev

View File

@@ -2,7 +2,7 @@ services:
clio_develop: clio_develop:
image: ghcr.io/xrplf/clio-ci:latest image: ghcr.io/xrplf/clio-ci:latest
volumes: volumes:
- clio_develop_conan_data:/root/.conan2/p - clio_develop_conan_data:/root/.conan/data
- clio_develop_ccache:/root/.ccache - clio_develop_ccache:/root/.ccache
- ../../:/root/clio - ../../:/root/clio
- clio_develop_build:/root/clio/build_docker - clio_develop_build:/root/clio/build_docker

View File

@@ -1,81 +0,0 @@
ARG GHCR_REPO=invalid
ARG GCC_VERSION=invalid
FROM ${GHCR_REPO}/clio-gcc:${GCC_VERSION}
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ARG BUILD_VERSION=1
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
bison \
flex \
ninja-build \
python3 \
python3-pip \
software-properties-common \
wget \
&& pip3 install -q --no-cache-dir \
cmake==3.31.6 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /tmp
ARG MOLD_VERSION=2.40.1
RUN wget --progress=dot:giga "https://github.com/rui314/mold/archive/refs/tags/v${MOLD_VERSION}.tar.gz" \
&& tar xf "v${MOLD_VERSION}.tar.gz" \
&& cd "mold-${MOLD_VERSION}" \
&& mkdir build \
&& cd build \
&& cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
ARG CCACHE_VERSION=4.11.3
RUN wget --progress=dot:giga "https://github.com/ccache/ccache/releases/download/v${CCACHE_VERSION}/ccache-${CCACHE_VERSION}.tar.gz" \
&& tar xf "ccache-${CCACHE_VERSION}.tar.gz" \
&& cd "ccache-${CCACHE_VERSION}" \
&& mkdir build \
&& cd build \
&& cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DENABLE_TESTING=False .. \
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
ARG DOXYGEN_VERSION=1.12.0
RUN wget --progress=dot:giga "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& cd "doxygen-${DOXYGEN_VERSION}" \
&& mkdir build \
&& cd build \
&& cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
ARG CLANG_BUILD_ANALYZER_VERSION=1.6.0
RUN wget --progress=dot:giga "https://github.com/aras-p/ClangBuildAnalyzer/archive/refs/tags/v${CLANG_BUILD_ANALYZER_VERSION}.tar.gz" \
&& tar xf "v${CLANG_BUILD_ANALYZER_VERSION}.tar.gz" \
&& cd "ClangBuildAnalyzer-${CLANG_BUILD_ANALYZER_VERSION}" \
&& mkdir build \
&& cd build \
&& cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
ARG GIT_CLIFF_VERSION=2.9.1
RUN wget --progress=dot:giga "https://github.com/orhun/git-cliff/releases/download/v${GIT_CLIFF_VERSION}/git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz" \
&& tar xf git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz \
&& mv git-cliff-${GIT_CLIFF_VERSION}/git-cliff /usr/local/bin/git-cliff \
&& rm -rf /tmp/* /var/tmp/*
ARG GH_VERSION=2.74.0
RUN wget --progress=dot:giga "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz" \
&& tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
&& mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/local/bin/gh \
&& rm -rf /tmp/* /var/tmp/*
WORKDIR /root
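A rough smoke test for an image built from the Dockerfile above is to run the version flags of the installed tools inside a container. This is only a sketch; the `clio-tools:local` tag is hypothetical and depends on how you build the image:
```sh
docker run --rm clio-tools:local sh -c '
  mold --version &&
  ccache --version &&
  cmake --version &&
  doxygen --version &&
  git-cliff --version &&
  gh --version &&
  command -v ClangBuildAnalyzer
'
```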

View File

@@ -22,7 +22,6 @@ WARNINGS = ${LINT}
WARN_NO_PARAMDOC = ${LINT} WARN_NO_PARAMDOC = ${LINT}
WARN_IF_INCOMPLETE_DOC = ${LINT} WARN_IF_INCOMPLETE_DOC = ${LINT}
WARN_IF_UNDOCUMENTED = ${LINT} WARN_IF_UNDOCUMENTED = ${LINT}
WARN_AS_ERROR = ${WARN_AS_ERROR}
GENERATE_LATEX = NO GENERATE_LATEX = NO
GENERATE_HTML = YES GENERATE_HTML = YES

View File

@@ -1,13 +1,12 @@
# How to build Clio # How to build Clio
`Clio` is built with [CMake](https://cmake.org/) and uses [Conan](https://conan.io/) for managing dependencies. Clio is built with [CMake](https://cmake.org/) and uses [Conan](https://conan.io/) for managing dependencies. It is written in C++20 and therefore requires a modern compiler.
`Clio` is written in C++23 and therefore requires a modern compiler.
## Minimum Requirements ## Minimum Requirements
- [Python 3.7](https://www.python.org/downloads/) - [Python 3.7](https://www.python.org/downloads/)
- [Conan 2.17.0](https://conan.io/downloads.html) - [Conan 1.55](https://conan.io/downloads.html)
- [CMake 3.20, <4.0](https://cmake.org/download/) - [CMake 3.20](https://cmake.org/download/)
- [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html): needed for code coverage generation - [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html): needed for code coverage generation
- [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often - [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often
@@ -19,64 +18,41 @@
### Conan Configuration ### Conan Configuration
By default, Conan uses `~/.conan2` as its home folder. Clio does not require anything other than `compiler.cppstd=20` in your (`~/.conan/profiles/default`) Conan profile.
You can change it by setting the `$CONAN_HOME` environment variable.
[More info about Conan home](https://docs.conan.io/2/reference/environment.html#conan-home).
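For example, relocating the Conan home and recreating the default profile could look like this (the path below is only an illustration):
```sh
# Point Conan 2 at a custom home directory (example path)
export CONAN_HOME="$HOME/.cache/conan2"

# Recreate the default profile under the new home
conan profile detect --force
```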
> [!TIP]
> To set up Conan automatically, you can run `.github/scripts/conan/init.sh`.
> This will delete the Conan home directory (if it exists), set up profiles, and add the Artifactory remote.
The instructions below assume that `$CONAN_HOME` is not set.
#### Profiles
The default profile is the file `~/.conan2/profiles/default`.
Here are some examples of possible profiles:
**Mac apple-clang 17 example**:
```text
[settings]
arch={{detect_api.detect_arch()}}
build_type=Release
compiler=apple-clang
compiler.cppstd=20
compiler.libcxx=libc++
compiler.version=17
os=Macos
[conf]
grpc/1.50.1:tools.build:cxxflags+=["-Wno-missing-template-arg-list-after-template-kw"]
```
**Linux gcc-12 example**:
```text
[settings]
arch={{detect_api.detect_arch()}}
build_type=Release
compiler=gcc
compiler.cppstd=20
compiler.libcxx=libstdc++11
compiler.version=12
os=Linux
[conf]
tools.build:compiler_executables={"c": "/usr/bin/gcc-12", "cpp": "/usr/bin/g++-12"}
```
> [!NOTE] > [!NOTE]
> Although Clio is built using C++23, it's required to set `compiler.cppstd=20` in your profile for the time being as some of Clio's dependencies are not yet capable of building under C++23. > Although Clio is built using C++23, it's required to set `compiler.cppstd=20` for the time being as some of Clio's dependencies are not yet capable of building under C++23.
#### global.conf file **Mac example**:
To increase the speed of downloading and uploading packages, add the following to the `~/.conan2/global.conf` file:
```text ```text
core.download:parallel={{os.cpu_count()}} [settings]
core.upload:parallel={{os.cpu_count()}} os=Macos
os_build=Macos
arch=armv8
arch_build=armv8
compiler=apple-clang
compiler.version=15
compiler.libcxx=libc++
build_type=Release
compiler.cppstd=20
[conf]
tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]
```
**Linux example**:
```text
[settings]
os=Linux
os_build=Linux
arch=x86_64
arch_build=x86_64
compiler=gcc
compiler.version=12
compiler.libcxx=libstdc++11
build_type=Release
compiler.cppstd=20
``` ```
#### Artifactory #### Artifactory
@@ -84,24 +60,18 @@ core.upload:parallel={{os.cpu_count()}}
Make sure Artifactory is set up with Conan. Make sure Artifactory is set up with Conan.
```sh ```sh
conan remote add --index 0 ripple http://18.143.149.228:8081/artifactory/api/conan/dev conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
``` ```
Now you should be able to download the prebuilt dependencies (including the `xrpl` package) on supported platforms. Now you should be able to download the prebuilt `xrpl` package on some platforms.
#### Conan lockfile > [!NOTE]
> You may need to edit the `~/.conan/remotes.json` file to ensure that this newly added artifactory is listed last. Otherwise, you could see compilation errors when building the project with gcc version 13 (or newer).
To achieve reproducible dependencies, we use a [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html). Remove old packages you may have cached.
The `conan.lock` file in the repository contains a "snapshot" of the current dependencies. ```sh
It is implicitly used when running `conan` commands, so you don't need to specify it. conan remove -f xrpl
You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.
To do that, run the following command in the repository root:
```bash
conan lock create . -o '&:tests=True' -o '&:benchmark=True'
``` ```
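As a follow-up, it usually makes sense to review and commit the regenerated lockfile so the pinned revisions stay in sync with the repository. A small sketch, assuming you are in the repository root:
```sh
conan lock create . -o '&:tests=True' -o '&:benchmark=True'

# Review which revisions changed, then commit the updated lockfile
git diff conan.lock
git add conan.lock
```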
## Building Clio ## Building Clio
@@ -110,22 +80,18 @@ Navigate to Clio's root directory and run:
```sh ```sh
mkdir build && cd build mkdir build && cd build
# You can also specify profile explicitly by adding `--profile:all <PROFILE_NAME>` conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True'
# You can also add -GNinja to use Ninja build system instead of Make
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release .. cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous cmake --build . --parallel 8 # or without the number if you feel extra adventurous
``` ```
> [!TIP] > [!TIP]
> You can omit the `-o '&:tests=True'` if you don't want to build `clio_tests`. > You can omit the `-o tests=True` if you don't want to build `clio_tests`.
If successful, `conan install` will find the required packages and `cmake` will do the rest. You should see `clio_server` and `clio_tests` in the `build` directory (the current directory). If successful, `conan install` will find the required packages and `cmake` will do the rest. You should see `clio_server` and `clio_tests` in the `build` directory (the current directory).
> [!TIP] > [!TIP]
> To generate a Code Coverage report, include `-o '&:coverage=True'` in the `conan install` command above, along with `-o '&:tests=True'` to enable tests. > To generate a Code Coverage report, include `-o coverage=True` in the `conan install` command above, along with `-o tests=True` to enable tests. After running the `cmake` commands, execute `make clio_tests-ccov`. The coverage report will be found at `clio_tests-llvm-cov/index.html`.
> After running the `cmake` commands, execute `make clio_tests-ccov`.
> The coverage report will be found at `clio_tests-llvm-cov/index.html`.
<!-- markdownlint-disable-line MD028 --> <!-- markdownlint-disable-line MD028 -->
@@ -134,15 +100,15 @@ If successful, `conan install` will find the required packages and `cmake` will
### Generating API docs for Clio ### Generating API docs for Clio
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen 1.12.0 on your system. The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen on your system.
To generate the API docs: To generate the API docs:
1. First, include `-o '&:docs=True'` in the conan install command. For example: 1. First, include `-o docs=True` in the conan install command. For example:
```sh ```sh
mkdir build && cd build mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True' -o '&:docs=True' conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False -o docs=True
``` ```
2. Once that has completed successfully, run the `cmake` command and add the `--target docs` option: 2. Once that has completed successfully, run the `cmake` command and add the `--target docs` option:
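The hunk is cut off before the actual command, but under the build-directory layout used earlier in this document the step amounts to roughly the following (a sketch, not the verbatim text from the file):
```sh
# From inside the build directory created above
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --target docs
```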
@@ -166,7 +132,7 @@ It is also possible to build Clio using [Docker](https://www.docker.com/) if you
docker run -it ghcr.io/xrplf/clio-ci:latest docker run -it ghcr.io/xrplf/clio-ci:latest
git clone https://github.com/XRPLF/clio git clone https://github.com/XRPLF/clio
mkdir build && cd build mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True' conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release .. cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous cmake --build . --parallel 8 # or without the number if you feel extra adventurous
``` ```
@@ -182,36 +148,34 @@ If you wish to develop against a `rippled` instance running in standalone mode t
Sometimes, during development, you need to build against a custom version of `libxrpl`. (For example, you may be developing compatibility for a proposed amendment that is not yet merged to the main `rippled` codebase.) To build Clio with compatibility for a custom fork or branch of `rippled`, follow these steps: Sometimes, during development, you need to build against a custom version of `libxrpl`. (For example, you may be developing compatibility for a proposed amendment that is not yet merged to the main `rippled` codebase.) To build Clio with compatibility for a custom fork or branch of `rippled`, follow these steps:
1. First, pull/clone the appropriate `rippled` version and switch to the branch you want to build. 1. First, pull/clone the appropriate `rippled` fork and switch to the branch you want to build. For example, the following example uses an in-development build with [XLS-33d Multi-Purpose Tokens](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033d-multi-purpose-tokens):
The following example uses the `2.5.0-rc1` tag of `rippled` from the main branch:
```sh ```sh
git clone https://github.com/XRPLF/rippled/ git clone https://github.com/shawnxie999/rippled/
cd rippled cd rippled
git checkout 2.5.0-rc1 git switch mpt-1.1
``` ```
2. Export a custom package to your local Conan store using a user/channel: 2. Export a custom package to your local Conan store using a user/channel:
```sh ```sh
conan export . --user=my --channel=feature conan export . my/feature
``` ```
3. Patch your local Clio build to use the right package. 3. Patch your local Clio build to use the right package.
Edit `conanfile.py` in the Clio repository root. Replace the `xrpl` requirement with the custom package version from the previous step. This must also include the current version number from your `rippled` branch. For example: Edit `conanfile.py` (from the Clio repository root). Replace the `xrpl` requirement with the custom package version from the previous step. This must also include the current version number from your `rippled` branch. For example:
```py ```py
# ... (excerpt from conanfile.py) # ... (excerpt from conanfile.py)
requires = [ requires = [
'boost/1.83.0', 'boost/1.82.0',
'cassandra-cpp-driver/2.17.0', 'cassandra-cpp-driver/2.17.0',
'fmt/10.1.1', 'fmt/10.1.1',
'protobuf/3.21.9', 'protobuf/3.21.9',
'grpc/1.50.1', 'grpc/1.50.1',
'openssl/1.1.1v', 'openssl/1.1.1u',
'xrpl/2.5.0-rc1@my/feature', # Use your exported version here 'xrpl/2.3.0-b1@my/feature', # Update this line
'zlib/1.3.1',
'libbacktrace/cci.20210118' 'libbacktrace/cci.20210118'
] ]
``` ```
@@ -222,16 +186,17 @@ Sometimes, during development, you need to build against a custom version of `li
## Using `clang-tidy` for static analysis ## Using `clang-tidy` for static analysis
Clang-tidy can be run by CMake when building the project. The minimum [clang-tidy](https://clang.llvm.org/extra/clang-tidy/) version required is 19.0.
To achieve this, you just need to provide the option `-o '&:lint=True'` for the `conan install` command:
Clang-tidy can be run by Cmake when building the project. To achieve this, you just need to provide the option `-o lint=True` for the `conan install` command:
```sh ```sh
conan install .. --output-folder . --build missing --settings build_type=Release -o '&:tests=True' -o '&:lint=True' --profile:all clang conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=True
``` ```
By default CMake will try to find `clang-tidy` automatically in your system. By default Cmake will try to find `clang-tidy` automatically in your system.
To force CMake to use your desired binary, set the `CLIO_CLANG_TIDY_BIN` environment variable to the path of the `clang-tidy` binary. For example: To force Cmake to use your desired binary, set the `CLIO_CLANG_TIDY_BIN` environment variable to the path of the `clang-tidy` binary. For example:
```sh ```sh
export CLIO_CLANG_TIDY_BIN=/opt/homebrew/opt/llvm/bin/clang-tidy export CLIO_CLANG_TIDY_BIN=/opt/homebrew/opt/llvm@19/bin/clang-tidy
``` ```

View File

@@ -5,6 +5,7 @@
Clio needs access to a `rippled` server in order to work. The following configurations are required for Clio and `rippled` to communicate: Clio needs access to a `rippled` server in order to work. The following configurations are required for Clio and `rippled` to communicate:
1. In the Clio config file, provide the following: 1. In the Clio config file, provide the following:
- The IP of the `rippled` server - The IP of the `rippled` server
- The port on which `rippled` is accepting unencrypted WebSocket connections - The port on which `rippled` is accepting unencrypted WebSocket connections
@@ -12,6 +13,7 @@ Clio needs access to a `rippled` server in order to work. The following configur
- The port on which `rippled` is handling gRPC requests - The port on which `rippled` is handling gRPC requests
2. In the `rippled` config file, you need to open: 2. In the `rippled` config file, you need to open:
- A port to accept unencrypted WebSocket connections - A port to accept unencrypted WebSocket connections
- A port to handle gRPC requests, with the IP(s) of Clio specified in the `secure_gateway` entry - A port to handle gRPC requests, with the IP(s) of Clio specified in the `secure_gateway` entry

View File

@@ -4,17 +4,17 @@
> This is only an example of Grafana dashboard for Clio. It was created for demonstration purposes only and may contain errors. > This is only an example of Grafana dashboard for Clio. It was created for demonstration purposes only and may contain errors.
> Clio team would not recommend relying on data from this dashboard or using it for monitoring your Clio instances. > Clio team would not recommend relying on data from this dashboard or using it for monitoring your Clio instances.
This directory contains an example of Docker-based infrastructure to collect and visualize metrics from Clio. This directory contains an example of docker based infrastructure to collect and visualise metrics from clio.
The structure of the directory: The structure of the directory:
- `compose.yaml` - `compose.yaml`
Docker Compose file with Prometheus and Grafana set up. Docker-compose file with Prometheus and Grafana set up.
- `prometheus.yaml` - `prometheus.yaml`
Defines metrics collection from Clio and Prometheus itself. Defines metrics collection from Clio and Prometheus itself.
Demonstrates how to set up a Clio target and Clio's admin authorization in Prometheus. Demonstrates how to setup Clio target and Clio's admin authorisation in Prometheus.
- `grafana/clio_dashboard.json` - `grafana/clio_dashboard.json`
JSON file containing a pre-configured dashboard in Grafana format. Json file containing preconfigured dashboard in Grafana format.
- `grafana/dashboard_local.yaml` - `grafana/dashboard_local.yaml`
Grafana configuration file defining the directory to search for dashboards json files. Grafana configuration file defining the directory to search for dashboards json files.
- `grafana/datasources.yaml` - `grafana/datasources.yaml`
@@ -22,9 +22,9 @@ The structure of the directory:
## How to try ## How to try
1. Make sure you have Docker (with `Docker Compose`) installed. 1. Make sure you have `docker` and `docker-compose` installed.
2. Run `docker compose up -d` from this directory. It will start docker containers with Prometheus and Grafana. 2. Run `docker-compose up -d` from this directory. It will start docker containers with Prometheus and Grafana.
3. Open [http://localhost:3000/dashboards](http://localhost:3000/dashboards). Grafana login `admin`, password `grafana`. 3. Open [http://localhost:3000/dashboards](http://localhost:3000/dashboards). Grafana login `admin`, password `grafana`.
There will be a pre-configured Clio dashboard. There will be preconfigured Clio dashboard.
If Clio is not running yet, launch Clio to see metrics. Some of the metrics may appear only after requests to Clio. If Clio is not running yet, launch Clio to see metrics. Some of the metrics may appear only after requests to Clio.

View File

@@ -4,10 +4,10 @@
- Access to a Cassandra cluster or ScyllaDB cluster. Can be local or remote. - Access to a Cassandra cluster or ScyllaDB cluster. Can be local or remote.
> [!IMPORTANT] > [!IMPORTANT]
> There are some key considerations when using **ScyllaDB**. By default, Scylla reserves all free RAM on a machine for itself. If you are running `rippled` or other services on the same machine, restrict its memory usage using the `--memory` argument. > There are some key considerations when using **ScyllaDB**. By default, Scylla reserves all free RAM on a machine for itself. If you are running `rippled` or other services on the same machine, restrict its memory usage using the `--memory` argument.
> >
> See [ScyllaDB in a Shared Environment](https://docs.scylladb.com/getting-started/scylla-in-a-shared-environment/) to learn more. > See [ScyllaDB in a Shared Environment](https://docs.scylladb.com/getting-started/scylla-in-a-shared-environment/) to learn more.
- Access to one or more `rippled` nodes. Can be local or remote. - Access to one or more `rippled` nodes. Can be local or remote.
@@ -92,4 +92,4 @@ To completely disable Prometheus metrics add `"prometheus": { "enabled": false }
It is important to know that Clio responds to Prometheus requests only if they are admin requests. If you are using the admin password feature, the same password should be provided in the Authorization header of Prometheus requests. It is important to know that Clio responds to Prometheus requests only if they are admin requests. If you are using the admin password feature, the same password should be provided in the Authorization header of Prometheus requests.
You can find an example Docker Compose file, with Prometheus and Grafana configs, in [examples/infrastructure](../docs/examples/infrastructure/). You can find an example docker-compose file, with Prometheus and Grafana configs, in [examples/infrastructure](../docs/examples/infrastructure/).
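As a quick local check that the metrics endpoint is reachable, you can query it directly. This is a sketch only: it assumes Clio listens on the default `localhost:51233` and that the request qualifies as an admin request (for example, because it originates from a local IP and no admin password is configured):
```sh
# Fetch the Prometheus metrics exposed by Clio and show the first lines
curl -s http://localhost:51233/metrics | head
```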

View File

@@ -61,7 +61,6 @@ pushd ${DOCDIR} > /dev/null 2>&1
cat ${ROOT}/docs/Doxyfile | \ cat ${ROOT}/docs/Doxyfile | \
sed \ sed \
-e "s/\${LINT}/YES/" \ -e "s/\${LINT}/YES/" \
-e "s/\${WARN_AS_ERROR}/NO/" \
-e "s!\${SOURCE}!${ROOT}!" \ -e "s!\${SOURCE}!${ROOT}!" \
-e "s/\${USE_DOT}/NO/" \ -e "s/\${USE_DOT}/NO/" \
-e "s/\${EXCLUDES}/impl/" \ -e "s/\${EXCLUDES}/impl/" \

View File

@@ -1,9 +0,0 @@
ser
onWs
datas
AtLeast
AtMost
compiletime
tring
trings
strat

View File

@@ -2,43 +2,31 @@
# Note: This script is intended to be run from the root of the repository. # Note: This script is intended to be run from the root of the repository.
# #
# This script will fix local includes in the C++ code for a given file. # This script checks will fix local includes in the C++ code.
# Usage: ./pre-commit-hooks/fix-local-includes.sh <file1> <file2> ...
files="$@" # paths to fix include statements
echo "+ Fixing includes in $files..." sources="src tests"
echo "+ Fixing local includes..."
function grep_code {
grep -l "${1}" ${sources} -r --include \*.hpp --include \*.cpp
}
GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false) GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false)
if [[ "$GNU_SED" == "false" ]]; then # macOS sed if [[ "$GNU_SED" == "false" ]]; then # macOS sed
# make all includes to be <...> style
grep_code '#include ".*"' | xargs sed -i '' -E 's|#include "(.*)"|#include <\1>|g'
# make local includes to be "..." style
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g') main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g')
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i '' -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
else else
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g') # make all includes to be <...> style
grep_code '#include ".*"' | xargs sed -i -E 's|#include "(.*)"|#include <\1>|g'
# make local includes to be "..." style
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g')
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
fi fi
fix_includes() {
file_path="$1"
file_path_all_global="${file_path}.tmp.global"
file_path_fixed="${file_path}.tmp.fixed"
# Make all includes to be <...> style
sed -E 's|#include "(.*)"|#include <\1>|g' "$file_path" > "$file_path_all_global"
# Make local includes to be "..." style
sed -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g" "$file_path_all_global" > "$file_path_fixed"
rm "$file_path_all_global"
# Check if the temporary file is different from the original file
if ! cmp -s "$file_path" "$file_path_fixed"; then
# Replace the original file with the temporary file
mv "$file_path_fixed" "$file_path"
else
# Remove the temporary file if it's the same as the original
rm "$file_path_fixed"
fi
}
for file in $files; do
fix_includes "$file"
done

View File

@@ -1,76 +0,0 @@
#!/usr/bin/env python3
import argparse
import re
from pathlib import Path
def fix_json_style(cpp_content: str) -> str:
cpp_content = cpp_content.replace('R"json(', 'R"JSON(').replace(')json"', ')JSON"')
pattern = r'R"JSON\((.*?)\)JSON"'
def replace_json(match):
raw_json = match.group(1)
raw_json = (
raw_json.replace(" :", ":")
.replace(" ,", ",")
.replace(" null", "null")
.replace(':"', ': "')
.replace(',"', ', "')
.replace('":{', '": {')
.replace('":[', '": [')
.replace('":true', '": true')
.replace('":false', '": false')
.replace('":null', '": null')
)
for digit in range(10):
raw_json = raw_json.replace(f'":{digit}', f'": {digit}')
return f'R"JSON({raw_json})JSON"'
return re.sub(pattern, replace_json, cpp_content, flags=re.DOTALL)
def process_file(file_path: Path, dry_run: bool) -> bool:
content = file_path.read_text(encoding="utf-8")
new_content = fix_json_style(content)
if new_content != content:
print(f"Processing file: {file_path}")
if dry_run:
print("Dry run: changes won't be written to the file.")
else:
print("Writing changes to file.")
file_path.write_text(new_content, encoding="utf-8")
return new_content == content
def main():
parser = argparse.ArgumentParser(
description="Fix JSON style in C++ files",
)
parser.add_argument(
"--dry-run",
default=False,
action="store_true",
help="Don't modify files, just print what would be changed",
)
parser.add_argument(
"files",
nargs="*",
help="Specific files to process",
)
args = parser.parse_args()
success = True
for file in args.files:
success = process_file(Path(file), dry_run=args.dry_run) and success
if not success:
print("Errors occurred while processing files.")
exit(1)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,3 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-checkout' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-checkout "$@"

View File

@@ -0,0 +1,3 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-commit' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-commit "$@"

View File

@@ -0,0 +1,3 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-merge' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-merge "$@"

pre-commit-hooks/lfs/pre-push Executable file
View File

@@ -0,0 +1,3 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'pre-push' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs pre-push "$@"

View File

@@ -39,14 +39,16 @@ verify_tag_signed() {
fi fi
} }
# Check some things if we're pushing a branch called "release/" while read local_ref local_oid remote_ref remote_oid; do
if echo "$PRE_COMMIT_REMOTE_BRANCH" | grep ^refs\/heads\/release\/ &> /dev/null ; then # Check some things if we're pushing a branch called "release/"
version=$(git tag --points-at HEAD) if echo "$remote_ref" | grep ^refs\/heads\/release\/ &> /dev/null ; then
echo "Looks like you're trying to push a $version release..." version=$(git tag --points-at HEAD)
echo "Making sure you've signed and tagged it." echo "Looks like you're trying to push a $version release..."
if verify_commit_signed && verify_tag && verify_tag_signed ; then echo "Making sure you've signed and tagged it."
: # Ok, I guess you can push if verify_commit_signed && verify_tag && verify_tag_signed ; then
else : # Ok, I guess you can push
exit 1 else
exit 1
fi
fi fi
fi done

View File

@@ -10,5 +10,4 @@ target_link_libraries(
clio_web clio_web
clio_rpc clio_rpc
clio_migration clio_migration
PRIVATE Boost::program_options
) )

View File

@@ -21,7 +21,7 @@
#include "migration/MigrationApplication.hpp" #include "migration/MigrationApplication.hpp"
#include "util/build/Build.hpp" #include "util/build/Build.hpp"
#include "util/config/ConfigDescription.hpp" #include "util/newconfig/ConfigDescription.hpp"
#include <boost/program_options/options_description.hpp> #include <boost/program_options/options_description.hpp>
#include <boost/program_options/parsers.hpp> #include <boost/program_options/parsers.hpp>
@@ -60,17 +60,17 @@ CliArgs::parse(int argc, char const* argv[])
po::store(po::command_line_parser(argc, argv).options(description).positional(positional).run(), parsed); po::store(po::command_line_parser(argc, argv).options(description).positional(positional).run(), parsed);
po::notify(parsed); po::notify(parsed);
if (parsed.contains("help")) { if (parsed.count("help") != 0u) {
std::cout << "Clio server " << util::build::getClioFullVersionString() << "\n\n" << description; std::cout << "Clio server " << util::build::getClioFullVersionString() << "\n\n" << description;
return Action{Action::Exit{EXIT_SUCCESS}}; return Action{Action::Exit{EXIT_SUCCESS}};
} }
if (parsed.contains("version")) { if (parsed.count("version") != 0u) {
std::cout << util::build::getClioFullVersionString() << '\n'; std::cout << util::build::getClioFullVersionString() << '\n';
return Action{Action::Exit{EXIT_SUCCESS}}; return Action{Action::Exit{EXIT_SUCCESS}};
} }
if (parsed.contains("config-description")) { if (parsed.count("config-description") != 0u) {
std::filesystem::path const filePath = parsed["config-description"].as<std::string>(); std::filesystem::path const filePath = parsed["config-description"].as<std::string>();
auto const res = util::config::ClioConfigDescription::generateConfigDescriptionToFile(filePath); auto const res = util::config::ClioConfigDescription::generateConfigDescriptionToFile(filePath);
@@ -83,17 +83,18 @@ CliArgs::parse(int argc, char const* argv[])
auto configPath = parsed["conf"].as<std::string>(); auto configPath = parsed["conf"].as<std::string>();
if (parsed.contains("migrate")) { if (parsed.count("migrate") != 0u) {
auto const opt = parsed["migrate"].as<std::string>(); auto const opt = parsed["migrate"].as<std::string>();
if (opt == "status") if (opt == "status")
return Action{Action::Migrate{.configPath = std::move(configPath), .subCmd = MigrateSubCmd::status()}}; return Action{Action::Migrate{.configPath = std::move(configPath), .subCmd = MigrateSubCmd::status()}};
return Action{Action::Migrate{.configPath = std::move(configPath), .subCmd = MigrateSubCmd::migration(opt)}}; return Action{Action::Migrate{.configPath = std::move(configPath), .subCmd = MigrateSubCmd::migration(opt)}};
} }
if (parsed.contains("verify")) if (parsed.count("verify") != 0u)
return Action{Action::VerifyConfig{.configPath = std::move(configPath)}}; return Action{Action::VerifyConfig{.configPath = std::move(configPath)}};
return Action{Action::Run{.configPath = std::move(configPath), .useNgWebServer = parsed.contains("ng-web-server")}}; return Action{Action::Run{.configPath = std::move(configPath), .useNgWebServer = parsed.count("ng-web-server") != 0}
};
} }
} // namespace app } // namespace app

View File

@@ -36,17 +36,15 @@
#include "rpc/RPCEngine.hpp" #include "rpc/RPCEngine.hpp"
#include "rpc/WorkQueue.hpp" #include "rpc/WorkQueue.hpp"
#include "rpc/common/impl/HandlerProvider.hpp" #include "rpc/common/impl/HandlerProvider.hpp"
#include "util/Random.hpp"
#include "util/async/context/BasicExecutionContext.hpp"
#include "util/build/Build.hpp" #include "util/build/Build.hpp"
#include "util/config/ConfigDefinition.hpp"
#include "util/log/Logger.hpp" #include "util/log/Logger.hpp"
#include "util/newconfig/ConfigDefinition.hpp"
#include "util/prometheus/Prometheus.hpp"
#include "web/AdminVerificationStrategy.hpp" #include "web/AdminVerificationStrategy.hpp"
#include "web/RPCServerHandler.hpp" #include "web/RPCServerHandler.hpp"
#include "web/Server.hpp" #include "web/Server.hpp"
#include "web/dosguard/DOSGuard.hpp" #include "web/dosguard/DOSGuard.hpp"
#include "web/dosguard/IntervalSweepHandler.hpp" #include "web/dosguard/IntervalSweepHandler.hpp"
#include "web/dosguard/Weights.hpp"
#include "web/dosguard/WhitelistHandler.hpp" #include "web/dosguard/WhitelistHandler.hpp"
#include "web/ng/RPCServerHandler.hpp" #include "web/ng/RPCServerHandler.hpp"
#include "web/ng/Server.hpp" #include "web/ng/Server.hpp"
@@ -90,6 +88,7 @@ ClioApplication::ClioApplication(util::config::ClioConfigDefinition const& confi
: config_(config), signalsHandler_{config_} : config_(config), signalsHandler_{config_}
{ {
LOG(util::LogService::info()) << "Clio version: " << util::build::getClioFullVersionString(); LOG(util::LogService::info()) << "Clio version: " << util::build::getClioFullVersionString();
PrometheusService::init(config);
signalsHandler_.subscribeToStop([this]() { appStopper_.stop(); }); signalsHandler_.subscribeToStop([this]() { appStopper_.stop(); });
} }
@@ -103,14 +102,9 @@ ClioApplication::run(bool const useNgWebServer)
// This is not the only io context in the application. // This is not the only io context in the application.
boost::asio::io_context ioc{threads}; boost::asio::io_context ioc{threads};
// Similarly we need a context to run ETLng on
// In the future we can remove the raw ioc and use ctx instead
util::async::CoroExecutionContext ctx{threads};
// Rate limiter, to prevent abuse // Rate limiter, to prevent abuse
auto whitelistHandler = web::dosguard::WhitelistHandler{config_}; auto whitelistHandler = web::dosguard::WhitelistHandler{config_};
auto const dosguardWeights = web::dosguard::Weights::make(config_); auto dosGuard = web::dosguard::DOSGuard{config_, whitelistHandler};
auto dosGuard = web::dosguard::DOSGuard{config_, whitelistHandler, dosguardWeights};
auto sweepHandler = web::dosguard::IntervalSweepHandler{config_, ioc, dosGuard}; auto sweepHandler = web::dosguard::IntervalSweepHandler{config_, ioc, dosGuard};
auto cache = data::LedgerCache{}; auto cache = data::LedgerCache{};
@@ -143,19 +137,14 @@ ClioApplication::run(bool const useNgWebServer)
// The server uses the balancer to forward RPCs to a rippled node. // The server uses the balancer to forward RPCs to a rippled node.
// The balancer itself publishes to streams (transactions_proposed and accounts_proposed) // The balancer itself publishes to streams (transactions_proposed and accounts_proposed)
auto balancer = [&] -> std::shared_ptr<etlng::LoadBalancerInterface> { auto balancer = [&] -> std::shared_ptr<etlng::LoadBalancerInterface> {
if (config_.get<bool>("__ng_etl")) { if (config_.get<bool>("__ng_etl"))
return etlng::LoadBalancer::makeLoadBalancer( return etlng::LoadBalancer::makeLoadBalancer(config_, ioc, backend, subscriptions, ledgers);
config_, ioc, backend, subscriptions, std::make_unique<util::MTRandomGenerator>(), ledgers
);
}
return etl::LoadBalancer::makeLoadBalancer( return etl::LoadBalancer::makeLoadBalancer(config_, ioc, backend, subscriptions, ledgers);
config_, ioc, backend, subscriptions, std::make_unique<util::MTRandomGenerator>(), ledgers
);
}(); }();
// ETL is responsible for writing and publishing to streams. In read-only mode, ETL only publishes // ETL is responsible for writing and publishing to streams. In read-only mode, ETL only publishes
auto etl = etl::ETLService::makeETLService(config_, ioc, ctx, backend, subscriptions, balancer, ledgers); auto etl = etl::ETLService::makeETLService(config_, ioc, backend, subscriptions, balancer, ledgers);
auto workQueue = rpc::WorkQueue::makeWorkQueue(config_); auto workQueue = rpc::WorkQueue::makeWorkQueue(config_);
auto counters = rpc::Counters::makeCounters(workQueue); auto counters = rpc::Counters::makeCounters(workQueue);
@@ -169,7 +158,7 @@ ClioApplication::run(bool const useNgWebServer)
RPCEngineType::makeRPCEngine(config_, backend, balancer, dosGuard, workQueue, counters, handlerProvider); RPCEngineType::makeRPCEngine(config_, backend, balancer, dosGuard, workQueue, counters, handlerProvider);
if (useNgWebServer or config_.get<bool>("server.__ng_web_server")) { if (useNgWebServer or config_.get<bool>("server.__ng_web_server")) {
web::ng::RPCServerHandler<RPCEngineType> handler{config_, backend, rpcEngine, etl, dosGuard}; web::ng::RPCServerHandler<RPCEngineType> handler{config_, backend, rpcEngine, etl};
auto expectedAdminVerifier = web::makeAdminVerificationStrategy(config_); auto expectedAdminVerifier = web::makeAdminVerificationStrategy(config_);
if (not expectedAdminVerifier.has_value()) { if (not expectedAdminVerifier.has_value()) {
@@ -187,7 +176,7 @@ ClioApplication::run(bool const useNgWebServer)
httpServer->onGet("/metrics", MetricsHandler{adminVerifier}); httpServer->onGet("/metrics", MetricsHandler{adminVerifier});
httpServer->onGet("/health", HealthCheckHandler{}); httpServer->onGet("/health", HealthCheckHandler{});
auto requestHandler = RequestHandler{adminVerifier, handler}; auto requestHandler = RequestHandler{adminVerifier, handler, dosGuard};
httpServer->onPost("/", requestHandler); httpServer->onPost("/", requestHandler);
httpServer->onWs(std::move(requestHandler)); httpServer->onWs(std::move(requestHandler));
@@ -210,7 +199,7 @@ ClioApplication::run(bool const useNgWebServer)
} }
// Init the web server // Init the web server
auto handler = std::make_shared<web::RPCServerHandler<RPCEngineType>>(config_, backend, rpcEngine, etl, dosGuard); auto handler = std::make_shared<web::RPCServerHandler<RPCEngineType>>(config_, backend, rpcEngine, etl);
auto const httpServer = web::makeHttpServer(config_, ioc, dosGuard, handler); auto const httpServer = web::makeHttpServer(config_, ioc, dosGuard, handler);

View File

@@ -21,7 +21,7 @@
#include "app/Stopper.hpp" #include "app/Stopper.hpp"
#include "util/SignalsHandler.hpp" #include "util/SignalsHandler.hpp"
#include "util/config/ConfigDefinition.hpp" #include "util/newconfig/ConfigDefinition.hpp"
namespace app { namespace app {

View File

@@ -19,8 +19,8 @@
#pragma once #pragma once
#include "util/config/ConfigDefinition.hpp" #include "util/newconfig/ConfigDefinition.hpp"
#include "util/config/ConfigFileJson.hpp" #include "util/newconfig/ConfigFileJson.hpp"
#include <cstdlib> #include <cstdlib>
#include <iostream> #include <iostream>

View File

@@ -97,7 +97,7 @@ HealthCheckHandler::operator()(
boost::asio::yield_context boost::asio::yield_context
) )
{ {
static constexpr auto kHEALTH_CHECK_HTML = R"html( static auto constexpr kHEALTH_CHECK_HTML = R"html(
<!DOCTYPE html> <!DOCTYPE html>
<html> <html>
<head><title>Test page for Clio</title></head> <head><title>Test page for Clio</title></head>

View File

@@ -147,6 +147,7 @@ class RequestHandler {
util::Logger webServerLog_{"WebServer"}; util::Logger webServerLog_{"WebServer"};
std::shared_ptr<web::AdminVerificationStrategy> adminVerifier_; std::shared_ptr<web::AdminVerificationStrategy> adminVerifier_;
std::reference_wrapper<RpcHandlerType> rpcHandler_; std::reference_wrapper<RpcHandlerType> rpcHandler_;
std::reference_wrapper<web::dosguard::DOSGuardInterface> dosguard_;
public: public:
/** /**
@@ -154,9 +155,14 @@ public:
* *
* @param adminVerifier The AdminVerificationStrategy to use for verifying the connection for admin access. * @param adminVerifier The AdminVerificationStrategy to use for verifying the connection for admin access.
* @param rpcHandler The RPC handler to use for handling the request. * @param rpcHandler The RPC handler to use for handling the request.
* @param dosguard The DOSGuardInterface to use for checking the connection.
*/ */
RequestHandler(std::shared_ptr<web::AdminVerificationStrategy> adminVerifier, RpcHandlerType& rpcHandler) RequestHandler(
: adminVerifier_(std::move(adminVerifier)), rpcHandler_(rpcHandler) std::shared_ptr<web::AdminVerificationStrategy> adminVerifier,
RpcHandlerType& rpcHandler,
web::dosguard::DOSGuardInterface& dosguard
)
: adminVerifier_(std::move(adminVerifier)), rpcHandler_(rpcHandler), dosguard_(dosguard)
{ {
} }
@@ -177,6 +183,21 @@ public:
boost::asio::yield_context yield boost::asio::yield_context yield
) )
{ {
if (not dosguard_.get().request(connectionMetadata.ip())) {
auto error = rpc::makeError(rpc::RippledError::rpcSLOW_DOWN);
if (not request.isHttp()) {
try {
auto requestJson = boost::json::parse(request.message());
if (requestJson.is_object() && requestJson.as_object().contains("id"))
error["id"] = requestJson.as_object().at("id");
error["request"] = request.message();
} catch (std::exception const&) {
error["request"] = request.message();
}
}
return web::ng::Response{boost::beast::http::status::service_unavailable, error, request};
}
LOG(webServerLog_.info()) << connectionMetadata.tag() LOG(webServerLog_.info()) << connectionMetadata.tag()
<< "Received request from ip = " << connectionMetadata.ip() << "Received request from ip = " << connectionMetadata.ip()
<< " - posting to WorkQueue"; << " - posting to WorkQueue";
@@ -186,7 +207,20 @@ public:
}); });
try { try {
return rpcHandler_(request, connectionMetadata, std::move(subscriptionContext), yield); auto response = rpcHandler_(request, connectionMetadata, std::move(subscriptionContext), yield);
if (not dosguard_.get().add(connectionMetadata.ip(), response.message().size())) {
auto jsonResponse = boost::json::parse(response.message()).as_object();
jsonResponse["warning"] = "load";
if (jsonResponse.contains("warnings") && jsonResponse["warnings"].is_array()) {
jsonResponse["warnings"].as_array().push_back(rpc::makeWarning(rpc::WarnRpcRateLimit));
} else {
jsonResponse["warnings"] = boost::json::array{rpc::makeWarning(rpc::WarnRpcRateLimit)};
}
response.setMessage(jsonResponse);
}
return response;
} catch (std::exception const&) { } catch (std::exception const&) {
return web::ng::Response{ return web::ng::Response{
boost::beast::http::status::internal_server_error, boost::beast::http::status::internal_server_error,

View File

@@ -36,7 +36,6 @@
#include <chrono> #include <chrono>
#include <ctime> #include <ctime>
#include <memory> #include <memory>
#include <string>
#include <utility> #include <utility>
#include <vector> #include <vector>
@@ -112,12 +111,9 @@ ClusterCommunicationService::selfData() const
return result; return result;
} }
std::expected<std::vector<ClioNode>, std::string> std::vector<ClioNode>
ClusterCommunicationService::clusterData() const ClusterCommunicationService::clusterData() const
{ {
if (not isHealthy_) {
return std::unexpected{"Service is not healthy"};
}
std::vector<ClioNode> result; std::vector<ClioNode> result;
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) { boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) {
result = otherNodesData_; result = otherNodesData_;
@@ -131,13 +127,7 @@ ClusterCommunicationService::doRead(boost::asio::yield_context yield)
{ {
otherNodesData_.clear(); otherNodesData_.clear();
BackendInterface::ClioNodesDataFetchResult expectedResult; auto const expectedResult = backend_->fetchClioNodesData(yield);
try {
expectedResult = backend_->fetchClioNodesData(yield);
} catch (...) {
expectedResult = std::unexpected{"Failed to fetch Clio nodes data"};
}
if (!expectedResult.has_value()) { if (!expectedResult.has_value()) {
LOG(log_.error()) << "Failed to fetch nodes data"; LOG(log_.error()) << "Failed to fetch nodes data";
isHealthy_ = false; isHealthy_ = false;

View File

@@ -34,7 +34,6 @@
#include <chrono> #include <chrono>
#include <memory> #include <memory>
#include <string>
#include <vector> #include <vector>
namespace cluster { namespace cluster {
@@ -126,9 +125,9 @@ public:
/** /**
* @brief Get the data of all nodes in the cluster (including self). * @brief Get the data of all nodes in the cluster (including self).
* *
* @return The data of all nodes in the cluster or error if the service is not healthy. * @return The data of all nodes in the cluster.
*/ */
std::expected<std::vector<ClioNode>, std::string> std::vector<ClioNode>
clusterData() const override; clusterData() const override;
private: private:

View File

@@ -21,8 +21,6 @@
#include "cluster/ClioNode.hpp" #include "cluster/ClioNode.hpp"
#include <expected>
#include <string>
#include <vector> #include <vector>
namespace cluster { namespace cluster {
@@ -45,9 +43,9 @@ public:
/** /**
* @brief Get the data of all nodes in the cluster (including self). * @brief Get the data of all nodes in the cluster (including self).
* *
* @return The data of all nodes in the cluster or error if the service is not healthy. * @return The data of all nodes in the cluster.
*/ */
[[nodiscard]] virtual std::expected<std::vector<ClioNode>, std::string> [[nodiscard]] virtual std::vector<ClioNode>
clusterData() const = 0; clusterData() const = 0;
}; };

View File

@@ -78,20 +78,17 @@ WritingAmendmentKey::WritingAmendmentKey(std::string amendmentName) : AmendmentK
} // namespace impl } // namespace impl
AmendmentKey:: AmendmentKey::operator std::string const&() const
operator std::string const&() const
{ {
return name; return name;
} }
AmendmentKey:: AmendmentKey::operator std::string_view() const
operator std::string_view() const
{ {
return name; return name;
} }
AmendmentKey:: AmendmentKey::operator ripple::uint256() const
operator ripple::uint256() const
{ {
return Amendment::getAmendmentId(name); return Amendment::getAmendmentId(name);
} }

View File

@@ -137,14 +137,6 @@ struct Amendments {
REGISTER(fixInvalidTxFlags); REGISTER(fixInvalidTxFlags);
REGISTER(fixFrozenLPTokenTransfer); REGISTER(fixFrozenLPTokenTransfer);
REGISTER(DeepFreeze); REGISTER(DeepFreeze);
REGISTER(PermissionDelegation);
REGISTER(fixPayChanCancelAfter);
REGISTER(Batch);
REGISTER(PermissionedDEX);
REGISTER(SingleAssetVault);
REGISTER(TokenEscrow);
REGISTER(fixAMMv1_3);
REGISTER(fixEnforceNFTokenTrustlineV2);
// Obsolete but supported by libxrpl // Obsolete but supported by libxrpl
REGISTER(CryptoConditionsSuite); REGISTER(CryptoConditionsSuite);

View File

@@ -49,45 +49,35 @@ durationInMillisecondsSince(std::chrono::steady_clock::time_point const startTim
using namespace util::prometheus; using namespace util::prometheus;
BackendCounters::BackendCounters() BackendCounters::BackendCounters()
: tooBusyCounter_( : tooBusyCounter_(PrometheusService::counterInt(
PrometheusService::counterInt( "backend_too_busy_total_number",
"backend_too_busy_total_number", Labels(),
Labels(), "The total number of times the backend was too busy to process a request"
"The total number of times the backend was too busy to process a request" ))
) , writeSyncCounter_(PrometheusService::counterInt(
) "backend_operations_total_number",
, writeSyncCounter_( Labels({Label{"operation", "write_sync"}}),
PrometheusService::counterInt( "The total number of times the backend had to write synchronously"
"backend_operations_total_number", ))
Labels({Label{"operation", "write_sync"}}), , writeSyncRetryCounter_(PrometheusService::counterInt(
"The total number of times the backend had to write synchronously" "backend_operations_total_number",
) Labels({Label{"operation", "write_sync_retry"}}),
) "The total number of times the backend had to retry a synchronous write"
, writeSyncRetryCounter_( ))
PrometheusService::counterInt(
"backend_operations_total_number",
Labels({Label{"operation", "write_sync_retry"}}),
"The total number of times the backend had to retry a synchronous write"
)
)
, asyncWriteCounters_{"write_async"} , asyncWriteCounters_{"write_async"}
, asyncReadCounters_{"read_async"} , asyncReadCounters_{"read_async"}
, readDurationHistogram_( , readDurationHistogram_(PrometheusService::histogramInt(
PrometheusService::histogramInt( "backend_duration_milliseconds_histogram",
"backend_duration_milliseconds_histogram", Labels({Label{"operation", "read"}}),
Labels({Label{"operation", "read"}}), kHISTOGRAM_BUCKETS,
kHISTOGRAM_BUCKETS, "The duration of backend read operations including retries"
"The duration of backend read operations including retries" ))
) , writeDurationHistogram_(PrometheusService::histogramInt(
) "backend_duration_milliseconds_histogram",
, writeDurationHistogram_( Labels({Label{"operation", "write"}}),
PrometheusService::histogramInt( kHISTOGRAM_BUCKETS,
"backend_duration_milliseconds_histogram", "The duration of backend write operations including retries"
Labels({Label{"operation", "write"}}), ))
kHISTOGRAM_BUCKETS,
"The duration of backend write operations including retries"
)
)
{ {
} }
@@ -180,34 +170,26 @@ BackendCounters::report() const
BackendCounters::AsyncOperationCounters::AsyncOperationCounters(std::string name) BackendCounters::AsyncOperationCounters::AsyncOperationCounters(std::string name)
: name_(std::move(name)) : name_(std::move(name))
, pendingCounter_( , pendingCounter_(PrometheusService::gaugeInt(
PrometheusService::gaugeInt( "backend_operations_current_number",
"backend_operations_current_number", Labels({{"operation", name_}, {"status", "pending"}}),
Labels({{"operation", name_}, {"status", "pending"}}), "The current number of pending " + name_ + " operations"
"The current number of pending " + name_ + " operations" ))
) , completedCounter_(PrometheusService::counterInt(
) "backend_operations_total_number",
, completedCounter_( Labels({{"operation", name_}, {"status", "completed"}}),
PrometheusService::counterInt( "The total number of completed " + name_ + " operations"
"backend_operations_total_number", ))
Labels({{"operation", name_}, {"status", "completed"}}), , retryCounter_(PrometheusService::counterInt(
"The total number of completed " + name_ + " operations" "backend_operations_total_number",
) Labels({{"operation", name_}, {"status", "retry"}}),
) "The total number of retried " + name_ + " operations"
, retryCounter_( ))
PrometheusService::counterInt( , errorCounter_(PrometheusService::counterInt(
"backend_operations_total_number", "backend_operations_total_number",
Labels({{"operation", name_}, {"status", "retry"}}), Labels({{"operation", name_}, {"status", "error"}}),
"The total number of retried " + name_ + " operations" "The total number of errored " + name_ + " operations"
) ))
)
, errorCounter_(
PrometheusService::counterInt(
"backend_operations_total_number",
Labels({{"operation", name_}, {"status", "error"}}),
"The total number of errored " + name_ + " operations"
)
)
{ {
} }

View File

@@ -23,8 +23,8 @@
#include "data/CassandraBackend.hpp" #include "data/CassandraBackend.hpp"
#include "data/LedgerCacheInterface.hpp" #include "data/LedgerCacheInterface.hpp"
#include "data/cassandra/SettingsProvider.hpp" #include "data/cassandra/SettingsProvider.hpp"
#include "util/config/ConfigDefinition.hpp"
#include "util/log/Logger.hpp" #include "util/log/Logger.hpp"
#include "util/newconfig/ConfigDefinition.hpp"
#include <boost/algorithm/string.hpp> #include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/predicate.hpp> #include <boost/algorithm/string/predicate.hpp>

Some files were not shown because too many files have changed in this diff.