Compare commits

...

11 Commits

Author SHA1 Message Date
tequ
0c8de81657 Merge branch 'dev' into coverage 2026-01-05 19:49:26 +09:00
Niq Dudfield
a8d7b2619e fix: restore [ips_fixed] to use addFixedPeer instead of addFallbackStrings (#641) 2026-01-05 13:46:02 +10:00
tequ
cb40a9d726 fix cmake-target 2025-12-24 22:03:45 +09:00
Bronek Kozicki
3d9f8aa7a9 test: improve code coverage reporting (#4849)
* Speed up the generation of coverage reports by using multiple cores.

* Add codecov step to coverage workflow.
2025-12-24 22:03:23 +09:00
tequ
d7fd2adb34 Add coverage workflow 2025-12-24 17:10:46 +09:00
Niq Dudfield
775fb3a8b2 fix: increment manifest sequence for client code cache invalidation (#631) 2025-12-24 11:16:00 +10:00
Niq Dudfield
5a118a4e2b fix(logs): formatting fixes, color handling, and debug build defaults (#607) 2025-12-17 09:45:41 +10:00
tequ
960f87857e Self hosted macos runner (#652) 2025-12-17 09:43:25 +10:00
tequ
f731bcfeba Increase ccache size from 10G to 100G in release-builder.sh for improved build performance (#643) 2025-12-16 14:45:45 +10:00
tequ
374b361daa Use Self hosted runner (#639) 2025-12-16 14:16:36 +10:00
tequ
52ccf27aa3 Hook API Refactor1: whitelist api at Enum.h (#605) 2025-12-10 19:32:03 +10:00
22 changed files with 1067 additions and 470 deletions

6
.codecov.yml Normal file
View File

@@ -0,0 +1,6 @@
coverage:
status:
project:
default:
target: 60%
threshold: 2%

View File

@@ -2,6 +2,14 @@ name: build
description: 'Builds the project with ccache integration' description: 'Builds the project with ccache integration'
inputs: inputs:
cmake-target:
description: 'CMake target to build'
required: false
default: all
cmake-args:
description: 'Additional CMake arguments'
required: false
default: null
generator: generator:
description: 'CMake generator to use' description: 'CMake generator to use'
required: true required: true
@@ -20,6 +28,10 @@ inputs:
description: 'C++ compiler to use' description: 'C++ compiler to use'
required: false required: false
default: '' default: ''
gcov:
description: 'Gcov to use'
required: false
default: ''
compiler-id: compiler-id:
description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)' description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)'
required: false required: false
@@ -28,6 +40,10 @@ inputs:
description: 'Cache version for invalidation' description: 'Cache version for invalidation'
required: false required: false
default: '1' default: '1'
gha_cache_enabled:
description: 'Whether to use actions/cache (disable for self-hosted with volume mounts)'
required: false
default: 'true'
ccache_enabled: ccache_enabled:
description: 'Whether to use ccache' description: 'Whether to use ccache'
required: false required: false
@@ -71,56 +87,30 @@ runs:
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-') SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
- name: Restore ccache directory for main branch
if: inputs.ccache_enabled == 'true'
id: ccache-restore
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.ccache-main
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
restore-keys: |
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: ccache-main
- name: Restore ccache directory for current branch
if: inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
id: ccache-restore-current-branch
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.ccache-current
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
restore-keys: |
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: ccache-current
- name: Configure ccache - name: Configure ccache
if: inputs.ccache_enabled == 'true' if: inputs.ccache_enabled == 'true'
shell: bash shell: bash
run: | run: |
# Create cache directories # Create cache directories
mkdir -p ~/.ccache-main ~/.ccache-current mkdir -p ~/.ccache-cache
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
# Configure ccache settings AFTER cache restore (prevents stale cached config) # Configure ccache settings AFTER cache restore (prevents stale cached config)
ccache --set-config=max_size=${{ inputs.ccache_max_size }} ccache --set-config=max_size=${{ inputs.ccache_max_size }}
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }} ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }} ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
ccache --set-config=cache_dir="$HOME/.ccache-cache"
# Determine if we're on the main branch echo "CCACHE_DIR=$HOME/.ccache-cache" >> $GITHUB_ENV
if [ "${{ steps.safe-branch.outputs.name }}" = "${{ inputs.main_branch }}" ]; then echo "📦 using ~/.ccache-cache as ccache cache directory"
# Main branch: use main branch cache only
ccache --set-config=cache_dir="$HOME/.ccache-main"
echo "CCACHE_DIR=$HOME/.ccache-main" >> $GITHUB_ENV
echo "📦 Main branch: using ~/.ccache-main"
else
# Feature branch: use current branch cache with main as secondary (read-only fallback)
ccache --set-config=cache_dir="$HOME/.ccache-current"
ccache --set-config=secondary_storage="file:$HOME/.ccache-main"
echo "CCACHE_DIR=$HOME/.ccache-current" >> $GITHUB_ENV
echo "📦 Feature branch: using ~/.ccache-current with ~/.ccache-main as secondary"
fi
# Print config for verification # Print config for verification
echo "=== ccache configuration ===" echo "=== ccache configuration ==="
@@ -144,6 +134,11 @@ runs:
export CXX="${{ inputs.cxx }}" export CXX="${{ inputs.cxx }}"
fi fi
if [ -n "${{ inputs.gcov }}" ]; then
ln -sf /usr/bin/${{ inputs.gcov }} /usr/local/bin/gcov
export CMAKE_BUILD_PARALLEL_LEVEL=$(nproc)
fi
# Create wrapper toolchain that overlays ccache on top of Conan's toolchain # Create wrapper toolchain that overlays ccache on top of Conan's toolchain
# This enables ccache for the main app build without affecting Conan dependency builds # This enables ccache for the main app build without affecting Conan dependency builds
if [ "${{ inputs.ccache_enabled }}" = "true" ]; then if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
@@ -205,7 +200,8 @@ runs:
-G "${{ inputs.generator }}" \ -G "${{ inputs.generator }}" \
${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \ ${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \ -DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} -DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
${{ inputs.cmake-args }}
- name: Show ccache config before build - name: Show ccache config before build
if: inputs.ccache_enabled == 'true' if: inputs.ccache_enabled == 'true'
@@ -229,23 +225,9 @@ runs:
VERBOSE_FLAG="-- -v" VERBOSE_FLAG="-- -v"
fi fi
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG} cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG} --target ${{ inputs.cmake-target }}
- name: Show ccache statistics - name: Show ccache statistics
if: inputs.ccache_enabled == 'true' if: inputs.ccache_enabled == 'true'
shell: bash shell: bash
run: ccache -s run: ccache -s
- name: Save ccache directory for main branch
if: success() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
uses: actions/cache/save@v4
with:
path: ~/.ccache-main
key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
- name: Save ccache directory for current branch
if: success() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
uses: actions/cache/save@v4
with:
path: ~/.ccache-current
key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}

View File

@@ -17,10 +17,6 @@ inputs:
description: 'Cache version for invalidation' description: 'Cache version for invalidation'
required: false required: false
default: '1' default: '1'
cache_enabled:
description: 'Whether to use caching'
required: false
default: 'true'
main_branch: main_branch:
description: 'Main branch name for restore keys' description: 'Main branch name for restore keys'
required: false required: false
@@ -63,18 +59,25 @@ outputs:
runs: runs:
using: 'composite' using: 'composite'
steps: steps:
- name: Restore Conan cache - name: Configure Conan cache paths
if: inputs.cache_enabled == 'true' if: inputs.os == 'Linux'
id: cache-restore-conan shell: bash
uses: ./.github/actions/xahau-ga-cache-restore run: |
with: mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
path: ~/.conan2 echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
# Note: compiler-id format is compiler-version-stdlib[-gccversion] echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-${{ inputs.configuration }} echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
restore-keys: |
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}- - name: Configure Conan cache paths
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}- if: inputs.gha_cache_enabled == 'false'
cache-type: Conan shell: bash
# For self-hosted runners, register cache paths to be used as volumes
# This allows the cache to be shared between containers
run: |
mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
- name: Configure Conan - name: Configure Conan
shell: bash shell: bash
@@ -150,10 +153,3 @@ runs:
--build missing \ --build missing \
--settings build_type=${{ inputs.configuration }} \ --settings build_type=${{ inputs.configuration }} \
.. ..
- name: Save Conan cache
if: success() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: ~/.conan2
key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}

View File

@@ -33,7 +33,7 @@ jobs:
fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
build: build:
runs-on: [self-hosted, vanity] runs-on: [self-hosted, xahaud-build]
needs: [checkout] needs: [checkout]
defaults: defaults:
run: run:
@@ -74,7 +74,7 @@ jobs:
fi fi
tests: tests:
runs-on: [self-hosted, vanity] runs-on: [self-hosted, xahaud-build]
needs: [build, checkout] needs: [build, checkout]
defaults: defaults:
run: run:
@@ -84,7 +84,7 @@ jobs:
run: /bin/bash docker-unit-tests.sh run: /bin/bash docker-unit-tests.sh
cleanup: cleanup:
runs-on: [self-hosted, vanity] runs-on: [self-hosted, xahaud-build]
needs: [tests, checkout] needs: [tests, checkout]
if: always() if: always()
steps: steps:

View File

@@ -20,7 +20,7 @@ jobs:
- Ninja - Ninja
configuration: configuration:
- Debug - Debug
runs-on: macos-15 runs-on: [self-hosted, macOS]
env: env:
build_dir: .build build_dir: .build
# Bump this number to invalidate all caches globally. # Bump this number to invalidate all caches globally.
@@ -30,61 +30,29 @@ jobs:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Get commit message - name: Add Homebrew to PATH
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install Conan
run: | run: |
brew install conan echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
# Verify Conan 2 is installed echo "/opt/homebrew/sbin" >> "$GITHUB_PATH"
conan --version
- name: Install Coreutils - name: Install Coreutils
run: | run: |
brew install coreutils brew install coreutils
echo "Num proc: $(nproc)" echo "Num proc: $(nproc)"
- name: Install Ninja # To isolate environments for each Runner, instead of installing globally with brew,
if: matrix.generator == 'Ninja' # use mise to isolate environments for each Runner directory.
run: brew install ninja - name: Setup toolchain (mise)
uses: jdx/mise-action@v2
with:
install: true
- name: Install Python - name: Install tools via mise
run: | run: |
if which python3 > /dev/null 2>&1; then mise install
echo "Python 3 executable exists" mise use cmake@3.23.1 python@3.12 pipx@latest conan@2 ninja@latest ccache@latest
python3 --version mise reshim
else echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"
brew install python@3.12
fi
# Create 'python' symlink if it doesn't exist (for tools expecting 'python')
if ! which python > /dev/null 2>&1; then
sudo ln -sf $(which python3) /usr/local/bin/python
fi
- name: Install CMake
run: |
# Install CMake 3.x to match local dev environments
# With Conan 2 and the policy args passed to CMake, newer versions
# can have issues with dependencies that require cmake_minimum_required < 3.5
brew uninstall cmake --ignore-dependencies 2>/dev/null || true
# Download and install CMake 3.31.7 directly
curl -L https://github.com/Kitware/CMake/releases/download/v3.31.7/cmake-3.31.7-macos-universal.tar.gz -o cmake.tar.gz
tar -xzf cmake.tar.gz
# Move the entire CMake.app to /Applications
sudo mv cmake-3.31.7-macos-universal/CMake.app /Applications/
echo "/Applications/CMake.app/Contents/bin" >> $GITHUB_PATH
/Applications/CMake.app/Contents/bin/cmake --version
- name: Install ccache
run: brew install ccache
- name: Check environment - name: Check environment
run: | run: |
@@ -98,6 +66,14 @@ jobs:
echo "---- Full Environment ----" echo "---- Full Environment ----"
env env
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Detect compiler version - name: Detect compiler version
id: detect-compiler id: detect-compiler
run: | run: |
@@ -129,6 +105,7 @@ jobs:
cache_version: ${{ env.CACHE_VERSION }} cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }} main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: libcxx stdlib: libcxx
ccache_max_size: '100G'
- name: Test - name: Test
run: | run: |

View File

@@ -14,7 +14,7 @@ concurrency:
jobs: jobs:
matrix-setup: matrix-setup:
runs-on: ubuntu-latest runs-on: [self-hosted, generic, 20.04]
container: python:3-slim container: python:3-slim
outputs: outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }} matrix: ${{ steps.set-matrix.outputs.matrix }}
@@ -47,7 +47,8 @@ jobs:
"cxx": "g++-11", "cxx": "g++-11",
"compiler_version": 11, "compiler_version": 11,
"stdlib": "libstdcxx", "stdlib": "libstdcxx",
"configuration": "Debug" "configuration": "Debug",
"job_type": "build"
}, },
{ {
"compiler_id": "gcc-13-libstdcxx", "compiler_id": "gcc-13-libstdcxx",
@@ -55,8 +56,19 @@ jobs:
"cc": "gcc-13", "cc": "gcc-13",
"cxx": "g++-13", "cxx": "g++-13",
"compiler_version": 13, "compiler_version": 13,
"configuration": "Debug",
"job_type": "build"
},
{
"compiler_id": "gcc-13-libstdcxx",
"compiler": "gcc",
"cc": "gcc-13",
"cxx": "g++-13",
"gcov": "gcov-13",
"compiler_version": 13,
"stdlib": "libstdcxx", "stdlib": "libstdcxx",
"configuration": "Debug" "configuration": "Debug",
"job_type": "coverage"
}, },
{ {
"compiler_id": "clang-14-libstdcxx-gcc11", "compiler_id": "clang-14-libstdcxx-gcc11",
@@ -66,7 +78,8 @@ jobs:
"compiler_version": 14, "compiler_version": 14,
"stdlib": "libstdcxx", "stdlib": "libstdcxx",
"clang_gcc_toolchain": 11, "clang_gcc_toolchain": 11,
"configuration": "Debug" "configuration": "Debug",
"job_type": "build"
}, },
{ {
"compiler_id": "clang-16-libstdcxx-gcc13", "compiler_id": "clang-16-libstdcxx-gcc13",
@@ -76,7 +89,8 @@ jobs:
"compiler_version": 16, "compiler_version": 16,
"stdlib": "libstdcxx", "stdlib": "libstdcxx",
"clang_gcc_toolchain": 13, "clang_gcc_toolchain": 13,
"configuration": "Debug" "configuration": "Debug",
"job_type": "build"
}, },
{ {
"compiler_id": "clang-17-libcxx", "compiler_id": "clang-17-libcxx",
@@ -85,7 +99,8 @@ jobs:
"cxx": "clang++-17", "cxx": "clang++-17",
"compiler_version": 17, "compiler_version": 17,
"stdlib": "libcxx", "stdlib": "libcxx",
"configuration": "Debug" "configuration": "Debug",
"job_type": "build"
}, },
{ {
# Clang 18 - testing if it's faster than Clang 17 with libc++ # Clang 18 - testing if it's faster than Clang 17 with libc++
@@ -96,14 +111,16 @@ jobs:
"cxx": "clang++-18", "cxx": "clang++-18",
"compiler_version": 18, "compiler_version": 18,
"stdlib": "libcxx", "stdlib": "libcxx",
"configuration": "Debug" "configuration": "Debug",
"job_type": "build"
} }
] ]
# Minimal matrix for PRs and feature branches # Minimal matrix for PRs and feature branches
minimal_matrix = [ minimal_matrix = [
full_matrix[1], # gcc-13 (middle-ground gcc) full_matrix[1], # gcc-13 (middle-ground gcc)
full_matrix[2] # clang-14 (mature, stable clang) full_matrix[2], # gcc-13 coverage
full_matrix[3] # clang-14 (mature, stable clang)
] ]
# Determine which matrix to use based on the target branch # Determine which matrix to use based on the target branch
@@ -161,14 +178,21 @@ jobs:
# Select the appropriate matrix # Select the appropriate matrix
if use_full: if use_full:
if force_full: if force_full:
print(f"Using FULL matrix (6 configs) - forced by [ci-nix-full-matrix] tag") print(f"Using FULL matrix (7 configs) - forced by [ci-nix-full-matrix] tag")
else: else:
print(f"Using FULL matrix (6 configs) - targeting main branch") print(f"Using FULL matrix (7 configs) - targeting main branch")
matrix = full_matrix matrix = full_matrix
else: else:
print(f"Using MINIMAL matrix (2 configs) - feature branch/PR") print(f"Using MINIMAL matrix (3 configs) - feature branch/PR")
matrix = minimal_matrix matrix = minimal_matrix
# Add runs_on based on job_type
for entry in matrix:
if entry.get("job_type") == "coverage":
entry["runs_on"] = '["self-hosted", "generic", 24.04]'
else:
entry["runs_on"] = '["self-hosted", "generic", 20.04]'
# Output the matrix as JSON # Output the matrix as JSON
output = json.dumps({"include": matrix}) output = json.dumps({"include": matrix})
with open(os.environ['GITHUB_OUTPUT'], 'a') as f: with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
@@ -176,7 +200,15 @@ jobs:
build: build:
needs: matrix-setup needs: matrix-setup
runs-on: ubuntu-latest runs-on: ${{ fromJSON(matrix.runs_on) }}
container:
image: ubuntu:24.04
volumes:
- /home/runner/.conan-cache:/.conan-cache
- /home/runner/.ccache-cache:/github/home/.ccache-cache
defaults:
run:
shell: bash
outputs: outputs:
artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }} artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
strategy: strategy:
@@ -191,23 +223,22 @@ jobs:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install build dependencies - name: Install build dependencies
run: | run: |
sudo apt-get update apt-get update
sudo apt-get install -y ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache apt-get install -y software-properties-common
add-apt-repository ppa:ubuntu-toolchain-r/test -y
apt-get update
apt-get install -y git python3 python-is-python3 pipx
pipx ensurepath
apt-get install -y cmake ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
apt-get install -y perl # for openssl build
apt-get install -y libsqlite3-dev # for xahaud build
# Install the specific GCC version needed for Clang # Install the specific GCC version needed for Clang
if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
echo "=== Installing GCC ${{ matrix.clang_gcc_toolchain }} for Clang ===" echo "=== Installing GCC ${{ matrix.clang_gcc_toolchain }} for Clang ==="
sudo apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev
echo "=== GCC versions available after installation ===" echo "=== GCC versions available after installation ==="
ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d" ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d"
@@ -238,7 +269,7 @@ jobs:
echo "Hiding GCC $version -> renaming to $counter (will be seen as GCC version $counter)" echo "Hiding GCC $version -> renaming to $counter (will be seen as GCC version $counter)"
# Safety check: ensure target doesn't already exist # Safety check: ensure target doesn't already exist
if [ ! -e "/usr/lib/gcc/x86_64-linux-gnu/$counter" ]; then if [ ! -e "/usr/lib/gcc/x86_64-linux-gnu/$counter" ]; then
sudo mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter" mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
else else
echo "ERROR: Cannot rename GCC $version - /usr/lib/gcc/x86_64-linux-gnu/$counter already exists" echo "ERROR: Cannot rename GCC $version - /usr/lib/gcc/x86_64-linux-gnu/$counter already exists"
exit 1 exit 1
@@ -262,11 +293,18 @@ jobs:
# Install libc++ dev packages if using libc++ (not needed for libstdc++) # Install libc++ dev packages if using libc++ (not needed for libstdc++)
if [ "${{ matrix.stdlib }}" = "libcxx" ]; then if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
sudo apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
fi fi
# Install Conan 2 # Install Conan 2
pip install --upgrade "conan>=2.0,<3" pipx install "conan>=2.0,<3"
echo "$HOME/.local/bin" >> $GITHUB_PATH
# Install gcovr for coverage jobs
if [ "${{ matrix.job_type }}" = "coverage" ]; then
pipx install "gcovr>=7,<9"
apt-get install -y lcov
fi
- name: Check environment - name: Check environment
run: | run: |
@@ -277,9 +315,24 @@ jobs:
which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found" which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found" which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
which ccache && ccache --version || echo "ccache not found" which ccache && ccache --version || echo "ccache not found"
# Check gcovr for coverage jobs
if [ "${{ matrix.job_type }}" = "coverage" ]; then
which gcov && gcov --version || echo "gcov not found"
which gcovr && gcovr --version || echo "gcovr not found"
fi
echo "---- Full Environment ----" echo "---- Full Environment ----"
env env
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install dependencies - name: Install dependencies
uses: ./.github/actions/xahau-ga-dependencies uses: ./.github/actions/xahau-ga-dependencies
with: with:
@@ -293,8 +346,10 @@ jobs:
cc: ${{ matrix.cc }} cc: ${{ matrix.cc }}
cxx: ${{ matrix.cxx }} cxx: ${{ matrix.cxx }}
stdlib: ${{ matrix.stdlib }} stdlib: ${{ matrix.stdlib }}
gha_cache_enabled: 'false' # Disable caching for self hosted runner
- name: Build - name: Build
if: matrix.job_type == 'build'
uses: ./.github/actions/xahau-ga-build uses: ./.github/actions/xahau-ga-build
with: with:
generator: Ninja generator: Ninja
@@ -307,8 +362,27 @@ jobs:
main_branch: ${{ env.MAIN_BRANCH_NAME }} main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: ${{ matrix.stdlib }} stdlib: ${{ matrix.stdlib }}
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }} clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
ccache_max_size: '100G'
- name: Build (Coverage)
if: matrix.job_type == 'coverage'
uses: ./.github/actions/xahau-ga-build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
build_dir: ${{ env.build_dir }}
cc: ${{ matrix.cc }}
cxx: ${{ matrix.cxx }}
gcov: ${{ matrix.gcov }}
compiler-id: ${{ matrix.compiler_id }}
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
cmake-args: '-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_CXX_FLAGS="-O0" -DCMAKE_C_FLAGS="-O0"'
cmake-target: 'coverage'
ccache_max_size: '100G'
- name: Set artifact name - name: Set artifact name
if: matrix.job_type == 'build'
id: set-artifact-name id: set-artifact-name
run: | run: |
ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}" ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
@@ -321,6 +395,7 @@ jobs:
ls -la ${{ env.build_dir }} || echo "Build directory not found or empty" ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"
- name: Run tests - name: Run tests
if: matrix.job_type == 'build'
run: | run: |
# Ensure the binary exists before trying to run # Ensure the binary exists before trying to run
if [ -f "${{ env.build_dir }}/rippled" ]; then if [ -f "${{ env.build_dir }}/rippled" ]; then
@@ -329,3 +404,33 @@ jobs:
echo "Error: rippled executable not found in ${{ env.build_dir }}" echo "Error: rippled executable not found in ${{ env.build_dir }}"
exit 1 exit 1
fi fi
# Coverage-specific steps
- name: Move coverage report
if: matrix.job_type == 'coverage'
shell: bash
run: |
mv "${{ env.build_dir }}/coverage.xml" ./
- name: Archive coverage report
if: matrix.job_type == 'coverage'
uses: actions/upload-artifact@v4
with:
name: coverage.xml
path: coverage.xml
retention-days: 30
- name: Upload coverage report
if: matrix.job_type == 'coverage'
uses: wandalen/wretry.action/main@v3
with:
action: codecov/codecov-action@v4.3.0
with: |
files: coverage.xml
fail_ci_if_error: true
disable_search: true
verbose: true
plugin: noop
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 210000 # in milliseconds

View File

@@ -258,12 +258,72 @@ can't build earlier Boost versions.
generator. Pass `--help` to see the rest of the command line options. generator. Pass `--help` to see the rest of the command line options.
## Coverage report
The coverage report is intended for developers using compilers GCC
or Clang (including Apple Clang). It is generated by the build target `coverage`,
which is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or `-Dcoverage=ON` variable in `cmake`
Prerequisites for the coverage report:
- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
- `gcov` for GCC (installed with the compiler by default) or
- `llvm-cov` for Clang (installed with the compiler by default)
- `Debug` build type
A coverage report is created when the following steps are completed, in order:
1. `rippled` binary built with instrumentation data, enabled by the `coverage`
option mentioned above
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
to assemble both instrumentation data and the coverage capture data into a coverage report
The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`
The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
by setting the `coverage_format` variable in `cmake`. It is also possible
to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.
Example use with some cmake variables set:
```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```
After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:
- file named `coverage.`_extension_ , with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
## Options ## Options
| Option | Default Value | Description | | Option | Default Value | Description |
| --- | ---| ---| | --- | ---| ---|
| `assert` | OFF | Enable assertions. | `assert` | OFF | Enable assertions.
| `reporting` | OFF | Build the reporting mode feature. | | `reporting` | OFF | Build the reporting mode feature. |
| `coverage` | OFF | Prepare the coverage report. |
| `tests` | ON | Build tests. | | `tests` | ON | Build tests. |
| `unity` | ON | Configure a unity build. | | `unity` | ON | Configure a unity build. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. | | `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
@@ -456,6 +516,10 @@ but it is more convenient to put them in a [profile][profile].
[1]: https://github.com/conan-io/conan-center-index/issues/13168 [1]: https://github.com/conan-io/conan-center-index/issues/13168
[5]: https://en.wikipedia.org/wiki/Unity_build [5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html [build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html [runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html
[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html [toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html

View File

@@ -0,0 +1,440 @@
# Copyright (c) 2012 - 2017, Lars Bilke
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# CHANGES:
#
# 2012-01-31, Lars Bilke
# - Enable Code Coverage
#
# 2013-09-17, Joakim Söderberg
# - Added support for Clang.
# - Some additional usage instructions.
#
# 2016-02-03, Lars Bilke
# - Refactored functions to use named parameters
#
# 2017-06-02, Lars Bilke
# - Merged with modified version from github.com/ufz/ogs
#
# 2019-05-06, Anatolii Kurotych
# - Remove unnecessary --coverage flag
#
# 2019-12-13, FeRD (Frank Dana)
# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor
# of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments.
# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY
# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list
# - Set lcov basedir with -b argument
# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be
# overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().)
# - Delete output dir, .info file on 'make clean'
# - Remove Python detection, since version mismatches will break gcovr
# - Minor cleanup (lowercase function names, update examples...)
#
# 2019-12-19, FeRD (Frank Dana)
# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets
#
# 2020-01-19, Bob Apthorpe
# - Added gfortran support
#
# 2020-02-17, FeRD (Frank Dana)
# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters
# in EXCLUDEs, and remove manual escaping from gcovr targets
#
# 2021-01-19, Robin Mueller
# - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run
# - Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional
# flags to the gcovr command
#
# 2020-05-04, Mihchael Davis
# - Add -fprofile-abs-path to make gcno files contain absolute paths
# - Fix BASE_DIRECTORY not working when defined
# - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines
#
# 2021-05-10, Martin Stump
# - Check if the generator is multi-config before warning about non-Debug builds
#
# 2022-02-22, Marko Wehle
# - Change gcovr output from -o <filename> for --xml <filename> and --html <filename> output respectively.
# This will allow for Multiple Output Formats at the same time by making use of GCOVR_ADDITIONAL_ARGS, e.g. GCOVR_ADDITIONAL_ARGS "--txt".
#
# 2022-09-28, Sebastian Mueller
# - fix append_coverage_compiler_flags_to_target to correctly add flags
# - replace "-fprofile-arcs -ftest-coverage" with "--coverage" (equivalent)
#
# 2024-01-04, Bronek Kozicki
# - remove setup_target_for_coverage_lcov (slow) and setup_target_for_coverage_fastcov (no support for Clang)
# - fix Clang support by adding find_program( ... llvm-cov )
# - add Apple Clang support by adding execute_process( COMMAND xcrun -f llvm-cov ... )
# - add CODE_COVERAGE_GCOV_TOOL to explicitly select gcov tool and disable find_program
# - replace both functions setup_target_for_coverage_gcovr_* with a single setup_target_for_coverage_gcovr
# - add support for all gcovr output formats
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
#
# 2. Add the following line to your CMakeLists.txt (best inside an if-condition
# using a CMake option() to enable it just optionally):
# include(CodeCoverage)
#
# 3. Append necessary compiler flags for all supported source files:
# append_coverage_compiler_flags()
# Or for specific target:
# append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
# 4. If you need to exclude additional directories from the report, specify them
# using full paths in the COVERAGE_EXCLUDES variable before calling
# setup_target_for_coverage_*().
# Example:
# set(COVERAGE_EXCLUDES
# '${PROJECT_SOURCE_DIR}/src/dir1/*'
# '/path/to/my/src/dir2/*')
# Or, use the EXCLUDE argument to setup_target_for_coverage_*().
# Example:
# setup_target_for_coverage_gcovr(
# NAME coverage
# EXECUTABLE testrunner
# EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*")
#
# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set
# relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR)
# Example:
# set(COVERAGE_EXCLUDES "dir1/*")
# setup_target_for_coverage_gcovr(
# NAME coverage
# EXECUTABLE testrunner
# FORMAT html-details
# BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src"
# EXCLUDE "dir2/*")
#
# 4.b If you need to pass specific options to gcovr, specify them in
# GCOVR_ADDITIONAL_ARGS variable.
# Example:
# set (GCOVR_ADDITIONAL_ARGS --exclude-throw-branches --exclude-noncode-lines -s)
# setup_target_for_coverage_gcovr(
# NAME coverage
# EXECUTABLE testrunner
# EXCLUDE "src/dir1" "src/dir2")
#
# 5. Use the functions described below to create a custom make target which
# runs your test executable and produces a code coverage report.
#
# 6. Build a Debug build:
# cmake -DCMAKE_BUILD_TYPE=Debug ..
# make
# make my_coverage_target
include(CMakeParseArguments)
option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)
# Check prereqs
# gcovr may be installed system-wide or vendored under scripts/test.
find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)
# Select the gcov-compatible tool. Precedence:
#   1. CODE_COVERAGE_GCOV_TOOL cache variable (explicit user override)
#   2. CODE_COVERAGE_GCOV_TOOL environment variable
#   3. "llvm-cov gcov" for (Apple)Clang, plain gcov for GNU
if(DEFINED CODE_COVERAGE_GCOV_TOOL)
set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if(APPLE)
# On Apple, llvm-cov must be resolved through xcrun to pick up the
# toolchain-matched binary.
execute_process( COMMAND xcrun -f llvm-cov
OUTPUT_VARIABLE LLVMCOV_PATH
OUTPUT_STRIP_TRAILING_WHITESPACE
)
else()
find_program( LLVMCOV_PATH llvm-cov )
endif()
if(LLVMCOV_PATH)
# "llvm-cov gcov" emulates the gcov CLI, which is what gcovr expects.
set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
endif()
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
find_program( GCOV_PATH gcov )
set(GCOV_TOOL "${GCOV_PATH}")
endif()
# Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach(LANG ${LANGUAGES})
if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif()
elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
# Clang passed the branch above, so mention it in the diagnostic too.
message(FATAL_ERROR "Compiler is not Clang, GNU or Flang! Aborting...")
endif()
endforeach()
# --coverage expands to the compiler's instrumentation flags
# (-fprofile-arcs -ftest-coverage on GCC); -g keeps line info for reports.
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag)
# -fprofile-abs-path makes .gcno files store absolute paths, which avoids
# mismatches when gcovr runs from a different working directory.
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if(HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
include(CheckCCompilerFlag)
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if(HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
endif()
# Per-language flag sets for the custom "Coverage" build type.
set(CMAKE_Fortran_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the Fortran compiler during coverage builds."
FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C++ compiler during coverage builds."
FORCE )
set(CMAKE_C_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C compiler during coverage builds."
FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
""
CACHE STRING "Flags used for linking binaries during coverage builds."
FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
""
CACHE STRING "Flags used by the shared libraries linker during coverage builds."
FORCE )
mark_as_advanced(
CMAKE_Fortran_FLAGS_COVERAGE
CMAKE_CXX_FLAGS_COVERAGE
CMAKE_C_FLAGS_COVERAGE
CMAKE_EXE_LINKER_FLAGS_COVERAGE
CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
# GCC-instrumented objects reference libgcov symbols at link time.
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
link_libraries(gcov)
endif()
# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_gcovr(
# NAME ctest_coverage # New target name
# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
# DEPENDENCIES executable_target # Dependencies to build first
# BASE_DIRECTORY "../" # Base directory for report
# # (defaults to PROJECT_SOURCE_DIR)
# FORMAT "cobertura" # Output format, one of:
# # xml cobertura sonarqube json-summary
# # json-details coveralls csv txt
# # html-single html-nested html-details
# # (xml is an alias to cobertura;
# # if no format is set, defaults to xml)
# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative
# # to BASE_DIRECTORY, with CMake 3.4+)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# gcovr command.
function(setup_target_for_coverage_gcovr)
# Parse keyword arguments into Coverage_* variables (see usage comment above).
set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
# Both tools were probed at module include time; fail early if either is missing.
if(NOT GCOV_TOOL)
message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
endif()
if(NOT GCOVR_PATH)
message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
endif()
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if(DEFINED Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else()
set(BASEDIR ${PROJECT_SOURCE_DIR})
endif()
# Default report format; "xml" is remapped to "cobertura" further below.
if(NOT DEFINED Coverage_FORMAT)
set(Coverage_FORMAT xml)
endif()
# The output file is controlled here; a user-supplied --output would conflict.
if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else()
# Pick the output path/extension matching the requested format. Multi-file
# HTML formats write into a dedicated directory created before gcovr runs.
if((Coverage_FORMAT STREQUAL "html-details")
OR (Coverage_FORMAT STREQUAL "html-nested"))
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif(Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif((Coverage_FORMAT STREQUAL "json-summary")
OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls"))
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif(Coverage_FORMAT STREQUAL "txt")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
elseif(Coverage_FORMAT STREQUAL "csv")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
else()
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
endif()
endif()
# Translate the format name into the matching gcovr command-line options.
# Note: list(APPEND ...) only modifies the function-local copy of
# GCOVR_ADDITIONAL_ARGS, so user-supplied values are not altered globally.
if((Coverage_FORMAT STREQUAL "cobertura")
OR (Coverage_FORMAT STREQUAL "xml"))
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
set(Coverage_FORMAT cobertura) # overwrite xml
elseif(Coverage_FORMAT STREQUAL "sonarqube")
list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "json-summary")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
elseif(Coverage_FORMAT STREQUAL "json-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
elseif(Coverage_FORMAT STREQUAL "coveralls")
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
elseif(Coverage_FORMAT STREQUAL "csv")
list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "txt")
list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "html-single")
list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
elseif(Coverage_FORMAT STREQUAL "html-nested")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "html-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
else()
message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
endif()
# Collect excludes (CMake 3.4+: Also compute absolute paths)
# Merges the EXCLUDE argument with the legacy COVERAGE_EXCLUDES /
# COVERAGE_GCOVR_EXCLUDES variables; relative patterns resolve against BASEDIR.
set(GCOVR_EXCLUDES "")
foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
endif()
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
endforeach()
list(REMOVE_DUPLICATES GCOVR_EXCLUDES)
# Combine excludes to several -e arguments
set(GCOVR_EXCLUDE_ARGS "")
foreach(EXCLUDE ${GCOVR_EXCLUDES})
list(APPEND GCOVR_EXCLUDE_ARGS "-e")
list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
endforeach()
# Set up commands which will be run to generate coverage data
# Run tests
set(GCOVR_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
# Create folder
# Only the multi-file HTML formats need a directory; otherwise use a no-op.
if(DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD
${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
else()
set(GCOVR_FOLDER_CMD echo) # dummy
endif()
# Running gcovr
set(GCOVR_CMD
${GCOVR_PATH}
--gcov-executable ${GCOV_TOOL}
--gcov-ignore-parse-errors=negative_hits.warn_once_per_file
-r ${BASEDIR}
${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS}
--object-directory=${PROJECT_BINARY_DIR}
)
# With CODE_COVERAGE_VERBOSE, echo each command (space-joined) at configure time.
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
message(STATUS "Command to create a folder: ")
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
endif()
message(STATUS "Command to generate gcovr coverage data: ")
string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
message(STATUS "${GCOVR_CMD_SPACED}")
endif()
# The actual coverage target: run the tests, make the output folder, run gcovr.
# NOTE: if the test executable exits non-zero, gcovr is never reached.
add_custom_target(${Coverage_NAME}
COMMAND ${GCOVR_EXEC_TESTS_CMD}
COMMAND ${GCOVR_FOLDER_CMD}
COMMAND ${GCOVR_CMD}
BYPRODUCTS ${GCOVR_OUTPUT_FILE}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Running gcovr to produce code coverage report."
)
# Show info where to find the report
# "COMMAND ;" is a no-op; it exists only so the COMMENT below is printed.
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND ;
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
endfunction() # setup_target_for_coverage_gcovr
# Append the coverage instrumentation flags to the caller's global compiler
# flags for every supported language (C, C++, Fortran).
function(append_coverage_compiler_flags)
foreach(_lang C CXX Fortran)
set(CMAKE_${_lang}_FLAGS "${CMAKE_${_lang}_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
endforeach()
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction() # append_coverage_compiler_flags
# Append coverage compiler flags to one specific target (instead of globally)
# Attach the coverage instrumentation flags privately to a single target,
# linking libgcov when a GNU compiler is in use.
function(append_coverage_compiler_flags_to_target name)
# COVERAGE_COMPILER_FLAGS is one string; split it into list items so each
# flag is passed to the compiler as a separate option.
separate_arguments(_coverage_flag_items NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
target_compile_options(${name} PRIVATE ${_coverage_flag_items})
# GNU-instrumented objects need libgcov at link time.
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
target_link_libraries(${name} PRIVATE gcov)
endif()
endfunction()

View File

@@ -48,13 +48,9 @@ target_sources (xrpl_core PRIVATE
src/ripple/beast/net/impl/IPAddressV6.cpp src/ripple/beast/net/impl/IPAddressV6.cpp
src/ripple/beast/net/impl/IPEndpoint.cpp src/ripple/beast/net/impl/IPEndpoint.cpp
src/ripple/beast/utility/src/beast_Journal.cpp src/ripple/beast/utility/src/beast_Journal.cpp
src/ripple/beast/utility/src/beast_PropertyStream.cpp) src/ripple/beast/utility/src/beast_PropertyStream.cpp
# Enhanced logging - compiles to empty when BEAST_ENHANCED_LOGGING is not defined
# Conditionally add enhanced logging source when BEAST_ENHANCED_LOGGING is enabled src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
target_sources(xrpl_core PRIVATE
src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
endif()
#[===============================[ #[===============================[
core sources core sources
@@ -162,12 +158,16 @@ target_link_libraries (xrpl_core
date::date date::date
Ripple::opts) Ripple::opts)
# Link date-tz library when enhanced logging is enabled # date-tz for enhanced logging (always linked, code is #ifdef guarded)
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING) if(TARGET date::date-tz)
if(TARGET date::date-tz) target_link_libraries(xrpl_core PUBLIC date::date-tz)
target_link_libraries(xrpl_core PUBLIC date::date-tz)
endif()
endif() endif()
# BEAST_ENHANCED_LOGGING: enable for Debug builds OR when explicitly requested
# Uses generator expression so it works with multi-config generators (Xcode, VS, Ninja Multi-Config)
target_compile_definitions(xrpl_core PUBLIC
$<$<OR:$<CONFIG:Debug>,$<BOOL:${BEAST_ENHANCED_LOGGING}>>:BEAST_ENHANCED_LOGGING=1>
)
#[=================================[ #[=================================[
main/core headers installation main/core headers installation
#]=================================] #]=================================]

View File

@@ -2,97 +2,37 @@
coverage report target coverage report target
#]===================================================================] #]===================================================================]
if (coverage) if(NOT coverage)
if (is_clang) message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
if (APPLE) endif()
execute_process (COMMAND xcrun -f llvm-profdata
OUTPUT_VARIABLE LLVM_PROFDATA
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_PROFDATA llvm-profdata)
endif ()
if (NOT LLVM_PROFDATA)
message (WARNING "unable to find llvm-profdata - skipping coverage_report target")
endif ()
if (APPLE) if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
execute_process (COMMAND xcrun -f llvm-cov message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
OUTPUT_VARIABLE LLVM_COV return()
OUTPUT_STRIP_TRAILING_WHITESPACE) endif()
else ()
find_program (LLVM_COV llvm-cov)
endif ()
if (NOT LLVM_COV)
message (WARNING "unable to find llvm-cov - skipping coverage_report target")
endif ()
set (extract_pattern "") include(CodeCoverage)
if (coverage_core_only)
set (extract_pattern "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/")
endif ()
if (LLVM_COV AND LLVM_PROFDATA) # The instructions for these commands come from the `CodeCoverage` module,
add_custom_target (coverage_report # which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
USES_TERMINAL # then locally changed (see CHANGES: section in `CodeCoverage.cmake`)
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage - results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
COMMAND ${LLVM_PROFDATA}
merge -sparse default.profraw -o rip.profdata
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LLVM_COV}
report -instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
# generate html report
COMMAND ${LLVM_COV}
show -format=html -output-dir=${CMAKE_BINARY_DIR}/coverage
-instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
BYPRODUCTS coverage/index.html)
endif ()
elseif (is_gcc)
find_program (LCOV lcov)
if (NOT LCOV)
message (WARNING "unable to find lcov - skipping coverage_report target")
endif ()
find_program (GENHTML genhtml) set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
if (NOT GENHTML) if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
message (WARNING "unable to find genhtml - skipping coverage_report target") separate_arguments(GCOVR_ADDITIONAL_ARGS)
endif () endif()
set (extract_pattern "*") list(APPEND GCOVR_ADDITIONAL_ARGS
if (coverage_core_only) --exclude-throw-branches
set (extract_pattern "*/src/ripple/*") --exclude-noncode-lines
endif () --exclude-unreachable-branches -s
-j ${coverage_test_parallelism})
if (LCOV AND GENHTML) setup_target_for_coverage_gcovr(
add_custom_target (coverage_report NAME coverage
USES_TERMINAL FORMAT ${coverage_format}
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage- results will be in ${CMAKE_BINARY_DIR}/coverage/index.html." EXECUTABLE rippled
# create baseline info file EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
COMMAND ${LCOV} EXCLUDE "src/test" "${CMAKE_BINARY_DIR}/proto_gen" "${CMAKE_BINARY_DIR}/proto_gen_grpc"
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -i -o baseline.info DEPENDENCIES rippled
| grep -v "ignoring data for external file" )
# run tests
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests for coverage report."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
# Create test coverage data file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -o tests.info
| grep -v "ignoring data for external file"
# Combine baseline and test coverage data
COMMAND ${LCOV}
-a baseline.info -a tests.info -o lcov-all.info
# extract our files
COMMAND ${LCOV}
-e lcov-all.info "${extract_pattern}" -o lcov.info
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LCOV} --summary lcov.info
# generate HTML report
COMMAND ${GENHTML}
-o ${CMAKE_BINARY_DIR}/coverage lcov.info
BYPRODUCTS coverage/index.html)
endif ()
endif ()
endif ()

View File

@@ -23,15 +23,15 @@ target_compile_options (opts
INTERFACE INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override> $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer> $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping> $<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<BOOL:${profile}>:-pg> $<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>) $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries (opts target_link_libraries (opts
INTERFACE INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping> $<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<BOOL:${profile}>:-pg> $<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>) $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

View File

@@ -2,6 +2,8 @@
convenience variables and sanity checks convenience variables and sanity checks
#]===================================================================] #]===================================================================]
include(ProcessorCount)
if (NOT ep_procs) if (NOT ep_procs)
ProcessorCount(ep_procs) ProcessorCount(ep_procs)
if (ep_procs GREATER 1) if (ep_procs GREATER 1)

View File

@@ -2,121 +2,129 @@
declare user options/settings declare user options/settings
#]===================================================================] #]===================================================================]
option (assert "Enables asserts, even in release builds" OFF) include(ProcessorCount)
option (reporting "Build rippled with reporting mode enabled" OFF) ProcessorCount(PROCESSOR_COUNT)
option (tests "Build tests" ON) option(assert "Enables asserts, even in release builds" OFF)
option (unity "Creates a build using UNITY support in cmake. This is the default" ON) option(reporting "Build rippled with reporting mode enabled" OFF)
if (unity)
if (NOT is_ci) option(tests "Build tests" ON)
set (CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
endif () option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
endif () if(unity)
if (is_gcc OR is_clang) if(NOT is_ci)
option (coverage "Generates coverage info." OFF) set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
option (profile "Add profiling flags" OFF) endif()
set (coverage_test "" CACHE STRING endif()
if(is_gcc OR is_clang)
option(coverage "Generates coverage info." OFF)
option(profile "Add profiling flags" OFF)
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
"Unit tests parallelism for the purpose of coverage report.")
set(coverage_format "html-details" CACHE STRING
"Output format of the coverage report.")
set(coverage_extra_args "" CACHE STRING
"Additional arguments to pass to gcovr.")
set(coverage_test "" CACHE STRING
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.") "On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
if (coverage_test AND NOT coverage) if(coverage_test AND NOT coverage)
set (coverage ON CACHE BOOL "gcc/clang only" FORCE) set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
endif () endif()
option (coverage_core_only option(wextra "compile with extra gcc/clang warnings enabled" ON)
"Include only src/ripple files when generating coverage report. \ else()
Set to OFF to include all sources in coverage report." set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
ON) set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
option (wextra "compile with extra gcc/clang warnings enabled" ON) set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
else () endif()
set (profile OFF CACHE BOOL "gcc/clang only" FORCE) if(is_linux)
set (coverage OFF CACHE BOOL "gcc/clang only" FORCE) option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
set (wextra OFF CACHE BOOL "gcc/clang only" FORCE) option(static "link protobuf, openssl, libc++, and boost statically" ON)
endif () option(perf "Enables flags that assist with perf recording" OFF)
if (is_linux) option(use_gold "enables detection of gold (binutils) linker" ON)
option (BUILD_SHARED_LIBS "build shared ripple libraries" OFF) option(use_mold "enables detection of mold (binutils) linker" ON)
option (static "link protobuf, openssl, libc++, and boost statically" ON) else()
option (perf "Enables flags that assist with perf recording" OFF)
option (use_gold "enables detection of gold (binutils) linker" ON)
else ()
# we are not ready to allow shared-libs on windows because it would require # we are not ready to allow shared-libs on windows because it would require
# export declarations. On macos it's more feasible, but static openssl # export declarations. On macos it's more feasible, but static openssl
# produces odd linker errors, thus we disable shared lib builds for now. # produces odd linker errors, thus we disable shared lib builds for now.
set (BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE) set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set (static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE) set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set (perf OFF CACHE BOOL "perf flags, linux only" FORCE) set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set (use_gold OFF CACHE BOOL "gold linker, linux only" FORCE) set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
endif () set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
if (is_clang) endif()
option (use_lld "enables detection of lld linker" ON) if(is_clang)
else () option(use_lld "enables detection of lld linker" ON)
set (use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE) else()
endif () set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
option (jemalloc "Enables jemalloc for heap profiling" OFF) endif()
option (werr "treat warnings as errors" OFF) option(jemalloc "Enables jemalloc for heap profiling" OFF)
option (local_protobuf option(werr "treat warnings as errors" OFF)
option(local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF) "Force a local build of protobuf instead of looking for an installed version." OFF)
option (local_grpc option(local_grpc
"Force a local build of gRPC instead of looking for an installed version." OFF) "Force a local build of gRPC instead of looking for an installed version." OFF)
# this one is a string and therefore can't be an option # this one is a string and therefore can't be an option
set (san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation") set(san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property (CACHE san PROPERTY STRINGS ";undefined;memory;address;thread") set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if (san) if(san)
string (TOLOWER ${san} san) string(TOLOWER ${san} san)
set (SAN_FLAG "-fsanitize=${san}") set(SAN_FLAG "-fsanitize=${san}")
set (SAN_LIB "") set(SAN_LIB "")
if (is_gcc) if(is_gcc)
if (san STREQUAL "address") if(san STREQUAL "address")
set (SAN_LIB "asan") set(SAN_LIB "asan")
elseif (san STREQUAL "thread") elseif(san STREQUAL "thread")
set (SAN_LIB "tsan") set(SAN_LIB "tsan")
elseif (san STREQUAL "memory") elseif(san STREQUAL "memory")
set (SAN_LIB "msan") set(SAN_LIB "msan")
elseif (san STREQUAL "undefined") elseif(san STREQUAL "undefined")
set (SAN_LIB "ubsan") set(SAN_LIB "ubsan")
endif () endif()
endif () endif()
set (_saved_CRL ${CMAKE_REQUIRED_LIBRARIES}) set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set (CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}") set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag (${SAN_FLAG} COMPILER_SUPPORTS_SAN) check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set (CMAKE_REQUIRED_LIBRARIES ${_saved_CRL}) set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if (NOT COMPILER_SUPPORTS_SAN) if(NOT COMPILER_SUPPORTS_SAN)
message (FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler") message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif () endif()
endif () endif()
set (container_label "" CACHE STRING "tag to use for package building containers") set(container_label "" CACHE STRING "tag to use for package building containers")
option (packages_only option(packages_only
"ONLY generate package building targets. This is special use-case and almost \ "ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \ certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF) any compiled targets locally." OFF)
option (have_package_container option(have_package_container
"Sometimes you already have the tagged container you want to use for package \ "Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \ building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \ dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF) use case and most likely you should not be touching this flag." OFF)
# the remaining options are obscure and rarely used # the remaining options are obscure and rarely used
option (beast_no_unit_test_inline option(beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table" "Prevents unit test definitions from being inserted into global table"
OFF) OFF)
option (single_io_service_thread option(single_io_service_thread
"Restricts the number of threads calling io_service::run to one. \ "Restricts the number of threads calling io_service::run to one. \
This can be useful when debugging." This can be useful when debugging."
OFF) OFF)
option (boost_show_deprecated option(boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\ "Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls." to find deprecated calls."
OFF) OFF)
option (beast_hashers option(beast_hashers
"Use local implementations for sha/ripemd hashes (experimental, not recommended)" "Use local implementations for sha/ripemd hashes (experimental, not recommended)"
OFF) OFF)
if (WIN32) if(WIN32)
option (beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF) option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else () else()
set (beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE) set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif () endif()
if (coverage) if(coverage)
message (STATUS "coverage build requested - forcing Debug build") message(STATUS "coverage build requested - forcing Debug build")
set (CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE) set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif () endif()

View File

@@ -37,20 +37,11 @@ endif() #git
set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/") set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/")
add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}") add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}")
# BEAST_ENHANCED_LOGGING option - adds file:line numbers and formatting to logs # BEAST_ENHANCED_LOGGING - adds file:line numbers and formatting to logs
# Default to ON for Debug builds, OFF for Release # Automatically enabled for Debug builds via generator expression
if(CMAKE_BUILD_TYPE STREQUAL "Debug") # Can be explicitly controlled with -DBEAST_ENHANCED_LOGGING=ON/OFF
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" ON) option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages (auto: Debug=ON, Release=OFF)" OFF)
else() message(STATUS "BEAST_ENHANCED_LOGGING option: ${BEAST_ENHANCED_LOGGING}")
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" OFF)
endif()
if(BEAST_ENHANCED_LOGGING)
add_definitions(-DBEAST_ENHANCED_LOGGING=1)
message(STATUS "Log line numbers enabled")
else()
message(STATUS "Log line numbers disabled")
endif()
if(thread_safety_analysis) if(thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS) add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
@@ -73,7 +64,6 @@ include (CheckCXXCompilerFlag)
include (FetchContent) include (FetchContent)
include (ExternalProject) include (ExternalProject)
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
include (ProcessorCount)
if (target) if (target)
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build") message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif () endif ()
@@ -152,11 +142,14 @@ target_link_libraries(ripple_libs INTERFACE
SQLite::SQLite3 SQLite::SQLite3
) )
if(coverage)
include(RippledCov)
endif()
### ###
include(RippledCore) include(RippledCore)
include(RippledInstall) include(RippledInstall)
include(RippledCov)
include(RippledMultiConfig) include(RippledMultiConfig)
include(RippledDocs) include(RippledDocs)
include(RippledValidatorKeys) include(RippledValidatorKeys)

View File

@@ -192,7 +192,7 @@ ENV PATH=/usr/local/bin:$PATH
# Configure ccache and Conan 2 # Configure ccache and Conan 2
# NOTE: Using echo commands instead of heredocs because heredocs in Docker RUN commands are finnicky # NOTE: Using echo commands instead of heredocs because heredocs in Docker RUN commands are finnicky
RUN /hbb_exe/activate-exec bash -c "ccache -M 10G && \ RUN /hbb_exe/activate-exec bash -c "ccache -M 100G && \
ccache -o cache_dir=/cache/ccache && \ ccache -o cache_dir=/cache/ccache && \
ccache -o compiler_check=content && \ ccache -o compiler_check=content && \
mkdir -p ~/.conan2 /cache/conan2 /cache/conan2_download /cache/conan2_sources && \ mkdir -p ~/.conan2 /cache/conan2 /cache/conan2_download /cache/conan2_sources && \

View File

@@ -367,90 +367,110 @@ const uint8_t max_emit = 255;
const uint8_t max_params = 16; const uint8_t max_params = 16;
const double fee_base_multiplier = 1.1f; const double fee_base_multiplier = 1.1f;
#define I32 0x7FU
#define I64 0x7EU
#define HOOK_WRAP_PARAMS(...) __VA_ARGS__
#define HOOK_API_DEFINITION(RETURN_TYPE, FUNCTION_NAME, PARAMS_TUPLE) \
{ \
#FUNCTION_NAME, \
{ \
RETURN_TYPE, HOOK_WRAP_PARAMS PARAMS_TUPLE \
} \
}
using APIWhitelist = std::map<std::string, std::vector<uint8_t>>;
// RH NOTE: Find descriptions of api functions in ./impl/applyHook.cpp and // RH NOTE: Find descriptions of api functions in ./impl/applyHook.cpp and
// hookapi.h (include for hooks) this is a map of the api name to its return // hookapi.h (include for hooks) this is a map of the api name to its return
// code (vec[0] and its parameters vec[>0]) as wasm type codes // code (vec[0] and its parameters vec[>0]) as wasm type codes
static const std::map<std::string, std::vector<uint8_t>> import_whitelist{ static const APIWhitelist import_whitelist{
{"_g", {0x7FU, 0x7FU, 0x7FU}}, // clang-format off
{"accept", {0x7EU, 0x7FU, 0x7FU, 0x7EU}}, HOOK_API_DEFINITION(I32, _g, (I32, I32)),
{"rollback", {0x7EU, 0x7FU, 0x7FU, 0x7EU}}, HOOK_API_DEFINITION(I64, accept, (I32, I32, I64)),
{"util_raddr", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, rollback, (I32, I32, I64)),
{"util_accid", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, util_raddr, (I32, I32, I32, I32)),
{"util_verify", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, util_accid, (I32, I32, I32, I32)),
{"util_sha512h", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, util_verify, (I32, I32, I32, I32, I32, I32)),
{"util_keylet", HOOK_API_DEFINITION(I64, util_sha512h, (I32, I32, I32, I32)),
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, util_keylet, (I32, I32, I32, I32, I32, I32, I32, I32, I32)),
{"sto_validate", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, sto_validate, (I32, I32)),
{"sto_subfield", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, sto_subfield, (I32, I32, I32)),
{"sto_subarray", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, sto_subarray, (I32, I32, I32)),
{"sto_emplace", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, sto_emplace, (I32, I32, I32, I32, I32, I32, I32)),
{"sto_erase", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, sto_erase, (I32, I32, I32, I32, I32)),
{"etxn_burden", {0x7EU}}, HOOK_API_DEFINITION(I64, etxn_burden, ()),
{"etxn_details", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, etxn_details, (I32, I32)),
{"etxn_fee_base", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, etxn_fee_base, (I32, I32)),
{"etxn_reserve", {0x7EU, 0x7FU}}, HOOK_API_DEFINITION(I64, etxn_reserve, (I32)),
{"etxn_generation", {0x7EU}}, HOOK_API_DEFINITION(I64, etxn_generation, ()),
{"etxn_nonce", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, etxn_nonce, (I32, I32)),
{"emit", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, emit, (I32, I32, I32, I32)),
{"float_set", {0x7EU, 0x7FU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_set, (I32, I64)),
{"float_multiply", {0x7EU, 0x7EU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_multiply, (I64, I64)),
{"float_mulratio", {0x7EU, 0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, float_mulratio, (I64, I32, I32, I32)),
{"float_negate", {0x7EU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_negate, (I64)),
{"float_compare", {0x7EU, 0x7EU, 0x7EU, 0x7FU}}, HOOK_API_DEFINITION(I64, float_compare, (I64, I64, I32)),
{"float_sum", {0x7EU, 0x7EU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_sum, (I64, I64)),
{"float_sto", HOOK_API_DEFINITION(I64, float_sto, (I32, I32, I32, I32, I32, I32, I64, I32)),
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7EU, 0x7FU}}, HOOK_API_DEFINITION(I64, float_sto_set, (I32, I32)),
{"float_sto_set", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, float_invert, (I64)),
{"float_invert", {0x7EU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_divide, (I64, I64)),
{"float_divide", {0x7EU, 0x7EU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_one, ()),
{"float_one", {0x7EU}}, HOOK_API_DEFINITION(I64, float_mantissa, (I64)),
{"float_mantissa", {0x7EU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_sign, (I64)),
{"float_sign", {0x7EU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_int, (I64, I32, I32)),
{"float_int", {0x7EU, 0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, float_log, (I64)),
{"float_log", {0x7EU, 0x7EU}}, HOOK_API_DEFINITION(I64, float_root, (I64, I32)),
{"float_root", {0x7EU, 0x7EU, 0x7FU}}, HOOK_API_DEFINITION(I64, fee_base, ()),
{"fee_base", {0x7EU}}, HOOK_API_DEFINITION(I64, ledger_seq, ()),
{"ledger_seq", {0x7EU}}, HOOK_API_DEFINITION(I64, ledger_last_time, ()),
{"ledger_last_time", {0x7EU}}, HOOK_API_DEFINITION(I64, ledger_last_hash, (I32, I32)),
{"ledger_last_hash", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, ledger_nonce, (I32, I32)),
{"ledger_nonce", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, ledger_keylet, (I32, I32, I32, I32, I32, I32)),
{"ledger_keylet", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, hook_account, (I32, I32)),
{"hook_account", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, hook_hash, (I32, I32, I32)),
{"hook_hash", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, hook_param_set, (I32, I32, I32, I32, I32, I32)),
{"hook_param_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, hook_param, (I32, I32, I32, I32)),
{"hook_param", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, hook_again, ()),
{"hook_again", {0x7EU}}, HOOK_API_DEFINITION(I64, hook_skip, (I32, I32, I32)),
{"hook_skip", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, hook_pos, ()),
{"hook_pos", {0x7EU}}, HOOK_API_DEFINITION(I64, slot, (I32, I32, I32)),
{"slot", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, slot_clear, (I32)),
{"slot_clear", {0x7EU, 0x7FU}}, HOOK_API_DEFINITION(I64, slot_count, (I32)),
{"slot_count", {0x7EU, 0x7FU}}, HOOK_API_DEFINITION(I64, slot_set, (I32, I32, I32)),
{"slot_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, slot_size, (I32)),
{"slot_size", {0x7EU, 0x7FU}}, HOOK_API_DEFINITION(I64, slot_subarray, (I32, I32, I32)),
{"slot_subarray", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, slot_subfield, (I32, I32, I32)),
{"slot_subfield", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, slot_type, (I32, I32)),
{"slot_type", {0x7EU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, slot_float, (I32)),
{"slot_float", {0x7EU, 0x7FU}}, HOOK_API_DEFINITION(I64, state_set, (I32, I32, I32, I32)),
{"state_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, state_foreign_set, (I32, I32, I32, I32, I32, I32, I32, I32)),
{"state_foreign_set", HOOK_API_DEFINITION(I64, state, (I32, I32, I32, I32)),
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, state_foreign, (I32, I32, I32, I32, I32, I32, I32, I32)),
{"state", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, trace, (I32, I32, I32, I32, I32)),
{"state_foreign", HOOK_API_DEFINITION(I64, trace_num, (I32, I32, I64)),
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, trace_float, (I32, I32, I64)),
{"trace", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, otxn_burden, ()),
{"trace_num", {0x7EU, 0x7FU, 0x7FU, 0x7EU}}, HOOK_API_DEFINITION(I64, otxn_field, (I32, I32, I32)),
{"trace_float", {0x7EU, 0x7FU, 0x7FU, 0x7EU}}, HOOK_API_DEFINITION(I64, otxn_generation, ()),
{"otxn_burden", {0x7EU}}, HOOK_API_DEFINITION(I64, otxn_id, (I32, I32, I32)),
{"otxn_field", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, otxn_type, ()),
{"otxn_generation", {0x7EU}}, HOOK_API_DEFINITION(I64, otxn_slot, (I32)),
{"otxn_id", {0x7EU, 0x7FU, 0x7FU, 0x7FU}}, HOOK_API_DEFINITION(I64, otxn_param, (I32, I32, I32, I32)),
{"otxn_type", {0x7EU}}, HOOK_API_DEFINITION(I64, meta_slot, (I32)),
{"otxn_slot", {0x7EU, 0x7FU}}, // clang-format on
{"otxn_param", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}}, };
{"meta_slot", {0x7EU, 0x7FU}}};
// featureHooks1 // featureHooks1
static const std::map<std::string, std::vector<uint8_t>> import_whitelist_1{ static const APIWhitelist import_whitelist_1{
{"xpop_slot", {0x7EU, 0x7FU, 0x7FU}}}; // clang-format off
HOOK_API_DEFINITION(I64, xpop_slot, (I32, I32)),
// clang-format on
};
#undef HOOK_API_DEFINITION
#undef I32
#undef I64
}; // namespace hook_api }; // namespace hook_api
#endif #endif

View File

@@ -471,6 +471,10 @@ ManifestCache::applyManifest(Manifest m)
auto masterKey = m.masterKey; auto masterKey = m.masterKey;
map_.emplace(std::move(masterKey), std::move(m)); map_.emplace(std::move(masterKey), std::move(m));
// Increment sequence to invalidate cached manifest messages
seq_++;
return ManifestDisposition::accepted; return ManifestDisposition::accepted;
} }

View File

@@ -360,7 +360,8 @@ Logs::format(
if (!partition.empty()) if (!partition.empty())
{ {
#ifdef BEAST_ENHANCED_LOGGING #ifdef BEAST_ENHANCED_LOGGING
output += beast::detail::get_log_highlight_color(); if (beast::detail::should_log_use_colors())
output += beast::detail::get_log_highlight_color();
#endif #endif
output += partition + ":"; output += partition + ":";
} }
@@ -392,7 +393,8 @@ Logs::format(
} }
#ifdef BEAST_ENHANCED_LOGGING #ifdef BEAST_ENHANCED_LOGGING
output += "\033[0m"; if (beast::detail::should_log_use_colors())
output += "\033[0m";
#endif #endif
output += message; output += message;

View File

@@ -41,6 +41,14 @@ get_log_highlight_color();
constexpr const char* constexpr const char*
strip_source_root(const char* file) strip_source_root(const char* file)
{ {
// Handle relative paths from build/ directory (common with ccache)
// e.g., "../src/ripple/..." -> "ripple/..."
if (file && file[0] == '.' && file[1] == '.' && file[2] == '/' &&
file[3] == 's' && file[4] == 'r' && file[5] == 'c' && file[6] == '/')
{
return file + 7; // skip "../src/"
}
#ifdef SOURCE_ROOT_PATH #ifdef SOURCE_ROOT_PATH
constexpr const char* sourceRoot = SOURCE_ROOT_PATH; constexpr const char* sourceRoot = SOURCE_ROOT_PATH;
constexpr auto strlen_constexpr = [](const char* s) constexpr constexpr auto strlen_constexpr = [](const char* s) constexpr

View File

@@ -17,6 +17,8 @@
*/ */
//============================================================================== //==============================================================================
#ifdef BEAST_ENHANCED_LOGGING
#include <ripple/beast/utility/EnhancedLogging.h> #include <ripple/beast/utility/EnhancedLogging.h>
#include <cstdlib> #include <cstdlib>
#include <cstring> #include <cstring>
@@ -112,3 +114,5 @@ log_write_location_string(std::ostream& os, const char* file, int line)
} // namespace detail } // namespace detail
} // namespace beast } // namespace beast
#endif // BEAST_ENHANCED_LOGGING

View File

@@ -155,14 +155,43 @@ Journal::ScopedStream::~ScopedStream()
#ifdef BEAST_ENHANCED_LOGGING #ifdef BEAST_ENHANCED_LOGGING
// Add suffix if location is enabled // Add suffix if location is enabled
if (file_ && detail::should_show_location() && !s.empty() && s != "\n") if (file_ && detail::should_show_location() && !s.empty())
{ {
std::ostringstream combined; // Single optimized scan from the end
combined << s; size_t const lastNonWhitespace = s.find_last_not_of(" \n\r\t");
if (!s.empty() && s.back() != ' ')
combined << " "; // Skip if message is only whitespace (e.g., just "\n" or " \n\n")
detail::log_write_location_string(combined, file_, line_); if (lastNonWhitespace != std::string::npos)
s = combined.str(); {
// Count only the trailing newlines (tiny range)
size_t trailingNewlines = 0;
for (size_t i = lastNonWhitespace + 1; i < s.length(); ++i)
{
if (s[i] == '\n')
++trailingNewlines;
}
// Build location string once
std::ostringstream locStream;
detail::log_write_location_string(locStream, file_, line_);
std::string const location = locStream.str();
// Pre-allocate exact size → zero reallocations
size_t const finalSize = lastNonWhitespace + 1 + 1 +
location.length() + trailingNewlines;
std::string result;
result.reserve(finalSize);
// Direct string ops (no ostringstream overhead)
result.append(s, 0, lastNonWhitespace + 1);
result.push_back(' ');
result += location;
if (trailingNewlines > 0)
result.append(trailingNewlines, '\n');
s = std::move(result); // Move, no copy
}
} }
#endif #endif

View File

@@ -484,44 +484,61 @@ OverlayImpl::start()
m_peerFinder->setConfig(config); m_peerFinder->setConfig(config);
m_peerFinder->start(); m_peerFinder->start();
auto addIps = [&](std::vector<std::string> bootstrapIps) -> void { auto addIps = [this](std::vector<std::string> ips, bool fixed) {
beast::Journal const& j = app_.journal("Overlay"); beast::Journal const& j = app_.journal("Overlay");
for (auto& ip : bootstrapIps) for (auto& ip : ips)
{ {
std::size_t pos = ip.find('#'); std::size_t pos = ip.find('#');
if (pos != std::string::npos) if (pos != std::string::npos)
ip.erase(pos); ip.erase(pos);
JLOG(j.trace()) << "Found boostrap IP: " << ip; JLOG(j.trace())
<< "Found " << (fixed ? "fixed" : "bootstrap") << " IP: " << ip;
} }
m_resolver.resolve( m_resolver.resolve(
bootstrapIps, ips,
[&](std::string const& name, [this, fixed](
std::string const& name,
std::vector<beast::IP::Endpoint> const& addresses) { std::vector<beast::IP::Endpoint> const& addresses) {
std::vector<std::string> ips;
ips.reserve(addresses.size());
beast::Journal const& j = app_.journal("Overlay"); beast::Journal const& j = app_.journal("Overlay");
std::string const base("config: ");
std::vector<beast::IP::Endpoint> eps;
eps.reserve(addresses.size());
for (auto const& addr : addresses) for (auto const& addr : addresses)
{ {
std::string addrStr = addr.port() == 0 auto ep = addr.port() == 0 ? addr.at_port(DEFAULT_PEER_PORT)
? to_string(addr.at_port(DEFAULT_PEER_PORT)) : addr;
: to_string(addr); JLOG(j.trace())
JLOG(j.trace()) << "Parsed boostrap IP: " << addrStr; << "Parsed " << (fixed ? "fixed" : "bootstrap")
ips.push_back(addrStr); << " IP: " << ep;
eps.push_back(ep);
} }
std::string const base("config: "); if (eps.empty())
if (!ips.empty()) return;
m_peerFinder->addFallbackStrings(base + name, ips);
if (fixed)
{
m_peerFinder->addFixedPeer(base + name, eps);
}
else
{
std::vector<std::string> strs;
strs.reserve(eps.size());
for (auto const& ep : eps)
strs.push_back(to_string(ep));
m_peerFinder->addFallbackStrings(base + name, strs);
}
}); });
}; };
if (!app_.config().IPS.empty()) if (!app_.config().IPS.empty())
addIps(app_.config().IPS); addIps(app_.config().IPS, false);
if (!app_.config().IPS_FIXED.empty()) if (!app_.config().IPS_FIXED.empty())
addIps(app_.config().IPS_FIXED); addIps(app_.config().IPS_FIXED, true);
auto const timer = std::make_shared<Timer>(*this); auto const timer = std::make_shared<Timer>(*this);
std::lock_guard lock(mutex_); std::lock_guard lock(mutex_);