Compare commits

...

11 Commits

Author SHA1 Message Date
tequ
0c8de81657 Merge branch 'dev' into coverage 2026-01-05 19:49:26 +09:00
Niq Dudfield
a8d7b2619e fix: restore [ips_fixed] to use addFixedPeer instead of addFallbackStrings (#641) 2026-01-05 13:46:02 +10:00
tequ
cb40a9d726 fix cmake-target 2025-12-24 22:03:45 +09:00
Bronek Kozicki
3d9f8aa7a9 test: improve code coverage reporting (#4849)
* Speed up the generation of coverage reports by using multiple cores.

* Add codecov step to coverage workflow.
2025-12-24 22:03:23 +09:00
tequ
d7fd2adb34 Add coverage workflow 2025-12-24 17:10:46 +09:00
Niq Dudfield
775fb3a8b2 fix: increment manifest sequence for client code cache invalidation (#631) 2025-12-24 11:16:00 +10:00
Niq Dudfield
5a118a4e2b fix(logs): formatting fixes, color handling, and debug build defaults (#607) 2025-12-17 09:45:41 +10:00
tequ
960f87857e Self hosted macos runner (#652) 2025-12-17 09:43:25 +10:00
tequ
f731bcfeba Increase ccache size from 10G to 100G in release-builder.sh for improved build performance (#643) 2025-12-16 14:45:45 +10:00
tequ
374b361daa Use Self hosted runner (#639) 2025-12-16 14:16:36 +10:00
tequ
52ccf27aa3 Hook API Refactor1: whitelist api at Enum.h (#605) 2025-12-10 19:32:03 +10:00
22 changed files with 1067 additions and 470 deletions

.codecov.yml Normal file
View File

@@ -0,0 +1,6 @@
coverage:
status:
project:
default:
target: 60%
threshold: 2%

View File

@@ -2,6 +2,14 @@ name: build
description: 'Builds the project with ccache integration'
inputs:
cmake-target:
description: 'CMake target to build'
required: false
default: all
cmake-args:
description: 'Additional CMake arguments'
required: false
default: null
generator:
description: 'CMake generator to use'
required: true
@@ -20,6 +28,10 @@ inputs:
description: 'C++ compiler to use'
required: false
default: ''
gcov:
description: 'Gcov to use'
required: false
default: ''
compiler-id:
description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)'
required: false
@@ -28,6 +40,10 @@ inputs:
description: 'Cache version for invalidation'
required: false
default: '1'
gha_cache_enabled:
description: 'Whether to use actions/cache (disable for self-hosted with volume mounts)'
required: false
default: 'true'
ccache_enabled:
description: 'Whether to use ccache'
required: false
@@ -71,56 +87,30 @@ runs:
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
- name: Restore ccache directory for main branch
if: inputs.ccache_enabled == 'true'
id: ccache-restore
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.ccache-main
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
restore-keys: |
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: ccache-main
- name: Restore ccache directory for current branch
if: inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
id: ccache-restore-current-branch
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.ccache-current
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
restore-keys: |
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: ccache-current
- name: Configure ccache
if: inputs.ccache_enabled == 'true'
shell: bash
run: |
# Create cache directories
mkdir -p ~/.ccache-main ~/.ccache-current
mkdir -p ~/.ccache-cache
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
# Configure ccache settings AFTER cache restore (prevents stale cached config)
ccache --set-config=max_size=${{ inputs.ccache_max_size }}
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
# Determine if we're on the main branch
if [ "${{ steps.safe-branch.outputs.name }}" = "${{ inputs.main_branch }}" ]; then
# Main branch: use main branch cache only
ccache --set-config=cache_dir="$HOME/.ccache-main"
echo "CCACHE_DIR=$HOME/.ccache-main" >> $GITHUB_ENV
echo "📦 Main branch: using ~/.ccache-main"
else
# Feature branch: use current branch cache with main as secondary (read-only fallback)
ccache --set-config=cache_dir="$HOME/.ccache-current"
ccache --set-config=secondary_storage="file:$HOME/.ccache-main"
echo "CCACHE_DIR=$HOME/.ccache-current" >> $GITHUB_ENV
echo "📦 Feature branch: using ~/.ccache-current with ~/.ccache-main as secondary"
fi
ccache --set-config=cache_dir="$HOME/.ccache-cache"
echo "CCACHE_DIR=$HOME/.ccache-cache" >> $GITHUB_ENV
echo "📦 using ~/.ccache-cache as ccache cache directory"
# Print config for verification
echo "=== ccache configuration ==="
@@ -144,6 +134,11 @@ runs:
export CXX="${{ inputs.cxx }}"
fi
if [ -n "${{ inputs.gcov }}" ]; then
ln -sf /usr/bin/${{ inputs.gcov }} /usr/local/bin/gcov
export CMAKE_BUILD_PARALLEL_LEVEL=$(nproc)
fi
# Create wrapper toolchain that overlays ccache on top of Conan's toolchain
# This enables ccache for the main app build without affecting Conan dependency builds
if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
@@ -205,7 +200,8 @@ runs:
-G "${{ inputs.generator }}" \
${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }}
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
${{ inputs.cmake-args }}
- name: Show ccache config before build
if: inputs.ccache_enabled == 'true'
@@ -229,23 +225,9 @@ runs:
VERBOSE_FLAG="-- -v"
fi
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG}
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG} --target ${{ inputs.cmake-target }}
- name: Show ccache statistics
if: inputs.ccache_enabled == 'true'
shell: bash
run: ccache -s
- name: Save ccache directory for main branch
if: success() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
uses: actions/cache/save@v4
with:
path: ~/.ccache-main
key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
- name: Save ccache directory for current branch
if: success() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
uses: actions/cache/save@v4
with:
path: ~/.ccache-current
key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}

View File

@@ -17,10 +17,6 @@ inputs:
description: 'Cache version for invalidation'
required: false
default: '1'
cache_enabled:
description: 'Whether to use caching'
required: false
default: 'true'
main_branch:
description: 'Main branch name for restore keys'
required: false
@@ -63,18 +59,25 @@ outputs:
runs:
using: 'composite'
steps:
- name: Restore Conan cache
if: inputs.cache_enabled == 'true'
id: cache-restore-conan
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.conan2
# Note: compiler-id format is compiler-version-stdlib[-gccversion]
key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-${{ inputs.configuration }}
restore-keys: |
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: Conan
- name: Configure Conan cache paths
if: inputs.os == 'Linux'
shell: bash
run: |
mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
- name: Configure Conan cache paths
if: inputs.gha_cache_enabled == 'false'
shell: bash
# For self-hosted runners, register cache paths to be used as volumes
# This allows the cache to be shared between containers
run: |
mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
- name: Configure Conan
shell: bash
@@ -150,10 +153,3 @@ runs:
--build missing \
--settings build_type=${{ inputs.configuration }} \
..
- name: Save Conan cache
if: success() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: ~/.conan2
key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}

View File

@@ -33,7 +33,7 @@ jobs:
fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
build:
runs-on: [self-hosted, vanity]
runs-on: [self-hosted, xahaud-build]
needs: [checkout]
defaults:
run:
@@ -74,7 +74,7 @@ jobs:
fi
tests:
runs-on: [self-hosted, vanity]
runs-on: [self-hosted, xahaud-build]
needs: [build, checkout]
defaults:
run:
@@ -84,7 +84,7 @@ jobs:
run: /bin/bash docker-unit-tests.sh
cleanup:
runs-on: [self-hosted, vanity]
runs-on: [self-hosted, xahaud-build]
needs: [tests, checkout]
if: always()
steps:

View File

@@ -20,7 +20,7 @@ jobs:
- Ninja
configuration:
- Debug
runs-on: macos-15
runs-on: [self-hosted, macOS]
env:
build_dir: .build
# Bump this number to invalidate all caches globally.
@@ -30,61 +30,29 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install Conan
- name: Add Homebrew to PATH
run: |
brew install conan
# Verify Conan 2 is installed
conan --version
echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
echo "/opt/homebrew/sbin" >> "$GITHUB_PATH"
- name: Install Coreutils
run: |
brew install coreutils
echo "Num proc: $(nproc)"
- name: Install Ninja
if: matrix.generator == 'Ninja'
run: brew install ninja
# To keep each runner directory's environment isolated, use mise instead of
# installing tools globally with brew.
- name: Setup toolchain (mise)
uses: jdx/mise-action@v2
with:
install: true
- name: Install Python
- name: Install tools via mise
run: |
if which python3 > /dev/null 2>&1; then
echo "Python 3 executable exists"
python3 --version
else
brew install python@3.12
fi
# Create 'python' symlink if it doesn't exist (for tools expecting 'python')
if ! which python > /dev/null 2>&1; then
sudo ln -sf $(which python3) /usr/local/bin/python
fi
- name: Install CMake
run: |
# Install CMake 3.x to match local dev environments
# With Conan 2 and the policy args passed to CMake, newer versions
# can have issues with dependencies that require cmake_minimum_required < 3.5
brew uninstall cmake --ignore-dependencies 2>/dev/null || true
# Download and install CMake 3.31.7 directly
curl -L https://github.com/Kitware/CMake/releases/download/v3.31.7/cmake-3.31.7-macos-universal.tar.gz -o cmake.tar.gz
tar -xzf cmake.tar.gz
# Move the entire CMake.app to /Applications
sudo mv cmake-3.31.7-macos-universal/CMake.app /Applications/
echo "/Applications/CMake.app/Contents/bin" >> $GITHUB_PATH
/Applications/CMake.app/Contents/bin/cmake --version
- name: Install ccache
run: brew install ccache
mise install
mise use cmake@3.23.1 python@3.12 pipx@latest conan@2 ninja@latest ccache@latest
mise reshim
echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"
- name: Check environment
run: |
@@ -98,6 +66,14 @@ jobs:
echo "---- Full Environment ----"
env
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Detect compiler version
id: detect-compiler
run: |
@@ -129,6 +105,7 @@ jobs:
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: libcxx
ccache_max_size: '100G'
- name: Test
run: |

View File

@@ -14,7 +14,7 @@ concurrency:
jobs:
matrix-setup:
runs-on: ubuntu-latest
runs-on: [self-hosted, generic, 20.04]
container: python:3-slim
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
@@ -47,7 +47,8 @@ jobs:
"cxx": "g++-11",
"compiler_version": 11,
"stdlib": "libstdcxx",
"configuration": "Debug"
"configuration": "Debug",
"job_type": "build"
},
{
"compiler_id": "gcc-13-libstdcxx",
@@ -55,8 +56,19 @@ jobs:
"cc": "gcc-13",
"cxx": "g++-13",
"compiler_version": 13,
"configuration": "Debug",
"job_type": "build"
},
{
"compiler_id": "gcc-13-libstdcxx",
"compiler": "gcc",
"cc": "gcc-13",
"cxx": "g++-13",
"gcov": "gcov-13",
"compiler_version": 13,
"stdlib": "libstdcxx",
"configuration": "Debug"
"configuration": "Debug",
"job_type": "coverage"
},
{
"compiler_id": "clang-14-libstdcxx-gcc11",
@@ -66,7 +78,8 @@ jobs:
"compiler_version": 14,
"stdlib": "libstdcxx",
"clang_gcc_toolchain": 11,
"configuration": "Debug"
"configuration": "Debug",
"job_type": "build"
},
{
"compiler_id": "clang-16-libstdcxx-gcc13",
@@ -76,7 +89,8 @@ jobs:
"compiler_version": 16,
"stdlib": "libstdcxx",
"clang_gcc_toolchain": 13,
"configuration": "Debug"
"configuration": "Debug",
"job_type": "build"
},
{
"compiler_id": "clang-17-libcxx",
@@ -85,7 +99,8 @@ jobs:
"cxx": "clang++-17",
"compiler_version": 17,
"stdlib": "libcxx",
"configuration": "Debug"
"configuration": "Debug",
"job_type": "build"
},
{
# Clang 18 - testing if it's faster than Clang 17 with libc++
@@ -96,14 +111,16 @@ jobs:
"cxx": "clang++-18",
"compiler_version": 18,
"stdlib": "libcxx",
"configuration": "Debug"
"configuration": "Debug",
"job_type": "build"
}
]
# Minimal matrix for PRs and feature branches
minimal_matrix = [
full_matrix[1], # gcc-13 (middle-ground gcc)
full_matrix[2] # clang-14 (mature, stable clang)
full_matrix[2], # gcc-13 coverage
full_matrix[3] # clang-14 (mature, stable clang)
]
# Determine which matrix to use based on the target branch
@@ -161,14 +178,21 @@ jobs:
# Select the appropriate matrix
if use_full:
if force_full:
print(f"Using FULL matrix (6 configs) - forced by [ci-nix-full-matrix] tag")
print(f"Using FULL matrix (7 configs) - forced by [ci-nix-full-matrix] tag")
else:
print(f"Using FULL matrix (6 configs) - targeting main branch")
print(f"Using FULL matrix (7 configs) - targeting main branch")
matrix = full_matrix
else:
print(f"Using MINIMAL matrix (2 configs) - feature branch/PR")
print(f"Using MINIMAL matrix (3 configs) - feature branch/PR")
matrix = minimal_matrix
# Add runs_on based on job_type
for entry in matrix:
if entry.get("job_type") == "coverage":
entry["runs_on"] = '["self-hosted", "generic", 24.04]'
else:
entry["runs_on"] = '["self-hosted", "generic", 20.04]'
# Output the matrix as JSON
output = json.dumps({"include": matrix})
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
@@ -176,7 +200,15 @@ jobs:
build:
needs: matrix-setup
runs-on: ubuntu-latest
runs-on: ${{ fromJSON(matrix.runs_on) }}
container:
image: ubuntu:24.04
volumes:
- /home/runner/.conan-cache:/.conan-cache
- /home/runner/.ccache-cache:/github/home/.ccache-cache
defaults:
run:
shell: bash
outputs:
artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
strategy:
@@ -191,23 +223,22 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install build dependencies
run: |
sudo apt-get update
sudo apt-get install -y ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
apt-get update
apt-get install -y software-properties-common
add-apt-repository ppa:ubuntu-toolchain-r/test -y
apt-get update
apt-get install -y git python3 python-is-python3 pipx
pipx ensurepath
apt-get install -y cmake ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
apt-get install -y perl # for openssl build
apt-get install -y libsqlite3-dev # for xahaud build
# Install the specific GCC version needed for Clang
if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
echo "=== Installing GCC ${{ matrix.clang_gcc_toolchain }} for Clang ==="
sudo apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev
apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev
echo "=== GCC versions available after installation ==="
ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d"
@@ -238,7 +269,7 @@ jobs:
echo "Hiding GCC $version -> renaming to $counter (will be seen as GCC version $counter)"
# Safety check: ensure target doesn't already exist
if [ ! -e "/usr/lib/gcc/x86_64-linux-gnu/$counter" ]; then
sudo mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
else
echo "ERROR: Cannot rename GCC $version - /usr/lib/gcc/x86_64-linux-gnu/$counter already exists"
exit 1
@@ -262,11 +293,18 @@ jobs:
# Install libc++ dev packages if using libc++ (not needed for libstdc++)
if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
sudo apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
fi
# Install Conan 2
pip install --upgrade "conan>=2.0,<3"
pipx install "conan>=2.0,<3"
echo "$HOME/.local/bin" >> $GITHUB_PATH
# Install gcovr for coverage jobs
if [ "${{ matrix.job_type }}" = "coverage" ]; then
pipx install "gcovr>=7,<9"
apt-get install -y lcov
fi
- name: Check environment
run: |
@@ -277,9 +315,24 @@ jobs:
which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
which ccache && ccache --version || echo "ccache not found"
# Check gcovr for coverage jobs
if [ "${{ matrix.job_type }}" = "coverage" ]; then
which gcov && gcov --version || echo "gcov not found"
which gcovr && gcovr --version || echo "gcovr not found"
fi
echo "---- Full Environment ----"
env
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install dependencies
uses: ./.github/actions/xahau-ga-dependencies
with:
@@ -293,8 +346,10 @@ jobs:
cc: ${{ matrix.cc }}
cxx: ${{ matrix.cxx }}
stdlib: ${{ matrix.stdlib }}
gha_cache_enabled: 'false' # Disable caching for self hosted runner
- name: Build
if: matrix.job_type == 'build'
uses: ./.github/actions/xahau-ga-build
with:
generator: Ninja
@@ -307,8 +362,27 @@ jobs:
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: ${{ matrix.stdlib }}
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
ccache_max_size: '100G'
- name: Build (Coverage)
if: matrix.job_type == 'coverage'
uses: ./.github/actions/xahau-ga-build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
build_dir: ${{ env.build_dir }}
cc: ${{ matrix.cc }}
cxx: ${{ matrix.cxx }}
gcov: ${{ matrix.gcov }}
compiler-id: ${{ matrix.compiler_id }}
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
cmake-args: '-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_CXX_FLAGS="-O0" -DCMAKE_C_FLAGS="-O0"'
cmake-target: 'coverage'
ccache_max_size: '100G'
- name: Set artifact name
if: matrix.job_type == 'build'
id: set-artifact-name
run: |
ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
@@ -321,6 +395,7 @@ jobs:
ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"
- name: Run tests
if: matrix.job_type == 'build'
run: |
# Ensure the binary exists before trying to run
if [ -f "${{ env.build_dir }}/rippled" ]; then
@@ -329,3 +404,33 @@ jobs:
echo "Error: rippled executable not found in ${{ env.build_dir }}"
exit 1
fi
# Coverage-specific steps
- name: Move coverage report
if: matrix.job_type == 'coverage'
shell: bash
run: |
mv "${{ env.build_dir }}/coverage.xml" ./
- name: Archive coverage report
if: matrix.job_type == 'coverage'
uses: actions/upload-artifact@v4
with:
name: coverage.xml
path: coverage.xml
retention-days: 30
- name: Upload coverage report
if: matrix.job_type == 'coverage'
uses: wandalen/wretry.action/main@v3
with:
action: codecov/codecov-action@v4.3.0
with: |
files: coverage.xml
fail_ci_if_error: true
disable_search: true
verbose: true
plugin: noop
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 210000 # in milliseconds

View File

@@ -258,12 +258,72 @@ can't build earlier Boost versions.
generator. Pass `--help` to see the rest of the command line options.
## Coverage report
The coverage report is intended for developers using the GCC or Clang compilers
(including Apple Clang). It is generated by the build target `coverage`,
which is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or the `-Dcoverage=ON` variable in `cmake`.
Prerequisites for the coverage report:
- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
- `gcov` for GCC (installed with the compiler by default) or
- `llvm-cov` for Clang (installed with the compiler by default)
- `Debug` build type
A coverage report is created when the following steps are completed, in order:
1. `rippled` binary built with instrumentation data, enabled by the `coverage`
option mentioned above
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
to assemble both instrumentation data and the coverage capture data into a coverage report
The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`.
The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
by setting the `coverage_format` variable in `cmake`. It is also possible
to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.
Example use with some cmake variables set:
```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```
After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:
- file named `coverage.`_extension_, with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
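For CI-style machine-readable output, the coverage job added in this changeset configures roughly the following (a sketch; it reuses the assumed `.build` directory and toolchain-file path from the example above, and the flags mirror the `cmake-args` passed in the nix workflow):
```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_CXX_FLAGS="-O0" -DCMAKE_C_FLAGS="-O0" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```
Because `xml` is an alias for the cobertura format, this produces `coverage.xml` in the build directory, which is the file the workflow uploads to Codecov.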
## Options
| Option | Default Value | Description |
| --- | --- | --- |
| `assert` | OFF | Enable assertions. |
| `reporting` | OFF | Build the reporting mode feature. |
| `coverage` | OFF | Prepare the coverage report. |
| `tests` | ON | Build tests. |
| `unity` | ON | Configure a unity build. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
@@ -456,6 +516,10 @@ but it is more convenient to put them in a [profile][profile].
[1]: https://github.com/conan-io/conan-center-index/issues/13168
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html
[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html

View File

@@ -0,0 +1,440 @@
# Copyright (c) 2012 - 2017, Lars Bilke
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# CHANGES:
#
# 2012-01-31, Lars Bilke
# - Enable Code Coverage
#
# 2013-09-17, Joakim Söderberg
# - Added support for Clang.
# - Some additional usage instructions.
#
# 2016-02-03, Lars Bilke
# - Refactored functions to use named parameters
#
# 2017-06-02, Lars Bilke
# - Merged with modified version from github.com/ufz/ogs
#
# 2019-05-06, Anatolii Kurotych
# - Remove unnecessary --coverage flag
#
# 2019-12-13, FeRD (Frank Dana)
# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor
# of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments.
# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY
# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list
# - Set lcov basedir with -b argument
# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be
# overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().)
# - Delete output dir, .info file on 'make clean'
# - Remove Python detection, since version mismatches will break gcovr
# - Minor cleanup (lowercase function names, update examples...)
#
# 2019-12-19, FeRD (Frank Dana)
# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets
#
# 2020-01-19, Bob Apthorpe
# - Added gfortran support
#
# 2020-02-17, FeRD (Frank Dana)
# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters
# in EXCLUDEs, and remove manual escaping from gcovr targets
#
# 2021-01-19, Robin Mueller
# - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run
# - Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional
# flags to the gcovr command
#
# 2020-05-04, Mihchael Davis
# - Add -fprofile-abs-path to make gcno files contain absolute paths
# - Fix BASE_DIRECTORY not working when defined
# - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines
#
# 2021-05-10, Martin Stump
# - Check if the generator is multi-config before warning about non-Debug builds
#
# 2022-02-22, Marko Wehle
# - Change gcovr output from -o <filename> for --xml <filename> and --html <filename> output respectively.
# This will allow for Multiple Output Formats at the same time by making use of GCOVR_ADDITIONAL_ARGS, e.g. GCOVR_ADDITIONAL_ARGS "--txt".
#
# 2022-09-28, Sebastian Mueller
# - fix append_coverage_compiler_flags_to_target to correctly add flags
# - replace "-fprofile-arcs -ftest-coverage" with "--coverage" (equivalent)
#
# 2024-01-04, Bronek Kozicki
# - remove setup_target_for_coverage_lcov (slow) and setup_target_for_coverage_fastcov (no support for Clang)
# - fix Clang support by adding find_program( ... llvm-cov )
# - add Apple Clang support by adding execute_process( COMMAND xcrun -f llvm-cov ... )
# - add CODE_COVERAGE_GCOV_TOOL to explicitly select gcov tool and disable find_program
# - replace both functions setup_target_for_coverage_gcovr_* with a single setup_target_for_coverage_gcovr
# - add support for all gcovr output formats
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
#
# 2. Add the following line to your CMakeLists.txt (best inside an if-condition
# using a CMake option() to enable it just optionally):
# include(CodeCoverage)
#
# 3. Append necessary compiler flags for all supported source files:
# append_coverage_compiler_flags()
# Or for specific target:
# append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
# 4. If you need to exclude additional directories from the report, specify them
# using full paths in the COVERAGE_EXCLUDES variable before calling
# setup_target_for_coverage_*().
# Example:
# set(COVERAGE_EXCLUDES
# '${PROJECT_SOURCE_DIR}/src/dir1/*'
# '/path/to/my/src/dir2/*')
# Or, use the EXCLUDE argument to setup_target_for_coverage_*().
# Example:
# setup_target_for_coverage_gcovr(
# NAME coverage
# EXECUTABLE testrunner
# EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*")
#
# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set
# relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR)
# Example:
# set(COVERAGE_EXCLUDES "dir1/*")
# setup_target_for_coverage_gcovr(
# NAME coverage
# EXECUTABLE testrunner
# FORMAT html-details
# BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src"
# EXCLUDE "dir2/*")
#
# 4.b If you need to pass specific options to gcovr, specify them in
# GCOVR_ADDITIONAL_ARGS variable.
# Example:
# set (GCOVR_ADDITIONAL_ARGS --exclude-throw-branches --exclude-noncode-lines -s)
# setup_target_for_coverage_gcovr(
# NAME coverage
# EXECUTABLE testrunner
# EXCLUDE "src/dir1" "src/dir2")
#
# 5. Use the functions described below to create a custom make target which
# runs your test executable and produces a code coverage report.
#
# 6. Build a Debug build:
# cmake -DCMAKE_BUILD_TYPE=Debug ..
# make
# make my_coverage_target
include(CMakeParseArguments)
option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)
# Check prereqs
find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)
if(DEFINED CODE_COVERAGE_GCOV_TOOL)
set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if(APPLE)
execute_process( COMMAND xcrun -f llvm-cov
OUTPUT_VARIABLE LLVMCOV_PATH
OUTPUT_STRIP_TRAILING_WHITESPACE
)
else()
find_program( LLVMCOV_PATH llvm-cov )
endif()
if(LLVMCOV_PATH)
set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
endif()
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
find_program( GCOV_PATH gcov )
set(GCOV_TOOL "${GCOV_PATH}")
endif()
# Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach(LANG ${LANGUAGES})
if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif()
elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif()
endforeach()
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag)
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if(HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
include(CheckCCompilerFlag)
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if(HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
endif()
set(CMAKE_Fortran_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the Fortran compiler during coverage builds."
FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C++ compiler during coverage builds."
FORCE )
set(CMAKE_C_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C compiler during coverage builds."
FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
""
CACHE STRING "Flags used for linking binaries during coverage builds."
FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
""
CACHE STRING "Flags used by the shared libraries linker during coverage builds."
FORCE )
mark_as_advanced(
CMAKE_Fortran_FLAGS_COVERAGE
CMAKE_CXX_FLAGS_COVERAGE
CMAKE_C_FLAGS_COVERAGE
CMAKE_EXE_LINKER_FLAGS_COVERAGE
CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
link_libraries(gcov)
endif()
# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_gcovr(
# NAME ctest_coverage # New target name
# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
# DEPENDENCIES executable_target # Dependencies to build first
# BASE_DIRECTORY "../" # Base directory for report
# # (defaults to PROJECT_SOURCE_DIR)
# FORMAT "cobertura" # Output format, one of:
# # xml cobertura sonarqube json-summary
# # json-details coveralls csv txt
# # html-single html-nested html-details
# # (xml is an alias to cobertura;
# # if no format is set, defaults to xml)
# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative
# # to BASE_DIRECTORY, with CMake 3.4+)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# GCOVR command.
function(setup_target_for_coverage_gcovr)
set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if(NOT GCOV_TOOL)
message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
endif()
if(NOT GCOVR_PATH)
message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
endif()
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if(DEFINED Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else()
set(BASEDIR ${PROJECT_SOURCE_DIR})
endif()
if(NOT DEFINED Coverage_FORMAT)
set(Coverage_FORMAT xml)
endif()
if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else()
if((Coverage_FORMAT STREQUAL "html-details")
OR (Coverage_FORMAT STREQUAL "html-nested"))
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif(Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif((Coverage_FORMAT STREQUAL "json-summary")
OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls"))
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif(Coverage_FORMAT STREQUAL "txt")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
elseif(Coverage_FORMAT STREQUAL "csv")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
else()
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
endif()
endif()
if((Coverage_FORMAT STREQUAL "cobertura")
OR (Coverage_FORMAT STREQUAL "xml"))
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
set(Coverage_FORMAT cobertura) # overwrite xml
elseif(Coverage_FORMAT STREQUAL "sonarqube")
list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "json-summary")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
elseif(Coverage_FORMAT STREQUAL "json-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
elseif(Coverage_FORMAT STREQUAL "coveralls")
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
elseif(Coverage_FORMAT STREQUAL "csv")
list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "txt")
list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "html-single")
list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
elseif(Coverage_FORMAT STREQUAL "html-nested")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
elseif(Coverage_FORMAT STREQUAL "html-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
else()
message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
endif()
# Collect excludes (CMake 3.4+: Also compute absolute paths)
set(GCOVR_EXCLUDES "")
foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
endif()
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
endforeach()
list(REMOVE_DUPLICATES GCOVR_EXCLUDES)
# Combine excludes to several -e arguments
set(GCOVR_EXCLUDE_ARGS "")
foreach(EXCLUDE ${GCOVR_EXCLUDES})
list(APPEND GCOVR_EXCLUDE_ARGS "-e")
list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
endforeach()
# Set up commands which will be run to generate coverage data
# Run tests
set(GCOVR_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
# Create folder
if(DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD
${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
else()
set(GCOVR_FOLDER_CMD echo) # dummy
endif()
# Running gcovr
set(GCOVR_CMD
${GCOVR_PATH}
--gcov-executable ${GCOV_TOOL}
--gcov-ignore-parse-errors=negative_hits.warn_once_per_file
-r ${BASEDIR}
${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS}
--object-directory=${PROJECT_BINARY_DIR}
)
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
message(STATUS "Command to create a folder: ")
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
endif()
message(STATUS "Command to generate gcovr coverage data: ")
string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
message(STATUS "${GCOVR_CMD_SPACED}")
endif()
add_custom_target(${Coverage_NAME}
COMMAND ${GCOVR_EXEC_TESTS_CMD}
COMMAND ${GCOVR_FOLDER_CMD}
COMMAND ${GCOVR_CMD}
BYPRODUCTS ${GCOVR_OUTPUT_FILE}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Running gcovr to produce code coverage report."
)
# Show info where to find the report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND ;
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
endfunction() # setup_target_for_coverage_gcovr
function(append_coverage_compiler_flags)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction() # append_coverage_compiler_flags
# Setup coverage for specific library
function(append_coverage_compiler_flags_to_target name)
separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
target_compile_options(${name} PRIVATE ${_flag_list})
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
target_link_libraries(${name} PRIVATE gcov)
endif()
endfunction()

View File

@@ -48,13 +48,9 @@ target_sources (xrpl_core PRIVATE
src/ripple/beast/net/impl/IPAddressV6.cpp
src/ripple/beast/net/impl/IPEndpoint.cpp
src/ripple/beast/utility/src/beast_Journal.cpp
src/ripple/beast/utility/src/beast_PropertyStream.cpp)
# Conditionally add enhanced logging source when BEAST_ENHANCED_LOGGING is enabled
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
target_sources(xrpl_core PRIVATE
src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
endif()
src/ripple/beast/utility/src/beast_PropertyStream.cpp
# Enhanced logging - compiles to empty when BEAST_ENHANCED_LOGGING is not defined
src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
#[===============================[
core sources
@@ -162,12 +158,16 @@ target_link_libraries (xrpl_core
date::date
Ripple::opts)
# Link date-tz library when enhanced logging is enabled
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
if(TARGET date::date-tz)
target_link_libraries(xrpl_core PUBLIC date::date-tz)
endif()
# date-tz for enhanced logging (always linked, code is #ifdef guarded)
if(TARGET date::date-tz)
target_link_libraries(xrpl_core PUBLIC date::date-tz)
endif()
# BEAST_ENHANCED_LOGGING: enable for Debug builds OR when explicitly requested
# Uses generator expression so it works with multi-config generators (Xcode, VS, Ninja Multi-Config)
target_compile_definitions(xrpl_core PUBLIC
$<$<OR:$<CONFIG:Debug>,$<BOOL:${BEAST_ENHANCED_LOGGING}>>:BEAST_ENHANCED_LOGGING=1>
)
#[=================================[
main/core headers installation
#]=================================]

View File

@@ -2,97 +2,37 @@
coverage report target
#]===================================================================]
if (coverage)
if (is_clang)
if (APPLE)
execute_process (COMMAND xcrun -f llvm-profdata
OUTPUT_VARIABLE LLVM_PROFDATA
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_PROFDATA llvm-profdata)
endif ()
if (NOT LLVM_PROFDATA)
message (WARNING "unable to find llvm-profdata - skipping coverage_report target")
endif ()
if(NOT coverage)
message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
endif()
if (APPLE)
execute_process (COMMAND xcrun -f llvm-cov
OUTPUT_VARIABLE LLVM_COV
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_COV llvm-cov)
endif ()
if (NOT LLVM_COV)
message (WARNING "unable to find llvm-cov - skipping coverage_report target")
endif ()
if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
return()
endif()
set (extract_pattern "")
if (coverage_core_only)
set (extract_pattern "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/")
endif ()
include(CodeCoverage)
if (LLVM_COV AND LLVM_PROFDATA)
add_custom_target (coverage_report
USES_TERMINAL
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage - results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
COMMAND ${LLVM_PROFDATA}
merge -sparse default.profraw -o rip.profdata
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LLVM_COV}
report -instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
# generate html report
COMMAND ${LLVM_COV}
show -format=html -output-dir=${CMAKE_BINARY_DIR}/coverage
-instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
BYPRODUCTS coverage/index.html)
endif ()
elseif (is_gcc)
find_program (LCOV lcov)
if (NOT LCOV)
message (WARNING "unable to find lcov - skipping coverage_report target")
endif ()
# The instructions for these commands come from the `CodeCoverage` module,
# which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
# then locally changed (see CHANGES: section in `CodeCoverage.cmake`)
find_program (GENHTML genhtml)
if (NOT GENHTML)
message (WARNING "unable to find genhtml - skipping coverage_report target")
endif ()
set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
separate_arguments(GCOVR_ADDITIONAL_ARGS)
endif()
set (extract_pattern "*")
if (coverage_core_only)
set (extract_pattern "*/src/ripple/*")
endif ()
list(APPEND GCOVR_ADDITIONAL_ARGS
--exclude-throw-branches
--exclude-noncode-lines
--exclude-unreachable-branches -s
-j ${coverage_test_parallelism})
if (LCOV AND GENHTML)
add_custom_target (coverage_report
USES_TERMINAL
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage- results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
# create baseline info file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -i -o baseline.info
| grep -v "ignoring data for external file"
# run tests
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests for coverage report."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
# Create test coverage data file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -o tests.info
| grep -v "ignoring data for external file"
# Combine baseline and test coverage data
COMMAND ${LCOV}
-a baseline.info -a tests.info -o lcov-all.info
# extract our files
COMMAND ${LCOV}
-e lcov-all.info "${extract_pattern}" -o lcov.info
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LCOV} --summary lcov.info
# generate HTML report
COMMAND ${GENHTML}
-o ${CMAKE_BINARY_DIR}/coverage lcov.info
BYPRODUCTS coverage/index.html)
endif ()
endif ()
endif ()
setup_target_for_coverage_gcovr(
NAME coverage
FORMAT ${coverage_format}
EXECUTABLE rippled
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
EXCLUDE "src/test" "${CMAKE_BINARY_DIR}/proto_gen" "${CMAKE_BINARY_DIR}/proto_gen_grpc"
DEPENDENCIES rippled
)
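Putting the `CodeCoverage` module and this call together, the `coverage` target runs the instrumented `rippled` unit tests and then invokes `gcovr` roughly as follows (a sketch assembled from the arguments above, with placeholder paths rather than a verbatim command):
```
gcovr --gcov-executable gcov \
      --gcov-ignore-parse-errors=negative_hits.warn_once_per_file \
      -r <source dir> \
      --exclude-throw-branches --exclude-noncode-lines --exclude-unreachable-branches -s \
      -j <coverage_test_parallelism> \
      --cobertura coverage.xml --cobertura-pretty \
      -e <source dir>/src/test -e <build dir>/proto_gen -e <build dir>/proto_gen_grpc \
      --object-directory=<build dir>
```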

View File

@@ -23,15 +23,15 @@ target_compile_options (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

View File

@@ -2,6 +2,8 @@
convenience variables and sanity checks
#]===================================================================]
include(ProcessorCount)
if (NOT ep_procs)
ProcessorCount(ep_procs)
if (ep_procs GREATER 1)

View File

@@ -2,121 +2,129 @@
declare user options/settings
#]===================================================================]
option (assert "Enables asserts, even in release builds" OFF)
include(ProcessorCount)
option (reporting "Build rippled with reporting mode enabled" OFF)
ProcessorCount(PROCESSOR_COUNT)
option (tests "Build tests" ON)
option(assert "Enables asserts, even in release builds" OFF)
option (unity "Creates a build using UNITY support in cmake. This is the default" ON)
if (unity)
if (NOT is_ci)
set (CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
endif ()
endif ()
if (is_gcc OR is_clang)
option (coverage "Generates coverage info." OFF)
option (profile "Add profiling flags" OFF)
set (coverage_test "" CACHE STRING
option(reporting "Build rippled with reporting mode enabled" OFF)
option(tests "Build tests" ON)
option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
if(unity)
if(NOT is_ci)
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
endif()
endif()
if(is_gcc OR is_clang)
option(coverage "Generates coverage info." OFF)
option(profile "Add profiling flags" OFF)
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
"Unit tests parallelism for the purpose of coverage report.")
set(coverage_format "html-details" CACHE STRING
"Output format of the coverage report.")
set(coverage_extra_args "" CACHE STRING
"Additional arguments to pass to gcovr.")
set(coverage_test "" CACHE STRING
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
if (coverage_test AND NOT coverage)
set (coverage ON CACHE BOOL "gcc/clang only" FORCE)
endif ()
option (coverage_core_only
"Include only src/ripple files when generating coverage report. \
Set to OFF to include all sources in coverage report."
ON)
option (wextra "compile with extra gcc/clang warnings enabled" ON)
else ()
set (profile OFF CACHE BOOL "gcc/clang only" FORCE)
set (coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set (wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif ()
if (is_linux)
option (BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
option (static "link protobuf, openssl, libc++, and boost statically" ON)
option (perf "Enables flags that assist with perf recording" OFF)
option (use_gold "enables detection of gold (binutils) linker" ON)
else ()
if(coverage_test AND NOT coverage)
set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
endif()
option(wextra "compile with extra gcc/clang warnings enabled" ON)
else()
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif()
if(is_linux)
option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
option(static "link protobuf, openssl, libc++, and boost statically" ON)
option(perf "Enables flags that assist with perf recording" OFF)
option(use_gold "enables detection of gold (binutils) linker" ON)
option(use_mold "enables detection of mold (binutils) linker" ON)
else()
# we are not ready to allow shared-libs on windows because it would require
# export declarations. On macos it's more feasible, but static openssl
# produces odd linker errors, thus we disable shared lib builds for now.
set (BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set (static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set (perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set (use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
endif ()
if (is_clang)
option (use_lld "enables detection of lld linker" ON)
else ()
set (use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif ()
option (jemalloc "Enables jemalloc for heap profiling" OFF)
option (werr "treat warnings as errors" OFF)
option (local_protobuf
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()
if(is_clang)
option(use_lld "enables detection of lld linker" ON)
else()
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF)
option (local_grpc
option(local_grpc
"Force a local build of gRPC instead of looking for an installed version." OFF)
# this one is a string and therefore can't be an option
set (san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property (CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if (san)
string (TOLOWER ${san} san)
set (SAN_FLAG "-fsanitize=${san}")
set (SAN_LIB "")
if (is_gcc)
if (san STREQUAL "address")
set (SAN_LIB "asan")
elseif (san STREQUAL "thread")
set (SAN_LIB "tsan")
elseif (san STREQUAL "memory")
set (SAN_LIB "msan")
elseif (san STREQUAL "undefined")
set (SAN_LIB "ubsan")
endif ()
endif ()
set (_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set (CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag (${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set (CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if (NOT COMPILER_SUPPORTS_SAN)
message (FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif ()
endif ()
set (container_label "" CACHE STRING "tag to use for package building containers")
option (packages_only
set(san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if(san)
string(TOLOWER ${san} san)
set(SAN_FLAG "-fsanitize=${san}")
set(SAN_LIB "")
if(is_gcc)
if(san STREQUAL "address")
set(SAN_LIB "asan")
elseif(san STREQUAL "thread")
set(SAN_LIB "tsan")
elseif(san STREQUAL "memory")
set(SAN_LIB "msan")
elseif(san STREQUAL "undefined")
set(SAN_LIB "ubsan")
endif()
endif()
set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if(NOT COMPILER_SUPPORTS_SAN)
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif()
endif()
set(container_label "" CACHE STRING "tag to use for package building containers")
option(packages_only
"ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option (have_package_container
option(have_package_container
"Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)
# the remaining options are obscure and rarely used
option (beast_no_unit_test_inline
option(beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table"
OFF)
option (single_io_service_thread
option(single_io_service_thread
"Restricts the number of threads calling io_service::run to one. \
This can be useful when debugging."
OFF)
option (boost_show_deprecated
option(boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
OFF)
option (beast_hashers
option(beast_hashers
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
OFF)
if (WIN32)
option (beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else ()
set (beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif ()
if (coverage)
message (STATUS "coverage build requested - forcing Debug build")
set (CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif ()
if(WIN32)
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()
if(coverage)
message(STATUS "coverage build requested - forcing Debug build")
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif()

View File

@@ -37,20 +37,11 @@ endif() #git
set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/")
add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}")
# BEAST_ENHANCED_LOGGING option - adds file:line numbers and formatting to logs
# Default to ON for Debug builds, OFF for Release
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" ON)
else()
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" OFF)
endif()
if(BEAST_ENHANCED_LOGGING)
add_definitions(-DBEAST_ENHANCED_LOGGING=1)
message(STATUS "Log line numbers enabled")
else()
message(STATUS "Log line numbers disabled")
endif()
# BEAST_ENHANCED_LOGGING - adds file:line numbers and formatting to logs
# Automatically enabled for Debug builds via generator expression
# Can be explicitly controlled with -DBEAST_ENHANCED_LOGGING=ON/OFF
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages (auto: Debug=ON, Release=OFF)" OFF)
message(STATUS "BEAST_ENHANCED_LOGGING option: ${BEAST_ENHANCED_LOGGING}")
if(thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
@@ -73,7 +64,6 @@ include (CheckCXXCompilerFlag)
include (FetchContent)
include (ExternalProject)
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
include (ProcessorCount)
if (target)
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()
@@ -152,11 +142,14 @@ target_link_libraries(ripple_libs INTERFACE
SQLite::SQLite3
)
if(coverage)
include(RippledCov)
endif()
###
include(RippledCore)
include(RippledInstall)
include(RippledCov)
include(RippledMultiConfig)
include(RippledDocs)
include(RippledValidatorKeys)
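
The BEAST_ENHANCED_LOGGING option above only arranges for a compile definition; the behaviour lives in the C++ that checks the macro (see the logging hunks further down). A minimal, hypothetical sketch of code compiled with -DBEAST_ENHANCED_LOGGING=1 (the helper name and output format are illustrative, not taken from the tree):

#include <iostream>
#include <string>

// Hypothetical helper: the extra "file:line" suffix is compiled in only
// when the build defines BEAST_ENHANCED_LOGGING (Debug builds by default).
std::string formatLogLine(std::string msg, char const* file, int line)
{
#ifdef BEAST_ENHANCED_LOGGING
    msg += " [" + std::string(file) + ":" + std::to_string(line) + "]";
#endif
    return msg;
}

int main()
{
    std::cout << formatLogLine("Ledger closed", __FILE__, __LINE__) << '\n';
}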

View File

@@ -192,7 +192,7 @@ ENV PATH=/usr/local/bin:$PATH
# Configure ccache and Conan 2
# NOTE: Using echo commands instead of heredocs because heredocs in Docker RUN commands are finicky
RUN /hbb_exe/activate-exec bash -c "ccache -M 10G && \
RUN /hbb_exe/activate-exec bash -c "ccache -M 100G && \
ccache -o cache_dir=/cache/ccache && \
ccache -o compiler_check=content && \
mkdir -p ~/.conan2 /cache/conan2 /cache/conan2_download /cache/conan2_sources && \

View File

@@ -367,90 +367,110 @@ const uint8_t max_emit = 255;
const uint8_t max_params = 16;
const double fee_base_multiplier = 1.1f;
#define I32 0x7FU
#define I64 0x7EU
#define HOOK_WRAP_PARAMS(...) __VA_ARGS__
#define HOOK_API_DEFINITION(RETURN_TYPE, FUNCTION_NAME, PARAMS_TUPLE) \
{ \
#FUNCTION_NAME, \
{ \
RETURN_TYPE, HOOK_WRAP_PARAMS PARAMS_TUPLE \
} \
}
using APIWhitelist = std::map<std::string, std::vector<uint8_t>>;
// RH NOTE: Descriptions of the api functions are in ./impl/applyHook.cpp and
// hookapi.h (the include for hooks). This maps each api name to its return
// code (vec[0]) and its parameter types (vec[>0]) as wasm type codes
static const std::map<std::string, std::vector<uint8_t>> import_whitelist{
{"_g", {0x7FU, 0x7FU, 0x7FU}},
{"accept", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
{"rollback", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
{"util_raddr", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"util_accid", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"util_verify", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"util_sha512h", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"util_keylet",
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"sto_validate", {0x7EU, 0x7FU, 0x7FU}},
{"sto_subfield", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"sto_subarray", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"sto_emplace", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"sto_erase", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"etxn_burden", {0x7EU}},
{"etxn_details", {0x7EU, 0x7FU, 0x7FU}},
{"etxn_fee_base", {0x7EU, 0x7FU, 0x7FU}},
{"etxn_reserve", {0x7EU, 0x7FU}},
{"etxn_generation", {0x7EU}},
{"etxn_nonce", {0x7EU, 0x7FU, 0x7FU}},
{"emit", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"float_set", {0x7EU, 0x7FU, 0x7EU}},
{"float_multiply", {0x7EU, 0x7EU, 0x7EU}},
{"float_mulratio", {0x7EU, 0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"float_negate", {0x7EU, 0x7EU}},
{"float_compare", {0x7EU, 0x7EU, 0x7EU, 0x7FU}},
{"float_sum", {0x7EU, 0x7EU, 0x7EU}},
{"float_sto",
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7EU, 0x7FU}},
{"float_sto_set", {0x7EU, 0x7FU, 0x7FU}},
{"float_invert", {0x7EU, 0x7EU}},
{"float_divide", {0x7EU, 0x7EU, 0x7EU}},
{"float_one", {0x7EU}},
{"float_mantissa", {0x7EU, 0x7EU}},
{"float_sign", {0x7EU, 0x7EU}},
{"float_int", {0x7EU, 0x7EU, 0x7FU, 0x7FU}},
{"float_log", {0x7EU, 0x7EU}},
{"float_root", {0x7EU, 0x7EU, 0x7FU}},
{"fee_base", {0x7EU}},
{"ledger_seq", {0x7EU}},
{"ledger_last_time", {0x7EU}},
{"ledger_last_hash", {0x7EU, 0x7FU, 0x7FU}},
{"ledger_nonce", {0x7EU, 0x7FU, 0x7FU}},
{"ledger_keylet", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"hook_account", {0x7EU, 0x7FU, 0x7FU}},
{"hook_hash", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"hook_param_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"hook_param", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"hook_again", {0x7EU}},
{"hook_skip", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"hook_pos", {0x7EU}},
{"slot", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"slot_clear", {0x7EU, 0x7FU}},
{"slot_count", {0x7EU, 0x7FU}},
{"slot_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"slot_size", {0x7EU, 0x7FU}},
{"slot_subarray", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"slot_subfield", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"slot_type", {0x7EU, 0x7FU, 0x7FU}},
{"slot_float", {0x7EU, 0x7FU}},
{"state_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"state_foreign_set",
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"state", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"state_foreign",
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"trace", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"trace_num", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
{"trace_float", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
{"otxn_burden", {0x7EU}},
{"otxn_field", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"otxn_generation", {0x7EU}},
{"otxn_id", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
{"otxn_type", {0x7EU}},
{"otxn_slot", {0x7EU, 0x7FU}},
{"otxn_param", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
{"meta_slot", {0x7EU, 0x7FU}}};
static const APIWhitelist import_whitelist{
// clang-format off
HOOK_API_DEFINITION(I32, _g, (I32, I32)),
HOOK_API_DEFINITION(I64, accept, (I32, I32, I64)),
HOOK_API_DEFINITION(I64, rollback, (I32, I32, I64)),
HOOK_API_DEFINITION(I64, util_raddr, (I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, util_accid, (I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, util_verify, (I32, I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, util_sha512h, (I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, util_keylet, (I32, I32, I32, I32, I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, sto_validate, (I32, I32)),
HOOK_API_DEFINITION(I64, sto_subfield, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, sto_subarray, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, sto_emplace, (I32, I32, I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, sto_erase, (I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, etxn_burden, ()),
HOOK_API_DEFINITION(I64, etxn_details, (I32, I32)),
HOOK_API_DEFINITION(I64, etxn_fee_base, (I32, I32)),
HOOK_API_DEFINITION(I64, etxn_reserve, (I32)),
HOOK_API_DEFINITION(I64, etxn_generation, ()),
HOOK_API_DEFINITION(I64, etxn_nonce, (I32, I32)),
HOOK_API_DEFINITION(I64, emit, (I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, float_set, (I32, I64)),
HOOK_API_DEFINITION(I64, float_multiply, (I64, I64)),
HOOK_API_DEFINITION(I64, float_mulratio, (I64, I32, I32, I32)),
HOOK_API_DEFINITION(I64, float_negate, (I64)),
HOOK_API_DEFINITION(I64, float_compare, (I64, I64, I32)),
HOOK_API_DEFINITION(I64, float_sum, (I64, I64)),
HOOK_API_DEFINITION(I64, float_sto, (I32, I32, I32, I32, I32, I32, I64, I32)),
HOOK_API_DEFINITION(I64, float_sto_set, (I32, I32)),
HOOK_API_DEFINITION(I64, float_invert, (I64)),
HOOK_API_DEFINITION(I64, float_divide, (I64, I64)),
HOOK_API_DEFINITION(I64, float_one, ()),
HOOK_API_DEFINITION(I64, float_mantissa, (I64)),
HOOK_API_DEFINITION(I64, float_sign, (I64)),
HOOK_API_DEFINITION(I64, float_int, (I64, I32, I32)),
HOOK_API_DEFINITION(I64, float_log, (I64)),
HOOK_API_DEFINITION(I64, float_root, (I64, I32)),
HOOK_API_DEFINITION(I64, fee_base, ()),
HOOK_API_DEFINITION(I64, ledger_seq, ()),
HOOK_API_DEFINITION(I64, ledger_last_time, ()),
HOOK_API_DEFINITION(I64, ledger_last_hash, (I32, I32)),
HOOK_API_DEFINITION(I64, ledger_nonce, (I32, I32)),
HOOK_API_DEFINITION(I64, ledger_keylet, (I32, I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, hook_account, (I32, I32)),
HOOK_API_DEFINITION(I64, hook_hash, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, hook_param_set, (I32, I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, hook_param, (I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, hook_again, ()),
HOOK_API_DEFINITION(I64, hook_skip, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, hook_pos, ()),
HOOK_API_DEFINITION(I64, slot, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, slot_clear, (I32)),
HOOK_API_DEFINITION(I64, slot_count, (I32)),
HOOK_API_DEFINITION(I64, slot_set, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, slot_size, (I32)),
HOOK_API_DEFINITION(I64, slot_subarray, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, slot_subfield, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, slot_type, (I32, I32)),
HOOK_API_DEFINITION(I64, slot_float, (I32)),
HOOK_API_DEFINITION(I64, state_set, (I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, state_foreign_set, (I32, I32, I32, I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, state, (I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, state_foreign, (I32, I32, I32, I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, trace, (I32, I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, trace_num, (I32, I32, I64)),
HOOK_API_DEFINITION(I64, trace_float, (I32, I32, I64)),
HOOK_API_DEFINITION(I64, otxn_burden, ()),
HOOK_API_DEFINITION(I64, otxn_field, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, otxn_generation, ()),
HOOK_API_DEFINITION(I64, otxn_id, (I32, I32, I32)),
HOOK_API_DEFINITION(I64, otxn_type, ()),
HOOK_API_DEFINITION(I64, otxn_slot, (I32)),
HOOK_API_DEFINITION(I64, otxn_param, (I32, I32, I32, I32)),
HOOK_API_DEFINITION(I64, meta_slot, (I32)),
// clang-format on
};
// featureHooks1
static const std::map<std::string, std::vector<uint8_t>> import_whitelist_1{
{"xpop_slot", {0x7EU, 0x7FU, 0x7FU}}};
static const APIWhitelist import_whitelist_1{
// clang-format off
HOOK_API_DEFINITION(I64, xpop_slot, (I32, I32)),
// clang-format on
};
#undef HOOK_API_DEFINITION
#undef I32
#undef I64
}; // namespace hook_api
#endif
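
To make the refactor above concrete: each HOOK_API_DEFINITION entry expands to exactly the name-to-type-code mapping the old table spelled out by hand (I64 = 0x7E, I32 = 0x7F). A self-contained sketch of that expansion, reusing the macro from the hunk; the check in main is illustrative only:

#include <cassert>
#include <cstdint>
#include <map>
#include <string>
#include <vector>

#define I32 0x7FU
#define I64 0x7EU
#define HOOK_WRAP_PARAMS(...) __VA_ARGS__
// Expands to {"name", {return_type, param_types...}}
#define HOOK_API_DEFINITION(RETURN_TYPE, FUNCTION_NAME, PARAMS_TUPLE) \
    {#FUNCTION_NAME, {RETURN_TYPE, HOOK_WRAP_PARAMS PARAMS_TUPLE}}

using APIWhitelist = std::map<std::string, std::vector<std::uint8_t>>;

int main()
{
    APIWhitelist const wl{
        HOOK_API_DEFINITION(I64, accept, (I32, I32, I64)),
    };
    // Matches the old hand-written entry: {"accept", {0x7EU, 0x7FU, 0x7FU, 0x7EU}}
    assert(
        (wl.at("accept") ==
         std::vector<std::uint8_t>{0x7EU, 0x7FU, 0x7FU, 0x7EU}));
}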

View File

@@ -471,6 +471,10 @@ ManifestCache::applyManifest(Manifest m)
auto masterKey = m.masterKey;
map_.emplace(std::move(masterKey), std::move(m));
// Increment sequence to invalidate cached manifest messages
seq_++;
return ManifestDisposition::accepted;
}
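
For context on the one-line change above: applyManifest bumps seq_ so that anything caching a serialized manifest message keyed on the old sequence rebuilds it. A minimal sketch of that sequence-checked cache pattern (all names are hypothetical, not the ManifestCache API):

#include <cassert>
#include <cstdint>
#include <string>

// Hypothetical consumer-side cache: rebuild the serialized payload whenever
// the producer's sequence number has moved since the last build.
struct CachedMessage
{
    std::uint64_t builtAtSeq = 0;
    std::string payload;

    std::string const& get(std::uint64_t currentSeq)
    {
        if (payload.empty() || builtAtSeq != currentSeq)
        {
            payload = "serialized-manifests@" + std::to_string(currentSeq);
            builtAtSeq = currentSeq;  // remember which version was serialized
        }
        return payload;
    }
};

int main()
{
    std::uint64_t seq = 1;  // stands in for the cache's sequence counter
    CachedMessage msg;
    std::string const first = msg.get(seq);
    assert(first == msg.get(seq));  // same sequence: cache hit
    ++seq;                          // applyManifest-style bump invalidates it
    assert(msg.get(seq) != first);
}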

View File

@@ -360,7 +360,8 @@ Logs::format(
if (!partition.empty())
{
#ifdef BEAST_ENHANCED_LOGGING
output += beast::detail::get_log_highlight_color();
if (beast::detail::should_log_use_colors())
output += beast::detail::get_log_highlight_color();
#endif
output += partition + ":";
}
@@ -392,7 +393,8 @@ Logs::format(
}
#ifdef BEAST_ENHANCED_LOGGING
output += "\033[0m";
if (beast::detail::should_log_use_colors())
output += "\033[0m";
#endif
output += message;
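
The two hunks above stop emitting ANSI escape sequences unconditionally and instead ask should_log_use_colors() first. A hedged sketch of a typical gate of that kind (honouring NO_COLOR and requiring a TTY); this is an assumption about what such a check does, not the actual implementation:

#include <cstdio>    // fileno
#include <cstdlib>   // std::getenv
#include <iostream>
#include <string>
#include <unistd.h>  // isatty (POSIX)

// Illustrative color gate: no escapes when NO_COLOR is set or when
// output is redirected to a file or pipe.
static bool use_colors()
{
    if (std::getenv("NO_COLOR") != nullptr)
        return false;
    return isatty(fileno(stdout)) != 0;
}

int main()
{
    std::string output;
    if (use_colors())
        output += "\033[1;33m";  // highlight the partition name
    output += "Overlay:";
    if (use_colors())
        output += "\033[0m";     // reset, as in the second hunk
    output += " peer connected";
    std::cout << output << '\n';
}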

View File

@@ -41,6 +41,14 @@ get_log_highlight_color();
constexpr const char*
strip_source_root(const char* file)
{
// Handle relative paths from build/ directory (common with ccache)
// e.g., "../src/ripple/..." -> "ripple/..."
if (file && file[0] == '.' && file[1] == '.' && file[2] == '/' &&
file[3] == 's' && file[4] == 'r' && file[5] == 'c' && file[6] == '/')
{
return file + 7; // skip "../src/"
}
#ifdef SOURCE_ROOT_PATH
constexpr const char* sourceRoot = SOURCE_ROOT_PATH;
constexpr auto strlen_constexpr = [](const char* s) constexpr
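
The prefix check added above covers __FILE__ values such as "../src/ripple/...", which show up when compilation runs from a build/ directory (common with ccache). A small standalone version of the same idea; the character-by-character comparison keeps it valid in a constexpr context:

#include <iostream>

// Illustrative constexpr strip: "../src/ripple/x.cpp" -> "ripple/x.cpp";
// anything without the "../src/" prefix is returned unchanged.
constexpr char const*
strip_build_relative(char const* file)
{
    if (file == nullptr)
        return file;
    constexpr char prefix[] = "../src/";
    char const* f = file;
    for (char const* p = prefix; *p != '\0'; ++p, ++f)
    {
        if (*f != *p)
            return file;  // not prefixed (also stops safely at '\0')
    }
    return f;  // points just past "../src/"
}

static_assert(
    *strip_build_relative("../src/ripple/a.cpp") == 'r',
    "prefix stripped");

int main()
{
    std::cout << strip_build_relative("../src/ripple/overlay/OverlayImpl.cpp")
              << '\n';
    std::cout << strip_build_relative("ripple/overlay/OverlayImpl.cpp") << '\n';
}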

View File

@@ -17,6 +17,8 @@
*/
//==============================================================================
#ifdef BEAST_ENHANCED_LOGGING
#include <ripple/beast/utility/EnhancedLogging.h>
#include <cstdlib>
#include <cstring>
@@ -112,3 +114,5 @@ log_write_location_string(std::ostream& os, const char* file, int line)
} // namespace detail
} // namespace beast
#endif // BEAST_ENHANCED_LOGGING

View File

@@ -155,14 +155,43 @@ Journal::ScopedStream::~ScopedStream()
#ifdef BEAST_ENHANCED_LOGGING
// Add suffix if location is enabled
if (file_ && detail::should_show_location() && !s.empty() && s != "\n")
if (file_ && detail::should_show_location() && !s.empty())
{
std::ostringstream combined;
combined << s;
if (!s.empty() && s.back() != ' ')
combined << " ";
detail::log_write_location_string(combined, file_, line_);
s = combined.str();
// Single optimized scan from the end
size_t const lastNonWhitespace = s.find_last_not_of(" \n\r\t");
// Skip if message is only whitespace (e.g., just "\n" or " \n\n")
if (lastNonWhitespace != std::string::npos)
{
// Count only the trailing newlines (tiny range)
size_t trailingNewlines = 0;
for (size_t i = lastNonWhitespace + 1; i < s.length(); ++i)
{
if (s[i] == '\n')
++trailingNewlines;
}
// Build location string once
std::ostringstream locStream;
detail::log_write_location_string(locStream, file_, line_);
std::string const location = locStream.str();
// Pre-allocate exact size → zero reallocations
size_t const finalSize = lastNonWhitespace + 1 + 1 +
location.length() + trailingNewlines;
std::string result;
result.reserve(finalSize);
// Direct string ops (no ostringstream overhead)
result.append(s, 0, lastNonWhitespace + 1);
result.push_back(' ');
result += location;
if (trailingNewlines > 0)
result.append(trailingNewlines, '\n');
s = std::move(result); // Move, no copy
}
}
#endif
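
A compact, standalone version of the logic added above: locate the last non-whitespace character, splice the location string in after it, and put back only the trailing newlines, leaving whitespace-only messages untouched. The location format here is illustrative:

#include <cassert>
#include <string>

static std::string
append_location(std::string const& s, std::string const& loc)
{
    std::size_t const last = s.find_last_not_of(" \n\r\t");
    if (last == std::string::npos)
        return s;  // whitespace-only: leave as-is

    std::size_t newlines = 0;
    for (std::size_t i = last + 1; i < s.size(); ++i)
        if (s[i] == '\n')
            ++newlines;

    std::string out;
    out.reserve(last + 1 + 1 + loc.size() + newlines);  // exact final size
    out.append(s, 0, last + 1);  // message up to last non-whitespace char
    out.push_back(' ');
    out += loc;
    out.append(newlines, '\n');  // restore only the trailing newlines
    return out;
}

int main()
{
    assert(
        append_location("Ledger closed\n", "[Ledger.cpp:42]") ==
        "Ledger closed [Ledger.cpp:42]\n");
    assert(append_location(" \n\n", "[Ledger.cpp:42]") == " \n\n");
}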

View File

@@ -484,44 +484,61 @@ OverlayImpl::start()
m_peerFinder->setConfig(config);
m_peerFinder->start();
auto addIps = [&](std::vector<std::string> bootstrapIps) -> void {
auto addIps = [this](std::vector<std::string> ips, bool fixed) {
beast::Journal const& j = app_.journal("Overlay");
for (auto& ip : bootstrapIps)
for (auto& ip : ips)
{
std::size_t pos = ip.find('#');
if (pos != std::string::npos)
ip.erase(pos);
JLOG(j.trace()) << "Found boostrap IP: " << ip;
JLOG(j.trace())
<< "Found " << (fixed ? "fixed" : "bootstrap") << " IP: " << ip;
}
m_resolver.resolve(
bootstrapIps,
[&](std::string const& name,
ips,
[this, fixed](
std::string const& name,
std::vector<beast::IP::Endpoint> const& addresses) {
std::vector<std::string> ips;
ips.reserve(addresses.size());
beast::Journal const& j = app_.journal("Overlay");
std::string const base("config: ");
std::vector<beast::IP::Endpoint> eps;
eps.reserve(addresses.size());
for (auto const& addr : addresses)
{
std::string addrStr = addr.port() == 0
? to_string(addr.at_port(DEFAULT_PEER_PORT))
: to_string(addr);
JLOG(j.trace()) << "Parsed boostrap IP: " << addrStr;
ips.push_back(addrStr);
auto ep = addr.port() == 0 ? addr.at_port(DEFAULT_PEER_PORT)
: addr;
JLOG(j.trace())
<< "Parsed " << (fixed ? "fixed" : "bootstrap")
<< " IP: " << ep;
eps.push_back(ep);
}
std::string const base("config: ");
if (!ips.empty())
m_peerFinder->addFallbackStrings(base + name, ips);
if (eps.empty())
return;
if (fixed)
{
m_peerFinder->addFixedPeer(base + name, eps);
}
else
{
std::vector<std::string> strs;
strs.reserve(eps.size());
for (auto const& ep : eps)
strs.push_back(to_string(ep));
m_peerFinder->addFallbackStrings(base + name, strs);
}
});
};
if (!app_.config().IPS.empty())
addIps(app_.config().IPS);
addIps(app_.config().IPS, false);
if (!app_.config().IPS_FIXED.empty())
addIps(app_.config().IPS_FIXED);
addIps(app_.config().IPS_FIXED, true);
auto const timer = std::make_shared<Timer>(*this);
std::lock_guard lock(mutex_);
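
For context on the fix this compare restores: resolved [ips] entries remain bootstrap fallbacks, while resolved [ips_fixed] entries go to addFixedPeer so the overlay keeps reconnecting to them. A hedged sketch of that dispatch with a stand-in peer finder (the two method names match the diff; the types are simplified to strings and everything else is illustrative):

#include <iostream>
#include <string>
#include <vector>

// Stand-in for the peer finder: fixed peers are connected persistently,
// fallback strings are only used to bootstrap an empty peer table.
struct PeerFinderStub
{
    void
    addFixedPeer(std::string const& name, std::vector<std::string> const& eps)
    {
        std::cout << "fixed    <- " << name << " (" << eps.size()
                  << " endpoints)\n";
    }
    void
    addFallbackStrings(
        std::string const& name,
        std::vector<std::string> const& strs)
    {
        std::cout << "fallback <- " << name << " (" << strs.size()
                  << " strings)\n";
    }
};

// Mirrors the lambda's decision above: [ips] -> fallback, [ips_fixed] -> fixed.
static void
addResolved(
    PeerFinderStub& pf,
    std::string const& name,
    std::vector<std::string> const& endpoints,
    bool fixed)
{
    if (endpoints.empty())
        return;
    std::string const base("config: ");
    if (fixed)
        pf.addFixedPeer(base + name, endpoints);
    else
        pf.addFallbackStrings(base + name, endpoints);
}

int main()
{
    PeerFinderStub pf;
    addResolved(pf, "bootstrap.example.net", {"203.0.113.5:21337"}, false);   // [ips]
    addResolved(pf, "validator.example.net", {"198.51.100.7:21337"}, true);   // [ips_fixed]
}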