mirror of https://github.com/Xahau/xahaud.git
synced 2026-02-22 14:52:26 +00:00

Compare commits: coverage ... ci/nix-wor (9 commits)

Commits (SHA1; author and date columns were empty in this view):
c44a19d115
c3e8039c5a
d9e7b50395
2f1f17f3e6
09e1132f9d
2da8608a7c
ab0004be8b
7bc0cbe2fd
a6cc563ca1
@@ -1,6 +0,0 @@
coverage:
  status:
    project:
      default:
        target: 60%
        threshold: 2%
29  .github/actions/xahau-ga-build/action.yml (vendored)
@@ -2,14 +2,6 @@ name: build
description: 'Builds the project with ccache integration'

inputs:
cmake-target:
description: 'CMake target to build'
required: false
default: all
cmake-args:
description: 'Additional CMake arguments'
required: false
default: null
generator:
description: 'CMake generator to use'
required: true
@@ -28,10 +20,6 @@ inputs:
description: 'C++ compiler to use'
required: false
default: ''
gcov:
description: 'Gcov to use'
required: false
default: ''
compiler-id:
description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)'
required: false
@@ -53,11 +41,10 @@ inputs:
required: false
default: 'dev'
stdlib:
description: 'C++ standard library to use (default = compiler default, e.g. GCC always uses libstdc++)'
description: 'C++ standard library to use'
required: true
type: choice
options:
- default
- libstdcxx
- libcxx
clang_gcc_toolchain:
@@ -100,6 +87,11 @@ runs:
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV

# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV

# Configure ccache settings AFTER cache restore (prevents stale cached config)
ccache --set-config=max_size=${{ inputs.ccache_max_size }}
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
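For illustration only, here is a minimal bash sketch of the ccache layout the hunk above sets up: the config file lives outside `CCACHE_DIR`, and `--set-config` runs only after the cache restore so a stale cached `ccache.conf` cannot override the workflow inputs. The concrete size and `hash_dir` values below are illustrative, not the action's exact inputs.

```
#!/usr/bin/env bash
set -euo pipefail

# Config lives under ~/.config/ccache, NOT inside CCACHE_DIR, so restoring a
# cached CCACHE_DIR between steps cannot swap out the config we set here.
mkdir -p "$HOME/.config/ccache"
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"

# ... cache restore of CCACHE_DIR would happen at this point ...

# Apply settings AFTER the restore, so the requested values always win.
ccache --set-config=max_size=100G     # stands in for inputs.ccache_max_size
ccache --set-config=hash_dir=false    # stands in for inputs.ccache_hash_dir
ccache --show-config | grep -E 'max_size|hash_dir'
```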
@@ -130,10 +122,6 @@ runs:
export CXX="${{ inputs.cxx }}"
fi

if [ -n "${{ inputs.gcov }}" ]; then
ln -sf /usr/bin/${{ inputs.gcov }} /usr/local/bin/gcov
fi

# Create wrapper toolchain that overlays ccache on top of Conan's toolchain
# This enables ccache for the main app build without affecting Conan dependency builds
if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
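The wrapper-toolchain comment above describes overlaying ccache on Conan's toolchain. A hedged sketch of one way to do that is below; the file names and the generated toolchain path are hypothetical stand-ins, not the action's actual script.

```
# Hypothetical overlay toolchain: include Conan's toolchain, then add ccache
# as the compiler launcher for the main application build only.
cat > ccache_overlay_toolchain.cmake <<'EOF'
include("${CMAKE_CURRENT_LIST_DIR}/build/generators/conan_toolchain.cmake")
set(CMAKE_C_COMPILER_LAUNCHER ccache)
set(CMAKE_CXX_COMPILER_LAUNCHER ccache)
EOF

cmake -G Ninja \
  -DCMAKE_TOOLCHAIN_FILE:FILEPATH=ccache_overlay_toolchain.cmake \
  -DCMAKE_BUILD_TYPE=Debug ..
```

Because Conan's own dependency builds use the unmodified toolchain, only the application build goes through ccache, which is the behaviour the comment describes.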
@@ -195,8 +183,7 @@ runs:
-G "${{ inputs.generator }}" \
${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
${{ inputs.cmake-args }}
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }}

- name: Show ccache config before build
if: inputs.ccache_enabled == 'true'
@@ -220,7 +207,7 @@ runs:
VERBOSE_FLAG="-- -v"
fi

cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) --target ${{ inputs.cmake-target }} ${VERBOSE_FLAG}
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG}

- name: Show ccache statistics
if: inputs.ccache_enabled == 'true'
7  .github/workflows/xahau-ga-macos.yml (vendored)
@@ -4,7 +4,8 @@ on:
push:
branches: ["dev", "candidate", "release"]
pull_request:
branches: ["dev", "candidate", "release"]
branches: ["**"]
types: [opened, synchronize, reopened, labeled, unlabeled]
schedule:
- cron: '0 0 * * *'

@@ -14,6 +15,10 @@ concurrency:

jobs:
test:
if: >
github.event_name != 'pull_request' ||
contains(fromJson('["dev","candidate","release"]'), github.base_ref) ||
contains(join(github.event.pull_request.labels.*.name, ','), 'ci-full-build')
strategy:
matrix:
generator:
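Both this workflow and the nix workflow below gate the job on the same `if:` expression. For reasoning about it, here is a rough bash rendering of that condition; the real check is evaluated by GitHub's expression engine, and the environment variables are stand-ins for the expression context.

```
EVENT_NAME="${EVENT_NAME:-pull_request}"   # github.event_name
BASE_REF="${BASE_REF:-feature/foo}"        # github.base_ref (PR target branch)
PR_LABELS="${PR_LABELS:-}"                 # comma-joined PR label names

should_run=false
if [ "$EVENT_NAME" != "pull_request" ]; then
  should_run=true                          # pushes, schedules, manual runs
elif [[ "$BASE_REF" =~ ^(dev|candidate|release)$ ]]; then
  should_run=true                          # PRs targeting the main branches
elif [[ ",$PR_LABELS," == *",ci-full-build,"* ]]; then
  should_run=true                          # opt-in via the ci-full-build label
fi
echo "run job: $should_run"
```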
155  .github/workflows/xahau-ga-nix.yml (vendored)
@@ -4,9 +4,16 @@ on:
push:
branches: ["dev", "candidate", "release"]
pull_request:
branches: ["dev", "candidate", "release"]
branches: ["**"]
types: [opened, synchronize, reopened, labeled, unlabeled]
schedule:
- cron: '0 0 * * *'
workflow_dispatch:
inputs:
full_matrix:
description: "Force full matrix (6 configs)"
required: false
default: "false"

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -14,6 +21,10 @@ concurrency:

jobs:
matrix-setup:
if: >
github.event_name != 'pull_request' ||
contains(fromJson('["dev","candidate","release"]'), github.base_ref) ||
contains(join(github.event.pull_request.labels.*.name, ','), 'ci-full-build')
runs-on: [self-hosted, generic, 20.04]
container: python:3-slim
outputs:
@@ -46,9 +57,8 @@ jobs:
"cc": "gcc-11",
"cxx": "g++-11",
"compiler_version": 11,
"stdlib": "default",
"configuration": "Debug",
"job_type": "build"
"stdlib": "libstdcxx",
"configuration": "Debug"
},
{
"compiler_id": "gcc-13-libstdcxx",
@@ -56,20 +66,8 @@ jobs:
"cc": "gcc-13",
"cxx": "g++-13",
"compiler_version": 13,
"stdlib": "default",
"configuration": "Debug",
"job_type": "build"
},
{
"compiler_id": "gcc-13-libstdcxx",
"compiler": "gcc",
"cc": "gcc-13",
"cxx": "g++-13",
"gcov": "gcov-13",
"compiler_version": 13,
"stdlib": "default",
"configuration": "Debug",
"job_type": "coverage"
"stdlib": "libstdcxx",
"configuration": "Debug"
},
{
"compiler_id": "clang-14-libstdcxx-gcc11",
@@ -79,8 +77,7 @@ jobs:
"compiler_version": 14,
"stdlib": "libstdcxx",
"clang_gcc_toolchain": 11,
"configuration": "Debug",
"job_type": "build"
"configuration": "Debug"
},
{
"compiler_id": "clang-16-libstdcxx-gcc13",
@@ -90,8 +87,7 @@ jobs:
"compiler_version": 16,
"stdlib": "libstdcxx",
"clang_gcc_toolchain": 13,
"configuration": "Debug",
"job_type": "build"
"configuration": "Debug"
},
{
"compiler_id": "clang-17-libcxx",
@@ -100,8 +96,7 @@ jobs:
"cxx": "clang++-17",
"compiler_version": 17,
"stdlib": "libcxx",
"configuration": "Debug",
"job_type": "build"
"configuration": "Debug"
},
{
# Clang 18 - testing if it's faster than Clang 17 with libc++
@@ -112,16 +107,14 @@ jobs:
"cxx": "clang++-18",
"compiler_version": 18,
"stdlib": "libcxx",
"configuration": "Debug",
"job_type": "build"
"configuration": "Debug"
}
]

# Minimal matrix for PRs and feature branches
minimal_matrix = [
full_matrix[1], # gcc-13 (middle-ground gcc)
full_matrix[2], # gcc-13 coverage
full_matrix[3] # clang-14 (mature, stable clang)
full_matrix[2] # clang-14 (mature, stable clang)
]

# Determine which matrix to use based on the target branch
@@ -129,6 +122,7 @@ jobs:
base_ref = "${{ github.base_ref }}" # For PRs, this is the target branch
event_name = "${{ github.event_name }}"
pr_title = """${{ steps.escape.outputs.title }}"""
pr_labels = """${{ join(github.event.pull_request.labels.*.name, ',') }}"""
pr_head_sha = "${{ github.event.pull_request.head.sha }}"

# Get commit message - for PRs, fetch via API since head_commit.message is empty
@@ -154,11 +148,24 @@ jobs:
print(f"Base ref: {base_ref}")
print(f"PR head SHA: {pr_head_sha}")
print(f"PR title: {pr_title}")
print(f"PR labels: {pr_labels}")
print(f"Commit message: {commit_message}")

# Check for override tags in commit message or PR title
force_full = "[ci-nix-full-matrix]" in commit_message or "[ci-nix-full-matrix]" in pr_title
# Manual trigger input to force full matrix.
manual_full = "${{ github.event.inputs.full_matrix || 'false' }}" == "true"

# Label/manual overrides, while preserving existing title/commit behavior.
force_full = (
manual_full
or "[ci-nix-full-matrix]" in commit_message
or "[ci-nix-full-matrix]" in pr_title
or ("ci-full-build" in pr_labels and "ci-nix-full-matrix" in pr_labels)
)
force_min = (
"ci-full-build" in pr_labels
)
print(f"Force full matrix: {force_full}")
print(f"Force min matrix: {force_min}")

# Check if this is targeting a main branch
# For PRs: check base_ref (target branch)
@@ -166,8 +173,11 @@ jobs:
main_branches = ["refs/heads/dev", "refs/heads/release", "refs/heads/candidate"]

if force_full:
# Override: always use full matrix if tag is present
# Override: always use full matrix if forced by manual input or label.
use_full = True
elif force_min:
# Override: always use minimal matrix if ci-full-build label is present.
use_full = False
elif event_name == "pull_request":
# For PRs, base_ref is just the branch name (e.g., "dev", not "refs/heads/dev")
# Check if the PR targets release or candidate (more critical branches)
@@ -179,21 +189,14 @@ jobs:
# Select the appropriate matrix
if use_full:
if force_full:
print(f"Using FULL matrix (7 configs) - forced by [ci-nix-full-matrix] tag")
print(f"Using FULL matrix (6 configs) - forced by [ci-nix-full-matrix] tag")
else:
print(f"Using FULL matrix (7 configs) - targeting main branch")
print(f"Using FULL matrix (6 configs) - targeting main branch")
matrix = full_matrix
else:
print(f"Using MINIMAL matrix (3 configs) - feature branch/PR")
print(f"Using MINIMAL matrix (2 configs) - feature branch/PR")
matrix = minimal_matrix

# Add runs_on based on job_type
for entry in matrix:
if entry.get("job_type") == "coverage":
entry["runs_on"] = '["self-hosted", "generic", 24.04]'
else:
entry["runs_on"] = '["self-hosted", "generic", 20.04]'

# Output the matrix as JSON
output = json.dumps({"include": matrix})
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
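The workflow implements the matrix choice in Python, as shown in the hunks above. Purely for illustration, the same decision condensed into bash looks roughly like this; `full.json` and `minimal.json` are hypothetical stand-ins for the inline matrix lists, and the override variables stand in for the computed `force_full`/`force_min` flags.

```
if [ "$FORCE_FULL" = "true" ]; then
  matrix_file=full.json       # [ci-nix-full-matrix] tag, label combo, or manual input
elif [ "$FORCE_MIN" = "true" ]; then
  matrix_file=minimal.json    # ci-full-build label alone keeps the nix matrix minimal
elif [ "$EVENT_NAME" != "pull_request" ] \
     && printf '%s' "$GITHUB_REF" | grep -qE 'refs/heads/(dev|candidate|release)$'; then
  matrix_file=full.json       # pushes to the main branches get every config
else
  matrix_file=minimal.json    # feature branches and ordinary PRs stay cheap
fi
# Emit the matrix in the {"include": [...]} shape the build job expects.
echo "matrix=$(jq -c '{include: .}' "$matrix_file")" >> "$GITHUB_OUTPUT"
```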
@@ -201,7 +204,7 @@ jobs:

build:
needs: matrix-setup
runs-on: ${{ fromJSON(matrix.runs_on) }}
runs-on: [self-hosted, generic, 20.04]
container:
image: ubuntu:24.04
volumes:
@@ -230,7 +233,7 @@ jobs:
apt-get install -y software-properties-common
add-apt-repository ppa:ubuntu-toolchain-r/test -y
apt-get update
apt-get install -y git python3 python-is-python3 pipx
apt-get install -y python3 python-is-python3 pipx
pipx ensurepath
apt-get install -y cmake ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
apt-get install -y perl # for openssl build
@@ -301,12 +304,6 @@ jobs:
pipx install "conan>=2.0,<3"
echo "$HOME/.local/bin" >> $GITHUB_PATH

# Install gcovr for coverage jobs
if [ "${{ matrix.job_type }}" = "coverage" ]; then
pipx install "gcovr>=7,<9"
apt-get install -y lcov
fi

- name: Check environment
run: |
echo "PATH:"
@@ -316,13 +313,6 @@ jobs:
which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
which ccache && ccache --version || echo "ccache not found"

# Check gcovr for coverage jobs
if [ "${{ matrix.job_type }}" = "coverage" ]; then
which gcov && gcov --version || echo "gcov not found"
which gcovr && gcovr --version || echo "gcovr not found"
fi

echo "---- Full Environment ----"
env

@@ -350,7 +340,6 @@ jobs:
gha_cache_enabled: 'false' # Disable caching for self hosted runner

- name: Build
if: matrix.job_type == 'build'
uses: ./.github/actions/xahau-ga-build
with:
generator: Ninja
@@ -365,26 +354,7 @@ jobs:
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
ccache_max_size: '100G'

- name: Build (Coverage)
if: matrix.job_type == 'coverage'
uses: ./.github/actions/xahau-ga-build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
build_dir: ${{ env.build_dir }}
cc: ${{ matrix.cc }}
cxx: ${{ matrix.cxx }}
gcov: ${{ matrix.gcov }}
compiler-id: ${{ matrix.compiler_id }}
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: ${{ matrix.stdlib }}
cmake-args: '-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_CXX_FLAGS="-O0" -DCMAKE_C_FLAGS="-O0"'
cmake-target: 'coverage'
ccache_max_size: '100G'

- name: Set artifact name
if: matrix.job_type == 'build'
id: set-artifact-name
run: |
ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
@@ -397,7 +367,6 @@ jobs:
ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"

- name: Run tests
if: matrix.job_type == 'build'
run: |
# Ensure the binary exists before trying to run
if [ -f "${{ env.build_dir }}/rippled" ]; then
@@ -406,33 +375,3 @@ jobs:
echo "Error: rippled executable not found in ${{ env.build_dir }}"
exit 1
fi

# Coverage-specific steps
- name: Move coverage report
if: matrix.job_type == 'coverage'
shell: bash
run: |
mv "${{ env.build_dir }}/coverage.xml" ./

- name: Archive coverage report
if: matrix.job_type == 'coverage'
uses: actions/upload-artifact@v4
with:
name: coverage.xml
path: coverage.xml
retention-days: 30

- name: Upload coverage report
if: matrix.job_type == 'coverage'
uses: wandalen/wretry.action/main@v3
with:
action: codecov/codecov-action@v4.3.0
with: |
files: coverage.xml
fail_ci_if_error: true
disable_search: true
verbose: true
plugin: noop
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 210000 # in milliseconds
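The upload step above wraps codecov-action in wandalen/wretry.action with `attempt_limit: 5` and `attempt_delay: 210000` ms. As a generic illustration of that retry pattern (not the action's implementation), the same behaviour in plain bash looks like this; `upload_coverage` is a placeholder for whatever flaky command needs retrying.

```
attempt_limit=5
attempt_delay_ms=210000
for attempt in $(seq 1 "$attempt_limit"); do
  if upload_coverage; then
    echo "upload succeeded on attempt $attempt"
    break
  fi
  if [ "$attempt" -eq "$attempt_limit" ]; then
    echo "upload failed after $attempt_limit attempts" >&2
    exit 1
  fi
  sleep $(( attempt_delay_ms / 1000 ))   # 210000 ms = 3.5 minutes between attempts
done
```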
1  .gitignore (vendored)
@@ -76,6 +76,7 @@ docs/html_doc

# Xcode
.DS_Store
*/build/*
!/docs/build/
*.pbxuser
!default.pbxuser
*.mode1v3
85  BUILD.md
@@ -10,7 +10,7 @@
## Branches

For a stable release, choose the `master` branch or one of the [tagged
releases](https://github.com/ripple/rippled/releases).
releases](https://github.com/Xahau/xahaud/releases).

```
git checkout master
@@ -36,7 +36,7 @@ git checkout develop
- [Conan 2.x](https://conan.io/downloads)
- [CMake 3.16](https://cmake.org/download/)

`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
`xahaud` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

| Compiler | Version |
@@ -46,11 +46,11 @@ The [minimum compiler versions][2] required are:
| Apple Clang | 13.1.6 |
| MSVC | 19.23 |

We don't recommend Windows for `rippled` production at this time. As of
January 2023, Ubuntu has the highest level of quality assurance, testing,
We don't recommend Windows for `xahaud` production at this time. As of
November 2025, Ubuntu has the highest level of quality assurance, testing,
and support.

Windows developers should use Visual Studio 2019. `rippled` isn't
Windows developers should use Visual Studio 2019. `xahaud` isn't
compatible with [Boost](https://www.boost.org/) 1.78 or 1.79, and Conan
can't build earlier Boost versions.

@@ -100,7 +100,7 @@ can't build earlier Boost versions.
An easy way to do that is to run the shortcut "x64 Native Tools Command
Prompt" for the version of Visual Studio that you have installed.

Windows developers must also build `rippled` and its dependencies for the x64
Windows developers must also build `xahaud` and its dependencies for the x64
architecture.

```
@@ -218,13 +218,14 @@ can't build earlier Boost versions.
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
```

**Note:** You can pass build options for `rippled` in this step.
**Note:** You can pass build options for `xahaud` in this step.

4. Build `rippled`.
4. Build `xahaud`.

For a single-configuration generator, it will build whatever configuration
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
you must pass the option `--config` to select the build configuration.
The output file is currently named 'rippled'.

Single-config generators:

@@ -239,7 +240,7 @@ can't build earlier Boost versions.
cmake --build . --config Debug
```

5. Test rippled.
5. Test xahaud.

Single-config generators:

@@ -254,76 +255,16 @@ can't build earlier Boost versions.
./Debug/rippled --unittest
```

The location of `rippled` in your build directory depends on your CMake
The location of `xahaud` in your build directory depends on your CMake
generator. Pass `--help` to see the rest of the command line options.


## Coverage report

The coverage report is intended for developers using compilers GCC
or Clang (including Apple Clang). It is generated by the build target `coverage`,
which is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or `-Dcoverage=ON` variable in `cmake`

Prerequisites for the coverage report:

- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
- `gcov` for GCC (installed with the compiler by default) or
- `llvm-cov` for Clang (installed with the compiler by default)
- `Debug` build type

A coverage report is created when the following steps are completed, in order:

1. `rippled` binary built with instrumentation data, enabled by the `coverage`
option mentioned above
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
to assemble both instrumentation data and the coverage capture data into a coverage report

The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`

The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
by setting the `coverage_format` variable in `cmake`. It is also possible
to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.

By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.

Example use with some cmake variables set:

```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```

After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:

- file named `coverage.`_extension_ , with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
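As a further illustration (not part of the BUILD.md diff itself), the `coverage_test` and `coverage_format` variables described above can be combined to narrow the report to one suite and change its output format; the suite name below is hypothetical, and any name accepted by `--unittest` should work.

```
cd .build
# Hypothetical test filter; substitute any suite accepted by ./rippled --unittest.
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON \
      -Dcoverage_test=ripple.app.SomeSuite \
      -Dcoverage_format=json-details \
      -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```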

## Options

| Option | Default Value | Description |
| --- | ---| ---|
| `assert` | OFF | Enable assertions.
| `reporting` | OFF | Build the reporting mode feature. |
| `coverage` | OFF | Prepare the coverage report. |
| `tests` | ON | Build tests. |
| `unity` | ON | Configure a unity build. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |

@@ -516,10 +457,6 @@ but it is more convenient to put them in a [profile][profile].

[1]: https://github.com/conan-io/conan-center-index/issues/13168
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html
[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html
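For illustration of the `san` option listed in the table above, a sanitized build can be configured and exercised as sketched below. This assumes a Clang toolchain (the table documents `san` as a Clang feature) and otherwise follows the same conan/cmake flow as the earlier examples; it is a sketch, not a prescribed recipe.

```
mkdir .build && cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dsan=address \
      -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --parallel
./rippled --unittest   # AddressSanitizer findings are reported on stderr at runtime
```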
@@ -1,440 +0,0 @@
|
||||
# Copyright (c) 2012 - 2017, Lars Bilke
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice, this
|
||||
# list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# CHANGES:
|
||||
#
|
||||
# 2012-01-31, Lars Bilke
|
||||
# - Enable Code Coverage
|
||||
#
|
||||
# 2013-09-17, Joakim Söderberg
|
||||
# - Added support for Clang.
|
||||
# - Some additional usage instructions.
|
||||
#
|
||||
# 2016-02-03, Lars Bilke
|
||||
# - Refactored functions to use named parameters
|
||||
#
|
||||
# 2017-06-02, Lars Bilke
|
||||
# - Merged with modified version from github.com/ufz/ogs
|
||||
#
|
||||
# 2019-05-06, Anatolii Kurotych
|
||||
# - Remove unnecessary --coverage flag
|
||||
#
|
||||
# 2019-12-13, FeRD (Frank Dana)
|
||||
# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor
|
||||
# of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments.
|
||||
# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY
|
||||
# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list
|
||||
# - Set lcov basedir with -b argument
|
||||
# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be
|
||||
# overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().)
|
||||
# - Delete output dir, .info file on 'make clean'
|
||||
# - Remove Python detection, since version mismatches will break gcovr
|
||||
# - Minor cleanup (lowercase function names, update examples...)
|
||||
#
|
||||
# 2019-12-19, FeRD (Frank Dana)
|
||||
# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets
|
||||
#
|
||||
# 2020-01-19, Bob Apthorpe
|
||||
# - Added gfortran support
|
||||
#
|
||||
# 2020-02-17, FeRD (Frank Dana)
|
||||
# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters
|
||||
# in EXCLUDEs, and remove manual escaping from gcovr targets
|
||||
#
|
||||
# 2021-01-19, Robin Mueller
|
||||
# - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run
|
||||
# - Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional
|
||||
# flags to the gcovr command
|
||||
#
|
||||
# 2020-05-04, Mihchael Davis
|
||||
# - Add -fprofile-abs-path to make gcno files contain absolute paths
|
||||
# - Fix BASE_DIRECTORY not working when defined
|
||||
# - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines
|
||||
#
|
||||
# 2021-05-10, Martin Stump
|
||||
# - Check if the generator is multi-config before warning about non-Debug builds
|
||||
#
|
||||
# 2022-02-22, Marko Wehle
|
||||
# - Change gcovr output from -o <filename> for --xml <filename> and --html <filename> output respectively.
|
||||
# This will allow for Multiple Output Formats at the same time by making use of GCOVR_ADDITIONAL_ARGS, e.g. GCOVR_ADDITIONAL_ARGS "--txt".
|
||||
#
|
||||
# 2022-09-28, Sebastian Mueller
|
||||
# - fix append_coverage_compiler_flags_to_target to correctly add flags
|
||||
# - replace "-fprofile-arcs -ftest-coverage" with "--coverage" (equivalent)
|
||||
#
|
||||
# 2024-01-04, Bronek Kozicki
|
||||
# - remove setup_target_for_coverage_lcov (slow) and setup_target_for_coverage_fastcov (no support for Clang)
|
||||
# - fix Clang support by adding find_program( ... llvm-cov )
|
||||
# - add Apple Clang support by adding execute_process( COMMAND xcrun -f llvm-cov ... )
|
||||
# - add CODE_COVERAGE_GCOV_TOOL to explicitly select gcov tool and disable find_program
|
||||
# - replace both functions setup_target_for_coverage_gcovr_* with a single setup_target_for_coverage_gcovr
|
||||
# - add support for all gcovr output formats
|
||||
#
|
||||
# USAGE:
|
||||
#
|
||||
# 1. Copy this file into your cmake modules path.
|
||||
#
|
||||
# 2. Add the following line to your CMakeLists.txt (best inside an if-condition
|
||||
# using a CMake option() to enable it just optionally):
|
||||
# include(CodeCoverage)
|
||||
#
|
||||
# 3. Append necessary compiler flags for all supported source files:
|
||||
# append_coverage_compiler_flags()
|
||||
# Or for specific target:
|
||||
# append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
|
||||
#
|
||||
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
|
||||
#
|
||||
# 4. If you need to exclude additional directories from the report, specify them
|
||||
# using full paths in the COVERAGE_EXCLUDES variable before calling
|
||||
# setup_target_for_coverage_*().
|
||||
# Example:
|
||||
# set(COVERAGE_EXCLUDES
|
||||
# '${PROJECT_SOURCE_DIR}/src/dir1/*'
|
||||
# '/path/to/my/src/dir2/*')
|
||||
# Or, use the EXCLUDE argument to setup_target_for_coverage_*().
|
||||
# Example:
|
||||
# setup_target_for_coverage_gcovr(
|
||||
# NAME coverage
|
||||
# EXECUTABLE testrunner
|
||||
# EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*")
|
||||
#
|
||||
# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set
|
||||
# relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR)
|
||||
# Example:
|
||||
# set(COVERAGE_EXCLUDES "dir1/*")
|
||||
# setup_target_for_coverage_gcovr(
|
||||
# NAME coverage
|
||||
# EXECUTABLE testrunner
|
||||
# FORMAT html-details
|
||||
# BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src"
|
||||
# EXCLUDE "dir2/*")
|
||||
#
|
||||
# 4.b If you need to pass specific options to gcovr, specify them in
|
||||
# GCOVR_ADDITIONAL_ARGS variable.
|
||||
# Example:
|
||||
# set (GCOVR_ADDITIONAL_ARGS --exclude-throw-branches --exclude-noncode-lines -s)
|
||||
# setup_target_for_coverage_gcovr(
|
||||
# NAME coverage
|
||||
# EXECUTABLE testrunner
|
||||
# EXCLUDE "src/dir1" "src/dir2")
|
||||
#
|
||||
# 5. Use the functions described below to create a custom make target which
|
||||
# runs your test executable and produces a code coverage report.
|
||||
#
|
||||
# 6. Build a Debug build:
|
||||
# cmake -DCMAKE_BUILD_TYPE=Debug ..
|
||||
# make
|
||||
# make my_coverage_target
|
||||
|
||||
include(CMakeParseArguments)
|
||||
|
||||
option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)
|
||||
|
||||
# Check prereqs
|
||||
find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)
|
||||
|
||||
if(DEFINED CODE_COVERAGE_GCOV_TOOL)
|
||||
set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
|
||||
elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
|
||||
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
|
||||
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
|
||||
if(APPLE)
|
||||
execute_process( COMMAND xcrun -f llvm-cov
|
||||
OUTPUT_VARIABLE LLVMCOV_PATH
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
)
|
||||
else()
|
||||
find_program( LLVMCOV_PATH llvm-cov )
|
||||
endif()
|
||||
if(LLVMCOV_PATH)
|
||||
set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
|
||||
endif()
|
||||
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
|
||||
find_program( GCOV_PATH gcov )
|
||||
set(GCOV_TOOL "${GCOV_PATH}")
|
||||
endif()
|
||||
|
||||
# Check supported compiler (Clang, GNU and Flang)
|
||||
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
|
||||
foreach(LANG ${LANGUAGES})
|
||||
if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
|
||||
if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
|
||||
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
|
||||
endif()
|
||||
elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
|
||||
AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
|
||||
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
|
||||
CACHE INTERNAL "")
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
|
||||
include(CheckCXXCompilerFlag)
|
||||
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
|
||||
if(HAVE_cxx_fprofile_abs_path)
|
||||
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
|
||||
endif()
|
||||
include(CheckCCompilerFlag)
|
||||
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
|
||||
if(HAVE_c_fprofile_abs_path)
|
||||
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(CMAKE_Fortran_FLAGS_COVERAGE
|
||||
${COVERAGE_COMPILER_FLAGS}
|
||||
CACHE STRING "Flags used by the Fortran compiler during coverage builds."
|
||||
FORCE )
|
||||
set(CMAKE_CXX_FLAGS_COVERAGE
|
||||
${COVERAGE_COMPILER_FLAGS}
|
||||
CACHE STRING "Flags used by the C++ compiler during coverage builds."
|
||||
FORCE )
|
||||
set(CMAKE_C_FLAGS_COVERAGE
|
||||
${COVERAGE_COMPILER_FLAGS}
|
||||
CACHE STRING "Flags used by the C compiler during coverage builds."
|
||||
FORCE )
|
||||
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
|
||||
""
|
||||
CACHE STRING "Flags used for linking binaries during coverage builds."
|
||||
FORCE )
|
||||
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
|
||||
""
|
||||
CACHE STRING "Flags used by the shared libraries linker during coverage builds."
|
||||
FORCE )
|
||||
mark_as_advanced(
|
||||
CMAKE_Fortran_FLAGS_COVERAGE
|
||||
CMAKE_CXX_FLAGS_COVERAGE
|
||||
CMAKE_C_FLAGS_COVERAGE
|
||||
CMAKE_EXE_LINKER_FLAGS_COVERAGE
|
||||
CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
|
||||
|
||||
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
||||
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
|
||||
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
|
||||
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
|
||||
|
||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
|
||||
link_libraries(gcov)
|
||||
endif()
|
||||
|
||||
# Defines a target for running and collection code coverage information
|
||||
# Builds dependencies, runs the given executable and outputs reports.
|
||||
# NOTE! The executable should always have a ZERO as exit code otherwise
|
||||
# the coverage generation will not complete.
|
||||
#
|
||||
# setup_target_for_coverage_gcovr(
|
||||
# NAME ctest_coverage # New target name
|
||||
# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
|
||||
# DEPENDENCIES executable_target # Dependencies to build first
|
||||
# BASE_DIRECTORY "../" # Base directory for report
|
||||
# # (defaults to PROJECT_SOURCE_DIR)
|
||||
# FORMAT "cobertura" # Output format, one of:
|
||||
# # xml cobertura sonarqube json-summary
|
||||
# # json-details coveralls csv txt
|
||||
# # html-single html-nested html-details
|
||||
# # (xml is an alias to cobertura;
|
||||
# # if no format is set, defaults to xml)
|
||||
# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative
|
||||
# # to BASE_DIRECTORY, with CMake 3.4+)
|
||||
# )
|
||||
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
|
||||
# GCVOR command.
|
||||
function(setup_target_for_coverage_gcovr)
|
||||
set(options NONE)
|
||||
set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
|
||||
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
|
||||
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
|
||||
|
||||
if(NOT GCOV_TOOL)
|
||||
message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
|
||||
endif()
|
||||
|
||||
if(NOT GCOVR_PATH)
|
||||
message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
|
||||
endif()
|
||||
|
||||
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
|
||||
if(DEFINED Coverage_BASE_DIRECTORY)
|
||||
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
|
||||
else()
|
||||
set(BASEDIR ${PROJECT_SOURCE_DIR})
|
||||
endif()
|
||||
|
||||
if(NOT DEFINED Coverage_FORMAT)
|
||||
set(Coverage_FORMAT xml)
|
||||
endif()
|
||||
|
||||
if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
|
||||
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
|
||||
else()
|
||||
if((Coverage_FORMAT STREQUAL "html-details")
|
||||
OR (Coverage_FORMAT STREQUAL "html-nested"))
|
||||
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
|
||||
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
|
||||
elseif(Coverage_FORMAT STREQUAL "html-single")
|
||||
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
|
||||
elseif((Coverage_FORMAT STREQUAL "json-summary")
|
||||
OR (Coverage_FORMAT STREQUAL "json-details")
|
||||
OR (Coverage_FORMAT STREQUAL "coveralls"))
|
||||
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
|
||||
elseif(Coverage_FORMAT STREQUAL "txt")
|
||||
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
|
||||
elseif(Coverage_FORMAT STREQUAL "csv")
|
||||
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
|
||||
else()
|
||||
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if((Coverage_FORMAT STREQUAL "cobertura")
|
||||
OR (Coverage_FORMAT STREQUAL "xml"))
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
|
||||
set(Coverage_FORMAT cobertura) # overwrite xml
|
||||
elseif(Coverage_FORMAT STREQUAL "sonarqube")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
|
||||
elseif(Coverage_FORMAT STREQUAL "json-summary")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
|
||||
elseif(Coverage_FORMAT STREQUAL "json-details")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
|
||||
elseif(Coverage_FORMAT STREQUAL "coveralls")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
|
||||
elseif(Coverage_FORMAT STREQUAL "csv")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
|
||||
elseif(Coverage_FORMAT STREQUAL "txt")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
|
||||
elseif(Coverage_FORMAT STREQUAL "html-single")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
|
||||
elseif(Coverage_FORMAT STREQUAL "html-nested")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
|
||||
elseif(Coverage_FORMAT STREQUAL "html-details")
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
|
||||
else()
|
||||
message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
|
||||
endif()
|
||||
|
||||
# Collect excludes (CMake 3.4+: Also compute absolute paths)
|
||||
set(GCOVR_EXCLUDES "")
|
||||
foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
|
||||
if(CMAKE_VERSION VERSION_GREATER 3.4)
|
||||
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
|
||||
endif()
|
||||
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
|
||||
endforeach()
|
||||
list(REMOVE_DUPLICATES GCOVR_EXCLUDES)
|
||||
|
||||
# Combine excludes to several -e arguments
|
||||
set(GCOVR_EXCLUDE_ARGS "")
|
||||
foreach(EXCLUDE ${GCOVR_EXCLUDES})
|
||||
list(APPEND GCOVR_EXCLUDE_ARGS "-e")
|
||||
list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
|
||||
endforeach()
|
||||
|
||||
# Set up commands which will be run to generate coverage data
|
||||
# Run tests
|
||||
set(GCOVR_EXEC_TESTS_CMD
|
||||
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
|
||||
)
|
||||
|
||||
# Create folder
|
||||
if(DEFINED GCOVR_CREATE_FOLDER)
|
||||
set(GCOVR_FOLDER_CMD
|
||||
${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
|
||||
else()
|
||||
set(GCOVR_FOLDER_CMD echo) # dummy
|
||||
endif()
|
||||
|
||||
# Running gcovr
|
||||
set(GCOVR_CMD
|
||||
${GCOVR_PATH}
|
||||
--gcov-executable ${GCOV_TOOL}
|
||||
--gcov-ignore-parse-errors=negative_hits.warn_once_per_file
|
||||
-r ${BASEDIR}
|
||||
${GCOVR_ADDITIONAL_ARGS}
|
||||
${GCOVR_EXCLUDE_ARGS}
|
||||
--object-directory=${PROJECT_BINARY_DIR}
|
||||
)
|
||||
|
||||
if(CODE_COVERAGE_VERBOSE)
|
||||
message(STATUS "Executed command report")
|
||||
|
||||
message(STATUS "Command to run tests: ")
|
||||
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
|
||||
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
|
||||
|
||||
if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
|
||||
message(STATUS "Command to create a folder: ")
|
||||
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
|
||||
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
|
||||
endif()
|
||||
|
||||
message(STATUS "Command to generate gcovr coverage data: ")
|
||||
string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
|
||||
message(STATUS "${GCOVR_CMD_SPACED}")
|
||||
endif()
|
||||
|
||||
add_custom_target(${Coverage_NAME}
|
||||
COMMAND ${GCOVR_EXEC_TESTS_CMD}
|
||||
COMMAND ${GCOVR_FOLDER_CMD}
|
||||
COMMAND ${GCOVR_CMD}
|
||||
|
||||
BYPRODUCTS ${GCOVR_OUTPUT_FILE}
|
||||
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
|
||||
DEPENDS ${Coverage_DEPENDENCIES}
|
||||
VERBATIM # Protect arguments to commands
|
||||
COMMENT "Running gcovr to produce code coverage report."
|
||||
)
|
||||
|
||||
# Show info where to find the report
|
||||
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
|
||||
COMMAND ;
|
||||
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
|
||||
)
|
||||
endfunction() # setup_target_for_coverage_gcovr
|
||||
|
||||
function(append_coverage_compiler_flags)
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
|
||||
endfunction() # append_coverage_compiler_flags
|
||||
|
||||
# Setup coverage for specific library
|
||||
function(append_coverage_compiler_flags_to_target name)
|
||||
separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
|
||||
target_compile_options(${name} PRIVATE ${_flag_list})
|
||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
|
||||
target_link_libraries(${name} PRIVATE gcov)
|
||||
endif()
|
||||
endfunction()
|
||||
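The `setup_target_for_coverage_gcovr` function above ultimately runs the test executable and then a single `gcovr` invocation assembled from `GCOV_TOOL`, the exclude list, and the chosen format. As a rough, illustrative rendering of that command line for the cobertura/xml case (paths, gcov version, and job count are placeholders):

```
cd .build                                   # PROJECT_BINARY_DIR
./rippled --unittest --unittest-jobs 4 --quiet --unittest-log   # populate .gcda capture data
gcovr --gcov-executable gcov-13 \
      --gcov-ignore-parse-errors=negative_hits.warn_once_per_file \
      -r .. \
      --exclude-throw-branches --exclude-noncode-lines --exclude-unreachable-branches -s \
      -e ../src/test \
      --cobertura coverage.xml --cobertura-pretty \
      --object-directory=.
```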
@@ -2,37 +2,97 @@
coverage report target
#]===================================================================]

if(NOT coverage)
message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
endif()
if (coverage)
if (is_clang)
if (APPLE)
execute_process (COMMAND xcrun -f llvm-profdata
OUTPUT_VARIABLE LLVM_PROFDATA
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_PROFDATA llvm-profdata)
endif ()
if (NOT LLVM_PROFDATA)
message (WARNING "unable to find llvm-profdata - skipping coverage_report target")
endif ()

if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
return()
endif()
if (APPLE)
execute_process (COMMAND xcrun -f llvm-cov
OUTPUT_VARIABLE LLVM_COV
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_COV llvm-cov)
endif ()
if (NOT LLVM_COV)
message (WARNING "unable to find llvm-cov - skipping coverage_report target")
endif ()

include(CodeCoverage)
set (extract_pattern "")
if (coverage_core_only)
set (extract_pattern "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/")
endif ()

# The instructions for these commands come from the `CodeCoverage` module,
# which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
# then locally changed (see CHANGES: section in `CodeCoverage.cmake`)
if (LLVM_COV AND LLVM_PROFDATA)
add_custom_target (coverage_report
USES_TERMINAL
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage - results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
COMMAND ${LLVM_PROFDATA}
merge -sparse default.profraw -o rip.profdata
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LLVM_COV}
report -instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
# generate html report
COMMAND ${LLVM_COV}
show -format=html -output-dir=${CMAKE_BINARY_DIR}/coverage
-instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
BYPRODUCTS coverage/index.html)
endif ()
elseif (is_gcc)
find_program (LCOV lcov)
if (NOT LCOV)
message (WARNING "unable to find lcov - skipping coverage_report target")
endif ()

set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
separate_arguments(GCOVR_ADDITIONAL_ARGS)
endif()
find_program (GENHTML genhtml)
if (NOT GENHTML)
message (WARNING "unable to find genhtml - skipping coverage_report target")
endif ()

list(APPEND GCOVR_ADDITIONAL_ARGS
--exclude-throw-branches
--exclude-noncode-lines
--exclude-unreachable-branches -s
-j ${coverage_test_parallelism})
set (extract_pattern "*")
if (coverage_core_only)
set (extract_pattern "*/src/ripple/*")
endif ()

setup_target_for_coverage_gcovr(
NAME coverage
FORMAT ${coverage_format}
EXECUTABLE rippled
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
EXCLUDE "src/test" "${CMAKE_BINARY_DIR}/proto_gen" "${CMAKE_BINARY_DIR}/proto_gen_grpc"
DEPENDENCIES rippled
)
if (LCOV AND GENHTML)
add_custom_target (coverage_report
USES_TERMINAL
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage- results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
# create baseline info file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -i -o baseline.info
| grep -v "ignoring data for external file"
# run tests
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests for coverage report."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
# Create test coverage data file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -o tests.info
| grep -v "ignoring data for external file"
# Combine baseline and test coverage data
COMMAND ${LCOV}
-a baseline.info -a tests.info -o lcov-all.info
# extract our files
COMMAND ${LCOV}
-e lcov-all.info "${extract_pattern}" -o lcov.info
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LCOV} --summary lcov.info
# generate HTML report
COMMAND ${GENHTML}
-o ${CMAKE_BINARY_DIR}/coverage lcov.info
BYPRODUCTS coverage/index.html)
endif ()
endif ()
endif ()
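For the Clang branch of the `coverage_report` target above, the chain of commands can also be run by hand from the build directory. The sketch below mirrors that recipe one-to-one, assuming the binary was built with Clang's source-based coverage flags (`-fprofile-instr-generate -fcoverage-mapping`) and that `coverage_core_only` restricts the report to `src/ripple/`.

```
# Manual version of the Clang coverage_report recipe (run from the build directory).
./rippled --unittest --quiet --unittest-log          # writes default.profraw
llvm-profdata merge -sparse default.profraw -o rip.profdata
llvm-cov report -instr-profile=rip.profdata ./rippled ../src/ripple/
llvm-cov show -format=html -output-dir=coverage \
         -instr-profile=rip.profdata ./rippled ../src/ripple/
```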
|
||||
@@ -23,15 +23,15 @@ target_compile_options (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

target_link_libraries (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||
@@ -2,8 +2,6 @@
convenience variables and sanity checks
#]===================================================================]

include(ProcessorCount)

if (NOT ep_procs)
ProcessorCount(ep_procs)
if (ep_procs GREATER 1)
|
||||
|
||||
@@ -2,129 +2,121 @@
|
||||
declare user options/settings
|
||||
#]===================================================================]
|
||||
|
||||
include(ProcessorCount)
|
||||
option (assert "Enables asserts, even in release builds" OFF)
|
||||
|
||||
ProcessorCount(PROCESSOR_COUNT)
|
||||
option (reporting "Build rippled with reporting mode enabled" OFF)
|
||||
|
||||
option(assert "Enables asserts, even in release builds" OFF)
|
||||
option (tests "Build tests" ON)
|
||||
|
||||
option(reporting "Build rippled with reporting mode enabled" OFF)
|
||||
|
||||
option(tests "Build tests" ON)
|
||||
|
||||
option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
|
||||
if(unity)
|
||||
if(NOT is_ci)
|
||||
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
|
||||
endif()
|
||||
endif()
|
||||
if(is_gcc OR is_clang)
|
||||
option(coverage "Generates coverage info." OFF)
|
||||
option(profile "Add profiling flags" OFF)
|
||||
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
|
||||
"Unit tests parallelism for the purpose of coverage report.")
|
||||
set(coverage_format "html-details" CACHE STRING
|
||||
"Output format of the coverage report.")
|
||||
set(coverage_extra_args "" CACHE STRING
|
||||
"Additional arguments to pass to gcovr.")
|
||||
set(coverage_test "" CACHE STRING
|
||||
option (unity "Creates a build using UNITY support in cmake. This is the default" ON)
|
||||
if (unity)
|
||||
if (NOT is_ci)
|
||||
set (CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
|
||||
endif ()
|
||||
endif ()
|
||||
if (is_gcc OR is_clang)
|
||||
option (coverage "Generates coverage info." OFF)
|
||||
option (profile "Add profiling flags" OFF)
|
||||
set (coverage_test "" CACHE STRING
|
||||
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
|
||||
if(coverage_test AND NOT coverage)
|
||||
set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
option(wextra "compile with extra gcc/clang warnings enabled" ON)
|
||||
else()
|
||||
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
if(is_linux)
|
||||
option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
|
||||
option(static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||
option(perf "Enables flags that assist with perf recording" OFF)
|
||||
option(use_gold "enables detection of gold (binutils) linker" ON)
|
||||
option(use_mold "enables detection of mold (binutils) linker" ON)
|
||||
else()
|
||||
if (coverage_test AND NOT coverage)
|
||||
set (coverage ON CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif ()
|
||||
option (coverage_core_only
|
||||
"Include only src/ripple files when generating coverage report. \
|
||||
Set to OFF to include all sources in coverage report."
|
||||
ON)
|
||||
option (wextra "compile with extra gcc/clang warnings enabled" ON)
|
||||
else ()
|
||||
set (profile OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set (coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set (wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif ()
if (is_linux)
option (BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
option (static "link protobuf, openssl, libc++, and boost statically" ON)
option (perf "Enables flags that assist with perf recording" OFF)
option (use_gold "enables detection of gold (binutils) linker" ON)
else ()
# we are not ready to allow shared-libs on windows because it would require
# export declarations. On macos it's more feasible, but static openssl
# produces odd linker errors, thus we disable shared lib builds for now.
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()
if(is_clang)
option(use_lld "enables detection of lld linker" ON)
else()
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
set (BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set (static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set (perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set (use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
endif ()
if (is_clang)
option (use_lld "enables detection of lld linker" ON)
else ()
set (use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif ()
option (jemalloc "Enables jemalloc for heap profiling" OFF)
option (werr "treat warnings as errors" OFF)
option (local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc
option (local_grpc
"Force a local build of gRPC instead of looking for an installed version." OFF)

# this one is a string and therefore can't be an option
set(san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if(san)
string(TOLOWER ${san} san)
set(SAN_FLAG "-fsanitize=${san}")
set(SAN_LIB "")
if(is_gcc)
if(san STREQUAL "address")
set(SAN_LIB "asan")
elseif(san STREQUAL "thread")
set(SAN_LIB "tsan")
elseif(san STREQUAL "memory")
set(SAN_LIB "msan")
elseif(san STREQUAL "undefined")
set(SAN_LIB "ubsan")
endif()
endif()
set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if(NOT COMPILER_SUPPORTS_SAN)
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif()
endif()
set(container_label "" CACHE STRING "tag to use for package building containers")
option(packages_only
set (san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property (CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if (san)
string (TOLOWER ${san} san)
set (SAN_FLAG "-fsanitize=${san}")
set (SAN_LIB "")
if (is_gcc)
if (san STREQUAL "address")
set (SAN_LIB "asan")
elseif (san STREQUAL "thread")
set (SAN_LIB "tsan")
elseif (san STREQUAL "memory")
set (SAN_LIB "msan")
elseif (san STREQUAL "undefined")
set (SAN_LIB "ubsan")
endif ()
endif ()
set (_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set (CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag (${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set (CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if (NOT COMPILER_SUPPORTS_SAN)
message (FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif ()
endif ()
set (container_label "" CACHE STRING "tag to use for package building containers")
option (packages_only
"ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option(have_package_container
option (have_package_container
"Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)

# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
option (beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table"
OFF)
option(single_io_service_thread
option (single_io_service_thread
"Restricts the number of threads calling io_service::run to one. \
This can be useful when debugging."
OFF)
option(boost_show_deprecated
option (boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
OFF)
option(beast_hashers
option (beast_hashers
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
OFF)

if(WIN32)
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()
if(coverage)
message(STATUS "coverage build requested - forcing Debug build")
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif()
if (WIN32)
option (beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else ()
set (beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif ()
if (coverage)
message (STATUS "coverage build requested - forcing Debug build")
set (CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif ()
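For reference, the `san` and `coverage` cache entries above are chosen at configure time. A minimal sketch of how they might be passed, assuming an out-of-source build directory; the exact invocation is illustrative, not prescribed by this diff:

```
# illustrative only: request AddressSanitizer instrumentation (adds -fsanitize=address via the san cache string)
cmake -B build -Dsan=address
# illustrative only: request a coverage build (forces CMAKE_BUILD_TYPE=Debug, per the block above)
cmake -B build -Dcoverage=ON
```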

@@ -64,6 +64,7 @@ include (CheckCXXCompilerFlag)
include (FetchContent)
include (ExternalProject)
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
include (ProcessorCount)
if (target)
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()
@@ -142,14 +143,11 @@ target_link_libraries(ripple_libs INTERFACE
SQLite::SQLite3
)

if(coverage)
include(RippledCov)
endif()

###

include(RippledCore)
include(RippledInstall)
include(RippledCov)
include(RippledMultiConfig)
include(RippledDocs)
include(RippledValidatorKeys)

@@ -1710,27 +1710,15 @@ secure_gateway = 127.0.0.1
# when the node has approximately two times the "online_delete" value of
# ledgers. No external administrative command is required to initiate
# deletion.
[ledger_history]
256

[node_db]
type=NuDB
path=/opt/xahaud/db/nudb
online_delete=512
online_delete=256
advisory_delete=0
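Worked example of the deletion behaviour described in the comment above: with `online_delete=256`, the server begins pruning automatically once roughly twice that many ledgers (about 512) have accumulated, so the `[ledger_history]` value of 256 stays within what is kept online and no operator command is needed.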

# This is the persistent datastore for shards. It is important for the health
# of the Xahau Network that xahaud operators shard as much as practical.
# NuDB requires SSD storage. Helpful information can be found at
# https://xrpl.org/history-sharding.html
#[shard_db]
#path=/opt/xahaud/db/shards/nudb
#max_historical_shards=50
#
# This optional section can be configured with a list
# of paths to use for storing historical shards. Each
# path must correspond to a unique filesystem.
#[historical_shard_paths]
#/path/1
#/path/2

[database_path]
/opt/xahaud/db

@@ -1755,6 +1743,11 @@ time.apple.com
time.nist.gov
pool.ntp.org

# Use the following [ips] section for the main network:
[ips]
bacab.alloy.ee 21337
hubs.xahau.as16089.net 21337

# To use the Xahau Test Network
# (see https://xahau.network/docs/infrastructure/installing-xahaud),
# use the following [ips] section:
@@ -1782,6 +1775,13 @@ validators-xahau.txt
[ssl_verify]
1

# Define which network xahaud is connecting to
# 21337 for the Main Xahau Network
# 21338 for the Test Xahau Network
[network_id]
21337
# 21338


# To run in Reporting Mode, uncomment this section and fill in the appropriate
# connection info for one or more ETL sources.

4
docs/build/environment.md
vendored
@@ -11,11 +11,11 @@ platforms: Linux, macOS, or Windows.
Package ecosystems vary across Linux distributions,
so there is no one set of instructions that will work for every Linux user.
These instructions are written for Ubuntu 22.04.
They are largely copied from the [script][1] used to configure our Docker
They are largely copied from the [script][1] used to configure a Docker
container for continuous integration.
That script handles many more responsibilities.
These instructions are just the bare minimum to build one configuration of
rippled.
xahaud.
You can check that codebase for other Linux distributions and versions.
If you cannot find yours there,
then we hope that these instructions can at least guide you in the right

177
docs/build/install.md
vendored
@@ -1,159 +1,30 @@
This document contains instructions for installing rippled.
The APT package manager is common on Debian-based Linux distributions like
Ubuntu,
while the YUM package manager is common on Red Hat-based Linux distributions
like CentOS.
Installing from source is an option for all platforms,
and the only supported option for installing custom builds.
Comprehensive instructions for installing and running xahaud are available on the [https://Xahau.Network](https://xahau.network/docs/infrastructure/installing-xahaud) documentation website.

## Create the Runtime Environment
xahaud can be [built from source](../../BUILD.md) or installed using the binary files available from [https://build.xahau.tech](https://build.xahau.tech/). After obtaining a working xahaud binary, users will need to provide a suitable runtime environment. The following setup can be used for Linux or Docker environments.

## From source

From a source build, you can install rippled and libxrpl using CMake's
`--install` mode:
1. Create or download two configuration files: the main xahaud.cfg configuration file and a second validators-xahau.txt file defining which validators or UNL list publishers are trusted. The default location for these files in this xahaud repository is `cfg/`.
2. Provide a directory structure that is congruent with the contents of xahaud.cfg. This will include a location for logfiles, such as `/var/log/xahaud/`, as well as database files, `/opt/xahaud/db/`. Configuration files are, by default, sourced from `/etc/xahaud/`. It is possible to provide a symbolic link if users wish to store configuration files elsewhere.
3. If desired, create a xahaud user and group, and change ownership of the binary and directories (a sketch of these two steps follows the list). Servers used for validating nodes should use the most restrictive permissions possible for `xahaud.cfg`, as the validation token is stored therein.
4. If desired, create a systemd service file, `/etc/systemd/system/xahaud.service`, enabling xahaud to run as a daemon. Alternatively, run: `/path/to/binary/xahaud --conf=/path/to/xahaud.cfg`.
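A minimal sketch of steps 2 and 3, using the paths named above; the service account name and the exact permissions are assumptions rather than requirements:

```
# illustrative sketch only
sudo useradd --system --no-create-home xahaud             # assumed service account name
sudo mkdir -p /etc/xahaud /var/log/xahaud /opt/xahaud/db
sudo cp xahaud.cfg validators-xahau.txt /etc/xahaud/
sudo chown -R xahaud:xahaud /var/log/xahaud /opt/xahaud
sudo chmod 600 /etc/xahaud/xahaud.cfg                     # restrict access to the validation token
```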

## Example systemd Service File
```
cmake --install . --prefix /opt/local
[Unit]
Description=Xahaud Daemon
After=network-online.target
Wants=network-online.target

[Service]
Type=simple
ExecStart=/path/to/xahaud --silent --conf /path/to/xahaud.cfg
Restart=on-failure
User=xahaud
Group=xahaud
LimitNOFILE=65536

[Install]
WantedBy=multi-user.target
```

The default [prefix][1] is typically `/usr/local` on Linux and macOS and
`C:/Program Files/rippled` on Windows.

[1]: https://cmake.org/cmake/help/latest/variable/CMAKE_INSTALL_PREFIX.html


## With the APT package manager

1. Update repositories:

sudo apt update -y

2. Install utilities:

sudo apt install -y apt-transport-https ca-certificates wget gnupg

3. Add Ripple's package-signing GPG key to your list of trusted keys:

sudo mkdir /usr/local/share/keyrings/
wget -q -O - "https://repos.ripple.com/repos/api/gpg/key/public" | gpg --dearmor > ripple-key.gpg
sudo mv ripple-key.gpg /usr/local/share/keyrings


4. Check the fingerprint of the newly-added key:

gpg /usr/local/share/keyrings/ripple-key.gpg

The output should include an entry for Ripple such as the following:

gpg: WARNING: no command supplied. Trying to guess what you mean ...
pub rsa3072 2019-02-14 [SC] [expires: 2026-02-17]
C0010EC205B35A3310DC90DE395F97FFCCAFD9A2
uid TechOps Team at Ripple <techops+rippled@ripple.com>
sub rsa3072 2019-02-14 [E] [expires: 2026-02-17]


In particular, make sure that the fingerprint matches. (In the above example, the fingerprint is on the third line, starting with `C001`.)

4. Add the appropriate Ripple repository for your operating system version:

echo "deb [signed-by=/usr/local/share/keyrings/ripple-key.gpg] https://repos.ripple.com/repos/rippled-deb focal stable" | \
sudo tee -a /etc/apt/sources.list.d/ripple.list

The above example is appropriate for **Ubuntu 20.04 Focal Fossa**. For other operating systems, replace the word `focal` with one of the following:

- `jammy` for **Ubuntu 22.04 Jammy Jellyfish**
- `bionic` for **Ubuntu 18.04 Bionic Beaver**
- `bullseye` for **Debian 11 Bullseye**
- `buster` for **Debian 10 Buster**

If you want access to development or pre-release versions of `rippled`, use one of the following instead of `stable`:

- `unstable` - Pre-release builds ([`release` branch](https://github.com/ripple/rippled/tree/release))
- `nightly` - Experimental/development builds ([`develop` branch](https://github.com/ripple/rippled/tree/develop))

**Warning:** Unstable and nightly builds may be broken at any time. Do not use these builds for production servers.

5. Fetch the Ripple repository.

sudo apt -y update

6. Install the `rippled` software package:

sudo apt -y install rippled

7. Check the status of the `rippled` service:

systemctl status rippled.service

The `rippled` service should start automatically. If not, you can start it manually:

sudo systemctl start rippled.service

8. Optional: allow `rippled` to bind to privileged ports.

This allows you to serve incoming API requests on port 80 or 443. (If you want to do so, you must also update the config file's port settings.)

sudo setcap 'cap_net_bind_service=+ep' /opt/ripple/bin/rippled


## With the YUM package manager

1. Install the Ripple RPM repository:

Choose the appropriate RPM repository for the stability of releases you want:

- `stable` for the latest production release (`master` branch)
- `unstable` for pre-release builds (`release` branch)
- `nightly` for experimental/development builds (`develop` branch)

*Stable*

cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
[ripple-stable]
name=XRP Ledger Packages
enabled=1
gpgcheck=0
repo_gpgcheck=1
baseurl=https://repos.ripple.com/repos/rippled-rpm/stable/
gpgkey=https://repos.ripple.com/repos/rippled-rpm/stable/repodata/repomd.xml.key
REPOFILE

*Unstable*

cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
[ripple-unstable]
name=XRP Ledger Packages
enabled=1
gpgcheck=0
repo_gpgcheck=1
baseurl=https://repos.ripple.com/repos/rippled-rpm/unstable/
gpgkey=https://repos.ripple.com/repos/rippled-rpm/unstable/repodata/repomd.xml.key
REPOFILE

*Nightly*

cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
[ripple-nightly]
name=XRP Ledger Packages
enabled=1
gpgcheck=0
repo_gpgcheck=1
baseurl=https://repos.ripple.com/repos/rippled-rpm/nightly/
gpgkey=https://repos.ripple.com/repos/rippled-rpm/nightly/repodata/repomd.xml.key
REPOFILE

2. Fetch the latest repo updates:

sudo yum -y update

3. Install the new `rippled` package:

sudo yum install -y rippled

4. Configure the `rippled` service to start on boot:

sudo systemctl enable rippled.service

5. Start the `rippled` service:

sudo systemctl start rippled.service
After the systemd service file is installed, reload systemd with `systemctl daemon-reload`. xahaud can then be enabled and started with `systemctl enable --now xahaud`.
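For example, following the pattern of the other service commands in this document:

```
sudo systemctl daemon-reload
sudo systemctl enable --now xahaud
```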

@@ -25,6 +25,7 @@
#include <ripple/app/misc/AmendmentTable.h>
#include <ripple/app/misc/NetworkOPs.h>
#include <ripple/app/tx/impl/Change.h>
#include <ripple/app/tx/impl/SetHook.h>
#include <ripple/app/tx/impl/SetSignerList.h>
#include <ripple/app/tx/impl/XahauGenesis.h>
#include <ripple/basics/Log.h>
@@ -584,10 +585,6 @@ Change::activateXahauGenesis()
SetSignerList::removeFromLedger(ctx_.app, sb, accid, j_);

// Step 4: install genesis hooks
sle->setFieldU32(
sfOwnerCount, sle->getFieldU32(sfOwnerCount) + genesis_hooks.size());
sb.update(sle);

if (sb.exists(keylet::hook(accid)))
{
JLOG(j_.warn()) << "featureXahauGenesis genesis account already has "
@@ -598,6 +595,7 @@ Change::activateXahauGenesis()
{
ripple::STArray hooks{sfHooks, static_cast<int>(genesis_hooks.size())};
int hookCount = 0;
uint32_t hookReserve = 0;

for (auto const& [hookOn, wasmBytes, params] : genesis_hooks)
{
@@ -703,8 +701,14 @@ Change::activateXahauGenesis()
}

hooks.push_back(hookObj);

hookReserve += SetHook::computeHookReserve(hookObj);
}

sle->setFieldU32(
sfOwnerCount, sle->getFieldU32(sfOwnerCount) + hookReserve);
sb.update(sle);

auto sle = std::make_shared<SLE>(keylet::hook(accid));
sle->setFieldArray(sfHooks, hooks);
sle->setAccountID(sfAccount, accid);
@@ -745,6 +749,8 @@ Change::activateXahauGenesis()
ripple::STArray hooks{sfHooks, 1};
STObject hookObj{sfHook};
hookObj.setFieldH256(sfHookHash, governHash);

uint32_t hookReserve = 0;
// parameters
{
std::vector<STObject> vec;
@@ -760,6 +766,7 @@ Change::activateXahauGenesis()
sfHookParameters, STArray(vec, sfHookParameters));
}

hookReserve += SetHook::computeHookReserve(hookObj);
hooks.push_back(hookObj);

auto sle = std::make_shared<SLE>(hookKL);
@@ -786,7 +793,8 @@ Change::activateXahauGenesis()

sle->setAccountID(sfRegularKey, noAccount());
sle->setFieldU32(sfFlags, lsfDisableMaster);
sle->setFieldU32(sfOwnerCount, sle->getFieldU32(sfOwnerCount) + 1);
sle->setFieldU32(
sfOwnerCount, sle->getFieldU32(sfOwnerCount) + hookReserve);
sb.update(sle);
}
}

@@ -1221,6 +1221,29 @@ updateHookParameters(
return tesSUCCESS;
}

/**
* Compute the reserve required for a hook object.
* @param hookObj The hook object to compute the reserve for (not the
* transaction-level field; use the Hook object inside the ltHook object).
* @return The reserve required for the hook object.
*/
uint32_t
SetHook::computeHookReserve(STObject const& hookObj)
{
if (!hookObj.isFieldPresent(sfHookHash))
return 0;

int reserve{1};

if (hookObj.isFieldPresent(sfHookParameters))
reserve += hookObj.getFieldArray(sfHookParameters).size();

if (hookObj.isFieldPresent(sfHookGrants))
reserve += hookObj.getFieldArray(sfHookGrants).size();

return reserve;
};
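Worked example of the reserve arithmetic above: a hook entry carrying an sfHookHash with two HookParameters and one HookGrant yields a reserve of 1 + 2 + 1 = 4, while an empty slot (no sfHookHash) contributes 0.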

struct KeyletComparator
{
bool
@@ -1972,28 +1995,14 @@ SetHook::setHook()
int oldHookReserve = 0;
int newHookReserve = 0;

auto const computeHookReserve = [](STObject const& hookObj) -> int {
if (!hookObj.isFieldPresent(sfHookHash))
return 0;

int reserve{1};

if (hookObj.isFieldPresent(sfHookParameters))
reserve += hookObj.getFieldArray(sfHookParameters).size();

if (hookObj.isFieldPresent(sfHookGrants))
reserve += hookObj.getFieldArray(sfHookGrants).size();

return reserve;
};

for (int i = 0; i < hook::maxHookChainLength(); ++i)
{
if (oldHooks && i < oldHookCount)
oldHookReserve += computeHookReserve(((*oldHooks).get())[i]);
oldHookReserve +=
SetHook::computeHookReserve(((*oldHooks).get())[i]);

if (i < newHooks.size())
newHookReserve += computeHookReserve(newHooks[i]);
newHookReserve += SetHook::computeHookReserve(newHooks[i]);
}

reserveDelta = newHookReserve - oldHookReserve;

@@ -91,6 +91,9 @@ public:
static HookSetValidation
validateHookSetEntry(SetHookCtx& ctx, STObject const& hookSetObj);

static uint32_t
computeHookReserve(STObject const& hookObj);

private:
TER
setHook();

@@ -139,7 +139,9 @@ struct XahauGenesis_test : public beast::unit_test::suite
false, // means the calling test already burned some of the genesis
bool skipTests = false,
bool const testFlag = false,
bool const badNetID = false)
bool const badNetID = false,
uint32_t const expectedOwnerCount =
10 /** testFlag ? 10 : 14 (default) */)
{
using namespace jtx;

@@ -247,7 +249,10 @@ struct XahauGenesis_test : public beast::unit_test::suite
BEAST_EXPECT(
genesisAccRoot->getFieldAmount(sfBalance) ==
XahauGenesis::GenesisAmount);
BEAST_EXPECT(genesisAccRoot->getFieldU32(sfOwnerCount) == 2);
BEAST_EXPECT(
genesisAccRoot->getFieldU32(sfOwnerCount) == !testFlag
? expectedOwnerCount
: 14);

// ensure the definitions are correctly set
{
@@ -583,7 +588,14 @@ struct XahauGenesis_test : public beast::unit_test::suite
toBase58(t), membersStr);
}

activate(__LINE__, env, true, false, true);
activate(
__LINE__,
env,
true,
false,
true,
{},
3 /* IRR,IRD,IMC */ + members.size() + tables.size());

env.close();
env.close();
@@ -2235,6 +2247,8 @@ struct XahauGenesis_test : public beast::unit_test::suite
BEAST_EXPECT(!!hookLE);
uint256 const ns = beast::zero;
uint8_t mc = 0;
uint8_t paramsCount = 0;

if (hookLE)
{
auto const hooksArray = hookLE->getFieldArray(sfHooks);
@@ -2242,6 +2256,9 @@ struct XahauGenesis_test : public beast::unit_test::suite
hooksArray.size() == 1 &&
hooksArray[0].getFieldH256(sfHookHash) == governHookHash);

paramsCount =
hooksArray[0].getFieldArray(sfHookParameters).size();

for (Account const* m : members)
{
auto const mVec = vecFromAcc(*m);
@@ -2308,7 +2325,9 @@ struct XahauGenesis_test : public beast::unit_test::suite
BEAST_EXPECT(!!root);
if (root)
{
BEAST_EXPECT(root->getFieldU32(sfOwnerCount) == mc * 2 + 2);
BEAST_EXPECT(
root->getFieldU32(sfOwnerCount) ==
mc * 2 + 2 + paramsCount);
BEAST_EXPECT(root->getFieldU32(sfFlags) & lsfDisableMaster);
BEAST_EXPECT(root->getAccountID(sfRegularKey) == noAccount());
}