Compare commits

..

4 Commits

6 changed files with 52 additions and 128 deletions

View File

@@ -1,6 +0,0 @@
coverage:
status:
project:
default:
target: 60%
threshold: 2%

View File

@@ -2,14 +2,6 @@ name: build
description: 'Builds the project with ccache integration'
inputs:
cmake-target:
description: 'CMake target to build'
required: false
default: all
cmake-args:
description: 'Additional CMake arguments'
required: false
default: null
generator:
description: 'CMake generator to use'
required: true
@@ -28,10 +20,6 @@ inputs:
description: 'C++ compiler to use'
required: false
default: ''
gcov:
description: 'Gcov to use'
required: false
default: ''
compiler-id:
description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)'
required: false
@@ -53,11 +41,10 @@ inputs:
required: false
default: 'dev'
stdlib:
description: 'C++ standard library to use (default = compiler default, e.g. GCC always uses libstdc++)'
description: 'C++ standard library to use'
required: true
type: choice
options:
- default
- libstdcxx
- libcxx
clang_gcc_toolchain:
@@ -100,6 +87,11 @@ runs:
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
# Configure ccache settings AFTER cache restore (prevents stale cached config)
ccache --set-config=max_size=${{ inputs.ccache_max_size }}
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
@@ -130,10 +122,6 @@ runs:
export CXX="${{ inputs.cxx }}"
fi
if [ -n "${{ inputs.gcov }}" ]; then
ln -sf /usr/bin/${{ inputs.gcov }} /usr/local/bin/gcov
fi
# Create wrapper toolchain that overlays ccache on top of Conan's toolchain
# This enables ccache for the main app build without affecting Conan dependency builds
if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
@@ -197,8 +185,7 @@ runs:
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
-Dtests=TRUE \
-Dxrpld=TRUE \
${{ inputs.cmake-args }}
-Dxrpld=TRUE
- name: Show ccache config before build
if: inputs.ccache_enabled == 'true'
@@ -222,7 +209,7 @@ runs:
VERBOSE_FLAG="-- -v"
fi
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) --target ${{ inputs.cmake-target }} ${VERBOSE_FLAG}
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG}
- name: Show ccache statistics
if: inputs.ccache_enabled == 'true'

View File

@@ -57,9 +57,8 @@ jobs:
"cc": "gcc-11",
"cxx": "g++-11",
"compiler_version": 11,
"stdlib": "default",
"configuration": "Debug",
"job_type": "build"
"stdlib": "libstdcxx",
"configuration": "Debug"
},
{
"compiler_id": "gcc-13-libstdcxx",
@@ -67,20 +66,8 @@ jobs:
"cc": "gcc-13",
"cxx": "g++-13",
"compiler_version": 13,
"stdlib": "default",
"configuration": "Debug",
"job_type": "build"
},
{
"compiler_id": "gcc-13-libstdcxx",
"compiler": "gcc",
"cc": "gcc-13",
"cxx": "g++-13",
"gcov": "gcov-13",
"compiler_version": 13,
"stdlib": "default",
"configuration": "Debug",
"job_type": "coverage"
"stdlib": "libstdcxx",
"configuration": "Debug"
},
{
"compiler_id": "clang-14-libstdcxx-gcc11",
@@ -90,8 +77,7 @@ jobs:
"compiler_version": 14,
"stdlib": "libstdcxx",
"clang_gcc_toolchain": 11,
"configuration": "Debug",
"job_type": "build"
"configuration": "Debug"
},
{
"compiler_id": "clang-16-libstdcxx-gcc13",
@@ -101,8 +87,7 @@ jobs:
"compiler_version": 16,
"stdlib": "libstdcxx",
"clang_gcc_toolchain": 13,
"configuration": "Debug",
"job_type": "build"
"configuration": "Debug"
},
{
"compiler_id": "clang-17-libcxx",
@@ -111,8 +96,7 @@ jobs:
"cxx": "clang++-17",
"compiler_version": 17,
"stdlib": "libcxx",
"configuration": "Debug",
"job_type": "build"
"configuration": "Debug"
},
{
# Clang 18 - testing if it's faster than Clang 17 with libc++
@@ -123,16 +107,14 @@ jobs:
"cxx": "clang++-18",
"compiler_version": 18,
"stdlib": "libcxx",
"configuration": "Debug",
"job_type": "build"
"configuration": "Debug"
}
]
# Minimal matrix for PRs and feature branches
minimal_matrix = [
full_matrix[1], # gcc-13 (middle-ground gcc)
full_matrix[2], # gcc-13 coverage
full_matrix[3] # clang-14 (mature, stable clang)
full_matrix[2] # clang-14 (mature, stable clang)
]
# Determine which matrix to use based on the target branch
@@ -207,21 +189,14 @@ jobs:
# Select the appropriate matrix
if use_full:
if force_full:
print(f"Using FULL matrix (7 configs) - forced by [ci-nix-full-matrix] tag")
print(f"Using FULL matrix (6 configs) - forced by [ci-nix-full-matrix] tag")
else:
print(f"Using FULL matrix (7 configs) - targeting main branch")
print(f"Using FULL matrix (6 configs) - targeting main branch")
matrix = full_matrix
else:
print(f"Using MINIMAL matrix (3 configs) - feature branch/PR")
print(f"Using MINIMAL matrix (2 configs) - feature branch/PR")
matrix = minimal_matrix
# Add runs_on based on job_type
for entry in matrix:
if entry.get("job_type") == "coverage":
entry["runs_on"] = '["self-hosted", "generic", 24.04]'
else:
entry["runs_on"] = '["self-hosted", "generic", 20.04]'
# Output the matrix as JSON
output = json.dumps({"include": matrix})
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
@@ -229,7 +204,7 @@ jobs:
build:
needs: matrix-setup
runs-on: ${{ fromJSON(matrix.runs_on) }}
runs-on: [self-hosted, generic, 20.04]
container:
image: ubuntu:24.04
volumes:
@@ -258,7 +233,7 @@ jobs:
apt-get install -y software-properties-common
add-apt-repository ppa:ubuntu-toolchain-r/test -y
apt-get update
apt-get install -y git python3 python-is-python3 pipx
apt-get install -y python3 python-is-python3 pipx
pipx ensurepath
apt-get install -y cmake ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
apt-get install -y perl # for openssl build
@@ -329,12 +304,6 @@ jobs:
pipx install "conan>=2.0,<3"
echo "$HOME/.local/bin" >> $GITHUB_PATH
# Install gcovr for coverage jobs
if [ "${{ matrix.job_type }}" = "coverage" ]; then
pipx install "gcovr>=7,<9"
apt-get install -y lcov
fi
- name: Check environment
run: |
echo "PATH:"
@@ -344,13 +313,6 @@ jobs:
which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
which ccache && ccache --version || echo "ccache not found"
# Check gcovr for coverage jobs
if [ "${{ matrix.job_type }}" = "coverage" ]; then
which gcov && gcov --version || echo "gcov not found"
which gcovr && gcovr --version || echo "gcovr not found"
fi
echo "---- Full Environment ----"
env
@@ -378,7 +340,6 @@ jobs:
gha_cache_enabled: 'false' # Disable caching for self hosted runner
- name: Build
if: matrix.job_type == 'build'
uses: ./.github/actions/xahau-ga-build
with:
generator: Ninja
@@ -393,26 +354,7 @@ jobs:
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
ccache_max_size: '100G'
- name: Build (Coverage)
if: matrix.job_type == 'coverage'
uses: ./.github/actions/xahau-ga-build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
build_dir: ${{ env.build_dir }}
cc: ${{ matrix.cc }}
cxx: ${{ matrix.cxx }}
gcov: ${{ matrix.gcov }}
compiler-id: ${{ matrix.compiler_id }}
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: ${{ matrix.stdlib }}
cmake-args: '-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_CXX_FLAGS="-O0" -DCMAKE_C_FLAGS="-O0"'
cmake-target: 'coverage'
ccache_max_size: '100G'
- name: Set artifact name
if: matrix.job_type == 'build'
id: set-artifact-name
run: |
ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
@@ -425,7 +367,6 @@ jobs:
ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"
- name: Run tests
if: matrix.job_type == 'build'
run: |
# Ensure the binary exists before trying to run
if [ -f "${{ env.build_dir }}/rippled" ]; then
@@ -435,32 +376,14 @@ jobs:
exit 1
fi
# Coverage-specific steps
- name: Move coverage report
if: matrix.job_type == 'coverage'
shell: bash
- name: Export server definitions
run: |
mv "${{ env.build_dir }}/coverage.xml" ./
${{ env.build_dir }}/rippled --definitions | python3 -m json.tool > server_definitions.json
- name: Archive coverage report
if: matrix.job_type == 'coverage'
uses: actions/upload-artifact@v4
- name: Upload server definitions
if: matrix.compiler_id == 'gcc-13-libstdcxx'
uses: actions/upload-artifact@v4
with:
name: coverage.xml
path: coverage.xml
retention-days: 30
- name: Upload coverage report
if: matrix.job_type == 'coverage'
uses: wandalen/wretry.action/main@v3
with:
action: codecov/codecov-action@v4.3.0
with: |
files: coverage.xml
fail_ci_if_error: true
disable_search: true
verbose: true
plugin: noop
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 210000 # in milliseconds
name: server-definitions
path: server_definitions.json
archive: false

View File

@@ -24,6 +24,7 @@
#include <xrpld/core/TimeKeeper.h>
#include <xrpld/net/RPCCall.h>
#include <xrpld/rpc/RPCHandler.h>
#include <xrpld/rpc/handlers/Handlers.h>
#include <xrpl/basics/Log.h>
#include <xrpl/basics/StringUtilities.h>
#include <xrpl/basics/contract.h>
@@ -387,7 +388,8 @@ run(int argc, char** argv)
po::value<std::string>(),
"Specify the range of present ledgers for testing purposes. Min and "
"max values are comma separated.")(
"version", "Display the build version.");
"version", "Display the build version.")(
"definitions", "Output server definitions as JSON and exit.");
po::options_description data("Ledger/Data Options");
data.add_options()("import", importText.c_str())(
@@ -529,6 +531,13 @@ run(int argc, char** argv)
return 0;
}
if (vm.count("definitions"))
{
auto defs = getStaticServerDefinitions();
std::cout << Json::FastWriter().write(defs);
return 0;
}
#ifndef ENABLE_TESTS
if (vm.count("unittest") || vm.count("unittest-child"))
{

View File

@@ -129,6 +129,8 @@ doRipplePathFind(RPC::JsonContext&);
Json::Value
doServerDefinitions(RPC::JsonContext&);
Json::Value
getStaticServerDefinitions();
Json::Value
doServerInfo(RPC::JsonContext&); // for humans
Json::Value
doServerState(RPC::JsonContext&); // for machines

View File

@@ -523,6 +523,15 @@ public:
}
};
Json::Value
getStaticServerDefinitions()
{
static const Definitions defs{};
Json::Value ret = defs();
ret[jss::hash] = to_string(defs.getHash());
return ret;
}
Json::Value
doServerDefinitions(RPC::JsonContext& context)
{