Compare commits

..

2 Commits

Author        SHA1        Message                     Date
Denis Angell  6a19f94380  Update rippled-example.cfg  2024-02-01 13:33:04 +01:00
Denis Angell  419fb8f730  Update rippled-example.cfg  2024-02-01 12:50:10 +01:00
1956 changed files with 92592 additions and 232496 deletions

View File

@@ -45,11 +45,9 @@ DisableFormat: false
ExperimentalAutoDetectBinPacking: false
ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
IncludeCategories:
- Regex: '^<(test)/'
- Regex: '^<(BeastConfig)'
Priority: 0
- Regex: '^<(xrpld)/'
Priority: 1
- Regex: '^<(xrpl)/'
- Regex: '^<(ripple)/'
Priority: 2
- Regex: '^<(boost)/'
Priority: 3
@@ -58,7 +56,6 @@ IncludeCategories:
IncludeIsMainRegex: '$'
IndentCaseLabels: true
IndentFunctionDeclarationAfterType: false
IndentRequiresClause: true
IndentWidth: 4
IndentWrappedFunctionNames: false
KeepEmptyLinesAtTheStartOfBlocks: false
@@ -74,7 +71,6 @@ PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 200
PointerAlignment: Left
ReflowComments: true
RequiresClausePosition: OwnLine
SortIncludes: true
SpaceAfterCStyleCast: false
SpaceBeforeAssignmentOperators: true

View File

@@ -2,7 +2,3 @@
# To use it by default in git blame:
# git config blame.ignoreRevsFile .git-blame-ignore-revs
50760c693510894ca368e90369b0cc2dabfd07f3
e2384885f5f630c8f0ffe4bf21a169b433a16858
241b9ddde9e11beb7480600fd5ed90e1ef109b21
760f16f56835663d9286bd29294d074de26a7ba6
0eebe6a5f4246fced516d52b83ec4e7f47373edd

View File

@@ -1,12 +0,0 @@
#!/bin/bash
# Pre-commit hook that runs the suspicious patterns check on staged files
# Get the repository's root directory
repo_root=$(git rev-parse --show-toplevel)
# Run the suspicious patterns script in pre-commit mode
"$repo_root/suspicious_patterns.sh" --pre-commit
# Exit with the same code as the script
exit $?

View File

@@ -1,4 +0,0 @@
#!/bin/bash
echo "Configuring git to use .githooks directory..."
git config core.hooksPath .githooks
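
For reference, enabling and exercising the hook locally might look like this (a sketch; the setup script's filename is not shown above, so setup-hooks.sh is a hypothetical name for it):
./setup-hooks.sh                  # hypothetical name for the setup script above
git config --get core.hooksPath   # expect: .githooks
git add some-file
git commit -m "test"              # runs suspicious_patterns.sh --pre-commit on staged files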

View File

@@ -1,217 +0,0 @@
name: build
description: 'Builds the project with ccache integration'
inputs:
generator:
description: 'CMake generator to use'
required: true
configuration:
description: 'Build configuration (Debug, Release, etc.)'
required: true
build_dir:
description: 'Directory to build in'
required: false
default: '.build'
cc:
description: 'C compiler to use'
required: false
default: ''
cxx:
description: 'C++ compiler to use'
required: false
default: ''
compiler-id:
description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)'
required: false
default: ''
cache_version:
description: 'Cache version for invalidation'
required: false
default: '1'
gha_cache_enabled:
description: 'Whether to use actions/cache (disable for self-hosted with volume mounts)'
required: false
default: 'true'
ccache_enabled:
description: 'Whether to use ccache'
required: false
default: 'true'
main_branch:
description: 'Main branch name for restore keys'
required: false
default: 'dev'
stdlib:
description: 'C++ standard library to use'
required: true
type: choice
options:
- libstdcxx
- libcxx
clang_gcc_toolchain:
description: 'GCC version to use for Clang toolchain (e.g. 11, 13)'
required: false
default: ''
ccache_max_size:
description: 'Maximum ccache size'
required: false
default: '2G'
ccache_hash_dir:
description: 'Whether to include directory paths in hash'
required: false
default: 'true'
ccache_compiler_check:
description: 'How to check compiler for changes'
required: false
default: 'content'
runs:
using: 'composite'
steps:
- name: Generate safe branch name
if: inputs.ccache_enabled == 'true'
id: safe-branch
shell: bash
run: |
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.\n-' '-')  # keep \n in the set so echo's trailing newline isn't turned into '-'
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
- name: Configure ccache
if: inputs.ccache_enabled == 'true'
shell: bash
run: |
# Create cache directories
mkdir -p ~/.ccache-cache
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
# Configure ccache settings AFTER cache restore (prevents stale cached config)
ccache --set-config=max_size=${{ inputs.ccache_max_size }}
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
ccache --set-config=cache_dir="$HOME/.ccache-cache"
echo "CCACHE_DIR=$HOME/.ccache-cache" >> $GITHUB_ENV
echo "📦 using ~/.ccache-cache as ccache cache directory"
# Print config for verification
echo "=== ccache configuration ==="
ccache -p
# Zero statistics before the build
ccache -z
- name: Configure project
shell: bash
run: |
mkdir -p ${{ inputs.build_dir }}
cd ${{ inputs.build_dir }}
# Set compiler environment variables if provided
if [ -n "${{ inputs.cc }}" ]; then
export CC="${{ inputs.cc }}"
fi
if [ -n "${{ inputs.cxx }}" ]; then
export CXX="${{ inputs.cxx }}"
fi
# Create wrapper toolchain that overlays ccache on top of Conan's toolchain
# This enables ccache for the main app build without affecting Conan dependency builds
if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
cat > wrapper_toolchain.cmake <<'EOF'
# Include Conan's generated toolchain first (sets compiler, flags, etc.)
# Note: CMAKE_CURRENT_LIST_DIR is the directory containing this wrapper (.build/)
include(${CMAKE_CURRENT_LIST_DIR}/build/generators/conan_toolchain.cmake)
# Overlay ccache configuration for main application build
# This does NOT affect Conan dependency builds (already completed)
set(CMAKE_C_COMPILER_LAUNCHER ccache CACHE STRING "C compiler launcher" FORCE)
set(CMAKE_CXX_COMPILER_LAUNCHER ccache CACHE STRING "C++ compiler launcher" FORCE)
EOF
TOOLCHAIN_FILE="wrapper_toolchain.cmake"
echo "✅ Created wrapper toolchain with ccache enabled"
else
TOOLCHAIN_FILE="build/generators/conan_toolchain.cmake"
echo " Using Conan toolchain directly (ccache disabled)"
fi
# Configure C++ standard library if specified
# libstdcxx used for clang-14/16 to work around missing lexicographical_compare_three_way in libc++
# libcxx can be used with clang-17+ which has full C++20 support
# Note: -stdlib flag is Clang-specific, GCC always uses libstdc++
CMAKE_CXX_FLAGS=""
if [[ "${{ inputs.cxx }}" == clang* ]]; then
# Only Clang needs the -stdlib flag
if [ "${{ inputs.stdlib }}" = "libstdcxx" ]; then
CMAKE_CXX_FLAGS="-stdlib=libstdc++"
elif [ "${{ inputs.stdlib }}" = "libcxx" ]; then
CMAKE_CXX_FLAGS="-stdlib=libc++"
fi
fi
# GCC always uses libstdc++ and doesn't need/support the -stdlib flag
# Configure GCC toolchain for Clang if specified
if [ -n "${{ inputs.clang_gcc_toolchain }}" ] && [[ "${{ inputs.cxx }}" == clang* ]]; then
# Extract Clang version from compiler executable name (e.g., clang++-14 -> 14)
clang_version=$(echo "${{ inputs.cxx }}" | grep -oE '[0-9]+$')
# Clang 16+ supports --gcc-install-dir (precise path specification)
# Clang <16 only has --gcc-toolchain (uses discovery heuristics)
if [ -n "$clang_version" ] && [ "$clang_version" -ge "16" ]; then
# Clang 16+ uses --gcc-install-dir (canonical, precise)
CMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS --gcc-install-dir=/usr/lib/gcc/x86_64-linux-gnu/${{ inputs.clang_gcc_toolchain }}"
else
# Clang 14-15 uses --gcc-toolchain (deprecated but necessary)
# Note: This still uses discovery, so we hide newer GCC versions in the workflow
CMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS --gcc-toolchain=/usr"
fi
fi
# Run CMake configure
# Note: conanfile.py hardcodes 'build/generators' as the output path.
# If we're in a 'build' folder, Conan detects this and uses just 'generators/'
# If we're in '.build' (non-standard), Conan adds the full 'build/generators/'
# So we get: .build/build/generators/ with our non-standard folder name
cmake .. \
-G "${{ inputs.generator }}" \
${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
-Dtests=TRUE \
-Dxrpld=TRUE
- name: Show ccache config before build
if: inputs.ccache_enabled == 'true'
shell: bash
run: |
echo "=========================================="
echo "ccache configuration before build"
echo "=========================================="
ccache -p
echo ""
- name: Build project
shell: bash
run: |
cd ${{ inputs.build_dir }}
# Check for verbose build flag in commit message
VERBOSE_FLAG=""
if echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-cmake-verbose\]'; then
echo "🔊 [ci-ga-cmake-verbose] detected - enabling verbose output"
VERBOSE_FLAG="-- -v"
fi
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG}
- name: Show ccache statistics
if: inputs.ccache_enabled == 'true'
shell: bash
run: ccache -s
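
A minimal local reproduction of the wrapper-toolchain pattern above (a sketch: Ninja and a Debug build are assumptions, and the Conan-generated toolchain must already exist under .build/build/generators/):
mkdir -p .build && cd .build
cat > wrapper_toolchain.cmake <<'EOF'
include(${CMAKE_CURRENT_LIST_DIR}/build/generators/conan_toolchain.cmake)
set(CMAKE_C_COMPILER_LAUNCHER ccache CACHE STRING "C compiler launcher" FORCE)
set(CMAKE_CXX_COMPILER_LAUNCHER ccache CACHE STRING "C++ compiler launcher" FORCE)
EOF
ccache -z   # zero statistics before the build
cmake .. -G Ninja -DCMAKE_TOOLCHAIN_FILE:FILEPATH=wrapper_toolchain.cmake -DCMAKE_BUILD_TYPE=Debug
cmake --build . --parallel $(nproc)
ccache -s   # hit/miss statistics after the build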

View File

@@ -1,146 +0,0 @@
name: 'Cache Restore'
description: 'Restores cache with optional clearing based on commit message tags'
inputs:
path:
description: 'A list of files, directories, and wildcard patterns to cache'
required: true
key:
description: 'An explicit key for restoring the cache'
required: true
restore-keys:
description: 'An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key'
required: false
default: ''
cache-type:
description: 'Type of cache (for logging purposes, e.g., "ccache-main", "Conan")'
required: false
default: 'cache'
fail-on-cache-miss:
description: 'Fail the workflow if cache entry is not found'
required: false
default: 'false'
lookup-only:
description: 'Check if a cache entry exists for the given input(s) without downloading it'
required: false
default: 'false'
additional-clear-keys:
description: 'Additional cache keys to clear (newline separated)'
required: false
default: ''
outputs:
cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key'
value: ${{ steps.restore-cache.outputs.cache-hit }}
cache-primary-key:
description: 'The key that was used to restore the cache'
value: ${{ steps.restore-cache.outputs.cache-primary-key }}
cache-matched-key:
description: 'The key that was used to restore the cache (exact or prefix match)'
value: ${{ steps.restore-cache.outputs.cache-matched-key }}
runs:
using: 'composite'
steps:
- name: Clear cache if requested via commit message
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
echo "=========================================="
echo "${{ inputs.cache-type }} cache clear tag detection"
echo "=========================================="
echo "Searching for: [ci-ga-clear-cache] or [ci-ga-clear-cache:*]"
echo ""
CACHE_KEY="${{ inputs.key }}"
# Extract search terms if present (e.g., "ccache" from "[ci-ga-clear-cache:ccache]")
SEARCH_TERMS=$(echo "${XAHAU_GA_COMMIT_MSG}" | grep -o '\[ci-ga-clear-cache:[^]]*\]' | sed 's/\[ci-ga-clear-cache://;s/\]//' || echo "")
SHOULD_CLEAR=false
if [ -n "${SEARCH_TERMS}" ]; then
# Search terms provided - check if THIS cache key matches ALL terms (AND logic)
echo "🔍 [ci-ga-clear-cache:${SEARCH_TERMS}] detected"
echo "Checking if cache key matches search terms..."
echo " Cache key: ${CACHE_KEY}"
echo " Search terms: ${SEARCH_TERMS}"
echo ""
MATCHES=true
for term in ${SEARCH_TERMS}; do
if ! echo "${CACHE_KEY}" | grep -q "${term}"; then
MATCHES=false
echo " ✗ Key does not contain '${term}'"
break
else
echo " ✓ Key contains '${term}'"
fi
done
if [ "${MATCHES}" = "true" ]; then
echo ""
echo "✅ Cache key matches all search terms - will clear cache"
SHOULD_CLEAR=true
else
echo ""
echo "⏭️ Cache key doesn't match search terms - skipping cache clear"
fi
elif echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-clear-cache\]'; then
# No search terms - always clear this job's cache
echo "🗑️ [ci-ga-clear-cache] detected in commit message"
echo "Clearing ${{ inputs.cache-type }} cache for key: ${CACHE_KEY}"
SHOULD_CLEAR=true
fi
if [ "${SHOULD_CLEAR}" = "true" ]; then
echo ""
echo "Deleting ${{ inputs.cache-type }} caches via GitHub API..."
# Delete primary cache key
echo "Checking for cache: ${CACHE_KEY}"
if gh cache list --key "${CACHE_KEY}" --json key --jq '.[].key' | grep -q "${CACHE_KEY}"; then
echo " Deleting: ${CACHE_KEY}"
gh cache delete "${CACHE_KEY}" || true
echo " ✓ Deleted"
else
echo " Not found"
fi
# Delete additional keys if provided
if [ -n "${{ inputs.additional-clear-keys }}" ]; then
echo ""
echo "Checking additional keys..."
while IFS= read -r key; do
[ -z "${key}" ] && continue
echo "Checking for cache: ${key}"
if gh cache list --key "${key}" --json key --jq '.[].key' | grep -q "${key}"; then
echo " Deleting: ${key}"
gh cache delete "${key}" || true
echo " ✓ Deleted"
else
echo " Not found"
fi
done <<< "${{ inputs.additional-clear-keys }}"
fi
echo ""
echo "✅ ${{ inputs.cache-type }} cache cleared successfully"
echo "Build will proceed from scratch"
else
echo ""
echo " No ${{ inputs.cache-type }} cache clear requested"
fi
echo "=========================================="
- name: Restore cache
id: restore-cache
uses: actions/cache/restore@v4
with:
path: ${{ inputs.path }}
key: ${{ inputs.key }}
restore-keys: ${{ inputs.restore-keys }}
fail-on-cache-miss: ${{ inputs.fail-on-cache-miss }}
lookup-only: ${{ inputs.lookup-only }}
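
The AND-matching of search terms can be sanity-checked in isolation (a sketch; the commit message and cache key below are hypothetical):
msg='[ci-ga-clear-cache:ccache clang-14]'   # hypothetical commit message
key='ccache-main-clang-14-libstdcxx-v3'     # hypothetical cache key
terms=$(echo "$msg" | grep -o '\[ci-ga-clear-cache:[^]]*\]' | sed 's/\[ci-ga-clear-cache://;s/\]//')
match=true
for t in $terms; do
echo "$key" | grep -q "$t" || match=false   # every term must appear in the key
done
echo "match=$match"   # true -> this job would delete its cache via gh cache delete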

View File

@@ -1,162 +0,0 @@
name: dependencies
description: 'Installs build dependencies with caching'
inputs:
configuration:
description: 'Build configuration (Debug, Release, etc.)'
required: true
build_dir:
description: 'Directory to build dependencies in'
required: false
default: '.build'
compiler-id:
description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)'
required: false
default: ''
cache_version:
description: 'Cache version for invalidation'
required: false
default: '1'
main_branch:
description: 'Main branch name for restore keys'
required: false
default: 'dev'
os:
description: 'Operating system (Linux, Macos)'
required: false
default: 'Linux'
arch:
description: 'Architecture (x86_64, armv8)'
required: false
default: 'x86_64'
compiler:
description: 'Compiler type (gcc, clang, apple-clang)'
required: true
compiler_version:
description: 'Compiler version (11, 13, 14, etc.)'
required: true
cc:
description: 'C compiler executable (gcc-13, clang-14, etc.), empty for macOS'
required: false
default: ''
cxx:
description: 'C++ compiler executable (g++-14, clang++-14, etc.), empty for macOS'
required: false
default: ''
stdlib:
description: 'C++ standard library for Conan configuration (note: also in compiler-id)'
required: true
type: choice
options:
- libstdcxx
- libcxx
outputs:
cache-hit:
description: 'Whether there was a cache hit'
value: ${{ steps.cache-restore-conan.outputs.cache-hit }}
runs:
using: 'composite'
steps:
- name: Configure Conan cache paths
if: inputs.os == 'Linux'
shell: bash
# For self-hosted runners, register these cache paths as container volumes
# so the cache can be shared between containers
run: |
mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
- name: Configure Conan
shell: bash
run: |
# Create the default profile directory if it doesn't exist
mkdir -p ~/.conan2/profiles
# Determine the correct libcxx based on stdlib parameter
if [ "${{ inputs.stdlib }}" = "libcxx" ]; then
LIBCXX="libc++"
else
LIBCXX="libstdc++11"
fi
# Create profile with our specific settings
# This overwrites any cached profile to ensure fresh configuration
cat > ~/.conan2/profiles/default <<EOF
[settings]
arch=${{ inputs.arch }}
build_type=${{ inputs.configuration }}
compiler=${{ inputs.compiler }}
compiler.cppstd=20
compiler.libcxx=${LIBCXX}
compiler.version=${{ inputs.compiler_version }}
os=${{ inputs.os }}
EOF
# Add buildenv and conf sections for Linux (not needed for macOS)
if [ "${{ inputs.os }}" = "Linux" ] && [ -n "${{ inputs.cc }}" ]; then
cat >> ~/.conan2/profiles/default <<EOF
[buildenv]
CC=/usr/bin/${{ inputs.cc }}
CXX=/usr/bin/${{ inputs.cxx }}
[conf]
tools.build:compiler_executables={"c": "/usr/bin/${{ inputs.cc }}", "cpp": "/usr/bin/${{ inputs.cxx }}"}
EOF
fi
# Add macOS-specific conf if needed
if [ "${{ inputs.os }}" = "Macos" ]; then
cat >> ~/.conan2/profiles/default <<EOF
[conf]
# Workaround for gRPC with newer Apple Clang
tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
EOF
fi
# Display profile for verification
conan profile show
- name: Export custom recipes
shell: bash
run: |
# Export snappy if not already exported
conan list snappy/1.1.10@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
conan export external/snappy --version 1.1.10 --user xahaud --channel stable
# Export soci if not already exported
conan list soci/4.0.3@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
conan export external/soci --version 4.0.3 --user xahaud --channel stable
# Export wasmedge if not already exported
conan list wasmedge/0.11.2@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
conan export external/wasmedge --version 0.11.2 --user xahaud --channel stable
- name: Install dependencies
shell: bash
env:
CONAN_REQUEST_TIMEOUT: 180 # Increase timeout to 3 minutes for slow mirrors
run: |
# Create build directory
mkdir -p ${{ inputs.build_dir }}
cd ${{ inputs.build_dir }}
# Install dependencies using conan
conan install \
--output-folder . \
--build missing \
--settings build_type=${{ inputs.configuration }} \
..
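
The conan list/export lines above lean on a subshell's exit code; the same idiom, unrolled (a sketch against the Conan 2 CLI used in this action):
already_exported() {
# Exit 1 (and so trigger the || branch) only when the reference is missing.
conan list "$1" 2>/dev/null | (grep -q "not found" && exit 1 || exit 0)
}
already_exported "snappy/1.1.10@xahaud/stable" || \
conan export external/snappy --version 1.1.10 --user xahaud --channel stable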

View File

@@ -1,74 +0,0 @@
name: 'Get Commit Message'
description: 'Gets commit message for both push and pull_request events and sets XAHAU_GA_COMMIT_MSG env var'
inputs:
event-name:
description: 'The event name (push or pull_request)'
required: true
head-commit-message:
description: 'The head commit message (for push events)'
required: false
default: ''
pr-head-sha:
description: 'The PR head SHA (for pull_request events)'
required: false
default: ''
runs:
using: 'composite'
steps:
- name: Get commit message and set environment variable
shell: python
env:
GH_TOKEN: ${{ github.token }}
run: |
import json
import os
import secrets
import urllib.request
event_name = "${{ inputs.event-name }}"
pr_head_sha = "${{ inputs.pr-head-sha }}"
repository = "${{ github.repository }}"
print("==========================================")
print("Setting XAHAU_GA_COMMIT_MSG environment variable")
print("==========================================")
print(f"Event: {event_name}")
if event_name == 'push':
# For push events, use the input directly
message = """${{ inputs.head-commit-message }}"""
print("Source: workflow input (github.event.head_commit.message)")
elif event_name == 'pull_request' and pr_head_sha:
# For PR events, fetch via GitHub API
print(f"Source: GitHub API (fetching commit {pr_head_sha})")
try:
url = f"https://api.github.com/repos/{repository}/commits/{pr_head_sha}"
req = urllib.request.Request(url, headers={
"Accept": "application/vnd.github.v3+json",
"Authorization": f"Bearer {os.environ.get('GH_TOKEN', '')}"
})
with urllib.request.urlopen(req) as response:
data = json.load(response)
message = data["commit"]["message"]
except Exception as e:
print(f"Failed to fetch commit message: {e}")
message = ""
else:
message = ""
print(f"Warning: Unknown event type: {event_name}")
print(f"Commit message (first 100 chars): {message[:100]}")
# Write to GITHUB_ENV using heredoc with random delimiter (prevents injection attacks)
# See: https://securitylab.github.com/resources/github-actions-untrusted-input/
delimiter = f"EOF_{secrets.token_hex(16)}"
with open(os.environ['GITHUB_ENV'], 'a') as f:
f.write(f'XAHAU_GA_COMMIT_MSG<<{delimiter}\n')
f.write(message)
f.write(f'\n{delimiter}\n')
print(f"✓ XAHAU_GA_COMMIT_MSG set (available to all subsequent steps)")
print("==========================================")

View File

@@ -33,7 +33,6 @@ Please check [x] relevant options, delete irrelevant ones.
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Refactor (non-breaking change that only restructures code)
- [ ] Performance (increase or change in throughput and/or latency)
- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation update
- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
@@ -59,12 +58,6 @@ Please check [x] relevant options, delete irrelevant ones.
## Before / After
If relevant, use this section for an English description of the change at a technical level.
If this change affects an API, examples should be included here.
For performance-impacting changes, please provide these details:
1. Is this a new feature, bug fix, or improvement to existing functionality?
2. What behavior/functionality does the change impact?
3. In what processing can the impact be measured? Be as specific as possible - e.g. RPC client call, payment transaction that involves LOB, AMM, caching, DB operations, etc.
4. Does this change affect concurrent processing - e.g. does it involve acquiring locks, multi-threaded processing, or async processing?
-->
<!--

View File

@@ -2,94 +2,25 @@ name: Build using Docker
on:
push:
branches: ["dev", "candidate", "release", "jshooks"]
branches: [ "dev", "candidate", "release" ]
pull_request:
branches: ["dev", "candidate", "release", "jshooks"]
branches: [ "dev", "candidate", "release" ]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP: 1
jobs:
checkout:
builder:
runs-on: [self-hosted, vanity]
outputs:
checkout_path: ${{ steps.vars.outputs.checkout_path }}
steps:
- name: Prepare checkout path
id: vars
run: |
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
CHECKOUT_PATH="${SAFE_BRANCH}-${{ github.sha }}"
echo "checkout_path=${CHECKOUT_PATH}" >> "$GITHUB_OUTPUT"
- uses: actions/checkout@v3
with:
clean: false
- name: Check for suspicious patterns
run: /bin/bash suspicious_patterns.sh
- name: Build using Docker
run: /bin/bash release-builder.sh
- name: Unit tests
run: /bin/bash docker-unit-tests.sh
- uses: actions/checkout@v4
with:
path: ${{ steps.vars.outputs.checkout_path }}
clean: true
fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
build:
runs-on: [self-hosted, xahaud-build]
needs: [checkout]
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Set Cleanup Script Path
run: |
echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV
- name: Build using Docker
run: /bin/bash release-builder.sh
- name: Stop Container (Cleanup)
if: always()
run: |
echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
/bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
CLEANUP_EXIT_CODE=$?
if [[ "$CLEANUP_EXIT_CODE" -eq 0 ]]; then
echo "Cleanup script succeeded."
rm -f "$JOB_CLEANUP_SCRIPT"
echo "Cleanup script removed."
else
echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
fi
if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
echo "🔍 Checking for leftover containers..."
BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")
if [[ -n "$BUILD_CONTAINERS" ]]; then
echo "⚠️ WARNING: Some build containers are still running"
echo "$BUILD_CONTAINERS"
else
echo "✅ No build containers found"
fi
fi
tests:
runs-on: [self-hosted, xahaud-build]
needs: [build, checkout]
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Unit tests
run: /bin/bash docker-unit-tests.sh
cleanup:
runs-on: [self-hosted, xahaud-build]
needs: [tests, checkout]
if: always()
steps:
- name: Cleanup workspace
run: |
CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}"
echo "Cleaning workspace for ${CHECKOUT_PATH}"
rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}"
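
The per-run workspace name used by these jobs is derived as follows (a sketch with hypothetical values):
ref_name='feature/foo bar'   # hypothetical branch name
sha='0123abcd'               # hypothetical commit SHA
SAFE_BRANCH=$(echo "$ref_name" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
echo "${SAFE_BRANCH}-${sha}"   # -> feature-foo-bar-0123abcd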

View File

@@ -4,11 +4,11 @@ on: [push, pull_request]
jobs:
check:
runs-on: ubuntu-24.04
runs-on: ubuntu-20.04
env:
CLANG_VERSION: 18
CLANG_VERSION: 10
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install clang-format
run: |
codename=$( lsb_release --codename --short )
@@ -19,8 +19,10 @@ jobs:
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
sudo apt-get update
sudo apt-get install clang-format-${CLANG_VERSION}
- name: Format first-party sources
run: find include src -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -not -path "src/magic/magic_enum.h" -exec clang-format-${CLANG_VERSION} -i {} +
- name: Format src/ripple
run: find src/ripple -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
- name: Format src/test
run: find src/test -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
- name: Check for differences
id: assert
run: |
@@ -28,7 +30,7 @@ jobs:
git diff --exit-code | tee "clang-format.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
continue-on-error: true
with:
name: clang-format.patch
@@ -50,7 +52,7 @@ jobs:
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run 'git-clang-format --extensions cpp,h,hpp,ipp develop'
2. Run 'git-clang-format --extensions c,cpp,h,cxx,ipp dev'
in your repo, commit, and push.
run: |
echo "${PREAMBLE}"

.github/workflows/doxygen.yml vendored Normal file
View File

@@ -0,0 +1,25 @@
name: Build and publish Doxygen documentation
on:
push:
branches:
- dev
jobs:
job:
runs-on: ubuntu-latest
container:
image: docker://rippleci/rippled-ci-builder:2944b78d22db
steps:
- name: checkout
uses: actions/checkout@v2
- name: build
run: |
mkdir build
cd build
cmake -DBoost_NO_BOOST_CMAKE=ON ..
cmake --build . --target docs --parallel $(nproc)
- name: publish
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: build/docs/html

View File

@@ -1,24 +0,0 @@
name: Guard Checker Build
on:
push:
pull_request:
jobs:
guard-checker-build:
strategy:
fail-fast: false
matrix:
include:
- run-on: ubuntu-latest
- run-on: macos-latest
runs-on: ${{ matrix.run-on }}
name: Guard Checker Build - ${{ matrix.run-on }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Build Guard Checker
run: |
cd include/xrpl/hook
make guard_checker

View File

@@ -1,104 +0,0 @@
name: instrumentation
on:
pull_request:
push:
# If the branches list is ever changed, be sure to change it on all
# build/test jobs (nix, macos, windows, instrumentation)
branches:
# Always build the package branches
- develop
- release
- master
# Branches that opt-in to running
- 'ci/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
# NOTE we are not using dependencies built inside nix because nix is lagging
# with compiler versions. Instrumentation requires clang version 16 or later
instrumentation-build:
if: false # disable for now
env:
CLANG_RELEASE: 16
strategy:
fail-fast: false
runs-on: [self-hosted, heavy]
container: debian:bookworm
steps:
- name: install prerequisites
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update
apt-get install --yes --no-install-recommends \
clang-${CLANG_RELEASE} clang++-${CLANG_RELEASE} \
python3-pip python-is-python3 make cmake git wget
apt-get clean
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-${CLANG_RELEASE} 100 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-${CLANG_RELEASE}
update-alternatives --auto clang
pip install --no-cache --break-system-packages "conan<2"
- name: checkout
uses: actions/checkout@v4
- name: prepare environment
run: |
mkdir ${GITHUB_WORKSPACE}/.build
echo "SOURCE_DIR=$GITHUB_WORKSPACE" >> $GITHUB_ENV
echo "BUILD_DIR=$GITHUB_WORKSPACE/.build" >> $GITHUB_ENV
echo "CC=/usr/bin/clang" >> $GITHUB_ENV
echo "CXX=/usr/bin/clang++" >> $GITHUB_ENV
- name: configure Conan
run: |
conan profile new --detect default
conan profile update settings.compiler=clang default
conan profile update settings.compiler.version=${CLANG_RELEASE} default
conan profile update settings.compiler.libcxx=libstdc++11 default
conan profile update settings.compiler.cppstd=20 default
conan profile update options.rocksdb=False default
conan profile update \
'conf.tools.build:compiler_executables={"c": "/usr/bin/clang", "cpp": "/usr/bin/clang++"}' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
conan export external/snappy snappy/1.1.10@
conan export external/soci soci/4.0.3@
- name: build dependencies
run: |
cd ${BUILD_DIR}
conan install ${SOURCE_DIR} \
--output-folder ${BUILD_DIR} \
--install-folder ${BUILD_DIR} \
--build missing \
--settings build_type=Debug
- name: build with instrumentation
run: |
cd ${BUILD_DIR}
cmake -S ${SOURCE_DIR} -B ${BUILD_DIR} \
-Dvoidstar=ON \
-Dtests=ON \
-Dxrpld=ON \
-DCMAKE_BUILD_TYPE=Debug \
-DSECP256K1_BUILD_BENCHMARK=OFF \
-DSECP256K1_BUILD_TESTS=OFF \
-DSECP256K1_BUILD_EXHAUSTIVE_TESTS=OFF \
-DCMAKE_TOOLCHAIN_FILE=${BUILD_DIR}/build/generators/conan_toolchain.cmake
cmake --build . --parallel $(nproc)
- name: verify instrumentation enabled
run: |
cd ${BUILD_DIR}
./rippled --version | grep libvoidstar
- name: run unit tests
run: |
cd ${BUILD_DIR}
./rippled -u --unittest-jobs $(( $(nproc)/4 ))

View File

@@ -11,11 +11,6 @@ jobs:
- uses: actions/checkout@v3
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Verify Python Generator Matches Canonical Script
run: |
python3 Builds/levelization/levelization.py \
--results-dir /tmp/levelization-py-results \
--compare-to Builds/levelization/results
- name: Check for differences
id: assert
run: |
@@ -23,7 +18,7 @@ jobs:
git diff --exit-code | tee "levelization.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
continue-on-error: true
with:
name: levelization.patch
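
Running the same checks locally (a sketch using the paths from the steps above):
Builds/levelization/levelization.sh
python3 Builds/levelization/levelization.py \
--results-dir /tmp/levelization-py-results \
--compare-to Builds/levelization/results
git diff --exit-code | tee levelization.patch   # drift in the committed results shows up here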

View File

@@ -1,50 +0,0 @@
name: Verify Generated Hook Headers
on:
push:
pull_request:
jobs:
verify-generated-headers:
strategy:
fail-fast: false
matrix:
include:
- target: hook/error.h
generator: ./hook/generate_error.sh
- target: hook/extern.h
generator: ./hook/generate_extern.sh
- target: hook/sfcodes.h
generator: bash ./hook/generate_sfcodes.sh
- target: hook/tts.h
generator: ./hook/generate_tts.sh
runs-on: ubuntu-24.04
env:
CLANG_VERSION: 18
name: ${{ matrix.target }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install clang-format
run: |
codename=$( lsb_release --codename --short )
sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
EOF
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
sudo apt-get update
sudo apt-get install clang-format-${CLANG_VERSION}
sudo ln -sf /usr/bin/clang-format-${CLANG_VERSION} /usr/local/bin/clang-format
- name: Verify ${{ matrix.target }}
run: |
set -euo pipefail
chmod +x hook/generate_*.sh || true
tmp=$(mktemp)
trap 'rm -f "$tmp"' EXIT
${{ matrix.generator }} > "$tmp"
diff -u ${{ matrix.target }} "$tmp"
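
One matrix entry, run locally (a sketch; the hook/error.h target and its generator are taken from the matrix above):
chmod +x hook/generate_*.sh || true
tmp=$(mktemp); trap 'rm -f "$tmp"' EXIT
./hook/generate_error.sh > "$tmp"
diff -u hook/error.h "$tmp" && echo "hook/error.h matches its generator"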

View File

@@ -1,125 +0,0 @@
name: MacOS - GA Runner
on:
push:
branches: ["dev", "candidate", "release"]
pull_request:
branches: ["**"]
types: [opened, synchronize, reopened, labeled, unlabeled]
schedule:
- cron: '0 0 * * *'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
if: >
github.event_name != 'pull_request' ||
contains(fromJson('["dev","candidate","release"]'), github.base_ref) ||
contains(join(github.event.pull_request.labels.*.name, ','), 'ci-full-build')
strategy:
matrix:
generator:
- Ninja
configuration:
- Debug
runs-on: [self-hosted, macOS]
env:
build_dir: .build
# Bump this number to invalidate all caches globally.
CACHE_VERSION: 3
MAIN_BRANCH_NAME: dev
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Add Homebrew to PATH
run: |
echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
echo "/opt/homebrew/sbin" >> "$GITHUB_PATH"
- name: Install Coreutils
run: |
brew install coreutils
echo "Num proc: $(nproc)"
# Instead of installing toolchains globally with brew, use mise to give
# each Runner directory its own isolated environment.
- name: Setup toolchain (mise)
uses: jdx/mise-action@v3.6.1
with:
cache: false
install: true
mise_toml: |
[tools]
cmake = "3.25.3"
python = "3.12"
pipx = "latest"
conan = "2"
ninja = "latest"
ccache = "latest"
- name: Install tools via mise
run: |
mise install
mise reshim
echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"
- name: Check environment
run: |
echo "PATH:"
echo "${PATH}" | tr ':' '\n'
which python && python --version || echo "Python not found"
which conan && conan --version || echo "Conan not found"
which cmake && cmake --version || echo "CMake not found"
clang --version
ccache --version
echo "---- Full Environment ----"
env
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Detect compiler version
id: detect-compiler
run: |
COMPILER_VERSION=$(clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+')
echo "compiler_version=${COMPILER_VERSION}" >> $GITHUB_OUTPUT
echo "Detected Apple Clang version: ${COMPILER_VERSION}"
- name: Install dependencies
uses: ./.github/actions/xahau-ga-dependencies
with:
configuration: ${{ matrix.configuration }}
build_dir: ${{ env.build_dir }}
compiler-id: clang
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
os: Macos
arch: armv8
compiler: apple-clang
compiler_version: ${{ steps.detect-compiler.outputs.compiler_version }}
stdlib: libcxx
- name: Build
uses: ./.github/actions/xahau-ga-build
with:
generator: ${{ matrix.generator }}
configuration: ${{ matrix.configuration }}
build_dir: ${{ env.build_dir }}
compiler-id: clang
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: libcxx
ccache_max_size: '100G'
- name: Test
run: |
${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)

View File

@@ -1,377 +0,0 @@
name: Nix - GA Runner
on:
push:
branches: ["dev", "candidate", "release"]
pull_request:
branches: ["**"]
types: [opened, synchronize, reopened, labeled, unlabeled]
schedule:
- cron: '0 0 * * *'
workflow_dispatch:
inputs:
full_matrix:
description: "Force full matrix (6 configs)"
required: false
default: "false"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
matrix-setup:
if: >
github.event_name != 'pull_request' ||
contains(fromJson('["dev","candidate","release"]'), github.base_ref) ||
contains(join(github.event.pull_request.labels.*.name, ','), 'ci-full-build')
runs-on: [self-hosted, generic, 20.04]
container: python:3-slim
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- name: escape double quotes
id: escape
shell: bash
env:
PR_TITLE: ${{ github.event.pull_request.title }}
run: |
ESCAPED_PR_TITLE="${PR_TITLE//\"/\\\"}"
echo "title=${ESCAPED_PR_TITLE}" >> "$GITHUB_OUTPUT"
- name: Generate build matrix
id: set-matrix
shell: python
env:
GH_TOKEN: ${{ github.token }}
run: |
import json
import os
import urllib.request
# Full matrix with all 6 compiler configurations
# Each configuration includes all parameters needed by the build job
full_matrix = [
{
"compiler_id": "gcc-11-libstdcxx",
"compiler": "gcc",
"cc": "gcc-11",
"cxx": "g++-11",
"compiler_version": 11,
"stdlib": "libstdcxx",
"configuration": "Debug"
},
{
"compiler_id": "gcc-13-libstdcxx",
"compiler": "gcc",
"cc": "gcc-13",
"cxx": "g++-13",
"compiler_version": 13,
"stdlib": "libstdcxx",
"configuration": "Debug"
},
{
"compiler_id": "clang-14-libstdcxx-gcc11",
"compiler": "clang",
"cc": "clang-14",
"cxx": "clang++-14",
"compiler_version": 14,
"stdlib": "libstdcxx",
"clang_gcc_toolchain": 11,
"configuration": "Debug"
},
{
"compiler_id": "clang-16-libstdcxx-gcc13",
"compiler": "clang",
"cc": "clang-16",
"cxx": "clang++-16",
"compiler_version": 16,
"stdlib": "libstdcxx",
"clang_gcc_toolchain": 13,
"configuration": "Debug"
},
{
"compiler_id": "clang-17-libcxx",
"compiler": "clang",
"cc": "clang-17",
"cxx": "clang++-17",
"compiler_version": 17,
"stdlib": "libcxx",
"configuration": "Debug"
},
{
# Clang 18 - testing if it's faster than Clang 17 with libc++
# Requires patching Conan v1 settings.yml to add version 18
"compiler_id": "clang-18-libcxx",
"compiler": "clang",
"cc": "clang-18",
"cxx": "clang++-18",
"compiler_version": 18,
"stdlib": "libcxx",
"configuration": "Debug"
}
]
# Minimal matrix for PRs and feature branches
minimal_matrix = [
full_matrix[1], # gcc-13 (middle-ground gcc)
full_matrix[2] # clang-14 (mature, stable clang)
]
# Determine which matrix to use based on the target branch
ref = "${{ github.ref }}"
base_ref = "${{ github.base_ref }}" # For PRs, this is the target branch
event_name = "${{ github.event_name }}"
pr_title = """${{ steps.escape.outputs.title }}"""
pr_labels = """${{ join(github.event.pull_request.labels.*.name, ',') }}"""
pr_head_sha = "${{ github.event.pull_request.head.sha }}"
# Get commit message - for PRs, fetch via API since head_commit.message is empty
if event_name == "pull_request" and pr_head_sha:
try:
url = f"https://api.github.com/repos/${{ github.repository }}/commits/{pr_head_sha}"
req = urllib.request.Request(url, headers={
"Accept": "application/vnd.github.v3+json",
"Authorization": f"Bearer {os.environ.get('GH_TOKEN', '')}"
})
with urllib.request.urlopen(req) as response:
data = json.load(response)
commit_message = data["commit"]["message"]
except Exception as e:
print(f"Failed to fetch commit message: {e}")
commit_message = ""
else:
commit_message = """${{ github.event.head_commit.message }}"""
# Debug logging
print(f"Event: {event_name}")
print(f"Ref: {ref}")
print(f"Base ref: {base_ref}")
print(f"PR head SHA: {pr_head_sha}")
print(f"PR title: {pr_title}")
print(f"PR labels: {pr_labels}")
print(f"Commit message: {commit_message}")
# Manual trigger input to force full matrix.
manual_full = "${{ github.event.inputs.full_matrix || 'false' }}" == "true"
# Label/manual overrides, while preserving existing title/commit behavior.
force_full = (
manual_full
or "[ci-nix-full-matrix]" in commit_message
or "[ci-nix-full-matrix]" in pr_title
or ("ci-full-build" in pr_labels and "ci-nix-full-matrix" in pr_labels)
)
force_min = (
"ci-full-build" in pr_labels
)
print(f"Force full matrix: {force_full}")
print(f"Force min matrix: {force_min}")
# Check if this is targeting a main branch
# For PRs: check base_ref (target branch)
# For pushes: check ref (current branch)
main_branches = ["refs/heads/dev", "refs/heads/release", "refs/heads/candidate"]
if force_full:
# Override: always use full matrix if forced by manual input or label.
use_full = True
elif force_min:
# Override: always use minimal matrix if ci-full-build label is present.
use_full = False
elif event_name == "pull_request":
# For PRs, base_ref is just the branch name (e.g., "dev", not "refs/heads/dev")
# Check if the PR targets release or candidate (more critical branches)
use_full = base_ref in ["release", "candidate"]
else:
# For pushes, ref is the full reference (e.g., "refs/heads/dev")
use_full = ref in main_branches
# Select the appropriate matrix
if use_full:
if force_full:
print(f"Using FULL matrix (6 configs) - forced by [ci-nix-full-matrix] tag")
else:
print(f"Using FULL matrix (6 configs) - targeting main branch")
matrix = full_matrix
else:
print(f"Using MINIMAL matrix (2 configs) - feature branch/PR")
matrix = minimal_matrix
# Output the matrix as JSON
output = json.dumps({"include": matrix})
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
f.write(f"matrix={output}\n")
build:
needs: matrix-setup
runs-on: [self-hosted, generic, 20.04]
container:
image: ubuntu:24.04
volumes:
- /home/runner/.conan-cache:/.conan-cache
- /home/runner/.ccache-cache:/github/home/.ccache-cache
defaults:
run:
shell: bash
outputs:
artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
strategy:
fail-fast: false
matrix: ${{ fromJSON(needs.matrix-setup.outputs.matrix) }}
env:
build_dir: .build
# Bump this number to invalidate all caches globally.
CACHE_VERSION: 3
MAIN_BRANCH_NAME: dev
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install build dependencies
run: |
apt-get update
apt-get install -y software-properties-common
add-apt-repository ppa:ubuntu-toolchain-r/test -y
apt-get update
apt-get install -y python3 python-is-python3 pipx
pipx ensurepath
apt-get install -y cmake ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
apt-get install -y perl # for openssl build
apt-get install -y libsqlite3-dev # for xahaud build
# Install the specific GCC version needed for Clang
if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
echo "=== Installing GCC ${{ matrix.clang_gcc_toolchain }} for Clang ==="
apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev
echo "=== GCC versions available after installation ==="
ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d"
fi
# For Clang < 16 with --gcc-toolchain, hide newer GCC versions
# This is needed because --gcc-toolchain still picks the highest version
#
# THE GREAT GCC HIDING TRICK (for Clang < 16):
# Clang versions before 16 don't have --gcc-install-dir, only --gcc-toolchain
# which is deprecated and still uses discovery heuristics that ALWAYS pick
# the highest version number. So we play a sneaky game...
#
# We rename newer GCC versions to very low integers (1, 2, 3...) which makes
# Clang think they're ancient GCC versions. Since 11 > 3 > 2 > 1, Clang will
# pick GCC 11 over our renamed versions. It's dumb but it works!
#
# Example: GCC 12→1, GCC 13→2, GCC 14→3, so Clang picks 11 (highest number)
if [ -n "${{ matrix.clang_gcc_toolchain }}" ] && [ "${{ matrix.compiler_version }}" -lt "16" ]; then
echo "=== Hiding GCC versions newer than ${{ matrix.clang_gcc_toolchain }} for Clang < 16 ==="
target_version=${{ matrix.clang_gcc_toolchain }}
counter=1 # Start with 1 - these will be seen as "GCC version 1, 2, 3" etc
for dir in /usr/lib/gcc/x86_64-linux-gnu/*/; do
if [ -d "$dir" ]; then
version=$(basename "$dir")
# Check if version is numeric and greater than target
if [[ "$version" =~ ^[0-9]+$ ]] && [ "$version" -gt "$target_version" ]; then
echo "Hiding GCC $version -> renaming to $counter (will be seen as GCC version $counter)"
# Safety check: ensure target doesn't already exist
if [ ! -e "/usr/lib/gcc/x86_64-linux-gnu/$counter" ]; then
mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
else
echo "ERROR: Cannot rename GCC $version - /usr/lib/gcc/x86_64-linux-gnu/$counter already exists"
exit 1
fi
counter=$((counter + 1))
fi
fi
done
fi
# Verify what Clang will use
if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
echo "=== Verifying GCC toolchain selection ==="
echo "Available GCC versions:"
ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d.*[0-9]+$" || true
echo ""
echo "Clang's detected GCC installation:"
${{ matrix.cxx }} -v -E -x c++ /dev/null -o /dev/null 2>&1 | grep "Found candidate GCC installation" || true
fi
# Install libc++ dev packages if using libc++ (not needed for libstdc++)
if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
fi
# Install Conan 2
pipx install "conan>=2.0,<3"
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: Check environment
run: |
echo "PATH:"
echo "${PATH}" | tr ':' '\n'
which conan && conan --version || echo "Conan not found"
which cmake && cmake --version || echo "CMake not found"
which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
which ccache && ccache --version || echo "ccache not found"
echo "---- Full Environment ----"
env
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install dependencies
uses: ./.github/actions/xahau-ga-dependencies
with:
configuration: ${{ matrix.configuration }}
build_dir: ${{ env.build_dir }}
compiler-id: ${{ matrix.compiler_id }}
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
compiler: ${{ matrix.compiler }}
compiler_version: ${{ matrix.compiler_version }}
cc: ${{ matrix.cc }}
cxx: ${{ matrix.cxx }}
stdlib: ${{ matrix.stdlib }}
gha_cache_enabled: 'false' # Disable caching for self hosted runner
- name: Build
uses: ./.github/actions/xahau-ga-build
with:
generator: Ninja
configuration: ${{ matrix.configuration }}
build_dir: ${{ env.build_dir }}
cc: ${{ matrix.cc }}
cxx: ${{ matrix.cxx }}
compiler-id: ${{ matrix.compiler_id }}
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: ${{ matrix.stdlib }}
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
ccache_max_size: '100G'
- name: Set artifact name
id: set-artifact-name
run: |
ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
echo "artifact_name=${ARTIFACT_NAME}" >> "$GITHUB_OUTPUT"
echo "Using artifact name: ${ARTIFACT_NAME}"
- name: Debug build directory
run: |
echo "Checking build directory contents: ${{ env.build_dir }}"
ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"
- name: Run tests
run: |
# Ensure the binary exists before trying to run
if [ -f "${{ env.build_dir }}/rippled" ]; then
${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
else
echo "Error: rippled executable not found in ${{ env.build_dir }}"
exit 1
fi
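
After the renaming trick, the discovery result can be verified directly (a sketch; clang++-14 with GCC 11 assumed, matching the clang-14-libstdcxx-gcc11 matrix entry):
ls /usr/lib/gcc/x86_64-linux-gnu/   # e.g. 1 2 3 11 -- 11 is now the highest version left
clang++-14 -v -E -x c++ /dev/null -o /dev/null 2>&1 \
| grep "Found candidate GCC installation"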

.gitignore vendored
View File

@@ -24,11 +24,6 @@ bin/project-cache.jam
build/docker
# Ignore release builder files
.env
release-build
cmake-*.tar.gz
# Ignore object files.
*.o
build
@@ -76,7 +71,6 @@ docs/html_doc
# Xcode
.DS_Store
*/build/*
!/docs/build/
*.pbxuser
!default.pbxuser
*.mode1v3
@@ -120,12 +114,3 @@ pkg_out
pkg
CMakeUserPresets.json
bld.rippled/
generated
.vscode
# AI docs (local working documents)
.ai-docs/
# Suggested in-tree build directory
/.build/

View File

@@ -1,6 +1,6 @@
# .pre-commit-config.yaml
repos:
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v18.1.3
rev: v10.0.1
hooks:
- id: clang-format

.testnet/.gitignore vendored
View File

@@ -1,4 +0,0 @@
output/
__pycache__/
scenarios/odd-cases/
scenarios/suite-experiments.yml

View File

@@ -1,27 +0,0 @@
"""Scenario: ConsensusEntropy amendment crashes non-supporting node.
Votes ConsensusEntropy accept on all nodes except n4, then waits for n4
to crash as the amendment activates without its support.
x-testnet run --scenario-script consensus_entropy_crash.py
"""
async def scenario(ctx, log):
await ctx.wait_for_ledger_close()
ctx.feature("ConsensusEntropy", vetoed=False, exclude_nodes=[4])
log("Waiting for ConsensusEntropy to be voted for...")
await ctx.wait_for_feature(
"ConsensusEntropy",
check=lambda s: not s.get("vetoed"),
exclude_nodes=[4],
timeout=60,
)
log("Waiting for n4 to crash...")
op = await ctx.wait_for_nodes_down(nodes=[4], timeout=600)
ctx.assert_log("unsupported amendments activated", since=op.started, nodes=[4])
ctx.assert_exit_status(0, nodes=[4])
log("PASS: n4 shut down due to unsupported amendment")

View File

@@ -1,52 +0,0 @@
""":descr: entropy stays valid under transaction load"""
from __future__ import annotations
from helpers import require_entropy, get_entropy_tx, assert_valid_entropy
variants = [
{"label": "light", "min_txns": 5, "max_txns": 10},
{"label": "heavy", "min_txns": 50, "max_txns": 60},
{"label": "super_heavy", "min_txns": 90, "max_txns": 120},
]
async def scenario(ctx, log, *, min_txns=5, max_txns=10, **_):
await require_entropy(ctx, log)
gen = ctx.txn_generator(min_txns=min_txns, max_txns=max_txns)
await gen.start()
await gen.wait_until_ready()
log(f"Transaction generator ready ({min_txns}-{max_txns} txns/ledger)")
# Wait for pipeline warmup + a few txn-bearing ledgers.
await ctx.wait_for_ledgers(3, node_id=0, timeout=60)
start_seq = ctx.validated_ledger_index(0)
await ctx.wait_for_ledgers(10, node_id=0, timeout=120)
end_seq = ctx.validated_ledger_index(0)
log(f"Inspecting ledgers {start_seq + 1}{end_seq}")
digests = set()
total_user_txns = 0
for seq in range(start_seq + 1, end_seq + 1):
ce, user_txns = get_entropy_tx(ctx, seq)
digest, count = assert_valid_entropy(ce, seq, seen_digests=digests)
total_user_txns += len(user_txns)
log(
f" Ledger {seq}: EntropyCount={count} "
f"user_txns={len(user_txns)} Digest={digest[:16]}..."
)
await gen.stop()
log(
f"Verified {end_seq - start_seq} ledgers: {total_user_txns} user txns, "
f"all entropy valid and unique"
)
if total_user_txns == 0:
raise AssertionError("No user transactions were included in any ledger")
log("PASS")

View File

@@ -1,117 +0,0 @@
""":descr: 4/5 liveness, 3/5 zero-entropy fallback, recovery"""
from __future__ import annotations
from helpers import require_entropy, get_entropy_tx, entropy_fields
async def scenario(ctx, log):
await require_entropy(ctx, log)
# Baseline: wait 1 ledger to confirm network is healthy.
await ctx.wait_for_ledgers(1, node_id=0, timeout=30)
# --- 4/5 liveness ---
ctx.stop_node(4)
await ctx.wait_for_nodes_down(nodes=[4], timeout=30)
await ctx.wait_for_ledgers(1, node_id=0, timeout=30)
log("4/5: liveness OK")
# Snapshot validated seq before dropping to 3/5.
val_before = ctx.validated_ledger_index(0)
# --- 3/5 degraded window ---
ctx.stop_node(3)
await ctx.wait_for_nodes_down(nodes=[3], timeout=30)
# 10s ≈ 3 rounds at 3s cadence.
await ctx.sleep(10)
val_after = ctx.validated_ledger_index(0)
log(f"3/5: validated ledger {val_before}{val_after}")
# Accepted/built ledgers may still later appear as validated once the full
# network rejoins. For ConsensusEntropy the key invariant is that every
# ledger created during this sub-quorum window carries ZERO entropy.
degraded_zero = 0
degraded_end = val_after or val_before
if val_before and degraded_end and degraded_end > val_before:
for seq in range(val_before + 1, degraded_end + 1):
ce, _ = get_entropy_tx(ctx, seq)
digest, entropy_count, is_zero = entropy_fields(ce)
if not is_zero:
raise AssertionError(
f"Ledger {seq}: expected ZERO entropy during 3/5 window, "
f"got Digest={digest[:16]}... EntropyCount={entropy_count}"
)
degraded_zero += 1
log(f" Degraded ledger {seq}: EntropyCount={entropy_count} ZERO")
log(f"3/5 entropy summary: {degraded_zero} zero")
# Log checks tied to actual transition mechanics:
# - seq=1 proposals are emitted once commit-set phase is entered
# - ConvergingCommit transition is the gateway out of seq=0-only behavior
# - establish gate blocked indicates tx-consensus/pause prevented accept
ctx.log_level("LedgerConsensus", "trace")
op = await ctx.sleep(6, name="stall_window")
ctx.assert_not_log(
r"RNG: transitioned to ConvergingCommit", within=op.window, nodes=[0, 1, 2]
)
ctx.assert_not_log(r"RNG: propose seq=1", within=op.window, nodes=[0, 1, 2])
gate_blocked = ctx.search_logs(
r"STALLDIAG: establish gate blocked reason=(pause|no-tx-consensus)",
within=op.window,
nodes=[0, 1, 2],
)
log(f"3/5: establish gate-blocked logs in 6s: {gate_blocked.count}")
skips = ctx.search_logs(r"RNG: bootstrap skip", within=op.window, nodes=[0, 1, 2])
log(f"3/5: RNG bootstrap skips in 6s: {skips.count}")
# --- Recovery: restart nodes, verify ledger advancement ---
ctx.start_node(3)
ctx.start_node(4)
await ctx.wait_for_ledgers(1, node_id=0, timeout=120)
val_recovered = ctx.validated_ledger_index(0)
pre_recovery = max(v for v in [val_before, val_after] if v is not None)
log(f"Recovered: validated seq {pre_recovery}{val_recovered}")
if not val_recovered or val_recovered <= pre_recovery:
raise AssertionError(
f"Validated ledger did not advance after recovery "
f"({pre_recovery}{val_recovered})"
)
# Inspect post-recovery ledgers separately from the degraded window above.
# Once the network is back at quorum, non-zero entropy is valid again but
# must still be quorum-met.
zero_count = 0
nonzero_count = 0
for seq in range(pre_recovery + 1, val_recovered + 1):
ce, _ = get_entropy_tx(ctx, seq)
digest, entropy_count, is_zero = entropy_fields(ce)
if is_zero:
zero_count += 1
else:
nonzero_count += 1
if entropy_count < 4:
raise AssertionError(
f"Ledger {seq}: non-zero entropy with sub-quorum "
f"EntropyCount={entropy_count} (need >= 4)"
)
log(
f" Ledger {seq}: EntropyCount={entropy_count} "
f"{'ZERO' if is_zero else 'REAL'}"
)
log(f"Entropy summary: {zero_count} zero, {nonzero_count} non-zero")
log("PASS")

View File

@@ -1,46 +0,0 @@
""":descr: drop 2 nodes (3/5 stall), restart both, verify recovery"""
from __future__ import annotations
async def scenario(ctx, log):
await ctx.wait_for_ledger_close(timeout=120)
feature = ctx.feature_check("ConsensusEntropy", node_id=0)
if not feature or not feature.get("enabled", False):
raise AssertionError(f"ConsensusEntropy not enabled: {feature}")
await ctx.wait_for_ledgers(1, node_id=0, timeout=60)
log("Baseline OK")
# Drop 2 nodes → validation stall.
ctx.stop_node(3)
ctx.stop_node(4)
await ctx.wait_for_nodes_down(nodes=[3, 4], timeout=30)
info = ctx.rpc.server_info(node_id=0)
val_before = info.get("info", {}).get("validated_ledger", {}).get("seq", 0)
log(f"Stalled at validated seq {val_before}")
# Let it sit for a few rounds in degraded state.
await ctx.sleep(6)
# Bring both nodes back.
ctx.start_node(3)
ctx.start_node(4)
log("Restarted n3 and n4, waiting for recovery...")
# Recovery: wait for ANY validated ledger advance on n0.
await ctx.wait_for_ledger_close(node_id=0, timeout=60)
info = ctx.rpc.server_info(node_id=0)
val_after = info.get("info", {}).get("validated_ledger", {}).get("seq", 0)
log(f"Recovered: validated seq {val_before}{val_after}")
if val_after <= val_before:
raise AssertionError(
f"Validated ledger did not advance after recovery "
f"({val_before}{val_after})"
)
log("PASS")

View File

@@ -1,27 +0,0 @@
""":descr: all 5 nodes healthy, every ledger has valid unique quorum-met entropy"""
from __future__ import annotations
from helpers import require_entropy, get_entropy_tx, assert_valid_entropy
async def scenario(ctx, log):
await require_entropy(ctx, log)
# Wait for RNG pipeline to warm up past bootstrap skip.
await ctx.wait_for_ledgers(3, node_id=0, timeout=60)
log("Pipeline warmed up")
start_seq = ctx.validated_ledger_index(0)
await ctx.wait_for_ledgers(10, node_id=0, timeout=120)
end_seq = ctx.validated_ledger_index(0)
log(f"Inspecting ledgers {start_seq + 1}{end_seq}")
digests = set()
for seq in range(start_seq + 1, end_seq + 1):
ce, _ = get_entropy_tx(ctx, seq)
digest, count = assert_valid_entropy(ce, seq, seen_digests=digests)
log(f" Ledger {seq}: EntropyCount={count} Digest={digest[:16]}...")
log(f"Verified {end_seq - start_seq} ledgers: all quorum entropy, all unique")
log("PASS")

View File

@@ -1,71 +0,0 @@
defaults:
network:
node_count: 5
launcher: tmux
slave_delay: 0.2
features:
- ConsensusEntropy
- Export
track_features:
- ConsensusEntropy
- Export
log_levels:
TxQ: info
Protocol: debug
Peer: debug
LedgerConsensus: debug
NetworkOPs: info
env:
XAHAU_RESOURCE_PER_PORT: "1"
XAHAU_RNG_POLL_MS: "333"
tests:
# --- CE + Export (80% quorum, SHAMap convergence) ---
- name: steady_state_export_ce
script: .testnet/scenarios/export/steady_state_export.py
- name: retriable_export_ce
script: .testnet/scenarios/export/retriable_export.py
- name: export_degradation_ce
script: .testnet/scenarios/export/export_degradation.py
network:
node_env:
3:
XAHAUD_NO_EXPORT_SIG: "1"
4:
XAHAUD_NO_EXPORT_SIG: "1"
# CE + Export: 1 node suppressed, 4/5 = 80% quorum, should succeed
- name: export_ce_one_node_down
script: .testnet/scenarios/export/export_unanimity.py
params:
expect_success: true
network:
node_env:
4:
XAHAUD_NO_EXPORT_SIG: "1"
# --- Export only, no CE (100% quorum, unanimity) ---
- name: export_unanimity_all_up
script: .testnet/scenarios/export/export_unanimity.py
params:
expect_success: true
network:
features:
- Export
track_features:
- Export
- name: export_unanimity_node_down
script: .testnet/scenarios/export/export_unanimity.py
params:
expect_success: false
network:
features:
- Export
track_features:
- Export
node_env:
4:
XAHAUD_NO_EXPORT_SIG: "1"

View File

@@ -1,102 +0,0 @@
""":descr: Submit ttEXPORT with 2 nodes suppressing export sigs, verify it
retries via terRETRY_EXPORT until LLS expiry (not enough sigs for quorum).
Nodes 3 and 4 have XAHAUD_NO_EXPORT_SIG=1, so only 3/5 nodes provide
export signatures. With 80% quorum = ceil(5*0.8) = 4 required, the
export cannot reach quorum and should expire via tecEXPORT_EXPIRED.
Flow:
1. Fund alice and bob
2. alice submits ttEXPORT with tight LLS
3. Export retries (only 3/5 sigs available, need 4)
4. Verify export expires with tecEXPORT_EXPIRED
5. Verify subsequent payment still works (sequence not permanently blocked)
"""
from __future__ import annotations
from export_helpers import require_export, assert_shadow_ticket
async def scenario(ctx, log):
await require_export(ctx, log)
# --- Setup ---
await ctx.fund_accounts({"alice": 10000, "bob": 1000})
log("Accounts funded")
alice = ctx.account("alice")
bob = ctx.account("bob")
current_seq = ctx.validated_ledger_index(0)
log(f"Current ledger: {current_seq}")
log("Nodes 3,4 have XAHAUD_NO_EXPORT_SIG=1 (3/5 sigs, need 4)")
# --- Submit ttEXPORT (should retry then expire -- only 3/5 sigs) ---
result = await ctx.submit_and_wait(
{
"TransactionType": "Export",
"LastLedgerSequence": current_seq + 8,
"Fee": "1000000",
"ExportedTxn": {
"TransactionType": "Payment",
"Account": alice.address,
"Destination": bob.address,
"Amount": "1000000",
"Fee": "10",
"Sequence": 0,
"TicketSequence": 1,
"FirstLedgerSequence": current_seq + 1,
"LastLedgerSequence": current_seq + 6,
"Flags": 2147483648,
"SigningPubKey": "",
},
},
alice.wallet,
timeout=60,
)
final_seq = ctx.validated_ledger_index(0)
engine_result = result.get("engine_result", "")
log(f"Export completed at ledger {final_seq}, result: {engine_result}")
# With only 3/5 sigs and 80% quorum (4 required), export MUST fail
if engine_result == "tesSUCCESS":
raise AssertionError(
"Export should NOT have succeeded with only 3/5 sigs "
"(need 4 for 80% quorum) -- check XAHAUD_NO_EXPORT_SIG config"
)
# Should be tecEXPORT_EXPIRED (LLS reached without quorum)
if engine_result != "tecEXPORT_EXPIRED":
log(f"WARNING: expected tecEXPORT_EXPIRED, got {engine_result}")
log(f"Export failed as expected ({engine_result})")
# No shadow ticket should exist (export never reached quorum)
assert_shadow_ticket(ctx, alice.address, log, expect_exists=False)
# --- Verify subsequent payment works regardless ---
log("Submitting payment from alice to bob...")
pay_result = await ctx.submit_and_wait(
{
"TransactionType": "Payment",
"Destination": bob.address,
"Amount": "1000000",
"Fee": "12",
},
alice.wallet,
timeout=30,
)
pay_engine = pay_result.get("engine_result", "")
log(f"Payment result: {pay_engine}")
if pay_engine != "tesSUCCESS":
raise AssertionError(
f"Payment failed after expired export: {pay_engine} "
f"-- sequence may be blocked"
)
log("Payment succeeded -- account not permanently blocked")
log("PASS")

View File

@@ -1,144 +0,0 @@
"""Shared helpers for Export scenario tests."""
from __future__ import annotations
async def require_export(ctx, log):
"""Wait for first ledger and assert Export amendment is enabled."""
await ctx.wait_for_ledger_close(timeout=120)
feature = ctx.feature_check("Export", node_id=0)
if not feature or not feature.get("enabled", False):
raise AssertionError(f"Export not enabled: {feature}")
log("Export amendment enabled")
def find_export_txns(ctx, seq):
"""Find Export transactions in a ledger.
Returns list of Export transaction dicts.
"""
result = ctx.ledger(seq, transactions=True)
if not result:
return []
txns = result.get("ledger", {}).get("transactions", [])
return [tx for tx in txns if tx.get("TransactionType") == "Export"]
def dst_param(address):
"""Encode an address as a HookParameter entry for the DST param."""
from xrpl.core.addresscodec import decode_classic_address
dst_hex = decode_classic_address(address).hex().upper()
return {
"HookParameter": {
"HookParameterName": "445354", # "DST"
"HookParameterValue": dst_hex,
}
}
def assert_hook_accepted(meta, log, *, expected_emits=1):
"""Assert hook executed with ACCEPT and the expected emit count.
Checks sfHookExecutions in transaction metadata.
Returns the hook execution entry for further inspection.
"""
hook_execs = meta.get("HookExecutions", [])
if not hook_execs:
raise AssertionError("No HookExecutions in metadata")
exec_entry = hook_execs[0].get("HookExecution", {})
hook_result = exec_entry.get("HookResult", -1)
emit_count = exec_entry.get("HookEmitCount", -1)
return_code = exec_entry.get("HookReturnCode", "")
log(f" HookResult={hook_result} EmitCount={emit_count} ReturnCode={return_code}")
# HookResult 3 = ExitType::ACCEPT
if hook_result != 3:
raise AssertionError(
f"Hook did not ACCEPT: HookResult={hook_result} "
f"ReturnCode={return_code}"
)
if emit_count != expected_emits:
raise AssertionError(
f"Expected {expected_emits} emits, got {emit_count}"
)
# ReturnCode 0 = success; non-zero = ASSERT line number in hook
if return_code and str(return_code) != "0":
raise AssertionError(
f"Hook returned error code {return_code} "
f"(likely ASSERT failure at that line)"
)
return exec_entry
def assert_export_result(meta, log, *, require_signers=True):
"""Assert ExportResult is present and well-formed in metadata.
Returns the ExportResult dict.
"""
export_result = meta.get("ExportResult", {})
if not export_result:
raise AssertionError("ExportResult not found in metadata")
# Must have LedgerSequence and TransactionHash
if "LedgerSequence" not in export_result:
raise AssertionError("ExportResult missing LedgerSequence")
if "TransactionHash" not in export_result:
raise AssertionError("ExportResult missing TransactionHash")
# Must have the inner ExportedTxn object
inner = export_result.get("ExportedTxn", {})
if not inner:
raise AssertionError("ExportResult missing ExportedTxn (multisigned blob)")
log(f" ExportResult: seq={export_result['LedgerSequence']} "
f"hash={export_result['TransactionHash'][:16]}...")
# Inner tx should have Account, Destination, TransactionType
if "Account" not in inner:
raise AssertionError("ExportedTxn missing Account")
if "TransactionType" not in inner:
raise AssertionError("ExportedTxn missing TransactionType")
# Should have empty SigningPubKey (multisigned)
if inner.get("SigningPubKey", "NOT_EMPTY") != "":
raise AssertionError(
f"ExportedTxn SigningPubKey should be empty, "
f"got '{inner.get('SigningPubKey')}'"
)
if require_signers:
signers = inner.get("Signers", [])
if not signers:
raise AssertionError("ExportedTxn has no Signers (multisig not applied)")
log(f" Signers: {len(signers)} validator(s)")
return export_result
def assert_shadow_ticket(ctx, account_address, log, *, expect_exists=True):
"""Assert shadow ticket exists (or doesn't) for the account."""
obj_result = ctx.rpc.request(
0, "account_objects", {"account": account_address}
)
all_objects = (obj_result or {}).get("account_objects", [])
shadow_tickets = [
obj for obj in all_objects
if obj.get("LedgerEntryType") == "ShadowTicket"
]
log(f" Shadow tickets: {len(shadow_tickets)}")
if expect_exists and not shadow_tickets:
raise AssertionError("Expected shadow ticket but none found")
if not expect_exists and shadow_tickets:
raise AssertionError(
f"Expected no shadow tickets but found {len(shadow_tickets)}"
)
return shadow_tickets

View File

@@ -1,111 +0,0 @@
""":descr: Test Export without CE (unanimity mode). When all 5 nodes are up,
100% quorum is reachable and the export should succeed. When 1 node
suppresses sigs (4/5), unanimity fails and the export should expire.
Parameterized via `expect_success` kwarg from suite.yml.
Flow:
1. Fund alice and bob
2. alice submits ttEXPORT
3. Verify result matches expectation (tesSUCCESS or tecEXPORT_EXPIRED)
4. Verify ExportResult + shadow ticket on success, absence on failure
5. Verify subsequent payment works regardless
"""
from __future__ import annotations
from export_helpers import (
require_export,
assert_export_result,
assert_shadow_ticket,
)
async def scenario(ctx, log, expect_success=True):
await require_export(ctx, log)
# --- Setup ---
await ctx.fund_accounts({"alice": 10000, "bob": 1000})
log("Accounts funded")
alice = ctx.account("alice")
bob = ctx.account("bob")
current_seq = ctx.validated_ledger_index(0)
log(f"Current ledger: {current_seq}")
log(f"Expecting export {'success' if expect_success else 'failure (unanimity)'}")
# --- Submit ttEXPORT ---
result = await ctx.submit_and_wait(
{
"TransactionType": "Export",
"LastLedgerSequence": current_seq + 10,
"Fee": "1000000",
"ExportedTxn": {
"TransactionType": "Payment",
"Account": alice.address,
"Destination": bob.address,
"Amount": "1000000",
"Fee": "10",
"Sequence": 0,
"TicketSequence": 1,
"FirstLedgerSequence": current_seq + 1,
"LastLedgerSequence": current_seq + 8,
"Flags": 2147483648,
"SigningPubKey": "",
},
},
alice.wallet,
timeout=60,
)
final_seq = ctx.validated_ledger_index(0)
engine_result = result.get("engine_result", "")
meta = result.get("meta", {})
log(f"Export at ledger {final_seq}, result: {engine_result}")
if expect_success:
if engine_result != "tesSUCCESS":
raise AssertionError(
f"Expected tesSUCCESS, got {engine_result}"
)
# Assert ExportResult is well-formed with signers
assert_export_result(meta, log, require_signers=True)
# Assert shadow ticket was created
assert_shadow_ticket(ctx, alice.address, log, expect_exists=True)
log("Export succeeded as expected (all validators signed)")
else:
if engine_result == "tesSUCCESS":
raise AssertionError(
"Export should NOT have succeeded with sub-unanimity"
)
log(f"Export failed as expected ({engine_result})")
# No shadow ticket should exist
assert_shadow_ticket(ctx, alice.address, log, expect_exists=False)
# --- Verify subsequent payment works ---
log("Submitting payment from alice to bob...")
pay_result = await ctx.submit_and_wait(
{
"TransactionType": "Payment",
"Destination": bob.address,
"Amount": "1000000",
"Fee": "12",
},
alice.wallet,
timeout=30,
)
pay_engine = pay_result.get("engine_result", "")
log(f"Payment result: {pay_engine}")
if pay_engine != "tesSUCCESS":
raise AssertionError(f"Payment failed: {pay_engine}")
log("Payment succeeded -- account not blocked")
log("PASS")

View File

@@ -1,94 +0,0 @@
""":descr: Submit ttEXPORT directly (no hook), verify it succeeds with
ExportResult in metadata. Then submit a payment from the same account
to verify sequence handling doesn't block subsequent transactions.
Flow:
1. Fund alice and bob
2. alice submits ttEXPORT with inner payment -> tesSUCCESS (provisional)
3. Validators attach sigs via proposals -> quorum -> ExportResult in metadata
4. alice submits a Payment to bob -> should succeed (sequence not blocked)
"""
from __future__ import annotations
from export_helpers import require_export, assert_export_result, assert_shadow_ticket
async def scenario(ctx, log):
await require_export(ctx, log)
# --- Setup ---
await ctx.fund_accounts({"alice": 10000, "bob": 1000})
log("Accounts funded")
alice = ctx.account("alice")
bob = ctx.account("bob")
current_seq = ctx.validated_ledger_index(0)
log(f"Current ledger: {current_seq}")
# --- 1. Submit ttEXPORT ---
result = await ctx.submit_and_wait(
{
"TransactionType": "Export",
"LastLedgerSequence": current_seq + 15,
"Fee": "1000000",
"ExportedTxn": {
"TransactionType": "Payment",
"Account": alice.address,
"Destination": bob.address,
"Amount": "1000000",
"Fee": "10",
"Sequence": 0,
"TicketSequence": 1,
"FirstLedgerSequence": current_seq + 1,
"LastLedgerSequence": current_seq + 10,
"Flags": 2147483648,
"SigningPubKey": "",
},
},
alice.wallet,
timeout=60,
)
export_seq = ctx.validated_ledger_index(0)
engine_result = result.get("engine_result", "")
log(f"Export completed at ledger {export_seq}, result: {engine_result}")
if engine_result != "tesSUCCESS":
raise AssertionError(
f"Expected tesSUCCESS for export, got {engine_result}"
)
# Assert ExportResult is well-formed with signers
meta = result.get("meta", {})
assert_export_result(meta, log, require_signers=True)
# Assert shadow ticket was created
assert_shadow_ticket(ctx, alice.address, log, expect_exists=True)
# --- 2. Submit Payment from same account ---
log("Submitting payment from alice to bob...")
pay_result = await ctx.submit_and_wait(
{
"TransactionType": "Payment",
"Destination": bob.address,
"Amount": "1000000",
"Fee": "12",
},
alice.wallet,
timeout=30,
)
pay_engine = pay_result.get("engine_result", "")
log(f"Payment result: {pay_engine}")
if pay_engine != "tesSUCCESS":
raise AssertionError(f"Payment failed: {pay_engine}")
log(
f"Both transactions succeeded: "
f"Export at ledger {export_seq}, Payment at ledger {ctx.validated_ledger_index(0)}"
)
log("Sequence handling OK - export didn't block subsequent txns")
log("PASS")

View File

@@ -1,211 +0,0 @@
""":descr: install xport hook, trigger export, verify emitted ttEXPORT lifecycle
1. Fund alice (hook holder), bob (trigger), carol (export destination)
2. Install xport hook on alice
3. bob pays alice with DST=carol → hook calls xport() → emits ttEXPORT
4. Emitted ttEXPORT enters open ledger, validators attach sigs via proposals
5. Verify Export transaction appears in a subsequent ledger
"""
from __future__ import annotations
from export_helpers import (
require_export,
find_export_txns,
dst_param,
assert_hook_accepted,
assert_export_result,
assert_shadow_ticket,
)
# C source for the xport hook — verbatim from src/test/app/Export_test_hooks.h
# On Payment to the hook account, exports a 1 XAH payment to the DST param.
XPORT_HOOK_C = r"""
#include <stdint.h>
extern int32_t _g(uint32_t id, uint32_t maxiter);
extern int64_t accept(uint32_t read_ptr, uint32_t read_len, int64_t error_code);
extern int64_t rollback(uint32_t read_ptr, uint32_t read_len, int64_t error_code);
extern int64_t xport(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
extern int64_t xport_reserve(uint32_t count);
extern int64_t hook_account(uint32_t write_ptr, uint32_t write_len);
extern int64_t otxn_param(uint32_t write_ptr, uint32_t write_len, uint32_t name_ptr, uint32_t name_len);
extern int64_t otxn_type(void);
extern int64_t ledger_seq(void);
#define SBUF(x) (uint32_t)(x), sizeof(x)
#define ASSERT(x) if (!(x)) rollback((uint32_t)#x, sizeof(#x), __LINE__)
#define ttPAYMENT 0
#define tfCANONICAL 0x80000000UL
#define amAMOUNT 1
#define amFEE 8
#define atACCOUNT 1
#define atDESTINATION 3
#define ENCODE_TT(buf_out, tt) \
buf_out[0] = 0x12U; buf_out[1] = (tt >> 8) & 0xFFU; buf_out[2] = tt & 0xFFU; buf_out += 3;
#define ENCODE_FLAGS(buf_out, flags) \
buf_out[0] = 0x22U; buf_out[1] = (flags >> 24) & 0xFFU; buf_out[2] = (flags >> 16) & 0xFFU; \
buf_out[3] = (flags >> 8) & 0xFFU; buf_out[4] = flags & 0xFFU; buf_out += 5;
#define ENCODE_SEQUENCE(buf_out, seq) \
buf_out[0] = 0x24U; buf_out[1] = (seq >> 24) & 0xFFU; buf_out[2] = (seq >> 16) & 0xFFU; \
buf_out[3] = (seq >> 8) & 0xFFU; buf_out[4] = seq & 0xFFU; buf_out += 5;
#define ENCODE_FLS(buf_out, fls) \
buf_out[0] = 0x20U; buf_out[1] = 0x1AU; buf_out[2] = (fls >> 24) & 0xFFU; \
buf_out[3] = (fls >> 16) & 0xFFU; buf_out[4] = (fls >> 8) & 0xFFU; \
buf_out[5] = fls & 0xFFU; buf_out += 6;
#define ENCODE_LLS(buf_out, lls) \
buf_out[0] = 0x20U; buf_out[1] = 0x1BU; buf_out[2] = (lls >> 24) & 0xFFU; \
buf_out[3] = (lls >> 16) & 0xFFU; buf_out[4] = (lls >> 8) & 0xFFU; \
buf_out[5] = lls & 0xFFU; buf_out += 6;
#define ENCODE_DROPS(buf_out, drops, amt_type) \
buf_out[0] = 0x60U + amt_type; buf_out[1] = 0x40U + ((drops >> 56) & 0x3FU); \
buf_out[2] = (drops >> 48) & 0xFFU; buf_out[3] = (drops >> 40) & 0xFFU; \
buf_out[4] = (drops >> 32) & 0xFFU; buf_out[5] = (drops >> 24) & 0xFFU; \
buf_out[6] = (drops >> 16) & 0xFFU; buf_out[7] = (drops >> 8) & 0xFFU; \
buf_out[8] = drops & 0xFFU; buf_out += 9;
#define ENCODE_SIGNING_PUBKEY_EMPTY(buf_out) \
buf_out[0] = 0x73U; buf_out[1] = 0x00U; buf_out += 2;
#define ENCODE_ACCOUNT(buf_out, acc, acc_type) \
buf_out[0] = 0x80U + acc_type; buf_out[1] = 0x14U; \
for (int i = 0; i < 20; ++i) buf_out[2+i] = acc[i]; buf_out += 22;
#define PREPARE_PAYMENT_SIMPLE_SIZE 270U
int64_t hook(uint32_t reserved) {
_g(1, 1);
if (otxn_type() != ttPAYMENT)
return accept(0, 0, 0);
ASSERT(xport_reserve(1) == 1);
uint8_t dst[20];
int64_t dst_len = otxn_param(SBUF(dst), "DST", 3);
ASSERT(dst_len == 20);
uint8_t acc[20];
ASSERT(hook_account(SBUF(acc)) == 20);
uint32_t cls = (uint32_t)ledger_seq();
uint8_t tx[PREPARE_PAYMENT_SIMPLE_SIZE];
uint8_t* buf = tx;
ENCODE_TT(buf, ttPAYMENT);
ENCODE_FLAGS(buf, tfCANONICAL);
ENCODE_SEQUENCE(buf, 0);
ENCODE_FLS(buf, cls + 1);
ENCODE_LLS(buf, cls + 5);
// sfTicketSequence = UINT32 field 41 = 0x20 0x29
buf[0] = 0x20U; buf[1] = 0x29U;
buf[2] = 0; buf[3] = 0; buf[4] = 0; buf[5] = 1;
buf += 6;
uint64_t drops = 1000000;
ENCODE_DROPS(buf, drops, amAMOUNT);
ENCODE_DROPS(buf, 10, amFEE);
ENCODE_SIGNING_PUBKEY_EMPTY(buf);
ENCODE_ACCOUNT(buf, acc, atACCOUNT);
ENCODE_ACCOUNT(buf, dst, atDESTINATION);
uint8_t hash[32];
int64_t xport_result = xport(SBUF(hash), (uint32_t)tx, buf - tx);
ASSERT(xport_result == 32);
return accept(0, 0, 0);
}
"""
async def scenario(ctx, log):
# Wait for network to start and amendments to activate
await require_export(ctx, log)
# --- Setup ---
await ctx.fund_accounts({"alice": 10000, "bob": 10000, "carol": 1000})
log("Accounts funded")
alice = ctx.account("alice")
carol = ctx.account("carol")
# Compile and install xport hook on alice
wasm = ctx.compile_hook(XPORT_HOOK_C, label="xport")
await ctx.submit_and_wait(
{
"TransactionType": "SetHook",
"Hooks": [
{
"Hook": {
"CreateCode": wasm.hex().upper(),
"HookOn": "0" * 64,
"HookNamespace": "0" * 64,
"HookApiVersion": 0,
"Flags": 1, # hsfOVERRIDE
}
}
],
"Fee": "100000000",
},
alice.wallet,
)
log(
f"Hook installed on alice ({alice.address[:12]}...) "
f"ledger {ctx.validated_ledger_index(0)}"
)
# --- Trigger ---
# bob pays alice → hook calls xport() → emits ttEXPORT
trigger_result = await ctx.submit_and_wait(
{
"TransactionType": "Payment",
"Destination": alice.address,
"Amount": "100000000",
"Fee": "1000000",
"HookParameters": [dst_param(carol.address)],
},
ctx.account("bob").wallet,
)
trigger_seq = ctx.validated_ledger_index(0)
log(f"Export triggered at ledger {trigger_seq}")
# Assert hook fired with ACCEPT and emitted 1 tx
trigger_meta = trigger_result.get("meta", {})
assert_hook_accepted(trigger_meta, log, expected_emits=1)
# --- Verify: check each ledger close for the Export transaction ---
max_ledgers = 10
for i in range(max_ledgers):
await ctx.wait_for_ledgers(1, node_id=0, timeout=30)
seq = ctx.validated_ledger_index(0)
exports = find_export_txns(ctx, seq)
if exports:
export_tx = exports[0]
meta = export_tx.get("meta", export_tx.get("metaData", {}))
result = meta.get("TransactionResult", "")
log(f"Ledger {seq}: Export txn found, result={result}")
if result != "tesSUCCESS":
raise AssertionError(f"Export did not succeed: {result}")
# Assert ExportResult is well-formed with signers and inner tx
assert_export_result(meta, log, require_signers=True)
# Assert shadow ticket was created
assert_shadow_ticket(ctx, alice.address, log, expect_exists=True)
log("PASS")
return
log(f"Ledger {seq}: no Export txn yet")
raise AssertionError(
f"No Export transaction found after {max_ledgers} ledger closes"
)

View File

@@ -1,60 +0,0 @@
"""Shared helpers for ConsensusEntropy scenario tests."""
from __future__ import annotations
ZERO_DIGEST = "0" * 64
async def require_entropy(ctx, log):
"""Wait for first ledger and assert ConsensusEntropy is enabled."""
await ctx.wait_for_ledger_close(timeout=120)
feature = ctx.feature_check("ConsensusEntropy", node_id=0)
if not feature or not feature.get("enabled", False):
raise AssertionError(f"ConsensusEntropy not enabled: {feature}")
log("ConsensusEntropy enabled")
def get_entropy_tx(ctx, seq):
"""Fetch ledger and return (ce_tx, user_txns) or raise."""
result = ctx.ledger(seq, transactions=True)
if not result:
raise AssertionError(f"Ledger {seq}: fetch failed")
txns = result.get("ledger", {}).get("transactions", [])
ce = [tx for tx in txns if tx.get("TransactionType") == "ConsensusEntropy"]
user = [tx for tx in txns if tx.get("TransactionType") != "ConsensusEntropy"]
if len(ce) != 1:
raise AssertionError(
f"Ledger {seq}: expected 1 ConsensusEntropy txn, got {len(ce)}"
)
return ce[0], user
def entropy_fields(ce_tx):
"""Return (digest, entropy_count, is_zero) from a ConsensusEntropy tx."""
digest = ce_tx.get("Digest", "")
entropy_count = ce_tx.get("EntropyCount", -1)
is_zero = digest == ZERO_DIGEST and entropy_count == 0
return digest, entropy_count, is_zero
def assert_valid_entropy(ce_tx, seq, seen_digests=None):
"""Assert non-zero quorum-met entropy. Optionally check uniqueness."""
digest, entropy_count, is_zero = entropy_fields(ce_tx)
if is_zero or not digest:
raise AssertionError(f"Ledger {seq}: zero/empty Digest")
if entropy_count < 4:
raise AssertionError(
f"Ledger {seq}: EntropyCount={entropy_count} < 4 (sub-quorum)"
)
if seen_digests is not None:
if digest in seen_digests:
raise AssertionError(f"Ledger {seq}: duplicate Digest {digest[:16]}...")
seen_digests.add(digest)
return digest, entropy_count

View File

@@ -1,42 +0,0 @@
defaults:
network:
node_count: 5
launcher: tmux
slave_delay: 0.2
features:
- ConsensusEntropy
track_features:
- ConsensusEntropy
log_levels:
TxQ: info
Protocol: debug
Peer: debug
LedgerConsensus: debug
NetworkOPs: info
env:
XAHAU_RESOURCE_PER_PORT: "1"
XAHAU_RNG_POLL_MS: "333"
tests:
- name: steady_state_entropy
script: .testnet/scenarios/entropy/steady_state_entropy.py
- name: steady_state_entropy_fast_start
script: .testnet/scenarios/entropy/steady_state_entropy.py
network:
env:
XAHAUD_BOOTSTRAP_FAST_START: "1"
- name: entropy_with_transactions
script: .testnet/scenarios/entropy/entropy_with_transactions.py
- name: quorum_recovery_smoke
script: .testnet/scenarios/entropy/quorum_recovery_smoke.py
- name: quorum_degradation_smoke
script: .testnet/scenarios/entropy/quorum_degradation_smoke.py
network:
log_levels:
LedgerConsensus: trace
# Export scenarios: see export-suite.yml

View File

@@ -3,11 +3,11 @@
"C_Cpp.clang_format_path": ".clang-format",
"C_Cpp.clang_format_fallbackStyle": "{ ColumnLimit: 0 }",
"[cpp]":{
"editor.wordBasedSuggestions": "off",
"editor.wordBasedSuggestions": false,
"editor.suggest.insertMode": "replace",
"editor.semanticHighlighting.enabled": true,
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
"editor.formatOnSave": true
"editor.formatOnSave": false
}
}

View File

@@ -1,217 +0,0 @@
# API Changelog
This changelog is intended to list all updates to the [public API methods](https://xrpl.org/public-api-methods.html).
For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior.
The API version controls the API behavior you see. This includes what properties you see in responses, what parameters you're permitted to send in requests, and so on. You specify the API version in each of your requests. When a breaking change is introduced to the `rippled` API, a new version is released. To avoid breaking your code, you should set (or increase) your version when you're ready to upgrade.
For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release.
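For concreteness, here is a minimal sketch of pinning a version in a request. The localhost URL, the port (5005 is the conventional admin JSON-RPC port in rippled-example.cfg), and the choice of `server_info` are illustrative only; any method takes `api_version` the same way.
```python
# Minimal sketch: requesting API version 2 over JSON-RPC.
# URL/port are assumptions for illustration.
import json
import urllib.request

payload = {
    "method": "server_info",
    "params": [{"api_version": 2}],  # pin the API version per request
}
req = urllib.request.Request(
    "http://localhost:5005/",
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp)["result"])
```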
## API Version 2
API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.
#### Removed methods
In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)
- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.
#### Modifications to JSON transaction element in V2
In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)
- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`
Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:
- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`
This change affects the following methods:
- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
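Clients that must read both shapes can normalize them with a small shim. This is a hypothetical helper, not part of any library; the field filtering is illustrative.
```python
from __future__ import annotations

# Hypothetical shim: extract (tx_json, meta) from a `tx` response in either
# API v1 (transaction fields inline in `result`) or API v2 (`tx_json` sibling).
def extract_tx(result: dict) -> tuple[dict, dict]:
    meta = result.get("meta") or result.get("metadata") or {}
    tx_json = result.get("tx_json")
    if tx_json is None:  # v1: transaction fields are inline in the result
        skip = {"meta", "metadata", "validated", "ledger_index", "ledger_hash"}
        tx_json = {k: v for k, v in result.items() if k not in skip}
    return tx_json, meta
```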
#### Modification to `Payment` transaction JSON schema
When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733)
- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733)
- On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
- On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
- On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
- On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
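The input-side rules above reduce to a small amount of logic. A sketch follows; the helper name and error string are illustrative, not rippled code.
```python
# Sketch of the Amount/DeliverMax input rules described above.
def normalize_payment_input(tx: dict) -> dict:
    amount, deliver_max = tx.get("Amount"), tx.get("DeliverMax")
    if amount is not None and deliver_max is not None and amount != deliver_max:
        raise ValueError("rpcINVALID_PARAMS: Amount and DeliverMax differ")
    if deliver_max is None and amount is not None:
        tx = {**tx, "DeliverMax": amount}  # Amount accepted as an input alias
    return tx
```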
#### Modifications to account_info response
- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) (https://github.com/XRPLF/rippled/pull/3770)
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
- (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)
#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response
- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571)
- The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
- returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
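Taken together, the API v2 `account_tx` parameter checks above look roughly like this (hypothetical helper, not rippled code):
```python
from __future__ import annotations

# Sketch of the account_tx parameter rules above.
def validate_account_tx_params(params: dict) -> str | None:
    has_range = "ledger_index_min" in params or "ledger_index_max" in params
    if has_range and ("ledger_index" in params or "ledger_hash" in params):
        return "invalidParams"  # mixing a range with a specific ledger
    for key in ("binary", "forward"):
        if key in params and not isinstance(params[key], bool):
            return "invalidParams"  # non-boolean flag values are rejected
    return None
```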
#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response
- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
## API Version 1
This version is supported by all `rippled` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified.
The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
### Inconsistency: server_info - network_id
The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead.
## XRP Ledger server version 2.4.0
As of 2025-01-28, version 2.4.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu).
### Additions and bugfixes in 2.4.0
- `ledger_entry`: `state` is added as an alias for `ripple_state`.
- `ledger_entry`: Enables case-insensitive filtering by canonical name in addition to case-sensitive filtering by RPC name.
- `validators`: Added new field `validator_list_threshold` in response.
- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate)
- Signing methods autofill fees better and properly handle transactions that don't have a base fee, and will also autofill the `NetworkID` field.
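Going by the linked XLS-0069d draft, a `simulate` request might look roughly like this. The addresses and amount are placeholders, not real accounts.
```python
# Illustrative request shape for the new simulate RPC, per XLS-0069d.
# Addresses and amounts are placeholders.
simulate_request = {
    "method": "simulate",
    "params": [{
        "tx_json": {
            "TransactionType": "Payment",
            "Account": "rALICExxxxxxxxxxxxxxxxxxxxxxxxxxxx",
            "Destination": "rBOBxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
            "Amount": "1000000",
        },
    }],
}
```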
## XRP Ledger server version 2.3.0
[Version 2.3.0](https://github.com/XRPLF/rippled/releases/tag/2.3.0) was released on Nov 25, 2024.
### Breaking changes in 2.3.0
- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs).
Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC.
### Additions and bugfixes in 2.3.0
- `book_changes`: Returns a `validated` field in its response, which was missing in prior versions.
## XRP Ledger server version 2.2.0
[Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive):
- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
## XRP Ledger server version 2.0.0
[Version 2.0.0](https://github.com/XRPLF/rippled/releases/tag/2.0.0) was released on Jan 9, 2024. The following additions are non-breaking (because they are purely additive):
- `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries.
- In `Payment` transactions, `DeliverMax` has been added. This is a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking.
- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting).
## XRP Ledger server version 1.12.0
[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive).
- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. (https://github.com/XRPLF/rippled/pull/4427)
- `ports` contains objects, each containing a `port` for the listening port (a number string), and a `protocol` array listing the supported protocols on that port.
- This allows crawlers to build a more detailed topology without needing to port-scan nodes.
- (For peers and other non-admin clients, the info about admin ports is excluded.)
- Clawback: The following additions are gated by the Clawback amendment (`featureClawback`). (https://github.com/XRPLF/rippled/pull/4553)
- Adds an [AccountRoot flag](https://xrpl.org/accountroot.html#accountroot-flags) called `lsfAllowTrustLineClawback` (https://github.com/XRPLF/rippled/pull/4617)
- Adds the corresponding `asfAllowTrustLineClawback` [AccountSet Flag](https://xrpl.org/accountset.html#accountset-flags) as well.
- Clawback is disabled by default, so if an issuer desires the ability to claw back funds, they must use an `AccountSet` transaction to set the AllowTrustLineClawback flag (a sketch follows this list). They must do this before creating any trust lines, offers, escrows, payment channels, or checks.
- Adds the [Clawback transaction type](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-39d-clawback/README.md#331-clawback-transaction), containing these fields:
- `Account`: The issuer of the asset being clawed back. Must also be the sender of the transaction.
- `Amount`: The amount being clawed back, with the `Amount.issuer` being the token holder's address.
- Adds [AMM](https://github.com/XRPLF/XRPL-Standards/discussions/78) ([#4294](https://github.com/XRPLF/rippled/pull/4294), [#4626](https://github.com/XRPLF/rippled/pull/4626)) feature:
- Adds `amm_info` API to retrieve AMM information for a given tokens pair.
- Adds `AMMCreate` transaction type to create `AMM` instance.
- Adds `AMMDeposit` transaction type to deposit funds into `AMM` instance.
- Adds `AMMWithdraw` transaction type to withdraw funds from `AMM` instance.
- Adds `AMMVote` transaction type to vote for the trading fee of `AMM` instance.
- Adds `AMMBid` transaction type to bid for the Auction Slot of `AMM` instance.
- Adds `AMMDelete` transaction type to delete `AMM` instance.
- Adds `sfAMMID` to `AccountRoot` to indicate that the account is `AMM`'s account. `AMMID` is used to fetch `ltAMM`.
- Adds `lsfAMMNode` `TrustLine` flag to indicate that one side of the `TrustLine` is `AMM` account.
- Adds `tfLPToken`, `tfSingleAsset`, `tfTwoAsset`, `tfOneAssetLPToken`, `tfLimitLPToken`, `tfTwoAssetIfEmpty`,
`tfWithdrawAll`, `tfOneAssetWithdrawAll` which allow a trader to specify different fields combination
for `AMMDeposit` and `AMMWithdraw` transactions.
- Adds new transaction result codes:
- tecUNFUNDED_AMM: insufficient balance to fund AMM. The account does not have funds for liquidity provision.
- tecAMM_BALANCE: AMM has invalid balance. Calculated balances greater than the current pool balances.
- tecAMM_FAILED: AMM transaction failed. Fails due to a processing failure.
- tecAMM_INVALID_TOKENS: AMM invalid LP tokens. Invalid input values, format, or calculated values.
- tecAMM_EMPTY: AMM is in empty state. Transaction requires AMM in non-empty state (LP tokens > 0).
- tecAMM_NOT_EMPTY: AMM is not in empty state. Transaction requires AMM in empty state (LP tokens == 0).
- tecAMM_ACCOUNT: AMM account. Clawback of AMM account.
- tecINCOMPLETE: Some work was completed, but more submissions are required to finish. AMMDelete partially deletes the trustlines.
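As a concrete illustration of the Clawback opt-in described above, an issuer's `AccountSet` would look roughly like this. The address is a placeholder; `asfAllowTrustLineClawback` has flag value 16 in rippled.
```python
# Illustrative AccountSet enabling clawback before any trust lines exist.
# The address is a placeholder; 16 = asfAllowTrustLineClawback.
clawback_opt_in = {
    "TransactionType": "AccountSet",
    "Account": "rISSUERxxxxxxxxxxxxxxxxxxxxxxxxxxx",
    "SetFlag": 16,
}
```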
## XRP Ledger server version 1.11.0
[Version 1.11.0](https://github.com/XRPLF/rippled/releases/tag/1.11.0) was released on Jun 20, 2023.
### Breaking changes in 1.11
- Added the ability to mark amendments as obsolete. For the `feature` admin API, there is a new possible value for the `vetoed` field. (https://github.com/XRPLF/rippled/pull/4291)
- The value of `vetoed` can now be `true`, `false`, or `"Obsolete"`.
- Removed the acceptance of seeds or public keys in place of account addresses. (https://github.com/XRPLF/rippled/pull/4404)
- This simplifies the API and encourages better security practices (i.e. seeds should never be sent over the network).
- For the `ledger_data` method, when all entries are filtered out, the `state` field of the response is now an empty list (in other words, an empty array, `[]`). (Previously, it would return `null`.) While this is technically a breaking change, the new behavior is consistent with the documentation, so this is considered only a bug fix. (https://github.com/XRPLF/rippled/pull/4398)
- If and when the `fixNFTokenRemint` amendment activates, there will be a new AccountRoot field, `FirstNFTSequence`. This field is set to the current account sequence when the account issues their first NFT. If an account has not issued any NFTs, then the field is not set. ([#4406](https://github.com/XRPLF/rippled/pull/4406))
- There is a new account deletion restriction: an account can only be deleted if `FirstNFTSequence` + `MintedNFTokens` + `256` is less than the current ledger sequence (see the sketch after this list).
- This is potentially a breaking change if clients have logic for determining whether an account can be deleted.
- NetworkID
- For sidechains and networks with a network ID greater than 1024, there is a new [transaction common field](https://xrpl.org/transaction-common-fields.html), `NetworkID`. (https://github.com/XRPLF/rippled/pull/4370)
- This field helps to prevent replay attacks and is now required for chains whose network ID is 1025 or higher.
- The field must be omitted for Mainnet, so there is no change for Mainnet users.
- There are three new local error codes:
- `telNETWORK_ID_MAKES_TX_NON_CANONICAL`: a `NetworkID` is present but the chain's network ID is less than 1025. Remove the field from the transaction, and try again.
- `telREQUIRES_NETWORK_ID`: a `NetworkID` is required, but is not present. Add the field to the transaction, and try again.
- `telWRONG_NETWORK`: a `NetworkID` is specified, but it is for a different network. Submit the transaction to a different server which is connected to the correct network.
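The deletion restriction and the NetworkID rules above reduce to small checks. A hypothetical sketch follows; the constants come from the text, and the helpers are not rippled APIs.
```python
from __future__ import annotations

# fixNFTokenRemint deletion rule: FirstNFTSequence + MintedNFTokens + 256
# must be less than the current ledger sequence.
def can_delete_account(first_nft_seq: int, minted_nftokens: int,
                       ledger_seq: int) -> bool:
    return first_nft_seq + minted_nftokens + 256 < ledger_seq

# NetworkID rule: required for chains with network ID 1025 or higher,
# must be omitted below that. Returns the tel error a transaction would
# hit, or None if acceptable.
def check_network_id(tx_network_id: int | None,
                     chain_network_id: int) -> str | None:
    if chain_network_id <= 1024:
        if tx_network_id is not None:
            return "telNETWORK_ID_MAKES_TX_NON_CANONICAL"
    elif tx_network_id is None:
        return "telREQUIRES_NETWORK_ID"
    elif tx_network_id != chain_network_id:
        return "telWRONG_NETWORK"
    return None
```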
### Additions and bug fixes in 1.11
- Added `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `tx` and `account_tx` responses. (https://github.com/XRPLF/rippled/pull/4447)
- Added an `account_flags` object to the `account_info` method response. (https://github.com/XRPLF/rippled/pull/4459)
- Added `NFTokenPages` to the `account_objects` RPC. (https://github.com/XRPLF/rippled/pull/4352)
- Fixed: `marker` returned from the `account_lines` command would not work on subsequent commands. (https://github.com/XRPLF/rippled/pull/4361)
## XRP Ledger server version 1.10.0
[Version 1.10.0](https://github.com/XRPLF/rippled/releases/tag/1.10.0)
was released on Mar 14, 2023.
### Breaking changes in 1.10
- If the `XRPFees` feature is enabled, the `fee_ref` field will be
removed from the [ledger subscription stream](https://xrpl.org/subscribe.html#ledger-stream), because it will no longer
have any meaning.
# Unit tests for API changes
The following information is useful to developers contributing to this project:
The purpose of unit tests is to catch bugs and prevent regressions. In general, it often makes sense to create a test function when there is a breaking change to the API. For APIs that have changed in a new API version, the tests should be modified so that both the prior version and the new version are properly tested.
To take one example: for `account_info` version 1, WebSocket and JSON-RPC behavior should be tested. The latest API version, i.e. API version 2, should be tested over WebSocket, JSON-RPC, and command line.

BUILD.md
View File

@@ -1,616 +0,0 @@
| :warning: **WARNING** :warning:
|---|
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |
> These instructions also assume a basic familiarity with Conan and CMake.
> If you are unfamiliar with Conan,
> you can read our [crash course](./docs/build/conan.md)
> or the official [Getting Started][3] walkthrough.
## Branches
For a stable release, choose the `master` branch or one of the [tagged
releases](https://github.com/Xahau/xahaud/releases).
```
git checkout master
```
For the latest release candidate, choose the `release` branch.
```
git checkout release
```
For the latest set of untested features, or to contribute, choose the `develop`
branch.
```
git checkout develop
```
## Minimum Requirements
See [System Requirements](https://xrpl.org/system-requirements.html).
Building rippled generally requires git, Python, Conan, CMake, and a C++ compiler. Some guidance on setting up such a [C++ development environment can be found here](./docs/build/environment.md).
- [Python 3.7](https://www.python.org/downloads/)
- [Conan 2.x](https://conan.io/downloads)
- [CMake 3.16](https://cmake.org/download/)
`xahaud` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:
| Compiler | Version |
|-------------|---------|
| GCC | 11 |
| Clang | 13 |
| Apple Clang | 13.1.6 |
| MSVC | 19.23 |
### Linux
The Ubuntu operating system has received the highest level of
quality assurance, testing, and support.
Here are [sample instructions for setting up a C++ development environment on Linux](./docs/build/environment.md#linux).
### Mac
Many xahaud engineers use macOS for development.
Here are [sample instructions for setting up a C++ development environment on macOS](./docs/build/environment.md#macos).
### Windows
We don't recommend Windows for `xahaud` production at this time. As of
November 2025, Ubuntu has the highest level of quality assurance, testing,
and support.
Windows developers should use Visual Studio 2019. `xahaud` isn't
compatible with [Boost](https://www.boost.org/) 1.78 or 1.79, and Conan
can't build earlier Boost versions.
## Steps
### Set Up Conan
After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python, Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.
These instructions assume a basic familiarity with Conan and CMake.
If you are unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official [Getting Started][3] walkthrough.
You'll need at least one Conan profile:
```
conan profile detect --force
```
Update the compiler settings:
For Conan 2, you can edit the profile directly at `~/.conan2/profiles/default`,
or use the Conan CLI. Ensure C++20 is set:
```
conan profile show
```
Look for `compiler.cppstd=20` in the output. If it's not set, edit the profile:
```
# Edit ~/.conan2/profiles/default and ensure these settings exist:
[settings]
compiler.cppstd=20
```
Configure Conan (1.x only) to use recipe revisions:
```
conan config set general.revisions_enabled=1
```
**Linux** developers will commonly have a default Conan [profile][] that compiles
with GCC and links with libstdc++.
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
then you will need to choose the `libstdc++11` ABI:
```
# In ~/.conan2/profiles/default, ensure:
[settings]
compiler.libcxx=libstdc++11
```
Ensure inter-operability between `boost::string_view` and `std::string_view` types:
```
# Conan 1.x
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_BEAST_USE_STD_STRING_VIEW"]' default
conan profile update 'env.CXXFLAGS="-DBOOST_BEAST_USE_STD_STRING_VIEW"' default
# Conan 2.x: add to ~/.conan2/profiles/default under [conf]:
# tools.build:cxxflags+=["-DBOOST_BEAST_USE_STD_STRING_VIEW"]
```
If you have other flags in the `conf.tools.build` or `env.CXXFLAGS` sections, make sure to retain the existing flags and append the new ones. You can check them with:
```
conan profile show default
```
**Windows** developers may need to use the x64 native build tools.
An easy way to do that is to run the shortcut "x64 Native Tools Command
Prompt" for the version of Visual Studio that you have installed.
Windows developers must also build `xahaud` and its dependencies for the x64
architecture.
```
# In ~/.conan2/profiles/default, ensure:
[settings]
arch=x86_64
```
### Multiple compilers
```
# In ~/.conan2/profiles/default, add under [conf] section:
[conf]
tools.build:compiler_executables={"c": "<path>", "cpp": "<path>"}
```
For setting environment variables for dependencies:
```
# In ~/.conan2/profiles/default, add under [buildenv] section:
[buildenv]
CC=<path>
CXX=<path>
```
It should choose the compiler for dependencies as well,
but not all of them have a Conan recipe that respects this setting (yet).
For the rest, you can set these environment variables (Conan 1.x syntax shown;
with Conan 2, set `CC`/`CXX` in the `[buildenv]` section as above).
Replace `<path>` with paths to the desired compilers:
- `conan profile update env.CC=<path> default`
- `conan profile update env.CXX=<path> default`
Export our [Conan recipe for Snappy](./external/snappy).
It does not explicitly link the C++ standard library,
which allows you to statically link it with GCC, if you want.
```
conan export external/snappy --version 1.1.10 --user xahaud --channel stable
```
Export our [Conan recipe for RocksDB](./external/rocksdb).
It does not override paths to dependencies when building with Visual Studio.
```
# Conan 1.x
conan export external/rocksdb rocksdb/6.29.5@
# Conan 2.x
conan export --version 6.29.5 external/rocksdb
```
Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.
```
conan export external/soci --version 4.0.3 --user xahaud --channel stable
```
Export our [Conan recipe for WasmEdge](./external/wasmedge).
```
conan export external/wasmedge --version 0.11.2 --user xahaud --channel stable
```
Export our [Conan recipe for NuDB](./external/nudb).
It fixes some source files to add missing `#include`s.
```
# Conan 1.x
conan export external/nudb nudb/2.0.8@
# Conan 2.x
conan export --version 2.0.8 external/nudb
```
### Build and Test
1. Create a build directory and move into it.
```
mkdir .build
cd .build
```
You can use any directory name. Conan treats your working directory as an
install folder and generates files with implementation details.
You don't need to worry about these files, but make sure to change
your working directory to your build directory before calling Conan.
**Note:** With Conan 1.x you can specify a directory for the installation files
by adding the `install-folder` or `-if` option to every `conan install` command
in the next step; with Conan 2, use the `--output-folder` option instead.
2. Use conan to generate CMake files for every configuration you want to build:
```
conan install .. --output-folder . --build missing --settings build_type=Release
conan install .. --output-folder . --build missing --settings build_type=Debug
```
To build Debug in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`.
For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
you only need to run this command once.
For a multi-configuration generator, e.g. `Visual Studio`, you may want to
run it more than once.
Each of these commands should also have a different `build_type` setting.
A second command with the same `build_type` setting will overwrite the files
generated by the first. You can pass the build type on the command line with
`--settings build_type=$BUILD_TYPE` or in the profile itself,
under the section `[settings]` with the key `build_type`.
If you are using a Microsoft Visual C++ compiler,
then you will need to ensure consistency between the `build_type` setting
and the `compiler.runtime` setting.
When `build_type` is `Release`, `compiler.runtime` should be `MT`.
When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.
```
conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
```
3. Configure CMake and pass the toolchain file generated by Conan, located at
`$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.
Single-config generators:
Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
and make sure it matches one of the `build_type` settings
you chose in the previous step.
For example, to build Debug, replace "Release" with "Debug" in the next command.
```
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
```
Multi-config generators:
```
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
```
**Note:** You can pass build options for `xahaud` in this step.
4. Build `xahaud`.
For a single-configuration generator, it will build whatever configuration
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
you must pass the option `--config` to select the build configuration.
The output file is currently named 'rippled'.
Single-config generators:
```
cmake --build .
```
Multi-config generators:
```
cmake --build . --config Release
cmake --build . --config Debug
```
5. Test xahaud.
Single-config generators:
```
./rippled --unittest
```
Multi-config generators:
```
./Release/rippled --unittest
./Debug/rippled --unittest
```
The location of `xahaud` in your build directory depends on your CMake
generator. Pass `--help` to see the rest of the command line options.
## Coverage report
The coverage report is intended for developers using the GCC or Clang compilers
(including Apple Clang). It is generated by the build target `coverage`,
which is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or the `-Dcoverage=ON` variable in `cmake`.
Prerequisites for the coverage report:
- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
- `gcov` for GCC (installed with the compiler by default) or
- `llvm-cov` for Clang (installed with the compiler by default)
- `Debug` build type
A coverage report is created when the following steps are completed, in order:
1. `rippled` binary built with instrumentation data, enabled by the `coverage`
option mentioned above
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
to assemble both instrumentation data and the coverage capture data into a coverage report
The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report by setting
the `coverage_test` variable in `cmake`.
The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `cmake/CodeCoverage.cmake`
by setting the `coverage_format` variable in `cmake`. It is also possible
to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple platforms. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.
Example use with some cmake variables set:
```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dxrpld=ON -Dtests=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```
After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:
- file named `coverage.`_extension_ , with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
## Options
| Option | Default Value | Description |
| --- | ---| ---|
| `assert` | OFF | Enable assertions.
| `coverage` | OFF | Prepare the coverage report. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
| `tests` | OFF | Build tests. |
| `unity` | ON | Configure a unity build. |
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.
## Troubleshooting
### Conan
If you have trouble building dependencies after changing Conan settings,
try removing the Conan cache.
For Conan 2:
```
rm -rf ~/.conan2/p
```
Or clear the entire Conan 2 cache:
```
conan cache clean "*"
```
### macOS compilation with Apple Clang 17+
If you're on macOS with Apple Clang 17 or newer, you need to add a compiler flag to work around a compilation error in gRPC dependencies.
Edit `~/.conan2/profiles/default` and add under the `[conf]` section:
```
[conf]
tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
```
### call to 'async_teardown' is ambiguous
If you are compiling with an early version of Clang 16, then you might hit
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
header][7]. You can work around it by adding this preprocessor definition:
```
# Conan 1.x
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
# Conan 2.x: add to ~/.conan2/profiles/default under [conf]:
# tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]
```
### recompile with -fPIC
If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:
```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```
Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.
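For example, reusing the Debug settings from the earlier example:
```
conan install .. --output-folder . --build boost --settings build_type=Debug
```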
## Add a Dependency
If you want to experiment with a new package, follow these steps:
1. Search for the package on [Conan Center](https://conan.io/center/).
2. Modify [`conanfile.py`](./conanfile.py):
- Add a version of the package to the `requires` property.
- Change any default options for the package by adding them to the
`default_options` property (with syntax `'$package:$option': $value`).
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
- Add a call to `find_package($package REQUIRED)`.
- Link a library from the package to the target `ripple_libs`
(search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
4. Start coding! Don't forget to include whatever headers you need from the package.
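For example, after editing `conanfile.py` you can rebuild the dependency graph and reconfigure with the same commands used earlier (Debug settings shown; adjust as needed):
```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
```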
## A crash course in CMake and Conan
To better understand how to use Conan,
we should first understand _why_ we use Conan,
and to understand that,
we need to understand how we use CMake.
### CMake
Technically, you don't need CMake to build this project.
You could manually compile every translation unit into an object file,
using the right compiler options,
and then manually link all those objects together,
using the right linker options.
However, that is very tedious and error-prone,
which is why we lean on tools like CMake.
We have written CMake configuration files
([`CMakeLists.txt`](./CMakeLists.txt) and friends)
for this project so that CMake can be used to correctly compile and link
all of the translation units in it.
Or rather, CMake will generate files for a separate build system
(e.g. Make, Ninja, Visual Studio, Xcode, etc.)
that compile and link all of the translation units.
Even then, CMake has parameters, some of which are platform-specific.
In CMake's parlance, parameters are specially-named **variables** like
[`CMAKE_BUILD_TYPE`][build_type] or
[`CMAKE_MSVC_RUNTIME_LIBRARY`][runtime].
Parameters include:
- what build system to generate files for
- where to find the compiler and linker
- where to find dependencies, e.g. libraries and headers
- how to link dependencies, e.g. any special compiler or linker flags that
need to be used with them, including preprocessor definitions
- how to compile translation units, e.g. with optimizations, debug symbols,
position-independent code, etc.
- on Windows, which runtime library to link with
For some of these parameters, like the build system and compiler,
CMake goes through a complicated search process to choose default values.
For others, like the dependencies,
_we_ wrote our own complicated process to choose defaults
in the CMake configuration files of this project.
For most developers, things "just worked"... until they didn't, and then
you were left trying to debug one of these complicated processes, instead of
choosing and manually passing the parameter values yourself.
You can pass every parameter to CMake on the command line,
but writing out these parameters every time we want to configure CMake is
a pain.
Most humans prefer to put them, once, into a configuration file that
CMake can read every time it is configured.
For CMake, that file is a [toolchain file][toolchain].
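For example, a toolchain file is passed at configure time, here using the path generated in the examples above:
```
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
```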
### Conan
These next few paragraphs on Conan are going to read much like the ones above
for CMake.
Technically, you don't need Conan to build this project.
You could manually download, configure, build, and install all of the
dependencies yourself, and then pass all of the parameters necessary for
CMake to link to those dependencies.
To guarantee ABI compatibility, you must be sure to use the same set of
compiler and linker options for all dependencies _and_ this project.
However, that is very tedious and error-prone, which is why we lean on tools
like Conan.
We have written a Conan configuration file ([`conanfile.py`](./conanfile.py))
so that Conan can be used to correctly download, configure, build, and install
all of the dependencies for this project,
using a single set of compiler and linker options for all of them.
It generates files that contain almost all of the parameters that CMake
expects.
Those files include:
- A single toolchain file.
- For every dependency, a CMake [package configuration file][pcf],
[package version file][pvf], and for every build type, a package
targets file.
Together, these files implement version checking and define `IMPORTED`
targets for the dependencies.
The toolchain file itself amends the search path
([`CMAKE_PREFIX_PATH`][prefix_path]) so that [`find_package()`][find_package]
will [discover][search] the generated package configuration files.
**Nearly all we must do to properly configure CMake is pass the toolchain
file.**
What CMake parameters are left out?
You'll still need to pick a build system generator,
and if you choose a single-configuration generator,
you'll need to pass the `CMAKE_BUILD_TYPE`,
which should match the `build_type` setting you gave to Conan.
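Putting that together, a full configure might look like this (Ninja and Release are illustrative choices; the build type must match what you passed to Conan):
```
cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
```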
Even then, Conan has parameters, some of which are platform-specific.
In Conan's parlance, parameters are either settings or options.
**Settings** are shared by all packages, e.g. the build type.
**Options** are specific to a given package, e.g. whether to build and link
OpenSSL as a shared library.
For settings, Conan goes through a complicated search process to choose
defaults.
For options, each package recipe defines its own defaults.
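Both kinds can be passed on the command line, e.g. (Conan 2 syntax; the OpenSSL option name is illustrative):
```
conan install .. --settings build_type=Debug --options 'openssl/*:shared=False'
```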
You can pass every parameter to Conan on the command line,
but it is more convenient to put them in a [profile][profile].
**All we must do to properly configure Conan is edit and pass the profile.**
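For example, with Conan 2 you can locate and inspect the default profile (Conan 1 uses different subcommands):
```
conan profile path default
conan profile show
```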
[1]: https://github.com/conan-io/conan-center-index/issues/13168
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html
[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html
[pcf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-configuration-file
[pvf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-version-file
[find_package]: https://cmake.org/cmake/help/latest/command/find_package.html
[search]: https://cmake.org/cmake/help/latest/command/find_package.html#search-procedure
[prefix_path]: https://cmake.org/cmake/help/latest/variable/CMAKE_PREFIX_PATH.html
[profile]: https://docs.conan.io/en/latest/reference/profiles.html

View File

@@ -0,0 +1,207 @@
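# These helpers mirror the on-disk directory layout as IDE source groups
# (e.g. Visual Studio filters): group_sources_in() recurses through a
# directory tree and calls source_group() for each file it finds.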
macro(group_sources_in source_dir curdir)
file(GLOB children RELATIVE ${source_dir}/${curdir}
${source_dir}/${curdir}/*)
foreach (child ${children})
if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
group_sources_in(${source_dir} ${curdir}/${child})
else()
string(REPLACE "/" "\\" groupname ${curdir})
source_group(${groupname} FILES
${source_dir}/${curdir}/${child})
endif()
endforeach()
endmacro()
macro(group_sources curdir)
group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
endmacro()
macro (exclude_from_default target_)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
endmacro ()
macro (exclude_if_included target_)
get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
exclude_from_default (${target_})
endif ()
endmacro ()
function (print_ep_logs _target)
ExternalProject_Get_Property (${_target} STAMP_DIR)
add_custom_command(TARGET ${_target} POST_BUILD
COMMENT "${_target} BUILD OUTPUT"
COMMAND ${CMAKE_COMMAND}
-DIN_FILE=${STAMP_DIR}/${_target}-build-out.log
-P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/echo_file.cmake
COMMAND ${CMAKE_COMMAND}
-DIN_FILE=${STAMP_DIR}/${_target}-build-err.log
-P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/echo_file.cmake)
endfunction ()
#[=========================================================[
This is a function override for one function in the
standard ExternalProject module. We want to change
the generated build script slightly to include printing
the build logs in the case of failure. Those modifications
have been made here. This function override could break
in the future if the ExternalProject module changes internal
function names or changes the way it generates the build
scripts.
See:
https://gitlab.kitware.com/cmake/cmake/blob/df1ddeec128d68cc636f2dde6c2acd87af5658b6/Modules/ExternalProject.cmake#L1855-1952
#]=========================================================]
function(_ep_write_log_script name step cmd_var)
ExternalProject_Get_Property(${name} stamp_dir)
set(command "${${cmd_var}}")
set(make "")
set(code_cygpath_make "")
if(command MATCHES "^\\$\\(MAKE\\)")
# GNU make recognizes the string "$(MAKE)" as recursive make, so
# ensure that it appears directly in the makefile.
string(REGEX REPLACE "^\\$\\(MAKE\\)" "\${make}" command "${command}")
set(make "-Dmake=$(MAKE)")
if(WIN32 AND NOT CYGWIN)
set(code_cygpath_make "
if(\${make} MATCHES \"^/\")
execute_process(
COMMAND cygpath -w \${make}
OUTPUT_VARIABLE cygpath_make
ERROR_VARIABLE cygpath_make
RESULT_VARIABLE cygpath_error
OUTPUT_STRIP_TRAILING_WHITESPACE
)
if(NOT cygpath_error)
set(make \${cygpath_make})
endif()
endif()
")
endif()
endif()
set(config "")
if("${CMAKE_CFG_INTDIR}" MATCHES "^\\$")
string(REPLACE "${CMAKE_CFG_INTDIR}" "\${config}" command "${command}")
set(config "-Dconfig=${CMAKE_CFG_INTDIR}")
endif()
# Wrap multiple 'COMMAND' lines up into a second-level wrapper
# script so all output can be sent to one log file.
if(command MATCHES "(^|;)COMMAND;")
set(code_execute_process "
${code_cygpath_make}
execute_process(COMMAND \${command} RESULT_VARIABLE result)
if(result)
set(msg \"Command failed (\${result}):\\n\")
foreach(arg IN LISTS command)
set(msg \"\${msg} '\${arg}'\")
endforeach()
message(FATAL_ERROR \"\${msg}\")
endif()
")
set(code "")
set(cmd "")
set(sep "")
foreach(arg IN LISTS command)
if("x${arg}" STREQUAL "xCOMMAND")
if(NOT "x${cmd}" STREQUAL "x")
string(APPEND code "set(command \"${cmd}\")${code_execute_process}")
endif()
set(cmd "")
set(sep "")
else()
string(APPEND cmd "${sep}${arg}")
set(sep ";")
endif()
endforeach()
string(APPEND code "set(command \"${cmd}\")${code_execute_process}")
file(GENERATE OUTPUT "${stamp_dir}/${name}-${step}-$<CONFIG>-impl.cmake" CONTENT "${code}")
set(command ${CMAKE_COMMAND} "-Dmake=\${make}" "-Dconfig=\${config}" -P ${stamp_dir}/${name}-${step}-$<CONFIG>-impl.cmake)
endif()
# Wrap the command in a script to log output to files.
set(script ${stamp_dir}/${name}-${step}-$<CONFIG>.cmake)
set(logbase ${stamp_dir}/${name}-${step})
set(code "
${code_cygpath_make}
function (_echo_file _fil)
file (READ \${_fil} _cont)
execute_process (COMMAND \${CMAKE_COMMAND} -E echo \"\${_cont}\")
endfunction ()
set(command \"${command}\")
execute_process(
COMMAND \${command}
RESULT_VARIABLE result
OUTPUT_FILE \"${logbase}-out.log\"
ERROR_FILE \"${logbase}-err.log\"
)
if(result)
set(msg \"Command failed: \${result}\\n\")
foreach(arg IN LISTS command)
set(msg \"\${msg} '\${arg}'\")
endforeach()
execute_process (COMMAND \${CMAKE_COMMAND} -E echo \"Build output for ${logbase} : \")
_echo_file (\"${logbase}-out.log\")
_echo_file (\"${logbase}-err.log\")
set(msg \"\${msg}\\nSee above\\n\")
message(FATAL_ERROR \"\${msg}\")
else()
set(msg \"${name} ${step} command succeeded. See also ${logbase}-*.log\")
message(STATUS \"\${msg}\")
endif()
")
file(GENERATE OUTPUT "${script}" CONTENT "${code}")
set(command ${CMAKE_COMMAND} ${make} ${config} -P ${script})
set(${cmd_var} "${command}" PARENT_SCOPE)
endfunction()
find_package(Git)
# function that calls git log to get current hash
function (git_hash hash_val)
# note: optional second string argument not in signature;
# pass "short" (case-insensitive) to get the abbreviated hash (%h) instead of the full hash (%H)
if (NOT GIT_FOUND)
return ()
endif ()
set (_hash "")
set (_format "%H")
if (ARGC GREATER_EQUAL 2)
string (TOLOWER ${ARGV1} _short)
if (_short STREQUAL "short")
set (_format "%h")
endif ()
endif ()
execute_process (COMMAND ${GIT_EXECUTABLE} "log" "--pretty=${_format}" "-n1"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE _git_exit_code
OUTPUT_VARIABLE _temp_hash
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET)
if (_git_exit_code EQUAL 0)
set (_hash ${_temp_hash})
endif ()
set (${hash_val} "${_hash}" PARENT_SCOPE)
endfunction ()
function (git_branch branch_val)
if (NOT GIT_FOUND)
return ()
endif ()
set (_branch "")
execute_process (COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE _git_exit_code
OUTPUT_VARIABLE _temp_branch
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET)
if (_git_exit_code EQUAL 0)
set (_branch ${_temp_branch})
endif ()
set (${branch_val} "${_branch}" PARENT_SCOPE)
endfunction ()

View File

@@ -0,0 +1,60 @@
#[=========================================================[
SQLITE doesn't provide build files in the
standard source-only distribution. So we wrote
a simple cmake file and we copy it to the
external project folder so that we can use
this file to build the lib with ExternalProject
#]=========================================================]
add_library (sqlite3 STATIC sqlite3.c)
#[=========================================================[
When compiled with SQLITE_THREADSAFE=1, SQLite operates
in serialized mode. In this mode, SQLite can be safely
used by multiple threads with no restriction.
NOTE: This implies a global mutex!
When compiled with SQLITE_THREADSAFE=2, SQLite can be
used in a multithreaded program so long as no two
threads attempt to use the same database connection at
the same time.
NOTE: This is the preferred threading model, but not
currently enabled because we need to investigate our
use-model and concurrency requirements.
TODO: consider whether any other options should be
used: https://www.sqlite.org/compile.html
#]=========================================================]
target_compile_definitions (sqlite3
PRIVATE
SQLITE_THREADSAFE=1
HAVE_USLEEP=1)
target_compile_options (sqlite3
PRIVATE
$<$<BOOL:${MSVC}>:
-wd4100
-wd4127
-wd4232
-wd4244
-wd4701
-wd4706
-wd4996
>
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-array-bounds>)
install (
TARGETS
sqlite3
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES DESTINATION include)
install (
FILES
sqlite3.h
sqlite3ext.h
DESTINATION include)

View File

@@ -17,9 +17,7 @@ target_compile_features (common INTERFACE cxx_std_20)
target_compile_definitions (common
INTERFACE
$<$<CONFIG:Debug>:DEBUG _DEBUG>
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
OPENSSL_SUPPRESS_DEPRECATED)
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
# ^^^^ NOTE: CMAKE release builds already have NDEBUG
# defined, so no need to add it explicitly except for
# this special case of (profile ON) and (assert OFF)
@@ -122,7 +120,6 @@ else ()
target_link_libraries (common
INTERFACE
-rdynamic
$<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id>
# link to static libc/c++ iff:
# * static option set and
# * NOT APPLE (AppleClang does not support static libc/c++) and
@@ -133,37 +130,7 @@ else ()
>)
endif ()
# Antithesis instrumentation will only be built and deployed using machines running Linux.
if (voidstar)
if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...")
elseif (NOT is_linux)
message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
endif ()
endif ()
if (use_mold)
# use mold linker if available
execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "mold")
target_link_libraries (common INTERFACE -fuse-ld=mold)
else ()
# Checking for mold linker (< GCC 12.1.0)
execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -B/usr/libexec/mold -Wl,--version
OUTPUT_VARIABLE LD_VERSION_OUT
ERROR_VARIABLE LD_VERSION_ERR)
set(LD_VERSION "${LD_VERSION_OUT}${LD_VERSION_ERR}")
if ("${LD_VERSION}" MATCHES "mold")
target_link_libraries (common INTERFACE -B/usr/libexec/mold)
endif ()
endif ()
unset (LD_VERSION)
elseif (use_gold AND is_gcc)
if (use_gold AND is_gcc)
# use gold linker if available
execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version
@@ -195,7 +162,9 @@ elseif (use_gold AND is_gcc)
$<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>)
endif ()
unset (LD_VERSION)
elseif (use_lld)
endif ()
if (use_lld)
# use lld linker if available
execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
@@ -206,7 +175,6 @@ elseif (use_lld)
unset (LD_VERSION)
endif()
if (assert)
foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE)
STRING (REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}")

File diff suppressed because it is too large

View File

@@ -0,0 +1,98 @@
#[===================================================================[
coverage report target
#]===================================================================]
if (coverage)
if (is_clang)
if (APPLE)
execute_process (COMMAND xcrun -f llvm-profdata
OUTPUT_VARIABLE LLVM_PROFDATA
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_PROFDATA llvm-profdata)
endif ()
if (NOT LLVM_PROFDATA)
message (WARNING "unable to find llvm-profdata - skipping coverage_report target")
endif ()
if (APPLE)
execute_process (COMMAND xcrun -f llvm-cov
OUTPUT_VARIABLE LLVM_COV
OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program (LLVM_COV llvm-cov)
endif ()
if (NOT LLVM_COV)
message (WARNING "unable to find llvm-cov - skipping coverage_report target")
endif ()
set (extract_pattern "")
if (coverage_core_only)
set (extract_pattern "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/")
endif ()
if (LLVM_COV AND LLVM_PROFDATA)
add_custom_target (coverage_report
USES_TERMINAL
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage - results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
COMMAND ${LLVM_PROFDATA}
merge -sparse default.profraw -o rip.profdata
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LLVM_COV}
report -instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
# generate html report
COMMAND ${LLVM_COV}
show -format=html -output-dir=${CMAKE_BINARY_DIR}/coverage
-instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
BYPRODUCTS coverage/index.html)
endif ()
elseif (is_gcc)
find_program (LCOV lcov)
if (NOT LCOV)
message (WARNING "unable to find lcov - skipping coverage_report target")
endif ()
find_program (GENHTML genhtml)
if (NOT GENHTML)
message (WARNING "unable to find genhtml - skipping coverage_report target")
endif ()
set (extract_pattern "*")
if (coverage_core_only)
set (extract_pattern "*/src/ripple/*")
endif ()
if (LCOV AND GENHTML)
add_custom_target (coverage_report
USES_TERMINAL
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage- results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
# create baseline info file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -i -o baseline.info
| grep -v "ignoring data for external file"
# run tests
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests for coverage report."
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
# Create test coverage data file
COMMAND ${LCOV}
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -o tests.info
| grep -v "ignoring data for external file"
# Combine baseline and test coverage data
COMMAND ${LCOV}
-a baseline.info -a tests.info -o lcov-all.info
# extract our files
COMMAND ${LCOV}
-e lcov-all.info "${extract_pattern}" -o lcov.info
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LCOV} --summary lcov.info
# generate HTML report
COMMAND ${GENHTML}
-o ${CMAKE_BINARY_DIR}/coverage lcov.info
BYPRODUCTS coverage/index.html)
endif ()
endif ()
endif ()

View File

@@ -1,13 +1,6 @@
#[===================================================================[
docs target (optional)
#]===================================================================]
# Early return if the `docs` directory is missing,
# e.g. when we are building a Conan package.
if(NOT EXISTS docs)
return()
endif()
if (tests)
find_package (Doxygen)
if (NOT TARGET Doxygen::doxygen)
@@ -26,7 +19,8 @@ if (tests)
src/ripple/*.cpp
src/ripple/*.md
src/test/*.h
src/test/*.md)
src/test/*.md
Builds/*/README.md)
list (APPEND doxygen_input
README.md
RELEASENOTES.md

View File

@@ -2,43 +2,30 @@
install stuff
#]===================================================================]
include(create_symbolic_link)
install (
TARGETS
ed25519-donna
common
opts
ripple_syslibs
ripple_boost
xrpl.imports.main
xrpl.libpb
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.hook
xrpl.libxrpl.crypto
xrpl.libxrpl.json
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl
antithesis-sdk-cpp
xrpl_core
EXPORT RippleExports
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES DESTINATION include)
install(
DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)
install(CODE "
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link)
create_symbolic_link(xrpl \
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
")
if(${INSTALL_SECP256K1})
install (
TARGETS
secp256k1
EXPORT RippleExports
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES DESTINATION include)
endif()
install (EXPORT RippleExports
FILE RippleTargets.cmake
@@ -50,9 +37,14 @@ write_basic_package_version_file (
VERSION ${rippled_version}
COMPATIBILITY SameMajorVersion)
if (is_root_project AND TARGET rippled)
if (is_root_project)
install (TARGETS rippled RUNTIME DESTINATION bin)
set_target_properties(rippled PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
install (
FILES
${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/RippleConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/RippleConfigVersion.cmake
DESTINATION lib/cmake/ripple)
# sample configs should not overwrite existing files
# install if-not-exists workaround as suggested by
# https://cmake.org/Bug/view.php?id=12646
@@ -67,16 +59,4 @@ if (is_root_project AND TARGET rippled)
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/rippled-example.cfg\" etc rippled.cfg)
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
")
install(CODE "
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link)
create_symbolic_link(rippled${suffix} \
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
")
endif ()
install (
FILES
${CMAKE_CURRENT_SOURCE_DIR}/cmake/RippleConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/RippleConfigVersion.cmake
DESTINATION lib/cmake/ripple)

View File

@@ -7,9 +7,6 @@ add_library (Ripple::opts ALIAS opts)
target_compile_definitions (opts
INTERFACE
BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT
BOOST_CONTAINER_FWD_BAD_DEQUE
HAS_UNCAUGHT_EXCEPTIONS=1
$<$<BOOL:${boost_show_deprecated}>:
BOOST_ASIO_NO_DEPRECATED
BOOST_FILESYSTEM_NO_DEPRECATED
@@ -21,25 +18,20 @@ target_compile_definitions (opts
>
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
$<$<BOOL:${single_io_service_thread}>:RIPPLE_SINGLE_IO_SERVICE_THREAD=1>
# Enhanced logging is enabled for Debug builds, or explicitly via
# -DBEAST_ENHANCED_LOGGING=ON for other build types.
$<$<OR:$<CONFIG:Debug>,$<BOOL:${BEAST_ENHANCED_LOGGING}>>:BEAST_ENHANCED_LOGGING=1>
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
$<$<BOOL:${single_io_service_thread}>:RIPPLE_SINGLE_IO_SERVICE_THREAD=1>)
target_compile_options (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries (opts
INTERFACE
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

View File

@@ -0,0 +1,39 @@
#[===================================================================[
multiconfig misc
#]===================================================================]
if (is_multiconfig)
# This code finds all source files in the src subdirectory for inclusion
# in the IDE file tree as non-compiled sources. Since this file list will
# have some overlap with files we have already added to our targets to
# be compiled, we explicitly remove any of these target source files from
# this list.
file (GLOB_RECURSE all_sources RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
CONFIGURE_DEPENDS
src/*.* Builds/*.md docs/*.md src/*.md Builds/*.cmake)
file(GLOB md_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} CONFIGURE_DEPENDS
*.md)
LIST(APPEND all_sources ${md_files})
foreach (_target secp256k1 ed25519-donna pbufs xrpl_core rippled)
get_target_property (_type ${_target} TYPE)
if(_type STREQUAL "INTERFACE_LIBRARY")
continue()
endif()
get_target_property (_src ${_target} SOURCES)
list (REMOVE_ITEM all_sources ${_src})
endforeach ()
target_sources (rippled PRIVATE ${all_sources})
set_property (
SOURCE ${all_sources}
APPEND
PROPERTY HEADER_FILE_ONLY true)
if (MSVC)
set_property(
DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
PROPERTY VS_STARTUP_PROJECT rippled)
endif ()
group_sources(src)
group_sources(docs)
group_sources(Builds)
endif ()

View File

@@ -0,0 +1,33 @@
#[===================================================================[
NIH prefix path..this is where we will download
and build any ExternalProjects, and they will hopefully
survive across build directory deletion (manual cleans)
#]===================================================================]
string (REGEX REPLACE "[ \\/%]+" "_" gen_for_path ${CMAKE_GENERATOR})
string (TOLOWER ${gen_for_path} gen_for_path)
# HACK: trying to shorten paths for windows CI (which hits 260 MAXPATH easily)
# @see: https://issues.jenkins-ci.org/browse/JENKINS-38706?focusedCommentId=339847
string (REPLACE "visual_studio" "vs" gen_for_path ${gen_for_path})
if (NOT DEFINED NIH_CACHE_ROOT)
if (DEFINED ENV{NIH_CACHE_ROOT})
set (NIH_CACHE_ROOT $ENV{NIH_CACHE_ROOT})
else ()
set (NIH_CACHE_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/.nih_c")
endif ()
endif ()
set (nih_cache_path
"${NIH_CACHE_ROOT}/${gen_for_path}/${CMAKE_CXX_COMPILER_ID}_${CMAKE_CXX_COMPILER_VERSION}")
if (NOT is_multiconfig)
set (nih_cache_path "${nih_cache_path}/${CMAKE_BUILD_TYPE}")
endif ()
file(TO_CMAKE_PATH "${nih_cache_path}" nih_cache_path)
message (STATUS "NIH-EP cache path: ${nih_cache_path}")
## two convenience variables:
set (ep_lib_prefix ${CMAKE_STATIC_LIBRARY_PREFIX})
set (ep_lib_suffix ${CMAKE_STATIC_LIBRARY_SUFFIX})
# this is a setting for FetchContent and needs to be
# a cache variable
# https://cmake.org/cmake/help/latest/module/FetchContent.html#populating-the-content
set (FETCHCONTENT_BASE_DIR ${nih_cache_path} CACHE STRING "" FORCE)

View File

@@ -2,8 +2,6 @@
convenience variables and sanity checks
#]===================================================================]
include(ProcessorCount)
if (NOT ep_procs)
ProcessorCount(ep_procs)
if (ep_procs GREATER 1)

View File

@@ -0,0 +1,122 @@
#[===================================================================[
declare user options/settings
#]===================================================================]
option (assert "Enables asserts, even in release builds" OFF)
option (reporting "Build rippled with reporting mode enabled" OFF)
option (tests "Build tests" ON)
option (unity "Creates a build using UNITY support in cmake. This is the default" ON)
if (unity)
if (NOT is_ci)
set (CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
endif ()
endif ()
if (is_gcc OR is_clang)
option (coverage "Generates coverage info." OFF)
option (profile "Add profiling flags" OFF)
set (coverage_test "" CACHE STRING
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
if (coverage_test AND NOT coverage)
set (coverage ON CACHE BOOL "gcc/clang only" FORCE)
endif ()
option (coverage_core_only
"Include only src/ripple files when generating coverage report. \
Set to OFF to include all sources in coverage report."
ON)
option (wextra "compile with extra gcc/clang warnings enabled" ON)
else ()
set (profile OFF CACHE BOOL "gcc/clang only" FORCE)
set (coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set (wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif ()
if (is_linux)
option (BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
option (static "link protobuf, openssl, libc++, and boost statically" ON)
option (perf "Enables flags that assist with perf recording" OFF)
option (use_gold "enables detection of gold (binutils) linker" ON)
else ()
# we are not ready to allow shared-libs on windows because it would require
# export declarations. On macos it's more feasible, but static openssl
# produces odd linker errors, thus we disable shared lib builds for now.
set (BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set (static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set (perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set (use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
endif ()
if (is_clang)
option (use_lld "enables detection of lld linker" ON)
else ()
set (use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif ()
option (jemalloc "Enables jemalloc for heap profiling" OFF)
option (werr "treat warnings as errors" OFF)
option (local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF)
option (local_grpc
"Force a local build of gRPC instead of looking for an installed version." OFF)
# this one is a string and therefore can't be an option
set (san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property (CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if (san)
string (TOLOWER ${san} san)
set (SAN_FLAG "-fsanitize=${san}")
set (SAN_LIB "")
if (is_gcc)
if (san STREQUAL "address")
set (SAN_LIB "asan")
elseif (san STREQUAL "thread")
set (SAN_LIB "tsan")
elseif (san STREQUAL "memory")
set (SAN_LIB "msan")
elseif (san STREQUAL "undefined")
set (SAN_LIB "ubsan")
endif ()
endif ()
set (_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set (CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag (${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set (CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if (NOT COMPILER_SUPPORTS_SAN)
message (FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif ()
endif ()
set (container_label "" CACHE STRING "tag to use for package building containers")
option (packages_only
"ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option (have_package_container
"Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)
# the remaining options are obscure and rarely used
option (beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table"
OFF)
option (single_io_service_thread
"Restricts the number of threads calling io_service::run to one. \
This can be useful when debugging."
OFF)
option (boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
OFF)
option (beast_hashers
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
OFF)
if (WIN32)
option (beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else ()
set (beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif ()
if (coverage)
message (STATUS "coverage build requested - forcing Debug build")
set (CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif ()

View File

@@ -2,9 +2,9 @@ option (validator_keys "Enables building of validator-keys-tool as a separate ta
if (validator_keys)
git_branch (current_branch)
# default to tracking VK master branch unless we are on release
if (NOT (current_branch STREQUAL "release"))
set (current_branch "master")
# default to tracking VK develop branch unless we are on master/release
if (NOT (current_branch STREQUAL "master" OR current_branch STREQUAL "release"))
set (current_branch "develop")
endif ()
message (STATUS "tracking ValidatorKeys branch: ${current_branch}")

View File

@@ -0,0 +1,15 @@
#[===================================================================[
read version from source
#]===================================================================]
file (STRINGS src/ripple/protocol/impl/BuildInfo.cpp BUILD_INFO)
foreach (line_ ${BUILD_INFO})
if (line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
set (rippled_version ${CMAKE_MATCH_1})
endif ()
endforeach ()
if (rippled_version)
message (STATUS "rippled version: ${rippled_version}")
else ()
message (FATAL_ERROR "unable to determine rippled version")
endif ()

View File

@@ -0,0 +1,106 @@
################################################################################
# SociConfig.cmake - CMake build configuration of SOCI library
################################################################################
# Copyright (C) 2010 Mateusz Loskot <mateusz@loskot.net>
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
################################################################################
include(CheckCXXSymbolExists)
if(WIN32)
check_cxx_symbol_exists("_M_AMD64" "" SOCI_TARGET_ARCH_X64)
if(NOT SOCI_TARGET_ARCH_X64)
check_cxx_symbol_exists("_M_IX86" "" SOCI_TARGET_ARCH_X86)
endif(NOT SOCI_TARGET_ARCH_X64)
# add check for arm here
# see http://msdn.microsoft.com/en-us/library/b0084kay.aspx
else(WIN32)
check_cxx_symbol_exists("__i386__" "" SOCI_TARGET_ARCH_X86)
check_cxx_symbol_exists("__x86_64__" "" SOCI_TARGET_ARCH_X64)
check_cxx_symbol_exists("__arm__" "" SOCI_TARGET_ARCH_ARM)
endif(WIN32)
if(NOT DEFINED LIB_SUFFIX)
if(SOCI_TARGET_ARCH_X64)
set(_lib_suffix "64")
else()
set(_lib_suffix "")
endif()
set(LIB_SUFFIX ${_lib_suffix} CACHE STRING "Specifies suffix for the lib directory")
endif()
#
# C++11 Option
#
if(NOT SOCI_CXX_C11)
set (SOCI_CXX_C11 OFF CACHE BOOL "Build to the C++11 standard")
endif()
#
# Force compilation flags and set desired warnings level
#
if (MSVC)
add_definitions(-D_CRT_SECURE_NO_DEPRECATE)
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
add_definitions(-D_CRT_NONSTDC_NO_WARNING)
add_definitions(-D_SCL_SECURE_NO_WARNINGS)
if(CMAKE_CXX_FLAGS MATCHES "/W[0-4]")
string(REGEX REPLACE "/W[0-4]" "/W4" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /we4266")
endif()
else()
set(SOCI_GCC_CLANG_COMMON_FLAGS "")
# "-pedantic -Werror -Wno-error=parentheses -Wall -Wextra -Wpointer-arith -Wcast-align -Wcast-qual -Wfloat-equal -Woverloaded-virtual -Wredundant-decls -Wno-long-long")
if (SOCI_CXX_C11)
set(SOCI_CXX_VERSION_FLAGS "-std=c++11")
else()
set(SOCI_CXX_VERSION_FLAGS "-std=gnu++98")
endif()
if("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang" OR "${CMAKE_CXX_COMPILER}" MATCHES "clang")
if(NOT CMAKE_CXX_COMPILER_VERSION LESS 3.1 AND SOCI_ASAN)
set(SOCI_GCC_CLANG_COMMON_FLAGS "${SOCI_GCC_CLANG_COMMON_FLAGS} -fsanitize=address")
endif()
# enforce C++11 for Clang
set(SOCI_CXX_C11 ON)
set(SOCI_CXX_VERSION_FLAGS "-std=c++11")
add_definitions(-DCATCH_CONFIG_CPP11_NO_IS_ENUM)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SOCI_GCC_CLANG_COMMON_FLAGS} ${SOCI_CXX_VERSION_FLAGS}")
elseif(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
if(NOT CMAKE_CXX_COMPILER_VERSION LESS 4.8 AND SOCI_ASAN)
set(SOCI_GCC_CLANG_COMMON_FLAGS "${SOCI_GCC_CLANG_COMMON_FLAGS} -fsanitize=address")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SOCI_GCC_CLANG_COMMON_FLAGS} ${SOCI_CXX_VERSION_FLAGS} ")
if (CMAKE_COMPILER_IS_GNUCXX)
if (CMAKE_SYSTEM_NAME MATCHES "FreeBSD")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-variadic-macros")
endif()
endif()
else()
message(WARNING "Unknown toolset - using default flags to build SOCI")
endif()
endif()
# Set SOCI_HAVE_* variables for soci-config.h generator
set(SOCI_HAVE_CXX_C11 ${SOCI_CXX_C11} CACHE INTERNAL "Enables C++11 support")

View File

@@ -0,0 +1,96 @@
#[===================================================================[
NIH dep: boost
#]===================================================================]
if((NOT DEFINED BOOST_ROOT) AND(DEFINED ENV{BOOST_ROOT}))
set(BOOST_ROOT $ENV{BOOST_ROOT})
endif()
file(TO_CMAKE_PATH "${BOOST_ROOT}" BOOST_ROOT)
if(WIN32 OR CYGWIN)
# Workaround for MSVC having two boost versions - x86 and x64 on same PC in stage folders
if(DEFINED BOOST_ROOT)
if(IS_DIRECTORY ${BOOST_ROOT}/stage64/lib)
set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage64/lib)
elseif(IS_DIRECTORY ${BOOST_ROOT}/stage/lib)
set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage/lib)
elseif(IS_DIRECTORY ${BOOST_ROOT}/lib)
set(BOOST_LIBRARYDIR ${BOOST_ROOT}/lib)
else()
message(WARNING "Did not find expected boost library dir. "
"Defaulting to ${BOOST_ROOT}")
set(BOOST_LIBRARYDIR ${BOOST_ROOT})
endif()
endif()
endif()
message(STATUS "BOOST_ROOT: ${BOOST_ROOT}")
message(STATUS "BOOST_LIBRARYDIR: ${BOOST_LIBRARYDIR}")
# uncomment the following as needed to debug FindBoost issues:
#set(Boost_DEBUG ON)
#[=========================================================[
boost dynamic libraries don't trivially support @rpath
linking right now (cmake's default), so just force
static linking for macos, or if requested on linux by flag
#]=========================================================]
if(static)
set(Boost_USE_STATIC_LIBS ON)
endif()
set(Boost_USE_MULTITHREADED ON)
if(static AND NOT APPLE)
set(Boost_USE_STATIC_RUNTIME ON)
else()
set(Boost_USE_STATIC_RUNTIME OFF)
endif()
# TBD:
# Boost_USE_DEBUG_RUNTIME: When ON, uses Boost libraries linked against the
find_package(Boost 1.70 REQUIRED
COMPONENTS
chrono
container
context
coroutine
date_time
filesystem
program_options
regex
system
thread)
add_library(ripple_boost INTERFACE)
add_library(Ripple::boost ALIAS ripple_boost)
if(is_xcode)
target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
else()
target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
endif()
target_link_libraries(ripple_boost
INTERFACE
Boost::boost
Boost::chrono
Boost::container
Boost::coroutine
Boost::date_time
Boost::filesystem
Boost::program_options
Boost::regex
Boost::system
Boost::thread)
if(Boost_COMPILER)
target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
endif()
if(san AND is_clang)
# TODO: gcc does not support -fsanitize-blacklist...can we do something else
# for gcc ?
if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
endif()
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
target_compile_options(opts
INTERFACE
# ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif()

View File

@@ -0,0 +1,28 @@
#[===================================================================[
NIH dep: ed25519-donna
#]===================================================================]
add_library (ed25519-donna STATIC
src/ed25519-donna/ed25519.c)
target_include_directories (ed25519-donna
PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
$<INSTALL_INTERFACE:include>
PRIVATE
${CMAKE_CURRENT_SOURCE_DIR}/src/ed25519-donna)
#[=========================================================[
NOTE for macos:
https://github.com/floodyberry/ed25519-donna/issues/29
our source for ed25519-donna-portable.h has been
patched to workaround this.
#]=========================================================]
target_link_libraries (ed25519-donna PUBLIC OpenSSL::SSL)
add_library (NIH::ed25519-donna ALIAS ed25519-donna)
target_link_libraries (ripple_libs INTERFACE NIH::ed25519-donna)
#[===========================[
headers installation
#]===========================]
install (
FILES
src/ed25519-donna/ed25519.h
DESTINATION include/ed25519-donna)

File diff suppressed because it is too large

View File

@@ -0,0 +1,47 @@
# - Try to find jemalloc
# Once done this will define
# JEMALLOC_FOUND - System has jemalloc
# JEMALLOC_INCLUDE_DIRS - The jemalloc include directories
# JEMALLOC_LIBRARIES - The libraries needed to use jemalloc
if(NOT USE_BUNDLED_JEMALLOC)
find_package(PkgConfig)
if (PKG_CONFIG_FOUND)
pkg_check_modules(PC_JEMALLOC QUIET jemalloc)
endif()
else()
set(PC_JEMALLOC_INCLUDEDIR)
set(PC_JEMALLOC_INCLUDE_DIRS)
set(PC_JEMALLOC_LIBDIR)
set(PC_JEMALLOC_LIBRARY_DIRS)
set(LIMIT_SEARCH NO_DEFAULT_PATH)
endif()
set(JEMALLOC_DEFINITIONS ${PC_JEMALLOC_CFLAGS_OTHER})
find_path(JEMALLOC_INCLUDE_DIR jemalloc/jemalloc.h
PATHS ${PC_JEMALLOC_INCLUDEDIR} ${PC_JEMALLOC_INCLUDE_DIRS}
${LIMIT_SEARCH})
# If we're asked to use static linkage, add libjemalloc.a as a preferred library name.
if(JEMALLOC_USE_STATIC)
list(APPEND JEMALLOC_NAMES
"${CMAKE_STATIC_LIBRARY_PREFIX}jemalloc${CMAKE_STATIC_LIBRARY_SUFFIX}")
endif()
list(APPEND JEMALLOC_NAMES jemalloc)
find_library(JEMALLOC_LIBRARY NAMES ${JEMALLOC_NAMES}
HINTS ${PC_JEMALLOC_LIBDIR} ${PC_JEMALLOC_LIBRARY_DIRS}
${LIMIT_SEARCH})
set(JEMALLOC_LIBRARIES ${JEMALLOC_LIBRARY})
set(JEMALLOC_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})
include(FindPackageHandleStandardArgs)
# handle the QUIETLY and REQUIRED arguments and set JEMALLOC_FOUND to TRUE
# if all listed variables are TRUE
find_package_handle_standard_args(JeMalloc DEFAULT_MSG
JEMALLOC_LIBRARY JEMALLOC_INCLUDE_DIR)
mark_as_advanced(JEMALLOC_INCLUDE_DIR JEMALLOC_LIBRARY)

View File

@@ -0,0 +1,22 @@
find_package (PkgConfig REQUIRED)
pkg_search_module (libarchive_PC QUIET libarchive>=3.4.3)
if(static)
set(LIBARCHIVE_LIB libarchive.a)
else()
set(LIBARCHIVE_LIB archive)
endif()
find_library (archive
NAMES ${LIBARCHIVE_LIB}
HINTS
${libarchive_PC_LIBDIR}
${libarchive_PC_LIBRARY_DIRS}
NO_DEFAULT_PATH)
find_path (LIBARCHIVE_INCLUDE_DIR
NAMES archive.h
HINTS
${libarchive_PC_INCLUDEDIR}
${libarchive_PC_INCLUDEDIRS}
NO_DEFAULT_PATH)

View File

@@ -0,0 +1,24 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
pkg_search_module (lz4_PC QUIET liblz4>=1.9)
endif ()
if(static)
set(LZ4_LIB liblz4.a)
else()
set(LZ4_LIB lz4.so)
endif()
find_library (lz4
NAMES ${LZ4_LIB}
HINTS
${lz4_PC_LIBDIR}
${lz4_PC_LIBRARY_DIRS}
NO_DEFAULT_PATH)
find_path (LZ4_INCLUDE_DIR
NAMES lz4.h
HINTS
${lz4_PC_INCLUDEDIR}
${lz4_PC_INCLUDEDIRS}
NO_DEFAULT_PATH)

View File

@@ -0,0 +1,24 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
pkg_search_module (secp256k1_PC QUIET libsecp256k1)
endif ()
if(static)
set(SECP256K1_LIB libsecp256k1.a)
else()
set(SECP256K1_LIB secp256k1)
endif()
find_library(secp256k1
NAMES ${SECP256K1_LIB}
HINTS
${secp256k1_PC_LIBDIR}
${secp256k1_PC_LIBRARY_PATHS}
NO_DEFAULT_PATH)
find_path (SECP256K1_INCLUDE_DIR
NAMES secp256k1.h
HINTS
${secp256k1_PC_INCLUDEDIR}
${secp256k1_PC_INCLUDEDIRS}
NO_DEFAULT_PATH)

View File

@@ -0,0 +1,24 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
pkg_search_module (snappy_PC QUIET snappy>=1.1.7)
endif ()
if(static)
set(SNAPPY_LIB libsnappy.a)
else()
set(SNAPPY_LIB libsnappy.so)
endif()
find_library (snappy
NAMES ${SNAPPY_LIB}
HINTS
${snappy_PC_LIBDIR}
${snappy_PC_LIBRARY_DIRS}
NO_DEFAULT_PATH)
find_path (SNAPPY_INCLUDE_DIR
NAMES snappy.h
HINTS
${snappy_PC_INCLUDEDIR}
${snappy_PC_INCLUDEDIRS}
NO_DEFAULT_PATH)

View File

@@ -0,0 +1,19 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
# TBD - currently no soci pkgconfig
#pkg_search_module (soci_PC QUIET libsoci_core>=3.2)
endif ()
if(static)
set(SOCI_LIB libsoci.a)
else()
set(SOCI_LIB libsoci_core.so)
endif()
find_library (soci
NAMES ${SOCI_LIB})
find_path (SOCI_INCLUDE_DIR
NAMES soci/soci.h)
message("SOCI FOUND AT: ${SOCI_LIB}")

View File

@@ -0,0 +1,24 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
pkg_search_module (sqlite_PC QUIET sqlite3>=3.26.0)
endif ()
if(static)
set(SQLITE_LIB libsqlite3.a)
else()
set(SQLITE_LIB sqlite3.so)
endif()
find_library (sqlite3
NAMES ${SQLITE_LIB}
HINTS
${sqlite_PC_LIBDIR}
${sqlite_PC_LIBRARY_DIRS}
NO_DEFAULT_PATH)
find_path (SQLITE_INCLUDE_DIR
NAMES sqlite3.h
HINTS
${sqlite_PC_INCLUDEDIR}
${sqlite_PC_INCLUDEDIRS}
NO_DEFAULT_PATH)

View File

@@ -0,0 +1,163 @@
#[===================================================================[
NIH dep: libarchive
#]===================================================================]
option (local_libarchive "use local build of libarchive." OFF)
add_library (archive_lib UNKNOWN IMPORTED GLOBAL)
if (NOT local_libarchive)
if (NOT WIN32)
find_package(libarchive_pc REQUIRED)
endif ()
if (archive)
message (STATUS "Found libarchive using pkg-config. Using ${archive}.")
set_target_properties (archive_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${archive}
IMPORTED_LOCATION_RELEASE
${archive}
INTERFACE_INCLUDE_DIRECTORIES
${LIBARCHIVE_INCLUDE_DIR})
# pkg-config can return extra info for static lib linking
# this is probably needed/useful generally, but apply
# to APPLE for now (mostly for homebrew)
if (APPLE AND static AND libarchive_PC_STATIC_LIBRARIES)
message(STATUS "NOTE: libarchive static libs: ${libarchive_PC_STATIC_LIBRARIES}")
# also, APPLE seems to need iconv...maybe linux does too (TBD)
target_link_libraries (archive_lib
INTERFACE iconv ${libarchive_PC_STATIC_LIBRARIES})
endif ()
else ()
## now try searching using the minimal find module that cmake provides
find_package(LibArchive 3.4.3 QUIET)
if (LibArchive_FOUND)
if (static)
# find module doesn't find static libs currently, so we re-search
get_filename_component(_loc ${LibArchive_LIBRARY} DIRECTORY)
find_library(_la_static
NAMES libarchive.a archive_static.lib archive.lib
PATHS ${_loc})
if (_la_static)
set (_la_lib ${_la_static})
else ()
message (WARNING "unable to find libarchive static lib - switching to local build")
set (local_libarchive ON CACHE BOOL "" FORCE)
endif ()
else ()
set (_la_lib ${LibArchive_LIBRARY})
endif ()
if (NOT local_libarchive)
message (STATUS "Found libarchive using module/config. Using ${_la_lib}.")
set_target_properties (archive_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${_la_lib}
IMPORTED_LOCATION_RELEASE
${_la_lib}
INTERFACE_INCLUDE_DIRECTORIES
${LibArchive_INCLUDE_DIRS})
endif ()
else ()
set (local_libarchive ON CACHE BOOL "" FORCE)
endif ()
endif ()
endif()
if (local_libarchive)
set (lib_post "")
if (MSVC)
set (lib_post "_static")
endif ()
ExternalProject_Add (libarchive
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/libarchive/libarchive.git
GIT_TAG v3.4.3
CMAKE_ARGS
# passing the compiler seems to be needed for windows CI, sadly
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DENABLE_LZ4=ON
-ULZ4_*
-DLZ4_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:lz4_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
# because we are building a static lib, this lz4 library doesn't
# actually matter since you can't generally link static libs to other static
# libs. The include files are needed, but the library itself is not (until
# we link our application, at which point we use the lz4 we built above).
# nonetheless, we need to provide a library to libarchive else it will
# NOT include lz4 support when configuring
-DLZ4_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_RELEASE>>
-DENABLE_WERROR=OFF
-DENABLE_TAR=OFF
-DENABLE_TAR_SHARED=OFF
-DENABLE_INSTALL=ON
-DENABLE_NETTLE=OFF
-DENABLE_OPENSSL=OFF
-DENABLE_LZO=OFF
-DENABLE_LZMA=OFF
-DENABLE_ZLIB=OFF
-DENABLE_BZip2=OFF
-DENABLE_LIBXML2=OFF
-DENABLE_EXPAT=OFF
-DENABLE_PCREPOSIX=OFF
-DENABLE_LibGCC=OFF
-DENABLE_CNG=OFF
-DENABLE_CPIO=OFF
-DENABLE_CPIO_SHARED=OFF
-DENABLE_CAT=OFF
-DENABLE_CAT_SHARED=OFF
-DENABLE_XATTR=OFF
-DENABLE_ACL=OFF
-DENABLE_ICONV=OFF
-DENABLE_TEST=OFF
-DENABLE_COVERAGE=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LIST_SEPARATOR ::
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--target archive_static
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/libarchive/$<CONFIG>/${ep_lib_prefix}archive${lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/libarchive
>
TEST_COMMAND ""
INSTALL_COMMAND ""
DEPENDS lz4_lib
BUILD_BYPRODUCTS
<BINARY_DIR>/libarchive/${ep_lib_prefix}archive${lib_post}${ep_lib_suffix}
<BINARY_DIR>/libarchive/${ep_lib_prefix}archive${lib_post}_d${ep_lib_suffix}
)
ExternalProject_Get_Property (libarchive BINARY_DIR)
ExternalProject_Get_Property (libarchive SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (libarchive)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/libarchive)
set_target_properties (archive_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/libarchive/${ep_lib_prefix}archive${lib_post}_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/libarchive/${ep_lib_prefix}archive${lib_post}${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/libarchive
INTERFACE_COMPILE_DEFINITIONS
LIBARCHIVE_STATIC)
endif()
add_dependencies (archive_lib libarchive)
target_link_libraries (archive_lib INTERFACE lz4_lib)
target_link_libraries (ripple_libs INTERFACE archive_lib)
exclude_if_included (libarchive)
exclude_if_included (archive_lib)

View File

@@ -0,0 +1,79 @@
#[===================================================================[
NIH dep: lz4
#]===================================================================]
add_library (lz4_lib STATIC IMPORTED GLOBAL)
if (NOT WIN32)
find_package(lz4)
endif()
if(lz4)
set_target_properties (lz4_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${lz4}
IMPORTED_LOCATION_RELEASE
${lz4}
INTERFACE_INCLUDE_DIRECTORIES
${LZ4_INCLUDE_DIR})
else()
ExternalProject_Add (lz4
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/lz4/lz4.git
GIT_TAG v1.9.2
SOURCE_SUBDIR contrib/cmake_unofficial
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DBUILD_STATIC_LIBS=ON
-DBUILD_SHARED_LIBS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--target lz4_static
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}lz4$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>
>
TEST_COMMAND ""
INSTALL_COMMAND ""
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}lz4${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}lz4_d${ep_lib_suffix}
)
ExternalProject_Get_Property (lz4 BINARY_DIR)
ExternalProject_Get_Property (lz4 SOURCE_DIR)
file (MAKE_DIRECTORY ${SOURCE_DIR}/lz4)
set_target_properties (lz4_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/${ep_lib_prefix}lz4_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/${ep_lib_prefix}lz4${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/lib)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (lz4)
endif ()
add_dependencies (lz4_lib lz4)
target_link_libraries (ripple_libs INTERFACE lz4_lib)
exclude_if_included (lz4)
endif()
exclude_if_included (lz4_lib)

View File

@@ -0,0 +1,31 @@
#[===================================================================[
NIH dep: nudb
NuDB is header-only, thus is an INTERFACE lib in CMake.
TODO: move the library definition into NuDB repo and add
proper targets and export/install
#]===================================================================]
if (is_root_project) # NuDB not needed in the case of xrpl_core inclusion build
add_library (nudb INTERFACE)
FetchContent_Declare(
nudb_src
GIT_REPOSITORY https://github.com/CPPAlliance/NuDB.git
GIT_TAG 2.0.5
)
FetchContent_GetProperties(nudb_src)
if(NOT nudb_src_POPULATED)
message (STATUS "Pausing to download NuDB...")
FetchContent_Populate(nudb_src)
endif()
file(TO_CMAKE_PATH "${nudb_src_SOURCE_DIR}" nudb_src_SOURCE_DIR)
# specify as system includes so as to avoid warnings
target_include_directories (nudb SYSTEM INTERFACE ${nudb_src_SOURCE_DIR}/include)
target_link_libraries (nudb
INTERFACE
Boost::thread
Boost::system)
add_library (NIH::nudb ALIAS nudb)
target_link_libraries (ripple_libs INTERFACE NIH::nudb)
endif ()

View File

@@ -0,0 +1,48 @@
#[===================================================================[
NIH dep: openssl
#]===================================================================]
#[===============================================[
OPENSSL_ROOT_DIR is the only variable that
FindOpenSSL honors for locating, so convert any
OPENSSL_ROOT vars to this
#]===============================================]
if (NOT DEFINED OPENSSL_ROOT_DIR)
if (DEFINED ENV{OPENSSL_ROOT})
set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
elseif (HOMEBREW)
execute_process (COMMAND ${HOMEBREW} --prefix openssl
OUTPUT_VARIABLE OPENSSL_ROOT_DIR
OUTPUT_STRIP_TRAILING_WHITESPACE)
endif ()
file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
endif ()
if (static)
set (OPENSSL_USE_STATIC_LIBS ON)
endif ()
set (OPENSSL_MSVC_STATIC_RT ON)
find_package (OpenSSL 1.1.1 REQUIRED)
target_link_libraries (ripple_libs
INTERFACE
OpenSSL::SSL
OpenSSL::Crypto)
# disable SSLv2...this can also be done when building/configuring OpenSSL
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2)
#[=========================================================[
https://gitlab.kitware.com/cmake/cmake/issues/16885
depending on how openssl is built, it might depend
on zlib. In fact, the openssl find package should
figure this out for us, but it does not currently...
so let's add zlib ourselves to the lib list
TODO: investigate linking to static zlib for static
build option
#]=========================================================]
find_package (ZLIB)
set (has_zlib FALSE)
if (TARGET ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
set (has_zlib TRUE)
endif ()

View File

@@ -0,0 +1,70 @@
if(reporting)
find_package(PostgreSQL)
if(NOT PostgreSQL_FOUND)
message("find_package did not find postgres")
find_library(postgres NAMES pq libpq libpq-dev pq-dev postgresql-devel)
find_path(libpq-fe NAMES libpq-fe.h PATH_SUFFIXES postgresql pgsql include)
if(NOT libpq-fe OR NOT postgres)
message("No system installed Postgres found. Will build")
add_library(postgres SHARED IMPORTED GLOBAL)
add_library(pgport SHARED IMPORTED GLOBAL)
add_library(pgcommon SHARED IMPORTED GLOBAL)
ExternalProject_Add(postgres_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/postgres/postgres.git
GIT_TAG REL_14_5
CONFIGURE_COMMAND ./configure --without-readline > /dev/null
BUILD_COMMAND ${CMAKE_COMMAND} -E env --unset=MAKELEVEL make
UPDATE_COMMAND ""
BUILD_IN_SOURCE 1
INSTALL_COMMAND ""
BUILD_BYPRODUCTS
<BINARY_DIR>/src/interfaces/libpq/${ep_lib_prefix}pq.a
<BINARY_DIR>/src/common/${ep_lib_prefix}pgcommon.a
<BINARY_DIR>/src/port/${ep_lib_prefix}pgport.a
LOG_BUILD TRUE
)
ExternalProject_Get_Property (postgres_src SOURCE_DIR)
ExternalProject_Get_Property (postgres_src BINARY_DIR)
set (postgres_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${postgres_src_SOURCE_DIR})
list(APPEND INCLUDE_DIRS
${SOURCE_DIR}/src/include
${SOURCE_DIR}/src/interfaces/libpq
)
set_target_properties(postgres PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/src/interfaces/libpq/${ep_lib_prefix}pq.a
INTERFACE_INCLUDE_DIRECTORIES
"${INCLUDE_DIRS}"
)
set_target_properties(pgcommon PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/src/common/${ep_lib_prefix}pgcommon.a
INTERFACE_INCLUDE_DIRECTORIES
"${INCLUDE_DIRS}"
)
set_target_properties(pgport PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/src/port/${ep_lib_prefix}pgport.a
INTERFACE_INCLUDE_DIRECTORIES
"${INCLUDE_DIRS}"
)
add_dependencies(postgres postgres_src)
add_dependencies(pgcommon postgres_src)
add_dependencies(pgport postgres_src)
file(TO_CMAKE_PATH "${postgres_src_SOURCE_DIR}" postgres_src_SOURCE_DIR)
target_link_libraries(ripple_libs INTERFACE postgres pgcommon pgport)
else()
message("Found system installed Postgres via find_libary")
target_include_directories(ripple_libs INTERFACE ${libpq-fe})
target_link_libraries(ripple_libs INTERFACE ${postgres})
endif()
else()
message("Found system installed Postgres via find_package")
target_include_directories(ripple_libs INTERFACE ${PostgreSQL_INCLUDE_DIRS})
target_link_libraries(ripple_libs INTERFACE ${PostgreSQL_LIBRARIES})
endif()
endif()

View File

@@ -0,0 +1,155 @@
#[===================================================================[
import protobuf (lib and compiler) and create a lib
from our proto message definitions. If the system protobuf
is not found, fallback on EP to download and build a version
from official source.
#]===================================================================]
if (static)
set (Protobuf_USE_STATIC_LIBS ON)
endif ()
find_package (Protobuf 3.8)
if (is_multiconfig)
set(protobuf_protoc_lib ${Protobuf_PROTOC_LIBRARIES})
else ()
string(TOUPPER ${CMAKE_BUILD_TYPE} upper_cmake_build_type)
set(protobuf_protoc_lib ${Protobuf_PROTOC_LIBRARY_${upper_cmake_build_type}})
endif ()
if (local_protobuf OR NOT (Protobuf_FOUND AND Protobuf_PROTOC_EXECUTABLE AND protobuf_protoc_lib))
include (GNUInstallDirs)
message (STATUS "using local protobuf build.")
set(protobuf_reqs Protobuf_PROTOC_EXECUTABLE protobuf_protoc_lib)
foreach(lib ${protobuf_reqs})
if(NOT ${lib})
message(STATUS "Couldn't find ${lib}")
endif()
endforeach()
if (WIN32)
# protobuf prepends lib even on windows
set (pbuf_lib_pre "lib")
else ()
set (pbuf_lib_pre ${ep_lib_prefix})
endif ()
# for the external project build of protobuf, we currently ignore the
# static option and always build static libs here. This is consistent
# with our other EP builds. Dynamic libs in an EP would add complexity
# because we'd need to get them into the runtime path, and probably
# install them.
ExternalProject_Add (protobuf_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/protocolbuffers/protobuf.git
GIT_TAG v3.8.0
SOURCE_SUBDIR cmake
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
-Dprotobuf_BUILD_TESTS=OFF
-Dprotobuf_BUILD_EXAMPLES=OFF
-Dprotobuf_BUILD_PROTOC_BINARIES=ON
-Dprotobuf_MSVC_STATIC_RUNTIME=ON
-DBUILD_SHARED_LIBS=OFF
-Dprotobuf_BUILD_SHARED_LIBS=OFF
-DCMAKE_DEBUG_POSTFIX=_d
-Dprotobuf_DEBUG_POSTFIX=_d
-Dprotobuf_WITH_ZLIB=$<IF:$<BOOL:${has_zlib}>,ON,OFF>
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON>
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
TEST_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
BUILD_BYPRODUCTS
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf${ep_lib_suffix}
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf_d${ep_lib_suffix}
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc${ep_lib_suffix}
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc_d${ep_lib_suffix}
<BINARY_DIR>/_installed_/bin/protoc${CMAKE_EXECUTABLE_SUFFIX}
)
ExternalProject_Get_Property (protobuf_src BINARY_DIR)
ExternalProject_Get_Property (protobuf_src SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (protobuf_src)
endif ()
exclude_if_included (protobuf_src)
if (NOT TARGET protobuf::libprotobuf)
add_library (protobuf::libprotobuf STATIC IMPORTED GLOBAL)
endif ()
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
set_target_properties (protobuf::libprotobuf PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${BINARY_DIR}/_installed_/include)
add_dependencies (protobuf::libprotobuf protobuf_src)
exclude_if_included (protobuf::libprotobuf)
if (NOT TARGET protobuf::libprotoc)
add_library (protobuf::libprotoc STATIC IMPORTED GLOBAL)
endif ()
set_target_properties (protobuf::libprotoc PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${BINARY_DIR}/_installed_/include)
add_dependencies (protobuf::libprotoc protobuf_src)
exclude_if_included (protobuf::libprotoc)
if (NOT TARGET protobuf::protoc)
add_executable (protobuf::protoc IMPORTED)
exclude_if_included (protobuf::protoc)
endif ()
set_target_properties (protobuf::protoc PROPERTIES
IMPORTED_LOCATION "${BINARY_DIR}/_installed_/bin/protoc${CMAKE_EXECUTABLE_SUFFIX}")
add_dependencies (protobuf::protoc protobuf_src)
else ()
if (NOT TARGET protobuf::protoc)
if (EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
add_executable (protobuf::protoc IMPORTED)
set_target_properties (protobuf::protoc PROPERTIES
IMPORTED_LOCATION "${Protobuf_PROTOC_EXECUTABLE}")
else ()
message (FATAL_ERROR "Protobuf import failed")
endif ()
endif ()
endif ()
file (MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/proto_gen)
set (save_CBD ${CMAKE_CURRENT_BINARY_DIR})
set (CMAKE_CURRENT_BINARY_DIR ${CMAKE_BINARY_DIR}/proto_gen)
protobuf_generate_cpp (
PROTO_SRCS
PROTO_HDRS
src/ripple/proto/ripple.proto)
set (CMAKE_CURRENT_BINARY_DIR ${save_CBD})
add_library (pbufs STATIC ${PROTO_SRCS} ${PROTO_HDRS})
target_include_directories (pbufs PRIVATE src)
target_include_directories (pbufs
SYSTEM PUBLIC ${CMAKE_BINARY_DIR}/proto_gen)
target_link_libraries (pbufs protobuf::libprotobuf)
target_compile_options (pbufs
PUBLIC
$<$<BOOL:${is_xcode}>:
--system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec
>)
add_library (Ripple::pbufs ALIAS pbufs)
target_link_libraries (ripple_libs INTERFACE Ripple::pbufs)
exclude_if_included (pbufs)

View File

@@ -0,0 +1,177 @@
#[===================================================================[
NIH dep: rocksdb
#]===================================================================]
add_library (rocksdb_lib UNKNOWN IMPORTED GLOBAL)
set_target_properties (rocksdb_lib
PROPERTIES INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1)
option (local_rocksdb "use local build of rocksdb." OFF)
if (NOT local_rocksdb)
find_package (RocksDB 6.27 QUIET CONFIG)
if (TARGET RocksDB::rocksdb)
message (STATUS "Found RocksDB using config.")
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION_DEBUG)
if (_rockslib_l)
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_DEBUG ${_rockslib_l})
endif ()
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION_RELEASE)
if (_rockslib_l)
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_RELEASE ${_rockslib_l})
endif ()
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION)
if (_rockslib_l)
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION ${_rockslib_l})
endif ()
get_target_property (_rockslib_i RocksDB::rocksdb INTERFACE_INCLUDE_DIRECTORIES)
if (_rockslib_i)
set_target_properties (rocksdb_lib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${_rockslib_i})
endif ()
target_link_libraries (ripple_libs INTERFACE RocksDB::rocksdb)
else ()
# using a find module with rocksdb is difficult because
# you have no idea how it was configured (transitive dependencies).
# the code below will generally find rocksdb using the module, but
# will then result in linker errors for static linkage since the
# transitive dependencies are unknown. force local build here for now, but leave the code as
# a placeholder for future investigation.
if (static)
set (local_rocksdb ON CACHE BOOL "" FORCE)
# TBD if there is some way to extract transitive deps..then:
#set (RocksDB_USE_STATIC ON)
else ()
find_package (RocksDB 6.27 MODULE)
if (ROCKSDB_FOUND)
if (RocksDB_LIBRARY_DEBUG)
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_DEBUG ${RocksDB_LIBRARY_DEBUG})
endif ()
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_RELEASE ${RocksDB_LIBRARIES})
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION ${RocksDB_LIBRARIES})
set_target_properties (rocksdb_lib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${RocksDB_INCLUDE_DIRS})
else ()
set (local_rocksdb ON CACHE BOOL "" FORCE)
endif ()
endif ()
endif ()
endif ()
if (local_rocksdb)
message (STATUS "Using local build of RocksDB.")
ExternalProject_Add (rocksdb
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/facebook/rocksdb.git
GIT_TAG v6.27.3
PATCH_COMMAND
# only used by windows build
${CMAKE_COMMAND} -E copy_if_different
${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/rocks_thirdparty.inc
<SOURCE_DIR>/thirdparty.inc
COMMAND
# fixup their build version file to keep the values
# from changing always
${CMAKE_COMMAND} -E copy_if_different
${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/rocksdb_build_version.cc.in
<SOURCE_DIR>/util/build_version.cc.in
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DWITH_JEMALLOC=$<IF:$<BOOL:${jemalloc}>,ON,OFF>
-DWITH_SNAPPY=ON
-DWITH_LZ4=ON
-DWITH_ZLIB=OFF
-DUSE_RTTI=ON
-DWITH_ZSTD=OFF
-DWITH_GFLAGS=OFF
-DWITH_BZ2=OFF
-ULZ4_*
-Ulz4_*
-Dlz4_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:lz4_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
-Dlz4_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_RELEASE>>
-Dlz4_FOUND=ON
-USNAPPY_*
-Usnappy_*
-USnappy_*
-Dsnappy_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:snappy_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
-Dsnappy_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_RELEASE>>
-Dsnappy_FOUND=ON
-DSnappy_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:snappy_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
-DSnappy_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_RELEASE>>
-DSnappy_FOUND=ON
-DWITH_MD_LIBRARY=OFF
-DWITH_RUNTIME_DEBUG=$<IF:$<CONFIG:Debug>,ON,OFF>
-DFAIL_ON_WARNINGS=OFF
-DWITH_ASAN=OFF
-DWITH_TSAN=OFF
-DWITH_UBSAN=OFF
-DWITH_NUMA=OFF
-DWITH_TBB=OFF
-DWITH_WINDOWS_UTF8_FILENAMES=OFF
-DWITH_XPRESS=OFF
-DPORTABLE=ON
-DFORCE_SSE42=OFF
-DDISABLE_STALL_NOTIF=OFF
-DOPTDBG=ON
-DROCKSDB_LITE=OFF
-DWITH_FALLOCATE=ON
-DWITH_LIBRADOS=OFF
-DWITH_JNI=OFF
-DROCKSDB_INSTALL_ON_WINDOWS=OFF
-DWITH_TESTS=OFF
-DWITH_TOOLS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -MP /DNDEBUG"
>
$<$<NOT:$<BOOL:${MSVC}>>:
"-DCMAKE_CXX_FLAGS=-DNDEBUG"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}rocksdb$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>
>
LIST_SEPARATOR ::
TEST_COMMAND ""
INSTALL_COMMAND ""
DEPENDS snappy_lib lz4_lib
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}rocksdb${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}rocksdb_d${ep_lib_suffix}
)
ExternalProject_Get_Property (rocksdb BINARY_DIR)
ExternalProject_Get_Property (rocksdb SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (rocksdb)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
set_target_properties (rocksdb_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/${ep_lib_prefix}rocksdb_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/${ep_lib_prefix}rocksdb${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies (rocksdb_lib rocksdb)
exclude_if_included (rocksdb)
endif ()
target_link_libraries (rocksdb_lib
INTERFACE
snappy_lib
lz4_lib
$<$<BOOL:${MSVC}>:rpcrt4>)
exclude_if_included (rocksdb_lib)
target_link_libraries (ripple_libs INTERFACE rocksdb_lib)

View File

@@ -0,0 +1,58 @@
#[===================================================================[
NIH dep: secp256k1
#]===================================================================]
add_library (secp256k1_lib STATIC IMPORTED GLOBAL)
if (NOT WIN32)
find_package(secp256k1)
endif()
if(secp256k1)
set_target_properties (secp256k1_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${secp256k1}
IMPORTED_LOCATION_RELEASE
${secp256k1}
INTERFACE_INCLUDE_DIRECTORIES
${SECP256K1_INCLUDE_DIR})
add_library (secp256k1 ALIAS secp256k1_lib)
add_library (NIH::secp256k1 ALIAS secp256k1_lib)
else()
set(INSTALL_SECP256K1 true)
add_library (secp256k1 STATIC
src/secp256k1/src/secp256k1.c)
target_compile_definitions (secp256k1
PRIVATE
USE_NUM_NONE
USE_FIELD_10X26
USE_FIELD_INV_BUILTIN
USE_SCALAR_8X32
USE_SCALAR_INV_BUILTIN)
target_include_directories (secp256k1
PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
$<INSTALL_INTERFACE:include>
PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/secp256k1)
target_compile_options (secp256k1
PRIVATE
$<$<BOOL:${MSVC}>:-wd4319>
$<$<NOT:$<BOOL:${MSVC}>>:
-Wno-deprecated-declarations
-Wno-unused-function
>
$<$<BOOL:${is_gcc}>:-Wno-nonnull-compare>)
target_link_libraries (ripple_libs INTERFACE NIH::secp256k1)
#[===========================[
headers installation
#]===========================]
install (
FILES
src/secp256k1/include/secp256k1.h
DESTINATION include/secp256k1/include)
add_library (NIH::secp256k1 ALIAS secp256k1)
endif()

View File

@@ -0,0 +1,77 @@
#[===================================================================[
NIH dep: snappy
#]===================================================================]
add_library (snappy_lib STATIC IMPORTED GLOBAL)
if (NOT WIN32)
find_package(snappy)
endif()
if(snappy)
set_target_properties (snappy_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${snappy}
IMPORTED_LOCATION_RELEASE
${snappy}
INTERFACE_INCLUDE_DIRECTORIES
${SNAPPY_INCLUDE_DIR})
else()
ExternalProject_Add (snappy
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/google/snappy.git
GIT_TAG 1.1.7
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DSNAPPY_BUILD_TESTS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
"-DCMAKE_CXX_FLAGS_DEBUG=-MTd"
"-DCMAKE_CXX_FLAGS_RELEASE=-MT"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}snappy$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>
>
TEST_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E copy_if_different <BINARY_DIR>/config.h <BINARY_DIR>/snappy-stubs-public.h <SOURCE_DIR>
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}snappy${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}snappy_d${ep_lib_suffix}
)
ExternalProject_Get_Property (snappy BINARY_DIR)
ExternalProject_Get_Property (snappy SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (snappy)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/snappy)
set_target_properties (snappy_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/${ep_lib_prefix}snappy_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/${ep_lib_prefix}snappy${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR})
endif()
add_dependencies (snappy_lib snappy)
target_link_libraries (ripple_libs INTERFACE snappy_lib)
exclude_if_included (snappy)
exclude_if_included (snappy_lib)

View File

@@ -0,0 +1,165 @@
#[===================================================================[
NIH dep: soci
#]===================================================================]
foreach (_comp core empty sqlite3)
add_library ("soci_${_comp}" STATIC IMPORTED GLOBAL)
endforeach ()
if (NOT WIN32)
find_package(soci)
endif()
if (soci)
foreach (_comp core empty sqlite3)
set_target_properties ("soci_${_comp}" PROPERTIES
IMPORTED_LOCATION_DEBUG
${soci}
IMPORTED_LOCATION_RELEASE
${soci}
INTERFACE_INCLUDE_DIRECTORIES
${SOCI_INCLUDE_DIR})
endforeach ()
else()
set (soci_lib_pre ${ep_lib_prefix})
set (soci_lib_post "")
if (WIN32)
# for some reason soci on windows still prepends lib (non-standard)
set (soci_lib_pre lib)
# this version in the name might change if/when we change versions of soci
set (soci_lib_post "_4_0")
endif ()
get_target_property (_boost_incs Boost::date_time INTERFACE_INCLUDE_DIRECTORIES)
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION)
if (NOT _boost_dt)
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION_RELEASE)
endif ()
if (NOT _boost_dt)
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION_DEBUG)
endif ()
ExternalProject_Add (soci
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/SOCI/soci.git
GIT_TAG 04e1870294918d20761736743bb6136314c42dd5
# We had an issue with soci integer range checking for boost::optional
# and needed to remove the exception that SOCI throws in this case.
# This is *probably* a bug in SOCI, but has never been investigated more
# nor reported to the maintainers.
# This cmake script comments out the lines in question.
# This patch process is likely fragile and should be reviewed carefully
# whenever we update the GIT_TAG above.
PATCH_COMMAND
${CMAKE_COMMAND} -D RIPPLED_SOURCE=${CMAKE_CURRENT_SOURCE_DIR}
-P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/soci_patch.cmake
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${CMAKE_TOOLCHAIN_FILE}>:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}>
$<$<BOOL:${VCPKG_TARGET_TRIPLET}>:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}>
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON>
-DCMAKE_PREFIX_PATH=${CMAKE_BINARY_DIR}/sqlite3
-DCMAKE_MODULE_PATH=${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake
-DCMAKE_INCLUDE_PATH=$<JOIN:$<TARGET_PROPERTY:sqlite,INTERFACE_INCLUDE_DIRECTORIES>,::>
-DCMAKE_LIBRARY_PATH=${sqlite_BINARY_DIR}
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DSOCI_CXX_C11=ON
-DSOCI_STATIC=ON
-DSOCI_LIBDIR=lib
-DSOCI_SHARED=OFF
-DSOCI_TESTS=OFF
# hacks to workaround the fact that soci doesn't currently use
# boost imported targets in its cmake. If they switch to
# proper imported targets, this next line can be removed
# (as well as the get_property above that sets _boost_incs)
-DBoost_INCLUDE_DIRS=$<JOIN:${_boost_incs},::>
-DBoost_INCLUDE_DIR=$<JOIN:${_boost_incs},::>
-DBOOST_ROOT=${BOOST_ROOT}
-DWITH_BOOST=ON
-DBoost_FOUND=ON
-DBoost_NO_BOOST_CMAKE=ON
-DBoost_DATE_TIME_FOUND=ON
-DSOCI_HAVE_BOOST=ON
-DSOCI_HAVE_BOOST_DATE_TIME=ON
-DBoost_DATE_TIME_LIBRARY=${_boost_dt}
-DSOCI_DB2=OFF
-DSOCI_FIREBIRD=OFF
-DSOCI_MYSQL=OFF
-DSOCI_ODBC=OFF
-DSOCI_ORACLE=OFF
-DSOCI_POSTGRESQL=OFF
-DSOCI_SQLITE3=ON
-DSQLITE3_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:sqlite,INTERFACE_INCLUDE_DIRECTORIES>,::>
-DSQLITE3_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:sqlite,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:sqlite,IMPORTED_LOCATION_RELEASE>>
$<$<BOOL:${APPLE}>:-DCMAKE_FIND_FRAMEWORK=LAST>
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
"-DCMAKE_CXX_FLAGS_DEBUG=-MTd"
"-DCMAKE_CXX_FLAGS_RELEASE=-MT"
>
$<$<NOT:$<BOOL:${MSVC}>>:
"-DCMAKE_CXX_FLAGS=-Wno-deprecated-declarations"
>
# SEE: https://github.com/SOCI/soci/issues/640
$<$<AND:$<BOOL:${is_gcc}>,$<VERSION_GREATER_EQUAL:${CMAKE_CXX_COMPILER_VERSION},8>>:
"-DCMAKE_CXX_FLAGS=-Wno-deprecated-declarations -Wno-error=format-overflow -Wno-format-overflow -Wno-error=format-truncation"
>
LIST_SEPARATOR ::
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_core${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_empty${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_sqlite3${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/lib
>
TEST_COMMAND ""
INSTALL_COMMAND ""
DEPENDS sqlite
BUILD_BYPRODUCTS
<BINARY_DIR>/lib/${soci_lib_pre}soci_core${soci_lib_post}${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_core${soci_lib_post}_d${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_empty${soci_lib_post}${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_empty${soci_lib_post}_d${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_sqlite3${soci_lib_post}${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_sqlite3${soci_lib_post}_d${ep_lib_suffix}
)
ExternalProject_Get_Property (soci BINARY_DIR)
ExternalProject_Get_Property (soci SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (soci)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
file (MAKE_DIRECTORY ${BINARY_DIR}/include)
foreach (_comp core empty sqlite3)
set_target_properties ("soci_${_comp}" PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/lib/${soci_lib_pre}soci_${_comp}${soci_lib_post}_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/lib/${soci_lib_pre}soci_${_comp}${soci_lib_post}${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
"${SOURCE_DIR}/include;${BINARY_DIR}/include")
add_dependencies ("soci_${_comp}" soci) # something has to depend on the ExternalProject to trigger it
target_link_libraries (ripple_libs INTERFACE "soci_${_comp}")
if (NOT _comp STREQUAL "core")
target_link_libraries ("soci_${_comp}" INTERFACE soci_core)
endif ()
endforeach ()
endif()
foreach (_comp core empty sqlite3)
exclude_if_included ("soci_${_comp}")
endforeach ()
exclude_if_included (soci)

View File

@@ -0,0 +1,93 @@
#[===================================================================[
NIH dep: sqlite
#]===================================================================]
add_library (sqlite STATIC IMPORTED GLOBAL)
if (NOT WIN32)
find_package(sqlite)
endif()
if(sqlite3)
set_target_properties (sqlite PROPERTIES
IMPORTED_LOCATION_DEBUG
${sqlite3}
IMPORTED_LOCATION_RELEASE
${sqlite3}
INTERFACE_INCLUDE_DIRECTORIES
${SQLITE_INCLUDE_DIR})
else()
ExternalProject_Add (sqlite3
PREFIX ${nih_cache_path}
# sqlite doesn't use git, but it provides versioned tarballs
URL https://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip
http://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip
https://www2.sqlite.org/2018/sqlite-amalgamation-3260000.zip
http://www2.sqlite.org/2018/sqlite-amalgamation-3260000.zip
# ^^^ version is apparent in the URL: 3260000 => 3.26.0
URL_HASH SHA256=de5dcab133aa339a4cf9e97c40aa6062570086d6085d8f9ad7bc6ddf8a52096e
# Don't need to worry about MITM attacks too much because the download
# is checked against a strong hash
TLS_VERIFY false
# we wrote a very simple CMake file to build sqlite
# so that's what we copy here so that we can build with
# CMake. sqlite doesn't generally provide a build system
# for the single amalgamation source file.
PATCH_COMMAND
${CMAKE_COMMAND} -E copy_if_different
${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/CMake_sqlite3.txt
<SOURCE_DIR>/CMakeLists.txt
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}sqlite3$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>
>
TEST_COMMAND ""
INSTALL_COMMAND ""
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}sqlite3${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}sqlite3_d${ep_lib_suffix}
)
ExternalProject_Get_Property (sqlite3 BINARY_DIR)
ExternalProject_Get_Property (sqlite3 SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (sqlite3)
endif ()
set_target_properties (sqlite PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/${ep_lib_prefix}sqlite3_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/${ep_lib_prefix}sqlite3${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR})
add_dependencies (sqlite sqlite3)
exclude_if_included (sqlite3)
endif()
target_link_libraries (sqlite INTERFACE $<$<NOT:$<BOOL:${MSVC}>>:dl>)
target_link_libraries (ripple_libs INTERFACE sqlite)
exclude_if_included (sqlite)
set(sqlite_BINARY_DIR ${BINARY_DIR})
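# (illustrative only) the wrapper copied by PATCH_COMMAND above lives at
# Builds/CMake/CMake_sqlite3.txt; its real contents are not shown here, but
# a minimal sketch of such a wrapper for the amalgamation would be:
#   cmake_minimum_required (VERSION 3.11)
#   project (sqlite3 C)
#   add_library (sqlite3 STATIC sqlite3.c)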

View File

@@ -0,0 +1,84 @@
#[===================================================================[
NIH dep: wasmedge: web assembly runtime for hooks.
#]===================================================================]
find_package(Curses)
if(CURSES_FOUND)
include_directories(${CURSES_INCLUDE_DIR})
target_link_libraries(ripple_libs INTERFACE ${CURSES_LIBRARY})
else()
message(WARNING "CURSES library not found... (only important for mac builds)")
endif()
find_package(LLVM REQUIRED CONFIG)
message(STATUS "Found LLVM ${LLVM_PACKAGE_VERSION}")
message(STATUS "Using LLVMConfig.cmake in: ${LLVM_DIR}")
ExternalProject_Add (wasmedge_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/WasmEdge/WasmEdge.git
GIT_TAG 0.11.2
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
-DWASMEDGE_BUILD_SHARED_LIB=OFF
-DWASMEDGE_BUILD_STATIC_LIB=ON
-DWASMEDGE_BUILD_AOT_RUNTIME=ON
-DWASMEDGE_FORCE_DISABLE_LTO=ON
-DWASMEDGE_LINK_LLVM_STATIC=ON
-DWASMEDGE_LINK_TOOLS_STATIC=ON
-DWASMEDGE_BUILD_PLUGINS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DLLVM_DIR=${LLVM_DIR}
-DLLVM_LIBRARY_DIR=${LLVM_LIBRARY_DIR}
-DLLVM_ENABLE_TERMINFO=OFF
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP -march=native"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LOG_CONFIGURE ON
LOG_BUILD ON
COMMAND
pwd
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
$<$<VERSION_GREATER_EQUAL:${CMAKE_VERSION},3.12>:--parallel ${ep_procs}>
TEST_COMMAND ""
INSTALL_COMMAND ""
BUILD_BYPRODUCTS
<BINARY_DIR>/lib/api/libwasmedge.a
)
add_library (wasmedge STATIC IMPORTED GLOBAL)
ExternalProject_Get_Property (wasmedge_src BINARY_DIR)
ExternalProject_Get_Property (wasmedge_src SOURCE_DIR)
set (wasmedge_src_BINARY_DIR "${BINARY_DIR}")
add_dependencies (wasmedge wasmedge_src)
execute_process(
COMMAND
mkdir -p "${wasmedge_src_BINARY_DIR}/include/api"
)
set_target_properties (wasmedge PROPERTIES
IMPORTED_LOCATION_DEBUG
"${wasmedge_src_BINARY_DIR}/lib/api/libwasmedge.a"
IMPORTED_LOCATION_RELEASE
"${wasmedge_src_BINARY_DIR}/lib/api/libwasmedge.a"
INTERFACE_INCLUDE_DIRECTORIES
"${wasmedge_src_BINARY_DIR}/include/api/"
)
target_link_libraries (ripple_libs INTERFACE wasmedge)
#RH NOTE: some compilers / versions of some libraries need these, most don't
find_library(XAR_LIBRARY NAMES xar)
if(XAR_LIBRARY)
target_link_libraries(ripple_libs INTERFACE ${XAR_LIBRARY})
else()
message(WARNING "xar library not found... (only important for mac builds)")
endif()
add_library (NIH::WasmEdge ALIAS wasmedge)

View File

@@ -0,0 +1,167 @@
if(reporting)
find_library(cassandra NAMES cassandra)
if(NOT cassandra)
message("System installed Cassandra cpp driver not found. Will build")
find_library(zlib NAMES zlib1g-dev zlib-devel zlib z)
if(NOT zlib)
message("zlib not found. will build")
add_library(zlib STATIC IMPORTED GLOBAL)
ExternalProject_Add(zlib_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/madler/zlib.git
GIT_TAG v1.2.12
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}z.a
LOG_BUILD TRUE
LOG_CONFIGURE TRUE
)
ExternalProject_Get_Property (zlib_src SOURCE_DIR)
ExternalProject_Get_Property (zlib_src BINARY_DIR)
set (zlib_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${zlib_src_SOURCE_DIR}/include)
set_target_properties (zlib PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/${ep_lib_prefix}z.a
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies(zlib zlib_src)
file(TO_CMAKE_PATH "${zlib_src_SOURCE_DIR}" zlib_src_SOURCE_DIR)
endif()
find_library(krb5 NAMES krb5-dev libkrb5-dev)
if(NOT krb5)
message("krb5 not found. will build")
add_library(krb5 STATIC IMPORTED GLOBAL)
ExternalProject_Add(krb5_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/krb5/krb5.git
GIT_TAG krb5-1.20-final
UPDATE_COMMAND ""
CONFIGURE_COMMAND autoreconf src && CFLAGS=-fcommon ./src/configure --enable-static --disable-shared > /dev/null
BUILD_IN_SOURCE 1
BUILD_COMMAND make
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <SOURCE_DIR>/lib/${ep_lib_prefix}krb5.a
LOG_BUILD TRUE
)
ExternalProject_Get_Property (krb5_src SOURCE_DIR)
ExternalProject_Get_Property (krb5_src BINARY_DIR)
set (krb5_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${krb5_src_SOURCE_DIR}/include)
set_target_properties (krb5 PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/lib/${ep_lib_prefix}krb5.a
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies(krb5 krb5_src)
file(TO_CMAKE_PATH "${krb5_src_SOURCE_DIR}" krb5_src_SOURCE_DIR)
endif()
find_library(libuv1 NAMES uv1 libuv1 libuv1-dev libuv1:amd64)
if(NOT libuv1)
message("libuv1 not found, will build")
add_library(libuv1 STATIC IMPORTED GLOBAL)
ExternalProject_Add(libuv_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/libuv/libuv.git
GIT_TAG v1.44.2
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}uv_a.a
LOG_BUILD TRUE
LOG_CONFIGURE TRUE
)
ExternalProject_Get_Property (libuv_src SOURCE_DIR)
ExternalProject_Get_Property (libuv_src BINARY_DIR)
set (libuv_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${libuv_src_SOURCE_DIR}/include)
set_target_properties (libuv1 PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/${ep_lib_prefix}uv_a.a
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies(libuv1 libuv_src)
file(TO_CMAKE_PATH "${libuv_src_SOURCE_DIR}" libuv_src_SOURCE_DIR)
endif()
add_library (cassandra STATIC IMPORTED GLOBAL)
ExternalProject_Add(cassandra_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/datastax/cpp-driver.git
GIT_TAG 2.16.2
CMAKE_ARGS
-DLIBUV_ROOT_DIR=${BINARY_DIR}
-DLIBUV_LIBRARY=${BINARY_DIR}/libuv_a.a
-DLIBUV_INCLUDE_DIR=${SOURCE_DIR}/include
-DCASS_BUILD_STATIC=ON
-DCASS_BUILD_SHARED=OFF
-DOPENSSL_ROOT_DIR=/opt/local/openssl
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}cassandra_static.a
LOG_BUILD TRUE
LOG_CONFIGURE TRUE
)
ExternalProject_Get_Property (cassandra_src SOURCE_DIR)
ExternalProject_Get_Property (cassandra_src BINARY_DIR)
set (cassandra_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${cassandra_src_SOURCE_DIR}/include)
set_target_properties (cassandra PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/${ep_lib_prefix}cassandra_static.a
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies(cassandra cassandra_src)
if(NOT libuv1)
ExternalProject_Add_StepDependencies(cassandra_src build libuv1)
target_link_libraries(cassandra INTERFACE libuv1)
else()
target_link_libraries(cassandra INTERFACE ${libuv1})
endif()
if(NOT krb5)
ExternalProject_Add_StepDependencies(cassandra_src build krb5)
target_link_libraries(cassandra INTERFACE krb5)
else()
target_link_libraries(cassandra INTERFACE ${krb5})
endif()
if(NOT zlib)
ExternalProject_Add_StepDependencies(cassandra_src build zlib)
target_link_libraries(cassandra INTERFACE zlib)
else()
target_link_libraries(cassandra INTERFACE ${zlib})
endif()
file(TO_CMAKE_PATH "${cassandra_src_SOURCE_DIR}" cassandra_src_SOURCE_DIR)
target_link_libraries(ripple_libs INTERFACE cassandra)
else()
message("Found system installed cassandra cpp driver")
find_path(cassandra_includes NAMES cassandra.h REQUIRED)
target_link_libraries (ripple_libs INTERFACE ${cassandra})
target_include_directories(ripple_libs INTERFACE ${cassandra_includes})
endif()
exclude_if_included (cassandra)
endif()

View File

@@ -0,0 +1,18 @@
#[===================================================================[
NIH dep: date
the main library is header-only, thus is an INTERFACE lib in CMake.
NOTE: this has been accepted into c++20 so can likely be replaced
when we update to that standard
#]===================================================================]
find_package (date QUIET)
if (NOT TARGET date::date)
FetchContent_Declare(
hh_date_src
GIT_REPOSITORY https://github.com/HowardHinnant/date.git
GIT_TAG fc4cf092f9674f2670fb9177edcdee870399b829
)
FetchContent_MakeAvailable(hh_date_src)
endif ()

View File

@@ -0,0 +1,364 @@
# currently linking to unsecure versions...if we switch, we'll
# need to add ssl as a link dependency to the grpc targets
option (use_secure_grpc "use TLS version of grpc libs." OFF)
if (use_secure_grpc)
set (grpc_suffix "")
else ()
set (grpc_suffix "_unsecure")
endif ()
find_package (gRPC 1.23 CONFIG QUIET)
if (TARGET gRPC::gpr AND NOT local_grpc)
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION_DEBUG)
if (NOT _grpc_l)
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION_RELEASE)
endif ()
if (NOT _grpc_l)
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION)
endif ()
message (STATUS "Found cmake config for gRPC. Using ${_grpc_l}.")
else ()
find_package (PkgConfig QUIET)
if (PKG_CONFIG_FOUND)
pkg_check_modules (grpc QUIET "grpc${grpc_suffix}>=1.25" "grpc++${grpc_suffix}" gpr)
endif ()
if (grpc_FOUND)
message (STATUS "Found gRPC using pkg-config. Using ${grpc_gpr_PREFIX}.")
endif ()
add_executable (gRPC::grpc_cpp_plugin IMPORTED)
exclude_if_included (gRPC::grpc_cpp_plugin)
if (grpc_FOUND AND NOT local_grpc)
# use installed grpc (via pkg-config)
macro (add_imported_grpc libname_)
if (static)
set (_search "${CMAKE_STATIC_LIBRARY_PREFIX}${libname_}${CMAKE_STATIC_LIBRARY_SUFFIX}")
else ()
set (_search "${CMAKE_SHARED_LIBRARY_PREFIX}${libname_}${CMAKE_SHARED_LIBRARY_SUFFIX}")
endif()
find_library(_found_${libname_}
NAMES ${_search}
HINTS ${grpc_LIBRARY_DIRS})
if (_found_${libname_})
message (STATUS "importing ${libname_} as ${_found_${libname_}}")
else ()
message (FATAL_ERROR "using pkg-config for grpc, can't find ${_search}")
endif ()
add_library ("gRPC::${libname_}" STATIC IMPORTED GLOBAL)
set_target_properties ("gRPC::${libname_}" PROPERTIES IMPORTED_LOCATION ${_found_${libname_}})
if (grpc_INCLUDE_DIRS)
set_target_properties ("gRPC::${libname_}" PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${grpc_INCLUDE_DIRS})
endif ()
target_link_libraries (ripple_libs INTERFACE "gRPC::${libname_}")
exclude_if_included ("gRPC::${libname_}")
endmacro ()
set_target_properties (gRPC::grpc_cpp_plugin PROPERTIES
IMPORTED_LOCATION "${grpc_gpr_PREFIX}/bin/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}")
pkg_check_modules (cares QUIET libcares)
if (cares_FOUND)
if (static)
set (_search "${CMAKE_STATIC_LIBRARY_PREFIX}cares${CMAKE_STATIC_LIBRARY_SUFFIX}")
set (_prefix cares_STATIC)
set (_static STATIC)
else ()
set (_search "${CMAKE_SHARED_LIBRARY_PREFIX}cares${CMAKE_SHARED_LIBRARY_SUFFIX}")
set (_prefix cares)
set (_static)
endif()
find_library(_location NAMES ${_search} HINTS ${cares_LIBRARY_DIRS})
if (NOT _location)
message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares")
endif ()
add_library (c-ares::cares ${_static} IMPORTED GLOBAL)
set_target_properties (c-ares::cares PROPERTIES
IMPORTED_LOCATION ${_location}
INTERFACE_INCLUDE_DIRECTORIES "${${_prefix}_INCLUDE_DIRS}"
INTERFACE_LINK_OPTIONS "${${_prefix}_LDFLAGS}"
)
exclude_if_included (c-ares::cares)
else ()
message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares")
endif ()
else ()
#[===========================[
c-ares (grpc requires)
#]===========================]
ExternalProject_Add (c-ares_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/c-ares/c-ares.git
GIT_TAG cares-1_15_0
CMAKE_ARGS
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
-DCARES_SHARED=OFF
-DCARES_STATIC=ON
-DCARES_STATIC_PIC=ON
-DCARES_INSTALL=ON
-DCARES_MSVC_STATIC_RUNTIME=ON
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
TEST_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
BUILD_BYPRODUCTS
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}cares${ep_lib_suffix}
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}cares_d${ep_lib_suffix}
)
exclude_if_included (c-ares_src)
ExternalProject_Get_Property (c-ares_src BINARY_DIR)
set (cares_binary_dir "${BINARY_DIR}")
add_library (c-ares::cares STATIC IMPORTED GLOBAL)
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
set_target_properties (c-ares::cares PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}cares_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}cares${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${BINARY_DIR}/_installed_/include)
add_dependencies (c-ares::cares c-ares_src)
exclude_if_included (c-ares::cares)
if (NOT has_zlib)
#[===========================[
zlib (grpc requires)
#]===========================]
if (MSVC)
set (zlib_debug_postfix "d") # zlib cmake sets this internally for MSVC, so we really don't have a choice
set (zlib_base "zlibstatic")
else ()
set (zlib_debug_postfix "_d")
set (zlib_base "z")
endif ()
ExternalProject_Add (zlib_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/madler/zlib.git
GIT_TAG v1.2.11
CMAKE_ARGS
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=${zlib_debug_postfix}
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
-DBUILD_SHARED_LIBS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
TEST_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
BUILD_BYPRODUCTS
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}${zlib_base}${ep_lib_suffix}
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}${zlib_base}${zlib_debug_postfix}${ep_lib_suffix}
)
exclude_if_included (zlib_src)
ExternalProject_Get_Property (zlib_src BINARY_DIR)
set (zlib_binary_dir "${BINARY_DIR}")
add_library (ZLIB::ZLIB STATIC IMPORTED GLOBAL)
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
set_target_properties (ZLIB::ZLIB PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}${zlib_base}${zlib_debug_postfix}${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}${zlib_base}${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${BINARY_DIR}/_installed_/include)
add_dependencies (ZLIB::ZLIB zlib_src)
exclude_if_included (ZLIB::ZLIB)
endif ()
#[===========================[
grpc
#]===========================]
ExternalProject_Add (grpc_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/grpc/grpc.git
GIT_TAG v1.25.0
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${CMAKE_TOOLCHAIN_FILE}>:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}>
$<$<BOOL:${VCPKG_TARGET_TRIPLET}>:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}>
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DgRPC_BUILD_TESTS=OFF
-DgRPC_BENCHMARK_PROVIDER=""
-DgRPC_BUILD_CSHARP_EXT=OFF
-DgRPC_MSVC_STATIC_RUNTIME=ON
-DgRPC_INSTALL=OFF
-DgRPC_CARES_PROVIDER=package
-Dc-ares_DIR=${cares_binary_dir}/_installed_/lib/cmake/c-ares
-DgRPC_SSL_PROVIDER=package
-DOPENSSL_ROOT_DIR=${OPENSSL_ROOT_DIR}
-DgRPC_PROTOBUF_PROVIDER=package
-DProtobuf_USE_STATIC_LIBS=$<IF:$<AND:$<BOOL:${Protobuf_FOUND}>,$<NOT:$<BOOL:${static}>>>,OFF,ON>
-DProtobuf_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:protobuf::libprotobuf,INTERFACE_INCLUDE_DIRECTORIES>,:_:>
-DProtobuf_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:protobuf::libprotobuf,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:protobuf::libprotobuf,IMPORTED_LOCATION_RELEASE>>
-DProtobuf_PROTOC_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:protobuf::libprotoc,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:protobuf::libprotoc,IMPORTED_LOCATION_RELEASE>>
-DProtobuf_PROTOC_EXECUTABLE=$<TARGET_PROPERTY:protobuf::protoc,IMPORTED_LOCATION>
-DgRPC_ZLIB_PROVIDER=package
$<$<NOT:$<BOOL:${has_zlib}>>:-DZLIB_ROOT=${zlib_binary_dir}/_installed_>
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}grpc${grpc_suffix}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}grpc++${grpc_suffix}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}address_sorting$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}gpr$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/$<CONFIG>/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}
<BINARY_DIR>
>
LIST_SEPARATOR :_:
TEST_COMMAND ""
INSTALL_COMMAND ""
DEPENDS c-ares_src
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}grpc${grpc_suffix}${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}grpc${grpc_suffix}_d${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}grpc++${grpc_suffix}${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}grpc++${grpc_suffix}_d${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}address_sorting${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}address_sorting_d${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}gpr${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}gpr_d${ep_lib_suffix}
<BINARY_DIR>/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}
)
if (TARGET protobuf_src)
ExternalProject_Add_StepDependencies(grpc_src build protobuf_src)
endif ()
exclude_if_included (grpc_src)
ExternalProject_Get_Property (grpc_src BINARY_DIR)
ExternalProject_Get_Property (grpc_src SOURCE_DIR)
set (grpc_binary_dir "${BINARY_DIR}")
set (grpc_source_dir "${SOURCE_DIR}")
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (grpc_src)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
macro (add_imported_grpc libname_)
add_library ("gRPC::${libname_}" STATIC IMPORTED GLOBAL)
set_target_properties ("gRPC::${libname_}" PROPERTIES
IMPORTED_LOCATION_DEBUG
${grpc_binary_dir}/${ep_lib_prefix}${libname_}_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${grpc_binary_dir}/${ep_lib_prefix}${libname_}${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${grpc_source_dir}/include)
add_dependencies ("gRPC::${libname_}" grpc_src)
target_link_libraries (ripple_libs INTERFACE "gRPC::${libname_}")
exclude_if_included ("gRPC::${libname_}")
endmacro ()
set_target_properties (gRPC::grpc_cpp_plugin PROPERTIES
IMPORTED_LOCATION "${grpc_binary_dir}/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}")
add_dependencies (gRPC::grpc_cpp_plugin grpc_src)
endif ()
add_imported_grpc (gpr)
add_imported_grpc ("grpc${grpc_suffix}")
add_imported_grpc ("grpc++${grpc_suffix}")
add_imported_grpc (address_sorting)
target_link_libraries ("gRPC::grpc${grpc_suffix}" INTERFACE c-ares::cares gRPC::gpr gRPC::address_sorting ZLIB::ZLIB)
target_link_libraries ("gRPC::grpc++${grpc_suffix}" INTERFACE "gRPC::grpc${grpc_suffix}" gRPC::gpr)
endif ()
#[=================================[
generate protobuf sources for
grpc defs and bundle into a
static lib
#]=================================]
set (GRPC_GEN_DIR "${CMAKE_BINARY_DIR}/proto_gen_grpc")
file (MAKE_DIRECTORY ${GRPC_GEN_DIR})
set (GRPC_PROTO_SRCS)
set (GRPC_PROTO_HDRS)
set (GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
file(GLOB_RECURSE GRPC_DEFINITION_FILES LIST_DIRECTORIES false "${GRPC_PROTO_ROOT}/*.proto")
foreach(file ${GRPC_DEFINITION_FILES})
get_filename_component(_abs_file ${file} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${file} NAME_WE)
get_filename_component(_proto_inc ${GRPC_PROTO_ROOT} DIRECTORY) # updir one level
file(RELATIVE_PATH _rel_root_file ${_proto_inc} ${_abs_file})
get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set (src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
set (src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
set (hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
set (hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
add_custom_command(
OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
COMMAND protobuf::protoc
ARGS --grpc_out=${GRPC_GEN_DIR}
--cpp_out=${GRPC_GEN_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
-I ${_proto_inc} -I ${_rel_dir}
${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc gRPC::grpc_cpp_plugin
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Running gRPC C++ protocol buffer compiler on ${file}"
VERBATIM)
set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES GENERATED TRUE)
list(APPEND GRPC_PROTO_SRCS ${src_1} ${src_2})
list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
endforeach()
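# (illustrative, hypothetical file name) for a definition such as
#   src/ripple/proto/org/xrpl/rpc/v1/get_ledger.proto
# the loop above registers generated outputs like
#   ${GRPC_GEN_DIR}/org/xrpl/rpc/v1/get_ledger.pb.cc
#   ${GRPC_GEN_DIR}/org/xrpl/rpc/v1/get_ledger.grpc.pb.cc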
add_library (grpc_pbufs STATIC ${GRPC_PROTO_SRCS} ${GRPC_PROTO_HDRS})
#target_include_directories (grpc_pbufs PRIVATE src)
target_include_directories (grpc_pbufs SYSTEM PUBLIC ${GRPC_GEN_DIR})
target_link_libraries (grpc_pbufs protobuf::libprotobuf "gRPC::grpc++${grpc_suffix}")
target_compile_options (grpc_pbufs
PRIVATE
$<$<BOOL:${MSVC}>:-wd4065>
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
PUBLIC
$<$<BOOL:${MSVC}>:-wd4996>
$<$<BOOL:${is_xcode}>:
--system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec
>)
add_library (Ripple::grpc_pbufs ALIAS grpc_pbufs)
target_link_libraries (ripple_libs INTERFACE Ripple::grpc_pbufs)
exclude_if_included (grpc_pbufs)

View File

@@ -0,0 +1,17 @@
#[=========================================================[
This is a CMake script file that is used to write
the contents of a file to stdout (using the cmake
echo command). The input file is passed via the
IN_FILE variable.
#]=========================================================]
if (EXISTS ${IN_FILE})
file (READ ${IN_FILE} contents)
## only print files that actually have some text in them
if (contents MATCHES "[a-z0-9A-Z]+")
execute_process(
COMMAND
${CMAKE_COMMAND} -E echo "${contents}")
endif ()
endif ()
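# (illustrative usage; the saved file name is an assumption) run in CMake
# script mode, passing the file to dump via -D:
#   cmake -DIN_FILE=<path-to-log> -P echo_file.cmake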

Builds/README.md Normal file
View File

@@ -0,0 +1 @@
[Please see the BUILD instructions here](../BUILD.md)

View File

@@ -59,10 +59,6 @@ the rippled source. The only caveat is that it runs much slower
under Windows than in Linux. It hasn't yet been tested under MacOS.
It generates many files of [results](results).
For local iteration speed there is also
[levelization.py](levelization.py), which generates the same artifact set much
faster; the shell script remains canonical for CI/auditing.
The generated files are:
* `rawincludes.txt`: The raw dump of the `#includes`
* `paths.txt`: A second dump grouping the source module
to the destination module, deduped, and with frequency counts.
@@ -113,9 +109,6 @@ prevent false alarms and merging issues, and because it's easy to
get those details locally.
1. Run `levelization.sh`
* Faster local loop: `python3 Builds/levelization/levelization.py`
* Optional parity check against canonical shell output:
`python3 Builds/levelization/levelization.py --results-dir /tmp/levelization-py-results --compare-to Builds/levelization/results`
2. Grep the modules in `paths.txt`.
* For example, if a cycle is found `A ~= B`, simply `grep -w
A Builds/levelization/results/paths.txt | grep -w B`

View File

@@ -1,409 +0,0 @@
#!/usr/bin/env python3
"""
Development-oriented levelization generator.
This script produces the same result artifact set as levelization.sh, but is
much faster by doing parsing/counting in-process instead of spawning many
external tools in tight loops.
The shell script remains the canonical CI path. Use this script for local
iteration speed, then run levelization.sh before committing if you need strict
parity with existing workflow.
"""
from __future__ import annotations
import argparse
import concurrent.futures
import os
import posixpath
import re
import shutil
import time
from collections import Counter, defaultdict
from pathlib import Path
INCLUDE_PATTERN = re.compile(r"^\s*#include.*/.*\.h")
INCLUDE_TARGET_PATTERN = re.compile(r'.*["<]([^">]+)[">].*')
PATHS_LINE_PATTERN = re.compile(r"^\s*(\d+)\s+(\S+)\s+(\S+)\s*$")
def dictionary_sort_key(value: str) -> str:
"""Approximate `sort -d` behavior used by the shell script."""
return "".join(ch for ch in value if ch.isalnum() or ch.isspace())
def normalize_level(value: str) -> str:
# Match shell behavior: if level includes a file component (contains "."),
# replace with dirname + "/toplevel".
if "." in value:
parent = posixpath.dirname(value) or "."
value = f"{parent}/toplevel"
return value.replace("/", ".")
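# Examples (hypothetical inputs, for illustration):
#   normalize_level("ripple/app")       -> "ripple.app"
#   normalize_level("ripple/README.md") -> "ripple.toplevel"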
def source_level(rel_path: str) -> str:
parts = rel_path.split("/")
return normalize_level("/".join(parts[1:3]))
def include_level(include_line: str) -> str | None:
match = INCLUDE_TARGET_PATTERN.match(include_line)
if not match:
return None
include_path = match.group(1)
parts = include_path.split("/")
return normalize_level("/".join(parts[:2]))
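# Example (hypothetical include line, for illustration):
#   include_level('#include <ripple/basics/Log.h>')  -> "ripple.basics"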
def scan_file(path: Path, repo_root: Path) -> tuple[list[str], list[tuple[str, str]]]:
rel = path.relative_to(repo_root).as_posix()
src_level = source_level(rel)
raw_lines: list[str] = []
paths: list[tuple[str, str]] = []
with path.open("r", encoding="utf-8", errors="ignore") as handle:
for line in handle:
if "boost" in line:
continue
if not INCLUDE_PATTERN.match(line):
continue
line = line.rstrip("\n")
raw_lines.append(f"{rel}:{line}")
dst_level = include_level(line)
if dst_level is None:
continue
if src_level != dst_level:
paths.append((src_level, dst_level))
return raw_lines, paths
def iter_source_files(repo_root: Path) -> list[Path]:
files: list[Path] = []
for top in ("include", "src"):
root = repo_root / top
files.extend(path for path in root.rglob("*") if path.is_file())
files.sort(key=lambda p: p.relative_to(repo_root).as_posix())
return files
def write_relation_db(
results_dir: Path,
edge_counts: list[tuple[tuple[str, str], int]],
) -> tuple[dict[str, list[tuple[str, int]]], dict[str, list[tuple[str, int]]]]:
includes_dir = results_dir / "includes"
includedby_dir = results_dir / "includedby"
includes_dir.mkdir(parents=True, exist_ok=True)
includedby_dir.mkdir(parents=True, exist_ok=True)
includes: dict[str, list[tuple[str, int]]] = defaultdict(list)
includedby: dict[str, list[tuple[str, int]]] = defaultdict(list)
with (results_dir / "paths.txt").open("w", encoding="utf-8") as out:
for (src, dst), count in edge_counts:
out.write(f"{count:7d} {src} {dst}\n")
includes[src].append((dst, count))
includedby[dst].append((src, count))
for src, entries in includes.items():
with (includes_dir / src).open("w", encoding="utf-8") as out:
for dst, count in entries:
out.write(f"{dst} {count}\n")
for dst, entries in includedby.items():
with (includedby_dir / dst).open("w", encoding="utf-8") as out:
for src, count in entries:
out.write(f"{src} {count}\n")
return includes, includedby
def build_loops_and_ordering(
includes: dict[str, list[tuple[str, int]]],
) -> tuple[list[str], list[str]]:
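# One-way edges ("source > include") go to ordering.txt. Mutual includes
# become "Loop:" entries in loops.txt; a difference of more than 3 includes
# in one direction is reported as the suggested dominant direction (">").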
include_map = {
src: {dst: count for dst, count in entries}
for src, entries in includes.items()
}
ordering_lines: list[str] = []
loops_lines: list[str] = []
seen_pairs: set[tuple[str, str]] = set()
for source in sorted(includes.keys()):
for include, includefreq in includes[source]:
if include not in include_map:
continue
sourcefreq = include_map[include].get(source)
if sourcefreq is None:
ordering_lines.append(f"{source} > {include}\n")
continue
if (include, source) in seen_pairs:
continue
seen_pairs.add((source, include))
loops_lines.append(f"Loop: {source} {include}\n")
if includefreq - sourcefreq > 3:
loops_lines.append(f" {source} > {include}\n\n")
elif sourcefreq - includefreq > 3:
loops_lines.append(f" {include} > {source}\n\n")
elif sourcefreq == includefreq:
loops_lines.append(f" {include} == {source}\n\n")
else:
loops_lines.append(f" {include} ~= {source}\n\n")
return ordering_lines, loops_lines
def parse_paths(path: Path) -> dict[tuple[str, str], int]:
out: dict[tuple[str, str], int] = {}
for line in path.read_text(encoding="utf-8").splitlines():
if not line.strip():
continue
match = PATHS_LINE_PATTERN.match(line)
if not match:
raise ValueError(f"Cannot parse paths line: {line!r}")
count = int(match.group(1))
src = match.group(2)
dst = match.group(3)
out[(src, dst)] = count
return out
def parse_relation_dir(path: Path) -> dict[str, Counter[str]]:
out: dict[str, Counter[str]] = {}
if not path.exists():
return out
for file in sorted(p for p in path.iterdir() if p.is_file()):
lines = [line for line in file.read_text(encoding="utf-8").splitlines() if line]
out[file.name] = Counter(lines)
return out
def compare_results(generated: Path, canonical: Path) -> list[str]:
mismatches: list[str] = []
# rawincludes: compare as sets/multisets of lines to ignore traversal order.
gen_raw = Counter(generated.joinpath("rawincludes.txt").read_text(encoding="utf-8").splitlines())
can_raw = Counter(canonical.joinpath("rawincludes.txt").read_text(encoding="utf-8").splitlines())
if gen_raw != can_raw:
mismatches.append("rawincludes.txt differs (line multiset mismatch)")
# paths: compare parsed edge->count map, ignoring ordering/whitespace.
gen_paths = parse_paths(generated / "paths.txt")
can_paths = parse_paths(canonical / "paths.txt")
if gen_paths != can_paths:
mismatches.append("paths.txt differs (edge count mismatch)")
# includes / includedby: compare per-file line multisets.
for rel in ("includes", "includedby"):
gen_rel = parse_relation_dir(generated / rel)
can_rel = parse_relation_dir(canonical / rel)
if gen_rel != can_rel:
mismatches.append(f"{rel}/ differs (file set or content mismatch)")
# ordering and loops are canonical artifacts; require exact bytes.
for name in ("ordering.txt", "loops.txt"):
gen_text = generated.joinpath(name).read_text(encoding="utf-8")
can_text = canonical.joinpath(name).read_text(encoding="utf-8")
if gen_text != can_text:
mismatches.append(f"{name} differs")
return mismatches
def generate(results_dir: Path, repo_root: Path, workers: int) -> None:
if results_dir.exists():
shutil.rmtree(results_dir)
results_dir.mkdir(parents=True)
files = iter_source_files(repo_root)
raw_by_file: dict[str, list[str]] = {}
paths_by_file: dict[str, list[tuple[str, str]]] = {}
start = time.perf_counter()
if workers <= 1:
for file in files:
rel = file.relative_to(repo_root).as_posix()
raw, paths = scan_file(file, repo_root)
raw_by_file[rel] = raw
paths_by_file[rel] = paths
else:
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as pool:
futures = {
file.relative_to(repo_root).as_posix(): pool.submit(
scan_file, file, repo_root
)
for file in files
}
for rel in sorted(futures.keys()):
raw, paths = futures[rel].result()
raw_by_file[rel] = raw
paths_by_file[rel] = paths
raw_lines: list[str] = []
raw_lines.extend(
line
for rel in sorted(raw_by_file.keys())
for line in raw_by_file[rel]
)
with (results_dir / "rawincludes.txt").open("w", encoding="utf-8") as out:
out.write("\n".join(raw_lines))
if raw_lines:
out.write("\n")
path_pairs: list[tuple[str, str]] = []
path_pairs.extend(
pair
for rel in sorted(paths_by_file.keys())
for pair in paths_by_file[rel]
)
counts = Counter(path_pairs)
edge_counts = sorted(
counts.items(),
key=lambda item: (
dictionary_sort_key(f"{item[0][0]} {item[0][1]}"),
item[0][0],
item[0][1],
),
)
includes, _ = write_relation_db(results_dir, edge_counts)
ordering, loops = build_loops_and_ordering(includes)
with (results_dir / "ordering.txt").open("w", encoding="utf-8") as out:
out.writelines(ordering)
with (results_dir / "loops.txt").open("w", encoding="utf-8") as out:
out.writelines(loops)
elapsed = time.perf_counter() - start
print(
f"levelization.py: scanned {len(files)} files, "
f"{len(raw_lines)} includes, {len(edge_counts)} unique paths in "
f"{elapsed:.2f}s"
)
print((results_dir / "ordering.txt").read_text(encoding="utf-8"), end="")
print((results_dir / "loops.txt").read_text(encoding="utf-8"), end="")
def explain_edge(src_level: str, dst_level: str, repo_root: Path, workers: int) -> None:
"""Show every source file and #include line that creates a dependency edge."""
files = iter_source_files(repo_root)
matches: list[tuple[str, str]] = [] # (source_file, include_line)
def check_file(path: Path) -> list[tuple[str, str]]:
rel = path.relative_to(repo_root).as_posix()
sl = source_level(rel)
if sl != src_level:
return []
hits: list[tuple[str, str]] = []
with path.open("r", encoding="utf-8", errors="ignore") as handle:
for line in handle:
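                # Skip boost includes, mirroring the "grep -v boost"
                # filter used by the shell-based scanner.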
if "boost" in line:
continue
if not INCLUDE_PATTERN.match(line):
continue
line = line.rstrip("\n")
dl = include_level(line)
if dl == dst_level:
hits.append((rel, line.strip()))
return hits
if workers <= 1:
for file in files:
matches.extend(check_file(file))
else:
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as pool:
for result in pool.map(check_file, files):
matches.extend(result)
if not matches:
print(f"No includes found from {src_level} -> {dst_level}")
return
print(f"{src_level} > {dst_level} ({len(matches)} include(s)):\n")
for source_file, include_line in matches:
print(f" {source_file}")
print(f" {include_line}")
print()
def main() -> int:
script_dir = Path(__file__).resolve().parent
repo_root = script_dir.parents[1]
parser = argparse.ArgumentParser()
parser.add_argument(
"--repo-root",
type=Path,
default=repo_root,
help="Repository root (defaults based on script location).",
)
parser.add_argument(
"--results-dir",
type=Path,
default=script_dir / "results",
help="Output results directory.",
)
parser.add_argument(
"--workers",
type=int,
default=min(32, (os.cpu_count() or 1)),
help="Thread count for source scanning (default: CPU count, max 32).",
)
parser.add_argument(
"--compare-to",
type=Path,
default=None,
help=(
"Compare generated results against this directory and exit non-zero "
"on mismatch (semantic comparison for rawincludes/paths/includes)."
),
)
parser.add_argument(
"--explain",
nargs=2,
metavar=("SRC", "DST"),
default=None,
help=(
"Show which source files cause a dependency edge. "
"Example: --explain test.csf xrpld.app"
),
)
args = parser.parse_args()
if args.explain is not None:
explain_edge(args.explain[0], args.explain[1], args.repo_root.resolve(), max(1, args.workers))
return 0
generated_dir = args.results_dir.resolve()
generate(
results_dir=generated_dir,
repo_root=args.repo_root.resolve(),
workers=max(1, args.workers),
)
if args.compare_to is not None:
canonical_dir = args.compare_to.resolve()
mismatches = compare_results(generated_dir, canonical_dir)
if mismatches:
print("levelization.py: mismatch against canonical results:")
for mismatch in mismatches:
print(f" - {mismatch}")
return 1
print("levelization.py: matches canonical results")
return 0
if __name__ == "__main__":
raise SystemExit(main())
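For reference, a minimal usage sketch of the CLI defined in `main()` above (the `results-canonical` directory name is illustrative):

```
# Regenerate results/ and print ordering.txt and loops.txt:
python3 levelization.py

# Regenerate, then fail if the output differs from a canonical copy:
python3 levelization.py --compare-to results-canonical

# Show every #include line that creates a given dependency edge:
python3 levelization.py --explain test.csf xrpld.app
```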

View File

@@ -7,21 +7,18 @@
pushd $( dirname $0 )
if [[ -n "${PS1-}" ]]
if [ -v PS1 ]
then
# if the shell is interactive, clean up any flotsam before analyzing
git clean -ix
fi
# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C
rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"
pushd ../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
grep -r '#include.*/.*\.h' src/ripple/ src/test/ | \
grep -v boost | tee ${includes}
popd
pushd results

View File

@@ -1,54 +1,60 @@
Loop: ripple.app ripple.core
ripple.app > ripple.core
Loop: ripple.app ripple.ledger
ripple.app > ripple.ledger
Loop: ripple.app ripple.net
ripple.app > ripple.net
Loop: ripple.app ripple.nodestore
ripple.app > ripple.nodestore
Loop: ripple.app ripple.overlay
ripple.overlay ~= ripple.app
Loop: ripple.app ripple.peerfinder
ripple.app > ripple.peerfinder
Loop: ripple.app ripple.protocol
ripple.app > ripple.protocol
Loop: ripple.app ripple.rpc
ripple.rpc > ripple.app
Loop: ripple.app ripple.shamap
ripple.app > ripple.shamap
Loop: ripple.basics ripple.core
ripple.core > ripple.basics
Loop: ripple.basics ripple.json
ripple.json ~= ripple.basics
Loop: ripple.basics ripple.protocol
ripple.protocol > ripple.basics
Loop: ripple.core ripple.net
ripple.net > ripple.core
Loop: ripple.ledger ripple.protocol
ripple.ledger > ripple.protocol
Loop: ripple.net ripple.rpc
ripple.rpc > ripple.net
Loop: ripple.nodestore ripple.overlay
ripple.overlay ~= ripple.nodestore
Loop: ripple.overlay ripple.rpc
ripple.rpc ~= ripple.overlay
Loop: test.app test.jtx
test.app > test.jtx
Loop: test.app test.rpc
test.rpc ~= test.app
Loop: test.jtx test.toplevel
test.toplevel > test.jtx
Loop: test.jtx test.unit_test
test.unit_test == test.jtx
Loop: xrpl.protocol xrpld.app
xrpld.app > xrpl.protocol
Loop: xrpld.app xrpld.core
xrpld.app > xrpld.core
Loop: xrpld.app xrpld.ledger
xrpld.app > xrpld.ledger
Loop: xrpld.app xrpld.net
xrpld.app > xrpld.net
Loop: xrpld.app xrpld.nodestore
xrpld.app > xrpld.nodestore
Loop: xrpld.app xrpld.overlay
xrpld.overlay ~= xrpld.app
Loop: xrpld.app xrpld.peerfinder
xrpld.app > xrpld.peerfinder
Loop: xrpld.app xrpld.rpc
xrpld.rpc > xrpld.app
Loop: xrpld.app xrpld.shamap
xrpld.app > xrpld.shamap
Loop: xrpld.core xrpld.net
xrpld.net > xrpld.core
Loop: xrpld.core xrpld.perflog
xrpld.perflog == xrpld.core
Loop: xrpld.net xrpld.rpc
xrpld.rpc > xrpld.net
Loop: xrpld.overlay xrpld.rpc
xrpld.rpc > xrpld.overlay
Loop: xrpld.perflog xrpld.rpc
xrpld.rpc ~= xrpld.perflog

View File

@@ -1,208 +1,227 @@
libxrpl.basics > xrpl.basics
libxrpl.crypto > xrpl.basics
libxrpl.json > xrpl.basics
libxrpl.json > xrpl.json
libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.hook
libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol
libxrpl.resource > xrpl.basics
libxrpl.resource > xrpl.resource
libxrpl.server > xrpl.basics
libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.server
ripple.app > ripple.basics
ripple.app > ripple.beast
ripple.app > ripple.conditions
ripple.app > ripple.consensus
ripple.app > ripple.crypto
ripple.app > ripple.json
ripple.app > ripple.resource
ripple.app > test.unit_test
ripple.basics > ripple.beast
ripple.conditions > ripple.basics
ripple.conditions > ripple.protocol
ripple.consensus > ripple.basics
ripple.consensus > ripple.beast
ripple.consensus > ripple.json
ripple.consensus > ripple.protocol
ripple.core > ripple.beast
ripple.core > ripple.json
ripple.core > ripple.protocol
ripple.crypto > ripple.basics
ripple.json > ripple.beast
ripple.ledger > ripple.basics
ripple.ledger > ripple.beast
ripple.ledger > ripple.core
ripple.ledger > ripple.json
ripple.net > ripple.basics
ripple.net > ripple.beast
ripple.net > ripple.json
ripple.net > ripple.protocol
ripple.net > ripple.resource
ripple.nodestore > ripple.basics
ripple.nodestore > ripple.beast
ripple.nodestore > ripple.core
ripple.nodestore > ripple.json
ripple.nodestore > ripple.protocol
ripple.nodestore > ripple.unity
ripple.overlay > ripple.basics
ripple.overlay > ripple.beast
ripple.overlay > ripple.core
ripple.overlay > ripple.json
ripple.overlay > ripple.peerfinder
ripple.overlay > ripple.protocol
ripple.overlay > ripple.resource
ripple.overlay > ripple.server
ripple.peerfinder > ripple.basics
ripple.peerfinder > ripple.beast
ripple.peerfinder > ripple.core
ripple.peerfinder > ripple.protocol
ripple.perflog > ripple.basics
ripple.perflog > ripple.beast
ripple.perflog > ripple.core
ripple.perflog > ripple.json
ripple.perflog > ripple.nodestore
ripple.perflog > ripple.protocol
ripple.perflog > ripple.rpc
ripple.protocol > ripple.beast
ripple.protocol > ripple.crypto
ripple.protocol > ripple.json
ripple.resource > ripple.basics
ripple.resource > ripple.beast
ripple.resource > ripple.json
ripple.resource > ripple.protocol
ripple.rpc > ripple.basics
ripple.rpc > ripple.beast
ripple.rpc > ripple.core
ripple.rpc > ripple.crypto
ripple.rpc > ripple.json
ripple.rpc > ripple.ledger
ripple.rpc > ripple.nodestore
ripple.rpc > ripple.protocol
ripple.rpc > ripple.resource
ripple.rpc > ripple.server
ripple.rpc > ripple.shamap
ripple.server > ripple.basics
ripple.server > ripple.beast
ripple.server > ripple.crypto
ripple.server > ripple.json
ripple.server > ripple.protocol
ripple.shamap > ripple.basics
ripple.shamap > ripple.beast
ripple.shamap > ripple.crypto
ripple.shamap > ripple.nodestore
ripple.shamap > ripple.protocol
test.app > ripple.app
test.app > ripple.basics
test.app > ripple.beast
test.app > ripple.core
test.app > ripple.json
test.app > ripple.ledger
test.app > ripple.overlay
test.app > ripple.protocol
test.app > ripple.resource
test.app > ripple.rpc
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test
test.app > xrpl.basics
test.app > xrpld.app
test.app > xrpld.core
test.app > xrpld.ledger
test.app > xrpld.nodestore
test.app > xrpld.overlay
test.app > xrpld.rpc
test.app > xrpl.hook
test.app > xrpl.json
test.app > xrpl.protocol
test.app > xrpl.resource
test.basics > ripple.basics
test.basics > ripple.beast
test.basics > ripple.json
test.basics > ripple.protocol
test.basics > ripple.rpc
test.basics > test.jtx
test.basics > test.unit_test
test.basics > xrpl.basics
test.basics > xrpld.perflog
test.basics > xrpld.rpc
test.basics > xrpl.json
test.basics > xrpl.protocol
test.beast > xrpl.basics
test.conditions > xrpl.basics
test.conditions > xrpld.conditions
test.beast > ripple.basics
test.beast > ripple.beast
test.conditions > ripple.basics
test.conditions > ripple.beast
test.conditions > ripple.conditions
test.consensus > ripple.app
test.consensus > ripple.basics
test.consensus > ripple.beast
test.consensus > ripple.consensus
test.consensus > ripple.core
test.consensus > ripple.ledger
test.consensus > ripple.protocol
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
test.consensus > xrpl.basics
test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpld.core
test.consensus > xrpld.ledger
test.consensus > xrpl.json
test.consensus > xrpl.protocol
test.core > ripple.basics
test.core > ripple.beast
test.core > ripple.core
test.core > ripple.crypto
test.core > ripple.json
test.core > ripple.server
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
test.core > xrpl.basics
test.core > xrpld.core
test.core > xrpld.perflog
test.core > xrpl.json
test.core > xrpl.server
test.csf > xrpl.basics
test.csf > xrpld.consensus
test.csf > xrpl.json
test.csf > xrpl.protocol
test.csf > ripple.basics
test.csf > ripple.beast
test.csf > ripple.consensus
test.csf > ripple.json
test.csf > ripple.protocol
test.json > ripple.beast
test.json > ripple.json
test.json > test.jtx
test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpld.app
test.jtx > xrpld.consensus
test.jtx > xrpld.core
test.jtx > xrpld.ledger
test.jtx > xrpld.net
test.jtx > xrpld.rpc
test.jtx > xrpl.hook
test.jtx > xrpl.json
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
test.jtx > xrpl.server
test.jtx > ripple.app
test.jtx > ripple.basics
test.jtx > ripple.beast
test.jtx > ripple.consensus
test.jtx > ripple.core
test.jtx > ripple.json
test.jtx > ripple.ledger
test.jtx > ripple.net
test.jtx > ripple.protocol
test.jtx > ripple.server
test.ledger > ripple.app
test.ledger > ripple.basics
test.ledger > ripple.beast
test.ledger > ripple.core
test.ledger > ripple.ledger
test.ledger > ripple.protocol
test.ledger > test.jtx
test.ledger > test.toplevel
test.ledger > xrpl.basics
test.ledger > xrpld.app
test.ledger > xrpld.core
test.ledger > xrpld.ledger
test.ledger > xrpl.protocol
test.net > ripple.net
test.net > test.jtx
test.net > test.toplevel
test.net > test.unit_test
test.nodestore > ripple.app
test.nodestore > ripple.basics
test.nodestore > ripple.beast
test.nodestore > ripple.core
test.nodestore > ripple.nodestore
test.nodestore > ripple.protocol
test.nodestore > ripple.unity
test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.nodestore > xrpl.basics
test.nodestore > xrpld.core
test.nodestore > xrpld.nodestore
test.nodestore > xrpld.unity
test.overlay > ripple.app
test.overlay > ripple.basics
test.overlay > ripple.beast
test.overlay > ripple.core
test.overlay > ripple.overlay
test.overlay > ripple.peerfinder
test.overlay > ripple.protocol
test.overlay > ripple.shamap
test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test
test.overlay > xrpl.basics
test.overlay > xrpld.app
test.overlay > xrpld.overlay
test.overlay > xrpld.peerfinder
test.overlay > xrpld.shamap
test.overlay > xrpl.protocol
test.peerfinder > ripple.basics
test.peerfinder > ripple.beast
test.peerfinder > ripple.core
test.peerfinder > ripple.peerfinder
test.peerfinder > ripple.protocol
test.peerfinder > test.beast
test.peerfinder > test.unit_test
test.peerfinder > xrpl.basics
test.peerfinder > xrpld.core
test.peerfinder > xrpld.peerfinder
test.peerfinder > xrpl.protocol
test.protocol > ripple.basics
test.protocol > ripple.beast
test.protocol > ripple.crypto
test.protocol > ripple.json
test.protocol > ripple.protocol
test.protocol > test.toplevel
test.protocol > xrpl.basics
test.protocol > xrpl.json
test.protocol > xrpl.protocol
test.rdb > test.jtx
test.rdb > test.toplevel
test.rdb > xrpld.app
test.rdb > xrpld.core
test.resource > ripple.basics
test.resource > ripple.beast
test.resource > ripple.resource
test.resource > test.unit_test
test.resource > xrpl.basics
test.resource > xrpl.resource
test.rpc > ripple.app
test.rpc > ripple.basics
test.rpc > ripple.beast
test.rpc > ripple.core
test.rpc > ripple.json
test.rpc > ripple.net
test.rpc > ripple.nodestore
test.rpc > ripple.overlay
test.rpc > ripple.protocol
test.rpc > ripple.resource
test.rpc > ripple.rpc
test.rpc > test.jtx
test.rpc > test.nodestore
test.rpc > test.toplevel
test.rpc > xrpl.basics
test.rpc > xrpld.app
test.rpc > xrpld.core
test.rpc > xrpld.net
test.rpc > xrpld.overlay
test.rpc > xrpld.rpc
test.rpc > xrpl.hook
test.rpc > xrpl.json
test.rpc > xrpl.protocol
test.rpc > xrpl.resource
test.server > ripple.app
test.server > ripple.basics
test.server > ripple.beast
test.server > ripple.core
test.server > ripple.json
test.server > ripple.rpc
test.server > ripple.server
test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test
test.server > xrpl.basics
test.server > xrpld.app
test.server > xrpld.core
test.server > xrpld.rpc
test.server > xrpl.json
test.server > xrpl.server
test.shamap > ripple.basics
test.shamap > ripple.beast
test.shamap > ripple.nodestore
test.shamap > ripple.protocol
test.shamap > ripple.shamap
test.shamap > test.unit_test
test.shamap > xrpl.basics
test.shamap > xrpld.nodestore
test.shamap > xrpld.shamap
test.shamap > xrpl.protocol
test.toplevel > ripple.json
test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
xrpl.hook > xrpl.basics
xrpl.hook > xrpl.protocol
xrpl.json > xrpl.basics
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol
xrpl.server > xrpl.basics
xrpl.server > xrpl.json
xrpl.server > xrpl.protocol
xrpld.app > test.unit_test
xrpld.app > xrpl.basics
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus
xrpld.app > xrpld.perflog
xrpld.app > xrpl.hook
xrpld.app > xrpl.json
xrpld.app > xrpl.resource
xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.protocol
xrpld.core > xrpl.basics
xrpld.core > xrpl.json
xrpld.core > xrpl.protocol
xrpld.ledger > xrpl.basics
xrpld.ledger > xrpld.core
xrpld.ledger > xrpl.json
xrpld.ledger > xrpl.protocol
xrpld.net > xrpl.basics
xrpld.net > xrpl.json
xrpld.net > xrpl.protocol
xrpld.net > xrpl.resource
xrpld.nodestore > xrpl.basics
xrpld.nodestore > xrpld.core
xrpld.nodestore > xrpld.unity
xrpld.nodestore > xrpl.json
xrpld.nodestore > xrpl.protocol
xrpld.overlay > xrpl.basics
xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpld.perflog
xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.resource
xrpld.overlay > xrpl.server
xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol
xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.json
xrpld.perflog > xrpl.protocol
xrpld.rpc > xrpl.basics
xrpld.rpc > xrpld.core
xrpld.rpc > xrpld.ledger
xrpld.rpc > xrpld.nodestore
xrpld.rpc > xrpld.shamap
xrpld.rpc > xrpl.json
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server
xrpld.shamap > xrpl.basics
xrpld.shamap > xrpld.nodestore
xrpld.shamap > xrpl.protocol
test.unit_test > ripple.basics
test.unit_test > ripple.beast

View File

@@ -1,77 +1,42 @@
cmake_minimum_required (VERSION 3.16)
if (POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif ()
project (rippled)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
if(POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif()
if(POLICY CMP0077)
cmake_policy(SET CMP0077 NEW)
endif()
# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
if(POLICY CMP0144)
cmake_policy(SET CMP0144 NEW)
endif()
project (xrpl)
set(Boost_NO_BOOST_CMAKE ON)
# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if(Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
execute_process(COMMAND ${GIT_EXECUTABLE} describe --always --abbrev=40
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if(gch)
set(GIT_COMMIT_HASH "${gch}")
message(STATUS gch: ${GIT_COMMIT_HASH})
add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif()
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if(gb)
set(GIT_BRANCH "${gb}")
message(STATUS gb: ${GIT_BRANCH})
add_definitions(-DGIT_BRANCH="${GIT_BRANCH}")
endif()
endif() #git
# make SOURCE_ROOT_PATH define available for logging
set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/")
add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}")
# BEAST_ENHANCED_LOGGING - adds file:line numbers and formatting to logs
# Automatically enabled for Debug builds via generator expression
# Can be explicitly controlled with -DBEAST_ENHANCED_LOGGING=ON/OFF
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages (auto: Debug=ON, Release=OFF)" OFF)
message(STATUS "BEAST_ENHANCED_LOGGING option: ${BEAST_ENHANCED_LOGGING}")
if(thread_safety_analysis)
if (thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
add_compile_options("-stdlib=libc++")
add_link_options("-stdlib=libc++")
endif()
option(USE_CONAN "Use Conan package manager for dependencies" OFF)
# Then, auto-detect if conan_toolchain.cmake is being used
if(CMAKE_TOOLCHAIN_FILE)
# Check if the toolchain file path contains "conan_toolchain"
if(CMAKE_TOOLCHAIN_FILE MATCHES "conan_toolchain")
set(USE_CONAN ON CACHE BOOL "Using Conan detected from toolchain file" FORCE)
message(STATUS "Conan toolchain detected: ${CMAKE_TOOLCHAIN_FILE}")
message(STATUS "Building with Conan dependencies")
endif()
endif()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps")
include (CheckCXXCompilerFlag)
include (FetchContent)
include (ExternalProject)
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
include (ProcessorCount)
if (target)
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()
@@ -79,7 +44,7 @@ endif ()
include(RippledSanity)
include(RippledVersion)
include(RippledSettings)
include(RippledNIH)
# this check has to remain in the top-level cmake
# because of the early return statement
if (packages_only)
@@ -91,65 +56,31 @@ endif ()
include(RippledCompiler)
include(RippledInterface)
###
include(deps/Boost)
find_package(OpenSSL 1.1.1 REQUIRED)
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
set(SECP256K1_INSTALL TRUE)
add_subdirectory(external/secp256k1)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(external/ed25519-donna)
add_subdirectory(external/antithesis-sdk)
find_package(gRPC REQUIRED)
find_package(lz4 REQUIRED)
# Target names with :: are not allowed in a generator expression.
# We need to pull the include directories and imported location properties
# from separate targets.
find_package(LibArchive REQUIRED)
find_package(SOCI REQUIRED)
find_package(SQLite3 REQUIRED)
include(deps/OpenSSL)
include(deps/Secp256k1)
include(deps/Ed25519-donna)
include(deps/Lz4)
include(deps/Libarchive)
include(deps/Sqlite)
include(deps/Soci)
include(deps/Snappy)
include(deps/Rocksdb)
include(deps/Nudb)
include(deps/date)
include(deps/Protobuf)
include(deps/gRPC)
include(deps/cassandra)
include(deps/Postgres)
include(deps/WasmEdge)
option(rocksdb "Enable RocksDB" ON)
if(rocksdb)
find_package(RocksDB REQUIRED)
set_target_properties(RocksDB::rocksdb PROPERTIES
INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
)
target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
endif()
###
find_package(nudb REQUIRED)
find_package(date REQUIRED)
find_package(xxHash REQUIRED)
include(deps/WasmEdge)
if(TARGET nudb::core)
set(nudb nudb::core)
elseif(TARGET NuDB::nudb)
set(nudb NuDB::nudb)
else()
message(FATAL_ERROR "unknown nudb target")
endif()
target_link_libraries(ripple_libs INTERFACE ${nudb})
target_link_libraries(ripple_libs INTERFACE
ed25519::ed25519
LibArchive::LibArchive
lz4::lz4
OpenSSL::Crypto
OpenSSL::SSL
secp256k1::secp256k1
soci::soci
SQLite::SQLite3
)
if(coverage)
include(RippledCov)
endif()
set(PROJECT_EXPORT_SET RippleExports)
include(RippledCore)
include(RippledInstall)
include(RippledCov)
include(RippledMultiConfig)
include(RippledDocs)
include(RippledValidatorKeys)

View File

@@ -71,248 +71,27 @@ The source must be formatted according to the style guide below.
Header includes must be [levelized](./Builds/levelization).
Changes should usually be squashed down into a single commit.
Some larger or more complicated change sets make more sense,
and are easier to review if organized into multiple logical commits.
Either way, all commits should fit the following criteria:
* Changes should be presented in a single commit or a logical
sequence of commits.
Specifically, chronological commits that simply
reflect the history of how the author implemented
the change, "warts and all", are not useful to
reviewers.
* Every commit should have a [good message](#good-commit-messages)
to explain a specific aspect of the change.
* Every commit should be signed.
* Every commit should be well-formed (builds successfully,
unit tests passing), as this helps to resolve merge
conflicts, and makes it easier to use `git bisect`
to find bugs.
### Good commit messages
Refer to
["How to Write a Git Commit Message"](https://cbea.ms/git-commit/)
for general rules on writing a good commit message.
tl;dr
> 1. Separate subject from body with a blank line.
> 2. Limit the subject line to 50 characters.
> * [...] shoot for 50 characters, but consider 72 the hard limit.
> 3. Capitalize the subject line.
> 4. Do not end the subject line with a period.
> 5. Use the imperative mood in the subject line.
> * A properly formed Git commit subject line should always be able
> to complete the following sentence: "If applied, this commit will
> _your subject line here_".
> 6. Wrap the body at 72 characters.
> 7. Use the body to explain what and why vs. how.
In addition to those guidelines, please add one of the following
prefixes to the subject line if appropriate.
* `fix:` - The primary purpose is to fix an existing bug.
* `perf:` - The primary purpose is performance improvements.
* `refactor:` - The changes refactor code without affecting
functionality.
* `test:` - The changes _only_ affect unit tests.
* `docs:` - The changes _only_ affect documentation. This can
include code comments in addition to `.md` files like this one.
* `build:` - The changes _only_ affect the build process,
including CMake and/or Conan settings.
* `chore:` - Other tasks that don't affect the binary, but don't fit
any of the other cases. e.g. formatting, git settings, updating
Github Actions jobs.
Whenever possible, when updating commits after the PR is open, please
add the PR number to the end of the subject line. e.g. `test: Add
unit tests for Feature X (#1234)`.
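Putting these guidelines together, a hypothetical commit message (the
feature and PR number are invented for illustration) might look like:

```
fix: Handle empty ledger queries without crashing (#1234)

Previously, querying an empty ledger dereferenced a null pointer.
Return an error to the caller instead, and add a regression test
covering the empty case.
```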
## Pull requests
In general, pull requests use `develop` as the base branch.
(Hotfixes are an exception.)
If your changes are not quite ready, but you want to make them easily available
for preliminary examination or review, you can create a "Draft" pull request.
While a pull request is marked as a "Draft", you can rebase or reorganize the
commits in the pull request as desired.
Github pull requests are created as "Ready" by default, or you can mark
a "Draft" pull request as "Ready".
Once a pull request is marked as "Ready",
any changes must be added as new commits. Do not
force-push to a branch in a pull request under review.
(This includes rebasing your branch onto the updated base branch.
Use a merge operation instead, or hit the "Update branch" button
at the bottom of the Github PR page.)
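For example, a minimal sketch of bringing a PR branch up to date without
rewriting its history (assuming `upstream` points at the main repository
and `my-feature` is your branch):

```
git fetch upstream
git checkout my-feature
git merge upstream/develop
git push
```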
Changes to pull requests must be added as new commits.
Once code reviewers have started looking at your code, please avoid
force-pushing a branch in a pull request.
This preserves the ability for reviewers to filter changes since their last
review.
A pull request must obtain **approvals from at least two reviewers**
before it can be considered for merge by a Maintainer.
A pull request must obtain **approvals from at least two reviewers** before it
can be considered for merge by a Maintainer.
Maintainers retain discretion to require more approvals if they feel the
credibility of the existing approvals is insufficient.
Pull requests must be merged by [squash-and-merge][2]
to preserve a linear history for the `develop` branch.
### When and how to merge pull requests
#### "Passed"
A pull request should only have the "Passed" label added when it
meets a few criteria:
1. It must have two approving reviews [as described
above](#pull-requests). (Exception: PRs that are deemed "trivial"
only need one approval.)
2. All CI checks must be complete and passed. (One-off failures may
be acceptable if they are related to a known issue.)
3. The PR must have a [good commit message](#good-commit-messages).
* If the PR started with a good commit message, and it doesn't
need to be updated, the author can indicate that in a comment.
* Any contributor, preferably the author, can leave a comment
suggesting a commit message.
* If the author squashes and rebases the code in preparation for
merge, they should also ensure the commit message(s) are updated
as well.
4. The PR branch must be up to date with the base branch (usually
`develop`). This is usually accomplished by merging the base branch
into the feature branch, but if the other criteria are met, the
changes can be squashed and rebased on top of the base branch.
5. Finally, and most importantly, the author of the PR must
positively indicate that the PR is ready to merge. That can be
accomplished by adding the "Passed" label if their role allows,
or by leaving a comment to the effect that the PR is ready to
merge.
Once the "Passed" label is added, a maintainer may merge the PR at
any time, so don't use it lightly.
#### Instructions for maintainers
The maintainer should double-check that the PR has met all the
necessary criteria, and can request additional information from the
owner, or additional reviews, and can always feel free to remove the
"Passed" label if appropriate. The maintainer has final say on
whether a PR gets merged, and is encouraged to communicate any
issues or concerns to other maintainers.
##### Most pull requests: "Squash and merge"
Most pull requests don't need special handling, and can simply be
merged using the "Squash and merge" button on the Github UI. Update
the suggested commit message if necessary.
##### Slightly more complicated pull requests
Some pull requests need to be pushed to `develop` as more than one
commit. There are multiple ways to accomplish this. If the author
describes a process, and it is reasonable, follow it. Otherwise, do
a fast-forward-only merge (`--ff-only`) on the command line and push.
Either way, check that:
* The commits are based on the current tip of `develop`.
* The commits are clean: No merge commits (except when reverse
merging), no "[FOLD]" or "fixup!" messages.
* All commits are signed. If the commits are not signed by the author, use
`git commit --amend -S` to sign them yourself.
* At least one (but preferably all) of the commits has the PR number
in the commit message.
**Never use the "Create a merge commit" or "Rebase and merge"
functions!**
##### Releases, release candidates, and betas
All releases, including release candidates and betas, are handled
differently from typical PRs. Most importantly, never use
the Github UI to merge a release.
1. There are two possible conditions that the `develop` branch will
be in when preparing a release.
1. Ready or almost ready to go: There may be one or two PRs that
need to be merged, but otherwise, the only change needed is to
update the version number in `BuildInfo.cpp`. In this case,
merge those PRs as appropriate, updating the second one, and
waiting for CI to finish in between. Then update
`BuildInfo.cpp`.
2. Several pending PRs: In this case, do not use the Github UI,
because the delays waiting for CI in between each merge will be
unnecessarily onerous. Instead, create a working branch (e.g.
`develop-next`) based off of `develop`. Squash the changes
from each PR onto the branch, one commit each (unless
more are needed), being sure to sign each commit and update
the commit message to include the PR number. You may be able
to use a fast-forward merge for the first PR. The workflow may
look something like:
```
git fetch upstream
git checkout upstream/develop
git checkout -b develop-next
# Use -S on the ff-only merge if prbranch1 isn't signed.
# Or do another branch first.
git merge --ff-only user1/prbranch1
git merge --squash user2/prbranch2
git commit -S
git merge --squash user3/prbranch3
git commit -S
[...]
git push --set-upstream origin develop-next
```
2. Create the Pull Request with `release` as the base branch. If any
of the included PRs are still open,
[use closing keywords](https://docs.github.com/articles/closing-issues-using-keywords)
in the description to ensure they are closed when the code is
released. e.g. "Closes #1234"
3. Instead of the default template, reuse and update the message from
the previous release. Include the following verbiage somewhere in
the description:
```
The base branch is release. All releases (including betas) go in
release. This PR will be merged with --ff-only (not squashed or
rebased, and not using the GitHub UI) to both release and develop.
```
4. Sign-offs for the three platforms usually occur offline, but at
least one approval will be needed on the PR.
5. Once everything is ready to go, open a terminal, and do the
fast-forward merges manually. Do not push any branches until you
verify that all of them update correctly.
```
git fetch upstream
git checkout -b upstream--develop -t upstream/develop || git checkout upstream--develop
git reset --hard upstream/develop
# develop-next must be signed already!
git merge --ff-only origin/develop-next
git checkout -b upstream--release -t upstream/release || git checkout upstream--release
git reset --hard upstream/release
git merge --ff-only origin/develop-next
# Only do these 3 steps if pushing a release. No betas or RCs
git checkout -b upstream--master -t upstream/master || git checkout upstream--master
git reset --hard upstream/master
git merge --ff-only origin/develop-next
# Check that all of the branches are updated
git log -1 --oneline
# The output should look like:
# 02ec8b7962 (HEAD -> upstream--master, origin/develop-next, upstream--release, upstream--develop, develop-next) Set version to 2.2.0-rc1
# Note that all of the upstream--develop/release/master are on this commit.
# (Master will be missing for betas, etc.)
# Just to be safe, do a dry run first:
git push --dry-run upstream-push HEAD:develop
git push --dry-run upstream-push HEAD:release
# git push --dry-run upstream-push HEAD:master
# Now push
git push upstream-push HEAD:develop
git push upstream-push HEAD:release
# git push upstream-push HEAD:master
# Don't forget to tag the release, too.
git tag <version number>
git push upstream-push <version number>
```
6. Finally
[create a new release on Github](https://github.com/XRPLF/rippled/releases).
# Style guide
@@ -338,103 +117,12 @@ this:
You can format individual files in place by running `clang-format -i <file>...`
from any directory within this project.
There is a Continuous Integration job that runs clang-format on pull requests. If the code doesn't comply, a patch file that corrects auto-fixable formatting issues is generated.
To download the patch file:
1. Next to `clang-format / check (pull_request) Failing after #s` -> click **Details** to open the details page.
2. Left menu -> click **Summary**
3. Scroll down to near the bottom-right under `Artifacts` -> click **clang-format.patch**
4. Download the zip file and extract it to your local git repository. Run `git apply [patch-file-name]`.
5. Commit and push.
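As a sketch of steps 4 and 5, assuming the artifact downloads as
`clang-format.patch.zip` (the name may vary):

```
unzip clang-format.patch.zip
git apply [patch-file-name]
git add -u
git commit -m "chore: Apply clang-format patch"
git push
```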
You can install a pre-commit hook to automatically run `clang-format` before every commit:
```
pip3 install pre-commit
pre-commit install
```
## Contracts and instrumentation
We are using [Antithesis](https://antithesis.com/) for continuous fuzzing,
and keep a copy of [Antithesis C++ SDK](https://github.com/antithesishq/antithesis-sdk-cpp/)
in `external/antithesis-sdk`. One of the aims of fuzzing is to identify bugs
by finding external conditions which cause contract violations inside `rippled`.
The contracts are expressed as `XRPL_ASSERT` or `UNREACHABLE` (defined in
`include/xrpl/beast/utility/instrumentation.h`), which are effectively (outside
of Antithesis) wrappers for `assert(...)` with an added name. The purpose of the
name is to give each contract a stable identity that does not rely on line numbers.
When `rippled` is built with the Antithesis instrumentation enabled
(using the `voidstar` CMake option) and run on the Antithesis platform, the
contracts become
[test properties](https://antithesis.com/docs/using_antithesis/properties.html);
otherwise they are just like a regular `assert`.
To learn more about Antithesis, see
[How Antithesis Works](https://antithesis.com/docs/introduction/how_antithesis_works.html)
and [C++ SDK](https://antithesis.com/docs/using_antithesis/sdk/cpp/overview.html#).
We continue to use the old style `assert` or `assert(false)` in certain
locations, where the reporting of contract violations on the Antithesis
platform is either not possible or not useful.
For this reason:
* The locations where `assert` or `assert(false)` contracts should continue to be used:
* `constexpr` functions
* unit tests i.e. files under `src/test`
* unit tests-related modules (files under `beast/test` and `beast/unit_test`)
* Outside of the listed locations, do not use `assert`; use `XRPL_ASSERT` instead,
giving it a unique name with a short description of the contract.
* Outside of the listed locations, do not use `assert(false)`; use
`UNREACHABLE` instead, giving it a unique name with a description of the
condition being violated
* The contract name should start with a full name (including scope) of the
function, optionally a named lambda, followed by a colon ` : ` and a brief
(typically at most five words) description. `UNREACHABLE` contracts
can use slightly longer descriptions. If there are multiple overloads of the
function, use common sense to balance both brevity and unambiguity of the
function name. NOTE: the purpose of the name is to provide a stable means of
uniquely identifying every contract; for this reason, try to avoid elements
which can change in obvious refactors or when reinforcing the condition.
* Contract description typically (except for `UNREACHABLE`) should describe the
_expected_ condition, as in "I assert that _expected_ is true".
* Contract description for `UNREACHABLE` should describe the _unexpected_
situation which caused the line to have been reached.
* An example of a good name for an
`UNREACHABLE` macro is `"Json::operator==(Value, Value) : invalid type"`; an
example of a good name for an `XRPL_ASSERT` macro is
`"Json::Value::asCString : valid type"`.
* Example **bad** name
`"RFC1751::insert(char* s, int x, int start, int length) : length is greater than or equal zero"`
(missing namespace, unnecessary full function signature, description too verbose).
Good name: `"ripple::RFC1751::insert : minimum length"`.
* In a **few** well-justified cases a non-standard name can be used, in which case a
comment should be placed to explain the rationale (example in `contract.cpp`)
* Do **not** rename a contract without a good reason (e.g. the name no longer
reflects the location or the condition being checked)
* Do not use `std::unreachable`
* Do not put contracts where they can be violated by an external condition
(e.g. timing, data payload before mandatory validation etc.) as this creates
bogus bug reports (and causes crashes of Debug builds)
## Unit Tests
To execute all unit tests:
```
rippled --unittest --unittest-jobs=<number of cores>
```
(Note: Using multiple cores on a Mac M1 can cause spurious test failures. The
cause is still under investigation. If you observe this problem, try specifying fewer jobs.)
To run a specific set of test suites:
```
rippled --unittest TestSuiteName
```
Note: In this example, all tests with prefix `TestSuiteName` will be run, so if
`TestSuiteName1` and `TestSuiteName2` both exist, then both tests will run.
Alternatively, if the given name is an exact match for a unit test, no
partial matches are run, i.e. if a unit test with a title of `TestSuiteName`
exists, then no other unit test will be executed, apart from `TestSuiteName`.
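For instance, if suites named `TestSuiteName`, `TestSuiteName1`, and
`TestSuiteName2` all exist (names hypothetical):

```
# Exact match exists, so only TestSuiteName runs:
rippled --unittest TestSuiteName

# No suite is named exactly "TestSuite", so all three run:
rippled --unittest TestSuite
```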
## Avoid
1. Proliferation of nearly identical code.
@@ -488,11 +176,10 @@ existing maintainer without a vote.
## Current Maintainers
* [Richard Holland](https://github.com/RichardAH) (XRPL Labs + INFTF)
* [Denis Angell](https://github.com/dangell7) (XRPL Labs + INFTF)
* [Wietse Wind](https://github.com/WietseWind) (XRPL Labs + INFTF)
* [tequ](https://github.com/tequdev) (Independent + INFTF)
* [Richard Holland](https://github.com/RichardAH) (XRPL Labs + XRP Ledger Foundation)
* [Denis Angell](https://github.com/dangell7) (XRPL Labs + XRP Ledger Foundation)
* [Wietse Wind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
[1]: https://docs.github.com/en/get-started/quickstart/contributing-to-projects
[2]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/about-pull-request-merges#squash-and-merge-your-commits

View File

@@ -1,8 +1,8 @@
# Xahau
**Note:** Throughout this README, references to "we" or "our" pertain to the community and contributors involved in the Xahau network. It does not imply a legal entity or a specific collection of individuals.
[Xahau](https://xahau.network/) is a decentralized cryptographic ledger that builds upon the robust foundation of the XRP Ledger. It inherits the XRP Ledger's Byzantine Fault Tolerant consensus algorithm and enhances it with additional features and functionalities. Developers and users familiar with the XRP Ledger will find that most documentation and tutorials available on [xrpl.org](https://xrpl.org) are relevant and applicable to Xahau, including those related to running validators and managing validator keys. For Xahau specific documentation you can visit our [documentation](https://xahau.network/)
[Xahau](https://xahau.network/) is a decentralized cryptographic ledger that builds upon the robust foundation of the XRP Ledger. It inherits the XRP Ledger's Byzantine Fault Tolerant consensus algorithm and enhances it with additional features and functionalities. Developers and users familiar with the XRP Ledger will find that most documentation and tutorials available on [xrpl.org](https://xrpl.org) are relevant and applicable to Xahau, including those related to running validators and managing validator keys. For Xahau specific documentation you can visit our [documentation](https://docs.xahau.network/)
## XAH
XAH is the public, counterparty-free asset native to Xahau and functions primarily as network gas. Transactions submitted to the Xahau network must supply an appropriate amount of XAH, to be burnt by the network as a fee, in order to be successfully included in a validated ledger. In addition, XAH also acts as a bridge currency within the Xahau DEX. XAH is traded on the open-market and is available for anyone to access. Xahau was created in 2023 with a supply of 600 million units of XAH.
@@ -12,7 +12,7 @@ The server software that powers Xahau is called `xahaud` and is available in thi
### Build from Source
* [Read the build instructions in our documentation](https://xahau.network/infrastructure/building-xahau)
* [Read the build instructions in our documentation](https://docs.xahau.network/infrastructure/building-xahau)
* If you encounter any issues, please [open an issue](https://github.com/xahau/xahaud/issues)
## Highlights of Xahau
@@ -58,14 +58,14 @@ git-subtree. See those directories' README files for more details.
- **Documentation**: Documentation for XRPL, Xahau and Hooks.
- [Xrpl Documentation](https://xrpl.org)
- [Xahau Documentation](https://xahau.network/)
- [Xahau Documentation](https://docs.xahau.network/)
- [Hooks Technical Documentation](https://xrpl-hooks.readme.io/)
- **Explorers**: Explore the Xahau Network using various explorers:
- **Explorers**: Explore the Xahau ledger using various explorers:
- [xahauexplorer.com](https://xahauexplorer.com)
- [xahscan.com](https://xahscan.com)
- [xahau.xrpl.org](https://xahau.xrpl.org)
- [explorer.xahau.network](https://explorer.xahau.network)
- **Testnet & Faucet**: Test applications and obtain test XAH at [xahau-test.net](https://xahau-test.net) and use the testnet explorer at [explorer.xahau.network](https://explorer.xahau.network).
- **Supporting Wallets**: A list of wallets that support XAH and Xahau-based assets.
- [Xaman](https://xaman.app)
- [Crossmark](https://crossmark.io)
- [Xumm](https://xumm.app)
- [Crossmark](https://crossmark.io)

File diff suppressed because it is too large

View File

@@ -61,12 +61,13 @@ For these complaints or reports, please [contact our support team](mailto:bugs@x
### The following types of security problems are excluded
1. **In scope**. Only bugs in software under the scope of the program qualify. Currently, that means `xahaud` and `xahau-lib`.
2. **Relevant**. A security issue, posing a danger to user funds, privacy or the operation of the Xahau Network.
3. **Original and previously unknown**. Bugs that are already known and discussed in public do not qualify. Previously reported bugs, even if publicly unknown, are not eligible.
4. **Specific**. We welcome general security advice or recommendations, but we cannot pay bounties for that.
5. **Fixable**. There has to be something we can do to permanently fix the problem. Note that bugs in other people's software may still qualify in some cases. For example, if you find a bug in a library that we use which can compromise the security of software that is in scope and we can get it fixed, you may qualify for a bounty.
6. **Unused**. If you use the exploit to attack the Xahau Network, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker, we reserve the right not to pay a bounty.
- (D)DOS attacks
- Error messages or error pages without sensitive data
- Tests & sample data as publicly available in our repositories at Github
- Common issues like browser header warnings or DNS configuration, identified by vulnerability scans
- Vulnerability scan reports for software we publicly use
- Security issues related to outdated OSes, browsers or plugins
- Reports for security problems that we have been notified of before
Please note: Reports that are lacking any proof (such as screenshots or other data), detailed information or details on how to reproduce any unexpected result will be investigated but will not be eligible for any reward.

View File

@@ -1,218 +0,0 @@
#!/bin/bash
set -o errexit
marker_base=f62f74da10c5936c64bd16cd509a8b68f1464e41
marker_commit=${1:-${marker_base}}
if [ $(git merge-base ${marker_commit} ${marker_base}) != ${marker_base} ]; then
echo "first marker commit not an ancestor: ${marker_commit}"
exit 1
fi
if [ $(git merge-base ${marker_commit} HEAD) != $(git rev-parse --verify ${marker_commit}) ]; then
echo "given marker commit not an ancestor: ${marker_commit}"
exit 1
fi
if [ -e Builds/CMake ]; then
echo move CMake
git mv Builds/CMake cmake
git add --update .
git commit -m 'Move CMake directory' --author 'Pretty Printer <cpp@ripple.com>'
fi
if [ -e src/ripple ]; then
echo move protocol buffers
mkdir -p include/xrpl
if [ -e src/ripple/proto ]; then
git mv src/ripple/proto include/xrpl
fi
extract_list() {
git show ${marker_commit}:Builds/CMake/RippledCore.cmake | \
awk "/END ${1}/ { p = 0 } p && /src\/ripple/; /BEGIN ${1}/ { p = 1 }" | \
sed -e 's#src/ripple/##' -e 's#[^a-z]\+$##'
}
move_files() {
oldroot="$1"; shift
newroot="$1"; shift
detail="$1"; shift
files=("$@")
for file in ${files[@]}; do
if [ ! -e ${oldroot}/${file} ]; then
continue
fi
dir=$(dirname ${file})
if [ $(basename ${dir}) == 'details' ]; then
dir=$(dirname ${dir})
fi
if [ $(basename ${dir}) == 'impl' ]; then
dir="$(dirname ${dir})/${detail}"
fi
mkdir -p ${newroot}/${dir}
git mv ${oldroot}/${file} ${newroot}/${dir}
done
}
echo move libxrpl headers
files=$(extract_list 'LIBXRPL HEADERS')
files+=(
basics/SlabAllocator.h
beast/asio/io_latency_probe.h
beast/container/aged_container.h
beast/container/aged_container_utility.h
beast/container/aged_map.h
beast/container/aged_multimap.h
beast/container/aged_multiset.h
beast/container/aged_set.h
beast/container/aged_unordered_map.h
beast/container/aged_unordered_multimap.h
beast/container/aged_unordered_multiset.h
beast/container/aged_unordered_set.h
beast/container/detail/aged_associative_container.h
beast/container/detail/aged_container_iterator.h
beast/container/detail/aged_ordered_container.h
beast/container/detail/aged_unordered_container.h
beast/container/detail/empty_base_optimization.h
beast/core/LockFreeStack.h
beast/insight/Collector.h
beast/insight/Counter.h
beast/insight/CounterImpl.h
beast/insight/Event.h
beast/insight/EventImpl.h
beast/insight/Gauge.h
beast/insight/GaugeImpl.h
beast/insight/Group.h
beast/insight/Groups.h
beast/insight/Hook.h
beast/insight/HookImpl.h
beast/insight/Insight.h
beast/insight/Meter.h
beast/insight/MeterImpl.h
beast/insight/NullCollector.h
beast/insight/StatsDCollector.h
beast/test/fail_counter.h
beast/test/fail_stream.h
beast/test/pipe_stream.h
beast/test/sig_wait.h
beast/test/string_iostream.h
beast/test/string_istream.h
beast/test/string_ostream.h
beast/test/test_allocator.h
beast/test/yield_to.h
beast/utility/hash_pair.h
beast/utility/maybe_const.h
beast/utility/temp_dir.h
# included by only json/impl/json_assert.h
json/json_errors.h
protocol/PayChan.h
protocol/RippleLedgerHash.h
protocol/messages.h
protocol/st.h
)
files+=(
basics/README.md
crypto/README.md
json/README.md
protocol/README.md
resource/README.md
)
move_files src/ripple include/xrpl detail ${files[@]}
echo move libxrpl sources
files=$(extract_list 'LIBXRPL SOURCES')
move_files src/ripple src/libxrpl "" ${files[@]}
echo check leftovers
dirs=$(cd include/xrpl; ls -d */)
dirs=$(cd src/ripple; ls -d ${dirs} 2>/dev/null || true)
files="$(cd src/ripple; find ${dirs} -type f)"
if [ -n "${files}" ]; then
echo "leftover files:"
echo ${files}
exit
fi
echo remove empty directories
empty_dirs="$(cd src/ripple; find ${dirs} -depth -type d)"
for dir in ${empty_dirs[@]}; do
if [ -e ${dir} ]; then
rmdir ${dir}
fi
done
echo move xrpld sources
files=$(
extract_list 'XRPLD SOURCES'
cd src/ripple
find * -regex '.*\.\(h\|ipp\|md\|pu\|uml\|png\)'
)
move_files src/ripple src/xrpld detail ${files[@]}
files="$(cd src/ripple; find . -type f)"
if [ -n "${files}" ]; then
echo "leftover files:"
echo ${files}
exit
fi
fi
rm -rf src/ripple
echo rename .hpp to .h
find include src -name '*.hpp' -exec bash -c 'f="{}"; git mv "${f}" "${f%hpp}h"' \;
echo move PerfLog.h
if [ -e include/xrpl/basics/PerfLog.h ]; then
git mv include/xrpl/basics/PerfLog.h src/xrpld/perflog
fi
# Make sure all protobuf includes have the correct prefix.
protobuf_replace='s:^#include\s*["<].*org/xrpl\([^">]\+\)[">]:#include <xrpl/proto/org/xrpl\1>:'
# Make sure first-party includes use angle brackets and .h extension.
ripple_replace='s:include\s*["<]ripple/\(.*\)\.h\(pp\)\?[">]:include <ripple/\1.h>:'
beast_replace='s:include\s*<beast/:include <xrpl/beast/:'
# Rename impl directories to detail.
impl_rename='s:\(<xrpl.*\)/impl\(/details\)\?/:\1/detail/:'
echo rewrite includes in libxrpl
find include/xrpl src/libxrpl -type f -exec sed -i \
-e "${protobuf_replace}" \
-e "${ripple_replace}" \
-e "${beast_replace}" \
-e 's:^#include <ripple/:#include <xrpl/:' \
-e "${impl_rename}" \
{} +
echo rewrite includes in xrpld
# # https://www.baeldung.com/linux/join-multiple-lines
libxrpl_dirs="$(cd include/xrpl; ls -d1 */ | sed 's:/$::')"
# libxrpl_dirs='a\nb\nc\n'
readarray -t libxrpl_dirs <<< "${libxrpl_dirs}"
# libxrpl_dirs=(a b c)
libxrpl_dirs=$(printf -v txt '%s\\|' "${libxrpl_dirs[@]}"; echo "${txt%\\|}")
# libxrpl_dirs='a\|b\|c'
find src/xrpld src/test -type f -exec sed -i \
-e "${protobuf_replace}" \
-e "${ripple_replace}" \
-e "${beast_replace}" \
-e "s:^#include <ripple/basics/PerfLog.h>:#include <xrpld/perflog/PerfLog.h>:" \
-e "s:^#include <ripple/\(${libxrpl_dirs}\)/:#include <xrpl/\1/:" \
-e 's:^#include <ripple/:#include <xrpld/:' \
-e "${impl_rename}" \
{} +
git commit -m 'Rearrange sources' --author 'Pretty Printer <cpp@ripple.com>'
find include src -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format-10 -i {} +
git add --update .
git commit -m 'Rewrite includes' --author 'Pretty Printer <cpp@ripple.com>'
./Builds/levelization/levelization.sh
git add --update .
git commit -m 'Recompute loops' --author 'Pretty Printer <cpp@ripple.com>'

View File

@@ -1,9 +1,4 @@
#!/bin/bash -u
# We use set -e and bash with -u to bail on first non zero exit code of any
# processes launched or upon any unbound variable.
# We use set -x to print commands before running them to help with
# debugging.
set -ex
#!/bin/bash
echo "START INSIDE CONTAINER - CORE"
@@ -12,13 +7,6 @@ echo "-- GITHUB_REPOSITORY: $1"
echo "-- GITHUB_SHA: $2"
echo "-- GITHUB_RUN_NUMBER: $4"
# Use mounted filesystem for temp files to avoid container space limits
export TMPDIR=/io/tmp
export TEMP=/io/tmp
export TMP=/io/tmp
mkdir -p /io/tmp
echo "=== Using temp directory: /io/tmp ==="
umask 0000;
cd /io/ &&
@@ -32,58 +20,29 @@ if [[ "$?" -ne "0" ]]; then
exit 127
fi
BUILD_TYPE=Release
mv cmake/deps/WasmEdge.cmake cmake/deps/WasmEdge.old &&
perl -i -pe "s/^(\\s*)-DBUILD_SHARED_LIBS=OFF/\\1-DBUILD_SHARED_LIBS=OFF\\n\\1-DROCKSDB_BUILD_SHARED=OFF/g" Builds/CMake/deps/Rocksdb.cmake &&
mv Builds/CMake/deps/WasmEdge.cmake Builds/CMake/deps/WasmEdge.old &&
echo "find_package(LLVM REQUIRED CONFIG)
message(STATUS \"Found LLVM \${LLVM_PACKAGE_VERSION}\")
message(STATUS \"Found LLVM ${LLVM_PACKAGE_VERSION}\")
message(STATUS \"Using LLVMConfig.cmake in: \${LLVM_DIR}\")
add_library (wasmedge STATIC IMPORTED GLOBAL)
set_target_properties(wasmedge PROPERTIES IMPORTED_LOCATION \${WasmEdge_LIB})
target_link_libraries (ripple_libs INTERFACE wasmedge)
add_library (wasmedge::wasmedge ALIAS wasmedge)
add_library (NIH::WasmEdge ALIAS wasmedge)
message(\"WasmEdge DONE\")
" > cmake/deps/WasmEdge.cmake &&
export LDFLAGS="-static-libstdc++"
export CMAKE_EXE_LINKER_FLAGS="-static-libstdc++"
export CMAKE_STATIC_LINKER_FLAGS="-static-libstdc++"
git config --global --add safe.directory /io &&
git checkout src/libxrpl/protocol/BuildInfo.cpp &&
sed -i s/\"0.0.0\"/\"$(date +%Y).$(date +%-m).$(date +%-d)-$(git rev-parse --abbrev-ref HEAD)$(if [ -n "$4" ]; then echo "+$4"; fi)\"/g src/libxrpl/protocol/BuildInfo.cpp &&
conan export external/snappy --version 1.1.10 --user xahaud --channel stable &&
conan export external/soci --version 4.0.3 --user xahaud --channel stable &&
" > Builds/CMake/deps/WasmEdge.cmake &&
git checkout src/ripple/protocol/impl/BuildInfo.cpp &&
sed -i s/\"0.0.0\"/\"$(date +%Y).$(date +%-m).$(date +%-d)-$(git rev-parse --abbrev-ref HEAD)+$4\"/g src/ripple/protocol/impl/BuildInfo.cpp &&
cd release-build &&
# Install dependencies - tool_requires in conanfile.py handles glibc 2.28 compatibility
# for build tools (protoc, grpc plugins, b2) in HBB environment
# The tool_requires('b2/5.3.2') in conanfile.py should force b2 to build from source
# with the correct toolchain, avoiding the GLIBCXX_3.4.29 issue
echo "=== Installing dependencies ===" &&
conan install .. --output-folder . --build missing --settings build_type=$BUILD_TYPE \
-o with_wasmedge=False -o tool_requires_b2=True &&
cmake .. -G Ninja \
-DCMAKE_BUILD_TYPE=$BUILD_TYPE \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_EXE_LINKER_FLAGS="-static-libstdc++" \
-DLLVM_DIR=$LLVM_DIR \
-DWasmEdge_LIB=$WasmEdge_LIB \
-Dxrpld=TRUE \
-Dtests=TRUE &&
ccache -z &&
ninja -j $3 && echo "=== Re-running final link with verbose output ===" && rm -f rippled && ninja -v rippled &&
ccache -s &&
cmake .. -DCMAKE_BUILD_TYPE=Release -DBoost_NO_BOOST_CMAKE=ON -DLLVM_DIR=/usr/lib64/llvm13/lib/cmake/llvm/ -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ -DWasmEdge_LIB=/usr/local/lib64/libwasmedge.a &&
make -j$3 VERBOSE=1 &&
strip -s rippled &&
mv rippled xahaud &&
echo "=== Full ldd output ===" &&
ldd xahaud &&
echo "=== Running libcheck ===" &&
libcheck xahaud &&
echo "Build host: `hostname`" > release.info &&
echo "Build date: `date`" >> release.info &&
echo "Build md5: `md5sum xahaud`" >> release.info &&
echo "Git remotes:" >> release.info &&
git remote -v >> release.info &&
git remote -v >> release.info
echo "Git status:" >> release.info &&
git status -v >> release.info &&
echo "Git log [last 20]:" >> release.info &&
@@ -102,9 +61,9 @@ fi
cd ..;
mv src/xrpld/net/detail/RegisterSSLCerts.cpp.old src/xrpld/net/detail/RegisterSSLCerts.cpp;
mv cmake/deps/WasmEdge.old cmake/deps/WasmEdge.cmake;
rm src/certs/certbundle.h;
git checkout src/libxrpl/protocol/BuildInfo.cpp;
mv src/ripple/net/impl/RegisterSSLCerts.cpp.old src/ripple/net/impl/RegisterSSLCerts.cpp;
mv Builds/CMake/deps/Rocksdb.cmake.old Builds/CMake/deps/Rocksdb.cmake;
mv Builds/CMake/deps/WasmEdge.old Builds/CMake/deps/WasmEdge.cmake;
echo "END INSIDE CONTAINER - CORE"

build-full.sh Normal file → Executable file

@@ -1,9 +1,4 @@
#!/bin/bash -u
# We use bash with -u and set -e to bail on the first non-zero exit code of
# any process launched, or on any unbound variable.
# We use set -x to print commands before running them, to help with
# debugging.
set -e
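# A stricter one-line variant (not what this script uses) also aborts on unset
# variables and on failures in the middle of a pipeline:
#   set -euxo pipefail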
#!/bin/bash
echo "START INSIDE CONTAINER - FULL"
@@ -14,14 +9,12 @@ echo "-- GITHUB_RUN_NUMBER: $4"
umask 0000;
####
cd /io;
mkdir -p src/certs;
mkdir src/certs;
curl --silent -k https://raw.githubusercontent.com/RichardAH/rippled-release-builder/main/ca-bundle/certbundle.h -o src/certs/certbundle.h;
if [ "`grep certbundle.h src/xrpld/net/detail/RegisterSSLCerts.cpp | wc -l`" -eq "0" ]
if [ "`grep certbundle.h src/ripple/net/impl/RegisterSSLCerts.cpp | wc -l`" -eq "0" ]
then
cp src/xrpld/net/detail/RegisterSSLCerts.cpp src/xrpld/net/detail/RegisterSSLCerts.cpp.old
cp src/ripple/net/impl/RegisterSSLCerts.cpp src/ripple/net/impl/RegisterSSLCerts.cpp.old
perl -i -pe "s/^{/{
#ifdef EMBEDDED_CA_BUNDLE
BIO *cbio = BIO_new_mem_buf(ca_bundle.data(), ca_bundle.size());
@@ -61,18 +54,95 @@ then
BIO_free(cbio);
}
}
#endif/g" src/xrpld/net/detail/RegisterSSLCerts.cpp &&
sed -i "s/#include <xrpld\/net\/RegisterSSLCerts.h>/\0\n#include <certs\/certbundle.h>/g" src/xrpld/net/detail/RegisterSSLCerts.cpp
#endif/g" src/ripple/net/impl/RegisterSSLCerts.cpp &&
sed -i "s/#include <ripple\/net\/RegisterSSLCerts.h>/\0\n#include <certs\/certbundle.h>/g" src/ripple/net/impl/RegisterSSLCerts.cpp
fi
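# Sanity check (illustrative): the grep guard above makes the patch idempotent;
# after one run the include is present, so re-running the script skips the
# perl/sed block entirely:
#   grep -c 'certbundle.h' src/ripple/net/impl/RegisterSSLCerts.cpp   # expect 1 after patching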
# Environment setup moved to Dockerfile in release-builder.sh
source /opt/rh/gcc-toolset-11/enable
export PATH=/usr/local/bin:$PATH
export CC='/usr/lib64/ccache/gcc' &&
export CXX='/usr/lib64/ccache/g++' &&
mkdir .nih_c;
mkdir .nih_toolchain;
cd .nih_toolchain &&
yum install -y wget lz4 lz4-devel git llvm13-static.x86_64 llvm13-devel.x86_64 devtoolset-10-binutils zlib-static ncurses-static -y \
devtoolset-7-gcc-c++ \
devtoolset-9-gcc-c++ \
devtoolset-10-gcc-c++ \
snappy snappy-devel \
zlib zlib-devel \
lz4-devel \
libasan &&
export PATH=`echo $PATH | sed -E "s/devtoolset-9/devtoolset-7/g"` &&
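# Illustrative: each PATH sed like the one above swaps which Red Hat devtoolset
# comes first in PATH (e.g. /opt/rh/devtoolset-9/root/usr/bin ->
# /opt/rh/devtoolset-7/root/usr/bin), so each dependency below is compiled with
# the GCC major it is known to build with.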
echo "-- Install ZStd 1.1.3 --" &&
yum install epel-release -y &&
ZSTD_VERSION="1.1.3" &&
( wget -nc -q -O zstd-${ZSTD_VERSION}.tar.gz https://github.com/facebook/zstd/archive/v${ZSTD_VERSION}.tar.gz; echo "" ) &&
tar xzvf zstd-${ZSTD_VERSION}.tar.gz &&
cd zstd-${ZSTD_VERSION} &&
make -j$3 install &&
cd .. &&
echo "-- Install Cmake 3.23.1 --" &&
pwd &&
( wget -nc -q https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1-linux-x86_64.tar.gz; echo "" ) &&
tar -xzf cmake-3.23.1-linux-x86_64.tar.gz -C /hbb/ &&
echo "-- Install Boost 1.75.0 --" &&
pwd &&
( wget -nc -q https://boostorg.jfrog.io/artifactory/main/release/1.75.0/source/boost_1_75_0.tar.gz; echo "" ) &&
tar -xzf boost_1_75_0.tar.gz &&
cd boost_1_75_0 && ./bootstrap.sh && ./b2 link=static -j$3 && ./b2 install &&
cd ../ &&
echo "-- Install Protobuf 3.20.0 --" &&
pwd &&
( wget -nc -q https://github.com/protocolbuffers/protobuf/releases/download/v3.20.0/protobuf-all-3.20.0.tar.gz; echo "" ) &&
tar -xzf protobuf-all-3.20.0.tar.gz &&
cd protobuf-3.20.0/ &&
./autogen.sh && ./configure --prefix=/usr --disable-shared link=static && make -j$3 && make install &&
cd .. &&
echo "-- Build LLD --" &&
pwd &&
ln /usr/bin/llvm-config-13 /usr/bin/llvm-config &&
mv /opt/rh/devtoolset-9/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar-9 &&
ln /opt/rh/devtoolset-10/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar &&
( wget -nc -q https://github.com/llvm/llvm-project/releases/download/llvmorg-13.0.1/lld-13.0.1.src.tar.xz; echo "" ) &&
( wget -nc -q https://github.com/llvm/llvm-project/releases/download/llvmorg-13.0.1/libunwind-13.0.1.src.tar.xz; echo "" ) &&
tar -xf lld-13.0.1.src.tar.xz &&
tar -xf libunwind-13.0.1.src.tar.xz &&
cp -r libunwind-13.0.1.src/include libunwind-13.0.1.src/src lld-13.0.1.src/ &&
cd lld-13.0.1.src &&
rm -rf build CMakeCache.txt &&
mkdir build &&
cd build &&
cmake .. -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ -DCMAKE_INSTALL_PREFIX=/usr/lib64/llvm13/ -DCMAKE_BUILD_TYPE=Release &&
make -j$3 install &&
ln -s /usr/lib64/llvm13/lib/include/lld /usr/include/lld &&
cp /usr/lib64/llvm13/lib/liblld*.a /usr/local/lib/ &&
cd ../../ &&
echo "-- Build WasmEdge --" &&
( wget -nc -q https://github.com/WasmEdge/WasmEdge/archive/refs/tags/0.11.2.zip; unzip -o 0.11.2.zip; ) &&
cd WasmEdge-0.11.2 &&
( mkdir build; echo "" ) &&
cd build &&
export BOOST_ROOT="/usr/local/src/boost_1_75_0" &&
export Boost_LIBRARY_DIRS="/usr/local/lib" &&
export BOOST_INCLUDEDIR="/usr/local/src/boost_1_75_0" &&
export PATH=`echo $PATH | sed -E "s/devtoolset-7/devtoolset-9/g"` &&
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DWASMEDGE_BUILD_SHARED_LIB=OFF \
-DWASMEDGE_BUILD_STATIC_LIB=ON \
-DWASMEDGE_BUILD_AOT_RUNTIME=ON \
-DWASMEDGE_FORCE_DISABLE_LTO=ON \
-DCMAKE_POSITION_INDEPENDENT_CODE=ON \
-DWASMEDGE_LINK_LLVM_STATIC=ON \
-DWASMEDGE_BUILD_PLUGINS=OFF \
-DWASMEDGE_LINK_TOOLS_STATIC=ON \
-DBoost_NO_BOOST_CMAKE=ON -DLLVM_DIR=/usr/lib64/llvm13/lib/cmake/llvm/ -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ &&
make -j$3 install &&
export PATH=`echo $PATH | sed -E "s/devtoolset-9/devtoolset-10/g"` &&
cp -r include/api/wasmedge /usr/include/ &&
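# Sanity check (illustrative): confirm the static archive and headers landed
# where the WasmEdge.cmake stub generated earlier expects them:
#   ls -l /usr/local/lib64/libwasmedge.a /usr/include/wasmedge/wasmedge.h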
cd /io/ &&
echo "-- Build Rippled --" &&
pwd &&
cp Builds/CMake/deps/Rocksdb.cmake Builds/CMake/deps/Rocksdb.cmake.old &&
echo "MOVING TO [ build-core.sh ]";
echo "MOVING TO [ build-core.sh ]"
cd /io;
printenv > .env.temp;
cat .env.temp | grep '=' | sed s/\\\(^[^=]\\+=\\\)/\\1\\\"/g|sed s/\$/\\\"/g > .env;
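# Illustrative transform (hypothetical variable): the grep/sed pipeline wraps
# each value in double quotes so the snapshot can be sourced by a later shell:
#   PATH=/usr/local/bin:/usr/bin   ->   PATH="/usr/local/bin:/usr/bin"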

File diff suppressed because it is too large

cfg/rippled-reporting.cfg Normal file

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff