mirror of
https://github.com/Xahau/xahaud.git
synced 2026-02-20 13:52:22 +00:00
Compare commits
98 Commits
emit-atomi
...
fixAMMv1_3
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8dfb437f63 | ||
|
|
821671a60d | ||
|
|
a61d96386d | ||
|
|
906b559f08 | ||
|
|
66dbb11738 | ||
|
|
d18c4d9b33 | ||
|
|
1288115fb1 | ||
|
|
a0b5a59e36 | ||
|
|
bdc88938c7 | ||
|
|
17baba1901 | ||
|
|
c3e8039c5a | ||
|
|
83ad524f9e | ||
|
|
2d7f8f1d55 | ||
|
|
510b47e8da | ||
|
|
d9e7b50395 | ||
|
|
2ecfbfc8ff | ||
|
|
3c49f80013 | ||
|
|
090e4ad25e | ||
|
|
67a6970031 | ||
|
|
efd5f9f6db | ||
|
|
309e517e70 | ||
|
|
edafb1fed6 | ||
|
|
7a0c914ce9 | ||
|
|
d209272379 | ||
|
|
fb7a0d71de | ||
|
|
9120fffd95 | ||
|
|
d712345c84 | ||
|
|
12e1afb694 | ||
|
|
c355ad9971 | ||
|
|
cc70d48e91 | ||
|
|
a8d7b2619e | ||
|
|
775fb3a8b2 | ||
|
|
8d1aadd23d | ||
|
|
8d2a5e3c4e | ||
|
|
5a118a4e2b | ||
|
|
960f87857e | ||
|
|
f731bcfeba | ||
|
|
374b361daa | ||
|
|
52ccf27aa3 | ||
|
|
f6fe33103c | ||
|
|
b9d966dd32 | ||
|
|
e3ccddfaca | ||
|
|
36e51662fe | ||
|
|
e319619dce | ||
|
|
7e92374436 | ||
|
|
7ef8473c85 | ||
|
|
2073b562f0 | ||
|
|
39353a6557 | ||
|
|
64fb39d033 | ||
|
|
1bfae1a296 | ||
|
|
f6a4e8f36d | ||
|
|
70bbe83525 | ||
|
|
bbff5e29d8 | ||
|
|
c42cb0df62 | ||
|
|
8efc02b2d4 | ||
|
|
ffcb203ce1 | ||
|
|
859391327d | ||
|
|
4a65401448 | ||
|
|
9ec631b1d8 | ||
|
|
8bcebdea42 | ||
|
|
4cc63c028a | ||
|
|
b9ed90e08b | ||
|
|
066f8ed9ef | ||
|
|
9ed20a4f1c | ||
|
|
89ffc1969b | ||
|
|
79fdafe638 | ||
|
|
2a10013dfc | ||
|
|
6f148a8ac7 | ||
|
|
96222baf5e | ||
|
|
74477d2c13 | ||
|
|
9378f1a0ad | ||
|
|
6fa6a96e3a | ||
|
|
b0fcd36bcd | ||
|
|
1ec31e79c9 | ||
|
|
3487e2de67 | ||
|
|
9c8b005406 | ||
|
|
687ccf4203 | ||
|
|
83f09fd8ab | ||
|
|
1da00892d3 | ||
|
|
15c7ad6f78 | ||
|
|
1f12b9ec5a | ||
|
|
ad0531ad6c | ||
|
|
e580f7cfc0 | ||
|
|
094f011006 | ||
|
|
39d1c43901 | ||
|
|
b3e6a902cb | ||
|
|
fa1b93bfd8 | ||
|
|
92e3a927fc | ||
|
|
8f7ebf0377 | ||
|
|
46cf6785ab | ||
|
|
3c4c9c87c5 | ||
|
|
75636ee5c4 | ||
|
|
d1528021e2 | ||
|
|
7a790246fb | ||
|
|
d1395d0f41 | ||
|
|
b4f79257cb | ||
|
|
1a3d2db8ef | ||
|
|
2fc912d54d |
@@ -1,31 +0,0 @@
|
||||
name: 'Configure ccache'
|
||||
description: 'Sets up ccache with consistent configuration'
|
||||
|
||||
inputs:
|
||||
max_size:
|
||||
description: 'Maximum cache size'
|
||||
required: false
|
||||
default: '2G'
|
||||
hash_dir:
|
||||
description: 'Whether to include directory paths in hash'
|
||||
required: false
|
||||
default: 'true'
|
||||
compiler_check:
|
||||
description: 'How to check compiler for changes'
|
||||
required: false
|
||||
default: 'content'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Configure ccache
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p ~/.ccache
|
||||
export CONF_PATH="${CCACHE_CONFIGPATH:-${CCACHE_DIR:-$HOME/.ccache}/ccache.conf}"
|
||||
mkdir -p $(dirname "$CONF_PATH")
|
||||
echo "max_size = ${{ inputs.max_size }}" > "$CONF_PATH"
|
||||
echo "hash_dir = ${{ inputs.hash_dir }}" >> "$CONF_PATH"
|
||||
echo "compiler_check = ${{ inputs.compiler_check }}" >> "$CONF_PATH"
|
||||
ccache -p # Print config for verification
|
||||
ccache -z # Zero statistics before the build
|
||||
159
.github/actions/xahau-ga-build/action.yml
vendored
159
.github/actions/xahau-ga-build/action.yml
vendored
@@ -21,13 +21,17 @@ inputs:
|
||||
required: false
|
||||
default: ''
|
||||
compiler-id:
|
||||
description: 'Unique identifier for compiler/version combination used for cache keys'
|
||||
description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)'
|
||||
required: false
|
||||
default: ''
|
||||
cache_version:
|
||||
description: 'Cache version for invalidation'
|
||||
required: false
|
||||
default: '1'
|
||||
gha_cache_enabled:
|
||||
description: 'Whether to use actions/cache (disable for self-hosted with volume mounts)'
|
||||
required: false
|
||||
default: 'true'
|
||||
ccache_enabled:
|
||||
description: 'Whether to use ccache'
|
||||
required: false
|
||||
@@ -36,6 +40,29 @@ inputs:
|
||||
description: 'Main branch name for restore keys'
|
||||
required: false
|
||||
default: 'dev'
|
||||
stdlib:
|
||||
description: 'C++ standard library to use'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- libstdcxx
|
||||
- libcxx
|
||||
clang_gcc_toolchain:
|
||||
description: 'GCC version to use for Clang toolchain (e.g. 11, 13)'
|
||||
required: false
|
||||
default: ''
|
||||
ccache_max_size:
|
||||
description: 'Maximum ccache size'
|
||||
required: false
|
||||
default: '2G'
|
||||
ccache_hash_dir:
|
||||
description: 'Whether to include directory paths in hash'
|
||||
required: false
|
||||
default: 'true'
|
||||
ccache_compiler_check:
|
||||
description: 'How to check compiler for changes'
|
||||
required: false
|
||||
default: 'content'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
@@ -48,18 +75,37 @@ runs:
|
||||
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
|
||||
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Restore ccache directory
|
||||
- name: Configure ccache
|
||||
if: inputs.ccache_enabled == 'true'
|
||||
id: ccache-restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ~/.ccache
|
||||
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-
|
||||
shell: bash
|
||||
run: |
|
||||
# Create cache directories
|
||||
mkdir -p ~/.ccache-cache
|
||||
|
||||
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
|
||||
mkdir -p ~/.config/ccache
|
||||
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
|
||||
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
|
||||
|
||||
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
|
||||
mkdir -p ~/.config/ccache
|
||||
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
|
||||
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
|
||||
|
||||
# Configure ccache settings AFTER cache restore (prevents stale cached config)
|
||||
ccache --set-config=max_size=${{ inputs.ccache_max_size }}
|
||||
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
|
||||
ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
|
||||
ccache --set-config=cache_dir="$HOME/.ccache-cache"
|
||||
echo "CCACHE_DIR=$HOME/.ccache-cache" >> $GITHUB_ENV
|
||||
echo "📦 using ~/.ccache-cache as ccache cache directory"
|
||||
|
||||
# Print config for verification
|
||||
echo "=== ccache configuration ==="
|
||||
ccache -p
|
||||
|
||||
# Zero statistics before the build
|
||||
ccache -z
|
||||
|
||||
- name: Configure project
|
||||
shell: bash
|
||||
@@ -75,36 +121,97 @@ runs:
|
||||
if [ -n "${{ inputs.cxx }}" ]; then
|
||||
export CXX="${{ inputs.cxx }}"
|
||||
fi
|
||||
|
||||
# Configure ccache launcher args
|
||||
CCACHE_ARGS=""
|
||||
|
||||
# Create wrapper toolchain that overlays ccache on top of Conan's toolchain
|
||||
# This enables ccache for the main app build without affecting Conan dependency builds
|
||||
if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
|
||||
CCACHE_ARGS="-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
|
||||
cat > wrapper_toolchain.cmake <<'EOF'
|
||||
# Include Conan's generated toolchain first (sets compiler, flags, etc.)
|
||||
# Note: CMAKE_CURRENT_LIST_DIR is the directory containing this wrapper (.build/)
|
||||
include(${CMAKE_CURRENT_LIST_DIR}/build/generators/conan_toolchain.cmake)
|
||||
|
||||
# Overlay ccache configuration for main application build
|
||||
# This does NOT affect Conan dependency builds (already completed)
|
||||
set(CMAKE_C_COMPILER_LAUNCHER ccache CACHE STRING "C compiler launcher" FORCE)
|
||||
set(CMAKE_CXX_COMPILER_LAUNCHER ccache CACHE STRING "C++ compiler launcher" FORCE)
|
||||
EOF
|
||||
TOOLCHAIN_FILE="wrapper_toolchain.cmake"
|
||||
echo "✅ Created wrapper toolchain with ccache enabled"
|
||||
else
|
||||
TOOLCHAIN_FILE="build/generators/conan_toolchain.cmake"
|
||||
echo "ℹ️ Using Conan toolchain directly (ccache disabled)"
|
||||
fi
|
||||
|
||||
# Configure C++ standard library if specified
|
||||
# libstdcxx used for clang-14/16 to work around missing lexicographical_compare_three_way in libc++
|
||||
# libcxx can be used with clang-17+ which has full C++20 support
|
||||
# Note: -stdlib flag is Clang-specific, GCC always uses libstdc++
|
||||
CMAKE_CXX_FLAGS=""
|
||||
if [[ "${{ inputs.cxx }}" == clang* ]]; then
|
||||
# Only Clang needs the -stdlib flag
|
||||
if [ "${{ inputs.stdlib }}" = "libstdcxx" ]; then
|
||||
CMAKE_CXX_FLAGS="-stdlib=libstdc++"
|
||||
elif [ "${{ inputs.stdlib }}" = "libcxx" ]; then
|
||||
CMAKE_CXX_FLAGS="-stdlib=libc++"
|
||||
fi
|
||||
fi
|
||||
# GCC always uses libstdc++ and doesn't need/support the -stdlib flag
|
||||
|
||||
# Configure GCC toolchain for Clang if specified
|
||||
if [ -n "${{ inputs.clang_gcc_toolchain }}" ] && [[ "${{ inputs.cxx }}" == clang* ]]; then
|
||||
# Extract Clang version from compiler executable name (e.g., clang++-14 -> 14)
|
||||
clang_version=$(echo "${{ inputs.cxx }}" | grep -oE '[0-9]+$')
|
||||
|
||||
# Clang 16+ supports --gcc-install-dir (precise path specification)
|
||||
# Clang <16 only has --gcc-toolchain (uses discovery heuristics)
|
||||
if [ -n "$clang_version" ] && [ "$clang_version" -ge "16" ]; then
|
||||
# Clang 16+ uses --gcc-install-dir (canonical, precise)
|
||||
CMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS --gcc-install-dir=/usr/lib/gcc/x86_64-linux-gnu/${{ inputs.clang_gcc_toolchain }}"
|
||||
else
|
||||
# Clang 14-15 uses --gcc-toolchain (deprecated but necessary)
|
||||
# Note: This still uses discovery, so we hide newer GCC versions in the workflow
|
||||
CMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS --gcc-toolchain=/usr"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Run CMake configure
|
||||
# Note: conanfile.py hardcodes 'build/generators' as the output path.
|
||||
# If we're in a 'build' folder, Conan detects this and uses just 'generators/'
|
||||
# If we're in '.build' (non-standard), Conan adds the full 'build/generators/'
|
||||
# So we get: .build/build/generators/ with our non-standard folder name
|
||||
cmake .. \
|
||||
-G "${{ inputs.generator }}" \
|
||||
$CCACHE_ARGS \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||
${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
|
||||
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
|
||||
-Dtests=TRUE \
|
||||
-Dxrpld=TRUE
|
||||
|
||||
- name: Show ccache config before build
|
||||
if: inputs.ccache_enabled == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
echo "=========================================="
|
||||
echo "ccache configuration before build"
|
||||
echo "=========================================="
|
||||
ccache -p
|
||||
echo ""
|
||||
|
||||
- name: Build project
|
||||
shell: bash
|
||||
run: |
|
||||
cd ${{ inputs.build_dir }}
|
||||
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc)
|
||||
|
||||
# Check for verbose build flag in commit message
|
||||
VERBOSE_FLAG=""
|
||||
if echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-cmake-verbose\]'; then
|
||||
echo "🔊 [ci-ga-cmake-verbose] detected - enabling verbose output"
|
||||
VERBOSE_FLAG="-- -v"
|
||||
fi
|
||||
|
||||
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG}
|
||||
|
||||
- name: Show ccache statistics
|
||||
if: inputs.ccache_enabled == 'true'
|
||||
shell: bash
|
||||
run: ccache -s
|
||||
|
||||
- name: Save ccache directory
|
||||
if: inputs.ccache_enabled == 'true'
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ~/.ccache
|
||||
key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
|
||||
|
||||
146
.github/actions/xahau-ga-cache-restore/action.yml
vendored
Normal file
146
.github/actions/xahau-ga-cache-restore/action.yml
vendored
Normal file
@@ -0,0 +1,146 @@
|
||||
name: 'Cache Restore'
|
||||
description: 'Restores cache with optional clearing based on commit message tags'
|
||||
|
||||
inputs:
|
||||
path:
|
||||
description: 'A list of files, directories, and wildcard patterns to cache'
|
||||
required: true
|
||||
key:
|
||||
description: 'An explicit key for restoring the cache'
|
||||
required: true
|
||||
restore-keys:
|
||||
description: 'An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key'
|
||||
required: false
|
||||
default: ''
|
||||
cache-type:
|
||||
description: 'Type of cache (for logging purposes, e.g., "ccache-main", "Conan")'
|
||||
required: false
|
||||
default: 'cache'
|
||||
fail-on-cache-miss:
|
||||
description: 'Fail the workflow if cache entry is not found'
|
||||
required: false
|
||||
default: 'false'
|
||||
lookup-only:
|
||||
description: 'Check if a cache entry exists for the given input(s) without downloading it'
|
||||
required: false
|
||||
default: 'false'
|
||||
additional-clear-keys:
|
||||
description: 'Additional cache keys to clear (newline separated)'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
outputs:
|
||||
cache-hit:
|
||||
description: 'A boolean value to indicate an exact match was found for the primary key'
|
||||
value: ${{ steps.restore-cache.outputs.cache-hit }}
|
||||
cache-primary-key:
|
||||
description: 'The key that was used to restore the cache'
|
||||
value: ${{ steps.restore-cache.outputs.cache-primary-key }}
|
||||
cache-matched-key:
|
||||
description: 'The key that was used to restore the cache (exact or prefix match)'
|
||||
value: ${{ steps.restore-cache.outputs.cache-matched-key }}
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Clear cache if requested via commit message
|
||||
shell: bash
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
echo "=========================================="
|
||||
echo "${{ inputs.cache-type }} cache clear tag detection"
|
||||
echo "=========================================="
|
||||
echo "Searching for: [ci-ga-clear-cache] or [ci-ga-clear-cache:*]"
|
||||
echo ""
|
||||
|
||||
CACHE_KEY="${{ inputs.key }}"
|
||||
|
||||
# Extract search terms if present (e.g., "ccache" from "[ci-ga-clear-cache:ccache]")
|
||||
SEARCH_TERMS=$(echo "${XAHAU_GA_COMMIT_MSG}" | grep -o '\[ci-ga-clear-cache:[^]]*\]' | sed 's/\[ci-ga-clear-cache://;s/\]//' || echo "")
|
||||
|
||||
SHOULD_CLEAR=false
|
||||
|
||||
if [ -n "${SEARCH_TERMS}" ]; then
|
||||
# Search terms provided - check if THIS cache key matches ALL terms (AND logic)
|
||||
echo "🔍 [ci-ga-clear-cache:${SEARCH_TERMS}] detected"
|
||||
echo "Checking if cache key matches search terms..."
|
||||
echo " Cache key: ${CACHE_KEY}"
|
||||
echo " Search terms: ${SEARCH_TERMS}"
|
||||
echo ""
|
||||
|
||||
MATCHES=true
|
||||
for term in ${SEARCH_TERMS}; do
|
||||
if ! echo "${CACHE_KEY}" | grep -q "${term}"; then
|
||||
MATCHES=false
|
||||
echo " ✗ Key does not contain '${term}'"
|
||||
break
|
||||
else
|
||||
echo " ✓ Key contains '${term}'"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "${MATCHES}" = "true" ]; then
|
||||
echo ""
|
||||
echo "✅ Cache key matches all search terms - will clear cache"
|
||||
SHOULD_CLEAR=true
|
||||
else
|
||||
echo ""
|
||||
echo "⏭️ Cache key doesn't match search terms - skipping cache clear"
|
||||
fi
|
||||
elif echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-clear-cache\]'; then
|
||||
# No search terms - always clear this job's cache
|
||||
echo "🗑️ [ci-ga-clear-cache] detected in commit message"
|
||||
echo "Clearing ${{ inputs.cache-type }} cache for key: ${CACHE_KEY}"
|
||||
SHOULD_CLEAR=true
|
||||
fi
|
||||
|
||||
if [ "${SHOULD_CLEAR}" = "true" ]; then
|
||||
echo ""
|
||||
echo "Deleting ${{ inputs.cache-type }} caches via GitHub API..."
|
||||
|
||||
# Delete primary cache key
|
||||
echo "Checking for cache: ${CACHE_KEY}"
|
||||
if gh cache list --key "${CACHE_KEY}" --json key --jq '.[].key' | grep -q "${CACHE_KEY}"; then
|
||||
echo " Deleting: ${CACHE_KEY}"
|
||||
gh cache delete "${CACHE_KEY}" || true
|
||||
echo " ✓ Deleted"
|
||||
else
|
||||
echo " ℹ️ Not found"
|
||||
fi
|
||||
|
||||
# Delete additional keys if provided
|
||||
if [ -n "${{ inputs.additional-clear-keys }}" ]; then
|
||||
echo ""
|
||||
echo "Checking additional keys..."
|
||||
while IFS= read -r key; do
|
||||
[ -z "${key}" ] && continue
|
||||
echo "Checking for cache: ${key}"
|
||||
if gh cache list --key "${key}" --json key --jq '.[].key' | grep -q "${key}"; then
|
||||
echo " Deleting: ${key}"
|
||||
gh cache delete "${key}" || true
|
||||
echo " ✓ Deleted"
|
||||
else
|
||||
echo " ℹ️ Not found"
|
||||
fi
|
||||
done <<< "${{ inputs.additional-clear-keys }}"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "✅ ${{ inputs.cache-type }} cache cleared successfully"
|
||||
echo "Build will proceed from scratch"
|
||||
else
|
||||
echo ""
|
||||
echo "ℹ️ No ${{ inputs.cache-type }} cache clear requested"
|
||||
fi
|
||||
echo "=========================================="
|
||||
|
||||
- name: Restore cache
|
||||
id: restore-cache
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ${{ inputs.path }}
|
||||
key: ${{ inputs.key }}
|
||||
restore-keys: ${{ inputs.restore-keys }}
|
||||
fail-on-cache-miss: ${{ inputs.fail-on-cache-miss }}
|
||||
lookup-only: ${{ inputs.lookup-only }}
|
||||
146
.github/actions/xahau-ga-dependencies/action.yml
vendored
146
.github/actions/xahau-ga-dependencies/action.yml
vendored
@@ -10,21 +10,46 @@ inputs:
|
||||
required: false
|
||||
default: '.build'
|
||||
compiler-id:
|
||||
description: 'Unique identifier for compiler/version combination used for cache keys'
|
||||
description: 'Unique identifier: compiler-version-stdlib[-gccversion] (e.g. clang-14-libstdcxx-gcc11, gcc-13-libstdcxx)'
|
||||
required: false
|
||||
default: ''
|
||||
cache_version:
|
||||
description: 'Cache version for invalidation'
|
||||
required: false
|
||||
default: '1'
|
||||
cache_enabled:
|
||||
description: 'Whether to use caching'
|
||||
required: false
|
||||
default: 'true'
|
||||
main_branch:
|
||||
description: 'Main branch name for restore keys'
|
||||
required: false
|
||||
default: 'dev'
|
||||
os:
|
||||
description: 'Operating system (Linux, Macos)'
|
||||
required: false
|
||||
default: 'Linux'
|
||||
arch:
|
||||
description: 'Architecture (x86_64, armv8)'
|
||||
required: false
|
||||
default: 'x86_64'
|
||||
compiler:
|
||||
description: 'Compiler type (gcc, clang, apple-clang)'
|
||||
required: true
|
||||
compiler_version:
|
||||
description: 'Compiler version (11, 13, 14, etc.)'
|
||||
required: true
|
||||
cc:
|
||||
description: 'C compiler executable (gcc-13, clang-14, etc.), empty for macOS'
|
||||
required: false
|
||||
default: ''
|
||||
cxx:
|
||||
description: 'C++ compiler executable (g++-14, clang++-14, etc.), empty for macOS'
|
||||
required: false
|
||||
default: ''
|
||||
stdlib:
|
||||
description: 'C++ standard library for Conan configuration (note: also in compiler-id)'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- libstdcxx
|
||||
- libcxx
|
||||
|
||||
outputs:
|
||||
cache-hit:
|
||||
@@ -34,36 +59,96 @@ outputs:
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Generate safe branch name
|
||||
if: inputs.cache_enabled == 'true'
|
||||
id: safe-branch
|
||||
- name: Configure Conan cache paths
|
||||
if: inputs.os == 'Linux'
|
||||
shell: bash
|
||||
run: |
|
||||
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
|
||||
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
|
||||
mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
|
||||
echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
|
||||
echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
|
||||
echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
|
||||
|
||||
- name: Restore Conan cache
|
||||
if: inputs.cache_enabled == 'true'
|
||||
id: cache-restore-conan
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: |
|
||||
~/.conan
|
||||
~/.conan2
|
||||
key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-
|
||||
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
|
||||
${{ runner.os }}-conan-v${{ inputs.cache_version }}-
|
||||
- name: Configure Conan cache paths
|
||||
if: inputs.gha_cache_enabled == 'false'
|
||||
shell: bash
|
||||
# For self-hosted runners, register cache paths to be used as volumes
|
||||
# This allows the cache to be shared between containers
|
||||
run: |
|
||||
mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
|
||||
echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
|
||||
echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
|
||||
echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
|
||||
|
||||
- name: Configure Conan
|
||||
shell: bash
|
||||
run: |
|
||||
# Create the default profile directory if it doesn't exist
|
||||
mkdir -p ~/.conan2/profiles
|
||||
|
||||
# Determine the correct libcxx based on stdlib parameter
|
||||
if [ "${{ inputs.stdlib }}" = "libcxx" ]; then
|
||||
LIBCXX="libc++"
|
||||
else
|
||||
LIBCXX="libstdc++11"
|
||||
fi
|
||||
|
||||
# Create profile with our specific settings
|
||||
# This overwrites any cached profile to ensure fresh configuration
|
||||
cat > ~/.conan2/profiles/default <<EOF
|
||||
[settings]
|
||||
arch=${{ inputs.arch }}
|
||||
build_type=${{ inputs.configuration }}
|
||||
compiler=${{ inputs.compiler }}
|
||||
compiler.cppstd=20
|
||||
compiler.libcxx=${LIBCXX}
|
||||
compiler.version=${{ inputs.compiler_version }}
|
||||
os=${{ inputs.os }}
|
||||
EOF
|
||||
|
||||
# Add buildenv and conf sections for Linux (not needed for macOS)
|
||||
if [ "${{ inputs.os }}" = "Linux" ] && [ -n "${{ inputs.cc }}" ]; then
|
||||
cat >> ~/.conan2/profiles/default <<EOF
|
||||
|
||||
[buildenv]
|
||||
CC=/usr/bin/${{ inputs.cc }}
|
||||
CXX=/usr/bin/${{ inputs.cxx }}
|
||||
|
||||
[conf]
|
||||
tools.build:compiler_executables={"c": "/usr/bin/${{ inputs.cc }}", "cpp": "/usr/bin/${{ inputs.cxx }}"}
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Add macOS-specific conf if needed
|
||||
if [ "${{ inputs.os }}" = "Macos" ]; then
|
||||
cat >> ~/.conan2/profiles/default <<EOF
|
||||
|
||||
[conf]
|
||||
# Workaround for gRPC with newer Apple Clang
|
||||
tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Display profile for verification
|
||||
conan profile show
|
||||
|
||||
- name: Export custom recipes
|
||||
shell: bash
|
||||
run: |
|
||||
conan export external/snappy snappy/1.1.10@
|
||||
conan export external/soci soci/4.0.3@
|
||||
|
||||
# Export snappy if not already exported
|
||||
conan list snappy/1.1.10@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
|
||||
conan export external/snappy --version 1.1.10 --user xahaud --channel stable
|
||||
|
||||
# Export soci if not already exported
|
||||
conan list soci/4.0.3@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
|
||||
conan export external/soci --version 4.0.3 --user xahaud --channel stable
|
||||
|
||||
# Export wasmedge if not already exported
|
||||
conan list wasmedge/0.11.2@xahaud/stable 2>/dev/null | (grep -q "not found" && exit 1 || exit 0) || \
|
||||
conan export external/wasmedge --version 0.11.2 --user xahaud --channel stable
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
env:
|
||||
CONAN_REQUEST_TIMEOUT: 180 # Increase timeout to 3 minutes for slow mirrors
|
||||
run: |
|
||||
# Create build directory
|
||||
mkdir -p ${{ inputs.build_dir }}
|
||||
@@ -75,12 +160,3 @@ runs:
|
||||
--build missing \
|
||||
--settings build_type=${{ inputs.configuration }} \
|
||||
..
|
||||
|
||||
- name: Save Conan cache
|
||||
if: inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: |
|
||||
~/.conan
|
||||
~/.conan2
|
||||
key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
|
||||
|
||||
74
.github/actions/xahau-ga-get-commit-message/action.yml
vendored
Normal file
74
.github/actions/xahau-ga-get-commit-message/action.yml
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
name: 'Get Commit Message'
|
||||
description: 'Gets commit message for both push and pull_request events and sets XAHAU_GA_COMMIT_MSG env var'
|
||||
|
||||
inputs:
|
||||
event-name:
|
||||
description: 'The event name (push or pull_request)'
|
||||
required: true
|
||||
head-commit-message:
|
||||
description: 'The head commit message (for push events)'
|
||||
required: false
|
||||
default: ''
|
||||
pr-head-sha:
|
||||
description: 'The PR head SHA (for pull_request events)'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Get commit message and set environment variable
|
||||
shell: python
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
import json
|
||||
import os
|
||||
import secrets
|
||||
import urllib.request
|
||||
|
||||
event_name = "${{ inputs.event-name }}"
|
||||
pr_head_sha = "${{ inputs.pr-head-sha }}"
|
||||
repository = "${{ github.repository }}"
|
||||
|
||||
print("==========================================")
|
||||
print("Setting XAHAU_GA_COMMIT_MSG environment variable")
|
||||
print("==========================================")
|
||||
print(f"Event: {event_name}")
|
||||
|
||||
if event_name == 'push':
|
||||
# For push events, use the input directly
|
||||
message = """${{ inputs.head-commit-message }}"""
|
||||
print("Source: workflow input (github.event.head_commit.message)")
|
||||
elif event_name == 'pull_request' and pr_head_sha:
|
||||
# For PR events, fetch via GitHub API
|
||||
print(f"Source: GitHub API (fetching commit {pr_head_sha})")
|
||||
try:
|
||||
url = f"https://api.github.com/repos/{repository}/commits/{pr_head_sha}"
|
||||
req = urllib.request.Request(url, headers={
|
||||
"Accept": "application/vnd.github.v3+json",
|
||||
"Authorization": f"Bearer {os.environ.get('GH_TOKEN', '')}"
|
||||
})
|
||||
with urllib.request.urlopen(req) as response:
|
||||
data = json.load(response)
|
||||
message = data["commit"]["message"]
|
||||
except Exception as e:
|
||||
print(f"Failed to fetch commit message: {e}")
|
||||
message = ""
|
||||
else:
|
||||
message = ""
|
||||
print(f"Warning: Unknown event type: {event_name}")
|
||||
|
||||
print(f"Commit message (first 100 chars): {message[:100]}")
|
||||
|
||||
# Write to GITHUB_ENV using heredoc with random delimiter (prevents injection attacks)
|
||||
# See: https://securitylab.github.com/resources/github-actions-untrusted-input/
|
||||
delimiter = f"EOF_{secrets.token_hex(16)}"
|
||||
|
||||
with open(os.environ['GITHUB_ENV'], 'a') as f:
|
||||
f.write(f'XAHAU_GA_COMMIT_MSG<<{delimiter}\n')
|
||||
f.write(message)
|
||||
f.write(f'\n{delimiter}\n')
|
||||
|
||||
print(f"✓ XAHAU_GA_COMMIT_MSG set (available to all subsequent steps)")
|
||||
print("==========================================")
|
||||
6
.github/workflows/build-in-docker.yml
vendored
6
.github/workflows/build-in-docker.yml
vendored
@@ -33,7 +33,7 @@ jobs:
|
||||
fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
|
||||
|
||||
build:
|
||||
runs-on: [self-hosted, vanity]
|
||||
runs-on: [self-hosted, xahaud-build]
|
||||
needs: [checkout]
|
||||
defaults:
|
||||
run:
|
||||
@@ -74,7 +74,7 @@ jobs:
|
||||
fi
|
||||
|
||||
tests:
|
||||
runs-on: [self-hosted, vanity]
|
||||
runs-on: [self-hosted, xahaud-build]
|
||||
needs: [build, checkout]
|
||||
defaults:
|
||||
run:
|
||||
@@ -84,7 +84,7 @@ jobs:
|
||||
run: /bin/bash docker-unit-tests.sh
|
||||
|
||||
cleanup:
|
||||
runs-on: [self-hosted, vanity]
|
||||
runs-on: [self-hosted, xahaud-build]
|
||||
needs: [tests, checkout]
|
||||
if: always()
|
||||
steps:
|
||||
|
||||
24
.github/workflows/guard-checker-build.yml
vendored
Normal file
24
.github/workflows/guard-checker-build.yml
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
name: Guard Checker Build
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
guard-checker-build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- run-on: ubuntu-latest
|
||||
- run-on: macos-latest
|
||||
runs-on: ${{ matrix.run-on }}
|
||||
name: Guard Checker Build - ${{ matrix.run-on }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Build Guard Checker
|
||||
run: |
|
||||
cd include/xrpl/hook
|
||||
make guard_checker
|
||||
50
.github/workflows/verify-generated-headers.yml
vendored
Normal file
50
.github/workflows/verify-generated-headers.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: Verify Generated Hook Headers
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
verify-generated-headers:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- target: hook/error.h
|
||||
generator: ./hook/generate_error.sh
|
||||
- target: hook/extern.h
|
||||
generator: ./hook/generate_extern.sh
|
||||
- target: hook/sfcodes.h
|
||||
generator: bash ./hook/generate_sfcodes.sh
|
||||
- target: hook/tts.h
|
||||
generator: ./hook/generate_tts.sh
|
||||
runs-on: ubuntu-24.04
|
||||
env:
|
||||
CLANG_VERSION: 18
|
||||
name: ${{ matrix.target }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install clang-format
|
||||
run: |
|
||||
codename=$( lsb_release --codename --short )
|
||||
sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
|
||||
deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
|
||||
deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
|
||||
EOF
|
||||
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
|
||||
sudo apt-get update
|
||||
sudo apt-get install clang-format-${CLANG_VERSION}
|
||||
sudo ln -sf /usr/bin/clang-format-${CLANG_VERSION} /usr/local/bin/clang-format
|
||||
|
||||
- name: Verify ${{ matrix.target }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
chmod +x hook/generate_*.sh || true
|
||||
|
||||
tmp=$(mktemp)
|
||||
trap 'rm -f "$tmp"' EXIT
|
||||
|
||||
${{ matrix.generator }} > "$tmp"
|
||||
diff -u ${{ matrix.target }} "$tmp"
|
||||
97
.github/workflows/xahau-ga-macos.yml
vendored
97
.github/workflows/xahau-ga-macos.yml
vendored
@@ -4,7 +4,10 @@ on:
|
||||
push:
|
||||
branches: ["dev", "candidate", "release"]
|
||||
pull_request:
|
||||
branches: ["dev", "candidate", "release"]
|
||||
branches: ["**"]
|
||||
types: [opened, synchronize, reopened, labeled, unlabeled]
|
||||
schedule:
|
||||
- cron: '0 0 * * *'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
@@ -12,68 +15,57 @@ concurrency:
|
||||
|
||||
jobs:
|
||||
test:
|
||||
if: >
|
||||
github.event_name != 'pull_request' ||
|
||||
contains(fromJson('["dev","candidate","release"]'), github.base_ref) ||
|
||||
contains(join(github.event.pull_request.labels.*.name, ','), 'ci-full-build')
|
||||
strategy:
|
||||
matrix:
|
||||
generator:
|
||||
- Ninja
|
||||
configuration:
|
||||
- Debug
|
||||
runs-on: macos-15
|
||||
runs-on: [self-hosted, macOS]
|
||||
env:
|
||||
build_dir: .build
|
||||
# Bump this number to invalidate all caches globally.
|
||||
CACHE_VERSION: 1
|
||||
CACHE_VERSION: 3
|
||||
MAIN_BRANCH_NAME: dev
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Conan
|
||||
- name: Add Homebrew to PATH
|
||||
run: |
|
||||
brew install conan@1
|
||||
# Add Conan 1 to the PATH for this job
|
||||
echo "$(brew --prefix conan@1)/bin" >> $GITHUB_PATH
|
||||
echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
|
||||
echo "/opt/homebrew/sbin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Install Coreutils
|
||||
run: |
|
||||
brew install coreutils
|
||||
echo "Num proc: $(nproc)"
|
||||
|
||||
- name: Install Ninja
|
||||
if: matrix.generator == 'Ninja'
|
||||
run: brew install ninja
|
||||
|
||||
- name: Install Python
|
||||
run: |
|
||||
if which python3 > /dev/null 2>&1; then
|
||||
echo "Python 3 executable exists"
|
||||
python3 --version
|
||||
else
|
||||
brew install python@3.12
|
||||
fi
|
||||
# Create 'python' symlink if it doesn't exist (for tools expecting 'python')
|
||||
if ! which python > /dev/null 2>&1; then
|
||||
sudo ln -sf $(which python3) /usr/local/bin/python
|
||||
fi
|
||||
|
||||
- name: Install CMake
|
||||
run: |
|
||||
if which cmake > /dev/null 2>&1; then
|
||||
echo "cmake executable exists"
|
||||
cmake --version
|
||||
else
|
||||
brew install cmake
|
||||
fi
|
||||
|
||||
- name: Install ccache
|
||||
run: brew install ccache
|
||||
|
||||
- name: Configure ccache
|
||||
uses: ./.github/actions/xahau-configure-ccache
|
||||
# To isolate environments for each Runner, instead of installing globally with brew,
|
||||
# use mise to isolate environments for each Runner directory.
|
||||
- name: Setup toolchain (mise)
|
||||
uses: jdx/mise-action@v3.6.1
|
||||
with:
|
||||
max_size: 2G
|
||||
hash_dir: true
|
||||
compiler_check: content
|
||||
cache: false
|
||||
install: true
|
||||
mise_toml: |
|
||||
[tools]
|
||||
cmake = "3.25.3"
|
||||
python = "3.12"
|
||||
pipx = "latest"
|
||||
conan = "2"
|
||||
ninja = "latest"
|
||||
ccache = "latest"
|
||||
|
||||
- name: Install tools via mise
|
||||
run: |
|
||||
mise install
|
||||
mise reshim
|
||||
echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Check environment
|
||||
run: |
|
||||
@@ -87,10 +79,20 @@ jobs:
|
||||
echo "---- Full Environment ----"
|
||||
env
|
||||
|
||||
- name: Configure Conan
|
||||
- name: Get commit message
|
||||
id: get-commit-message
|
||||
uses: ./.github/actions/xahau-ga-get-commit-message
|
||||
with:
|
||||
event-name: ${{ github.event_name }}
|
||||
head-commit-message: ${{ github.event.head_commit.message }}
|
||||
pr-head-sha: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Detect compiler version
|
||||
id: detect-compiler
|
||||
run: |
|
||||
conan profile new default --detect || true # Ignore error if profile exists
|
||||
conan profile update settings.compiler.cppstd=20 default
|
||||
COMPILER_VERSION=$(clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+')
|
||||
echo "compiler_version=${COMPILER_VERSION}" >> $GITHUB_OUTPUT
|
||||
echo "Detected Apple Clang version: ${COMPILER_VERSION}"
|
||||
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/xahau-ga-dependencies
|
||||
@@ -100,6 +102,11 @@ jobs:
|
||||
compiler-id: clang
|
||||
cache_version: ${{ env.CACHE_VERSION }}
|
||||
main_branch: ${{ env.MAIN_BRANCH_NAME }}
|
||||
os: Macos
|
||||
arch: armv8
|
||||
compiler: apple-clang
|
||||
compiler_version: ${{ steps.detect-compiler.outputs.compiler_version }}
|
||||
stdlib: libcxx
|
||||
|
||||
- name: Build
|
||||
uses: ./.github/actions/xahau-ga-build
|
||||
@@ -110,6 +117,8 @@ jobs:
|
||||
compiler-id: clang
|
||||
cache_version: ${{ env.CACHE_VERSION }}
|
||||
main_branch: ${{ env.MAIN_BRANCH_NAME }}
|
||||
stdlib: libcxx
|
||||
ccache_max_size: '100G'
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
|
||||
334
.github/workflows/xahau-ga-nix.yml
vendored
334
.github/workflows/xahau-ga-nix.yml
vendored
@@ -4,31 +4,224 @@ on:
|
||||
push:
|
||||
branches: ["dev", "candidate", "release"]
|
||||
pull_request:
|
||||
branches: ["dev", "candidate", "release"]
|
||||
branches: ["**"]
|
||||
types: [opened, synchronize, reopened, labeled, unlabeled]
|
||||
schedule:
|
||||
- cron: '0 0 * * *'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
full_matrix:
|
||||
description: "Force full matrix (6 configs)"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-job:
|
||||
runs-on: ubuntu-latest
|
||||
matrix-setup:
|
||||
if: >
|
||||
github.event_name != 'pull_request' ||
|
||||
contains(fromJson('["dev","candidate","release"]'), github.base_ref) ||
|
||||
contains(join(github.event.pull_request.labels.*.name, ','), 'ci-full-build')
|
||||
runs-on: [self-hosted, generic, 20.04]
|
||||
container: python:3-slim
|
||||
outputs:
|
||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||
steps:
|
||||
- name: escape double quotes
|
||||
id: escape
|
||||
shell: bash
|
||||
env:
|
||||
PR_TITLE: ${{ github.event.pull_request.title }}
|
||||
run: |
|
||||
ESCAPED_PR_TITLE="${PR_TITLE//\"/\\\"}"
|
||||
echo "title=${ESCAPED_PR_TITLE}" >> "$GITHUB_OUTPUT"
|
||||
- name: Generate build matrix
|
||||
id: set-matrix
|
||||
shell: python
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
import json
|
||||
import os
|
||||
import urllib.request
|
||||
|
||||
# Full matrix with all 6 compiler configurations
|
||||
# Each configuration includes all parameters needed by the build job
|
||||
full_matrix = [
|
||||
{
|
||||
"compiler_id": "gcc-11-libstdcxx",
|
||||
"compiler": "gcc",
|
||||
"cc": "gcc-11",
|
||||
"cxx": "g++-11",
|
||||
"compiler_version": 11,
|
||||
"stdlib": "libstdcxx",
|
||||
"configuration": "Debug"
|
||||
},
|
||||
{
|
||||
"compiler_id": "gcc-13-libstdcxx",
|
||||
"compiler": "gcc",
|
||||
"cc": "gcc-13",
|
||||
"cxx": "g++-13",
|
||||
"compiler_version": 13,
|
||||
"stdlib": "libstdcxx",
|
||||
"configuration": "Debug"
|
||||
},
|
||||
{
|
||||
"compiler_id": "clang-14-libstdcxx-gcc11",
|
||||
"compiler": "clang",
|
||||
"cc": "clang-14",
|
||||
"cxx": "clang++-14",
|
||||
"compiler_version": 14,
|
||||
"stdlib": "libstdcxx",
|
||||
"clang_gcc_toolchain": 11,
|
||||
"configuration": "Debug"
|
||||
},
|
||||
{
|
||||
"compiler_id": "clang-16-libstdcxx-gcc13",
|
||||
"compiler": "clang",
|
||||
"cc": "clang-16",
|
||||
"cxx": "clang++-16",
|
||||
"compiler_version": 16,
|
||||
"stdlib": "libstdcxx",
|
||||
"clang_gcc_toolchain": 13,
|
||||
"configuration": "Debug"
|
||||
},
|
||||
{
|
||||
"compiler_id": "clang-17-libcxx",
|
||||
"compiler": "clang",
|
||||
"cc": "clang-17",
|
||||
"cxx": "clang++-17",
|
||||
"compiler_version": 17,
|
||||
"stdlib": "libcxx",
|
||||
"configuration": "Debug"
|
||||
},
|
||||
{
|
||||
# Clang 18 - testing if it's faster than Clang 17 with libc++
|
||||
# Requires patching Conan v1 settings.yml to add version 18
|
||||
"compiler_id": "clang-18-libcxx",
|
||||
"compiler": "clang",
|
||||
"cc": "clang-18",
|
||||
"cxx": "clang++-18",
|
||||
"compiler_version": 18,
|
||||
"stdlib": "libcxx",
|
||||
"configuration": "Debug"
|
||||
}
|
||||
]
|
||||
|
||||
# Minimal matrix for PRs and feature branches
|
||||
minimal_matrix = [
|
||||
full_matrix[1], # gcc-13 (middle-ground gcc)
|
||||
full_matrix[2] # clang-14 (mature, stable clang)
|
||||
]
|
||||
|
||||
# Determine which matrix to use based on the target branch
|
||||
ref = "${{ github.ref }}"
|
||||
base_ref = "${{ github.base_ref }}" # For PRs, this is the target branch
|
||||
event_name = "${{ github.event_name }}"
|
||||
pr_title = """${{ steps.escape.outputs.title }}"""
|
||||
pr_labels = """${{ join(github.event.pull_request.labels.*.name, ',') }}"""
|
||||
pr_head_sha = "${{ github.event.pull_request.head.sha }}"
|
||||
|
||||
# Get commit message - for PRs, fetch via API since head_commit.message is empty
|
||||
if event_name == "pull_request" and pr_head_sha:
|
||||
try:
|
||||
url = f"https://api.github.com/repos/${{ github.repository }}/commits/{pr_head_sha}"
|
||||
req = urllib.request.Request(url, headers={
|
||||
"Accept": "application/vnd.github.v3+json",
|
||||
"Authorization": f"Bearer {os.environ.get('GH_TOKEN', '')}"
|
||||
})
|
||||
with urllib.request.urlopen(req) as response:
|
||||
data = json.load(response)
|
||||
commit_message = data["commit"]["message"]
|
||||
except Exception as e:
|
||||
print(f"Failed to fetch commit message: {e}")
|
||||
commit_message = ""
|
||||
else:
|
||||
commit_message = """${{ github.event.head_commit.message }}"""
|
||||
|
||||
# Debug logging
|
||||
print(f"Event: {event_name}")
|
||||
print(f"Ref: {ref}")
|
||||
print(f"Base ref: {base_ref}")
|
||||
print(f"PR head SHA: {pr_head_sha}")
|
||||
print(f"PR title: {pr_title}")
|
||||
print(f"PR labels: {pr_labels}")
|
||||
print(f"Commit message: {commit_message}")
|
||||
|
||||
# Manual trigger input to force full matrix.
|
||||
manual_full = "${{ github.event.inputs.full_matrix || 'false' }}" == "true"
|
||||
|
||||
# Label/manual overrides, while preserving existing title/commit behavior.
|
||||
force_full = (
|
||||
manual_full
|
||||
or "[ci-nix-full-matrix]" in commit_message
|
||||
or "[ci-nix-full-matrix]" in pr_title
|
||||
or ("ci-full-build" in pr_labels and "ci-nix-full-matrix" in pr_labels)
|
||||
)
|
||||
force_min = (
|
||||
"ci-full-build" in pr_labels
|
||||
)
|
||||
print(f"Force full matrix: {force_full}")
|
||||
print(f"Force min matrix: {force_min}")
|
||||
|
||||
# Check if this is targeting a main branch
|
||||
# For PRs: check base_ref (target branch)
|
||||
# For pushes: check ref (current branch)
|
||||
main_branches = ["refs/heads/dev", "refs/heads/release", "refs/heads/candidate"]
|
||||
|
||||
if force_full:
|
||||
# Override: always use full matrix if forced by manual input or label.
|
||||
use_full = True
|
||||
elif force_min:
|
||||
# Override: always use minimal matrix if ci-full-build label is present.
|
||||
use_full = False
|
||||
elif event_name == "pull_request":
|
||||
# For PRs, base_ref is just the branch name (e.g., "dev", not "refs/heads/dev")
|
||||
# Check if the PR targets release or candidate (more critical branches)
|
||||
use_full = base_ref in ["release", "candidate"]
|
||||
else:
|
||||
# For pushes, ref is the full reference (e.g., "refs/heads/dev")
|
||||
use_full = ref in main_branches
|
||||
|
||||
# Select the appropriate matrix
|
||||
if use_full:
|
||||
if force_full:
|
||||
print(f"Using FULL matrix (6 configs) - forced by [ci-nix-full-matrix] tag")
|
||||
else:
|
||||
print(f"Using FULL matrix (6 configs) - targeting main branch")
|
||||
matrix = full_matrix
|
||||
else:
|
||||
print(f"Using MINIMAL matrix (2 configs) - feature branch/PR")
|
||||
matrix = minimal_matrix
|
||||
|
||||
# Output the matrix as JSON
|
||||
output = json.dumps({"include": matrix})
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||
f.write(f"matrix={output}\n")
|
||||
|
||||
build:
|
||||
needs: matrix-setup
|
||||
runs-on: [self-hosted, generic, 20.04]
|
||||
container:
|
||||
image: ubuntu:24.04
|
||||
volumes:
|
||||
- /home/runner/.conan-cache:/.conan-cache
|
||||
- /home/runner/.ccache-cache:/github/home/.ccache-cache
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
outputs:
|
||||
artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
compiler: [gcc]
|
||||
configuration: [Debug]
|
||||
include:
|
||||
- compiler: gcc
|
||||
cc: gcc-11
|
||||
cxx: g++-11
|
||||
compiler_id: gcc-11
|
||||
matrix: ${{ fromJSON(needs.matrix-setup.outputs.matrix) }}
|
||||
env:
|
||||
build_dir: .build
|
||||
# Bump this number to invalidate all caches globally.
|
||||
CACHE_VERSION: 1
|
||||
CACHE_VERSION: 3
|
||||
MAIN_BRANCH_NAME: dev
|
||||
steps:
|
||||
- name: Checkout
|
||||
@@ -36,36 +229,80 @@ jobs:
|
||||
|
||||
- name: Install build dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
|
||||
# Install specific Conan version needed
|
||||
pip install --upgrade "conan<2"
|
||||
apt-get update
|
||||
apt-get install -y software-properties-common
|
||||
add-apt-repository ppa:ubuntu-toolchain-r/test -y
|
||||
apt-get update
|
||||
apt-get install -y python3 python-is-python3 pipx
|
||||
pipx ensurepath
|
||||
apt-get install -y cmake ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
|
||||
apt-get install -y perl # for openssl build
|
||||
apt-get install -y libsqlite3-dev # for xahaud build
|
||||
|
||||
- name: Configure ccache
|
||||
uses: ./.github/actions/xahau-configure-ccache
|
||||
with:
|
||||
max_size: 2G
|
||||
hash_dir: true
|
||||
compiler_check: content
|
||||
# Install the specific GCC version needed for Clang
|
||||
if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
|
||||
echo "=== Installing GCC ${{ matrix.clang_gcc_toolchain }} for Clang ==="
|
||||
apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev
|
||||
|
||||
- name: Configure Conan
|
||||
run: |
|
||||
conan profile new default --detect || true # Ignore error if profile exists
|
||||
conan profile update settings.compiler.cppstd=20 default
|
||||
conan profile update settings.compiler=${{ matrix.compiler }} default
|
||||
conan profile update settings.compiler.libcxx=libstdc++11 default
|
||||
conan profile update env.CC=/usr/bin/${{ matrix.cc }} default
|
||||
conan profile update env.CXX=/usr/bin/${{ matrix.cxx }} default
|
||||
conan profile update conf.tools.build:compiler_executables='{"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}' default
|
||||
|
||||
# Set correct compiler version based on matrix.compiler
|
||||
if [ "${{ matrix.compiler }}" = "gcc" ]; then
|
||||
conan profile update settings.compiler.version=11 default
|
||||
elif [ "${{ matrix.compiler }}" = "clang" ]; then
|
||||
conan profile update settings.compiler.version=14 default
|
||||
echo "=== GCC versions available after installation ==="
|
||||
ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d"
|
||||
fi
|
||||
# Display profile for verification
|
||||
conan profile show default
|
||||
|
||||
# For Clang < 16 with --gcc-toolchain, hide newer GCC versions
|
||||
# This is needed because --gcc-toolchain still picks the highest version
|
||||
#
|
||||
# THE GREAT GCC HIDING TRICK (for Clang < 16):
|
||||
# Clang versions before 16 don't have --gcc-install-dir, only --gcc-toolchain
|
||||
# which is deprecated and still uses discovery heuristics that ALWAYS pick
|
||||
# the highest version number. So we play a sneaky game...
|
||||
#
|
||||
# We rename newer GCC versions to very low integers (1, 2, 3...) which makes
|
||||
# Clang think they're ancient GCC versions. Since 11 > 3 > 2 > 1, Clang will
|
||||
# pick GCC 11 over our renamed versions. It's dumb but it works!
|
||||
#
|
||||
# Example: GCC 12→1, GCC 13→2, GCC 14→3, so Clang picks 11 (highest number)
|
||||
if [ -n "${{ matrix.clang_gcc_toolchain }}" ] && [ "${{ matrix.compiler_version }}" -lt "16" ]; then
|
||||
echo "=== Hiding GCC versions newer than ${{ matrix.clang_gcc_toolchain }} for Clang < 16 ==="
|
||||
target_version=${{ matrix.clang_gcc_toolchain }}
|
||||
counter=1 # Start with 1 - these will be seen as "GCC version 1, 2, 3" etc
|
||||
for dir in /usr/lib/gcc/x86_64-linux-gnu/*/; do
|
||||
if [ -d "$dir" ]; then
|
||||
version=$(basename "$dir")
|
||||
# Check if version is numeric and greater than target
|
||||
if [[ "$version" =~ ^[0-9]+$ ]] && [ "$version" -gt "$target_version" ]; then
|
||||
echo "Hiding GCC $version -> renaming to $counter (will be seen as GCC version $counter)"
|
||||
# Safety check: ensure target doesn't already exist
|
||||
if [ ! -e "/usr/lib/gcc/x86_64-linux-gnu/$counter" ]; then
|
||||
mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
|
||||
else
|
||||
echo "ERROR: Cannot rename GCC $version - /usr/lib/gcc/x86_64-linux-gnu/$counter already exists"
|
||||
exit 1
|
||||
fi
|
||||
counter=$((counter + 1))
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# Verify what Clang will use
|
||||
if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
|
||||
echo "=== Verifying GCC toolchain selection ==="
|
||||
echo "Available GCC versions:"
|
||||
ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d.*[0-9]+$" || true
|
||||
|
||||
echo ""
|
||||
echo "Clang's detected GCC installation:"
|
||||
${{ matrix.cxx }} -v -E -x c++ /dev/null -o /dev/null 2>&1 | grep "Found candidate GCC installation" || true
|
||||
fi
|
||||
|
||||
# Install libc++ dev packages if using libc++ (not needed for libstdc++)
|
||||
if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
|
||||
apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
|
||||
fi
|
||||
|
||||
# Install Conan 2
|
||||
pipx install "conan>=2.0,<3"
|
||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Check environment
|
||||
run: |
|
||||
@@ -79,6 +316,14 @@ jobs:
|
||||
echo "---- Full Environment ----"
|
||||
env
|
||||
|
||||
- name: Get commit message
|
||||
id: get-commit-message
|
||||
uses: ./.github/actions/xahau-ga-get-commit-message
|
||||
with:
|
||||
event-name: ${{ github.event_name }}
|
||||
head-commit-message: ${{ github.event.head_commit.message }}
|
||||
pr-head-sha: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/xahau-ga-dependencies
|
||||
with:
|
||||
@@ -87,6 +332,12 @@ jobs:
|
||||
compiler-id: ${{ matrix.compiler_id }}
|
||||
cache_version: ${{ env.CACHE_VERSION }}
|
||||
main_branch: ${{ env.MAIN_BRANCH_NAME }}
|
||||
compiler: ${{ matrix.compiler }}
|
||||
compiler_version: ${{ matrix.compiler_version }}
|
||||
cc: ${{ matrix.cc }}
|
||||
cxx: ${{ matrix.cxx }}
|
||||
stdlib: ${{ matrix.stdlib }}
|
||||
gha_cache_enabled: 'false' # Disable caching for self hosted runner
|
||||
|
||||
- name: Build
|
||||
uses: ./.github/actions/xahau-ga-build
|
||||
@@ -99,6 +350,9 @@ jobs:
|
||||
compiler-id: ${{ matrix.compiler_id }}
|
||||
cache_version: ${{ env.CACHE_VERSION }}
|
||||
main_branch: ${{ env.MAIN_BRANCH_NAME }}
|
||||
stdlib: ${{ matrix.stdlib }}
|
||||
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
|
||||
ccache_max_size: '100G'
|
||||
|
||||
- name: Set artifact name
|
||||
id: set-artifact-name
|
||||
@@ -120,4 +374,4 @@ jobs:
|
||||
else
|
||||
echo "Error: rippled executable not found in ${{ env.build_dir }}"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -24,6 +24,11 @@ bin/project-cache.jam
|
||||
|
||||
build/docker
|
||||
|
||||
# Ignore release builder files
|
||||
.env
|
||||
release-build
|
||||
cmake-*.tar.gz
|
||||
|
||||
# Ignore object files.
|
||||
*.o
|
||||
build
|
||||
@@ -71,6 +76,7 @@ docs/html_doc
|
||||
# Xcode
|
||||
.DS_Store
|
||||
*/build/*
|
||||
!/docs/build/
|
||||
*.pbxuser
|
||||
!default.pbxuser
|
||||
*.mode1v3
|
||||
|
||||
2
.vscode/settings.json
vendored
2
.vscode/settings.json
vendored
@@ -8,6 +8,6 @@
|
||||
"editor.semanticHighlighting.enabled": true,
|
||||
"editor.tabSize": 4,
|
||||
"editor.defaultFormatter": "xaver.clang-format",
|
||||
"editor.formatOnSave": false
|
||||
"editor.formatOnSave": true
|
||||
}
|
||||
}
|
||||
|
||||
352
BUILD.md
352
BUILD.md
@@ -1,7 +1,7 @@
|
||||
| :warning: **WARNING** :warning:
|
||||
|---|
|
||||
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |
|
||||
|
||||
> These instructions assume you have a C++ development environment ready
|
||||
> with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up
|
||||
> on Linux, macOS, or Windows, see [our guide](./docs/build/environment.md).
|
||||
>
|
||||
> These instructions also assume a basic familiarity with Conan and CMake.
|
||||
> If you are unfamiliar with Conan,
|
||||
> you can read our [crash course](./docs/build/conan.md)
|
||||
@@ -10,7 +10,7 @@
|
||||
## Branches
|
||||
|
||||
For a stable release, choose the `master` branch or one of the [tagged
|
||||
releases](https://github.com/ripple/rippled/releases).
|
||||
releases](https://github.com/Xahau/xahaud/releases).
|
||||
|
||||
```
|
||||
git checkout master
|
||||
@@ -29,179 +29,125 @@ branch.
|
||||
git checkout develop
|
||||
```
|
||||
|
||||
|
||||
## Minimum Requirements
|
||||
|
||||
See [System Requirements](https://xrpl.org/system-requirements.html).
|
||||
|
||||
Building rippled generally requires git, Python, Conan, CMake, and a C++ compiler. Some guidance on setting up such a [C++ development environment can be found here](./docs/build/environment.md).
|
||||
|
||||
- [Python 3.7](https://www.python.org/downloads/)
|
||||
- [Conan 1.60](https://conan.io/downloads.html)[^1]
|
||||
- [Conan 2.x](https://conan.io/downloads)
|
||||
- [CMake 3.16](https://cmake.org/download/)
|
||||
|
||||
[^1]: It is possible to build with Conan 2.x,
|
||||
but the instructions are significantly different,
|
||||
which is why we are not recommending it yet.
|
||||
Notably, the `conan profile update` command is removed in 2.x.
|
||||
Profiles must be edited by hand.
|
||||
|
||||
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
|
||||
`xahaud` is written in the C++20 dialect and includes the `<concepts>` header.
|
||||
The [minimum compiler versions][2] required are:
|
||||
|
||||
| Compiler | Version |
|
||||
|-------------|---------|
|
||||
| GCC | 11 |
|
||||
| GCC | 10 |
|
||||
| Clang | 13 |
|
||||
| Apple Clang | 13.1.6 |
|
||||
| MSVC | 19.23 |
|
||||
|
||||
### Linux
|
||||
We don't recommend Windows for `xahaud` production at this time. As of
|
||||
November 2025, Ubuntu has the highest level of quality assurance, testing,
|
||||
and support.
|
||||
|
||||
The Ubuntu operating system has received the highest level of
|
||||
quality assurance, testing, and support.
|
||||
Windows developers should use Visual Studio 2019. `xahaud` isn't
|
||||
compatible with [Boost](https://www.boost.org/) 1.78 or 1.79, and Conan
|
||||
can't build earlier Boost versions.
|
||||
|
||||
Here are [sample instructions for setting up a C++ development environment on Linux](./docs/build/environment.md#linux).
|
||||
**Note:** 32-bit Windows development isn't supported.
|
||||
|
||||
### Mac
|
||||
|
||||
Many rippled engineers use macOS for development.
|
||||
|
||||
Here are [sample instructions for setting up a C++ development environment on macOS](./docs/build/environment.md#macos).
|
||||
|
||||
### Windows
|
||||
|
||||
Windows is not recommended for production use at this time.
|
||||
|
||||
- Additionally, 32-bit Windows development is not supported.
|
||||
|
||||
[Boost]: https://www.boost.org/
|
||||
|
||||
## Steps
|
||||
|
||||
|
||||
### Set Up Conan
|
||||
|
||||
After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python, Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.
|
||||
|
||||
These instructions assume a basic familiarity with Conan and CMake.
|
||||
|
||||
If you are unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official [Getting Started][3] walkthrough.
|
||||
|
||||
You'll need at least one Conan profile:
|
||||
1. (Optional) If you've never used Conan, use autodetect to set up a default profile.
|
||||
|
||||
```
|
||||
conan profile new default --detect
|
||||
conan profile detect --force
|
||||
```
|
||||
|
||||
Update the compiler settings:
|
||||
2. Update the compiler settings.
|
||||
|
||||
For Conan 2, you can edit the profile directly at `~/.conan2/profiles/default`,
|
||||
or use the Conan CLI. Ensure C++20 is set:
|
||||
|
||||
```
|
||||
conan profile update settings.compiler.cppstd=20 default
|
||||
conan profile show
|
||||
```
|
||||
|
||||
Configure Conan (1.x only) to use recipe revisions:
|
||||
Look for `compiler.cppstd=20` in the output. If it's not set, edit the profile:
|
||||
|
||||
```
|
||||
conan config set general.revisions_enabled=1
|
||||
# Edit ~/.conan2/profiles/default and ensure these settings exist:
|
||||
[settings]
|
||||
compiler.cppstd=20
|
||||
```
|
||||
|
||||
**Linux** developers will commonly have a default Conan [profile][] that compiles
|
||||
with GCC and links with libstdc++.
|
||||
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
|
||||
then you will need to choose the `libstdc++11` ABI:
|
||||
Linux developers will commonly have a default Conan [profile][] that compiles
|
||||
with GCC and links with libstdc++.
|
||||
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
|
||||
then you will need to choose the `libstdc++11` ABI.
|
||||
|
||||
```
|
||||
conan profile update settings.compiler.libcxx=libstdc++11 default
|
||||
# In ~/.conan2/profiles/default, ensure:
|
||||
[settings]
|
||||
compiler.libcxx=libstdc++11
|
||||
```
|
||||
|
||||
On Windows, you should use the x64 native build tools.
|
||||
An easy way to do that is to run the shortcut "x64 Native Tools Command
|
||||
Prompt" for the version of Visual Studio that you have installed.
|
||||
|
||||
Ensure inter-operability between `boost::string_view` and `std::string_view` types:
|
||||
|
||||
```
|
||||
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_BEAST_USE_STD_STRING_VIEW"]' default
|
||||
conan profile update 'env.CXXFLAGS="-DBOOST_BEAST_USE_STD_STRING_VIEW"' default
|
||||
```
|
||||
|
||||
If you have other flags in the `conf.tools.build` or `env.CXXFLAGS` sections, make sure to retain the existing flags and append the new ones. You can check them with:
|
||||
```
|
||||
conan profile show default
|
||||
```
|
||||
|
||||
|
||||
**Windows** developers may need to use the x64 native build tools.
|
||||
An easy way to do that is to run the shortcut "x64 Native Tools Command
|
||||
Prompt" for the version of Visual Studio that you have installed.
|
||||
|
||||
Windows developers must also build `rippled` and its dependencies for the x64
|
||||
architecture:
|
||||
Windows developers must also build `xahaud` and its dependencies for the x64
|
||||
architecture.
|
||||
|
||||
```
|
||||
conan profile update settings.arch=x86_64 default
|
||||
# In ~/.conan2/profiles/default, ensure:
|
||||
[settings]
|
||||
arch=x86_64
|
||||
```
|
||||
|
||||
### Multiple compilers
|
||||
|
||||
When `/usr/bin/g++` exists on a platform, it is the default cpp compiler. This
|
||||
default works for some users.
|
||||
|
||||
However, if this compiler cannot build rippled or its dependencies, then you can
|
||||
install another compiler and set Conan and CMake to use it.
|
||||
Update the `conf.tools.build:compiler_executables` setting in order to set the correct variables (`CMAKE_<LANG>_COMPILER`) in the
|
||||
generated CMake toolchain file.
|
||||
For example, on Ubuntu 20, you may have gcc at `/usr/bin/gcc` and g++ at `/usr/bin/g++`; if that is the case, you can select those compilers with:
|
||||
```
|
||||
conan profile update 'conf.tools.build:compiler_executables={"c": "/usr/bin/gcc", "cpp": "/usr/bin/g++"}' default
|
||||
```
|
||||
|
||||
Replace `/usr/bin/gcc` and `/usr/bin/g++` with paths to the desired compilers.
|
||||
|
||||
It should choose the compiler for dependencies as well,
|
||||
but not all of them have a Conan recipe that respects this setting (yet).
|
||||
For the rest, you can set these environment variables.
|
||||
Replace `<path>` with paths to the desired compilers:
|
||||
|
||||
- `conan profile update env.CC=<path> default`
|
||||
- `conan profile update env.CXX=<path> default`
|
||||
|
||||
Export our [Conan recipe for Snappy](./external/snappy).
|
||||
It does not explicitly link the C++ standard library,
|
||||
which allows you to statically link it with GCC, if you want.
|
||||
3. (Optional) If you have multiple compilers installed on your platform,
|
||||
make sure that Conan and CMake select the one you want to use.
|
||||
This setting will set the correct variables (`CMAKE_<LANG>_COMPILER`)
|
||||
in the generated CMake toolchain file.
|
||||
|
||||
```
|
||||
# Conan 1.x
|
||||
conan export external/snappy snappy/1.1.10@
|
||||
# Conan 2.x
|
||||
conan export --version 1.1.10 external/snappy
|
||||
# In ~/.conan2/profiles/default, add under [conf] section:
|
||||
[conf]
|
||||
tools.build:compiler_executables={"c": "<path>", "cpp": "<path>"}
|
||||
```
|
||||
|
||||
Export our [Conan recipe for RocksDB](./external/rocksdb).
|
||||
It does not override paths to dependencies when building with Visual Studio.
|
||||
For setting environment variables for dependencies:
|
||||
|
||||
```
|
||||
# Conan 1.x
|
||||
conan export external/rocksdb rocksdb/6.29.5@
|
||||
# Conan 2.x
|
||||
conan export --version 6.29.5 external/rocksdb
|
||||
# In ~/.conan2/profiles/default, add under [buildenv] section:
|
||||
[buildenv]
|
||||
CC=<path>
|
||||
CXX=<path>
|
||||
```
|
||||
|
||||
Export our [Conan recipe for SOCI](./external/soci).
|
||||
It patches their CMake to correctly import its dependencies.
|
||||
4. Export our [Conan recipe for Snappy](./external/snappy).
|
||||
It doesn't explicitly link the C++ standard library,
|
||||
which allows you to statically link it with GCC, if you want.
|
||||
|
||||
```
|
||||
# Conan 1.x
|
||||
conan export external/soci soci/4.0.3@
|
||||
# Conan 2.x
|
||||
conan export --version 4.0.3 external/soci
|
||||
conan export external/snappy --version 1.1.10 --user xahaud --channel stable
|
||||
```
|
||||
|
||||
Export our [Conan recipe for NuDB](./external/nudb).
|
||||
It fixes some source files to add missing `#include`s.
|
||||
|
||||
5. Export our [Conan recipe for SOCI](./external/soci).
|
||||
It patches their CMake to correctly import its dependencies.
|
||||
|
||||
```
|
||||
# Conan 1.x
|
||||
conan export external/nudb nudb/2.0.8@
|
||||
# Conan 2.x
|
||||
conan export --version 2.0.8 external/nudb
|
||||
conan export external/soci --version 4.0.3 --user xahaud --channel stable
|
||||
```
|
||||
|
||||
6. Export our [Conan recipe for WasmEdge](./external/wasmedge).
|
||||
|
||||
```
|
||||
conan export external/wasmedge --version 0.11.2 --user xahaud --channel stable
|
||||
```
|
||||
|
||||
### Build and Test
|
||||
@@ -222,15 +168,13 @@ It fixes some source files to add missing `#include`s.
|
||||
the `install-folder` or `-if` option to every `conan install` command
|
||||
in the next step.
|
||||
|
||||
2. Use conan to generate CMake files for every configuration you want to build:
|
||||
2. Generate CMake files for every configuration you want to build.
|
||||
|
||||
```
|
||||
conan install .. --output-folder . --build missing --settings build_type=Release
|
||||
conan install .. --output-folder . --build missing --settings build_type=Debug
|
||||
```
|
||||
|
||||
To build Debug, in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`
|
||||
|
||||
For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
|
||||
you only need to run this command once.
|
||||
For a multi-configuration generator, e.g. `Visual Studio`, you may want to
|
||||
@@ -241,13 +185,13 @@ It fixes some source files to add missing `#include`s.
|
||||
generated by the first. You can pass the build type on the command line with
|
||||
`--settings build_type=$BUILD_TYPE` or in the profile itself,
|
||||
under the section `[settings]` with the key `build_type`.
|
||||
|
||||
|
||||
If you are using a Microsoft Visual C++ compiler,
|
||||
then you will need to ensure consistency between the `build_type` setting
|
||||
and the `compiler.runtime` setting.
|
||||
|
||||
|
||||
When `build_type` is `Release`, `compiler.runtime` should be `MT`.
|
||||
|
||||
|
||||
When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.
|
||||
|
||||
```
|
||||
@@ -259,31 +203,29 @@ It fixes some source files to add missing `#include`s.
|
||||
`$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.
|
||||
|
||||
Single-config generators:
|
||||
|
||||
```
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
|
||||
```
|
||||
|
||||
Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
|
||||
and make sure it matches the one of the `build_type` settings
|
||||
you chose in the previous step.
|
||||
and make sure it matches the `build_type` setting you chose in the previous
|
||||
step.
|
||||
|
||||
For example, to build Debug, in the next command, replace "Release" with "Debug"
|
||||
Multi-config generators:
|
||||
|
||||
```
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
|
||||
```
|
||||
|
||||
**Note:** You can pass build options for `xahaud` in this step.
|
||||
|
||||
Multi-config generators:
|
||||
|
||||
```
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
|
||||
```
|
||||
|
||||
**Note:** You can pass build options for `rippled` in this step.
|
||||
|
||||
5. Build `rippled`.
|
||||
4. Build `xahaud`.
|
||||
|
||||
For a single-configuration generator, it will build whatever configuration
|
||||
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
|
||||
you must pass the option `--config` to select the build configuration.
|
||||
The output file is currently named 'rippled'.
|
||||
|
||||
Single-config generators:
|
||||
|
||||
@@ -292,13 +234,13 @@ It fixes some source files to add missing `#include`s.
|
||||
```
|
||||
|
||||
Multi-config generators:
|
||||
|
||||
|
||||
```
|
||||
cmake --build . --config Release
|
||||
cmake --build . --config Debug
|
||||
```
|
||||
|
||||
6. Test rippled.
|
||||
5. Test xahaud.
|
||||
|
||||
Single-config generators:
|
||||
|
||||
@@ -313,79 +255,19 @@ It fixes some source files to add missing `#include`s.
|
||||
./Debug/rippled --unittest
|
||||
```
|
||||
|
||||
The location of `rippled` in your build directory depends on your CMake
|
||||
The location of `xahaud` in your build directory depends on your CMake
|
||||
generator. Pass `--help` to see the rest of the command line options.
|
||||
|
||||
|
||||
## Coverage report
|
||||
|
||||
The coverage report is intended for developers using compilers GCC
|
||||
or Clang (including Apple Clang). It is generated by the build target `coverage`,
|
||||
which is only enabled when the `coverage` option is set, e.g. with
|
||||
`--options coverage=True` in `conan` or `-Dcoverage=ON` variable in `cmake`
|
||||
|
||||
Prerequisites for the coverage report:
|
||||
|
||||
- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
|
||||
- `gcov` for GCC (installed with the compiler by default) or
|
||||
- `llvm-cov` for Clang (installed with the compiler by default)
|
||||
- `Debug` build type
|
||||
|
||||
A coverage report is created when the following steps are completed, in order:
|
||||
|
||||
1. `rippled` binary built with instrumentation data, enabled by the `coverage`
|
||||
option mentioned above
|
||||
2. completed run of unit tests, which populates coverage capture data
|
||||
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
|
||||
to assemble both instrumentation data and the coverage capture data into a coverage report
|
||||
|
||||
The above steps are automated into a single target `coverage`. The instrumented
|
||||
`rippled` binary can also be used for regular development or testing work, at
|
||||
the cost of extra disk space utilization and a small performance hit
|
||||
(to store coverage capture). In case of a spurious failure of unit tests, it is
|
||||
possible to re-run the `coverage` target without rebuilding the `rippled` binary
|
||||
(since it is simply a dependency of the coverage report target). It is also possible
|
||||
to select only specific tests for the purpose of the coverage report, by setting
|
||||
the `coverage_test` variable in `cmake`
|
||||
|
||||
The default coverage report format is `html-details`, but the user
|
||||
can override it to any of the formats listed in `cmake/CodeCoverage.cmake`
|
||||
by setting the `coverage_format` variable in `cmake`. It is also possible
|
||||
to generate more than one format at a time by setting the `coverage_extra_args`
|
||||
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
|
||||
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
|
||||
|
||||
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
|
||||
set to the number of available CPU cores. This may cause spurious test
|
||||
errors on Apple. Developers can override the number of unit test jobs with
|
||||
the `coverage_test_parallelism` variable in `cmake`.
|
||||
|
||||
Example use with some cmake variables set:
|
||||
|
||||
```
|
||||
cd .build
|
||||
conan install .. --output-folder . --build missing --settings build_type=Debug
|
||||
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dxrpld=ON -Dtests=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
|
||||
cmake --build . --target coverage
|
||||
```
|
||||
|
||||
After the `coverage` target is completed, the generated coverage report will be
|
||||
stored inside the build directory, as either of:
|
||||
|
||||
- file named `coverage.`_extension_ , with a suitable extension for the report format, or
|
||||
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
|
||||
|
||||
|
||||
## Options
|
||||
|
||||
| Option | Default Value | Description |
|
||||
| --- | ---| ---|
|
||||
| `assert` | OFF | Enable assertions.
|
||||
| `coverage` | OFF | Prepare the coverage report. |
|
||||
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
|
||||
| `tests` | OFF | Build tests. |
|
||||
| `reporting` | OFF | Build the reporting mode feature. |
|
||||
| `tests` | ON | Build tests. |
|
||||
| `unity` | ON | Configure a unity build. |
|
||||
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
|
||||
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
|
||||
|
||||
[Unity builds][5] may be faster for the first build
|
||||
(at the cost of much more memory) since they concatenate sources into fewer
|
||||
@@ -401,52 +283,26 @@ and can be helpful for detecting `#include` omissions.
|
||||
If you have trouble building dependencies after changing Conan settings,
|
||||
try removing the Conan cache.
|
||||
|
||||
For Conan 2:
|
||||
```
|
||||
rm -rf ~/.conan/data
|
||||
rm -rf ~/.conan2/p
|
||||
```
|
||||
|
||||
Or clear the entire Conan 2 cache:
|
||||
```
|
||||
conan cache clean "*"
|
||||
```
|
||||
|
||||
|
||||
### 'protobuf/port_def.inc' file not found
|
||||
### macOS compilation with Apple Clang 17+
|
||||
|
||||
If `cmake --build .` results in an error due to a missing protobuf file, then you might have generated CMake files for a different `build_type` than the `CMAKE_BUILD_TYPE` you passed to conan.
|
||||
If you're on macOS with Apple Clang 17 or newer, you need to add a compiler flag to work around a compilation error in gRPC dependencies.
|
||||
|
||||
Edit `~/.conan2/profiles/default` and add under the `[conf]` section:
|
||||
|
||||
```
|
||||
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
|
||||
10 | #include <google/protobuf/port_def.inc>
|
||||
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
1 error generated.
|
||||
```
|
||||
|
||||
For example, if you want to build Debug:
|
||||
|
||||
1. For conan install, pass `--settings build_type=Debug`
|
||||
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
|
||||
|
||||
|
||||
### no std::result_of
|
||||
|
||||
If your compiler version is recent enough to have removed `std::result_of` as
|
||||
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
|
||||
definition to your build.
|
||||
|
||||
```
|
||||
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
|
||||
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
|
||||
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
|
||||
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
|
||||
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
|
||||
```
|
||||
|
||||
|
||||
### call to 'async_teardown' is ambiguous
|
||||
|
||||
If you are compiling with an early version of Clang 16, then you might hit
|
||||
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
|
||||
header][7]. You can workaround it by adding this preprocessor definition:
|
||||
|
||||
```
|
||||
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
|
||||
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
|
||||
[conf]
|
||||
tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
|
||||
```
|
||||
|
||||
|
||||
@@ -601,10 +457,6 @@ but it is more convenient to put them in a [profile][profile].
|
||||
|
||||
[1]: https://github.com/conan-io/conan-center-index/issues/13168
|
||||
[5]: https://en.wikipedia.org/wiki/Unity_build
|
||||
[6]: https://github.com/boostorg/beast/issues/2648
|
||||
[7]: https://github.com/boostorg/beast/issues/2661
|
||||
[gcovr]: https://gcovr.com/en/stable/getting-started.html
|
||||
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
|
||||
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html
|
||||
[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html
|
||||
|
||||
@@ -10,6 +10,9 @@ Loop: test.jtx test.toplevel
|
||||
Loop: test.jtx test.unit_test
|
||||
test.unit_test == test.jtx
|
||||
|
||||
Loop: xrpl.hook xrpld.app
|
||||
xrpld.app > xrpl.hook
|
||||
|
||||
Loop: xrpl.protocol xrpld.app
|
||||
xrpld.app > xrpl.protocol
|
||||
|
||||
|
||||
@@ -103,6 +103,10 @@ test.protocol > test.toplevel
|
||||
test.protocol > xrpl.basics
|
||||
test.protocol > xrpl.json
|
||||
test.protocol > xrpl.protocol
|
||||
test.rdb > test.jtx
|
||||
test.rdb > test.toplevel
|
||||
test.rdb > xrpld.app
|
||||
test.rdb > xrpld.core
|
||||
test.resource > test.unit_test
|
||||
test.resource > xrpl.basics
|
||||
test.resource > xrpl.resource
|
||||
@@ -136,6 +140,7 @@ test.toplevel > test.csf
|
||||
test.toplevel > xrpl.json
|
||||
test.unit_test > xrpl.basics
|
||||
xrpl.hook > xrpl.basics
|
||||
xrpl.hook > xrpl.protocol
|
||||
xrpl.json > xrpl.basics
|
||||
xrpl.protocol > xrpl.basics
|
||||
xrpl.protocol > xrpl.json
|
||||
@@ -150,7 +155,6 @@ xrpld.app > xrpl.basics
|
||||
xrpld.app > xrpld.conditions
|
||||
xrpld.app > xrpld.consensus
|
||||
xrpld.app > xrpld.perflog
|
||||
xrpld.app > xrpl.hook
|
||||
xrpld.app > xrpl.json
|
||||
xrpld.app > xrpl.resource
|
||||
xrpld.conditions > xrpl.basics
|
||||
|
||||
144
CMakeLists.txt
144
CMakeLists.txt
@@ -41,6 +41,16 @@ if(Git_FOUND)
|
||||
endif()
|
||||
endif() #git
|
||||
|
||||
# make SOURCE_ROOT_PATH define available for logging
|
||||
set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/")
|
||||
add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}")
|
||||
|
||||
# BEAST_ENHANCED_LOGGING - adds file:line numbers and formatting to logs
|
||||
# Automatically enabled for Debug builds via generator expression
|
||||
# Can be explicitly controlled with -DBEAST_ENHANCED_LOGGING=ON/OFF
|
||||
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages (auto: Debug=ON, Release=OFF)" OFF)
|
||||
message(STATUS "BEAST_ENHANCED_LOGGING option: ${BEAST_ENHANCED_LOGGING}")
|
||||
|
||||
if(thread_safety_analysis)
|
||||
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
|
||||
add_compile_options("-stdlib=libc++")
|
||||
@@ -58,11 +68,6 @@ if(CMAKE_TOOLCHAIN_FILE)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (NOT USE_CONAN)
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/deps")
|
||||
endif()
|
||||
|
||||
include (CheckCXXCompilerFlag)
|
||||
include (FetchContent)
|
||||
include (ExternalProject)
|
||||
@@ -74,9 +79,7 @@ endif ()
|
||||
include(RippledSanity)
|
||||
include(RippledVersion)
|
||||
include(RippledSettings)
|
||||
if (NOT USE_CONAN)
|
||||
include(RippledNIH)
|
||||
endif()
|
||||
|
||||
# this check has to remain in the top-level cmake
|
||||
# because of the early return statement
|
||||
if (packages_only)
|
||||
@@ -88,81 +91,56 @@ endif ()
|
||||
include(RippledCompiler)
|
||||
include(RippledInterface)
|
||||
|
||||
###
|
||||
if (NOT USE_CONAN)
|
||||
set(SECP256K1_INSTALL TRUE)
|
||||
add_subdirectory(external/secp256k1)
|
||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||
add_subdirectory(external/ed25519-donna)
|
||||
add_subdirectory(external/antithesis-sdk)
|
||||
include(deps/Boost)
|
||||
include(deps/OpenSSL)
|
||||
# include(deps/Secp256k1)
|
||||
# include(deps/Ed25519-donna)
|
||||
include(deps/Lz4)
|
||||
include(deps/Libarchive)
|
||||
include(deps/Sqlite)
|
||||
include(deps/Soci)
|
||||
include(deps/Rocksdb)
|
||||
include(deps/Nudb)
|
||||
include(deps/date)
|
||||
# include(deps/Protobuf)
|
||||
# include(deps/gRPC)
|
||||
include(deps/cassandra)
|
||||
include(deps/Postgres)
|
||||
include(deps/WasmEdge)
|
||||
else()
|
||||
include(conan/Boost)
|
||||
find_package(OpenSSL 1.1.1 REQUIRED)
|
||||
set_target_properties(OpenSSL::SSL PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
|
||||
)
|
||||
set(SECP256K1_INSTALL TRUE)
|
||||
add_subdirectory(external/secp256k1)
|
||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||
add_subdirectory(external/ed25519-donna)
|
||||
add_subdirectory(external/antithesis-sdk)
|
||||
find_package(gRPC REQUIRED)
|
||||
find_package(lz4 REQUIRED)
|
||||
# Target names with :: are not allowed in a generator expression.
|
||||
# We need to pull the include directories and imported location properties
|
||||
# from separate targets.
|
||||
find_package(LibArchive REQUIRED)
|
||||
find_package(SOCI REQUIRED)
|
||||
find_package(SQLite3 REQUIRED)
|
||||
find_package(wasmedge REQUIRED)
|
||||
option(rocksdb "Enable RocksDB" ON)
|
||||
if(rocksdb)
|
||||
find_package(RocksDB REQUIRED)
|
||||
set_target_properties(RocksDB::rocksdb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
|
||||
)
|
||||
target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
|
||||
endif()
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(date REQUIRED)
|
||||
find_package(xxHash REQUIRED)
|
||||
if(TARGET nudb::core)
|
||||
set(nudb nudb::core)
|
||||
elseif(TARGET NuDB::nudb)
|
||||
set(nudb NuDB::nudb)
|
||||
else()
|
||||
message(FATAL_ERROR "unknown nudb target")
|
||||
endif()
|
||||
target_link_libraries(ripple_libs INTERFACE ${nudb})
|
||||
|
||||
target_link_libraries(ripple_libs INTERFACE
|
||||
ed25519::ed25519
|
||||
lz4::lz4
|
||||
OpenSSL::Crypto
|
||||
OpenSSL::SSL
|
||||
# Ripple::grpc_pbufs
|
||||
# Ripple::pbufs
|
||||
secp256k1::secp256k1
|
||||
soci::soci
|
||||
SQLite::SQLite3
|
||||
include(deps/Boost)
|
||||
find_package(OpenSSL 1.1.1 REQUIRED)
|
||||
set_target_properties(OpenSSL::SSL PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
|
||||
)
|
||||
set(SECP256K1_INSTALL TRUE)
|
||||
add_subdirectory(external/secp256k1)
|
||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||
add_subdirectory(external/ed25519-donna)
|
||||
add_subdirectory(external/antithesis-sdk)
|
||||
find_package(gRPC REQUIRED)
|
||||
find_package(lz4 REQUIRED)
|
||||
# Target names with :: are not allowed in a generator expression.
|
||||
# We need to pull the include directories and imported location properties
|
||||
# from separate targets.
|
||||
find_package(LibArchive REQUIRED)
|
||||
find_package(SOCI REQUIRED)
|
||||
find_package(SQLite3 REQUIRED)
|
||||
include(deps/WasmEdge)
|
||||
option(rocksdb "Enable RocksDB" ON)
|
||||
if(rocksdb)
|
||||
find_package(RocksDB REQUIRED)
|
||||
set_target_properties(RocksDB::rocksdb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
|
||||
)
|
||||
target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
|
||||
endif()
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(date REQUIRED)
|
||||
find_package(xxHash REQUIRED)
|
||||
if(TARGET nudb::core)
|
||||
set(nudb nudb::core)
|
||||
elseif(TARGET NuDB::nudb)
|
||||
set(nudb NuDB::nudb)
|
||||
else()
|
||||
message(FATAL_ERROR "unknown nudb target")
|
||||
endif()
|
||||
target_link_libraries(ripple_libs INTERFACE ${nudb})
|
||||
|
||||
target_link_libraries(ripple_libs INTERFACE
|
||||
ed25519::ed25519
|
||||
lz4::lz4
|
||||
OpenSSL::Crypto
|
||||
OpenSSL::SSL
|
||||
# Ripple::grpc_pbufs
|
||||
# Ripple::pbufs
|
||||
secp256k1::secp256k1
|
||||
soci::soci
|
||||
SQLite::SQLite3
|
||||
)
|
||||
|
||||
if(coverage)
|
||||
include(RippledCov)
|
||||
@@ -170,10 +148,6 @@ endif()
|
||||
|
||||
set(PROJECT_EXPORT_SET RippleExports)
|
||||
include(RippledCore)
|
||||
if (NOT USE_CONAN)
|
||||
include(deps/Protobuf)
|
||||
include(deps/gRPC)
|
||||
endif()
|
||||
include(RippledInstall)
|
||||
include(RippledDocs)
|
||||
include(RippledValidatorKeys)
|
||||
|
||||
@@ -488,9 +488,10 @@ existing maintainer without a vote.
|
||||
|
||||
## Current Maintainers
|
||||
|
||||
* [Richard Holland](https://github.com/RichardAH) (XRPL Labs + XRP Ledger Foundation)
|
||||
* [Denis Angell](https://github.com/dangell7) (XRPL Labs + XRP Ledger Foundation)
|
||||
* [Wietse Wind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
|
||||
* [Richard Holland](https://github.com/RichardAH) (XRPL Labs + INFTF)
|
||||
* [Denis Angell](https://github.com/dangell7) (XRPL Labs + INFTF)
|
||||
* [Wietse Wind](https://github.com/WietseWind) (XRPL Labs + INFTF)
|
||||
* [tequ](https://github.com/tequdev) (Independent + INFTF)
|
||||
|
||||
|
||||
[1]: https://docs.github.com/en/get-started/quickstart/contributing-to-projects
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
**Note:** Throughout this README, references to "we" or "our" pertain to the community and contributors involved in the Xahau network. It does not imply a legal entity or a specific collection of individuals.
|
||||
|
||||
[Xahau](https://xahau.network/) is a decentralized cryptographic ledger that builds upon the robust foundation of the XRP Ledger. It inherits the XRP Ledger's Byzantine Fault Tolerant consensus algorithm and enhances it with additional features and functionalities. Developers and users familiar with the XRP Ledger will find that most documentation and tutorials available on [xrpl.org](https://xrpl.org) are relevant and applicable to Xahau, including those related to running validators and managing validator keys. For Xahau specific documentation you can visit our [documentation](https://docs.xahau.network/)
|
||||
[Xahau](https://xahau.network/) is a decentralized cryptographic ledger that builds upon the robust foundation of the XRP Ledger. It inherits the XRP Ledger's Byzantine Fault Tolerant consensus algorithm and enhances it with additional features and functionalities. Developers and users familiar with the XRP Ledger will find that most documentation and tutorials available on [xrpl.org](https://xrpl.org) are relevant and applicable to Xahau, including those related to running validators and managing validator keys. For Xahau specific documentation you can visit our [documentation](https://xahau.network/)
|
||||
|
||||
## XAH
|
||||
XAH is the public, counterparty-free asset native to Xahau and functions primarily as network gas. Transactions submitted to the Xahau network must supply an appropriate amount of XAH, to be burnt by the network as a fee, in order to be successfully included in a validated ledger. In addition, XAH also acts as a bridge currency within the Xahau DEX. XAH is traded on the open-market and is available for anyone to access. Xahau was created in 2023 with a supply of 600 million units of XAH.
|
||||
@@ -12,7 +12,7 @@ The server software that powers Xahau is called `xahaud` and is available in thi
|
||||
|
||||
### Build from Source
|
||||
|
||||
* [Read the build instructions in our documentation](https://docs.xahau.network/infrastructure/building-xahau)
|
||||
* [Read the build instructions in our documentation](https://xahau.network/infrastructure/building-xahau)
|
||||
* If you encounter any issues, please [open an issue](https://github.com/xahau/xahaud/issues)
|
||||
|
||||
## Highlights of Xahau
|
||||
@@ -58,9 +58,9 @@ git-subtree. See those directories' README files for more details.
|
||||
|
||||
- **Documentation**: Documentation for XRPL, Xahau and Hooks.
|
||||
- [Xrpl Documentation](https://xrpl.org)
|
||||
- [Xahau Documentation](https://docs.xahau.network/)
|
||||
- [Xahau Documentation](https://xahau.network/)
|
||||
- [Hooks Technical Documentation](https://xrpl-hooks.readme.io/)
|
||||
- **Explorers**: Explore the Xahau ledger using various explorers:
|
||||
- **Explorers**: Explore the Xahau Network using various explorers:
|
||||
- [xahauexplorer.com](https://xahauexplorer.com)
|
||||
- [xahscan.com](https://xahscan.com)
|
||||
- [xahau.xrpl.org](https://xahau.xrpl.org)
|
||||
|
||||
@@ -62,11 +62,11 @@ For these complaints or reports, please [contact our support team](mailto:bugs@x
|
||||
### The following type of security problems are excluded
|
||||
|
||||
1. **In scope**. Only bugs in software under the scope of the program qualify. Currently, that means `xahaud` and `xahau-lib`.
|
||||
2. **Relevant**. A security issue, posing a danger to user funds, privacy or the operation of the Xahau Ledger.
|
||||
2. **Relevant**. A security issue, posing a danger to user funds, privacy or the operation of the Xahau Network.
|
||||
3. **Original and previously unknown**. Bugs that are already known and discussed in public do not qualify. Previously reported bugs, even if publicly unknown, are not eligible.
|
||||
4. **Specific**. We welcome general security advice or recommendations, but we cannot pay bounties for that.
|
||||
5. **Fixable**. There has to be something we can do to permanently fix the problem. Note that bugs in other people’s software may still qualify in some cases. For example, if you find a bug in a library that we use which can compromise the security of software that is in scope and we can get it fixed, you may qualify for a bounty.
|
||||
6. **Unused**. If you use the exploit to attack the Xahau Ledger, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker we reserve the right not to pay a bounty.
|
||||
6. **Unused**. If you use the exploit to attack the Xahau Network, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker we reserve the right not to pay a bounty.
|
||||
|
||||
Please note: Reports that are lacking any proof (such as screenshots or other data), detailed information or details on how to reproduce any unexpected result will be investigated but will not be eligible for any reward.
|
||||
|
||||
|
||||
@@ -5,8 +5,6 @@
|
||||
# debugging.
|
||||
set -ex
|
||||
|
||||
set -e
|
||||
|
||||
echo "START INSIDE CONTAINER - CORE"
|
||||
|
||||
echo "-- BUILD CORES: $3"
|
||||
@@ -14,6 +12,13 @@ echo "-- GITHUB_REPOSITORY: $1"
|
||||
echo "-- GITHUB_SHA: $2"
|
||||
echo "-- GITHUB_RUN_NUMBER: $4"
|
||||
|
||||
# Use mounted filesystem for temp files to avoid container space limits
|
||||
export TMPDIR=/io/tmp
|
||||
export TEMP=/io/tmp
|
||||
export TMP=/io/tmp
|
||||
mkdir -p /io/tmp
|
||||
echo "=== Using temp directory: /io/tmp ==="
|
||||
|
||||
umask 0000;
|
||||
|
||||
cd /io/ &&
|
||||
@@ -27,7 +32,8 @@ if [[ "$?" -ne "0" ]]; then
|
||||
exit 127
|
||||
fi
|
||||
|
||||
perl -i -pe "s/^(\\s*)-DBUILD_SHARED_LIBS=OFF/\\1-DBUILD_SHARED_LIBS=OFF\\n\\1-DROCKSDB_BUILD_SHARED=OFF/g" cmake/deps/Rocksdb.cmake &&
|
||||
BUILD_TYPE=Release
|
||||
|
||||
mv cmake/deps/WasmEdge.cmake cmake/deps/WasmEdge.old &&
|
||||
echo "find_package(LLVM REQUIRED CONFIG)
|
||||
message(STATUS \"Found LLVM \${LLVM_PACKAGE_VERSION}\")
|
||||
@@ -38,20 +44,47 @@ target_link_libraries (ripple_libs INTERFACE wasmedge)
|
||||
add_library (wasmedge::wasmedge ALIAS wasmedge)
|
||||
message(\"WasmEdge DONE\")
|
||||
" > cmake/deps/WasmEdge.cmake &&
|
||||
|
||||
export LDFLAGS="-static-libstdc++"
|
||||
export CMAKE_EXE_LINKER_FLAGS="-static-libstdc++"
|
||||
export CMAKE_STATIC_LINKER_FLAGS="-static-libstdc++"
|
||||
|
||||
git config --global --add safe.directory /io &&
|
||||
git checkout src/libxrpl/protocol/BuildInfo.cpp &&
|
||||
sed -i s/\"0.0.0\"/\"$(date +%Y).$(date +%-m).$(date +%-d)-$(git rev-parse --abbrev-ref HEAD)+$4\"/g src/libxrpl/protocol/BuildInfo.cpp &&
|
||||
sed -i s/\"0.0.0\"/\"$(date +%Y).$(date +%-m).$(date +%-d)-$(git rev-parse --abbrev-ref HEAD)$(if [ -n "$4" ]; then echo "+$4"; fi)\"/g src/libxrpl/protocol/BuildInfo.cpp &&
|
||||
conan export external/snappy --version 1.1.10 --user xahaud --channel stable &&
|
||||
conan export external/soci --version 4.0.3 --user xahaud --channel stable &&
|
||||
conan export external/wasmedge --version 0.11.2 --user xahaud --channel stable &&
|
||||
cd release-build &&
|
||||
which cmake &&
|
||||
cmake --version &&
|
||||
cmake .. -DCMAKE_BUILD_TYPE=Release -DBoost_NO_BOOST_CMAKE=ON -DLLVM_DIR=/usr/lib64/llvm13/lib/cmake/llvm/ -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ -DWasmEdge_LIB=/usr/local/lib64/libwasmedge.a &&
|
||||
make -j$3 VERBOSE=1 &&
|
||||
# Install dependencies - tool_requires in conanfile.py handles glibc 2.28 compatibility
|
||||
# for build tools (protoc, grpc plugins, b2) in HBB environment
|
||||
# The tool_requires('b2/5.3.2') in conanfile.py should force b2 to build from source
|
||||
# with the correct toolchain, avoiding the GLIBCXX_3.4.29 issue
|
||||
echo "=== Installing dependencies ===" &&
|
||||
conan install .. --output-folder . --build missing --settings build_type=$BUILD_TYPE \
|
||||
-o with_wasmedge=False -o tool_requires_b2=True &&
|
||||
cmake .. -G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=$BUILD_TYPE \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||
-DCMAKE_EXE_LINKER_FLAGS="-static-libstdc++" \
|
||||
-DLLVM_DIR=$LLVM_DIR \
|
||||
-DWasmEdge_LIB=$WasmEdge_LIB \
|
||||
-Dxrpld=TRUE \
|
||||
-Dtests=TRUE &&
|
||||
ccache -z &&
|
||||
ninja -j $3 && echo "=== Re-running final link with verbose output ===" && rm -f rippled && ninja -v rippled &&
|
||||
ccache -s &&
|
||||
strip -s rippled &&
|
||||
mv rippled xahaud &&
|
||||
echo "=== Full ldd output ===" &&
|
||||
ldd xahaud &&
|
||||
echo "=== Running libcheck ===" &&
|
||||
libcheck xahaud &&
|
||||
echo "Build host: `hostname`" > release.info &&
|
||||
echo "Build date: `date`" >> release.info &&
|
||||
echo "Build md5: `md5sum xahaud`" >> release.info &&
|
||||
echo "Git remotes:" >> release.info &&
|
||||
git remote -v >> release.info
|
||||
git remote -v >> release.info &&
|
||||
echo "Git status:" >> release.info &&
|
||||
git status -v >> release.info &&
|
||||
echo "Git log [last 20]:" >> release.info &&
|
||||
@@ -71,8 +104,8 @@ fi
|
||||
cd ..;
|
||||
|
||||
mv src/xrpld/net/detail/RegisterSSLCerts.cpp.old src/xrpld/net/detail/RegisterSSLCerts.cpp;
|
||||
mv cmake/deps/Rocksdb.cmake.old cmake/deps/Rocksdb.cmake;
|
||||
mv cmake/deps/WasmEdge.old cmake/deps/WasmEdge.cmake;
|
||||
|
||||
rm src/certs/certbundle.h;
|
||||
git checkout src/libxrpl/protocol/BuildInfo.cpp;
|
||||
|
||||
echo "END INSIDE CONTAINER - CORE"
|
||||
|
||||
100
build-full.sh
100
build-full.sh
@@ -3,8 +3,6 @@
|
||||
# processes launched or upon any unbound variable.
|
||||
# We use set -x to print commands before running them to help with
|
||||
# debugging.
|
||||
set -ex
|
||||
|
||||
set -e
|
||||
|
||||
echo "START INSIDE CONTAINER - FULL"
|
||||
@@ -16,13 +14,6 @@ echo "-- GITHUB_RUN_NUMBER: $4"
|
||||
|
||||
umask 0000;
|
||||
|
||||
echo "Fixing CentOS 7 EOL"
|
||||
|
||||
sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-*
|
||||
sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-*
|
||||
yum clean all
|
||||
yum-config-manager --disable centos-sclo-sclo
|
||||
|
||||
####
|
||||
|
||||
cd /io;
|
||||
@@ -73,94 +64,15 @@ then
|
||||
#endif/g" src/xrpld/net/detail/RegisterSSLCerts.cpp &&
|
||||
sed -i "s/#include <xrpld\/net\/RegisterSSLCerts.h>/\0\n#include <certs\/certbundle.h>/g" src/xrpld/net/detail/RegisterSSLCerts.cpp
|
||||
fi
|
||||
mkdir -p .nih_c;
|
||||
mkdir -p .nih_toolchain;
|
||||
cd .nih_toolchain &&
|
||||
yum install -y wget lz4 lz4-devel git llvm13-static.x86_64 llvm13-devel.x86_64 devtoolset-10-binutils zlib-static ncurses-static -y \
|
||||
devtoolset-7-gcc-c++ \
|
||||
devtoolset-9-gcc-c++ \
|
||||
devtoolset-10-gcc-c++ \
|
||||
snappy snappy-devel \
|
||||
zlib zlib-devel \
|
||||
lz4-devel \
|
||||
libasan &&
|
||||
export PATH=`echo $PATH | sed -E "s/devtoolset-9/devtoolset-7/g"` &&
|
||||
echo "-- Install ZStd 1.1.3 --" &&
|
||||
yum install epel-release -y &&
|
||||
ZSTD_VERSION="1.1.3" &&
|
||||
( wget -nc -q -O zstd-${ZSTD_VERSION}.tar.gz https://github.com/facebook/zstd/archive/v${ZSTD_VERSION}.tar.gz; echo "" ) &&
|
||||
tar xzvf zstd-${ZSTD_VERSION}.tar.gz &&
|
||||
cd zstd-${ZSTD_VERSION} &&
|
||||
make -j$3 install &&
|
||||
cd .. &&
|
||||
echo "-- Install Cmake 3.25.3 --" &&
|
||||
pwd &&
|
||||
( wget -nc -q https://github.com/Kitware/CMake/releases/download/v3.25.3/cmake-3.25.3-linux-x86_64.tar.gz; echo "" ) &&
|
||||
tar -xzf cmake-3.25.3-linux-x86_64.tar.gz -C /hbb/ &&
|
||||
ln -sf /hbb/cmake-3.25.3-linux-x86_64/bin/cmake /hbb/bin/cmake &&
|
||||
export PATH="/hbb/cmake-3.25.3-linux-x86_64/bin:$PATH" &&
|
||||
echo "-- Install Boost 1.86.0 --" &&
|
||||
pwd &&
|
||||
( wget -nc -q https://archives.boost.io/release/1.86.0/source/boost_1_86_0.tar.gz; echo "" ) &&
|
||||
tar -xzf boost_1_86_0.tar.gz &&
|
||||
cd boost_1_86_0 && ./bootstrap.sh && ./b2 link=static -j$3 && ./b2 install &&
|
||||
cd ../ &&
|
||||
echo "-- Install Protobuf 3.20.0 --" &&
|
||||
pwd &&
|
||||
( wget -nc -q https://github.com/protocolbuffers/protobuf/releases/download/v3.20.0/protobuf-all-3.20.0.tar.gz; echo "" ) &&
|
||||
tar -xzf protobuf-all-3.20.0.tar.gz &&
|
||||
cd protobuf-3.20.0/ &&
|
||||
./autogen.sh && ./configure --prefix=/usr --disable-shared link=static && make -j$3 && make install &&
|
||||
cd .. &&
|
||||
echo "-- Build LLD --" &&
|
||||
pwd &&
|
||||
ln /usr/bin/llvm-config-13 /usr/bin/llvm-config &&
|
||||
mv /opt/rh/devtoolset-9/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar-9 &&
|
||||
ln /opt/rh/devtoolset-10/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar &&
|
||||
( wget -nc -q https://github.com/llvm/llvm-project/releases/download/llvmorg-13.0.1/lld-13.0.1.src.tar.xz; echo "" ) &&
|
||||
( wget -nc -q https://github.com/llvm/llvm-project/releases/download/llvmorg-13.0.1/libunwind-13.0.1.src.tar.xz; echo "" ) &&
|
||||
tar -xf lld-13.0.1.src.tar.xz &&
|
||||
tar -xf libunwind-13.0.1.src.tar.xz &&
|
||||
cp -r libunwind-13.0.1.src/include libunwind-13.0.1.src/src lld-13.0.1.src/ &&
|
||||
cd lld-13.0.1.src &&
|
||||
rm -rf build CMakeCache.txt &&
|
||||
mkdir -p build &&
|
||||
cd build &&
|
||||
cmake .. -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ -DCMAKE_INSTALL_PREFIX=/usr/lib64/llvm13/ -DCMAKE_BUILD_TYPE=Release &&
|
||||
make -j$3 install &&
|
||||
ln -s /usr/lib64/llvm13/lib/include/lld /usr/include/lld &&
|
||||
cp /usr/lib64/llvm13/lib/liblld*.a /usr/local/lib/ &&
|
||||
cd ../../ &&
|
||||
echo "-- Build WasmEdge --" &&
|
||||
( wget -nc -q https://github.com/WasmEdge/WasmEdge/archive/refs/tags/0.11.2.zip; unzip -o 0.11.2.zip; ) &&
|
||||
cd WasmEdge-0.11.2 &&
|
||||
( mkdir -p build; echo "" ) &&
|
||||
cd build &&
|
||||
export BOOST_ROOT="/usr/local/src/boost_1_86_0" &&
|
||||
export Boost_LIBRARY_DIRS="/usr/local/lib" &&
|
||||
export BOOST_INCLUDEDIR="/usr/local/src/boost_1_86_0" &&
|
||||
export PATH=`echo $PATH | sed -E "s/devtoolset-7/devtoolset-9/g"` &&
|
||||
cmake .. \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DWASMEDGE_BUILD_SHARED_LIB=OFF \
|
||||
-DWASMEDGE_BUILD_STATIC_LIB=ON \
|
||||
-DWASMEDGE_BUILD_AOT_RUNTIME=ON \
|
||||
-DWASMEDGE_FORCE_DISABLE_LTO=ON \
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON \
|
||||
-DWASMEDGE_LINK_LLVM_STATIC=ON \
|
||||
-DWASMEDGE_BUILD_PLUGINS=OFF \
|
||||
-DWASMEDGE_LINK_TOOLS_STATIC=ON \
|
||||
-DBoost_NO_BOOST_CMAKE=ON -DLLVM_DIR=/usr/lib64/llvm13/lib/cmake/llvm/ -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ &&
|
||||
make -j$3 install &&
|
||||
export PATH=`echo $PATH | sed -E "s/devtoolset-9/devtoolset-10/g"` &&
|
||||
cp -r include/api/wasmedge /usr/include/ &&
|
||||
cd /io/ &&
|
||||
# Environment setup moved to Dockerfile in release-builder.sh
|
||||
source /opt/rh/gcc-toolset-11/enable
|
||||
export PATH=/usr/local/bin:$PATH
|
||||
export CC='/usr/lib64/ccache/gcc' &&
|
||||
export CXX='/usr/lib64/ccache/g++' &&
|
||||
echo "-- Build Rippled --" &&
|
||||
pwd &&
|
||||
cp cmake/deps/Rocksdb.cmake cmake/deps/Rocksdb.cmake.old &&
|
||||
|
||||
echo "MOVING TO [ build-core.sh ]"
|
||||
cd /io;
|
||||
echo "MOVING TO [ build-core.sh ]";
|
||||
|
||||
printenv > .env.temp;
|
||||
cat .env.temp | grep '=' | sed s/\\\(^[^=]\\+=\\\)/\\1\\\"/g|sed s/\$/\\\"/g > .env;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# Default validators.txt
|
||||
#
|
||||
# This file is located in the same folder as your rippled.cfg file
|
||||
# This file is located in the same folder as your xahaud.cfg file
|
||||
# and defines which validators your server trusts not to collude.
|
||||
#
|
||||
# This file is UTF-8 with DOS, UNIX, or Mac style line endings.
|
||||
@@ -17,18 +17,17 @@
|
||||
# See validator_list_sites and validator_list_keys below.
|
||||
#
|
||||
# Examples:
|
||||
# n9KorY8QtTdRx7TVDpwnG9NvyxsDwHUKUEeDLY3AkiGncVaSXZi5
|
||||
# n9MqiExBcoG19UXwoLjBJnhsxEhAZMuWwJDRdkyDz1EkEkwzQTNt
|
||||
# n9L3GdotB8a3AqtsvS7NXt4BUTQSAYyJUr9xtFj2qXJjfbZsawKY
|
||||
# n9M7G6eLwQtUjfCthWUmTN8L4oEZn1sNr46yvKrpsq58K1C6LAxz
|
||||
#
|
||||
# [validator_list_sites]
|
||||
#
|
||||
# List of URIs serving lists of recommended validators.
|
||||
#
|
||||
# Examples:
|
||||
# https://vl.ripple.com
|
||||
# https://vl.xrplf.org
|
||||
# https://vl.xahau.org
|
||||
# http://127.0.0.1:8000
|
||||
# file:///etc/opt/ripple/vl.txt
|
||||
# file:///etc/opt/xahaud/vl.txt
|
||||
#
|
||||
# [validator_list_keys]
|
||||
#
|
||||
@@ -39,50 +38,48 @@
|
||||
# Validator list keys should be hex-encoded.
|
||||
#
|
||||
# Examples:
|
||||
# ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
|
||||
# ED307A760EE34F2D0CAA103377B1969117C38B8AA0AA1E2A24DAC1F32FC97087ED
|
||||
# EDA46E9C39B1389894E690E58914DC1029602870370A0993E5B87C4A24EAF4A8E8
|
||||
#
|
||||
# [import_vl_keys]
|
||||
#
|
||||
# This section is used to import the public keys of trusted validator list publishers.
|
||||
# The keys are used to authenticate and accept new lists of trusted validators.
|
||||
# In this example, the key for the publisher "vl.xrplf.org" is imported.
|
||||
# Each key is represented as a hexadecimal string.
|
||||
#
|
||||
# Examples:
|
||||
# ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
|
||||
# ED45D1840EE724BE327ABE9146503D5848EFD5F38B6D5FEDE71E80ACCE5E6E738B
|
||||
# ED42AEC58B701EEBB77356FFFEC26F83C1F0407263530F068C7C73D392C7E06FD1
|
||||
# ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
|
||||
|
||||
# The default validator list publishers that the rippled instance
|
||||
# The default validator list publishers that the xahaud instance
|
||||
# trusts.
|
||||
#
|
||||
# WARNING: Changing these values can cause your rippled instance to see a
|
||||
# validated ledger that contradicts other rippled instances'
|
||||
# WARNING: Changing these values can cause your xahaud instance to see a
|
||||
# validated ledger that contradicts other xahaud instances'
|
||||
# validated ledgers (aka a ledger fork) if your validator list(s)
|
||||
# do not sufficiently overlap with the list(s) used by others.
|
||||
# See: https://arxiv.org/pdf/1802.07242.pdf
|
||||
|
||||
[validator_list_sites]
|
||||
https://vl.ripple.com
|
||||
https://vl.xrplf.org
|
||||
https://vl.xahau.org
|
||||
|
||||
[validator_list_keys]
|
||||
#vl.ripple.com
|
||||
ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
|
||||
# vl.xrplf.org
|
||||
ED45D1840EE724BE327ABE9146503D5848EFD5F38B6D5FEDE71E80ACCE5E6E738B
|
||||
# vl.xahau.org
|
||||
EDA46E9C39B1389894E690E58914DC1029602870370A0993E5B87C4A24EAF4A8E8
|
||||
|
||||
[import_vl_keys]
|
||||
# vl.xrplf.org
|
||||
ED45D1840EE724BE327ABE9146503D5848EFD5F38B6D5FEDE71E80ACCE5E6E738B
|
||||
ED42AEC58B701EEBB77356FFFEC26F83C1F0407263530F068C7C73D392C7E06FD1
|
||||
ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
|
||||
|
||||
# To use the test network (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
|
||||
# To use the test network (see https://xahau.network/docs/infrastructure/installing-xahaud),
|
||||
# use the following configuration instead:
|
||||
#
|
||||
# [validator_list_sites]
|
||||
# https://vl.altnet.rippletest.net
|
||||
#
|
||||
# [validator_list_keys]
|
||||
# ED264807102805220DA0F312E71FC2C69E1552C9C5790F6C25E3729DEB573D5860
|
||||
# [validators]
|
||||
# nHBoJCE3wPgkTcrNPMHyTJFQ2t77EyCAqcBRspFCpL6JhwCm94VZ
|
||||
# nHUVv4g47bFMySAZFUKVaXUYEmfiUExSoY4FzwXULNwJRzju4XnQ
|
||||
# nHBvr8avSFTz4TFxZvvi4rEJZZtyqE3J6KAAcVWVtifsE7edPM7q
|
||||
# nHUH3Z8TRU57zetHbEPr1ynyrJhxQCwrJvNjr4j1SMjYADyW1WWe
|
||||
#
|
||||
# [import_vl_keys]
|
||||
# ED264807102805220DA0F312E71FC2C69E1552C9C5790F6C25E3729DEB573D5860
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
9
cfg/rippled-standalone.cfg → cfg/xahaud-standalone.cfg
Executable file → Normal file
9
cfg/rippled-standalone.cfg → cfg/xahaud-standalone.cfg
Executable file → Normal file
@@ -1,4 +1,4 @@
|
||||
# standalone: ./rippled -a --ledgerfile config/genesis.json --conf config/rippled-standalone.cfg
|
||||
# standalone: ./xahaud -a --ledgerfile config/genesis.json --conf config/xahaud-standalone.cfg
|
||||
[server]
|
||||
port_rpc_admin_local
|
||||
port_ws_public
|
||||
@@ -21,7 +21,7 @@ ip = 0.0.0.0
|
||||
protocol = ws
|
||||
|
||||
# [port_peer]
|
||||
# port = 51235
|
||||
# port = 21337
|
||||
# ip = 0.0.0.0
|
||||
# protocol = peer
|
||||
|
||||
@@ -69,7 +69,8 @@ time.nist.gov
|
||||
pool.ntp.org
|
||||
|
||||
[ips]
|
||||
r.ripple.com 51235
|
||||
bacab.alloy.ee 21337
|
||||
hubs.xahau.as16089.net 21337
|
||||
|
||||
[validators_file]
|
||||
validators-example.txt
|
||||
@@ -94,7 +95,7 @@ validators-example.txt
|
||||
1000000
|
||||
|
||||
[network_id]
|
||||
21338
|
||||
21337
|
||||
|
||||
[amendments]
|
||||
740352F2412A9909880C23A559FCECEDA3BE2126FED62FC7660D628A06927F11 Flow
|
||||
@@ -17,7 +17,9 @@ target_compile_features (common INTERFACE cxx_std_20)
|
||||
target_compile_definitions (common
|
||||
INTERFACE
|
||||
$<$<CONFIG:Debug>:DEBUG _DEBUG>
|
||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
|
||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
|
||||
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
|
||||
OPENSSL_SUPPRESS_DEPRECATED)
|
||||
# ^^^^ NOTE: CMAKE release builds already have NDEBUG
|
||||
# defined, so no need to add it explicitly except for
|
||||
# this special case of (profile ON) and (assert OFF)
|
||||
@@ -149,6 +151,16 @@ if (use_mold)
|
||||
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
|
||||
if ("${LD_VERSION}" MATCHES "mold")
|
||||
target_link_libraries (common INTERFACE -fuse-ld=mold)
|
||||
else ()
|
||||
# Checking for mold linker (< GCC 12.1.0)
|
||||
execute_process (
|
||||
COMMAND ${CMAKE_CXX_COMPILER} -B/usr/libexec/mold -Wl,--version
|
||||
OUTPUT_VARIABLE LD_VERSION_OUT
|
||||
ERROR_VARIABLE LD_VERSION_ERR)
|
||||
set(LD_VERSION "${LD_VERSION_OUT}${LD_VERSION_ERR}")
|
||||
if ("${LD_VERSION}" MATCHES "mold")
|
||||
target_link_libraries (common INTERFACE -B/usr/libexec/mold)
|
||||
endif ()
|
||||
endif ()
|
||||
unset (LD_VERSION)
|
||||
elseif (use_gold AND is_gcc)
|
||||
|
||||
@@ -78,6 +78,12 @@ target_link_libraries(xrpl.libxrpl.beast PUBLIC
|
||||
xrpl.libpb
|
||||
)
|
||||
|
||||
# Conditionally add enhanced logging source when BEAST_ENHANCED_LOGGING is enabled
|
||||
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
|
||||
target_sources(xrpl.libxrpl.beast PRIVATE
|
||||
src/libxrpl/beast/utility/src/beast_EnhancedLogging.cpp)
|
||||
endif()
|
||||
|
||||
# Level 02
|
||||
add_module(xrpl basics)
|
||||
target_link_libraries(xrpl.libxrpl.basics PUBLIC xrpl.libxrpl.beast)
|
||||
@@ -175,6 +181,11 @@ if(xrpld)
|
||||
Ripple::opts
|
||||
Ripple::libs
|
||||
xrpl.libxrpl
|
||||
# Workaround for a Conan 1.x bug that prevents static linking of libstdc++
|
||||
# when a dependency (snappy) modifies system_libs. See the comment in
|
||||
# external/snappy/conanfile.py for a full explanation.
|
||||
# This is likely not strictly necessary, but listed explicitly as a good practice.
|
||||
m
|
||||
)
|
||||
exclude_if_included(rippled)
|
||||
# define a macro for tests that might need to
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH prefix path..this is where we will download
|
||||
and build any ExternalProjects, and they will hopefully
|
||||
survive across build directory deletion (manual cleans)
|
||||
#]===================================================================]
|
||||
|
||||
string (REGEX REPLACE "[ \\/%]+" "_" gen_for_path ${CMAKE_GENERATOR})
|
||||
string (TOLOWER ${gen_for_path} gen_for_path)
|
||||
# HACK: trying to shorten paths for windows CI (which hits 260 MAXPATH easily)
|
||||
# @see: https://issues.jenkins-ci.org/browse/JENKINS-38706?focusedCommentId=339847
|
||||
string (REPLACE "visual_studio" "vs" gen_for_path ${gen_for_path})
|
||||
if (NOT DEFINED NIH_CACHE_ROOT)
|
||||
if (DEFINED ENV{NIH_CACHE_ROOT})
|
||||
set (NIH_CACHE_ROOT $ENV{NIH_CACHE_ROOT})
|
||||
else ()
|
||||
set (NIH_CACHE_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/.nih_c")
|
||||
endif ()
|
||||
endif ()
|
||||
set (nih_cache_path
|
||||
"${NIH_CACHE_ROOT}/${gen_for_path}/${CMAKE_CXX_COMPILER_ID}_${CMAKE_CXX_COMPILER_VERSION}")
|
||||
if (NOT is_multiconfig)
|
||||
set (nih_cache_path "${nih_cache_path}/${CMAKE_BUILD_TYPE}")
|
||||
endif ()
|
||||
file(TO_CMAKE_PATH "${nih_cache_path}" nih_cache_path)
|
||||
message (STATUS "NIH-EP cache path: ${nih_cache_path}")
|
||||
## two convenience variables:
|
||||
set (ep_lib_prefix ${CMAKE_STATIC_LIBRARY_PREFIX})
|
||||
set (ep_lib_suffix ${CMAKE_STATIC_LIBRARY_SUFFIX})
|
||||
|
||||
# this is a setting for FetchContent and needs to be
|
||||
# a cache variable
|
||||
# https://cmake.org/cmake/help/latest/module/FetchContent.html#populating-the-content
|
||||
set (FETCHCONTENT_BASE_DIR ${nih_cache_path} CACHE STRING "" FORCE)
|
||||
@@ -1,54 +0,0 @@
|
||||
find_package(Boost 1.83 REQUIRED
|
||||
COMPONENTS
|
||||
chrono
|
||||
container
|
||||
context
|
||||
coroutine
|
||||
date_time
|
||||
filesystem
|
||||
json
|
||||
program_options
|
||||
regex
|
||||
system
|
||||
thread
|
||||
)
|
||||
|
||||
add_library(ripple_boost INTERFACE)
|
||||
add_library(Ripple::boost ALIAS ripple_boost)
|
||||
if(XCODE)
|
||||
target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
||||
target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
|
||||
else()
|
||||
target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
||||
endif()
|
||||
|
||||
target_link_libraries(ripple_boost
|
||||
INTERFACE
|
||||
Boost::boost
|
||||
Boost::chrono
|
||||
Boost::container
|
||||
Boost::coroutine
|
||||
Boost::date_time
|
||||
Boost::filesystem
|
||||
Boost::json
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
Boost::system
|
||||
Boost::iostreams
|
||||
Boost::thread)
|
||||
if(Boost_COMPILER)
|
||||
target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
|
||||
endif()
|
||||
if(san AND is_clang)
|
||||
# TODO: gcc does not support -fsanitize-blacklist...can we do something else
|
||||
# for gcc ?
|
||||
if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
|
||||
get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
|
||||
endif()
|
||||
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
|
||||
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
|
||||
target_compile_options(opts
|
||||
INTERFACE
|
||||
# ignore boost headers for sanitizing
|
||||
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
|
||||
endif()
|
||||
@@ -1,52 +1,4 @@
|
||||
#[===================================================================[
|
||||
NIH dep: boost
|
||||
#]===================================================================]
|
||||
if((NOT DEFINED BOOST_ROOT) AND(DEFINED ENV{BOOST_ROOT}))
|
||||
set(BOOST_ROOT $ENV{BOOST_ROOT})
|
||||
endif()
|
||||
if((NOT DEFINED BOOST_LIBRARYDIR) AND(DEFINED ENV{BOOST_LIBRARYDIR}))
|
||||
set(BOOST_LIBRARYDIR $ENV{BOOST_LIBRARYDIR})
|
||||
endif()
|
||||
file(TO_CMAKE_PATH "${BOOST_ROOT}" BOOST_ROOT)
|
||||
if(WIN32 OR CYGWIN)
|
||||
# Workaround for MSVC having two boost versions - x86 and x64 on same PC in stage folders
|
||||
if((NOT DEFINED BOOST_LIBRARYDIR) AND (DEFINED BOOST_ROOT))
|
||||
if(IS_DIRECTORY ${BOOST_ROOT}/stage64/lib)
|
||||
set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage64/lib)
|
||||
elseif(IS_DIRECTORY ${BOOST_ROOT}/stage/lib)
|
||||
set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage/lib)
|
||||
elseif(IS_DIRECTORY ${BOOST_ROOT}/lib)
|
||||
set(BOOST_LIBRARYDIR ${BOOST_ROOT}/lib)
|
||||
else()
|
||||
message(WARNING "Did not find expected boost library dir. "
|
||||
"Defaulting to ${BOOST_ROOT}")
|
||||
set(BOOST_LIBRARYDIR ${BOOST_ROOT})
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
message(STATUS "BOOST_ROOT: ${BOOST_ROOT}")
|
||||
message(STATUS "BOOST_LIBRARYDIR: ${BOOST_LIBRARYDIR}")
|
||||
|
||||
# uncomment the following as needed to debug FindBoost issues:
|
||||
#set(Boost_DEBUG ON)
|
||||
|
||||
#[=========================================================[
|
||||
boost dynamic libraries don't trivially support @rpath
|
||||
linking right now (cmake's default), so just force
|
||||
static linking for macos, or if requested on linux by flag
|
||||
#]=========================================================]
|
||||
if(static)
|
||||
set(Boost_USE_STATIC_LIBS ON)
|
||||
endif()
|
||||
set(Boost_USE_MULTITHREADED ON)
|
||||
if(static AND NOT APPLE)
|
||||
set(Boost_USE_STATIC_RUNTIME ON)
|
||||
else()
|
||||
set(Boost_USE_STATIC_RUNTIME OFF)
|
||||
endif()
|
||||
# TBD:
|
||||
# Boost_USE_DEBUG_RUNTIME: When ON, uses Boost libraries linked against the
|
||||
find_package(Boost 1.86 REQUIRED
|
||||
find_package(Boost 1.83 REQUIRED
|
||||
COMPONENTS
|
||||
chrono
|
||||
container
|
||||
@@ -58,12 +10,12 @@ find_package(Boost 1.86 REQUIRED
|
||||
program_options
|
||||
regex
|
||||
system
|
||||
iostreams
|
||||
thread)
|
||||
thread
|
||||
)
|
||||
|
||||
add_library(ripple_boost INTERFACE)
|
||||
add_library(Ripple::boost ALIAS ripple_boost)
|
||||
if(is_xcode)
|
||||
if(XCODE)
|
||||
target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
||||
target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
|
||||
else()
|
||||
@@ -83,6 +35,7 @@ target_link_libraries(ripple_boost
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
Boost::system
|
||||
Boost::iostreams
|
||||
Boost::thread)
|
||||
if(Boost_COMPILER)
|
||||
target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: ed25519-donna
|
||||
#]===================================================================]
|
||||
|
||||
add_library (ed25519-donna STATIC
|
||||
external/ed25519-donna/ed25519.c)
|
||||
target_include_directories (ed25519-donna
|
||||
PUBLIC
|
||||
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/external>
|
||||
$<INSTALL_INTERFACE:include>
|
||||
PRIVATE
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/external/ed25519-donna)
|
||||
#[=========================================================[
|
||||
NOTE for macos:
|
||||
https://github.com/floodyberry/ed25519-donna/issues/29
|
||||
our source for ed25519-donna-portable.h has been
|
||||
patched to workaround this.
|
||||
#]=========================================================]
|
||||
target_link_libraries (ed25519-donna PUBLIC OpenSSL::SSL)
|
||||
add_library (NIH::ed25519-donna ALIAS ed25519-donna)
|
||||
target_link_libraries (ripple_libs INTERFACE NIH::ed25519-donna)
|
||||
#[===========================[
|
||||
headers installation
|
||||
#]===========================]
|
||||
install (
|
||||
FILES
|
||||
external/ed25519-donna/ed25519.h
|
||||
DESTINATION include/ed25519-donna)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,47 +0,0 @@
|
||||
# - Try to find jemalloc
|
||||
# Once done this will define
|
||||
# JEMALLOC_FOUND - System has jemalloc
|
||||
# JEMALLOC_INCLUDE_DIRS - The jemalloc include directories
|
||||
# JEMALLOC_LIBRARIES - The libraries needed to use jemalloc
|
||||
|
||||
if(NOT USE_BUNDLED_JEMALLOC)
|
||||
find_package(PkgConfig)
|
||||
if (PKG_CONFIG_FOUND)
|
||||
pkg_check_modules(PC_JEMALLOC QUIET jemalloc)
|
||||
endif()
|
||||
else()
|
||||
set(PC_JEMALLOC_INCLUDEDIR)
|
||||
set(PC_JEMALLOC_INCLUDE_DIRS)
|
||||
set(PC_JEMALLOC_LIBDIR)
|
||||
set(PC_JEMALLOC_LIBRARY_DIRS)
|
||||
set(LIMIT_SEARCH NO_DEFAULT_PATH)
|
||||
endif()
|
||||
|
||||
set(JEMALLOC_DEFINITIONS ${PC_JEMALLOC_CFLAGS_OTHER})
|
||||
|
||||
find_path(JEMALLOC_INCLUDE_DIR jemalloc/jemalloc.h
|
||||
PATHS ${PC_JEMALLOC_INCLUDEDIR} ${PC_JEMALLOC_INCLUDE_DIRS}
|
||||
${LIMIT_SEARCH})
|
||||
|
||||
# If we're asked to use static linkage, add libjemalloc.a as a preferred library name.
|
||||
if(JEMALLOC_USE_STATIC)
|
||||
list(APPEND JEMALLOC_NAMES
|
||||
"${CMAKE_STATIC_LIBRARY_PREFIX}jemalloc${CMAKE_STATIC_LIBRARY_SUFFIX}")
|
||||
endif()
|
||||
|
||||
list(APPEND JEMALLOC_NAMES jemalloc)
|
||||
|
||||
find_library(JEMALLOC_LIBRARY NAMES ${JEMALLOC_NAMES}
|
||||
HINTS ${PC_JEMALLOC_LIBDIR} ${PC_JEMALLOC_LIBRARY_DIRS}
|
||||
${LIMIT_SEARCH})
|
||||
|
||||
set(JEMALLOC_LIBRARIES ${JEMALLOC_LIBRARY})
|
||||
set(JEMALLOC_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
# handle the QUIETLY and REQUIRED arguments and set JEMALLOC_FOUND to TRUE
|
||||
# if all listed variables are TRUE
|
||||
find_package_handle_standard_args(JeMalloc DEFAULT_MSG
|
||||
JEMALLOC_LIBRARY JEMALLOC_INCLUDE_DIR)
|
||||
|
||||
mark_as_advanced(JEMALLOC_INCLUDE_DIR JEMALLOC_LIBRARY)
|
||||
@@ -1,22 +0,0 @@
|
||||
find_package (PkgConfig REQUIRED)
|
||||
pkg_search_module (libarchive_PC QUIET libarchive>=3.4.3)
|
||||
|
||||
if(static)
|
||||
set(LIBARCHIVE_LIB libarchive.a)
|
||||
else()
|
||||
set(LIBARCHIVE_LIB archive)
|
||||
endif()
|
||||
|
||||
find_library (archive
|
||||
NAMES ${LIBARCHIVE_LIB}
|
||||
HINTS
|
||||
${libarchive_PC_LIBDIR}
|
||||
${libarchive_PC_LIBRARY_DIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
|
||||
find_path (LIBARCHIVE_INCLUDE_DIR
|
||||
NAMES archive.h
|
||||
HINTS
|
||||
${libarchive_PC_INCLUDEDIR}
|
||||
${libarchive_PC_INCLUDEDIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
@@ -1,24 +0,0 @@
|
||||
find_package (PkgConfig)
|
||||
if (PKG_CONFIG_FOUND)
|
||||
pkg_search_module (lz4_PC QUIET liblz4>=1.9)
|
||||
endif ()
|
||||
|
||||
if(static)
|
||||
set(LZ4_LIB liblz4.a)
|
||||
else()
|
||||
set(LZ4_LIB lz4.so)
|
||||
endif()
|
||||
|
||||
find_library (lz4
|
||||
NAMES ${LZ4_LIB}
|
||||
HINTS
|
||||
${lz4_PC_LIBDIR}
|
||||
${lz4_PC_LIBRARY_DIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
|
||||
find_path (LZ4_INCLUDE_DIR
|
||||
NAMES lz4.h
|
||||
HINTS
|
||||
${lz4_PC_INCLUDEDIR}
|
||||
${lz4_PC_INCLUDEDIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
@@ -1,24 +0,0 @@
|
||||
find_package (PkgConfig)
|
||||
if (PKG_CONFIG_FOUND)
|
||||
pkg_search_module (secp256k1_PC QUIET libsecp256k1)
|
||||
endif ()
|
||||
|
||||
if(static)
|
||||
set(SECP256K1_LIB libsecp256k1.a)
|
||||
else()
|
||||
set(SECP256K1_LIB secp256k1)
|
||||
endif()
|
||||
|
||||
find_library(secp256k1
|
||||
NAMES ${SECP256K1_LIB}
|
||||
HINTS
|
||||
${secp256k1_PC_LIBDIR}
|
||||
${secp256k1_PC_LIBRARY_PATHS}
|
||||
NO_DEFAULT_PATH)
|
||||
|
||||
find_path (SECP256K1_INCLUDE_DIR
|
||||
NAMES secp256k1.h
|
||||
HINTS
|
||||
${secp256k1_PC_INCLUDEDIR}
|
||||
${secp256k1_PC_INCLUDEDIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
@@ -1,24 +0,0 @@
|
||||
find_package (PkgConfig)
|
||||
if (PKG_CONFIG_FOUND)
|
||||
pkg_search_module (snappy_PC QUIET snappy>=1.1.7)
|
||||
endif ()
|
||||
|
||||
if(static)
|
||||
set(SNAPPY_LIB libsnappy.a)
|
||||
else()
|
||||
set(SNAPPY_LIB libsnappy.so)
|
||||
endif()
|
||||
|
||||
find_library (snappy
|
||||
NAMES ${SNAPPY_LIB}
|
||||
HINTS
|
||||
${snappy_PC_LIBDIR}
|
||||
${snappy_PC_LIBRARY_DIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
|
||||
find_path (SNAPPY_INCLUDE_DIR
|
||||
NAMES snappy.h
|
||||
HINTS
|
||||
${snappy_PC_INCLUDEDIR}
|
||||
${snappy_PC_INCLUDEDIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
@@ -1,19 +0,0 @@
|
||||
find_package (PkgConfig)
|
||||
if (PKG_CONFIG_FOUND)
|
||||
# TBD - currently no soci pkgconfig
|
||||
#pkg_search_module (soci_PC QUIET libsoci_core>=3.2)
|
||||
endif ()
|
||||
|
||||
if(static)
|
||||
set(SOCI_LIB libsoci.a)
|
||||
else()
|
||||
set(SOCI_LIB libsoci_core.so)
|
||||
endif()
|
||||
|
||||
find_library (soci
|
||||
NAMES ${SOCI_LIB})
|
||||
|
||||
find_path (SOCI_INCLUDE_DIR
|
||||
NAMES soci/soci.h)
|
||||
|
||||
message("SOCI FOUND AT: ${SOCI_LIB}")
|
||||
@@ -1,24 +0,0 @@
|
||||
find_package (PkgConfig)
|
||||
if (PKG_CONFIG_FOUND)
|
||||
pkg_search_module (sqlite_PC QUIET sqlite3>=3.26.0)
|
||||
endif ()
|
||||
|
||||
if(static)
|
||||
set(SQLITE_LIB libsqlite3.a)
|
||||
else()
|
||||
set(SQLITE_LIB sqlite3.so)
|
||||
endif()
|
||||
|
||||
find_library (sqlite3
|
||||
NAMES ${SQLITE_LIB}
|
||||
HINTS
|
||||
${sqlite_PC_LIBDIR}
|
||||
${sqlite_PC_LIBRARY_DIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
|
||||
find_path (SQLITE_INCLUDE_DIR
|
||||
NAMES sqlite3.h
|
||||
HINTS
|
||||
${sqlite_PC_INCLUDEDIR}
|
||||
${sqlite_PC_INCLUDEDIRS}
|
||||
NO_DEFAULT_PATH)
|
||||
@@ -1,163 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: libarchive
|
||||
#]===================================================================]
|
||||
|
||||
option (local_libarchive "use local build of libarchive." OFF)
|
||||
add_library (archive_lib UNKNOWN IMPORTED GLOBAL)
|
||||
|
||||
if (NOT local_libarchive)
|
||||
if (NOT WIN32)
|
||||
find_package(libarchive_pc REQUIRED)
|
||||
endif ()
|
||||
if (archive)
|
||||
message (STATUS "Found libarchive using pkg-config. Using ${archive}.")
|
||||
set_target_properties (archive_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${archive}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${archive}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${LIBARCHIVE_INCLUDE_DIR})
|
||||
# pkg-config can return extra info for static lib linking
|
||||
# this is probably needed/useful generally, but apply
|
||||
# to APPLE for now (mostly for homebrew)
|
||||
if (APPLE AND static AND libarchive_PC_STATIC_LIBRARIES)
|
||||
message(STATUS "NOTE: libarchive static libs: ${libarchive_PC_STATIC_LIBRARIES}")
|
||||
# also, APPLE seems to need iconv...maybe linux does too (TBD)
|
||||
target_link_libraries (archive_lib
|
||||
INTERFACE iconv ${libarchive_PC_STATIC_LIBRARIES})
|
||||
endif ()
|
||||
else ()
|
||||
## now try searching using the minimal find module that cmake provides
|
||||
find_package(LibArchive 3.4.3 QUIET)
|
||||
if (LibArchive_FOUND)
|
||||
if (static)
|
||||
# find module doesn't find static libs currently, so we re-search
|
||||
get_filename_component(_loc ${LibArchive_LIBRARY} DIRECTORY)
|
||||
find_library(_la_static
|
||||
NAMES libarchive.a archive_static.lib archive.lib
|
||||
PATHS ${_loc})
|
||||
if (_la_static)
|
||||
set (_la_lib ${_la_static})
|
||||
else ()
|
||||
message (WARNING "unable to find libarchive static lib - switching to local build")
|
||||
set (local_libarchive ON CACHE BOOL "" FORCE)
|
||||
endif ()
|
||||
else ()
|
||||
set (_la_lib ${LibArchive_LIBRARY})
|
||||
endif ()
|
||||
if (NOT local_libarchive)
|
||||
message (STATUS "Found libarchive using module/config. Using ${_la_lib}.")
|
||||
set_target_properties (archive_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${_la_lib}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${_la_lib}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${LibArchive_INCLUDE_DIRS})
|
||||
endif ()
|
||||
else ()
|
||||
set (local_libarchive ON CACHE BOOL "" FORCE)
|
||||
endif ()
|
||||
endif ()
|
||||
endif()
|
||||
|
||||
if (local_libarchive)
|
||||
set (lib_post "")
|
||||
if (MSVC)
|
||||
set (lib_post "_static")
|
||||
endif ()
|
||||
ExternalProject_Add (libarchive
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/libarchive/libarchive.git
|
||||
GIT_TAG v3.4.3
|
||||
CMAKE_ARGS
|
||||
# passing the compiler seems to be needed for windows CI, sadly
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
-DENABLE_LZ4=ON
|
||||
-ULZ4_*
|
||||
-DLZ4_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:lz4_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
|
||||
# because we are building a static lib, this lz4 library doesn't
|
||||
# actually matter since you can't generally link static libs to other static
|
||||
# libs. The include files are needed, but the library itself is not (until
|
||||
# we link our application, at which point we use the lz4 we built above).
|
||||
# nonetheless, we need to provide a library to libarchive else it will
|
||||
# NOT include lz4 support when configuring
|
||||
-DLZ4_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_RELEASE>>
|
||||
-DENABLE_WERROR=OFF
|
||||
-DENABLE_TAR=OFF
|
||||
-DENABLE_TAR_SHARED=OFF
|
||||
-DENABLE_INSTALL=ON
|
||||
-DENABLE_NETTLE=OFF
|
||||
-DENABLE_OPENSSL=OFF
|
||||
-DENABLE_LZO=OFF
|
||||
-DENABLE_LZMA=OFF
|
||||
-DENABLE_ZLIB=OFF
|
||||
-DENABLE_BZip2=OFF
|
||||
-DENABLE_LIBXML2=OFF
|
||||
-DENABLE_EXPAT=OFF
|
||||
-DENABLE_PCREPOSIX=OFF
|
||||
-DENABLE_LibGCC=OFF
|
||||
-DENABLE_CNG=OFF
|
||||
-DENABLE_CPIO=OFF
|
||||
-DENABLE_CPIO_SHARED=OFF
|
||||
-DENABLE_CAT=OFF
|
||||
-DENABLE_CAT_SHARED=OFF
|
||||
-DENABLE_XATTR=OFF
|
||||
-DENABLE_ACL=OFF
|
||||
-DENABLE_ICONV=OFF
|
||||
-DENABLE_TEST=OFF
|
||||
-DENABLE_COVERAGE=OFF
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
|
||||
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
|
||||
"-DCMAKE_C_FLAGS_RELEASE=-MT"
|
||||
>
|
||||
LIST_SEPARATOR ::
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--target archive_static
|
||||
--parallel ${ep_procs}
|
||||
$<$<BOOL:${is_multiconfig}>:
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E copy
|
||||
<BINARY_DIR>/libarchive/$<CONFIG>/${ep_lib_prefix}archive${lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>/libarchive
|
||||
>
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
DEPENDS lz4_lib
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/libarchive/${ep_lib_prefix}archive${lib_post}${ep_lib_suffix}
|
||||
<BINARY_DIR>/libarchive/${ep_lib_prefix}archive${lib_post}_d${ep_lib_suffix}
|
||||
)
|
||||
ExternalProject_Get_Property (libarchive BINARY_DIR)
|
||||
ExternalProject_Get_Property (libarchive SOURCE_DIR)
|
||||
if (CMAKE_VERBOSE_MAKEFILE)
|
||||
print_ep_logs (libarchive)
|
||||
endif ()
|
||||
file (MAKE_DIRECTORY ${SOURCE_DIR}/libarchive)
|
||||
set_target_properties (archive_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/libarchive/${ep_lib_prefix}archive${lib_post}_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/libarchive/${ep_lib_prefix}archive${lib_post}${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR}/libarchive
|
||||
INTERFACE_COMPILE_DEFINITIONS
|
||||
LIBARCHIVE_STATIC)
|
||||
endif()
|
||||
|
||||
add_dependencies (archive_lib libarchive)
|
||||
target_link_libraries (archive_lib INTERFACE lz4_lib)
|
||||
target_link_libraries (ripple_libs INTERFACE archive_lib)
|
||||
exclude_if_included (libarchive)
|
||||
exclude_if_included (archive_lib)
|
||||
@@ -1,79 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: lz4
|
||||
#]===================================================================]
|
||||
|
||||
add_library (lz4_lib STATIC IMPORTED GLOBAL)
|
||||
|
||||
if (NOT WIN32)
|
||||
find_package(lz4)
|
||||
endif()
|
||||
|
||||
if(lz4)
|
||||
set_target_properties (lz4_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${lz4}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${lz4}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${LZ4_INCLUDE_DIR})
|
||||
|
||||
else()
|
||||
ExternalProject_Add (lz4
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/lz4/lz4.git
|
||||
GIT_TAG v1.9.2
|
||||
SOURCE_SUBDIR contrib/cmake_unofficial
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
-DBUILD_STATIC_LIBS=ON
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
|
||||
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
|
||||
"-DCMAKE_C_FLAGS_RELEASE=-MT"
|
||||
>
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--target lz4_static
|
||||
--parallel ${ep_procs}
|
||||
$<$<BOOL:${is_multiconfig}>:
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E copy
|
||||
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}lz4$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>
|
||||
>
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/${ep_lib_prefix}lz4${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}lz4_d${ep_lib_suffix}
|
||||
)
|
||||
ExternalProject_Get_Property (lz4 BINARY_DIR)
|
||||
ExternalProject_Get_Property (lz4 SOURCE_DIR)
|
||||
|
||||
file (MAKE_DIRECTORY ${SOURCE_DIR}/lz4)
|
||||
set_target_properties (lz4_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/${ep_lib_prefix}lz4_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/${ep_lib_prefix}lz4${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR}/lib)
|
||||
|
||||
if (CMAKE_VERBOSE_MAKEFILE)
|
||||
print_ep_logs (lz4)
|
||||
endif ()
|
||||
add_dependencies (lz4_lib lz4)
|
||||
target_link_libraries (ripple_libs INTERFACE lz4_lib)
|
||||
exclude_if_included (lz4)
|
||||
endif()
|
||||
|
||||
exclude_if_included (lz4_lib)
|
||||
@@ -1,31 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: nudb
|
||||
|
||||
NuDB is header-only, thus is an INTERFACE lib in CMake.
|
||||
TODO: move the library definition into NuDB repo and add
|
||||
proper targets and export/install
|
||||
#]===================================================================]
|
||||
|
||||
if (is_root_project) # NuDB not needed in the case of xrpl.libpb inclusion build
|
||||
add_library (nudb INTERFACE)
|
||||
FetchContent_Declare(
|
||||
nudb_src
|
||||
GIT_REPOSITORY https://github.com/CPPAlliance/NuDB.git
|
||||
GIT_TAG 2.0.5
|
||||
)
|
||||
FetchContent_GetProperties(nudb_src)
|
||||
if(NOT nudb_src_POPULATED)
|
||||
message (STATUS "Pausing to download NuDB...")
|
||||
FetchContent_Populate(nudb_src)
|
||||
endif()
|
||||
|
||||
file(TO_CMAKE_PATH "${nudb_src_SOURCE_DIR}" nudb_src_SOURCE_DIR)
|
||||
# specify as system includes so as to avoid warnings
|
||||
target_include_directories (nudb SYSTEM INTERFACE ${nudb_src_SOURCE_DIR}/include)
|
||||
target_link_libraries (nudb
|
||||
INTERFACE
|
||||
Boost::thread
|
||||
Boost::system)
|
||||
add_library (NIH::nudb ALIAS nudb)
|
||||
target_link_libraries (ripple_libs INTERFACE NIH::nudb)
|
||||
endif ()
|
||||
@@ -1,48 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: openssl
|
||||
#]===================================================================]
|
||||
|
||||
#[===============================================[
|
||||
OPENSSL_ROOT_DIR is the only variable that
|
||||
FindOpenSSL honors for locating, so convert any
|
||||
OPENSSL_ROOT vars to this
|
||||
#]===============================================]
|
||||
if (NOT DEFINED OPENSSL_ROOT_DIR)
|
||||
if (DEFINED ENV{OPENSSL_ROOT})
|
||||
set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
|
||||
elseif (HOMEBREW)
|
||||
execute_process (COMMAND ${HOMEBREW} --prefix openssl
|
||||
OUTPUT_VARIABLE OPENSSL_ROOT_DIR
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
endif ()
|
||||
file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
|
||||
endif ()
|
||||
|
||||
if (static)
|
||||
set (OPENSSL_USE_STATIC_LIBS ON)
|
||||
endif ()
|
||||
set (OPENSSL_MSVC_STATIC_RT ON)
|
||||
find_package (OpenSSL 1.1.1 REQUIRED)
|
||||
target_link_libraries (ripple_libs
|
||||
INTERFACE
|
||||
OpenSSL::SSL
|
||||
OpenSSL::Crypto)
|
||||
# disable SSLv2...this can also be done when building/configuring OpenSSL
|
||||
set_target_properties(OpenSSL::SSL PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2)
|
||||
#[=========================================================[
|
||||
https://gitlab.kitware.com/cmake/cmake/issues/16885
|
||||
depending on how openssl is built, it might depend
|
||||
on zlib. In fact, the openssl find package should
|
||||
figure this out for us, but it does not currently...
|
||||
so let's add zlib ourselves to the lib list
|
||||
TODO: investigate linking to static zlib for static
|
||||
build option
|
||||
#]=========================================================]
|
||||
find_package (ZLIB)
|
||||
set (has_zlib FALSE)
|
||||
if (TARGET ZLIB::ZLIB)
|
||||
set_target_properties(OpenSSL::Crypto PROPERTIES
|
||||
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
|
||||
set (has_zlib TRUE)
|
||||
endif ()
|
||||
@@ -1,70 +0,0 @@
|
||||
if(reporting)
|
||||
find_package(PostgreSQL)
|
||||
if(NOT PostgreSQL_FOUND)
|
||||
message("find_package did not find postgres")
|
||||
find_library(postgres NAMES pq libpq libpq-dev pq-dev postgresql-devel)
|
||||
find_path(libpq-fe NAMES libpq-fe.h PATH_SUFFIXES postgresql pgsql include)
|
||||
|
||||
if(NOT libpq-fe_FOUND OR NOT postgres_FOUND)
|
||||
message("No system installed Postgres found. Will build")
|
||||
add_library(postgres SHARED IMPORTED GLOBAL)
|
||||
add_library(pgport SHARED IMPORTED GLOBAL)
|
||||
add_library(pgcommon SHARED IMPORTED GLOBAL)
|
||||
ExternalProject_Add(postgres_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/postgres/postgres.git
|
||||
GIT_TAG REL_14_5
|
||||
CONFIGURE_COMMAND ./configure --without-readline > /dev/null
|
||||
BUILD_COMMAND ${CMAKE_COMMAND} -E env --unset=MAKELEVEL make
|
||||
UPDATE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/src/interfaces/libpq/${ep_lib_prefix}pq.a
|
||||
<BINARY_DIR>/src/common/${ep_lib_prefix}pgcommon.a
|
||||
<BINARY_DIR>/src/port/${ep_lib_prefix}pgport.a
|
||||
LOG_BUILD TRUE
|
||||
)
|
||||
ExternalProject_Get_Property (postgres_src SOURCE_DIR)
|
||||
ExternalProject_Get_Property (postgres_src BINARY_DIR)
|
||||
|
||||
set (postgres_src_SOURCE_DIR "${SOURCE_DIR}")
|
||||
file (MAKE_DIRECTORY ${postgres_src_SOURCE_DIR})
|
||||
list(APPEND INCLUDE_DIRS
|
||||
${SOURCE_DIR}/src/include
|
||||
${SOURCE_DIR}/src/interfaces/libpq
|
||||
)
|
||||
set_target_properties(postgres PROPERTIES
|
||||
IMPORTED_LOCATION
|
||||
${BINARY_DIR}/src/interfaces/libpq/${ep_lib_prefix}pq.a
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
"${INCLUDE_DIRS}"
|
||||
)
|
||||
set_target_properties(pgcommon PROPERTIES
|
||||
IMPORTED_LOCATION
|
||||
${BINARY_DIR}/src/common/${ep_lib_prefix}pgcommon.a
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
"${INCLUDE_DIRS}"
|
||||
)
|
||||
set_target_properties(pgport PROPERTIES
|
||||
IMPORTED_LOCATION
|
||||
${BINARY_DIR}/src/port/${ep_lib_prefix}pgport.a
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
"${INCLUDE_DIRS}"
|
||||
)
|
||||
add_dependencies(postgres postgres_src)
|
||||
add_dependencies(pgcommon postgres_src)
|
||||
add_dependencies(pgport postgres_src)
|
||||
file(TO_CMAKE_PATH "${postgres_src_SOURCE_DIR}" postgres_src_SOURCE_DIR)
|
||||
target_link_libraries(ripple_libs INTERFACE postgres pgcommon pgport)
|
||||
else()
|
||||
message("Found system installed Postgres via find_libary")
|
||||
target_include_directories(ripple_libs INTERFACE ${libpq-fe})
|
||||
target_link_libraries(ripple_libs INTERFACE ${postgres})
|
||||
endif()
|
||||
else()
|
||||
message("Found system installed Postgres via find_package")
|
||||
target_include_directories(ripple_libs INTERFACE ${PostgreSQL_INCLUDE_DIRS})
|
||||
target_link_libraries(ripple_libs INTERFACE ${PostgreSQL_LIBRARIES})
|
||||
endif()
|
||||
endif()
|
||||
@@ -1,156 +0,0 @@
|
||||
#[===================================================================[
|
||||
import protobuf (lib and compiler) and create a lib
|
||||
from our proto message definitions. If the system protobuf
|
||||
is not found, fallback on EP to download and build a version
|
||||
from official source.
|
||||
#]===================================================================]
|
||||
|
||||
if (static)
|
||||
set (Protobuf_USE_STATIC_LIBS ON)
|
||||
endif ()
|
||||
find_package (Protobuf 3.8)
|
||||
if (is_multiconfig)
|
||||
set(protobuf_protoc_lib ${Protobuf_PROTOC_LIBRARIES})
|
||||
else ()
|
||||
string(TOUPPER ${CMAKE_BUILD_TYPE} upper_cmake_build_type)
|
||||
set(protobuf_protoc_lib ${Protobuf_PROTOC_LIBRARY_${upper_cmake_build_type}})
|
||||
endif ()
|
||||
if (local_protobuf OR NOT (Protobuf_FOUND AND Protobuf_PROTOC_EXECUTABLE AND protobuf_protoc_lib))
|
||||
include (GNUInstallDirs)
|
||||
message (STATUS "using local protobuf build.")
|
||||
set(protobuf_reqs Protobuf_PROTOC_EXECUTABLE protobuf_protoc_lib)
|
||||
foreach(lib ${protobuf_reqs})
|
||||
if(NOT ${lib})
|
||||
message(STATUS "Couldn't find ${lib}")
|
||||
endif()
|
||||
endforeach()
|
||||
if (WIN32)
|
||||
# protobuf prepends lib even on windows
|
||||
set (pbuf_lib_pre "lib")
|
||||
else ()
|
||||
set (pbuf_lib_pre ${ep_lib_prefix})
|
||||
endif ()
|
||||
# for the external project build of protobuf, we currently ignore the
|
||||
# static option and always build static libs here. This is consistent
|
||||
# with our other EP builds. Dynamic libs in an EP would add complexity
|
||||
# because we'd need to get them into the runtime path, and probably
|
||||
# install them.
|
||||
ExternalProject_Add (protobuf_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/protocolbuffers/protobuf.git
|
||||
GIT_TAG v3.8.0
|
||||
SOURCE_SUBDIR cmake
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
|
||||
-Dprotobuf_BUILD_TESTS=OFF
|
||||
-Dprotobuf_BUILD_EXAMPLES=OFF
|
||||
-Dprotobuf_BUILD_PROTOC_BINARIES=ON
|
||||
-Dprotobuf_MSVC_STATIC_RUNTIME=ON
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-Dprotobuf_BUILD_SHARED_LIBS=OFF
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
-Dprotobuf_DEBUG_POSTFIX=_d
|
||||
-Dprotobuf_WITH_ZLIB=$<IF:$<BOOL:${has_zlib}>,ON,OFF>
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON}>
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
|
||||
>
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--parallel ${ep_procs}
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND
|
||||
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf${ep_lib_suffix}
|
||||
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf_d${ep_lib_suffix}
|
||||
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc${ep_lib_suffix}
|
||||
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc_d${ep_lib_suffix}
|
||||
<BINARY_DIR>/_installed_/bin/protoc${CMAKE_EXECUTABLE_SUFFIX}
|
||||
)
|
||||
ExternalProject_Get_Property (protobuf_src BINARY_DIR)
|
||||
ExternalProject_Get_Property (protobuf_src SOURCE_DIR)
|
||||
if (CMAKE_VERBOSE_MAKEFILE)
|
||||
print_ep_logs (protobuf_src)
|
||||
endif ()
|
||||
exclude_if_included (protobuf_src)
|
||||
|
||||
if (NOT TARGET protobuf::libprotobuf)
|
||||
add_library (protobuf::libprotobuf STATIC IMPORTED GLOBAL)
|
||||
endif ()
|
||||
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
|
||||
set_target_properties (protobuf::libprotobuf PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${BINARY_DIR}/_installed_/include)
|
||||
add_dependencies (protobuf::libprotobuf protobuf_src)
|
||||
exclude_if_included (protobuf::libprotobuf)
|
||||
|
||||
if (NOT TARGET protobuf::libprotoc)
|
||||
add_library (protobuf::libprotoc STATIC IMPORTED GLOBAL)
|
||||
endif ()
|
||||
set_target_properties (protobuf::libprotoc PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${BINARY_DIR}/_installed_/include)
|
||||
add_dependencies (protobuf::libprotoc protobuf_src)
|
||||
exclude_if_included (protobuf::libprotoc)
|
||||
|
||||
if (NOT TARGET protobuf::protoc)
|
||||
add_executable (protobuf::protoc IMPORTED)
|
||||
exclude_if_included (protobuf::protoc)
|
||||
endif ()
|
||||
set_target_properties (protobuf::protoc PROPERTIES
|
||||
IMPORTED_LOCATION "${BINARY_DIR}/_installed_/bin/protoc${CMAKE_EXECUTABLE_SUFFIX}")
|
||||
add_dependencies (protobuf::protoc protobuf_src)
|
||||
else ()
|
||||
if (NOT TARGET protobuf::protoc)
|
||||
if (EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
|
||||
add_executable (protobuf::protoc IMPORTED)
|
||||
set_target_properties (protobuf::protoc PROPERTIES
|
||||
IMPORTED_LOCATION "${Protobuf_PROTOC_EXECUTABLE}")
|
||||
else ()
|
||||
message (FATAL_ERROR "Protobuf import failed")
|
||||
endif ()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
set(output_dir ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
file(MAKE_DIRECTORY ${output_dir})
|
||||
set(ccbd ${CMAKE_CURRENT_BINARY_DIR})
|
||||
set(CMAKE_CURRENT_BINARY_DIR ${output_dir})
|
||||
protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS src/ripple/proto/ripple.proto)
|
||||
set(CMAKE_CURRENT_BINARY_DIR ${ccbd})
|
||||
|
||||
target_include_directories(xrpl.libpb SYSTEM PUBLIC
|
||||
# The generated implementation imports the header relative to the output
|
||||
# directory.
|
||||
$<BUILD_INTERFACE:${output_dir}>
|
||||
$<BUILD_INTERFACE:${output_dir}/src>
|
||||
)
|
||||
target_sources(xrpl.libpb PRIVATE ${output_dir}/src/ripple/proto/ripple.pb.cc)
|
||||
install(
|
||||
FILES ${output_dir}/src/ripple/proto/ripple.pb.h
|
||||
DESTINATION include/ripple/proto)
|
||||
target_link_libraries(xrpl.libpb PUBLIC protobuf::libprotobuf)
|
||||
target_compile_options(xrpl.libpb
|
||||
PUBLIC
|
||||
$<$<BOOL:${is_xcode}>:
|
||||
--system-header-prefix="google/protobuf"
|
||||
-Wno-deprecated-dynamic-exception-spec
|
||||
>
|
||||
)
|
||||
@@ -1,177 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: rocksdb
|
||||
#]===================================================================]
|
||||
|
||||
add_library (rocksdb_lib UNKNOWN IMPORTED GLOBAL)
|
||||
set_target_properties (rocksdb_lib
|
||||
PROPERTIES INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1)
|
||||
|
||||
option (local_rocksdb "use local build of rocksdb." OFF)
|
||||
if (NOT local_rocksdb)
|
||||
find_package (RocksDB 6.27 QUIET CONFIG)
|
||||
if (TARGET RocksDB::rocksdb)
|
||||
message (STATUS "Found RocksDB using config.")
|
||||
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION_DEBUG)
|
||||
if (_rockslib_l)
|
||||
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_DEBUG ${_rockslib_l})
|
||||
endif ()
|
||||
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION_RELEASE)
|
||||
if (_rockslib_l)
|
||||
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_RELEASE ${_rockslib_l})
|
||||
endif ()
|
||||
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION)
|
||||
if (_rockslib_l)
|
||||
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION ${_rockslib_l})
|
||||
endif ()
|
||||
get_target_property (_rockslib_i RocksDB::rocksdb INTERFACE_INCLUDE_DIRECTORIES)
|
||||
if (_rockslib_i)
|
||||
set_target_properties (rocksdb_lib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${_rockslib_i})
|
||||
endif ()
|
||||
target_link_libraries (ripple_libs INTERFACE RocksDB::rocksdb)
|
||||
else ()
|
||||
# using a find module with rocksdb is difficult because
|
||||
# you have no idea how it was configured (transitive dependencies).
|
||||
# the code below will generally find rocksdb using the module, but
|
||||
# will then result in linker errors for static linkage since the
|
||||
# transitive dependencies are unknown. force local build here for now, but leave the code as
|
||||
# a placeholder for future investigation.
|
||||
if (static)
|
||||
set (local_rocksdb ON CACHE BOOL "" FORCE)
|
||||
# TBD if there is some way to extract transitive deps..then:
|
||||
#set (RocksDB_USE_STATIC ON)
|
||||
else ()
|
||||
find_package (RocksDB 6.27 MODULE)
|
||||
if (ROCKSDB_FOUND)
|
||||
if (RocksDB_LIBRARY_DEBUG)
|
||||
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_DEBUG ${RocksDB_LIBRARY_DEBUG})
|
||||
endif ()
|
||||
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_RELEASE ${RocksDB_LIBRARIES})
|
||||
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION ${RocksDB_LIBRARIES})
|
||||
set_target_properties (rocksdb_lib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${RocksDB_INCLUDE_DIRS})
|
||||
else ()
|
||||
set (local_rocksdb ON CACHE BOOL "" FORCE)
|
||||
endif ()
|
||||
endif ()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if (local_rocksdb)
|
||||
message (STATUS "Using local build of RocksDB.")
|
||||
ExternalProject_Add (rocksdb
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/facebook/rocksdb.git
|
||||
GIT_TAG v6.27.3
|
||||
PATCH_COMMAND
|
||||
# only used by windows build
|
||||
${CMAKE_COMMAND} -E copy_if_different
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/cmake/rocks_thirdparty.inc
|
||||
<SOURCE_DIR>/thirdparty.inc
|
||||
COMMAND
|
||||
# fixup their build version file to keep the values
|
||||
# from changing always
|
||||
${CMAKE_COMMAND} -E copy_if_different
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/cmake/rocksdb_build_version.cc.in
|
||||
<SOURCE_DIR>/util/build_version.cc.in
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON}>
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DWITH_JEMALLOC=$<IF:$<BOOL:${jemalloc}>,ON,OFF>
|
||||
-DWITH_SNAPPY=ON
|
||||
-DWITH_LZ4=ON
|
||||
-DWITH_ZLIB=OFF
|
||||
-DUSE_RTTI=ON
|
||||
-DWITH_ZSTD=OFF
|
||||
-DWITH_GFLAGS=OFF
|
||||
-DWITH_BZ2=OFF
|
||||
-ULZ4_*
|
||||
-Ulz4_*
|
||||
-Dlz4_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:lz4_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
|
||||
-Dlz4_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_RELEASE>>
|
||||
-Dlz4_FOUND=ON
|
||||
-USNAPPY_*
|
||||
-Usnappy_*
|
||||
-USnappy_*
|
||||
-Dsnappy_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:snappy_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
|
||||
-Dsnappy_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_RELEASE>>
|
||||
-Dsnappy_FOUND=ON
|
||||
-DSnappy_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:snappy_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
|
||||
-DSnappy_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_RELEASE>>
|
||||
-DSnappy_FOUND=ON
|
||||
-DWITH_MD_LIBRARY=OFF
|
||||
-DWITH_RUNTIME_DEBUG=$<IF:$<CONFIG:Debug>,ON,OFF>
|
||||
-DFAIL_ON_WARNINGS=OFF
|
||||
-DWITH_ASAN=OFF
|
||||
-DWITH_TSAN=OFF
|
||||
-DWITH_UBSAN=OFF
|
||||
-DWITH_NUMA=OFF
|
||||
-DWITH_TBB=OFF
|
||||
-DWITH_WINDOWS_UTF8_FILENAMES=OFF
|
||||
-DWITH_XPRESS=OFF
|
||||
-DPORTABLE=ON
|
||||
-DFORCE_SSE42=OFF
|
||||
-DDISABLE_STALL_NOTIF=OFF
|
||||
-DOPTDBG=ON
|
||||
-DROCKSDB_LITE=OFF
|
||||
-DWITH_FALLOCATE=ON
|
||||
-DWITH_LIBRADOS=OFF
|
||||
-DWITH_JNI=OFF
|
||||
-DROCKSDB_INSTALL_ON_WINDOWS=OFF
|
||||
-DWITH_TESTS=OFF
|
||||
-DWITH_TOOLS=OFF
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -MP /DNDEBUG"
|
||||
>
|
||||
$<$<NOT:$<BOOL:${MSVC}>>:
|
||||
"-DCMAKE_CXX_FLAGS=-DNDEBUG"
|
||||
>
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--parallel ${ep_procs}
|
||||
$<$<BOOL:${is_multiconfig}>:
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E copy
|
||||
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}rocksdb$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>
|
||||
>
|
||||
LIST_SEPARATOR ::
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
DEPENDS snappy_lib lz4_lib
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/${ep_lib_prefix}rocksdb${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}rocksdb_d${ep_lib_suffix}
|
||||
)
|
||||
ExternalProject_Get_Property (rocksdb BINARY_DIR)
|
||||
ExternalProject_Get_Property (rocksdb SOURCE_DIR)
|
||||
if (CMAKE_VERBOSE_MAKEFILE)
|
||||
print_ep_logs (rocksdb)
|
||||
endif ()
|
||||
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
|
||||
set_target_properties (rocksdb_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/${ep_lib_prefix}rocksdb_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/${ep_lib_prefix}rocksdb${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR}/include)
|
||||
add_dependencies (rocksdb_lib rocksdb)
|
||||
exclude_if_included (rocksdb)
|
||||
endif ()
|
||||
|
||||
target_link_libraries (rocksdb_lib
|
||||
INTERFACE
|
||||
snappy_lib
|
||||
lz4_lib
|
||||
$<$<BOOL:${MSVC}>:rpcrt4>)
|
||||
exclude_if_included (rocksdb_lib)
|
||||
target_link_libraries (ripple_libs INTERFACE rocksdb_lib)
|
||||
@@ -1,58 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: secp256k1
|
||||
#]===================================================================]
|
||||
|
||||
add_library (secp256k1_lib STATIC IMPORTED GLOBAL)
|
||||
|
||||
if (NOT WIN32)
|
||||
find_package(secp256k1)
|
||||
endif()
|
||||
|
||||
if(secp256k1)
|
||||
set_target_properties (secp256k1_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${secp256k1}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${secp256k1}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SECP256K1_INCLUDE_DIR})
|
||||
|
||||
add_library (secp256k1 ALIAS secp256k1_lib)
|
||||
add_library (NIH::secp256k1 ALIAS secp256k1_lib)
|
||||
|
||||
else()
|
||||
set(INSTALL_SECP256K1 true)
|
||||
|
||||
add_library (secp256k1 STATIC
|
||||
external/secp256k1/src/secp256k1.c)
|
||||
target_compile_definitions (secp256k1
|
||||
PRIVATE
|
||||
USE_NUM_NONE
|
||||
USE_FIELD_10X26
|
||||
USE_FIELD_INV_BUILTIN
|
||||
USE_SCALAR_8X32
|
||||
USE_SCALAR_INV_BUILTIN)
|
||||
target_include_directories (secp256k1
|
||||
PUBLIC
|
||||
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/external>
|
||||
$<INSTALL_INTERFACE:include>
|
||||
PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/external/secp256k1)
|
||||
target_compile_options (secp256k1
|
||||
PRIVATE
|
||||
$<$<BOOL:${MSVC}>:-wd4319>
|
||||
$<$<NOT:$<BOOL:${MSVC}>>:
|
||||
-Wno-deprecated-declarations
|
||||
-Wno-unused-function
|
||||
>
|
||||
$<$<BOOL:${is_gcc}>:-Wno-nonnull-compare>)
|
||||
target_link_libraries (ripple_libs INTERFACE NIH::secp256k1)
|
||||
#[===========================[
|
||||
headers installation
|
||||
#]===========================]
|
||||
install (
|
||||
FILES
|
||||
external/secp256k1/include/secp256k1.h
|
||||
DESTINATION include/secp256k1/include)
|
||||
|
||||
add_library (NIH::secp256k1 ALIAS secp256k1)
|
||||
endif()
|
||||
@@ -1,77 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: snappy
|
||||
#]===================================================================]
|
||||
|
||||
add_library (snappy_lib STATIC IMPORTED GLOBAL)
|
||||
|
||||
if (NOT WIN32)
|
||||
find_package(snappy)
|
||||
endif()
|
||||
|
||||
if(snappy)
|
||||
set_target_properties (snappy_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${snappy}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${snappy}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SNAPPY_INCLUDE_DIR})
|
||||
|
||||
else()
|
||||
ExternalProject_Add (snappy
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/google/snappy.git
|
||||
GIT_TAG 1.1.7
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DSNAPPY_BUILD_TESTS=OFF
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
|
||||
"-DCMAKE_CXX_FLAGS_DEBUG=-MTd"
|
||||
"-DCMAKE_CXX_FLAGS_RELEASE=-MT"
|
||||
>
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--parallel ${ep_procs}
|
||||
$<$<BOOL:${is_multiconfig}>:
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E copy
|
||||
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}snappy$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>
|
||||
>
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND
|
||||
${CMAKE_COMMAND} -E copy_if_different <BINARY_DIR>/config.h <BINARY_DIR>/snappy-stubs-public.h <SOURCE_DIR>
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/${ep_lib_prefix}snappy${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}snappy_d${ep_lib_suffix}
|
||||
)
|
||||
ExternalProject_Get_Property (snappy BINARY_DIR)
|
||||
ExternalProject_Get_Property (snappy SOURCE_DIR)
|
||||
if (CMAKE_VERBOSE_MAKEFILE)
|
||||
print_ep_logs (snappy)
|
||||
endif ()
|
||||
file (MAKE_DIRECTORY ${SOURCE_DIR}/snappy)
|
||||
set_target_properties (snappy_lib PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/${ep_lib_prefix}snappy_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/${ep_lib_prefix}snappy${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR})
|
||||
endif()
|
||||
|
||||
add_dependencies (snappy_lib snappy)
|
||||
target_link_libraries (ripple_libs INTERFACE snappy_lib)
|
||||
exclude_if_included (snappy)
|
||||
exclude_if_included (snappy_lib)
|
||||
@@ -1,165 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: soci
|
||||
#]===================================================================]
|
||||
|
||||
foreach (_comp core empty sqlite3)
|
||||
add_library ("soci_${_comp}" STATIC IMPORTED GLOBAL)
|
||||
endforeach ()
|
||||
|
||||
if (NOT WIN32)
|
||||
find_package(soci)
|
||||
endif()
|
||||
|
||||
if (soci)
|
||||
foreach (_comp core empty sqlite3)
|
||||
set_target_properties ("soci_${_comp}" PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${soci}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${soci}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOCI_INCLUDE_DIR})
|
||||
endforeach ()
|
||||
|
||||
else()
|
||||
set (soci_lib_pre ${ep_lib_prefix})
|
||||
set (soci_lib_post "")
|
||||
if (WIN32)
|
||||
# for some reason soci on windows still prepends lib (non-standard)
|
||||
set (soci_lib_pre lib)
|
||||
# this version in the name might change if/when we change versions of soci
|
||||
set (soci_lib_post "_4_0")
|
||||
endif ()
|
||||
get_target_property (_boost_incs Boost::date_time INTERFACE_INCLUDE_DIRECTORIES)
|
||||
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION)
|
||||
if (NOT _boost_dt)
|
||||
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION_RELEASE)
|
||||
endif ()
|
||||
if (NOT _boost_dt)
|
||||
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION_DEBUG)
|
||||
endif ()
|
||||
|
||||
ExternalProject_Add (soci
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/SOCI/soci.git
|
||||
GIT_TAG 04e1870294918d20761736743bb6136314c42dd5
|
||||
# We had an issue with soci integer range checking for boost::optional
|
||||
# and needed to remove the exception that SOCI throws in this case.
|
||||
# This is *probably* a bug in SOCI, but has never been investigated more
|
||||
# nor reported to the maintainers.
|
||||
# This cmake script comments out the lines in question.
|
||||
# This patch process is likely fragile and should be reviewed carefully
|
||||
# whenever we update the GIT_TAG above.
|
||||
PATCH_COMMAND
|
||||
${CMAKE_COMMAND} -D RIPPLED_SOURCE=${CMAKE_CURRENT_SOURCE_DIR}
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/soci_patch.cmake
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
$<$<BOOL:${CMAKE_TOOLCHAIN_FILE}>:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}>
|
||||
$<$<BOOL:${VCPKG_TARGET_TRIPLET}>:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}>
|
||||
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON}>
|
||||
-DCMAKE_PREFIX_PATH=${CMAKE_BINARY_DIR}/sqlite3
|
||||
-DCMAKE_MODULE_PATH=${CMAKE_CURRENT_SOURCE_DIR}/cmake
|
||||
-DCMAKE_INCLUDE_PATH=$<JOIN:$<TARGET_PROPERTY:sqlite,INTERFACE_INCLUDE_DIRECTORIES>,::>
|
||||
-DCMAKE_LIBRARY_PATH=${sqlite_BINARY_DIR}
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
-DSOCI_CXX_C11=ON
|
||||
-DSOCI_STATIC=ON
|
||||
-DSOCI_LIBDIR=lib
|
||||
-DSOCI_SHARED=OFF
|
||||
-DSOCI_TESTS=OFF
|
||||
# hacks to workaround the fact that soci doesn't currently use
|
||||
# boost imported targets in its cmake. If they switch to
|
||||
# proper imported targets, this next line can be removed
|
||||
# (as well as the get_property above that sets _boost_incs)
|
||||
-DBoost_INCLUDE_DIRS=$<JOIN:${_boost_incs},::>
|
||||
-DBoost_INCLUDE_DIR=$<JOIN:${_boost_incs},::>
|
||||
-DBOOST_ROOT=${BOOST_ROOT}
|
||||
-DWITH_BOOST=ON
|
||||
-DBoost_FOUND=ON
|
||||
-DBoost_NO_BOOST_CMAKE=ON
|
||||
-DBoost_DATE_TIME_FOUND=ON
|
||||
-DSOCI_HAVE_BOOST=ON
|
||||
-DSOCI_HAVE_BOOST_DATE_TIME=ON
|
||||
-DBoost_DATE_TIME_LIBRARY=${_boost_dt}
|
||||
-DSOCI_DB2=OFF
|
||||
-DSOCI_FIREBIRD=OFF
|
||||
-DSOCI_MYSQL=OFF
|
||||
-DSOCI_ODBC=OFF
|
||||
-DSOCI_ORACLE=OFF
|
||||
-DSOCI_POSTGRESQL=OFF
|
||||
-DSOCI_SQLITE3=ON
|
||||
-DSQLITE3_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:sqlite,INTERFACE_INCLUDE_DIRECTORIES>,::>
|
||||
-DSQLITE3_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:sqlite,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:sqlite,IMPORTED_LOCATION_RELEASE>>
|
||||
$<$<BOOL:${APPLE}>:-DCMAKE_FIND_FRAMEWORK=LAST>
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
|
||||
"-DCMAKE_CXX_FLAGS_DEBUG=-MTd"
|
||||
"-DCMAKE_CXX_FLAGS_RELEASE=-MT"
|
||||
>
|
||||
$<$<NOT:$<BOOL:${MSVC}>>:
|
||||
"-DCMAKE_CXX_FLAGS=-Wno-deprecated-declarations"
|
||||
>
|
||||
# SEE: https://github.com/SOCI/soci/issues/640
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<VERSION_GREATER_EQUAL:${CMAKE_CXX_COMPILER_VERSION},8>>:
|
||||
"-DCMAKE_CXX_FLAGS=-Wno-deprecated-declarations -Wno-error=format-overflow -Wno-format-overflow -Wno-error=format-truncation"
|
||||
>
|
||||
LIST_SEPARATOR ::
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--parallel ${ep_procs}
|
||||
$<$<BOOL:${is_multiconfig}>:
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E copy
|
||||
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_core${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_empty${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_sqlite3${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>/lib
|
||||
>
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
DEPENDS sqlite
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/lib/${soci_lib_pre}soci_core${soci_lib_post}${ep_lib_suffix}
|
||||
<BINARY_DIR>/lib/${soci_lib_pre}soci_core${soci_lib_post}_d${ep_lib_suffix}
|
||||
<BINARY_DIR>/lib/${soci_lib_pre}soci_empty${soci_lib_post}${ep_lib_suffix}
|
||||
<BINARY_DIR>/lib/${soci_lib_pre}soci_empty${soci_lib_post}_d${ep_lib_suffix}
|
||||
<BINARY_DIR>/lib/${soci_lib_pre}soci_sqlite3${soci_lib_post}${ep_lib_suffix}
|
||||
<BINARY_DIR>/lib/${soci_lib_pre}soci_sqlite3${soci_lib_post}_d${ep_lib_suffix}
|
||||
)
|
||||
ExternalProject_Get_Property (soci BINARY_DIR)
|
||||
ExternalProject_Get_Property (soci SOURCE_DIR)
|
||||
if (CMAKE_VERBOSE_MAKEFILE)
|
||||
print_ep_logs (soci)
|
||||
endif ()
|
||||
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
|
||||
file (MAKE_DIRECTORY ${BINARY_DIR}/include)
|
||||
foreach (_comp core empty sqlite3)
|
||||
set_target_properties ("soci_${_comp}" PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/lib/${soci_lib_pre}soci_${_comp}${soci_lib_post}_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/lib/${soci_lib_pre}soci_${_comp}${soci_lib_post}${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
"${SOURCE_DIR}/include;${BINARY_DIR}/include")
|
||||
add_dependencies ("soci_${_comp}" soci) # something has to depend on the ExternalProject to trigger it
|
||||
target_link_libraries (ripple_libs INTERFACE "soci_${_comp}")
|
||||
if (NOT _comp STREQUAL "core")
|
||||
target_link_libraries ("soci_${_comp}" INTERFACE soci_core)
|
||||
endif ()
|
||||
endforeach ()
|
||||
endif()
|
||||
|
||||
foreach (_comp core empty sqlite3)
|
||||
exclude_if_included ("soci_${_comp}")
|
||||
endforeach ()
|
||||
|
||||
|
||||
exclude_if_included (soci)
|
||||
@@ -1,93 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: sqlite
|
||||
#]===================================================================]
|
||||
|
||||
add_library (sqlite STATIC IMPORTED GLOBAL)
|
||||
|
||||
if (NOT WIN32)
|
||||
find_package(sqlite)
|
||||
endif()
|
||||
|
||||
|
||||
if(sqlite3)
|
||||
set_target_properties (sqlite PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${sqlite3}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${sqlite3}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SQLITE_INCLUDE_DIR})
|
||||
|
||||
else()
|
||||
ExternalProject_Add (sqlite3
|
||||
PREFIX ${nih_cache_path}
|
||||
# sqlite doesn't use git, but it provides versioned tarballs
|
||||
URL https://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip
|
||||
http://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip
|
||||
https://www2.sqlite.org/2018/sqlite-amalgamation-3260000.zip
|
||||
http://www2.sqlite.org/2018/sqlite-amalgamation-3260000.zip
|
||||
# ^^^ version is apparent in the URL: 3260000 => 3.26.0
|
||||
URL_HASH SHA256=de5dcab133aa339a4cf9e97c40aa6062570086d6085d8f9ad7bc6ddf8a52096e
|
||||
# Don't need to worry about MITM attacks too much because the download
|
||||
# is checked against a strong hash
|
||||
TLS_VERIFY false
|
||||
# we wrote a very simple CMake file to build sqlite
|
||||
# so that's what we copy here so that we can build with
|
||||
# CMake. sqlite doesn't generally provided a build system
|
||||
# for the single amalgamation source file.
|
||||
PATCH_COMMAND
|
||||
${CMAKE_COMMAND} -E copy_if_different
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/cmake/CMake_sqlite3.txt
|
||||
<SOURCE_DIR>/CMakeLists.txt
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
|
||||
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
|
||||
"-DCMAKE_C_FLAGS_RELEASE=-MT"
|
||||
>
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--parallel ${ep_procs}
|
||||
$<$<BOOL:${is_multiconfig}>:
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E copy
|
||||
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}sqlite3$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>
|
||||
>
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/${ep_lib_prefix}sqlite3${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}sqlite3_d${ep_lib_suffix}
|
||||
)
|
||||
ExternalProject_Get_Property (sqlite3 BINARY_DIR)
|
||||
ExternalProject_Get_Property (sqlite3 SOURCE_DIR)
|
||||
if (CMAKE_VERBOSE_MAKEFILE)
|
||||
print_ep_logs (sqlite3)
|
||||
endif ()
|
||||
|
||||
set_target_properties (sqlite PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/${ep_lib_prefix}sqlite3_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/${ep_lib_prefix}sqlite3${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR})
|
||||
|
||||
add_dependencies (sqlite sqlite3)
|
||||
exclude_if_included (sqlite3)
|
||||
endif()
|
||||
|
||||
target_link_libraries (sqlite INTERFACE $<$<NOT:$<BOOL:${MSVC}>>:dl>)
|
||||
target_link_libraries (ripple_libs INTERFACE sqlite)
|
||||
exclude_if_included (sqlite)
|
||||
set(sqlite_BINARY_DIR ${BINARY_DIR})
|
||||
@@ -1,84 +1 @@
|
||||
#[===================================================================[
|
||||
NIH dep: wasmedge: web assembly runtime for hooks.
|
||||
#]===================================================================]
|
||||
|
||||
find_package(Curses)
|
||||
if(CURSES_FOUND)
|
||||
include_directories(${CURSES_INCLUDE_DIR})
|
||||
target_link_libraries(ripple_libs INTERFACE ${CURSES_LIBRARY})
|
||||
else()
|
||||
message(WARNING "CURSES library not found... (only important for mac builds)")
|
||||
endif()
|
||||
|
||||
|
||||
find_package(LLVM REQUIRED CONFIG)
|
||||
message(STATUS "Found LLVM ${LLVM_PACKAGE_VERSION}")
|
||||
message(STATUS "Using LLVMConfig.cmake in: ${LLVM_DIR}")
|
||||
ExternalProject_Add (wasmedge_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/WasmEdge/WasmEdge.git
|
||||
GIT_TAG 0.11.2
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
-DWASMEDGE_BUILD_SHARED_LIB=OFF
|
||||
-DWASMEDGE_BUILD_STATIC_LIB=ON
|
||||
-DWASMEDGE_BUILD_AOT_RUNTIME=ON
|
||||
-DWASMEDGE_FORCE_DISABLE_LTO=ON
|
||||
-DWASMEDGE_LINK_LLVM_STATIC=ON
|
||||
-DWASMEDGE_LINK_TOOLS_STATIC=ON
|
||||
-DWASMEDGE_BUILD_PLUGINS=OFF
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DLLVM_DIR=${LLVM_DIR}
|
||||
-DLLVM_LIBRARY_DIR=${LLVM_LIBRARY_DIR}
|
||||
-DLLVM_ENABLE_TERMINFO=OFF
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP -march=native"
|
||||
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
|
||||
"-DCMAKE_C_FLAGS_RELEASE=-MT"
|
||||
>
|
||||
LOG_CONFIGURE ON
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
COMMAND
|
||||
pwd
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
$<$<VERSION_GREATER_EQUAL:${CMAKE_VERSION},3.12>:--parallel ${ep_procs}>
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/lib/api/libwasmedge.a
|
||||
)
|
||||
add_library (wasmedge STATIC IMPORTED GLOBAL)
|
||||
ExternalProject_Get_Property (wasmedge_src BINARY_DIR)
|
||||
ExternalProject_Get_Property (wasmedge_src SOURCE_DIR)
|
||||
set (wasmedge_src_BINARY_DIR "${BINARY_DIR}")
|
||||
add_dependencies (wasmedge wasmedge_src)
|
||||
execute_process(
|
||||
COMMAND
|
||||
mkdir -p "${wasmedge_src_BINARY_DIR}/include/api"
|
||||
)
|
||||
set_target_properties (wasmedge PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
"${wasmedge_src_BINARY_DIR}/lib/api/libwasmedge.a"
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
"${wasmedge_src_BINARY_DIR}/lib/api/libwasmedge.a"
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
"${wasmedge_src_BINARY_DIR}/include/api/"
|
||||
)
|
||||
target_link_libraries (ripple_libs INTERFACE wasmedge)
|
||||
#RH NOTE: some compilers / versions of some libraries need these, most don't
|
||||
|
||||
find_library(XAR_LIBRARY NAMES xar)
|
||||
if(XAR_LIBRARY)
|
||||
target_link_libraries(ripple_libs INTERFACE ${XAR_LIBRARY})
|
||||
else()
|
||||
message(WARNING "xar library not found... (only important for mac builds)")
|
||||
endif()
|
||||
add_library (wasmedge::wasmedge ALIAS wasmedge)
|
||||
find_package(wasmedge REQUIRED)
|
||||
|
||||
@@ -1,167 +0,0 @@
|
||||
if(reporting)
|
||||
find_library(cassandra NAMES cassandra)
|
||||
if(NOT cassandra)
|
||||
|
||||
message("System installed Cassandra cpp driver not found. Will build")
|
||||
|
||||
find_library(zlib NAMES zlib1g-dev zlib-devel zlib z)
|
||||
if(NOT zlib)
|
||||
message("zlib not found. will build")
|
||||
add_library(zlib STATIC IMPORTED GLOBAL)
|
||||
ExternalProject_Add(zlib_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/madler/zlib.git
|
||||
GIT_TAG v1.2.12
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}z.a
|
||||
LOG_BUILD TRUE
|
||||
LOG_CONFIGURE TRUE
|
||||
)
|
||||
|
||||
|
||||
ExternalProject_Get_Property (zlib_src SOURCE_DIR)
|
||||
ExternalProject_Get_Property (zlib_src BINARY_DIR)
|
||||
set (zlib_src_SOURCE_DIR "${SOURCE_DIR}")
|
||||
file (MAKE_DIRECTORY ${zlib_src_SOURCE_DIR}/include)
|
||||
|
||||
set_target_properties (zlib PROPERTIES
|
||||
IMPORTED_LOCATION
|
||||
${BINARY_DIR}/${ep_lib_prefix}z.a
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR}/include)
|
||||
add_dependencies(zlib zlib_src)
|
||||
|
||||
file(TO_CMAKE_PATH "${zlib_src_SOURCE_DIR}" zlib_src_SOURCE_DIR)
|
||||
endif()
|
||||
|
||||
|
||||
|
||||
|
||||
find_library(krb5 NAMES krb5-dev libkrb5-dev)
|
||||
|
||||
if(NOT krb5)
|
||||
message("krb5 not found. will build")
|
||||
add_library(krb5 STATIC IMPORTED GLOBAL)
|
||||
ExternalProject_Add(krb5_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/krb5/krb5.git
|
||||
GIT_TAG krb5-1.20-final
|
||||
UPDATE_COMMAND ""
|
||||
CONFIGURE_COMMAND autoreconf src && CFLAGS=-fcommon ./src/configure --enable-static --disable-shared > /dev/null
|
||||
BUILD_IN_SOURCE 1
|
||||
BUILD_COMMAND make
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_BYPRODUCTS <SOURCE_DIR>/lib/${ep_lib_prefix}krb5.a
|
||||
LOG_BUILD TRUE
|
||||
)
|
||||
|
||||
ExternalProject_Get_Property (krb5_src SOURCE_DIR)
|
||||
ExternalProject_Get_Property (krb5_src BINARY_DIR)
|
||||
set (krb5_src_SOURCE_DIR "${SOURCE_DIR}")
|
||||
file (MAKE_DIRECTORY ${krb5_src_SOURCE_DIR}/include)
|
||||
|
||||
set_target_properties (krb5 PROPERTIES
|
||||
IMPORTED_LOCATION
|
||||
${BINARY_DIR}/lib/${ep_lib_prefix}krb5.a
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR}/include)
|
||||
add_dependencies(krb5 krb5_src)
|
||||
|
||||
file(TO_CMAKE_PATH "${krb5_src_SOURCE_DIR}" krb5_src_SOURCE_DIR)
|
||||
endif()
|
||||
|
||||
|
||||
find_library(libuv1 NAMES uv1 libuv1 liubuv1-dev libuv1:amd64)
|
||||
|
||||
|
||||
if(NOT libuv1)
|
||||
message("libuv1 not found, will build")
|
||||
add_library(libuv1 STATIC IMPORTED GLOBAL)
|
||||
ExternalProject_Add(libuv_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/libuv/libuv.git
|
||||
GIT_TAG v1.44.2
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}uv_a.a
|
||||
LOG_BUILD TRUE
|
||||
LOG_CONFIGURE TRUE
|
||||
)
|
||||
|
||||
ExternalProject_Get_Property (libuv_src SOURCE_DIR)
|
||||
ExternalProject_Get_Property (libuv_src BINARY_DIR)
|
||||
set (libuv_src_SOURCE_DIR "${SOURCE_DIR}")
|
||||
file (MAKE_DIRECTORY ${libuv_src_SOURCE_DIR}/include)
|
||||
|
||||
set_target_properties (libuv1 PROPERTIES
|
||||
IMPORTED_LOCATION
|
||||
${BINARY_DIR}/${ep_lib_prefix}uv_a.a
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR}/include)
|
||||
add_dependencies(libuv1 libuv_src)
|
||||
|
||||
file(TO_CMAKE_PATH "${libuv_src_SOURCE_DIR}" libuv_src_SOURCE_DIR)
|
||||
endif()
|
||||
|
||||
add_library (cassandra STATIC IMPORTED GLOBAL)
|
||||
ExternalProject_Add(cassandra_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/datastax/cpp-driver.git
|
||||
GIT_TAG 2.16.2
|
||||
CMAKE_ARGS
|
||||
-DLIBUV_ROOT_DIR=${BINARY_DIR}
|
||||
-DLIBUV_LIBARY=${BINARY_DIR}/libuv_a.a
|
||||
-DLIBUV_INCLUDE_DIR=${SOURCE_DIR}/include
|
||||
-DCASS_BUILD_STATIC=ON
|
||||
-DCASS_BUILD_SHARED=OFF
|
||||
-DOPENSSL_ROOT_DIR=/opt/local/openssl
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}cassandra_static.a
|
||||
LOG_BUILD TRUE
|
||||
LOG_CONFIGURE TRUE
|
||||
)
|
||||
|
||||
ExternalProject_Get_Property (cassandra_src SOURCE_DIR)
|
||||
ExternalProject_Get_Property (cassandra_src BINARY_DIR)
|
||||
set (cassandra_src_SOURCE_DIR "${SOURCE_DIR}")
|
||||
file (MAKE_DIRECTORY ${cassandra_src_SOURCE_DIR}/include)
|
||||
|
||||
set_target_properties (cassandra PROPERTIES
|
||||
IMPORTED_LOCATION
|
||||
${BINARY_DIR}/${ep_lib_prefix}cassandra_static.a
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${SOURCE_DIR}/include)
|
||||
add_dependencies(cassandra cassandra_src)
|
||||
|
||||
if(NOT libuv1)
|
||||
ExternalProject_Add_StepDependencies(cassandra_src build libuv1)
|
||||
target_link_libraries(cassandra INTERFACE libuv1)
|
||||
else()
|
||||
target_link_libraries(cassandra INTERFACE ${libuv1})
|
||||
endif()
|
||||
if(NOT krb5)
|
||||
|
||||
ExternalProject_Add_StepDependencies(cassandra_src build krb5)
|
||||
target_link_libraries(cassandra INTERFACE krb5)
|
||||
else()
|
||||
target_link_libraries(cassandra INTERFACE ${krb5})
|
||||
endif()
|
||||
|
||||
if(NOT zlib)
|
||||
ExternalProject_Add_StepDependencies(cassandra_src build zlib)
|
||||
target_link_libraries(cassandra INTERFACE zlib)
|
||||
else()
|
||||
target_link_libraries(cassandra INTERFACE ${zlib})
|
||||
endif()
|
||||
|
||||
file(TO_CMAKE_PATH "${cassandra_src_SOURCE_DIR}" cassandra_src_SOURCE_DIR)
|
||||
target_link_libraries(ripple_libs INTERFACE cassandra)
|
||||
else()
|
||||
message("Found system installed cassandra cpp driver")
|
||||
|
||||
find_path(cassandra_includes NAMES cassandra.h REQUIRED)
|
||||
target_link_libraries (ripple_libs INTERFACE ${cassandra})
|
||||
target_include_directories(ripple_libs INTERFACE ${cassandra_includes})
|
||||
endif()
|
||||
|
||||
exclude_if_included (cassandra)
|
||||
endif()
|
||||
@@ -1,18 +0,0 @@
|
||||
#[===================================================================[
|
||||
NIH dep: date
|
||||
|
||||
the main library is header-only, thus is an INTERFACE lib in CMake.
|
||||
|
||||
NOTE: this has been accepted into c++20 so can likely be replaced
|
||||
when we update to that standard
|
||||
#]===================================================================]
|
||||
|
||||
find_package (date QUIET)
|
||||
if (NOT TARGET date::date)
|
||||
FetchContent_Declare(
|
||||
hh_date_src
|
||||
GIT_REPOSITORY https://github.com/HowardHinnant/date.git
|
||||
GIT_TAG fc4cf092f9674f2670fb9177edcdee870399b829
|
||||
)
|
||||
FetchContent_MakeAvailable(hh_date_src)
|
||||
endif ()
|
||||
@@ -1,392 +0,0 @@
|
||||
|
||||
# currently linking to unsecure versions...if we switch, we'll
|
||||
# need to add ssl as a link dependency to the grpc targets
|
||||
option (use_secure_grpc "use TLS version of grpc libs." OFF)
|
||||
if (use_secure_grpc)
|
||||
set (grpc_suffix "")
|
||||
else ()
|
||||
set (grpc_suffix "_unsecure")
|
||||
endif ()
|
||||
|
||||
find_package (gRPC 1.23 CONFIG QUIET)
|
||||
if (TARGET gRPC::gpr AND NOT local_grpc)
|
||||
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION_DEBUG)
|
||||
if (NOT _grpc_l)
|
||||
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION_RELEASE)
|
||||
endif ()
|
||||
if (NOT _grpc_l)
|
||||
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION)
|
||||
endif ()
|
||||
message (STATUS "Found cmake config for gRPC. Using ${_grpc_l}.")
|
||||
else ()
|
||||
find_package (PkgConfig QUIET)
|
||||
if (PKG_CONFIG_FOUND)
|
||||
pkg_check_modules (grpc QUIET "grpc${grpc_suffix}>=1.25" "grpc++${grpc_suffix}" gpr)
|
||||
endif ()
|
||||
|
||||
if (grpc_FOUND)
|
||||
message (STATUS "Found gRPC using pkg-config. Using ${grpc_gpr_PREFIX}.")
|
||||
endif ()
|
||||
|
||||
add_executable (gRPC::grpc_cpp_plugin IMPORTED)
|
||||
exclude_if_included (gRPC::grpc_cpp_plugin)
|
||||
|
||||
if (grpc_FOUND AND NOT local_grpc)
|
||||
# use installed grpc (via pkg-config)
|
||||
macro (add_imported_grpc libname_)
|
||||
if (static)
|
||||
set (_search "${CMAKE_STATIC_LIBRARY_PREFIX}${libname_}${CMAKE_STATIC_LIBRARY_SUFFIX}")
|
||||
else ()
|
||||
set (_search "${CMAKE_SHARED_LIBRARY_PREFIX}${libname_}${CMAKE_SHARED_LIBRARY_SUFFIX}")
|
||||
endif()
|
||||
find_library(_found_${libname_}
|
||||
NAMES ${_search}
|
||||
HINTS ${grpc_LIBRARY_DIRS})
|
||||
if (_found_${libname_})
|
||||
message (STATUS "importing ${libname_} as ${_found_${libname_}}")
|
||||
else ()
|
||||
message (FATAL_ERROR "using pkg-config for grpc, can't find ${_search}")
|
||||
endif ()
|
||||
add_library ("gRPC::${libname_}" STATIC IMPORTED GLOBAL)
|
||||
set_target_properties ("gRPC::${libname_}" PROPERTIES IMPORTED_LOCATION ${_found_${libname_}})
|
||||
if (grpc_INCLUDE_DIRS)
|
||||
set_target_properties ("gRPC::${libname_}" PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${grpc_INCLUDE_DIRS})
|
||||
endif ()
|
||||
target_link_libraries (ripple_libs INTERFACE "gRPC::${libname_}")
|
||||
exclude_if_included ("gRPC::${libname_}")
|
||||
endmacro ()
|
||||
|
||||
set_target_properties (gRPC::grpc_cpp_plugin PROPERTIES
|
||||
IMPORTED_LOCATION "${grpc_gpr_PREFIX}/bin/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}")
|
||||
|
||||
pkg_check_modules (cares QUIET libcares)
|
||||
if (cares_FOUND)
|
||||
if (static)
|
||||
set (_search "${CMAKE_STATIC_LIBRARY_PREFIX}cares${CMAKE_STATIC_LIBRARY_SUFFIX}")
|
||||
set (_prefix cares_STATIC)
|
||||
set (_static STATIC)
|
||||
else ()
|
||||
set (_search "${CMAKE_SHARED_LIBRARY_PREFIX}cares${CMAKE_SHARED_LIBRARY_SUFFIX}")
|
||||
set (_prefix cares)
|
||||
set (_static)
|
||||
endif()
|
||||
find_library(_location NAMES ${_search} HINTS ${cares_LIBRARY_DIRS})
|
||||
if (NOT _location)
|
||||
message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares")
|
||||
endif ()
|
||||
if(${_location} MATCHES "\\.a$")
|
||||
add_library(c-ares::cares STATIC IMPORTED GLOBAL)
|
||||
else()
|
||||
add_library(c-ares::cares SHARED IMPORTED GLOBAL)
|
||||
endif()
|
||||
set_target_properties (c-ares::cares PROPERTIES
|
||||
IMPORTED_LOCATION ${_location}
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${${_prefix}_INCLUDE_DIRS}"
|
||||
INTERFACE_LINK_OPTIONS "${${_prefix}_LDFLAGS}"
|
||||
)
|
||||
exclude_if_included (c-ares::cares)
|
||||
else ()
|
||||
message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares")
|
||||
endif ()
|
||||
else ()
|
||||
#[===========================[
|
||||
c-ares (grpc requires)
|
||||
#]===========================]
|
||||
ExternalProject_Add (c-ares_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/c-ares/c-ares.git
|
||||
GIT_TAG cares-1_15_0
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
|
||||
-DCARES_SHARED=OFF
|
||||
-DCARES_STATIC=ON
|
||||
-DCARES_STATIC_PIC=ON
|
||||
-DCARES_INSTALL=ON
|
||||
-DCARES_MSVC_STATIC_RUNTIME=ON
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
|
||||
>
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--parallel ${ep_procs}
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND
|
||||
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}cares${ep_lib_suffix}
|
||||
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}cares_d${ep_lib_suffix}
|
||||
)
|
||||
exclude_if_included (c-ares_src)
|
||||
ExternalProject_Get_Property (c-ares_src BINARY_DIR)
|
||||
set (cares_binary_dir "${BINARY_DIR}")
|
||||
|
||||
add_library (c-ares::cares STATIC IMPORTED GLOBAL)
|
||||
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
|
||||
set_target_properties (c-ares::cares PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}cares_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}cares${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${BINARY_DIR}/_installed_/include)
|
||||
add_dependencies (c-ares::cares c-ares_src)
|
||||
exclude_if_included (c-ares::cares)
|
||||
|
||||
if (NOT has_zlib)
|
||||
#[===========================[
|
||||
zlib (grpc requires)
|
||||
#]===========================]
|
||||
if (MSVC)
|
||||
set (zlib_debug_postfix "d") # zlib cmake sets this internally for MSVC, so we really don't have a choice
|
||||
set (zlib_base "zlibstatic")
|
||||
else ()
|
||||
set (zlib_debug_postfix "_d")
|
||||
set (zlib_base "z")
|
||||
endif ()
|
||||
ExternalProject_Add (zlib_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/madler/zlib.git
|
||||
GIT_TAG v1.2.11
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
-DCMAKE_DEBUG_POSTFIX=${zlib_debug_postfix}
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
|
||||
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
|
||||
"-DCMAKE_C_FLAGS_RELEASE=-MT"
|
||||
>
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--parallel ${ep_procs}
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND
|
||||
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}${zlib_base}${ep_lib_suffix}
|
||||
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}${zlib_base}${zlib_debug_postfix}${ep_lib_suffix}
|
||||
)
|
||||
exclude_if_included (zlib_src)
|
||||
ExternalProject_Get_Property (zlib_src BINARY_DIR)
|
||||
set (zlib_binary_dir "${BINARY_DIR}")
|
||||
|
||||
add_library (ZLIB::ZLIB STATIC IMPORTED GLOBAL)
|
||||
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
|
||||
set_target_properties (ZLIB::ZLIB PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}${zlib_base}${zlib_debug_postfix}${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}${zlib_base}${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${BINARY_DIR}/_installed_/include)
|
||||
add_dependencies (ZLIB::ZLIB zlib_src)
|
||||
exclude_if_included (ZLIB::ZLIB)
|
||||
endif ()
|
||||
|
||||
#[===========================[
|
||||
grpc
|
||||
#]===========================]
|
||||
ExternalProject_Add (grpc_src
|
||||
PREFIX ${nih_cache_path}
|
||||
GIT_REPOSITORY https://github.com/grpc/grpc.git
|
||||
GIT_TAG v1.25.0
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
-DCMAKE_CXX_STANDARD=17
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
$<$<BOOL:${CMAKE_TOOLCHAIN_FILE}>:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}>
|
||||
$<$<BOOL:${VCPKG_TARGET_TRIPLET}>:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}>
|
||||
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON}>
|
||||
-DCMAKE_DEBUG_POSTFIX=_d
|
||||
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
|
||||
-DgRPC_BUILD_TESTS=OFF
|
||||
-DgRPC_BENCHMARK_PROVIDER=""
|
||||
-DgRPC_BUILD_CSHARP_EXT=OFF
|
||||
-DgRPC_MSVC_STATIC_RUNTIME=ON
|
||||
-DgRPC_INSTALL=OFF
|
||||
-DgRPC_CARES_PROVIDER=package
|
||||
-Dc-ares_DIR=${cares_binary_dir}/_installed_/lib/cmake/c-ares
|
||||
-DgRPC_SSL_PROVIDER=package
|
||||
-DOPENSSL_ROOT_DIR=${OPENSSL_ROOT_DIR}
|
||||
-DgRPC_PROTOBUF_PROVIDER=package
|
||||
-DProtobuf_USE_STATIC_LIBS=$<IF:$<AND:$<BOOL:${Protobuf_FOUND}>,$<NOT:$<BOOL:${static}>>>,OFF,ON>
|
||||
-DProtobuf_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:protobuf::libprotobuf,INTERFACE_INCLUDE_DIRECTORIES>,:_:>
|
||||
-DProtobuf_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:protobuf::libprotobuf,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:protobuf::libprotobuf,IMPORTED_LOCATION_RELEASE>>
|
||||
-DProtobuf_PROTOC_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:protobuf::libprotoc,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:protobuf::libprotoc,IMPORTED_LOCATION_RELEASE>>
|
||||
-DProtobuf_PROTOC_EXECUTABLE=$<TARGET_PROPERTY:protobuf::protoc,IMPORTED_LOCATION>
|
||||
-DgRPC_ZLIB_PROVIDER=package
|
||||
$<$<NOT:$<BOOL:${has_zlib}>>:-DZLIB_ROOT=${zlib_binary_dir}/_installed_>
|
||||
$<$<BOOL:${MSVC}>:
|
||||
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
|
||||
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
|
||||
>
|
||||
LOG_BUILD ON
|
||||
LOG_CONFIGURE ON
|
||||
BUILD_COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build .
|
||||
--config $<CONFIG>
|
||||
--parallel ${ep_procs}
|
||||
$<$<BOOL:${is_multiconfig}>:
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E copy
|
||||
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}grpc${grpc_suffix}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}grpc++${grpc_suffix}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}address_sorting$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}gpr$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
|
||||
<BINARY_DIR>/$<CONFIG>/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}
|
||||
<BINARY_DIR>
|
||||
>
|
||||
LIST_SEPARATOR :_:
|
||||
TEST_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
DEPENDS c-ares_src
|
||||
BUILD_BYPRODUCTS
|
||||
<BINARY_DIR>/${ep_lib_prefix}grpc${grpc_suffix}${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}grpc${grpc_suffix}_d${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}grpc++${grpc_suffix}${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}grpc++${grpc_suffix}_d${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}address_sorting${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}address_sorting_d${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}gpr${ep_lib_suffix}
|
||||
<BINARY_DIR>/${ep_lib_prefix}gpr_d${ep_lib_suffix}
|
||||
<BINARY_DIR>/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}
|
||||
)
|
||||
if (TARGET protobuf_src)
|
||||
ExternalProject_Add_StepDependencies(grpc_src build protobuf_src)
|
||||
endif ()
|
||||
exclude_if_included (grpc_src)
|
||||
ExternalProject_Get_Property (grpc_src BINARY_DIR)
|
||||
ExternalProject_Get_Property (grpc_src SOURCE_DIR)
|
||||
set (grpc_binary_dir "${BINARY_DIR}")
|
||||
set (grpc_source_dir "${SOURCE_DIR}")
|
||||
if (CMAKE_VERBOSE_MAKEFILE)
|
||||
print_ep_logs (grpc_src)
|
||||
endif ()
|
||||
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
|
||||
|
||||
macro (add_imported_grpc libname_)
|
||||
add_library ("gRPC::${libname_}" STATIC IMPORTED GLOBAL)
|
||||
set_target_properties ("gRPC::${libname_}" PROPERTIES
|
||||
IMPORTED_LOCATION_DEBUG
|
||||
${grpc_binary_dir}/${ep_lib_prefix}${libname_}_d${ep_lib_suffix}
|
||||
IMPORTED_LOCATION_RELEASE
|
||||
${grpc_binary_dir}/${ep_lib_prefix}${libname_}${ep_lib_suffix}
|
||||
INTERFACE_INCLUDE_DIRECTORIES
|
||||
${grpc_source_dir}/include)
|
||||
add_dependencies ("gRPC::${libname_}" grpc_src)
|
||||
target_link_libraries (ripple_libs INTERFACE "gRPC::${libname_}")
|
||||
exclude_if_included ("gRPC::${libname_}")
|
||||
endmacro ()
|
||||
|
||||
set_target_properties (gRPC::grpc_cpp_plugin PROPERTIES
|
||||
IMPORTED_LOCATION "${grpc_binary_dir}/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}")
|
||||
add_dependencies (gRPC::grpc_cpp_plugin grpc_src)
|
||||
endif ()
|
||||
|
||||
add_imported_grpc (gpr)
|
||||
add_imported_grpc ("grpc${grpc_suffix}")
|
||||
add_imported_grpc ("grpc++${grpc_suffix}")
|
||||
add_imported_grpc (address_sorting)
|
||||
|
||||
target_link_libraries ("gRPC::grpc${grpc_suffix}" INTERFACE c-ares::cares gRPC::gpr gRPC::address_sorting ZLIB::ZLIB)
|
||||
target_link_libraries ("gRPC::grpc++${grpc_suffix}" INTERFACE "gRPC::grpc${grpc_suffix}" gRPC::gpr)
|
||||
endif ()
|
||||
|
||||
#[=================================[
|
||||
generate protobuf sources for
|
||||
grpc defs and bundle into a
|
||||
static lib
|
||||
#]=================================]
|
||||
set(output_dir "${CMAKE_BINARY_DIR}/proto_gen_grpc")
|
||||
set(GRPC_GEN_DIR "${output_dir}/ripple/proto")
|
||||
file(MAKE_DIRECTORY ${GRPC_GEN_DIR})
|
||||
set(GRPC_PROTO_SRCS)
|
||||
set(GRPC_PROTO_HDRS)
|
||||
set(GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
|
||||
file(GLOB_RECURSE GRPC_DEFINITION_FILES "${GRPC_PROTO_ROOT}/*.proto")
|
||||
foreach(file ${GRPC_DEFINITION_FILES})
|
||||
# /home/user/rippled/src/ripple/proto/org/.../v1/get_ledger.proto
|
||||
get_filename_component(_abs_file ${file} ABSOLUTE)
|
||||
# /home/user/rippled/src/ripple/proto/org/.../v1
|
||||
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
|
||||
# get_ledger
|
||||
get_filename_component(_basename ${file} NAME_WE)
|
||||
# /home/user/rippled/src/ripple/proto
|
||||
get_filename_component(_proto_inc ${GRPC_PROTO_ROOT} DIRECTORY) # updir one level
|
||||
# org/.../v1/get_ledger.proto
|
||||
file(RELATIVE_PATH _rel_root_file ${_proto_inc} ${_abs_file})
|
||||
# org/.../v1
|
||||
get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
|
||||
# src/ripple/proto/org/.../v1
|
||||
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
|
||||
|
||||
set(src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
|
||||
set(src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
|
||||
set(hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
|
||||
set(hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
|
||||
add_custom_command(
|
||||
OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
|
||||
COMMAND protobuf::protoc
|
||||
ARGS --grpc_out=${GRPC_GEN_DIR}
|
||||
--cpp_out=${GRPC_GEN_DIR}
|
||||
--plugin=protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
|
||||
-I ${_proto_inc} -I ${_rel_dir}
|
||||
${_abs_file}
|
||||
DEPENDS ${_abs_file} protobuf::protoc gRPC::grpc_cpp_plugin
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Running gRPC C++ protocol buffer compiler on ${file}"
|
||||
VERBATIM)
|
||||
set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES
|
||||
GENERATED TRUE
|
||||
SKIP_UNITY_BUILD_INCLUSION ON
|
||||
)
|
||||
list(APPEND GRPC_PROTO_SRCS ${src_1} ${src_2})
|
||||
list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
|
||||
endforeach()
|
||||
|
||||
target_include_directories(xrpl.libpb SYSTEM PUBLIC
|
||||
$<BUILD_INTERFACE:${output_dir}>
|
||||
$<BUILD_INTERFACE:${output_dir}/ripple/proto>
|
||||
# The generated sources include headers relative to this path. Fix it later.
|
||||
$<INSTALL_INTERFACE:include/ripple/proto>
|
||||
)
|
||||
target_sources(xrpl.libpb PRIVATE ${GRPC_PROTO_SRCS})
|
||||
install(
|
||||
DIRECTORY ${output_dir}/ripple
|
||||
DESTINATION include/
|
||||
FILES_MATCHING PATTERN "*.h"
|
||||
)
|
||||
target_link_libraries(xrpl.libpb PUBLIC
|
||||
"gRPC::grpc++"
|
||||
# libgrpc is missing references.
|
||||
absl::random_random
|
||||
)
|
||||
target_compile_options(xrpl.libpb
|
||||
PRIVATE
|
||||
$<$<BOOL:${MSVC}>:-wd4065>
|
||||
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
|
||||
PUBLIC
|
||||
$<$<BOOL:${MSVC}>:-wd4996>
|
||||
$<$<BOOL:${is_xcode}>:
|
||||
--system-header-prefix="google/protobuf"
|
||||
-Wno-deprecated-dynamic-exception-spec
|
||||
>)
|
||||
# target_link_libraries (ripple_libs INTERFACE Ripple::grpc_pbufs)
|
||||
# exclude_if_included (grpc_pbufs)
|
||||
32
conanfile.py
32
conanfile.py
@@ -21,6 +21,8 @@ class Xrpl(ConanFile):
|
||||
'tests': [True, False],
|
||||
'unity': [True, False],
|
||||
'xrpld': [True, False],
|
||||
'with_wasmedge': [True, False],
|
||||
'tool_requires_b2': [True, False],
|
||||
}
|
||||
|
||||
requires = [
|
||||
@@ -28,15 +30,14 @@ class Xrpl(ConanFile):
|
||||
'grpc/1.50.1',
|
||||
'libarchive/3.7.6',
|
||||
'nudb/2.0.8',
|
||||
'openssl/1.1.1v',
|
||||
'soci/4.0.3',
|
||||
'openssl/3.6.0',
|
||||
'soci/4.0.3@xahaud/stable',
|
||||
'xxhash/0.8.2',
|
||||
'wasmedge/0.11.2',
|
||||
'zlib/1.3.1',
|
||||
]
|
||||
|
||||
tool_requires = [
|
||||
'protobuf/3.21.9',
|
||||
'protobuf/3.21.12',
|
||||
]
|
||||
|
||||
default_options = {
|
||||
@@ -49,9 +50,11 @@ class Xrpl(ConanFile):
|
||||
'static': True,
|
||||
'tests': False,
|
||||
'unity': False,
|
||||
'with_wasmedge': True,
|
||||
'tool_requires_b2': False,
|
||||
'xrpld': False,
|
||||
|
||||
'date/*:header_only': True,
|
||||
'date/*:header_only': False,
|
||||
'grpc/*:shared': False,
|
||||
'grpc/*:secure': True,
|
||||
'libarchive/*:shared': False,
|
||||
@@ -95,15 +98,28 @@ class Xrpl(ConanFile):
|
||||
match = next(m for m in matches if m)
|
||||
self.version = match.group(1)
|
||||
|
||||
def build_requirements(self):
|
||||
self.tool_requires('grpc/1.50.1')
|
||||
# Explicitly require b2 (e.g. for building from source for glibc compatibility)
|
||||
if self.options.tool_requires_b2:
|
||||
self.tool_requires('b2/5.3.2')
|
||||
|
||||
def configure(self):
|
||||
if self.settings.compiler == 'apple-clang':
|
||||
self.options['boost'].visibility = 'global'
|
||||
self.options['boost/*'].visibility = 'global'
|
||||
|
||||
def requirements(self):
|
||||
self.requires('boost/1.86.0', force=True)
|
||||
# Force boost version for all dependencies to avoid conflicts
|
||||
self.requires('boost/1.86.0', override=True)
|
||||
self.requires('lz4/1.10.0', force=True)
|
||||
self.requires('protobuf/3.21.9', force=True)
|
||||
self.requires('sqlite3/3.47.0', force=True)
|
||||
# Force sqlite3 version to avoid conflicts with soci
|
||||
self.requires('sqlite3/3.47.0', override=True)
|
||||
# Force our custom snappy build for all dependencies
|
||||
self.requires('snappy/1.1.10@xahaud/stable', override=True)
|
||||
|
||||
if self.options.with_wasmedge:
|
||||
self.requires('wasmedge/0.11.2@xahaud/stable')
|
||||
if self.options.jemalloc:
|
||||
self.requires('jemalloc/5.3.0')
|
||||
if self.options.rocksdb:
|
||||
|
||||
@@ -8,4 +8,4 @@ if [[ "$GITHUB_REPOSITORY" == "" ]]; then
|
||||
fi
|
||||
|
||||
echo "Mounting $(pwd)/io in ubuntu and running unit tests"
|
||||
docker run --rm -i -v $(pwd):/io -e BUILD_CORES=$BUILD_CORES ubuntu sh -c '/io/release-build/xahaud --unittest-jobs $BUILD_CORES -u'
|
||||
docker run --rm -i -v $(pwd):/io --platform=linux/amd64 -e BUILD_CORES=$BUILD_CORES ubuntu sh -c '/io/release-build/xahaud --unittest-jobs $BUILD_CORES -u'
|
||||
|
||||
4
docs/build/environment.md
vendored
4
docs/build/environment.md
vendored
@@ -11,11 +11,11 @@ platforms: Linux, macOS, or Windows.
|
||||
Package ecosystems vary across Linux distributions,
|
||||
so there is no one set of instructions that will work for every Linux user.
|
||||
These instructions are written for Ubuntu 22.04.
|
||||
They are largely copied from the [script][1] used to configure our Docker
|
||||
They are largely copied from the [script][1] used to configure a Docker
|
||||
container for continuous integration.
|
||||
That script handles many more responsibilities.
|
||||
These instructions are just the bare minimum to build one configuration of
|
||||
rippled.
|
||||
xahaud.
|
||||
You can check that codebase for other Linux distributions and versions.
|
||||
If you cannot find yours there,
|
||||
then we hope that these instructions can at least guide you in the right
|
||||
|
||||
177
docs/build/install.md
vendored
177
docs/build/install.md
vendored
@@ -1,159 +1,30 @@
|
||||
This document contains instructions for installing rippled.
|
||||
The APT package manager is common on Debian-based Linux distributions like
|
||||
Ubuntu,
|
||||
while the YUM package manager is common on Red Hat-based Linux distributions
|
||||
like CentOS.
|
||||
Installing from source is an option for all platforms,
|
||||
and the only supported option for installing custom builds.
|
||||
Comprehensive instructions for installing and running xahaud are available on the [https://Xahau.Network](https://xahau.network/docs/infrastructure/installing-xahaud) documentation website.
|
||||
|
||||
## Create the Runtime Environment
|
||||
xahaud can be [built from source](../../BUILD.md) or installed using the binary files available from [https://build.xahau.tech](https://build.xahau.tech/). After obtaining a working xahaud binary, users will need to provide a suitable runtime environment. The following setup can be used for Linux or Docker environments.
|
||||
|
||||
## From source
|
||||
|
||||
From a source build, you can install rippled and libxrpl using CMake's
|
||||
`--install` mode:
|
||||
1. Create or download two configuration files: the main xahaud.cfg configuration file and a second validators-xahau.txt file defining which validators or UNL list publishers are trusted. The default location for these files in this xahaud repository is `cfg/`.
|
||||
2. Provide a directory structure that is congruent with the contents of xahaud.cfg. This will include a location for logfiles, such as `/var/log/xahaud/`, as well as database files, `/opt/xahaud/db/`. Configuration files are, by default, sourced from `/etc/xahaud/`. It is possible to provide a symbolic link, if users wish to store configuration files elsewhere.
|
||||
3. If desired, created a xahaud user and group, and change ownership of the binary and directories. Servers used for validating nodes should use the most restrictive permissions possible for `xahaud.cfg`, as the validation token is stored therein.
|
||||
4. If desired, create a systemd service file: `/etc/systemd/system/xahaud.service`, enabling xahaud to run as a daemon. Alternately, run: `/path/to/binary/xahaud --conf=/path/to/xahaud.cfg`.
|
||||
|
||||
## Example systemd Service File
|
||||
```
|
||||
cmake --install . --prefix /opt/local
|
||||
[Unit]
|
||||
Description=Xahaud Daemon
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/path/to/xahaud --silent --conf /path/to/xahaud.cfg
|
||||
Restart=on-failure
|
||||
User=xahaud
|
||||
Group=xahaud
|
||||
LimitNOFILE=65536
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
```
|
||||
|
||||
The default [prefix][1] is typically `/usr/local` on Linux and macOS and
|
||||
`C:/Program Files/rippled` on Windows.
|
||||
|
||||
[1]: https://cmake.org/cmake/help/latest/variable/CMAKE_INSTALL_PREFIX.html
|
||||
|
||||
|
||||
## With the APT package manager
|
||||
|
||||
1. Update repositories:
|
||||
|
||||
sudo apt update -y
|
||||
|
||||
2. Install utilities:
|
||||
|
||||
sudo apt install -y apt-transport-https ca-certificates wget gnupg
|
||||
|
||||
3. Add Ripple's package-signing GPG key to your list of trusted keys:
|
||||
|
||||
sudo mkdir /usr/local/share/keyrings/
|
||||
wget -q -O - "https://repos.ripple.com/repos/api/gpg/key/public" | gpg --dearmor > ripple-key.gpg
|
||||
sudo mv ripple-key.gpg /usr/local/share/keyrings
|
||||
|
||||
|
||||
4. Check the fingerprint of the newly-added key:
|
||||
|
||||
gpg /usr/local/share/keyrings/ripple-key.gpg
|
||||
|
||||
The output should include an entry for Ripple such as the following:
|
||||
|
||||
gpg: WARNING: no command supplied. Trying to guess what you mean ...
|
||||
pub rsa3072 2019-02-14 [SC] [expires: 2026-02-17]
|
||||
C0010EC205B35A3310DC90DE395F97FFCCAFD9A2
|
||||
uid TechOps Team at Ripple <techops+rippled@ripple.com>
|
||||
sub rsa3072 2019-02-14 [E] [expires: 2026-02-17]
|
||||
|
||||
|
||||
In particular, make sure that the fingerprint matches. (In the above example, the fingerprint is on the third line, starting with `C001`.)
|
||||
|
||||
4. Add the appropriate Ripple repository for your operating system version:
|
||||
|
||||
echo "deb [signed-by=/usr/local/share/keyrings/ripple-key.gpg] https://repos.ripple.com/repos/rippled-deb focal stable" | \
|
||||
sudo tee -a /etc/apt/sources.list.d/ripple.list
|
||||
|
||||
The above example is appropriate for **Ubuntu 20.04 Focal Fossa**. For other operating systems, replace the word `focal` with one of the following:
|
||||
|
||||
- `jammy` for **Ubuntu 22.04 Jammy Jellyfish**
|
||||
- `bionic` for **Ubuntu 18.04 Bionic Beaver**
|
||||
- `bullseye` for **Debian 11 Bullseye**
|
||||
- `buster` for **Debian 10 Buster**
|
||||
|
||||
If you want access to development or pre-release versions of `rippled`, use one of the following instead of `stable`:
|
||||
|
||||
- `unstable` - Pre-release builds ([`release` branch](https://github.com/ripple/rippled/tree/release))
|
||||
- `nightly` - Experimental/development builds ([`develop` branch](https://github.com/ripple/rippled/tree/develop))
|
||||
|
||||
**Warning:** Unstable and nightly builds may be broken at any time. Do not use these builds for production servers.
|
||||
|
||||
5. Fetch the Ripple repository.
|
||||
|
||||
sudo apt -y update
|
||||
|
||||
6. Install the `rippled` software package:
|
||||
|
||||
sudo apt -y install rippled
|
||||
|
||||
7. Check the status of the `rippled` service:
|
||||
|
||||
systemctl status rippled.service
|
||||
|
||||
The `rippled` service should start automatically. If not, you can start it manually:
|
||||
|
||||
sudo systemctl start rippled.service
|
||||
|
||||
8. Optional: allow `rippled` to bind to privileged ports.
|
||||
|
||||
This allows you to serve incoming API requests on port 80 or 443. (If you want to do so, you must also update the config file's port settings.)
|
||||
|
||||
sudo setcap 'cap_net_bind_service=+ep' /opt/ripple/bin/rippled
|
||||
|
||||
|
||||
## With the YUM package manager
|
||||
|
||||
1. Install the Ripple RPM repository:
|
||||
|
||||
Choose the appropriate RPM repository for the stability of releases you want:
|
||||
|
||||
- `stable` for the latest production release (`master` branch)
|
||||
- `unstable` for pre-release builds (`release` branch)
|
||||
- `nightly` for experimental/development builds (`develop` branch)
|
||||
|
||||
*Stable*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-stable]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/stable/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/stable/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
*Unstable*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-unstable]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/unstable/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/unstable/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
*Nightly*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-nightly]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/nightly/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/nightly/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
2. Fetch the latest repo updates:
|
||||
|
||||
sudo yum -y update
|
||||
|
||||
3. Install the new `rippled` package:
|
||||
|
||||
sudo yum install -y rippled
|
||||
|
||||
4. Configure the `rippled` service to start on boot:
|
||||
|
||||
sudo systemctl enable rippled.service
|
||||
|
||||
5. Start the `rippled` service:
|
||||
|
||||
sudo systemctl start rippled.service
|
||||
After the systemd service file is installed, it must be loaded with: `systemctl daemon-reload`. xahaud can then be enabled: `systemctl enable --now xahaud`.
|
||||
|
||||
11
external/snappy/conanfile.py
vendored
11
external/snappy/conanfile.py
vendored
@@ -77,9 +77,14 @@ class SnappyConan(ConanFile):
|
||||
self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
|
||||
# TODO: back to global scope in conan v2 once cmake_find_package* generators removed
|
||||
self.cpp_info.components["snappylib"].libs = ["snappy"]
|
||||
if not self.options.shared:
|
||||
if self.settings.os in ["Linux", "FreeBSD"]:
|
||||
self.cpp_info.components["snappylib"].system_libs.append("m")
|
||||
# The following block is commented out as a workaround for a bug in the
|
||||
# Conan 1.x CMakeDeps generator. Including system_libs ("m") here
|
||||
# incorrectly triggers a heuristic that adds a dynamic link to `stdc++`
|
||||
# (-lstdc++), preventing a fully static build.
|
||||
# This behavior is expected to be corrected in Conan 2.
|
||||
# if not self.options.shared:
|
||||
# if self.settings.os in ["Linux", "FreeBSD"]:
|
||||
# self.cpp_info.components["snappylib"].system_libs.append("m")
|
||||
|
||||
# TODO: to remove in conan v2 once cmake_find_package* generators removed
|
||||
self.cpp_info.names["cmake_find_package"] = "Snappy"
|
||||
|
||||
11
external/wasmedge/conanfile.py
vendored
11
external/wasmedge/conanfile.py
vendored
@@ -38,8 +38,15 @@ class WasmedgeConan(ConanFile):
|
||||
raise ConanInvalidConfiguration("Binaries for this combination of version/os/arch/compiler are not available")
|
||||
|
||||
def package_id(self):
|
||||
del self.info.settings.compiler.version
|
||||
self.info.settings.compiler = self._compiler_alias
|
||||
# Make binary compatible across compiler versions (since we're downloading prebuilt)
|
||||
self.info.settings.rm_safe("compiler.version")
|
||||
# Group compilers by their binary compatibility
|
||||
# Note: We must use self.info.settings here, not self.settings (forbidden in Conan 2)
|
||||
compiler_name = str(self.info.settings.compiler)
|
||||
if compiler_name in ["Visual Studio", "msvc"]:
|
||||
self.info.settings.compiler = "Visual Studio"
|
||||
else:
|
||||
self.info.settings.compiler = "gcc"
|
||||
|
||||
def build(self):
|
||||
# This is packaging binaries so the download needs to be in build
|
||||
|
||||
@@ -48,4 +48,4 @@
|
||||
#define TOO_MANY_STATE_MODIFICATIONS -44
|
||||
#define TOO_MANY_NAMESPACES -45
|
||||
#define HOOK_ERROR_CODES
|
||||
#endif //HOOK_ERROR_CODES
|
||||
#endif //HOOK_ERROR_CODES
|
||||
|
||||
@@ -12,8 +12,6 @@ accept(uint32_t read_ptr, uint32_t read_len, int64_t error_code);
|
||||
extern int64_t
|
||||
rollback(uint32_t read_ptr, uint32_t read_len, int64_t error_code);
|
||||
|
||||
// UTIL
|
||||
|
||||
extern int64_t
|
||||
util_raddr(
|
||||
uint32_t write_ptr,
|
||||
@@ -56,8 +54,6 @@ util_keylet(
|
||||
uint32_t e,
|
||||
uint32_t f);
|
||||
|
||||
// STO
|
||||
|
||||
extern int64_t
|
||||
sto_validate(uint32_t tread_ptr, uint32_t tread_len);
|
||||
|
||||
@@ -85,10 +81,8 @@ sto_erase(
|
||||
uint32_t read_len,
|
||||
uint32_t field_id);
|
||||
|
||||
// EMITTED TXN
|
||||
|
||||
extern int64_t
|
||||
etxn_burden(void);
|
||||
etxn_burden();
|
||||
|
||||
extern int64_t
|
||||
etxn_details(uint32_t write_ptr, uint32_t write_len);
|
||||
@@ -100,7 +94,7 @@ extern int64_t
|
||||
etxn_reserve(uint32_t count);
|
||||
|
||||
extern int64_t
|
||||
etxn_generation(void);
|
||||
etxn_generation();
|
||||
|
||||
extern int64_t
|
||||
etxn_nonce(uint32_t write_ptr, uint32_t write_len);
|
||||
@@ -112,8 +106,6 @@ emit(
|
||||
uint32_t read_ptr,
|
||||
uint32_t read_len);
|
||||
|
||||
// FLOAT
|
||||
|
||||
extern int64_t
|
||||
float_set(int32_t exponent, int64_t mantissa);
|
||||
|
||||
@@ -157,7 +149,7 @@ extern int64_t
|
||||
float_divide(int64_t float1, int64_t float2);
|
||||
|
||||
extern int64_t
|
||||
float_one(void);
|
||||
float_one();
|
||||
|
||||
extern int64_t
|
||||
float_mantissa(int64_t float1);
|
||||
@@ -174,16 +166,14 @@ float_log(int64_t float1);
|
||||
extern int64_t
|
||||
float_root(int64_t float1, uint32_t n);
|
||||
|
||||
// LEDGER
|
||||
extern int64_t
|
||||
fee_base();
|
||||
|
||||
extern int64_t
|
||||
fee_base(void);
|
||||
ledger_seq();
|
||||
|
||||
extern int64_t
|
||||
ledger_seq(void);
|
||||
|
||||
extern int64_t
|
||||
ledger_last_time(void);
|
||||
ledger_last_time();
|
||||
|
||||
extern int64_t
|
||||
ledger_last_hash(uint32_t write_ptr, uint32_t write_len);
|
||||
@@ -200,8 +190,6 @@ ledger_keylet(
|
||||
uint32_t hread_ptr,
|
||||
uint32_t hread_len);
|
||||
|
||||
// HOOK
|
||||
|
||||
extern int64_t
|
||||
hook_account(uint32_t write_ptr, uint32_t write_len);
|
||||
|
||||
@@ -225,15 +213,13 @@ hook_param(
|
||||
uint32_t read_len);
|
||||
|
||||
extern int64_t
|
||||
hook_again(void);
|
||||
hook_again();
|
||||
|
||||
extern int64_t
|
||||
hook_skip(uint32_t read_ptr, uint32_t read_len, uint32_t flags);
|
||||
|
||||
extern int64_t
|
||||
hook_pos(void);
|
||||
|
||||
// SLOT
|
||||
hook_pos();
|
||||
|
||||
extern int64_t
|
||||
slot(uint32_t write_ptr, uint32_t write_len, uint32_t slot);
|
||||
@@ -262,8 +248,6 @@ slot_type(uint32_t slot_no, uint32_t flags);
|
||||
extern int64_t
|
||||
slot_float(uint32_t slot_no);
|
||||
|
||||
// STATE
|
||||
|
||||
extern int64_t
|
||||
state_set(
|
||||
uint32_t read_ptr,
|
||||
@@ -300,8 +284,6 @@ state_foreign(
|
||||
uint32_t aread_ptr,
|
||||
uint32_t aread_len);
|
||||
|
||||
// TRACE
|
||||
|
||||
extern int64_t
|
||||
trace(
|
||||
uint32_t mread_ptr,
|
||||
@@ -316,22 +298,20 @@ trace_num(uint32_t read_ptr, uint32_t read_len, int64_t number);
|
||||
extern int64_t
|
||||
trace_float(uint32_t read_ptr, uint32_t read_len, int64_t float1);
|
||||
|
||||
// OTXN
|
||||
|
||||
extern int64_t
|
||||
otxn_burden(void);
|
||||
otxn_burden();
|
||||
|
||||
extern int64_t
|
||||
otxn_field(uint32_t write_ptr, uint32_t write_len, uint32_t field_id);
|
||||
|
||||
extern int64_t
|
||||
otxn_generation(void);
|
||||
otxn_generation();
|
||||
|
||||
extern int64_t
|
||||
otxn_id(uint32_t write_ptr, uint32_t write_len, uint32_t flags);
|
||||
|
||||
extern int64_t
|
||||
otxn_type(void);
|
||||
otxn_type();
|
||||
|
||||
extern int64_t
|
||||
otxn_slot(uint32_t slot_no);
|
||||
@@ -346,9 +326,15 @@ otxn_param(
|
||||
extern int64_t
|
||||
meta_slot(uint32_t slot_no);
|
||||
|
||||
// featureHooks1
|
||||
extern int64_t
|
||||
xpop_slot(uint32_t slot_no_tx, uint32_t slot_no_meta);
|
||||
|
||||
extern int64_t xpop_slot(uint32_t, uint32_t);
|
||||
extern int64_t
|
||||
prepare(
|
||||
uint32_t write_ptr,
|
||||
uint32_t write_len,
|
||||
uint32_t read_ptr,
|
||||
uint32_t read_len);
|
||||
|
||||
#define HOOK_EXTERN
|
||||
#endif // HOOK_EXTERN
|
||||
|
||||
58
hook/generate_error.sh
Executable file
58
hook/generate_error.sh
Executable file
@@ -0,0 +1,58 @@
|
||||
#!/bin/bash
|
||||
set -eu
|
||||
|
||||
SCRIPT_DIR=$(dirname "$0")
|
||||
SCRIPT_DIR=$(cd "$SCRIPT_DIR" && pwd)
|
||||
|
||||
ENUM_FILE="$SCRIPT_DIR/../include/xrpl/hook/Enum.h"
|
||||
|
||||
echo '// For documentation please see: https://xrpl-hooks.readme.io/reference/'
|
||||
echo '// Generated using generate_error.sh'
|
||||
echo '#ifndef HOOK_ERROR_CODES'
|
||||
sed -n '/enum hook_return_code/,/};/p' "$ENUM_FILE" |
|
||||
awk '
|
||||
function ltrim(s) { sub(/^[[:space:]]+/, "", s); return s }
|
||||
function rtrim(s) { sub(/[[:space:]]+$/, "", s); return s }
|
||||
function trim(s) { return rtrim(ltrim(s)) }
|
||||
function emit(entry) {
|
||||
entry = trim(entry)
|
||||
if (entry == "")
|
||||
return
|
||||
gsub(/,[[:space:]]*$/, "", entry)
|
||||
split(entry, parts, "=")
|
||||
if (length(parts) < 2)
|
||||
return
|
||||
name = trim(parts[1])
|
||||
value = trim(parts[2])
|
||||
if (name == "" || value == "")
|
||||
return
|
||||
printf "#define %s %s\n", name, value
|
||||
}
|
||||
|
||||
{
|
||||
line = $0
|
||||
if (line ~ /enum[[:space:]]+hook_return_code/)
|
||||
next
|
||||
if (line ~ /^[[:space:]]*\{/)
|
||||
next
|
||||
|
||||
sub(/\/\/.*$/, "", line)
|
||||
|
||||
if (line ~ /^[[:space:]]*\};/) {
|
||||
emit(buffer)
|
||||
exit
|
||||
}
|
||||
|
||||
if (line ~ /^[[:space:]]*$/)
|
||||
next
|
||||
|
||||
buffer = buffer line " "
|
||||
|
||||
if (line ~ /,[[:space:]]*$/) {
|
||||
emit(buffer)
|
||||
buffer = ""
|
||||
}
|
||||
}
|
||||
'
|
||||
echo '#define HOOK_ERROR_CODES'
|
||||
echo '#endif //HOOK_ERROR_CODES'
|
||||
54
hook/generate_extern.sh
Executable file
54
hook/generate_extern.sh
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/bin/bash
|
||||
set -eu
|
||||
|
||||
SCRIPT_DIR=$(dirname "$0")
|
||||
SCRIPT_DIR=$(cd "$SCRIPT_DIR" && pwd)
|
||||
|
||||
APPLY_HOOK="$SCRIPT_DIR/../include/xrpl/hook/hook_api.macro"
|
||||
|
||||
{
|
||||
echo '// For documentation please see: https://xrpl-hooks.readme.io/reference/'
|
||||
echo '// Generated using generate_extern.sh'
|
||||
echo '#include <stdint.h>'
|
||||
echo '#ifndef HOOK_EXTERN'
|
||||
echo
|
||||
awk '
|
||||
function trim(s) {
|
||||
sub(/^[[:space:]]+/, "", s);
|
||||
sub(/[[:space:]]+$/, "", s);
|
||||
return s;
|
||||
}
|
||||
|
||||
{
|
||||
line = $0;
|
||||
|
||||
# Skip block comments
|
||||
if (line ~ /\/\*/) {
|
||||
next;
|
||||
}
|
||||
|
||||
# Look for comment lines that start with // and contain function signature
|
||||
if (line ~ /^[[:space:]]*\/\/[[:space:]]*[a-zA-Z_][a-zA-Z0-9_]*[[:space:]]+[a-zA-Z_][a-zA-Z0-9_]*[[:space:]]*\(/) {
|
||||
# Remove leading // and trim
|
||||
sub(/^[[:space:]]*\/\/[[:space:]]*/, "", line);
|
||||
line = trim(line);
|
||||
|
||||
# Check if function name is "_g" to add attribute
|
||||
if (line ~ /[[:space:]]+_g[[:space:]]*\(/) {
|
||||
# Insert __attribute__((noduplicate)) before _g
|
||||
sub(/[[:space:]]+_g/, " __attribute__((noduplicate)) _g", line);
|
||||
}
|
||||
|
||||
# printf("\n");
|
||||
|
||||
printf("extern %s\n\n", line);
|
||||
}
|
||||
}
|
||||
' "$APPLY_HOOK"
|
||||
|
||||
echo '#define HOOK_EXTERN'
|
||||
echo '#endif // HOOK_EXTERN'
|
||||
} | (
|
||||
cd "$SCRIPT_DIR/.."
|
||||
clang-format --style=file -
|
||||
)
|
||||
@@ -1,5 +1,9 @@
|
||||
#!/bin/bash
|
||||
RIPPLED_ROOT="../include/xrpl"
|
||||
set -eu
|
||||
|
||||
SCRIPT_DIR=$(dirname "$0")
|
||||
SCRIPT_DIR=$(cd "$SCRIPT_DIR" && pwd)
|
||||
RIPPLED_ROOT="$SCRIPT_DIR/../include/xrpl"
|
||||
echo '// For documentation please see: https://xrpl-hooks.readme.io/reference/'
|
||||
echo '// Generated using generate_sfcodes.sh'
|
||||
cat $RIPPLED_ROOT/protocol/detail/sfields.macro | grep -E '^(TYPED_SFIELD|UNTYPED_SFIELD)' |
|
||||
|
||||
35
hook/generate_tts.sh
Executable file
35
hook/generate_tts.sh
Executable file
@@ -0,0 +1,35 @@
|
||||
#!/bin/bash
|
||||
set -eu
|
||||
|
||||
SCRIPT_DIR=$(dirname "$0")
|
||||
SCRIPT_DIR=$(cd "$SCRIPT_DIR" && pwd)
|
||||
|
||||
RIPPLED_ROOT="$SCRIPT_DIR/../include/xrpl"
|
||||
TX_FORMATS="$RIPPLED_ROOT/protocol/detail/transactions.macro"
|
||||
|
||||
echo '// For documentation please see: https://xrpl-hooks.readme.io/reference/'
|
||||
echo '// Generated using generate_tts.sh'
|
||||
cat "$TX_FORMATS" |
|
||||
awk '
|
||||
function ltrim(s) { sub(/^[[:space:]]+/, "", s); return s }
|
||||
function rtrim(s) { sub(/[[:space:]]+$/, "", s); return s }
|
||||
function trim(s) { return rtrim(ltrim(s)) }
|
||||
|
||||
/^TRANSACTION\(tt/ {
|
||||
line = $0
|
||||
sub(/^TRANSACTION\(/, "", line)
|
||||
|
||||
split(line, parts, ",")
|
||||
if (length(parts) < 2)
|
||||
next
|
||||
|
||||
name = trim(parts[1])
|
||||
value = trim(parts[2])
|
||||
if (name == "" || value == "")
|
||||
next
|
||||
|
||||
prefix = deprecated ? "// " : ""
|
||||
postfix = deprecated ? " // deprecated" : ""
|
||||
printf "%s#define %s %s%s\n", prefix, name, value, postfix
|
||||
}
|
||||
'
|
||||
@@ -37,6 +37,7 @@
|
||||
#define KEYLET_NFT_OFFER 23
|
||||
#define KEYLET_HOOK_DEFINITION 24
|
||||
#define KEYLET_HOOK_STATE_DIR 25
|
||||
#define KEYLET_CRON 26
|
||||
|
||||
#define COMPARE_EQUAL 1U
|
||||
#define COMPARE_LESS 2U
|
||||
|
||||
@@ -20,7 +20,8 @@
|
||||
#define sfHookEmitCount ((1U << 16U) + 18U)
|
||||
#define sfHookExecutionIndex ((1U << 16U) + 19U)
|
||||
#define sfHookApiVersion ((1U << 16U) + 20U)
|
||||
#define sfLedgerFixType ((1U << 16U) + 21U)
|
||||
#define sfHookStateScale ((1U << 16U) + 21U)
|
||||
#define sfLedgerFixType ((1U << 16U) + 22U)
|
||||
#define sfNetworkID ((2U << 16U) + 1U)
|
||||
#define sfFlags ((2U << 16U) + 2U)
|
||||
#define sfSourceTag ((2U << 16U) + 3U)
|
||||
@@ -71,6 +72,9 @@
|
||||
#define sfLockCount ((2U << 16U) + 49U)
|
||||
#define sfFirstNFTokenSequence ((2U << 16U) + 50U)
|
||||
#define sfOracleDocumentID ((2U << 16U) + 51U)
|
||||
#define sfStartTime ((2U << 16U) + 93U)
|
||||
#define sfRepeatCount ((2U << 16U) + 94U)
|
||||
#define sfDelaySeconds ((2U << 16U) + 95U)
|
||||
#define sfXahauActivationLgrSeq ((2U << 16U) + 96U)
|
||||
#define sfImportSequence ((2U << 16U) + 97U)
|
||||
#define sfRewardTime ((2U << 16U) + 98U)
|
||||
@@ -152,6 +156,9 @@
|
||||
#define sfGovernanceMarks ((5U << 16U) + 98U)
|
||||
#define sfEmittedTxnID ((5U << 16U) + 97U)
|
||||
#define sfHookCanEmit ((5U << 16U) + 96U)
|
||||
#define sfCron ((5U << 16U) + 95U)
|
||||
#define sfHookOnIncoming ((5U << 16U) + 94U)
|
||||
#define sfHookOnOutgoing ((5U << 16U) + 93U)
|
||||
#define sfNumber ((9U << 16U) + 1U)
|
||||
#define sfAmount ((6U << 16U) + 1U)
|
||||
#define sfBalance ((6U << 16U) + 2U)
|
||||
@@ -313,4 +320,4 @@
|
||||
#define sfActiveValidators ((15U << 16U) + 95U)
|
||||
#define sfImportVLKeys ((15U << 16U) + 94U)
|
||||
#define sfHookEmissions ((15U << 16U) + 93U)
|
||||
#define sfAmounts ((15U << 16U) + 92U)
|
||||
#define sfAmounts ((15U << 16U) + 92U)
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
// For documentation please see: https://xrpl-hooks.readme.io/reference/
|
||||
// Generated using generate_tts.sh
|
||||
#define ttPAYMENT 0
|
||||
#define ttESCROW_CREATE 1
|
||||
#define ttESCROW_FINISH 2
|
||||
#define ttACCOUNT_SET 3
|
||||
#define ttESCROW_CANCEL 4
|
||||
#define ttREGULAR_KEY_SET 5
|
||||
// #define ttNICKNAME_SET 6 // deprecated
|
||||
#define ttOFFER_CREATE 7
|
||||
#define ttOFFER_CANCEL 8
|
||||
#define ttTICKET_CREATE 10
|
||||
// #define ttSPINAL_TAP 11 // deprecated
|
||||
#define ttSIGNER_LIST_SET 12
|
||||
#define ttPAYCHAN_CREATE 13
|
||||
#define ttPAYCHAN_FUND 14
|
||||
@@ -62,6 +61,8 @@
|
||||
#define ttNFTOKEN_MODIFY 70
|
||||
#define ttPERMISSIONED_DOMAIN_SET 71
|
||||
#define ttPERMISSIONED_DOMAIN_DELETE 72
|
||||
#define ttCRON 92
|
||||
#define ttCRON_SET 93
|
||||
#define ttREMARKS_SET 94
|
||||
#define ttREMIT 95
|
||||
#define ttGENESIS_MINT 96
|
||||
@@ -72,4 +73,4 @@
|
||||
#define ttFEE 101
|
||||
#define ttUNL_MODIFY 102
|
||||
#define ttEMIT_FAILURE 103
|
||||
#define ttUNL_REPORT 104
|
||||
#define ttUNL_REPORT 104
|
||||
|
||||
@@ -138,14 +138,14 @@ public:
|
||||
template <typename U>
|
||||
requires std::convertible_to<U, T>
|
||||
constexpr Expected(U&& r)
|
||||
: Base(boost::outcome_v2::in_place_type_t<T>{}, std::forward<U>(r))
|
||||
: Base(boost::outcome_v2::success(T(std::forward<U>(r))))
|
||||
{
|
||||
}
|
||||
|
||||
template <typename U>
|
||||
requires std::convertible_to<U, E> && (!std::is_reference_v<U>)
|
||||
constexpr Expected(Unexpected<U> e)
|
||||
: Base(boost::outcome_v2::in_place_type_t<E>{}, std::move(e.value()))
|
||||
: Base(boost::outcome_v2::failure(E(std::move(e.value()))))
|
||||
{
|
||||
}
|
||||
|
||||
@@ -230,7 +230,8 @@ public:
|
||||
|
||||
template <typename U>
|
||||
requires std::convertible_to<U, E> && (!std::is_reference_v<U>)
|
||||
constexpr Expected(Unexpected<U> e) : Base(E(std::move(e.value())))
|
||||
constexpr Expected(Unexpected<U> e)
|
||||
: Base(boost::outcome_v2::failure(E(std::move(e.value()))))
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
@@ -253,13 +253,22 @@ private:
|
||||
// Wraps a Journal::Stream to skip evaluation of
|
||||
// expensive argument lists if the stream is not active.
|
||||
#ifndef JLOG
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
#define JLOG(x) \
|
||||
if (!x) \
|
||||
if (!(x)) \
|
||||
{ \
|
||||
} \
|
||||
else \
|
||||
(x).withLocation(__FILE__, __LINE__)
|
||||
#else
|
||||
#define JLOG(x) \
|
||||
if (!(x)) \
|
||||
{ \
|
||||
} \
|
||||
else \
|
||||
x
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifndef CLOG
|
||||
#define CLOG(ss) \
|
||||
|
||||
91
include/xrpl/beast/utility/EnhancedLogging.h
Normal file
91
include/xrpl/beast/utility/EnhancedLogging.h
Normal file
@@ -0,0 +1,91 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of Beast: https://github.com/vinniefalco/Beast
|
||||
Copyright 2013, Vinnie Falco <vinnie.falco@gmail.com>
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef BEAST_UTILITY_ENHANCEDLOGGING_H_INCLUDED
|
||||
#define BEAST_UTILITY_ENHANCEDLOGGING_H_INCLUDED
|
||||
|
||||
#include <cstddef> // for size_t
|
||||
#include <iosfwd> // for std::ostream
|
||||
|
||||
namespace beast {
|
||||
namespace detail {
|
||||
|
||||
// Check if we should use colors - cached at startup
|
||||
bool
|
||||
should_log_use_colors();
|
||||
|
||||
// Get the log highlight color - can be overridden via
|
||||
// LOG_HIGHLIGHT_COLOR
|
||||
const char*
|
||||
get_log_highlight_color();
|
||||
|
||||
// Strip source root path from __FILE__ at compile time
|
||||
// IMPORTANT: This MUST stay in the header as constexpr for compile-time
|
||||
// evaluation!
|
||||
constexpr const char*
|
||||
strip_source_root(const char* file)
|
||||
{
|
||||
// Handle relative paths from build/ directory (common with ccache)
|
||||
// e.g., "../src/ripple/..." -> "ripple/..."
|
||||
if (file && file[0] == '.' && file[1] == '.' && file[2] == '/' &&
|
||||
file[3] == 's' && file[4] == 'r' && file[5] == 'c' && file[6] == '/')
|
||||
{
|
||||
return file + 7; // skip "../src/"
|
||||
}
|
||||
|
||||
#ifdef SOURCE_ROOT_PATH
|
||||
constexpr const char* sourceRoot = SOURCE_ROOT_PATH;
|
||||
constexpr auto strlen_constexpr = [](const char* s) constexpr {
|
||||
const char* p = s;
|
||||
while (*p)
|
||||
++p;
|
||||
return p - s;
|
||||
};
|
||||
constexpr auto strncmp_constexpr =
|
||||
[](const char* a, const char* b, size_t n) constexpr {
|
||||
for (size_t i = 0; i < n; ++i)
|
||||
{
|
||||
if (a[i] != b[i])
|
||||
return a[i] - b[i];
|
||||
if (a[i] == '\0')
|
||||
break;
|
||||
}
|
||||
return 0;
|
||||
};
|
||||
constexpr size_t sourceRootLen = strlen_constexpr(sourceRoot);
|
||||
return (strncmp_constexpr(file, sourceRoot, sourceRootLen) == 0)
|
||||
? file + sourceRootLen
|
||||
: file;
|
||||
#else
|
||||
return file;
|
||||
#endif
|
||||
}
|
||||
|
||||
// Check if location info should be shown - cached at startup
|
||||
bool
|
||||
should_show_location();
|
||||
|
||||
// Helper to write location string (no leading/trailing space)
|
||||
void
|
||||
log_write_location_string(std::ostream& os, const char* file, int line);
|
||||
|
||||
} // namespace detail
|
||||
} // namespace beast
|
||||
|
||||
#endif
|
||||
@@ -155,6 +155,10 @@ public:
|
||||
|
||||
ScopedStream(Sink& sink, Severity level);
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
ScopedStream(Sink& sink, Severity level, const char* file, int line);
|
||||
#endif
|
||||
|
||||
template <typename T>
|
||||
ScopedStream(Stream const& stream, T const& t);
|
||||
|
||||
@@ -182,6 +186,10 @@ public:
|
||||
Sink& m_sink;
|
||||
Severity const m_level;
|
||||
std::ostringstream mutable m_ostream;
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
const char* file_ = nullptr;
|
||||
int line_ = 0;
|
||||
#endif
|
||||
};
|
||||
|
||||
#ifndef __INTELLISENSE__
|
||||
@@ -200,6 +208,33 @@ public:
|
||||
//--------------------------------------------------------------------------
|
||||
public:
|
||||
/** Provide a light-weight way to check active() before string formatting */
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
/** Stream with location information that prepends file:line to the first
|
||||
* message */
|
||||
class StreamWithLocation
|
||||
{
|
||||
public:
|
||||
StreamWithLocation(Stream const& stream, const char* file, int line)
|
||||
: file_(file), line_(line), stream_(stream)
|
||||
{
|
||||
}
|
||||
|
||||
/** Override to inject file:line before the first output */
|
||||
template <typename T>
|
||||
ScopedStream
|
||||
operator<<(T const& t) const;
|
||||
|
||||
ScopedStream
|
||||
operator<<(std::ostream& manip(std::ostream&)) const;
|
||||
|
||||
private:
|
||||
const char* file_;
|
||||
int line_;
|
||||
const Stream& stream_;
|
||||
};
|
||||
#endif
|
||||
|
||||
class Stream
|
||||
{
|
||||
public:
|
||||
@@ -267,6 +302,15 @@ public:
|
||||
operator<<(T const& t) const;
|
||||
/** @} */
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
/** Create a StreamWithLocation that prepends file:line info */
|
||||
StreamWithLocation
|
||||
withLocation(const char* file, int line) const
|
||||
{
|
||||
return StreamWithLocation(*this, file, line);
|
||||
}
|
||||
#endif
|
||||
|
||||
private:
|
||||
Sink& m_sink;
|
||||
Severity m_level;
|
||||
@@ -366,6 +410,8 @@ static_assert(std::is_nothrow_destructible<Journal>::value == true, "");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
template <typename T>
|
||||
Journal::ScopedStream::ScopedStream(Journal::Stream const& stream, T const& t)
|
||||
: ScopedStream(stream.sink(), stream.level())
|
||||
@@ -390,6 +436,21 @@ Journal::Stream::operator<<(T const& t) const
|
||||
return ScopedStream(*this, t);
|
||||
}
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
template <typename T>
|
||||
Journal::ScopedStream
|
||||
Journal::StreamWithLocation::operator<<(T const& t) const
|
||||
{
|
||||
// Create a ScopedStream with location info
|
||||
ScopedStream scoped(stream_.sink(), stream_.level(), file_, line_);
|
||||
scoped.ostream() << t;
|
||||
return scoped;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
namespace detail {
|
||||
|
||||
template <class CharT, class Traits = std::char_traits<CharT>>
|
||||
|
||||
@@ -1,9 +1,33 @@
|
||||
#include <cstdint>
|
||||
#include <map>
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#ifndef HOOKENUM_INCLUDED
|
||||
#define HOOKENUM_INCLUDED 1
|
||||
|
||||
#ifndef GUARD_CHECKER_BUILD
|
||||
#include <xrpl/basics/base_uint.h>
|
||||
#include <xrpl/protocol/Feature.h>
|
||||
#include <xrpl/protocol/Rules.h>
|
||||
#else
|
||||
// Override uint256, Feature and Rules for guard checker build
|
||||
#define uint256 std::string
|
||||
#define featureHooksUpdate1 "1"
|
||||
#define featureHooksUpdate2 "1"
|
||||
#define fix20250131 "1"
|
||||
namespace hook_api {
|
||||
struct Rules
|
||||
{
|
||||
constexpr bool
|
||||
enabled(const uint256& feature) const
|
||||
{
|
||||
return true;
|
||||
}
|
||||
};
|
||||
} // namespace hook_api
|
||||
#endif
|
||||
|
||||
namespace ripple {
|
||||
enum HookSetOperation : int8_t {
|
||||
hsoINVALID = -1,
|
||||
@@ -28,6 +52,8 @@ enum HookEmissionFlags : uint16_t {
|
||||
};
|
||||
} // namespace ripple
|
||||
|
||||
using namespace ripple;
|
||||
|
||||
namespace hook {
|
||||
// RH TODO: put these somewhere better, and allow rules to be fed in
|
||||
inline uint32_t
|
||||
@@ -42,10 +68,26 @@ maxHookParameterValueSize(void)
|
||||
return 256;
|
||||
}
|
||||
|
||||
inline uint32_t
|
||||
maxHookStateDataSize(void)
|
||||
inline uint16_t
|
||||
maxHookStateScale(void)
|
||||
{
|
||||
return 256U;
|
||||
return 16;
|
||||
}
|
||||
|
||||
inline uint32_t
|
||||
maxHookStateDataSize(uint16_t hookStateScale)
|
||||
{
|
||||
if (hookStateScale == 0)
|
||||
{
|
||||
// should not happen, but just in case
|
||||
return 256U;
|
||||
}
|
||||
if (hookStateScale > maxHookStateScale())
|
||||
{
|
||||
// should not happen, but just in case
|
||||
return 256 * maxHookStateScale();
|
||||
}
|
||||
return 256U * hookStateScale;
|
||||
}
|
||||
|
||||
inline uint32_t
|
||||
@@ -259,8 +301,17 @@ enum keylet_code : uint32_t {
|
||||
NFT_OFFER = 23,
|
||||
HOOK_DEFINITION = 24,
|
||||
HOOK_STATE_DIR = 25,
|
||||
LAST_KLTYPE_V0 = HOOK_DEFINITION,
|
||||
LAST_KLTYPE_V1 = HOOK_STATE_DIR,
|
||||
CRON = 26,
|
||||
AMM = 27,
|
||||
BRIDGE = 28,
|
||||
XCHAIN_OWNED_CLAIM_ID = 29,
|
||||
XCHAIN_OWNED_CREATE_ACCOUNT_CLAIM_ID = 30,
|
||||
DID = 31,
|
||||
ORACLE = 32,
|
||||
MPTOKEN_ISSUANCE = 33,
|
||||
MPTOKEN = 34,
|
||||
CREDENTIAL = 35,
|
||||
PERMISSIONED_DOMAIN = 36,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -349,90 +400,59 @@ const uint8_t max_emit = 255;
|
||||
const uint8_t max_params = 16;
|
||||
const double fee_base_multiplier = 1.1f;
|
||||
|
||||
using APIWhitelist = std::map<std::string, std::vector<uint8_t>>;
|
||||
|
||||
// RH NOTE: Find descriptions of api functions in ./impl/applyHook.cpp and
|
||||
// hookapi.h (include for hooks) this is a map of the api name to its return
|
||||
// code (vec[0] and its parameters vec[>0]) as wasm type codes
|
||||
static const std::map<std::string, std::vector<uint8_t>> import_whitelist{
|
||||
{"_g", {0x7FU, 0x7FU, 0x7FU}},
|
||||
{"accept", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
|
||||
{"rollback", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
|
||||
{"util_raddr", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"util_accid", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"util_verify", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"util_sha512h", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"util_keylet",
|
||||
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"sto_validate", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"sto_subfield", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"sto_subarray", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"sto_emplace", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"sto_erase", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"etxn_burden", {0x7EU}},
|
||||
{"etxn_details", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"etxn_fee_base", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"etxn_reserve", {0x7EU, 0x7FU}},
|
||||
{"etxn_generation", {0x7EU}},
|
||||
{"etxn_nonce", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"emit", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"float_set", {0x7EU, 0x7FU, 0x7EU}},
|
||||
{"float_multiply", {0x7EU, 0x7EU, 0x7EU}},
|
||||
{"float_mulratio", {0x7EU, 0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"float_negate", {0x7EU, 0x7EU}},
|
||||
{"float_compare", {0x7EU, 0x7EU, 0x7EU, 0x7FU}},
|
||||
{"float_sum", {0x7EU, 0x7EU, 0x7EU}},
|
||||
{"float_sto",
|
||||
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7EU, 0x7FU}},
|
||||
{"float_sto_set", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"float_invert", {0x7EU, 0x7EU}},
|
||||
{"float_divide", {0x7EU, 0x7EU, 0x7EU}},
|
||||
{"float_one", {0x7EU}},
|
||||
{"float_mantissa", {0x7EU, 0x7EU}},
|
||||
{"float_sign", {0x7EU, 0x7EU}},
|
||||
{"float_int", {0x7EU, 0x7EU, 0x7FU, 0x7FU}},
|
||||
{"float_log", {0x7EU, 0x7EU}},
|
||||
{"float_root", {0x7EU, 0x7EU, 0x7FU}},
|
||||
{"fee_base", {0x7EU}},
|
||||
{"ledger_seq", {0x7EU}},
|
||||
{"ledger_last_time", {0x7EU}},
|
||||
{"ledger_last_hash", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"ledger_nonce", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"ledger_keylet", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"hook_account", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"hook_hash", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"hook_param_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"hook_param", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"hook_again", {0x7EU}},
|
||||
{"hook_skip", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"hook_pos", {0x7EU}},
|
||||
{"slot", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"slot_clear", {0x7EU, 0x7FU}},
|
||||
{"slot_count", {0x7EU, 0x7FU}},
|
||||
{"slot_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"slot_size", {0x7EU, 0x7FU}},
|
||||
{"slot_subarray", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"slot_subfield", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"slot_type", {0x7EU, 0x7FU, 0x7FU}},
|
||||
{"slot_float", {0x7EU, 0x7FU}},
|
||||
{"state_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"state_foreign_set",
|
||||
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"state", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"state_foreign",
|
||||
{0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"trace", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"trace_num", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
|
||||
{"trace_float", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
|
||||
{"otxn_burden", {0x7EU}},
|
||||
{"otxn_field", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"otxn_generation", {0x7EU}},
|
||||
{"otxn_id", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"otxn_type", {0x7EU}},
|
||||
{"otxn_slot", {0x7EU, 0x7FU}},
|
||||
{"otxn_param", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
|
||||
{"meta_slot", {0x7EU, 0x7FU}}};
|
||||
inline APIWhitelist
|
||||
getImportWhitelist(Rules const& rules)
|
||||
{
|
||||
APIWhitelist whitelist;
|
||||
|
||||
#pragma push_macro("HOOK_API_DEFINITION")
|
||||
#undef HOOK_API_DEFINITION
|
||||
|
||||
#define int64_t 0x7EU
|
||||
#define int32_t 0x7FU
|
||||
#define uint32_t 0x7FU
|
||||
|
||||
#define HOOK_WRAP_PARAMS(...) __VA_ARGS__
|
||||
|
||||
#define HOOK_API_DEFINITION( \
|
||||
RETURN_TYPE, FUNCTION_NAME, PARAMS_TUPLE, AMENDMENT) \
|
||||
if (AMENDMENT == uint256{} || rules.enabled(AMENDMENT)) \
|
||||
whitelist[#FUNCTION_NAME] = { \
|
||||
RETURN_TYPE, HOOK_WRAP_PARAMS PARAMS_TUPLE};
|
||||
|
||||
#include "hook_api.macro"
|
||||
|
||||
#undef HOOK_API_DEFINITION
|
||||
#undef HOOK_WRAP_PARAMS
|
||||
#undef int64_t
|
||||
#undef int32_t
|
||||
#undef uint32_t
|
||||
#pragma pop_macro("HOOK_API_DEFINITION")
|
||||
|
||||
return whitelist;
|
||||
}
|
||||
|
||||
#undef HOOK_API_DEFINITION
|
||||
#undef I32
|
||||
#undef I64
|
||||
|
||||
enum GuardRulesVersion : uint64_t {
|
||||
GuardRuleFix20250131 = 0x00000001,
|
||||
};
|
||||
|
||||
inline uint64_t
|
||||
getGuardRulesVersion(Rules const& rules)
|
||||
{
|
||||
uint64_t version = 0;
|
||||
if (rules.enabled(fix20250131))
|
||||
version |= GuardRuleFix20250131;
|
||||
return version;
|
||||
}
|
||||
|
||||
// featureHooks1
|
||||
static const std::map<std::string, std::vector<uint8_t>> import_whitelist_1{
|
||||
{"xpop_slot", {0x7EU, 0x7FU, 0x7FU}}};
|
||||
}; // namespace hook_api
|
||||
#endif
|
||||
|
||||
@@ -634,7 +634,7 @@ check_guard(
|
||||
}
|
||||
else if (fc_type == 10) // memory.copy
|
||||
{
|
||||
if (rulesVersion & 0x02U)
|
||||
if (rulesVersion & hook_api::GuardRuleFix20250131)
|
||||
GUARD_ERROR("Memory.copy instruction is not allowed.");
|
||||
|
||||
REQUIRE(2);
|
||||
@@ -642,7 +642,7 @@ check_guard(
|
||||
}
|
||||
else if (fc_type == 11) // memory.fill
|
||||
{
|
||||
if (rulesVersion & 0x02U)
|
||||
if (rulesVersion & hook_api::GuardRuleFix20250131)
|
||||
GUARD_ERROR("Memory.fill instruction is not allowed.");
|
||||
|
||||
ADVANCE(1);
|
||||
@@ -826,6 +826,7 @@ validateGuards(
|
||||
std::vector<uint8_t> const& wasm,
|
||||
GuardLog guardLog,
|
||||
std::string guardLogAccStr,
|
||||
hook_api::APIWhitelist const import_whitelist,
|
||||
/* RH NOTE:
|
||||
* rules version is a bit field, so rule update 1 is 0x01, update 2 is 0x02
|
||||
* and update 3 is 0x04 ideally at rule version 3 all bits so far are set
|
||||
@@ -835,7 +836,7 @@ validateGuards(
|
||||
* might have unforeseen consequences, without also rolling back further
|
||||
* changes that are fine.
|
||||
*/
|
||||
uint64_t rulesVersion = 0)
|
||||
uint64_t rulesVersion = 0x00)
|
||||
{
|
||||
uint64_t byteCount = wasm.size();
|
||||
|
||||
@@ -1020,31 +1021,24 @@ validateGuards(
|
||||
int type_idx = parseLeb128(wasm, i, &i);
|
||||
CHECK_SHORT_HOOK();
|
||||
|
||||
auto it = import_whitelist.find(import_name);
|
||||
auto it_end = import_whitelist.end();
|
||||
bool found_in_whitelist = (it != it_end);
|
||||
|
||||
if (import_name == "_g")
|
||||
{
|
||||
guard_import_number = func_upto;
|
||||
}
|
||||
else if (
|
||||
hook_api::import_whitelist.find(import_name) ==
|
||||
hook_api::import_whitelist.end())
|
||||
if (!found_in_whitelist)
|
||||
{
|
||||
if (rulesVersion > 0 &&
|
||||
hook_api::import_whitelist_1.find(import_name) !=
|
||||
hook_api::import_whitelist_1.end())
|
||||
{
|
||||
// PASS, this is a version 1 api
|
||||
}
|
||||
else
|
||||
{
|
||||
GUARDLOG(hook::log::IMPORT_ILLEGAL)
|
||||
<< "Malformed transaction. "
|
||||
<< "Hook attempted to import a function that does "
|
||||
"not "
|
||||
<< "appear in the hook_api function set: `"
|
||||
<< import_name << "`"
|
||||
<< "\n";
|
||||
return {};
|
||||
}
|
||||
GUARDLOG(hook::log::IMPORT_ILLEGAL)
|
||||
<< "Malformed transaction. "
|
||||
<< "Hook attempted to import a function that does "
|
||||
"not "
|
||||
<< "appear in the hook_api function set: `"
|
||||
<< import_name << "`"
|
||||
<< "\n";
|
||||
return {};
|
||||
}
|
||||
|
||||
// add to import map
|
||||
@@ -1259,11 +1253,7 @@ validateGuards(
|
||||
for (auto const& [import_idx, api_name] : usage->second)
|
||||
{
|
||||
auto const& api_signature =
|
||||
hook_api::import_whitelist.find(api_name) !=
|
||||
hook_api::import_whitelist.end()
|
||||
? hook_api::import_whitelist.find(api_name)->second
|
||||
: hook_api::import_whitelist_1.find(api_name)
|
||||
->second;
|
||||
import_whitelist.find(api_name)->second;
|
||||
|
||||
if (!first_signature)
|
||||
{
|
||||
|
||||
350
include/xrpl/hook/HookAPI.h
Normal file
350
include/xrpl/hook/HookAPI.h
Normal file
@@ -0,0 +1,350 @@
|
||||
#ifndef HOOK_API_INCLUDED
|
||||
#define HOOK_API_INCLUDED 1
|
||||
|
||||
#include <xrpld/app/misc/Transaction.h>
|
||||
#include <xrpl/hook/Enum.h>
|
||||
|
||||
namespace hook {
|
||||
using namespace ripple;
|
||||
using HookReturnCode = hook_api::hook_return_code;
|
||||
|
||||
using Bytes = std::vector<std::uint8_t>;
|
||||
|
||||
struct HookContext; // defined in applyHook.h
|
||||
|
||||
class HookAPI
|
||||
{
|
||||
public:
|
||||
explicit HookAPI(HookContext& ctx) : hookCtx(ctx)
|
||||
{
|
||||
}
|
||||
|
||||
/// control APIs
|
||||
// _g
|
||||
// accept
|
||||
// rollback
|
||||
|
||||
/// util APIs
|
||||
Expected<std::string, HookReturnCode>
|
||||
util_raddr(Bytes const& accountID) const;
|
||||
|
||||
Expected<Bytes, HookReturnCode>
|
||||
util_accid(std::string raddress) const;
|
||||
|
||||
Expected<bool, HookReturnCode>
|
||||
util_verify(Slice const& data, Slice const& sig, Slice const& key) const;
|
||||
|
||||
uint256
|
||||
util_sha512h(Slice const& data) const;
|
||||
|
||||
// util_keylet()
|
||||
|
||||
/// sto APIs
|
||||
Expected<bool, HookReturnCode>
|
||||
sto_validate(Bytes const& data) const;
|
||||
|
||||
Expected<std::pair<uint32_t, uint32_t>, HookReturnCode>
|
||||
sto_subfield(Bytes const& data, uint32_t field_id) const;
|
||||
|
||||
Expected<std::pair<uint32_t, uint32_t>, HookReturnCode>
|
||||
sto_subarray(Bytes const& data, uint32_t index_id) const;
|
||||
|
||||
Expected<Bytes, HookReturnCode>
|
||||
sto_emplace(
|
||||
Bytes const& source_object,
|
||||
std::optional<Bytes> const& field_object,
|
||||
uint32_t field_id) const;
|
||||
|
||||
// sto_erase(): same as sto_emplace with field_object = nullopt
|
||||
|
||||
/// etxn APIs
|
||||
Expected<Bytes, HookReturnCode>
|
||||
prepare(Slice const& txBlob) const;
|
||||
|
||||
Expected<std::shared_ptr<Transaction>, HookReturnCode>
|
||||
emit(Slice const& txBlob) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
etxn_burden() const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
etxn_fee_base(Slice const& txBlob) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
etxn_details(uint8_t* out_ptr) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
etxn_reserve(uint64_t count) const;
|
||||
|
||||
uint32_t
|
||||
etxn_generation() const;
|
||||
|
||||
Expected<uint256, HookReturnCode>
|
||||
etxn_nonce() const;
|
||||
|
||||
/// float APIs
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_set(int32_t exponent, int64_t mantissa) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_multiply(uint64_t float1, uint64_t float2) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_mulratio(
|
||||
uint64_t float1,
|
||||
uint32_t round_up,
|
||||
uint32_t numerator,
|
||||
uint32_t denominator) const;
|
||||
|
||||
uint64_t
|
||||
float_negate(uint64_t float1) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_compare(uint64_t float1, uint64_t float2, uint32_t mode) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_sum(uint64_t float1, uint64_t float2) const;
|
||||
|
||||
Expected<Bytes, HookReturnCode>
|
||||
float_sto(
|
||||
std::optional<Currency> currency,
|
||||
std::optional<AccountID> issuer,
|
||||
uint64_t float1,
|
||||
uint32_t field_code,
|
||||
uint32_t write_len) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_sto_set(Bytes const& data) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_invert(uint64_t float1) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_divide(uint64_t float1, uint64_t float2) const;
|
||||
|
||||
uint64_t
|
||||
float_one() const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_mantissa(uint64_t float1) const;
|
||||
|
||||
uint64_t
|
||||
float_sign(uint64_t float1) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_int(uint64_t float1, uint32_t decimal_places, uint32_t absolute)
|
||||
const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_log(uint64_t float1) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
float_root(uint64_t float1, uint32_t n) const;
|
||||
|
||||
/// otxn APIs
|
||||
uint64_t
|
||||
otxn_burden() const;
|
||||
|
||||
uint32_t
|
||||
otxn_generation() const;
|
||||
|
||||
Expected<const STBase*, HookReturnCode>
|
||||
otxn_field(uint32_t field_id) const;
|
||||
|
||||
Expected<uint256, HookReturnCode>
|
||||
otxn_id(uint32_t flags) const;
|
||||
|
||||
TxType
|
||||
otxn_type() const;
|
||||
|
||||
Expected<uint32_t, HookReturnCode>
|
||||
otxn_slot(uint32_t slot_into) const;
|
||||
|
||||
Expected<Blob, HookReturnCode>
|
||||
otxn_param(Bytes const& param_name) const;
|
||||
|
||||
/// hook APIs
|
||||
AccountID
|
||||
hook_account() const;
|
||||
|
||||
Expected<ripple::uint256, HookReturnCode>
|
||||
hook_hash(int32_t hook_no) const;
|
||||
|
||||
Expected<int64_t, HookReturnCode>
|
||||
hook_again() const;
|
||||
|
||||
Expected<Blob, HookReturnCode>
|
||||
hook_param(Bytes const& paramName) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
hook_param_set(
|
||||
uint256 const& hash,
|
||||
Bytes const& paramName,
|
||||
Bytes const& paramValue) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
hook_skip(uint256 const& hash, uint32_t flags) const;
|
||||
|
||||
uint8_t
|
||||
hook_pos() const;
|
||||
|
||||
/// ledger APIs
|
||||
uint64_t
|
||||
fee_base() const;
|
||||
|
||||
uint32_t
|
||||
ledger_seq() const;
|
||||
|
||||
uint256
|
||||
ledger_last_hash() const;
|
||||
|
||||
uint64_t
|
||||
ledger_last_time() const;
|
||||
|
||||
Expected<uint256, HookReturnCode>
|
||||
ledger_nonce() const;
|
||||
|
||||
Expected<Keylet, HookReturnCode>
|
||||
ledger_keylet(Keylet const& klLo, Keylet const& klHi) const;
|
||||
|
||||
/// state APIs
|
||||
|
||||
// state(): same as state_foreign with ns = 0 and account = hook_account()
|
||||
|
||||
Expected<Bytes, HookReturnCode>
|
||||
state_foreign(
|
||||
uint256 const& key,
|
||||
uint256 const& ns,
|
||||
AccountID const& account) const;
|
||||
|
||||
// state_set(): same as state_foreign_set with ns = 0 and account =
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
state_foreign_set(
|
||||
uint256 const& key,
|
||||
uint256 const& ns,
|
||||
AccountID const& account,
|
||||
Bytes& data) const;
|
||||
|
||||
/// slot APIs
|
||||
Expected<const STBase*, HookReturnCode>
|
||||
slot(uint32_t slot_no) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
slot_clear(uint32_t slot_no) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
slot_count(uint32_t slot_no) const;
|
||||
|
||||
Expected<uint32_t, HookReturnCode>
|
||||
slot_set(Bytes const& data, uint32_t slot_no) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
slot_size(uint32_t slot_no) const;
|
||||
|
||||
Expected<uint32_t, HookReturnCode>
|
||||
slot_subarray(uint32_t parent_slot, uint32_t array_id, uint32_t new_slot)
|
||||
const;
|
||||
|
||||
Expected<uint32_t, HookReturnCode>
|
||||
slot_subfield(uint32_t parent_slot, uint32_t field_id, uint32_t new_slot)
|
||||
const;
|
||||
|
||||
Expected<std::variant<STBase, STAmount>, HookReturnCode>
|
||||
slot_type(uint32_t slot_no, uint32_t flags) const;
|
||||
|
||||
Expected<uint64_t, HookReturnCode>
|
||||
slot_float(uint32_t slot_no) const;
|
||||
|
||||
/// trace APIs
|
||||
// trace
|
||||
// trace_num
|
||||
// trace_float
|
||||
|
||||
Expected<uint32_t, HookReturnCode>
|
||||
meta_slot(uint32_t slot_into) const;
|
||||
|
||||
Expected<std::pair<uint32_t, uint32_t>, HookReturnCode>
|
||||
xpop_slot(uint32_t slot_into_tx, uint32_t slot_into_meta) const;
|
||||
|
||||
private:
|
||||
HookContext& hookCtx;
|
||||
|
||||
inline int32_t
|
||||
no_free_slots() const;
|
||||
|
||||
inline std::optional<int32_t>
|
||||
get_free_slot() const;
|
||||
|
||||
inline Expected<uint64_t, HookReturnCode>
|
||||
float_multiply_internal_parts(
|
||||
uint64_t man1,
|
||||
int32_t exp1,
|
||||
bool neg1,
|
||||
uint64_t man2,
|
||||
int32_t exp2,
|
||||
bool neg2) const;
|
||||
|
||||
inline Expected<uint64_t, HookReturnCode>
|
||||
mulratio_internal(
|
||||
int64_t& man1,
|
||||
int32_t& exp1,
|
||||
bool round_up,
|
||||
uint32_t numerator,
|
||||
uint32_t denominator) const;
|
||||
|
||||
inline Expected<uint64_t, HookReturnCode>
|
||||
float_divide_internal(uint64_t float1, uint64_t float2) const;
|
||||
|
||||
inline Expected<uint64_t, HookReturnCode>
|
||||
double_to_xfl(double x) const;
|
||||
|
||||
std::optional<ripple::Keylet>
|
||||
unserialize_keylet(Bytes const& data) const;
|
||||
|
||||
// update the state cache
|
||||
inline std::optional<
|
||||
std::reference_wrapper<std::pair<bool, ripple::Blob> const>>
|
||||
lookup_state_cache(
|
||||
AccountID const& acc,
|
||||
uint256 const& ns,
|
||||
uint256 const& key) const;
|
||||
|
||||
// check the state cache
|
||||
inline Expected<uint64_t, HookReturnCode>
|
||||
set_state_cache(
|
||||
AccountID const& acc,
|
||||
uint256 const& ns,
|
||||
uint256 const& key,
|
||||
Bytes const& data,
|
||||
bool modified) const;
|
||||
|
||||
// these are only used by get_stobject_length below
|
||||
enum parse_error {
|
||||
pe_unexpected_end = -1,
|
||||
pe_unknown_type_early = -2, // detected early
|
||||
pe_unknown_type_late = -3, // end of function
|
||||
pe_excessive_nesting = -4,
|
||||
pe_excessive_size = -5
|
||||
};
|
||||
|
||||
inline Expected<
|
||||
int32_t,
|
||||
parse_error>
|
||||
get_stobject_length(
|
||||
unsigned char* start, // in - begin iterator
|
||||
unsigned char* maxptr, // in - end iterator
|
||||
int& type, // out - populated by serialized type code
|
||||
int& field, // out - populated by serialized field code
|
||||
int& payload_start, // out - the start of actual payload data for
|
||||
// this type
|
||||
int& payload_length, // out - the length of actual payload data for
|
||||
// this type
|
||||
Rules const& rules,
|
||||
int recursion_depth = 0) // used internally
|
||||
const;
|
||||
};
|
||||
|
||||
} // namespace hook
|
||||
|
||||
#endif // HOOK_API_INCLUDED
|
||||
@@ -25,7 +25,8 @@
|
||||
_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, N, ...) \
|
||||
N
|
||||
#define VA_NARGS(__drop, ...) \
|
||||
VA_NARGS_IMPL(__VA_ARGS__, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1)
|
||||
VA_NARGS_IMPL( \
|
||||
__VA_OPT__(__VA_ARGS__, ) 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)
|
||||
#define FIRST(a, b) a
|
||||
#define SECOND(a, b) b
|
||||
#define STRIP_TYPES(...) FOR_VARS(SECOND, 0, __VA_ARGS__)
|
||||
@@ -88,30 +89,18 @@
|
||||
|
||||
#define WASM_VAL_TYPE(T, b) CAT2(TYP_, T)
|
||||
|
||||
#define DECLARE_HOOK_FUNCTION(R, F, ...) \
|
||||
R F(hook::HookContext& hookCtx, \
|
||||
WasmEdge_CallingFrameContext const& frameCtx, \
|
||||
__VA_ARGS__); \
|
||||
extern WasmEdge_Result WasmFunction##F( \
|
||||
void* data_ptr, \
|
||||
const WasmEdge_CallingFrameContext* frameCtx, \
|
||||
const WasmEdge_Value* in, \
|
||||
WasmEdge_Value* out); \
|
||||
extern WasmEdge_ValType WasmFunctionParams##F[]; \
|
||||
extern WasmEdge_ValType WasmFunctionResult##F[]; \
|
||||
extern WasmEdge_FunctionTypeContext* WasmFunctionType##F; \
|
||||
extern WasmEdge_String WasmFunctionName##F;
|
||||
|
||||
#define DECLARE_HOOK_FUNCNARG(R, F) \
|
||||
R F(hook::HookContext& hookCtx, \
|
||||
WasmEdge_CallingFrameContext const& frameCtx); \
|
||||
extern WasmEdge_Result WasmFunction##F( \
|
||||
void* data_ptr, \
|
||||
const WasmEdge_CallingFrameContext* frameCtx, \
|
||||
const WasmEdge_Value* in, \
|
||||
WasmEdge_Value* out); \
|
||||
extern WasmEdge_ValType WasmFunctionResult##F[]; \
|
||||
extern WasmEdge_FunctionTypeContext* WasmFunctionType##F; \
|
||||
#define DECLARE_HOOK_FUNCTION(R, F, ...) \
|
||||
R F(hook::HookContext& hookCtx, \
|
||||
WasmEdge_CallingFrameContext const& frameCtx __VA_OPT__( \
|
||||
COMMA __VA_ARGS__)); \
|
||||
extern WasmEdge_Result WasmFunction##F( \
|
||||
void* data_ptr, \
|
||||
const WasmEdge_CallingFrameContext* frameCtx, \
|
||||
const WasmEdge_Value* in, \
|
||||
WasmEdge_Value* out); \
|
||||
extern WasmEdge_ValType WasmFunctionParams##F[]; \
|
||||
extern WasmEdge_ValType WasmFunctionResult##F[]; \
|
||||
extern WasmEdge_FunctionTypeContext* WasmFunctionType##F; \
|
||||
extern WasmEdge_String WasmFunctionName##F;
|
||||
|
||||
#define DEFINE_HOOK_FUNCTION(R, F, ...) \
|
||||
@@ -121,61 +110,35 @@
|
||||
const WasmEdge_Value* in, \
|
||||
WasmEdge_Value* out) \
|
||||
{ \
|
||||
int _stack = 0; \
|
||||
FOR_VARS(VAR_ASSIGN, 2, __VA_ARGS__); \
|
||||
__VA_OPT__(int _stack = 0;) \
|
||||
__VA_OPT__(FOR_VARS(VAR_ASSIGN, 2, __VA_ARGS__);) \
|
||||
hook::HookContext* hookCtx = \
|
||||
reinterpret_cast<hook::HookContext*>(data_ptr); \
|
||||
R return_code = hook_api::F( \
|
||||
*hookCtx, \
|
||||
*const_cast<WasmEdge_CallingFrameContext*>(frameCtx), \
|
||||
STRIP_TYPES(__VA_ARGS__)); \
|
||||
*const_cast<WasmEdge_CallingFrameContext*>(frameCtx) \
|
||||
__VA_OPT__(COMMA STRIP_TYPES(__VA_ARGS__))); \
|
||||
if (return_code == RC_ROLLBACK || return_code == RC_ACCEPT) \
|
||||
return WasmEdge_Result_Terminate; \
|
||||
out[0] = RET_ASSIGN(R, return_code); \
|
||||
return WasmEdge_Result_Success; \
|
||||
}; \
|
||||
WasmEdge_ValType hook_api::WasmFunctionParams##F[] = { \
|
||||
FOR_VARS(WASM_VAL_TYPE, 0, __VA_ARGS__)}; \
|
||||
__VA_OPT__(FOR_VARS(WASM_VAL_TYPE, 0, __VA_ARGS__))}; \
|
||||
WasmEdge_ValType hook_api::WasmFunctionResult##F[1] = { \
|
||||
WASM_VAL_TYPE(R, dummy)}; \
|
||||
WasmEdge_FunctionTypeContext* hook_api::WasmFunctionType##F = \
|
||||
WasmEdge_FunctionTypeCreate( \
|
||||
WasmFunctionParams##F, \
|
||||
VA_NARGS(NULL, __VA_ARGS__), \
|
||||
VA_NARGS(NULL __VA_OPT__(, __VA_ARGS__)), \
|
||||
WasmFunctionResult##F, \
|
||||
1); \
|
||||
WasmEdge_String hook_api::WasmFunctionName##F = \
|
||||
WasmEdge_StringCreateByCString(#F); \
|
||||
R hook_api::F( \
|
||||
hook::HookContext& hookCtx, \
|
||||
WasmEdge_CallingFrameContext const& frameCtx, \
|
||||
__VA_ARGS__)
|
||||
|
||||
#define DEFINE_HOOK_FUNCNARG(R, F) \
|
||||
WasmEdge_Result hook_api::WasmFunction##F( \
|
||||
void* data_ptr, \
|
||||
const WasmEdge_CallingFrameContext* frameCtx, \
|
||||
const WasmEdge_Value* in, \
|
||||
WasmEdge_Value* out) \
|
||||
{ \
|
||||
hook::HookContext* hookCtx = \
|
||||
reinterpret_cast<hook::HookContext*>(data_ptr); \
|
||||
R return_code = hook_api::F( \
|
||||
*hookCtx, *const_cast<WasmEdge_CallingFrameContext*>(frameCtx)); \
|
||||
if (return_code == RC_ROLLBACK || return_code == RC_ACCEPT) \
|
||||
return WasmEdge_Result_Terminate; \
|
||||
out[0] = CAT2(RET_, R(return_code)); \
|
||||
return WasmEdge_Result_Success; \
|
||||
}; \
|
||||
WasmEdge_ValType hook_api::WasmFunctionResult##F[1] = { \
|
||||
WASM_VAL_TYPE(R, dummy)}; \
|
||||
WasmEdge_FunctionTypeContext* hook_api::WasmFunctionType##F = \
|
||||
WasmEdge_FunctionTypeCreate({}, 0, WasmFunctionResult##F, 1); \
|
||||
WasmEdge_String hook_api::WasmFunctionName##F = \
|
||||
WasmEdge_StringCreateByCString(#F); \
|
||||
R hook_api::F( \
|
||||
hook::HookContext& hookCtx, \
|
||||
WasmEdge_CallingFrameContext const& frameCtx)
|
||||
WasmEdge_CallingFrameContext const& frameCtx __VA_OPT__( \
|
||||
COMMA __VA_ARGS__))
|
||||
|
||||
#define HOOK_SETUP() \
|
||||
try \
|
||||
@@ -190,6 +153,7 @@
|
||||
[[maybe_unused]] const uint64_t memory_length = \
|
||||
WasmEdge_MemoryInstanceGetPageSize(memoryCtx) * \
|
||||
WasmEdge_kPageSize; \
|
||||
[[maybe_unused]] auto& api = hookCtx.api(); \
|
||||
if (!memoryCtx || !memory || !memory_length) \
|
||||
return INTERNAL_ERROR;
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#define GUARD_CHECKER_BUILD
|
||||
#include "Enum.h"
|
||||
#include "Guard.h"
|
||||
#include <fcntl.h>
|
||||
#include <iostream>
|
||||
@@ -79,7 +81,15 @@ main(int argc, char** argv)
|
||||
|
||||
close(fd);
|
||||
|
||||
auto result = validateGuards(hook, std::cout, "", 3);
|
||||
// Dummy rules for guard checker build
|
||||
hook_api::Rules rules;
|
||||
|
||||
auto result = validateGuards(
|
||||
hook,
|
||||
std::cout,
|
||||
"",
|
||||
hook_api::getImportWhitelist(rules),
|
||||
hook_api::getGuardRulesVersion(rules));
|
||||
|
||||
if (!result)
|
||||
{
|
||||
|
||||
374
include/xrpl/hook/hook_api.macro
Normal file
374
include/xrpl/hook/hook_api.macro
Normal file
@@ -0,0 +1,374 @@
|
||||
// int32_t _g(uint32_t guard_id, uint32_t maxiter);
|
||||
HOOK_API_DEFINITION(
|
||||
int32_t, _g, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t accept(uint32_t read_ptr, uint32_t read_len, int64_t error_code);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, accept, (uint32_t, uint32_t, int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t rollback(uint32_t read_ptr, uint32_t read_len, int64_t error_code);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, rollback, (uint32_t, uint32_t, int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t util_raddr(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, util_raddr, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t util_accid(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, util_accid, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t util_verify(uint32_t dread_ptr, uint32_t dread_len, uint32_t sread_ptr, uint32_t sread_len, uint32_t kread_ptr, uint32_t kread_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, util_verify, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t util_sha512h(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, util_sha512h, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t util_keylet(uint32_t write_ptr, uint32_t write_len, uint32_t keylet_type, uint32_t a, uint32_t b, uint32_t c, uint32_t d, uint32_t e, uint32_t f);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, util_keylet, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t sto_validate(uint32_t tread_ptr, uint32_t tread_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, sto_validate, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t sto_subfield(uint32_t read_ptr, uint32_t read_len, uint32_t field_id);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, sto_subfield, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t sto_subarray(uint32_t read_ptr, uint32_t read_len, uint32_t array_id);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, sto_subarray, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t sto_emplace(uint32_t write_ptr, uint32_t write_len, uint32_t sread_ptr, uint32_t sread_len, uint32_t fread_ptr, uint32_t fread_len, uint32_t field_id);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, sto_emplace, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t sto_erase(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len, uint32_t field_id);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, sto_erase, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t etxn_burden();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, etxn_burden, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t etxn_details(uint32_t write_ptr, uint32_t write_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, etxn_details, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t etxn_fee_base(uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, etxn_fee_base, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t etxn_reserve(uint32_t count);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, etxn_reserve, (uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t etxn_generation();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, etxn_generation, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t etxn_nonce(uint32_t write_ptr, uint32_t write_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, etxn_nonce, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t emit(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, emit, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_set(int32_t exponent, int64_t mantissa);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_set, (int32_t, int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_multiply(int64_t float1, int64_t float2);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_multiply, (int64_t, int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_mulratio(int64_t float1, uint32_t round_up, uint32_t numerator, uint32_t denominator);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_mulratio, (int64_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_negate(int64_t float1);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_negate, (int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_compare(int64_t float1, int64_t float2, uint32_t mode);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_compare, (int64_t, int64_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_sum(int64_t float1, int64_t float2);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_sum, (int64_t, int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_sto(uint32_t write_ptr, uint32_t write_len, uint32_t cread_ptr, uint32_t cread_len, uint32_t iread_ptr, uint32_t iread_len, int64_t float1, uint32_t field_code);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_sto, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, int64_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_sto_set(uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_sto_set, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_invert(int64_t float1);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_invert, (int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_divide(int64_t float1, int64_t float2);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_divide, (int64_t, int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_one();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_one, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_mantissa(int64_t float1);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_mantissa, (int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_sign(int64_t float1);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_sign, (int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_int(int64_t float1, uint32_t decimal_places, uint32_t abs);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_int, (int64_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_log(int64_t float1);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_log, (int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t float_root(int64_t float1, uint32_t n);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, float_root, (int64_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t fee_base();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, fee_base, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t ledger_seq();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, ledger_seq, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t ledger_last_time();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, ledger_last_time, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t ledger_last_hash(uint32_t write_ptr, uint32_t write_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, ledger_last_hash, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t ledger_nonce(uint32_t write_ptr, uint32_t write_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, ledger_nonce, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t ledger_keylet(uint32_t write_ptr, uint32_t write_len, uint32_t lread_ptr, uint32_t lread_len, uint32_t hread_ptr, uint32_t hread_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, ledger_keylet, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t hook_account(uint32_t write_ptr, uint32_t write_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, hook_account, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t hook_hash(uint32_t write_ptr, uint32_t write_len, int32_t hook_no);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, hook_hash, (uint32_t, uint32_t, int32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t hook_param_set(uint32_t read_ptr, uint32_t read_len, uint32_t kread_ptr, uint32_t kread_len, uint32_t hread_ptr, uint32_t hread_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, hook_param_set, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t hook_param(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, hook_param, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t hook_again();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, hook_again, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t hook_skip(uint32_t read_ptr, uint32_t read_len, uint32_t flags);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, hook_skip, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t hook_pos();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, hook_pos, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot(uint32_t write_ptr, uint32_t write_len, uint32_t slot);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot_clear(uint32_t slot);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot_clear, (uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot_count(uint32_t slot);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot_count, (uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot_set(uint32_t read_ptr, uint32_t read_len, uint32_t slot);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot_set, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot_size(uint32_t slot);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot_size, (uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot_subarray(uint32_t parent_slot, uint32_t array_id, uint32_t new_slot);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot_subarray, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot_subfield(uint32_t parent_slot, uint32_t field_id, uint32_t new_slot);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot_subfield, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot_type(uint32_t slot_no, uint32_t flags);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot_type, (uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t slot_float(uint32_t slot_no);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, slot_float, (uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t state_set(uint32_t read_ptr, uint32_t read_len, uint32_t kread_ptr, uint32_t kread_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, state_set, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t state_foreign_set(uint32_t read_ptr, uint32_t read_len, uint32_t kread_ptr, uint32_t kread_len, uint32_t nread_ptr, uint32_t nread_len, uint32_t aread_ptr, uint32_t aread_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, state_foreign_set, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t state(uint32_t write_ptr, uint32_t write_len, uint32_t kread_ptr, uint32_t kread_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, state, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t state_foreign(uint32_t write_ptr, uint32_t write_len, uint32_t kread_ptr, uint32_t kread_len, uint32_t nread_ptr, uint32_t nread_len, uint32_t aread_ptr, uint32_t aread_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, state_foreign, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t trace(uint32_t mread_ptr, uint32_t mread_len, uint32_t dread_ptr, uint32_t dread_len, uint32_t as_hex);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, trace, (uint32_t, uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t trace_num(uint32_t read_ptr, uint32_t read_len, int64_t number);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, trace_num, (uint32_t, uint32_t, int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t trace_float(uint32_t read_ptr, uint32_t read_len, int64_t float1);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, trace_float, (uint32_t, uint32_t, int64_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t otxn_burden();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, otxn_burden, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t otxn_field(uint32_t write_ptr, uint32_t write_len, uint32_t field_id);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, otxn_field, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t otxn_generation();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, otxn_generation, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t otxn_id(uint32_t write_ptr, uint32_t write_len, uint32_t flags);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, otxn_id, (uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t otxn_type();
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, otxn_type, (),
|
||||
uint256{})
|
||||
|
||||
// int64_t otxn_slot(uint32_t slot_no);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, otxn_slot, (uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t otxn_param(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, otxn_param, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t meta_slot(uint32_t slot_no);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, meta_slot, (uint32_t),
|
||||
uint256{})
|
||||
|
||||
// int64_t xpop_slot(uint32_t slot_no_tx, uint32_t slot_no_meta);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, xpop_slot, (uint32_t, uint32_t),
|
||||
featureHooksUpdate1)
|
||||
|
||||
// int64_t prepare(uint32_t write_ptr, uint32_t write_len, uint32_t read_ptr, uint32_t read_len);
|
||||
HOOK_API_DEFINITION(
|
||||
int64_t, prepare, (uint32_t, uint32_t, uint32_t, uint32_t),
|
||||
featureHooksUpdate2)
|
||||
@@ -80,7 +80,7 @@ namespace detail {
|
||||
// Feature.cpp. Because it's only used to reserve storage, and determine how
|
||||
// large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than
|
||||
// the actual number of amendments. A LogicError on startup will verify this.
|
||||
static constexpr std::size_t numFeatures = 105;
|
||||
static constexpr std::size_t numFeatures = 114;
|
||||
|
||||
/** Amendments that this server supports and the default voting behavior.
|
||||
Whether they are enabled depends on the Rules defined in the validated
|
||||
|
||||
@@ -97,6 +97,12 @@ public:
|
||||
|
||||
static IOUAmount
|
||||
minPositiveAmount();
|
||||
|
||||
friend std::ostream&
|
||||
operator<<(std::ostream& os, IOUAmount const& x)
|
||||
{
|
||||
return os << to_string(x);
|
||||
}
|
||||
};
|
||||
|
||||
inline IOUAmount::IOUAmount(beast::Zero)
|
||||
|
||||
@@ -306,6 +306,9 @@ import_vlseq(PublicKey const& key) noexcept;
|
||||
Keylet
|
||||
uritoken(AccountID const& issuer, Blob const& uri);
|
||||
|
||||
Keylet
|
||||
cron(uint32_t timestamp, std::optional<AccountID> const& id = std::nullopt);
|
||||
|
||||
/** AMM entry */
|
||||
Keylet
|
||||
amm(Asset const& issue1, Asset const& issue2) noexcept;
|
||||
|
||||
@@ -28,6 +28,9 @@
|
||||
|
||||
namespace ripple {
|
||||
|
||||
bool
|
||||
isFeatureEnabled(uint256 const& feature);
|
||||
|
||||
class DigestAwareReadView;
|
||||
|
||||
/** Rules controlling protocol behavior. */
|
||||
|
||||
@@ -348,19 +348,20 @@ enum TECcodes : TERUnderlyingType {
|
||||
tecXCHAIN_PAYMENT_FAILED = 184,
|
||||
tecXCHAIN_SELF_COMMIT = 185,
|
||||
tecXCHAIN_CREATE_ACCOUNT_DISABLED = 186,
|
||||
tecXCHAIN_BAD_PUBLIC_KEY_ACCOUNT_PAIR = 191,
|
||||
tecXCHAIN_BAD_PUBLIC_KEY_ACCOUNT_PAIR = 192,
|
||||
tecINSUF_RESERVE_SELLER = 187,
|
||||
tecIMMUTABLE = 188,
|
||||
tecTOO_MANY_REMARKS = 189,
|
||||
tecINCOMPLETE = 190,
|
||||
// 191: tecXCHAIN_BAD_PUBLIC_KEY_ACCOUNT_PAIR
|
||||
tecEMPTY_DID = 192,
|
||||
tecINVALID_UPDATE_TIME = 193,
|
||||
tecTOKEN_PAIR_NOT_FOUND = 194,
|
||||
tecARRAY_EMPTY = 195,
|
||||
tecARRAY_TOO_LARGE = 196,
|
||||
tecLOCKED = 197,
|
||||
tecBAD_CREDENTIALS = 198,
|
||||
tecHAS_HOOK_STATE = 190,
|
||||
tecINCOMPLETE = 191,
|
||||
// 192: tecXCHAIN_BAD_PUBLIC_KEY_ACCOUNT_PAIR
|
||||
tecEMPTY_DID = 193,
|
||||
tecINVALID_UPDATE_TIME = 194,
|
||||
tecTOKEN_PAIR_NOT_FOUND = 195,
|
||||
tecARRAY_EMPTY = 196,
|
||||
tecARRAY_TOO_LARGE = 197,
|
||||
tecLOCKED = 198,
|
||||
tecBAD_CREDENTIALS = 199,
|
||||
tecLAST_POSSIBLE_ENTRY = 255,
|
||||
};
|
||||
|
||||
|
||||
@@ -239,6 +239,12 @@ constexpr std::uint32_t const tfImmutable = 1;
|
||||
// Clawback flags:
|
||||
constexpr std::uint32_t const tfClawbackMask = ~tfUniversal;
|
||||
|
||||
// CronSet Flags:
|
||||
enum CronSetFlags : uint32_t {
|
||||
tfCronUnset = 0x00000001,
|
||||
};
|
||||
constexpr std::uint32_t const tfCronSetMask = ~(tfUniversal | tfCronUnset);
|
||||
|
||||
// AMM Flags:
|
||||
constexpr std::uint32_t tfLPToken = 0x00010000;
|
||||
constexpr std::uint32_t tfWithdrawAll = 0x00020000;
|
||||
|
||||
@@ -24,11 +24,14 @@
|
||||
#error "undefined macro: XRPL_FIX"
|
||||
#endif
|
||||
|
||||
// clang-format off
|
||||
|
||||
// Add new amendments to the top of this list.
|
||||
// Keep it sorted in reverse chronological order.
|
||||
// If you add an amendment here, then do not forget to increment `numFeatures`
|
||||
// in include/xrpl/protocol/Feature.h.
|
||||
|
||||
XRPL_FEATURE(HookAPISerializedType240, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(PermissionedDomains, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(DynamicNFT, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(Credentials, Supported::no, VoteBehavior::DefaultNo)
|
||||
@@ -50,6 +53,13 @@ XRPL_FEATURE(DID, Supported::no, VoteBehavior::DefaultNo
|
||||
XRPL_FIX (DisallowIncomingV1, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(XChainBridge, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(AMM, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(HookOnV2, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(HooksUpdate2, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (HookAPI20251128, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (CronStacking, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(ExtendedHookState, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(Cron, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (InvalidTxFlags, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(IOUIssuerWeakTSH, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(DeepFreeze, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(Clawback, Supported::yes, VoteBehavior::DefaultNo)
|
||||
@@ -137,3 +147,4 @@ XRPL_FIX (NFTokenDirV1, Supported::yes, VoteBehavior::Obsolete)
|
||||
XRPL_FEATURE(NonFungibleTokensV1, Supported::yes, VoteBehavior::Obsolete)
|
||||
XRPL_FEATURE(CryptoConditionsSuite, Supported::yes, VoteBehavior::Obsolete)
|
||||
|
||||
// clang-format on
|
||||
|
||||
@@ -54,6 +54,20 @@ LEDGER_ENTRY(ltNFTOKEN_OFFER, 0x0037, NFTokenOffer, nft_offer, ({
|
||||
{sfPreviousTxnLgrSeq, soeREQUIRED},
|
||||
}))
|
||||
|
||||
/** A ledger object representing a scheduled cron execution on an account.
|
||||
|
||||
\sa keylet::cron
|
||||
*/
|
||||
LEDGER_ENTRY_DUPLICATE(ltCRON, 0x0041, Cron, cron, ({
|
||||
{sfOwner, soeREQUIRED},
|
||||
{sfStartTime, soeREQUIRED},
|
||||
{sfDelaySeconds, soeREQUIRED},
|
||||
{sfRepeatCount, soeREQUIRED},
|
||||
{sfOwnerNode, soeREQUIRED},
|
||||
{sfPreviousTxnID, soeREQUIRED},
|
||||
{sfPreviousTxnLgrSeq, soeREQUIRED}
|
||||
}))
|
||||
|
||||
/** A ledger object which describes a check.
|
||||
|
||||
\sa keylet::check
|
||||
@@ -79,7 +93,9 @@ LEDGER_ENTRY(ltCHECK, 0x0043, Check, check, ({
|
||||
*/
|
||||
LEDGER_ENTRY(ltHOOK_DEFINITION, 'D', HookDefinition, hook_definition, ({
|
||||
{sfHookHash, soeREQUIRED},
|
||||
{sfHookOn, soeREQUIRED},
|
||||
{sfHookOn, soeOPTIONAL},
|
||||
{sfHookOnIncoming, soeOPTIONAL},
|
||||
{sfHookOnOutgoing, soeOPTIONAL},
|
||||
{sfHookCanEmit, soeOPTIONAL},
|
||||
{sfHookNamespace, soeREQUIRED},
|
||||
{sfHookParameters, soeREQUIRED},
|
||||
@@ -241,6 +257,8 @@ LEDGER_ENTRY(ltACCOUNT_ROOT, 0x0061, AccountRoot, account, ({
|
||||
{sfGovernanceMarks, soeOPTIONAL},
|
||||
{sfAccountIndex, soeOPTIONAL},
|
||||
{sfTouchCount, soeOPTIONAL},
|
||||
{sfHookStateScale, soeOPTIONAL},
|
||||
{sfCron, soeOPTIONAL},
|
||||
{sfAMMID, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
|
||||
@@ -57,7 +57,8 @@ TYPED_SFIELD(sfHookStateChangeCount, UINT16, 17)
|
||||
TYPED_SFIELD(sfHookEmitCount, UINT16, 18)
|
||||
TYPED_SFIELD(sfHookExecutionIndex, UINT16, 19)
|
||||
TYPED_SFIELD(sfHookApiVersion, UINT16, 20)
|
||||
TYPED_SFIELD(sfLedgerFixType, UINT16, 21)
|
||||
TYPED_SFIELD(sfHookStateScale, UINT16, 21)
|
||||
TYPED_SFIELD(sfLedgerFixType, UINT16, 22)
|
||||
|
||||
// 32-bit integers (common)
|
||||
TYPED_SFIELD(sfNetworkID, UINT32, 1)
|
||||
@@ -114,6 +115,9 @@ TYPED_SFIELD(sfLockCount, UINT32, 49)
|
||||
TYPED_SFIELD(sfFirstNFTokenSequence, UINT32, 50)
|
||||
TYPED_SFIELD(sfOracleDocumentID, UINT32, 51)
|
||||
|
||||
TYPED_SFIELD(sfStartTime, UINT32, 93)
|
||||
TYPED_SFIELD(sfRepeatCount, UINT32, 94)
|
||||
TYPED_SFIELD(sfDelaySeconds, UINT32, 95)
|
||||
TYPED_SFIELD(sfXahauActivationLgrSeq, UINT32, 96)
|
||||
TYPED_SFIELD(sfImportSequence, UINT32, 97)
|
||||
TYPED_SFIELD(sfRewardTime, UINT32, 98)
|
||||
@@ -210,6 +214,9 @@ TYPED_SFIELD(sfGovernanceFlags, UINT256, 99)
|
||||
TYPED_SFIELD(sfGovernanceMarks, UINT256, 98)
|
||||
TYPED_SFIELD(sfEmittedTxnID, UINT256, 97)
|
||||
TYPED_SFIELD(sfHookCanEmit, UINT256, 96)
|
||||
TYPED_SFIELD(sfCron, UINT256, 95)
|
||||
TYPED_SFIELD(sfHookOnIncoming, UINT256, 94)
|
||||
TYPED_SFIELD(sfHookOnOutgoing, UINT256, 93)
|
||||
|
||||
// number (common)
|
||||
TYPED_SFIELD(sfNumber, NUMBER, 1)
|
||||
|
||||
@@ -73,6 +73,7 @@ TRANSACTION(ttACCOUNT_SET, 3, AccountSet, ({
|
||||
{sfClearFlag, soeOPTIONAL},
|
||||
{sfTickSize, soeOPTIONAL},
|
||||
{sfNFTokenMinter, soeOPTIONAL},
|
||||
{sfHookStateScale, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
/** This transaction type cancels an existing escrow. */
|
||||
@@ -499,6 +500,20 @@ TRANSACTION(ttPERMISSIONED_DOMAIN_DELETE, 72, PermissionedDomainDelete, ({
|
||||
{sfDomainID, soeREQUIRED},
|
||||
}))
|
||||
|
||||
/* A pseudo-txn alarm signal for invoking a hook, emitted by validators after alarm set conditions are met */
|
||||
TRANSACTION(ttCRON, 92, Cron, ({
|
||||
{sfOwner, soeREQUIRED},
|
||||
{sfLedgerSequence, soeREQUIRED},
|
||||
}))
|
||||
|
||||
/* Sechedule an alarm for later */
|
||||
TRANSACTION(ttCRON_SET, 93, CronSet, ({
|
||||
{sfDelaySeconds, soeOPTIONAL},
|
||||
{sfRepeatCount, soeOPTIONAL},
|
||||
{sfStartTime, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
|
||||
/* A note attaching transactor that allows the owner or issuer (on a object by object basis) to attach remarks */
|
||||
TRANSACTION(ttREMARKS_SET, 94, SetRemarks, ({
|
||||
{sfObjectID, soeREQUIRED},
|
||||
|
||||
@@ -78,6 +78,8 @@ JSS(HookCanEmit); // field
|
||||
JSS(HookHash); // field
|
||||
JSS(HookNamespace); // field
|
||||
JSS(HookOn); // field
|
||||
JSS(HookOnIncoming); // field
|
||||
JSS(HookOnOutgoing); // field
|
||||
JSS(Hooks); // field
|
||||
JSS(HookGrants); // field
|
||||
JSS(HookParameters); // field
|
||||
@@ -96,6 +98,7 @@ JSS(Issuer); // in: Credential transactions
|
||||
JSS(InvoiceID); // field
|
||||
JSS(LastLedgerSequence); // in: TransactionSign; field
|
||||
JSS(LastUpdateTime); // field.
|
||||
JSS(FirstLedgerSequence); // in: TransactionSign; field
|
||||
JSS(LimitAmount); // field.
|
||||
JSS(NetworkID); // field.
|
||||
JSS(LPTokenOut); // in: AMM Liquidity Provider deposit tokens
|
||||
@@ -119,6 +122,7 @@ JSS(Signer); // field.
|
||||
JSS(Signers); // field.
|
||||
JSS(HookStateData); // field.
|
||||
JSS(HookStateKey); // field.
|
||||
JSS(EmitDetails); // field.
|
||||
JSS(SigningPubKey); // field.
|
||||
JSS(Subject); // in: Credential transactions
|
||||
JSS(TakerGets); // field.
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
#!/bin/bash -u
|
||||
#!/bin/bash
|
||||
# We use set -e and bash with -u to bail on first non zero exit code of any
|
||||
# processes launched or upon any unbound variable.
|
||||
# We use set -x to print commands before running them to help with
|
||||
# debugging.
|
||||
|
||||
set -ex
|
||||
|
||||
echo "START BUILDING (HOST)"
|
||||
@@ -14,7 +15,7 @@ BUILD_CORES=$(echo "scale=0 ; `nproc` / 1.337" | bc)
|
||||
|
||||
if [[ "$GITHUB_REPOSITORY" == "" ]]; then
|
||||
#Default
|
||||
BUILD_CORES=8
|
||||
BUILD_CORES=${BUILD_CORES:-8}
|
||||
fi
|
||||
|
||||
# Ensure still works outside of GH Actions by setting these to /dev/null
|
||||
@@ -46,29 +47,192 @@ fi
|
||||
|
||||
STATIC_CONTAINER=$(docker ps -a | grep $CONTAINER_NAME |wc -l)
|
||||
|
||||
CACHE_VOLUME_NAME="xahau-release-builder-cache"
|
||||
|
||||
# if [[ "$STATIC_CONTAINER" -gt "0" && "$GITHUB_REPOSITORY" != "" ]]; then
|
||||
if false; then
|
||||
echo "Static container, execute in static container to have max. cache"
|
||||
docker start $CONTAINER_NAME
|
||||
docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -x /io/build-core.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
|
||||
docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && bash -x /io/build-core.sh '$GITHUB_REPOSITORY' '$GITHUB_SHA' '$BUILD_CORES' '$GITHUB_RUN_NUMBER'"
|
||||
docker stop $CONTAINER_NAME
|
||||
else
|
||||
echo "No static container, build on temp container"
|
||||
rm -rf release-build;
|
||||
mkdir -p release-build;
|
||||
|
||||
docker volume create $CACHE_VOLUME_NAME
|
||||
|
||||
# Create inline Dockerfile with environment setup for build-full.sh
|
||||
DOCKERFILE_CONTENT=$(cat <<'DOCKERFILE_EOF'
|
||||
FROM ghcr.io/phusion/holy-build-box:4.0.1-amd64
|
||||
|
||||
ARG BUILD_CORES=8
|
||||
|
||||
# Enable repositories and install dependencies
|
||||
RUN /hbb_exe/activate-exec bash -c "dnf install -y epel-release && \
|
||||
dnf config-manager --set-enabled powertools || dnf config-manager --set-enabled crb && \
|
||||
dnf install -y --enablerepo=devel \
|
||||
wget git \
|
||||
gcc-toolset-11-gcc-c++ gcc-toolset-11-binutils gcc-toolset-11-libatomic-devel \
|
||||
lz4 lz4-devel \
|
||||
ncurses-static ncurses-devel \
|
||||
snappy snappy-devel \
|
||||
zlib zlib-devel zlib-static \
|
||||
libasan \
|
||||
python3 python3-pip \
|
||||
ccache \
|
||||
ninja-build \
|
||||
mold \
|
||||
patch \
|
||||
glibc-devel glibc-static \
|
||||
libxml2-devel \
|
||||
autoconf \
|
||||
automake \
|
||||
texinfo \
|
||||
libtool \
|
||||
llvm14-static llvm14-devel && \
|
||||
dnf clean all"
|
||||
|
||||
# Install Conan 2 and CMake
|
||||
RUN /hbb_exe/activate-exec pip3 install "conan>=2.0,<3.0" && \
|
||||
/hbb_exe/activate-exec wget -q https://github.com/Kitware/CMake/releases/download/v3.25.3/cmake-3.25.3-linux-x86_64.tar.gz -O cmake.tar.gz && \
|
||||
mkdir cmake && \
|
||||
tar -xzf cmake.tar.gz --strip-components=1 -C cmake && \
|
||||
rm cmake.tar.gz
|
||||
|
||||
# Dual Boost configuration in HBB environment:
|
||||
# - Manual Boost in /usr/local (minimal: for WasmEdge which is pre-built in Docker)
|
||||
# - Conan Boost (full: for the application and all dependencies via toolchain)
|
||||
#
|
||||
# Install minimal Boost 1.86.0 for WasmEdge only (filesystem and its dependencies)
|
||||
# The main application will use Conan-provided Boost for all other components
|
||||
# IMPORTANT: Understanding Boost linking options:
|
||||
# - link=static: Creates static Boost libraries (.a files) instead of shared (.so files)
|
||||
# - runtime-link=shared: Links Boost libraries against shared libc (glibc)
|
||||
# WasmEdge only needs boost::filesystem and boost::system
|
||||
RUN /hbb_exe/activate-exec bash -c "echo 'Boost cache bust: v5-minimal' && \
|
||||
rm -rf /usr/local/lib/libboost* /usr/local/include/boost && \
|
||||
cd /tmp && \
|
||||
wget -q https://archives.boost.io/release/1.86.0/source/boost_1_86_0.tar.gz -O boost.tar.gz && \
|
||||
mkdir boost && \
|
||||
tar -xzf boost.tar.gz --strip-components=1 -C boost && \
|
||||
cd boost && \
|
||||
./bootstrap.sh && \
|
||||
./b2 install \
|
||||
link=static runtime-link=shared -j${BUILD_CORES} \
|
||||
--with-filesystem --with-system && \
|
||||
cd /tmp && \
|
||||
rm -rf boost boost.tar.gz"
|
||||
|
||||
ENV CMAKE_EXE_LINKER_FLAGS="-static-libstdc++"
|
||||
|
||||
ENV LLVM_DIR=/usr/lib64/llvm14/lib/cmake/llvm
|
||||
ENV WasmEdge_LIB=/usr/local/lib64/libwasmedge.a
|
||||
|
||||
# Install LLD
|
||||
RUN /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && \
|
||||
cd /tmp && \
|
||||
wget -q https://github.com/llvm/llvm-project/releases/download/llvmorg-14.0.3/lld-14.0.3.src.tar.xz && \
|
||||
wget -q https://github.com/llvm/llvm-project/releases/download/llvmorg-14.0.3/libunwind-14.0.3.src.tar.xz && \
|
||||
tar -xf lld-14.0.3.src.tar.xz && \
|
||||
tar -xf libunwind-14.0.3.src.tar.xz && \
|
||||
cp -r libunwind-14.0.3.src/include libunwind-14.0.3.src/src lld-14.0.3.src/ && \
|
||||
cd lld-14.0.3.src && \
|
||||
mkdir -p build && cd build && \
|
||||
cmake .. \
|
||||
-DLLVM_LIBRARY_DIR=/usr/lib64/llvm14/lib/ \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr/lib64/llvm14/ \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_EXE_LINKER_FLAGS=\"\$CMAKE_EXE_LINKER_FLAGS\" && \
|
||||
make -j${BUILD_CORES} install && \
|
||||
ln -s /usr/lib64/llvm14/lib/include/lld /usr/include/lld && \
|
||||
cp /usr/lib64/llvm14/lib/liblld*.a /usr/local/lib/ && \
|
||||
cd /tmp && rm -rf lld-* libunwind-*"
|
||||
|
||||
# Build and install WasmEdge (static version)
|
||||
# Note: Conan only provides WasmEdge with shared library linking.
|
||||
# For a fully static build, we need to manually install:
|
||||
# * Boost: Static C++ libraries for filesystem and system operations (built from source above)
|
||||
# * LLVM: Static LLVM libraries for WebAssembly compilation (installed via llvm14-static package)
|
||||
# * LLD: Static linker to produce the final static binary (built from source above)
|
||||
# These were installed above to enable WASMEDGE_LINK_LLVM_STATIC=ON
|
||||
RUN cd /tmp && \
|
||||
( wget -nc -q https://github.com/WasmEdge/WasmEdge/archive/refs/tags/0.11.2.zip; unzip -o 0.11.2.zip; ) && \
|
||||
cd WasmEdge-0.11.2 && \
|
||||
( mkdir -p build; echo "" ) && \
|
||||
cd build && \
|
||||
/hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && \
|
||||
ln -sf /opt/rh/gcc-toolset-11/root/usr/bin/ar /usr/bin/ar && \
|
||||
ln -sf /opt/rh/gcc-toolset-11/root/usr/bin/ranlib /usr/bin/ranlib && \
|
||||
echo '=== Binutils version check ===' && \
|
||||
ar --version | head -1 && \
|
||||
ranlib --version | head -1 && \
|
||||
cmake .. \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr/local \
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON \
|
||||
-DWASMEDGE_BUILD_SHARED_LIB=OFF \
|
||||
-DWASMEDGE_BUILD_STATIC_LIB=ON \
|
||||
-DWASMEDGE_BUILD_AOT_RUNTIME=ON \
|
||||
-DWASMEDGE_FORCE_DISABLE_LTO=ON \
|
||||
-DWASMEDGE_LINK_LLVM_STATIC=ON \
|
||||
-DWASMEDGE_BUILD_PLUGINS=OFF \
|
||||
-DWASMEDGE_LINK_TOOLS_STATIC=ON \
|
||||
-DBoost_NO_BOOST_CMAKE=ON \
|
||||
-DCMAKE_EXE_LINKER_FLAGS=\"\$CMAKE_EXE_LINKER_FLAGS\" \
|
||||
&& \
|
||||
make -j${BUILD_CORES} install" && \
|
||||
cp -r include/api/wasmedge /usr/include/ && \
|
||||
cd /tmp && rm -rf WasmEdge* 0.11.2.zip
|
||||
|
||||
# Set environment variables
|
||||
ENV PATH=/usr/local/bin:$PATH
|
||||
|
||||
# Configure ccache and Conan 2
|
||||
# NOTE: Using echo commands instead of heredocs because heredocs in Docker RUN commands are finnicky
|
||||
RUN /hbb_exe/activate-exec bash -c "ccache -M 100G && \
|
||||
ccache -o cache_dir=/cache/ccache && \
|
||||
ccache -o compiler_check=content && \
|
||||
mkdir -p ~/.conan2 /cache/conan2 /cache/conan2_download /cache/conan2_sources && \
|
||||
echo 'core.cache:storage_path=/cache/conan2' > ~/.conan2/global.conf && \
|
||||
echo 'core.download:download_cache=/cache/conan2_download' >> ~/.conan2/global.conf && \
|
||||
echo 'core.sources:download_cache=/cache/conan2_sources' >> ~/.conan2/global.conf && \
|
||||
conan profile detect --force && \
|
||||
echo '[settings]' > ~/.conan2/profiles/default && \
|
||||
echo 'arch=x86_64' >> ~/.conan2/profiles/default && \
|
||||
echo 'build_type=Release' >> ~/.conan2/profiles/default && \
|
||||
echo 'compiler=gcc' >> ~/.conan2/profiles/default && \
|
||||
echo 'compiler.cppstd=20' >> ~/.conan2/profiles/default && \
|
||||
echo 'compiler.libcxx=libstdc++11' >> ~/.conan2/profiles/default && \
|
||||
echo 'compiler.version=11' >> ~/.conan2/profiles/default && \
|
||||
echo 'os=Linux' >> ~/.conan2/profiles/default && \
|
||||
echo '' >> ~/.conan2/profiles/default && \
|
||||
echo '[conf]' >> ~/.conan2/profiles/default && \
|
||||
echo '# Force building from source for packages with binary compatibility issues' >> ~/.conan2/profiles/default && \
|
||||
echo '*:tools.system.package_manager:mode=build' >> ~/.conan2/profiles/default && \
|
||||
ln -s ../../bin/ccache /usr/lib64/ccache/g++ && \
|
||||
ln -s ../../bin/ccache /usr/lib64/ccache/c++"
|
||||
|
||||
DOCKERFILE_EOF
|
||||
)
|
||||
|
||||
# Build custom Docker image
|
||||
IMAGE_NAME="xahaud-builder:latest"
|
||||
echo "Building custom Docker image with dependencies..."
|
||||
echo "$DOCKERFILE_CONTENT" | docker build --build-arg BUILD_CORES="$BUILD_CORES" -t "$IMAGE_NAME" - || exit 1
|
||||
|
||||
if [[ "$GITHUB_REPOSITORY" == "" ]]; then
|
||||
# Non GH, local building
|
||||
echo "Non-GH runner, local building, temp container"
|
||||
docker run -i --user 0:$(id -g) --rm -v /data/builds:/data/builds -v `pwd`:/io --network host ghcr.io/foobarwidget/holy-build-box-x64 /hbb_exe/activate-exec bash -x /io/build-full.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
|
||||
docker run -i --user 0:$(id -g) --rm -v /data/builds:/data/builds -v `pwd`:/io -v "$CACHE_VOLUME_NAME":/cache --network host "$IMAGE_NAME" /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && bash -x /io/build-full.sh '$GITHUB_REPOSITORY' '$GITHUB_SHA' '$BUILD_CORES' '$GITHUB_RUN_NUMBER'"
|
||||
else
|
||||
# GH Action, runner
|
||||
echo "GH Action, runner, clean & re-create create persistent container"
|
||||
docker rm -f $CONTAINER_NAME
|
||||
echo "echo 'Stopping container: $CONTAINER_NAME'" >> "$JOB_CLEANUP_SCRIPT"
|
||||
echo "docker stop --time=15 \"$CONTAINER_NAME\" || echo 'Failed to stop container or container not running'" >> "$JOB_CLEANUP_SCRIPT"
|
||||
docker run -di --user 0:$(id -g) --name $CONTAINER_NAME -v /data/builds:/data/builds -v `pwd`:/io --network host ghcr.io/foobarwidget/holy-build-box-x64 /hbb_exe/activate-exec bash
|
||||
docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -x /io/build-full.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
|
||||
docker run -di --user 0:$(id -g) --name $CONTAINER_NAME -v /data/builds:/data/builds -v `pwd`:/io -v "$CACHE_VOLUME_NAME":/cache --network host "$IMAGE_NAME" /hbb_exe/activate-exec bash
|
||||
docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && bash -x /io/build-full.sh '$GITHUB_REPOSITORY' '$GITHUB_SHA' '$BUILD_CORES' '$GITHUB_RUN_NUMBER'"
|
||||
docker stop $CONTAINER_NAME
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -17,11 +17,20 @@
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include <date/date.h>
|
||||
|
||||
#include <xrpl/basics/Log.h>
|
||||
#include <xrpl/basics/chrono.h>
|
||||
#include <xrpl/basics/contract.h>
|
||||
#include <xrpl/beast/utility/instrumentation.h>
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
#include <xrpl/beast/utility/EnhancedLogging.h>
|
||||
#include <date/tz.h>
|
||||
#endif
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include <cassert>
|
||||
#include <cstring>
|
||||
#include <ctime>
|
||||
#include <fstream>
|
||||
#include <functional>
|
||||
#include <iostream>
|
||||
@@ -324,11 +333,47 @@ Logs::format(
|
||||
{
|
||||
output.reserve(message.size() + partition.size() + 100);
|
||||
|
||||
output = to_string(std::chrono::system_clock::now());
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
// Environment variables are used instead of config file because:
|
||||
// 1. Logging starts before config parsing (needed to debug config issues)
|
||||
// 2. This is a developer feature - devs can easily set env vars
|
||||
// 3. Allows per-run overrides without editing config files
|
||||
static const char* fmt = []() {
|
||||
const char* env = std::getenv("LOG_DATE_FORMAT");
|
||||
return env ? env : "%Y-%b-%d %T %Z"; // Default format
|
||||
}();
|
||||
|
||||
// Check if we should use local time
|
||||
static const bool useLocalTime = []() {
|
||||
const char* env = std::getenv("LOG_DATE_LOCAL");
|
||||
return env && std::strcmp(env, "1") == 0;
|
||||
}();
|
||||
|
||||
if (useLocalTime)
|
||||
{
|
||||
auto now = std::chrono::system_clock::now();
|
||||
auto local = date::make_zoned(date::current_zone(), now);
|
||||
output = date::format(fmt, local);
|
||||
}
|
||||
else
|
||||
{
|
||||
output = date::format(fmt, std::chrono::system_clock::now());
|
||||
}
|
||||
#else
|
||||
output = to_string(std::chrono::system_clock::now());
|
||||
#endif
|
||||
|
||||
if (!output.empty()) // Allow setting date format to an empty string
|
||||
output += " ";
|
||||
|
||||
output += " ";
|
||||
if (!partition.empty())
|
||||
{
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
if (beast::detail::should_log_use_colors())
|
||||
output += beast::detail::get_log_highlight_color();
|
||||
#endif
|
||||
output += partition + ":";
|
||||
}
|
||||
|
||||
using namespace beast::severities;
|
||||
switch (severity)
|
||||
@@ -356,6 +401,11 @@ Logs::format(
|
||||
break;
|
||||
}
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
if (beast::detail::should_log_use_colors())
|
||||
output += "\033[0m";
|
||||
#endif
|
||||
|
||||
output += message;
|
||||
|
||||
// Limit the maximum length of the output
|
||||
|
||||
118
src/libxrpl/beast/utility/src/beast_EnhancedLogging.cpp
Normal file
118
src/libxrpl/beast/utility/src/beast_EnhancedLogging.cpp
Normal file
@@ -0,0 +1,118 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of Beast: https://github.com/vinniefalco/Beast
|
||||
Copyright 2013, Vinnie Falco <vinnie.falco@gmail.com>
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
|
||||
#include <xrpl/beast/utility/EnhancedLogging.h>
|
||||
#include <cstdlib>
|
||||
#include <cstring>
|
||||
#include <ostream>
|
||||
#include <unistd.h>
|
||||
|
||||
namespace beast {
|
||||
namespace detail {
|
||||
|
||||
// Check if we should use colors - cached at startup
|
||||
bool
|
||||
should_log_use_colors()
|
||||
{
|
||||
static const bool use_colors = []() {
|
||||
// Honor NO_COLOR environment variable (standard)
|
||||
if (std::getenv("NO_COLOR"))
|
||||
return false;
|
||||
|
||||
// Honor FORCE_COLOR to override terminal detection
|
||||
if (std::getenv("FORCE_COLOR"))
|
||||
return true;
|
||||
|
||||
// Check if stderr is a terminal
|
||||
return isatty(STDERR_FILENO) != 0;
|
||||
}();
|
||||
return use_colors;
|
||||
}
|
||||
|
||||
// Get the log highlight color - can be overridden via
|
||||
// LOG_HIGHLIGHT_COLOR
|
||||
const char*
|
||||
get_log_highlight_color()
|
||||
{
|
||||
static const char* escape = []() {
|
||||
const char* env = std::getenv("LOG_HIGHLIGHT_COLOR");
|
||||
if (!env)
|
||||
return "\033[36m"; // Default: cyan
|
||||
|
||||
// Simple map of color names to escape sequences
|
||||
if (std::strcmp(env, "red") == 0)
|
||||
return "\033[31m";
|
||||
if (std::strcmp(env, "green") == 0)
|
||||
return "\033[32m";
|
||||
if (std::strcmp(env, "yellow") == 0)
|
||||
return "\033[33m";
|
||||
if (std::strcmp(env, "blue") == 0)
|
||||
return "\033[34m";
|
||||
if (std::strcmp(env, "magenta") == 0)
|
||||
return "\033[35m";
|
||||
if (std::strcmp(env, "cyan") == 0)
|
||||
return "\033[36m";
|
||||
if (std::strcmp(env, "white") == 0)
|
||||
return "\033[37m";
|
||||
if (std::strcmp(env, "gray") == 0 || std::strcmp(env, "grey") == 0)
|
||||
return "\033[90m"; // Bright black (gray)
|
||||
if (std::strcmp(env, "orange") == 0)
|
||||
return "\033[93m"; // Bright yellow (appears orange-ish)
|
||||
if (std::strcmp(env, "none") == 0)
|
||||
return "";
|
||||
|
||||
// Default to cyan if unknown color name
|
||||
return "\033[36m";
|
||||
}();
|
||||
return escape;
|
||||
}
|
||||
|
||||
// Check if location info should be shown - cached at startup
|
||||
bool
|
||||
should_show_location()
|
||||
{
|
||||
static const bool show = []() {
|
||||
const char* env = std::getenv("LOG_DISABLE");
|
||||
// Show location by default, hide if LOG_DISABLE=1
|
||||
return !env || std::strcmp(env, "1") != 0;
|
||||
}();
|
||||
return show;
|
||||
}
|
||||
|
||||
// Helper to write location string (no leading/trailing space)
|
||||
void
|
||||
log_write_location_string(std::ostream& os, const char* file, int line)
|
||||
{
|
||||
if (detail::should_log_use_colors())
|
||||
{
|
||||
os << detail::get_log_highlight_color() << "["
|
||||
<< detail::strip_source_root(file) << ":" << line << "]\033[0m";
|
||||
}
|
||||
else
|
||||
{
|
||||
os << "[" << detail::strip_source_root(file) << ":" << line << "]";
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace detail
|
||||
} // namespace beast
|
||||
|
||||
#endif // BEAST_ENHANCED_LOGGING
|
||||
@@ -19,6 +19,12 @@
|
||||
|
||||
#include <xrpl/beast/utility/Journal.h>
|
||||
#include <xrpl/beast/utility/instrumentation.h>
|
||||
#include <cassert>
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
#include <xrpl/beast/utility/EnhancedLogging.h>
|
||||
#include <cstdlib>
|
||||
#include <cstring>
|
||||
#endif
|
||||
|
||||
namespace beast {
|
||||
|
||||
@@ -138,9 +144,65 @@ Journal::ScopedStream::ScopedStream(
|
||||
m_ostream << manip;
|
||||
}
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
Journal::ScopedStream::ScopedStream(
|
||||
Sink& sink,
|
||||
Severity level,
|
||||
const char* file,
|
||||
int line)
|
||||
: m_sink(sink), m_level(level), file_(file), line_(line)
|
||||
{
|
||||
// Modifiers applied from all ctors
|
||||
m_ostream << std::boolalpha << std::showbase;
|
||||
}
|
||||
#endif
|
||||
|
||||
Journal::ScopedStream::~ScopedStream()
|
||||
{
|
||||
std::string const& s(m_ostream.str());
|
||||
std::string s(m_ostream.str());
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
// Add suffix if location is enabled
|
||||
if (file_ && detail::should_show_location() && !s.empty())
|
||||
{
|
||||
// Single optimized scan from the end
|
||||
size_t const lastNonWhitespace = s.find_last_not_of(" \n\r\t");
|
||||
|
||||
// Skip if message is only whitespace (e.g., just "\n" or " \n\n")
|
||||
if (lastNonWhitespace != std::string::npos)
|
||||
{
|
||||
// Count only the trailing newlines (tiny range)
|
||||
size_t trailingNewlines = 0;
|
||||
for (size_t i = lastNonWhitespace + 1; i < s.length(); ++i)
|
||||
{
|
||||
if (s[i] == '\n')
|
||||
++trailingNewlines;
|
||||
}
|
||||
|
||||
// Build location string once
|
||||
std::ostringstream locStream;
|
||||
detail::log_write_location_string(locStream, file_, line_);
|
||||
std::string const location = locStream.str();
|
||||
|
||||
// Pre-allocate exact size → zero reallocations
|
||||
size_t const finalSize = lastNonWhitespace + 1 + 1 +
|
||||
location.length() + trailingNewlines;
|
||||
|
||||
std::string result;
|
||||
result.reserve(finalSize);
|
||||
|
||||
// Direct string ops (no ostringstream overhead)
|
||||
result.append(s, 0, lastNonWhitespace + 1);
|
||||
result.push_back(' ');
|
||||
result += location;
|
||||
if (trailingNewlines > 0)
|
||||
result.append(trailingNewlines, '\n');
|
||||
|
||||
s = std::move(result); // Move, no copy
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if (!s.empty())
|
||||
{
|
||||
if (s == "\n")
|
||||
@@ -164,4 +226,18 @@ Journal::Stream::operator<<(std::ostream& manip(std::ostream&)) const
|
||||
return ScopedStream(*this, manip);
|
||||
}
|
||||
|
||||
#ifdef BEAST_ENHANCED_LOGGING
|
||||
|
||||
// Implementation moved to use new constructor
|
||||
Journal::ScopedStream
|
||||
Journal::StreamWithLocation::operator<<(
|
||||
std::ostream& manip(std::ostream&)) const
|
||||
{
|
||||
// Create a ScopedStream with location info
|
||||
ScopedStream scoped(stream_.sink(), stream_.level(), file_, line_);
|
||||
scoped.ostream() << manip;
|
||||
return scoped;
|
||||
}
|
||||
#endif
|
||||
|
||||
} // namespace beast
|
||||
|
||||
@@ -78,6 +78,7 @@ enum class LedgerNameSpace : std::uint16_t {
|
||||
URI_TOKEN = 'U',
|
||||
IMPORT_VLSEQ = 'I',
|
||||
UNL_REPORT = 'R',
|
||||
CRON = 'L',
|
||||
AMM = 'A',
|
||||
BRIDGE = 'H',
|
||||
XCHAIN_CLAIM_ID = 'Q',
|
||||
@@ -491,6 +492,58 @@ uritoken(AccountID const& issuer, Blob const& uri)
|
||||
LedgerNameSpace::URI_TOKEN, issuer, Slice{uri.data(), uri.size()})};
|
||||
}
|
||||
|
||||
// Constructs an ordered CRON keylet (32 bytes):
|
||||
// [8-byte namespace][4-byte timestamp (big-endian, seconds)][20-byte
|
||||
// AccountID]
|
||||
//
|
||||
// Properties
|
||||
// - Namespacing: first 8 bytes are the most-significant bytes of
|
||||
// indexHash(LedgerNameSpace::CRON).
|
||||
// - Uniqueness (per ts,acct): exactly ONE cron per (timestamp, AccountID).
|
||||
// Insert is upsert (last-write-wins).
|
||||
// This is fine because we only ever allow one cron per account at a time—if
|
||||
// the same (ts,acct) is written, it’s simply an update to that single entry
|
||||
// (idempotent; no duplicate leaves).
|
||||
// - Iteration order: chronological by timestamp (BE), then by raw AccountID
|
||||
// bytes.
|
||||
// NOTE: raw AccountID ordering may bias priority; consider hashing AccountID
|
||||
// for uniform per-timestamp spread.
|
||||
// - Expected accidental prefix collisions (foreign objects sharing the 8-byte
|
||||
// namespace): n / 2^64,
|
||||
// assuming uniform high-64-bit distribution of other objects.
|
||||
// Examples: 100M → ~5.4e-12, 1B → ~5.4e-11, 10B → ~5.4e-10, 100B → ~5.4e-9
|
||||
// (negligible).
|
||||
Keylet
|
||||
cron(uint32_t timestamp, std::optional<AccountID> const& id)
|
||||
{
|
||||
static const uint256 ns = indexHash(LedgerNameSpace::CRON);
|
||||
|
||||
uint8_t h[32];
|
||||
|
||||
// first 8 bytes are the namespacing
|
||||
std::memcpy(h, ns.data(), 8);
|
||||
|
||||
// next 4 bytes are the timestamp in BE
|
||||
h[8] = static_cast<uint8_t>((timestamp >> 24) & 0xFFU);
|
||||
h[9] = static_cast<uint8_t>((timestamp >> 16) & 0xFFU);
|
||||
h[10] = static_cast<uint8_t>((timestamp >> 8) & 0xFFU);
|
||||
h[11] = static_cast<uint8_t>((timestamp >> 0) & 0xFFU);
|
||||
|
||||
if (!id.has_value())
|
||||
{
|
||||
// final 20 bytes are zero
|
||||
std::memset(h + 12, 0, 20);
|
||||
return {ltCRON, uint256::fromVoid(h)};
|
||||
}
|
||||
|
||||
const uint256 accHash = indexHash(LedgerNameSpace::CRON, timestamp, *id);
|
||||
|
||||
// final 20 bytes are account ID
|
||||
std::memcpy(h + 12, accHash.cdata(), 20);
|
||||
|
||||
return {ltCRON, uint256::fromVoid(h)};
|
||||
}
|
||||
|
||||
Keylet
|
||||
amm(Asset const& issue1, Asset const& issue2) noexcept
|
||||
{
|
||||
|
||||
@@ -92,7 +92,9 @@ InnerObjectFormats::InnerObjectFormats()
|
||||
{{sfCreateCode, soeREQUIRED},
|
||||
{sfHookNamespace, soeREQUIRED},
|
||||
{sfHookParameters, soeREQUIRED},
|
||||
{sfHookOn, soeREQUIRED},
|
||||
{sfHookOn, soeOPTIONAL},
|
||||
{sfHookOnIncoming, soeOPTIONAL},
|
||||
{sfHookOnOutgoing, soeOPTIONAL},
|
||||
{sfHookCanEmit, soeOPTIONAL},
|
||||
{sfHookApiVersion, soeREQUIRED},
|
||||
{sfFlags, soeREQUIRED},
|
||||
@@ -106,6 +108,8 @@ InnerObjectFormats::InnerObjectFormats()
|
||||
{sfHookNamespace, soeOPTIONAL},
|
||||
{sfHookParameters, soeOPTIONAL},
|
||||
{sfHookOn, soeOPTIONAL},
|
||||
{sfHookOnIncoming, soeOPTIONAL},
|
||||
{sfHookOnOutgoing, soeOPTIONAL},
|
||||
{sfHookCanEmit, soeOPTIONAL},
|
||||
{sfHookApiVersion, soeOPTIONAL},
|
||||
{sfFlags, soeOPTIONAL}});
|
||||
|
||||
@@ -153,4 +153,12 @@ Rules::operator!=(Rules const& other) const
|
||||
{
|
||||
return !(*this == other);
|
||||
}
|
||||
|
||||
bool
|
||||
isFeatureEnabled(uint256 const& feature)
|
||||
{
|
||||
auto const& rules = getCurrentTransactionRules();
|
||||
return rules && rules->enabled(feature);
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -487,7 +487,7 @@ isMemoOkay(STObject const& st, std::string& reason)
|
||||
if (!paramObj->isFieldPresent(sfHookParameterValue) ||
|
||||
paramObj->getFieldVL(sfHookParameterValue).size() > maxVal)
|
||||
{
|
||||
reason = "HookParameterValue cannot exceed 128 bytes.";
|
||||
reason = "HookParameterValue cannot exceed 256 bytes.";
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -684,7 +684,7 @@ isPseudoTx(STObject const& tx)
|
||||
|
||||
auto tt = safe_cast<TxType>(*t);
|
||||
return tt == ttAMENDMENT || tt == ttFEE || tt == ttUNL_MODIFY ||
|
||||
tt == ttEMIT_FAILURE || tt == ttUNL_REPORT;
|
||||
tt == ttEMIT_FAILURE || tt == ttUNL_REPORT || tt == ttCRON;
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -98,6 +98,7 @@ transResults()
|
||||
MAKE_ERROR(tecINSUF_RESERVE_SELLER, "The seller of an object has insufficient reserves, and thus cannot complete the sale."),
|
||||
MAKE_ERROR(tecIMMUTABLE, "The remark is marked immutable on the object, and therefore cannot be updated."),
|
||||
MAKE_ERROR(tecTOO_MANY_REMARKS, "The number of remarks on the object would exceed the limit of 32."),
|
||||
MAKE_ERROR(tecHAS_HOOK_STATE, "Delete all hook state before reducing scale"),
|
||||
MAKE_ERROR(tecINCOMPLETE, "Some work was completed, but more submissions required to finish."),
|
||||
MAKE_ERROR(tecXCHAIN_BAD_TRANSFER_ISSUE, "Bad xchain transfer issue."),
|
||||
MAKE_ERROR(tecXCHAIN_NO_CLAIM_ID, "No such xchain claim id."),
|
||||
|
||||
@@ -582,7 +582,7 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
env.close();
|
||||
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(4000), EUR(5000), IOUAmount{4472135954999580, -12}));
|
||||
USD(4000), EUR(5000), IOUAmount{4472135954999579, -12}));
|
||||
|
||||
// gw clawback 1000 USD from the AMM pool
|
||||
env(amm::ammClawback(gw, alice, USD, EUR, USD(1000)),
|
||||
@@ -602,11 +602,11 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
// 1000 USD and 1250 EUR was withdrawn from the AMM pool, so the
|
||||
// current balance is 3000 USD and 3750 EUR.
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(3000), EUR(3750), IOUAmount{3354101966249685, -12}));
|
||||
USD(3000), EUR(3750), IOUAmount{3354101966249684, -12}));
|
||||
|
||||
// Alice has 3/4 of its initial lptokens Left.
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(alice, IOUAmount{3354101966249685, -12}));
|
||||
amm.expectLPTokens(alice, IOUAmount{3354101966249684, -12}));
|
||||
|
||||
// gw clawback another 500 USD from the AMM pool.
|
||||
env(amm::ammClawback(gw, alice, USD, EUR, USD(500)),
|
||||
@@ -618,13 +618,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
env.require(balance(alice, gw["USD"](2000)));
|
||||
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
STAmount{USD, UINT64_C(2500000000000001), -12},
|
||||
STAmount{EUR, UINT64_C(3125000000000001), -12},
|
||||
IOUAmount{2795084971874738, -12}));
|
||||
USD(2500), EUR(3125), IOUAmount{2795084971874737, -12}));
|
||||
|
||||
BEAST_EXPECT(
|
||||
env.balance(alice, EUR) ==
|
||||
STAmount(EUR, UINT64_C(2874999999999999), -12));
|
||||
BEAST_EXPECT(env.balance(alice, EUR) == EUR(2875));
|
||||
|
||||
// gw clawback small amount, 1 USD.
|
||||
env(amm::ammClawback(gw, alice, USD, EUR, USD(1)), ter(tesSUCCESS));
|
||||
@@ -634,13 +630,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
env.require(balance(alice, gw["USD"](2000)));
|
||||
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
STAmount{USD, UINT64_C(2499000000000002), -12},
|
||||
STAmount{EUR, UINT64_C(3123750000000002), -12},
|
||||
IOUAmount{2793966937885989, -12}));
|
||||
USD(2499), EUR(3123.75), IOUAmount{2793966937885987, -12}));
|
||||
|
||||
BEAST_EXPECT(
|
||||
env.balance(alice, EUR) ==
|
||||
STAmount(EUR, UINT64_C(2876249999999998), -12));
|
||||
BEAST_EXPECT(env.balance(alice, EUR) == EUR(2876.25));
|
||||
|
||||
// gw clawback 4000 USD, exceeding the current balance. We
|
||||
// will clawback all.
|
||||
@@ -714,13 +706,13 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
// gw2 creates AMM pool of XRP/EUR, alice and bob deposit XRP/EUR.
|
||||
AMM amm2(env, gw2, XRP(3000), EUR(1000), ter(tesSUCCESS));
|
||||
BEAST_EXPECT(amm2.expectBalances(
|
||||
EUR(1000), XRP(3000), IOUAmount{1732050807568878, -9}));
|
||||
EUR(1000), XRP(3000), IOUAmount{1732050807568877, -9}));
|
||||
amm2.deposit(alice, EUR(1000), XRP(3000));
|
||||
BEAST_EXPECT(amm2.expectBalances(
|
||||
EUR(2000), XRP(6000), IOUAmount{3464101615137756, -9}));
|
||||
EUR(2000), XRP(6000), IOUAmount{3464101615137754, -9}));
|
||||
amm2.deposit(bob, EUR(1000), XRP(3000));
|
||||
BEAST_EXPECT(amm2.expectBalances(
|
||||
EUR(3000), XRP(9000), IOUAmount{5196152422706634, -9}));
|
||||
EUR(3000), XRP(9000), IOUAmount{5196152422706631, -9}));
|
||||
env.close();
|
||||
|
||||
auto aliceXrpBalance = env.balance(alice, XRP);
|
||||
@@ -744,9 +736,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
expectLedgerEntryRoot(env, alice, aliceXrpBalance + XRP(1000)));
|
||||
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(2500), XRP(5000), IOUAmount{3535533905932738, -9}));
|
||||
USD(2500), XRP(5000), IOUAmount{3535533905932737, -9}));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(alice, IOUAmount{7071067811865480, -10}));
|
||||
amm.expectLPTokens(alice, IOUAmount{7071067811865474, -10}));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(bob, IOUAmount{1414213562373095, -9}));
|
||||
|
||||
@@ -761,13 +753,11 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(
|
||||
expectLedgerEntryRoot(env, bob, bobXrpBalance + XRP(20)));
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
STAmount{USD, UINT64_C(2490000000000001), -12},
|
||||
XRP(4980),
|
||||
IOUAmount{3521391770309008, -9}));
|
||||
USD(2'490), XRP(4980), IOUAmount{3521391770309006, -9}));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(alice, IOUAmount{7071067811865480, -10}));
|
||||
amm.expectLPTokens(alice, IOUAmount{7071067811865474, -10}));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(bob, IOUAmount{1400071426749365, -9}));
|
||||
amm.expectLPTokens(bob, IOUAmount{1400071426749364, -9}));
|
||||
|
||||
// gw2 clawback 200 EUR from amm2.
|
||||
env(amm::ammClawback(gw2, alice, EUR, XRP, EUR(200)),
|
||||
@@ -781,11 +771,11 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env, alice, aliceXrpBalance + XRP(1000) + XRP(600)));
|
||||
BEAST_EXPECT(amm2.expectBalances(
|
||||
EUR(2800), XRP(8400), IOUAmount{4849742261192859, -9}));
|
||||
EUR(2800), XRP(8400), IOUAmount{4849742261192856, -9}));
|
||||
BEAST_EXPECT(
|
||||
amm2.expectLPTokens(alice, IOUAmount{1385640646055103, -9}));
|
||||
amm2.expectLPTokens(alice, IOUAmount{1385640646055102, -9}));
|
||||
BEAST_EXPECT(
|
||||
amm2.expectLPTokens(bob, IOUAmount{1732050807568878, -9}));
|
||||
amm2.expectLPTokens(bob, IOUAmount{1732050807568877, -9}));
|
||||
|
||||
// gw claw back 1000 USD from alice in amm, which exceeds alice's
|
||||
// balance. This will clawback all the remaining LP tokens of alice
|
||||
@@ -801,14 +791,15 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env,
|
||||
alice,
|
||||
aliceXrpBalance + XRP(1000) + XRP(600) + XRP(1000)));
|
||||
aliceXrpBalance + XRP(1000) + XRP(600) + XRP(1000) -
|
||||
XRPAmount{1}));
|
||||
BEAST_EXPECT(amm.expectLPTokens(alice, IOUAmount(0)));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(bob, IOUAmount{1400071426749365, -9}));
|
||||
amm.expectLPTokens(bob, IOUAmount{1400071426749364, -9}));
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
STAmount{USD, UINT64_C(1990000000000001), -12},
|
||||
XRP(3980),
|
||||
IOUAmount{2814284989122460, -9}));
|
||||
USD(1'990),
|
||||
XRPAmount{3'980'000'001},
|
||||
IOUAmount{2814284989122459, -9}));
|
||||
|
||||
// gw clawback 1000 USD from bob in amm, which also exceeds bob's
|
||||
// balance in amm. All bob's lptoken in amm will be consumed, which
|
||||
@@ -823,7 +814,8 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env,
|
||||
alice,
|
||||
aliceXrpBalance + XRP(1000) + XRP(600) + XRP(1000)));
|
||||
aliceXrpBalance + XRP(1000) + XRP(600) + XRP(1000) -
|
||||
XRPAmount{1}));
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env, bob, bobXrpBalance + XRP(20) + XRP(1980)));
|
||||
|
||||
@@ -846,8 +838,8 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env,
|
||||
alice,
|
||||
aliceXrpBalance + XRP(1000) + XRP(600) + XRP(1000) +
|
||||
XRP(2400)));
|
||||
aliceXrpBalance + XRP(1000) + XRP(600) + XRP(1000) + XRP(2400) -
|
||||
XRPAmount{1}));
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env, bob, bobXrpBalance + XRP(20) + XRP(1980)));
|
||||
|
||||
@@ -855,9 +847,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(amm2.expectLPTokens(alice, IOUAmount(0)));
|
||||
|
||||
BEAST_EXPECT(amm2.expectBalances(
|
||||
EUR(2000), XRP(6000), IOUAmount{3464101615137756, -9}));
|
||||
EUR(2000), XRP(6000), IOUAmount{3464101615137754, -9}));
|
||||
|
||||
// gw2 claw back 2000 EUR from bib in amm2, which exceeds bob's
|
||||
// gw2 claw back 2000 EUR from bob in amm2, which exceeds bob's
|
||||
// balance. All bob's lptokens will be consumed, which corresponds
|
||||
// to 1000EUR / 3000 XRP.
|
||||
env(amm::ammClawback(gw2, bob, EUR, XRP, EUR(2000)),
|
||||
@@ -872,8 +864,8 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env,
|
||||
alice,
|
||||
aliceXrpBalance + XRP(1000) + XRP(600) + XRP(1000) +
|
||||
XRP(2400)));
|
||||
aliceXrpBalance + XRP(1000) + XRP(600) + XRP(1000) + XRP(2400) -
|
||||
XRPAmount{1}));
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env, bob, bobXrpBalance + XRP(20) + XRP(1980) + XRP(3000)));
|
||||
|
||||
@@ -882,7 +874,7 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(amm2.expectLPTokens(bob, IOUAmount(0)));
|
||||
|
||||
BEAST_EXPECT(amm2.expectBalances(
|
||||
EUR(1000), XRP(3000), IOUAmount{1732050807568878, -9}));
|
||||
EUR(1000), XRP(3000), IOUAmount{1732050807568877, -9}));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -941,20 +933,20 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
env.close();
|
||||
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(4000), EUR(5000), IOUAmount{4472135954999580, -12}));
|
||||
USD(4000), EUR(5000), IOUAmount{4472135954999579, -12}));
|
||||
amm.deposit(bob, USD(2000), EUR(2500));
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(6000), EUR(7500), IOUAmount{6708203932499370, -12}));
|
||||
USD(6000), EUR(7500), IOUAmount{6708203932499368, -12}));
|
||||
amm.deposit(carol, USD(1000), EUR(1250));
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(7000), EUR(8750), IOUAmount{7826237921249265, -12}));
|
||||
USD(7000), EUR(8750), IOUAmount{7826237921249262, -12}));
|
||||
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(alice, IOUAmount{4472135954999580, -12}));
|
||||
amm.expectLPTokens(alice, IOUAmount{4472135954999579, -12}));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(bob, IOUAmount{2236067977499790, -12}));
|
||||
amm.expectLPTokens(bob, IOUAmount{2236067977499789, -12}));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(carol, IOUAmount{1118033988749895, -12}));
|
||||
amm.expectLPTokens(carol, IOUAmount{1118033988749894, -12}));
|
||||
|
||||
env.require(balance(alice, gw["USD"](2000)));
|
||||
env.require(balance(alice, gw2["EUR"](1000)));
|
||||
@@ -969,15 +961,15 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
env.close();
|
||||
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
STAmount{USD, UINT64_C(4999999999999999), -12},
|
||||
STAmount{EUR, UINT64_C(6249999999999999), -12},
|
||||
IOUAmount{5590169943749475, -12}));
|
||||
STAmount{USD, UINT64_C(5000000000000001), -12},
|
||||
STAmount{EUR, UINT64_C(6250000000000001), -12},
|
||||
IOUAmount{5590169943749473, -12}));
|
||||
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(alice, IOUAmount{4472135954999580, -12}));
|
||||
amm.expectLPTokens(alice, IOUAmount{4472135954999579, -12}));
|
||||
BEAST_EXPECT(amm.expectLPTokens(bob, IOUAmount(0)));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(carol, IOUAmount{1118033988749895, -12}));
|
||||
amm.expectLPTokens(carol, IOUAmount{1118033988749894, -12}));
|
||||
|
||||
// Bob will get 2500 EUR back.
|
||||
env.require(balance(alice, gw["USD"](2000)));
|
||||
@@ -988,7 +980,7 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
|
||||
BEAST_EXPECT(
|
||||
env.balance(bob, EUR) ==
|
||||
STAmount(EUR, UINT64_C(5000000000000001), -12));
|
||||
STAmount(EUR, UINT64_C(4999999999999999), -12));
|
||||
env.require(balance(carol, gw["USD"](3000)));
|
||||
env.require(balance(carol, gw2["EUR"](2750)));
|
||||
|
||||
@@ -997,12 +989,12 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
ter(tesSUCCESS));
|
||||
env.close();
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
STAmount{USD, UINT64_C(3999999999999999), -12},
|
||||
STAmount{EUR, UINT64_C(4999999999999999), -12},
|
||||
IOUAmount{4472135954999580, -12}));
|
||||
STAmount{USD, UINT64_C(4000000000000001), -12},
|
||||
STAmount{EUR, UINT64_C(5000000000000002), -12},
|
||||
IOUAmount{4472135954999579, -12}));
|
||||
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(alice, IOUAmount{4472135954999580, -12}));
|
||||
amm.expectLPTokens(alice, IOUAmount{4472135954999579, -12}));
|
||||
BEAST_EXPECT(amm.expectLPTokens(bob, IOUAmount(0)));
|
||||
BEAST_EXPECT(amm.expectLPTokens(carol, IOUAmount(0)));
|
||||
|
||||
@@ -1042,13 +1034,13 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
// gw creates AMM pool of XRP/USD, alice and bob deposit XRP/USD.
|
||||
AMM amm(env, gw, XRP(2000), USD(10000), ter(tesSUCCESS));
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(10000), XRP(2000), IOUAmount{4472135954999580, -9}));
|
||||
USD(10000), XRP(2000), IOUAmount{4472135954999579, -9}));
|
||||
amm.deposit(alice, USD(1000), XRP(200));
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(11000), XRP(2200), IOUAmount{4919349550499538, -9}));
|
||||
USD(11000), XRP(2200), IOUAmount{4919349550499536, -9}));
|
||||
amm.deposit(bob, USD(2000), XRP(400));
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(13000), XRP(2600), IOUAmount{5813776741499453, -9}));
|
||||
USD(13000), XRP(2600), IOUAmount{5813776741499451, -9}));
|
||||
env.close();
|
||||
|
||||
auto aliceXrpBalance = env.balance(alice, XRP);
|
||||
@@ -1059,9 +1051,11 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
ter(tesSUCCESS));
|
||||
env.close();
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(12000), XRP(2400), IOUAmount{5366563145999495, -9}));
|
||||
BEAST_EXPECT(
|
||||
expectLedgerEntryRoot(env, alice, aliceXrpBalance + XRP(200)));
|
||||
USD(12000),
|
||||
XRPAmount(2400000001),
|
||||
IOUAmount{5366563145999494, -9}));
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env, alice, aliceXrpBalance + XRP(200) - XRPAmount{1}));
|
||||
BEAST_EXPECT(amm.expectLPTokens(alice, IOUAmount(0)));
|
||||
|
||||
// gw clawback all bob's USD in amm. (2000 USD / 400 XRP)
|
||||
@@ -1069,7 +1063,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
ter(tesSUCCESS));
|
||||
env.close();
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(10000), XRP(2000), IOUAmount{4472135954999580, -9}));
|
||||
USD(10000),
|
||||
XRPAmount(2000000001),
|
||||
IOUAmount{4472135954999579, -9}));
|
||||
BEAST_EXPECT(
|
||||
expectLedgerEntryRoot(env, bob, bobXrpBalance + XRP(400)));
|
||||
BEAST_EXPECT(amm.expectLPTokens(alice, IOUAmount(0)));
|
||||
@@ -1125,10 +1121,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
amm.deposit(bob, USD(4000), EUR(1000));
|
||||
BEAST_EXPECT(
|
||||
amm.expectBalances(USD(12000), EUR(3000), IOUAmount(6000)));
|
||||
amm.deposit(carol, USD(2000), EUR(500));
|
||||
amm.deposit(carol, USD(2000.25), EUR(500));
|
||||
BEAST_EXPECT(
|
||||
amm.expectBalances(USD(14000), EUR(3500), IOUAmount(7000)));
|
||||
|
||||
// gw clawback 1000 USD from carol.
|
||||
env(amm::ammClawback(gw, carol, USD, EUR, USD(1000)), ter(tesSUCCESS));
|
||||
env.close();
|
||||
@@ -1142,7 +1137,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(env.balance(alice, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(bob, USD) == USD(5000));
|
||||
BEAST_EXPECT(env.balance(bob, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(carol, USD) == USD(6000));
|
||||
BEAST_EXPECT(
|
||||
env.balance(carol, USD) ==
|
||||
STAmount(USD, UINT64_C(5999'999999999999), -12));
|
||||
// 250 EUR goes back to carol.
|
||||
BEAST_EXPECT(env.balance(carol, EUR) == EUR(7750));
|
||||
|
||||
@@ -1164,7 +1161,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(env.balance(bob, USD) == USD(5000));
|
||||
// 250 EUR did not go back to bob because tfClawTwoAssets is set.
|
||||
BEAST_EXPECT(env.balance(bob, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(carol, USD) == USD(6000));
|
||||
BEAST_EXPECT(
|
||||
env.balance(carol, USD) ==
|
||||
STAmount(USD, UINT64_C(5999'999999999999), -12));
|
||||
BEAST_EXPECT(env.balance(carol, EUR) == EUR(7750));
|
||||
|
||||
// gw clawback all USD from alice and set tfClawTwoAssets.
|
||||
@@ -1181,7 +1180,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(env.balance(alice, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(bob, USD) == USD(5000));
|
||||
BEAST_EXPECT(env.balance(bob, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(carol, USD) == USD(6000));
|
||||
BEAST_EXPECT(
|
||||
env.balance(carol, USD) ==
|
||||
STAmount(USD, UINT64_C(5999'999999999999), -12));
|
||||
BEAST_EXPECT(env.balance(carol, EUR) == EUR(7750));
|
||||
}
|
||||
|
||||
@@ -1367,11 +1368,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
env(amm::ammClawback(gw2, gw, EUR, USD, EUR(1000)), ter(tesSUCCESS));
|
||||
env.close();
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(4500),
|
||||
STAmount(EUR, UINT64_C(9000000000000001), -12),
|
||||
IOUAmount{6363961030678928, -12}));
|
||||
USD(4500), EUR(9000), IOUAmount{6363961030678928, -12}));
|
||||
|
||||
BEAST_EXPECT(amm.expectLPTokens(gw, IOUAmount{7071067811865480, -13}));
|
||||
BEAST_EXPECT(amm.expectLPTokens(gw, IOUAmount{7071067811865475, -13}));
|
||||
BEAST_EXPECT(amm.expectLPTokens(gw2, IOUAmount{1414213562373095, -12}));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(alice, IOUAmount{4242640687119285, -12}));
|
||||
@@ -1385,11 +1384,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
env(amm::ammClawback(gw2, alice, EUR, USD, EUR(4000)), ter(tesSUCCESS));
|
||||
env.close();
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
USD(2500),
|
||||
STAmount(EUR, UINT64_C(5000000000000001), -12),
|
||||
IOUAmount{3535533905932738, -12}));
|
||||
USD(2500), EUR(5000), IOUAmount{3535533905932738, -12}));
|
||||
|
||||
BEAST_EXPECT(amm.expectLPTokens(gw, IOUAmount{7071067811865480, -13}));
|
||||
BEAST_EXPECT(amm.expectLPTokens(gw, IOUAmount{7071067811865475, -13}));
|
||||
BEAST_EXPECT(amm.expectLPTokens(gw2, IOUAmount{1414213562373095, -12}));
|
||||
BEAST_EXPECT(
|
||||
amm.expectLPTokens(alice, IOUAmount{1414213562373095, -12}));
|
||||
@@ -1653,7 +1650,7 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
amm.deposit(bob, USD(4000), EUR(1000));
|
||||
BEAST_EXPECT(
|
||||
amm.expectBalances(USD(12000), EUR(3000), IOUAmount(6000)));
|
||||
amm.deposit(carol, USD(2000), EUR(500));
|
||||
amm.deposit(carol, USD(2000.25), EUR(500));
|
||||
BEAST_EXPECT(
|
||||
amm.expectBalances(USD(14000), EUR(3500), IOUAmount(7000)));
|
||||
|
||||
@@ -1675,7 +1672,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(env.balance(alice, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(bob, USD) == USD(5000));
|
||||
BEAST_EXPECT(env.balance(bob, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(carol, USD) == USD(6000));
|
||||
BEAST_EXPECT(
|
||||
env.balance(carol, USD) ==
|
||||
STAmount(USD, UINT64_C(5999'999999999999), -12));
|
||||
// 250 EUR goes back to carol.
|
||||
BEAST_EXPECT(env.balance(carol, EUR) == EUR(7750));
|
||||
|
||||
@@ -1697,7 +1696,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(env.balance(bob, USD) == USD(5000));
|
||||
// 250 EUR did not go back to bob because tfClawTwoAssets is set.
|
||||
BEAST_EXPECT(env.balance(bob, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(carol, USD) == USD(6000));
|
||||
BEAST_EXPECT(
|
||||
env.balance(carol, USD) ==
|
||||
STAmount(USD, UINT64_C(5999'999999999999), -12));
|
||||
BEAST_EXPECT(env.balance(carol, EUR) == EUR(7750));
|
||||
|
||||
// gw clawback all USD from alice and set tfClawTwoAssets.
|
||||
@@ -1715,7 +1716,9 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
BEAST_EXPECT(env.balance(alice, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(bob, USD) == USD(5000));
|
||||
BEAST_EXPECT(env.balance(bob, EUR) == EUR(8000));
|
||||
BEAST_EXPECT(env.balance(carol, USD) == USD(6000));
|
||||
BEAST_EXPECT(
|
||||
env.balance(carol, USD) ==
|
||||
STAmount(USD, UINT64_C(5999'999999999999), -12));
|
||||
BEAST_EXPECT(env.balance(carol, EUR) == EUR(7750));
|
||||
}
|
||||
}
|
||||
@@ -1764,12 +1767,12 @@ class AMMClawback_test : public jtx::AMMTest
|
||||
env.close();
|
||||
|
||||
BEAST_EXPECT(amm.expectBalances(
|
||||
STAmount(USD, UINT64_C(5656854249492380), -13),
|
||||
XRP(70.710678),
|
||||
STAmount(USD, UINT64_C(565'685424949238), -12),
|
||||
XRP(70.710679),
|
||||
IOUAmount(200000)));
|
||||
BEAST_EXPECT(amm.expectLPTokens(alice, IOUAmount(0)));
|
||||
BEAST_EXPECT(expectLedgerEntryRoot(
|
||||
env, alice, aliceXrpBalance + XRP(29.289322)));
|
||||
env, alice, aliceXrpBalance + XRP(29.289321)));
|
||||
}
|
||||
|
||||
void
|
||||
|
||||
@@ -1478,7 +1478,6 @@ private:
|
||||
auto const AUD = gw["AUD"];
|
||||
env.fund(XRP(10'000), alice, bob, carol, gw);
|
||||
env(rate(gw, 1.1));
|
||||
env.close();
|
||||
env.trust(AUD(2'000), bob, carol);
|
||||
env(pay(gw, carol, AUD(51)));
|
||||
env.close();
|
||||
@@ -1888,7 +1887,7 @@ private:
|
||||
|
||||
{
|
||||
// simple IOU/IOU offer
|
||||
Env env(*this, features);
|
||||
Env env(*this, features - featureXahauGenesis - featureTouch);
|
||||
|
||||
fund(
|
||||
env,
|
||||
@@ -1899,7 +1898,6 @@ private:
|
||||
Fund::All);
|
||||
|
||||
AMM ammBob(env, bob, BTC(100), USD(150));
|
||||
env.close();
|
||||
|
||||
env(pay(alice, carol, USD(50)), path(~USD), sendmax(BTC(50)));
|
||||
|
||||
@@ -1987,11 +1985,10 @@ private:
|
||||
}
|
||||
{
|
||||
// test unfunded offers are removed when payment succeeds
|
||||
Env env(*this, features);
|
||||
Env env(*this, features - featureXahauGenesis - featureTouch);
|
||||
|
||||
env.fund(XRP(10'000), alice, carol, gw);
|
||||
env.fund(XRP(10'000), bob);
|
||||
env.close();
|
||||
env.trust(USD(1'000), alice, bob, carol);
|
||||
env.trust(BTC(1'000), alice, bob, carol);
|
||||
env.trust(EUR(1'000), alice, bob, carol);
|
||||
@@ -2374,7 +2371,7 @@ private:
|
||||
|
||||
{
|
||||
// payment via AMM
|
||||
Env env(*this, features);
|
||||
Env env(*this, features - featureXahauGenesis - featureTouch);
|
||||
|
||||
fund(
|
||||
env,
|
||||
@@ -2400,7 +2397,6 @@ private:
|
||||
GBP(1'120),
|
||||
STAmount{USD, UINT64_C(892'8571428571429), -13},
|
||||
amm.tokens()));
|
||||
|
||||
// 25% of 85.7142USD is paid in tr fee
|
||||
// 85.7142*1.25 = 107.1428USD
|
||||
BEAST_EXPECT(expectLine(
|
||||
@@ -3208,7 +3204,7 @@ private:
|
||||
testcase("RippleState Freeze");
|
||||
|
||||
using namespace test::jtx;
|
||||
Env env(*this, features);
|
||||
Env env(*this, features - featureXahauGenesis - featureTouch);
|
||||
|
||||
Account const G1{"G1"};
|
||||
Account const alice{"alice"};
|
||||
@@ -3262,8 +3258,7 @@ private:
|
||||
env(trust(G1, bob["USD"](0), tfSetFreeze));
|
||||
auto affected = env.meta()->getJson(
|
||||
JsonOptions::none)[sfAffectedNodes.fieldName];
|
||||
if (!BEAST_EXPECT(checkArraySize(
|
||||
affected, 2u + 1u))) // 2u + 1u(Issuer Account as Touch)
|
||||
if (!BEAST_EXPECT(checkArraySize(affected, 2u)))
|
||||
return;
|
||||
auto ff =
|
||||
affected[1u][sfModifiedNode.fieldName][sfFinalFields.fieldName];
|
||||
@@ -3281,11 +3276,10 @@ private:
|
||||
env(offer(bob, G1["USD"](5), XRP(25)));
|
||||
auto affected = env.meta()->getJson(
|
||||
JsonOptions::none)[sfAffectedNodes.fieldName];
|
||||
if (!BEAST_EXPECT(checkArraySize(
|
||||
affected, 4u + 1u))) // 4u + 1u(Issuer Account as Touch)
|
||||
if (!BEAST_EXPECT(checkArraySize(affected, 4u)))
|
||||
return;
|
||||
auto ff =
|
||||
affected[3u][sfModifiedNode.fieldName][sfFinalFields.fieldName];
|
||||
affected[1u][sfModifiedNode.fieldName][sfFinalFields.fieldName];
|
||||
BEAST_EXPECT(
|
||||
ff[sfHighLimit.fieldName] ==
|
||||
bob["USD"](100).value().getJson(JsonOptions::none));
|
||||
@@ -3352,8 +3346,7 @@ private:
|
||||
env(trust(G1, bob["USD"](0), tfClearFreeze));
|
||||
auto affected = env.meta()->getJson(
|
||||
JsonOptions::none)[sfAffectedNodes.fieldName];
|
||||
if (!BEAST_EXPECT(checkArraySize(
|
||||
affected, 2u + 1u))) // 2u + 1u(Issuer Account as Touch)
|
||||
if (!BEAST_EXPECT(checkArraySize(affected, 2u)))
|
||||
return;
|
||||
auto ff =
|
||||
affected[1u][sfModifiedNode.fieldName][sfFinalFields.fieldName];
|
||||
@@ -3510,7 +3503,7 @@ private:
|
||||
testcase("Offers for Frozen Trust Lines");
|
||||
|
||||
using namespace test::jtx;
|
||||
Env env(*this, features);
|
||||
Env env(*this, features - featureXahauGenesis - featureTouch);
|
||||
|
||||
Account G1{"G1"};
|
||||
Account A2{"A2"};
|
||||
@@ -3555,11 +3548,10 @@ private:
|
||||
BEAST_EXPECT(info[jss::amm][jss::asset2_frozen].asBool());
|
||||
auto affected =
|
||||
env.meta()->getJson(JsonOptions::none)[sfAffectedNodes.fieldName];
|
||||
if (!BEAST_EXPECT(checkArraySize(
|
||||
affected, 2u + 1u))) // 2u + 1u(Issuer Account as Touch)
|
||||
if (!BEAST_EXPECT(checkArraySize(affected, 2u)))
|
||||
return;
|
||||
auto ff =
|
||||
affected[0u][sfModifiedNode.fieldName][sfFinalFields.fieldName];
|
||||
affected[1u][sfModifiedNode.fieldName][sfFinalFields.fieldName];
|
||||
BEAST_EXPECT(
|
||||
ff[sfHighLimit.fieldName] ==
|
||||
G1["USD"](0).value().getJson(JsonOptions::none));
|
||||
@@ -3579,10 +3571,9 @@ private:
|
||||
env(trust(G1, A4["USD"](0), tfSetFreeze));
|
||||
affected =
|
||||
env.meta()->getJson(JsonOptions::none)[sfAffectedNodes.fieldName];
|
||||
if (!BEAST_EXPECT(checkArraySize(
|
||||
affected, 2u + 1u))) // 2u + 1u(Issuer Account as Touch)
|
||||
if (!BEAST_EXPECT(checkArraySize(affected, 2u)))
|
||||
return;
|
||||
ff = affected[1u][sfModifiedNode.fieldName][sfFinalFields.fieldName];
|
||||
ff = affected[0u][sfModifiedNode.fieldName][sfFinalFields.fieldName];
|
||||
BEAST_EXPECT(
|
||||
ff[sfLowLimit.fieldName] ==
|
||||
G1["USD"](0).value().getJson(JsonOptions::none));
|
||||
@@ -3596,7 +3587,7 @@ private:
|
||||
env.meta()->getJson(JsonOptions::none)[sfAffectedNodes.fieldName];
|
||||
if (!BEAST_EXPECT(checkArraySize(affected, 8u)))
|
||||
return;
|
||||
auto created = affected[5u][sfCreatedNode.fieldName];
|
||||
auto created = affected[0u][sfCreatedNode.fieldName];
|
||||
BEAST_EXPECT(
|
||||
created[sfNewFields.fieldName][jss::Account] == A2.human());
|
||||
env.close();
|
||||
@@ -3778,10 +3769,9 @@ private:
|
||||
auto const CNY = gw["CNY"];
|
||||
|
||||
{
|
||||
Env env(*this, features);
|
||||
Env env(*this, features - featureXahauGenesis - featureTouch);
|
||||
|
||||
env.fund(XRP(10'000), alice, bob, carol, gw);
|
||||
env.close();
|
||||
env.trust(USD(10'000), alice, bob, carol);
|
||||
|
||||
env(pay(gw, bob, USD(100)));
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user