Compare commits


14 Commits

Author  SHA1  Message  Date
RichardAH  8329d78f32  Update src/ripple/app/tx/impl/Import.cpp (Co-authored-by: tequ <git@tequ.dev>)  2025-12-21 13:42:46 +10:00
RichardAH  bf4579c1d1  Update src/ripple/app/tx/impl/Change.cpp (Co-authored-by: tequ <git@tequ.dev>)  2025-12-21 13:42:37 +10:00
RichardAH  73e099eb23  Update src/ripple/app/hook/impl/applyHook.cpp (Co-authored-by: tequ <git@tequ.dev>)  2025-12-21 13:42:29 +10:00
RichardAH  2e311b4259  Update src/ripple/app/hook/applyHook.h (Co-authored-by: tequ <git@tequ.dev>)  2025-12-21 13:42:20 +10:00
RichardAH  7c8e940091  Merge branch 'dev' into export  2025-12-19 13:27:02 +10:00
Richard Holland  9b90c50789  featureExport compiling, untested  2025-12-19 14:19:17 +11:00
Niq Dudfield  5a118a4e2b  fix(logs): formatting fixes, color handling, and debug build defaults (#607)  2025-12-17 09:45:41 +10:00
tequ  960f87857e  Self hosted macos runner (#652)  2025-12-17 09:43:25 +10:00
tequ  f731bcfeba  Increase ccache size from 10G to 100G in release-builder.sh for improved build performance (#643)  2025-12-16 14:45:45 +10:00
tequ  374b361daa  Use Self hosted runner (#639)  2025-12-16 14:16:36 +10:00
Richard Holland  a18e2cb2c6  remainder of the export feature... untested uncompiled  2025-12-14 19:04:37 +11:00
Richard Holland  be5f425122  change symbol name to xport  2025-12-14 13:27:44 +11:00
Richard Holland  fc6f4762da  export hook apis, untested  2025-12-13 15:46:08 +11:00
tequ  52ccf27aa3  Hook API Refactor1: whitelist api at Enum.h (#605)  2025-12-10 19:32:03 +10:00
38 changed files with 953 additions and 274 deletions

View File

@@ -28,6 +28,10 @@ inputs:
     description: 'Cache version for invalidation'
     required: false
     default: '1'
+  gha_cache_enabled:
+    description: 'Whether to use actions/cache (disable for self-hosted with volume mounts)'
+    required: false
+    default: 'true'
   ccache_enabled:
     description: 'Whether to use ccache'
     required: false
@@ -71,56 +75,30 @@ runs:
         SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
         echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
-    - name: Restore ccache directory for main branch
-      if: inputs.ccache_enabled == 'true'
-      id: ccache-restore
-      uses: ./.github/actions/xahau-ga-cache-restore
-      with:
-        path: ~/.ccache-main
-        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
-        restore-keys: |
-          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
-          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
-        cache-type: ccache-main
-    - name: Restore ccache directory for current branch
-      if: inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
-      id: ccache-restore-current-branch
-      uses: ./.github/actions/xahau-ga-cache-restore
-      with:
-        path: ~/.ccache-current
-        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
-        restore-keys: |
-          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
-          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
-          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
-        cache-type: ccache-current
     - name: Configure ccache
       if: inputs.ccache_enabled == 'true'
       shell: bash
       run: |
         # Create cache directories
-        mkdir -p ~/.ccache-main ~/.ccache-current
+        mkdir -p ~/.ccache-cache
         # Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
         mkdir -p ~/.config/ccache
         export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
         echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
         # Configure ccache settings AFTER cache restore (prevents stale cached config)
         ccache --set-config=max_size=${{ inputs.ccache_max_size }}
         ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
         ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
-        # Determine if we're on the main branch
-        if [ "${{ steps.safe-branch.outputs.name }}" = "${{ inputs.main_branch }}" ]; then
-          # Main branch: use main branch cache only
-          ccache --set-config=cache_dir="$HOME/.ccache-main"
-          echo "CCACHE_DIR=$HOME/.ccache-main" >> $GITHUB_ENV
-          echo "📦 Main branch: using ~/.ccache-main"
-        else
-          # Feature branch: use current branch cache with main as secondary (read-only fallback)
-          ccache --set-config=cache_dir="$HOME/.ccache-current"
-          ccache --set-config=secondary_storage="file:$HOME/.ccache-main"
-          echo "CCACHE_DIR=$HOME/.ccache-current" >> $GITHUB_ENV
-          echo "📦 Feature branch: using ~/.ccache-current with ~/.ccache-main as secondary"
-        fi
+        ccache --set-config=cache_dir="$HOME/.ccache-cache"
+        echo "CCACHE_DIR=$HOME/.ccache-cache" >> $GITHUB_ENV
+        echo "📦 using ~/.ccache-cache as ccache cache directory"
         # Print config for verification
         echo "=== ccache configuration ==="
@@ -235,17 +213,3 @@ runs:
       if: inputs.ccache_enabled == 'true'
       shell: bash
       run: ccache -s
-    - name: Save ccache directory for main branch
-      if: success() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
-      uses: actions/cache/save@v4
-      with:
-        path: ~/.ccache-main
-        key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
-    - name: Save ccache directory for current branch
-      if: success() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
-      uses: actions/cache/save@v4
-      with:
-        path: ~/.ccache-current
-        key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}

View File

@@ -17,10 +17,6 @@ inputs:
     description: 'Cache version for invalidation'
     required: false
    default: '1'
-  cache_enabled:
-    description: 'Whether to use caching'
-    required: false
-    default: 'true'
   main_branch:
     description: 'Main branch name for restore keys'
     required: false
@@ -63,18 +59,25 @@ outputs:
 runs:
   using: 'composite'
   steps:
-    - name: Restore Conan cache
-      if: inputs.cache_enabled == 'true'
-      id: cache-restore-conan
-      uses: ./.github/actions/xahau-ga-cache-restore
-      with:
-        path: ~/.conan2
-        # Note: compiler-id format is compiler-version-stdlib[-gccversion]
-        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-${{ inputs.configuration }}
-        restore-keys: |
-          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-
-          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
-        cache-type: Conan
+    - name: Configure Conan cache paths
+      if: inputs.os == 'Linux'
+      shell: bash
+      run: |
+        mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
+        echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
+        echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
+        echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
+
+    - name: Configure Conan cache paths
+      if: inputs.gha_cache_enabled == 'false'
+      shell: bash
+      # For self-hosted runners, register cache paths to be used as volumes
+      # This allows the cache to be shared between containers
+      run: |
+        mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
+        echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
+        echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
+        echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
     - name: Configure Conan
       shell: bash
@@ -150,10 +153,3 @@ runs:
           --build missing \
           --settings build_type=${{ inputs.configuration }} \
           ..
-    - name: Save Conan cache
-      if: success() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
-      uses: actions/cache/save@v4
-      with:
-        path: ~/.conan2
-        key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}

View File

@@ -33,7 +33,7 @@ jobs:
           fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
   build:
-    runs-on: [self-hosted, vanity]
+    runs-on: [self-hosted, xahaud-build]
     needs: [checkout]
     defaults:
       run:
@@ -74,7 +74,7 @@ jobs:
           fi
   tests:
-    runs-on: [self-hosted, vanity]
+    runs-on: [self-hosted, xahaud-build]
     needs: [build, checkout]
     defaults:
       run:
@@ -84,7 +84,7 @@ jobs:
         run: /bin/bash docker-unit-tests.sh
   cleanup:
-    runs-on: [self-hosted, vanity]
+    runs-on: [self-hosted, xahaud-build]
     needs: [tests, checkout]
     if: always()
     steps:

View File

@@ -20,7 +20,7 @@ jobs:
           - Ninja
         configuration:
           - Debug
-    runs-on: macos-15
+    runs-on: [self-hosted, macOS]
     env:
       build_dir: .build
       # Bump this number to invalidate all caches globally.
@@ -30,61 +30,29 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4
-      - name: Get commit message
-        id: get-commit-message
-        uses: ./.github/actions/xahau-ga-get-commit-message
-        with:
-          event-name: ${{ github.event_name }}
-          head-commit-message: ${{ github.event.head_commit.message }}
-          pr-head-sha: ${{ github.event.pull_request.head.sha }}
-      - name: Install Conan
+      - name: Add Homebrew to PATH
         run: |
-          brew install conan
-          # Verify Conan 2 is installed
-          conan --version
+          echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
+          echo "/opt/homebrew/sbin" >> "$GITHUB_PATH"
       - name: Install Coreutils
         run: |
           brew install coreutils
           echo "Num proc: $(nproc)"
-      - name: Install Ninja
-        if: matrix.generator == 'Ninja'
-        run: brew install ninja
-      - name: Install Python
-        run: |
-          if which python3 > /dev/null 2>&1; then
-            echo "Python 3 executable exists"
-            python3 --version
-          else
-            brew install python@3.12
-          fi
-          # Create 'python' symlink if it doesn't exist (for tools expecting 'python')
-          if ! which python > /dev/null 2>&1; then
-            sudo ln -sf $(which python3) /usr/local/bin/python
-          fi
-      - name: Install CMake
-        run: |
-          # Install CMake 3.x to match local dev environments
-          # With Conan 2 and the policy args passed to CMake, newer versions
-          # can have issues with dependencies that require cmake_minimum_required < 3.5
-          brew uninstall cmake --ignore-dependencies 2>/dev/null || true
-          # Download and install CMake 3.31.7 directly
-          curl -L https://github.com/Kitware/CMake/releases/download/v3.31.7/cmake-3.31.7-macos-universal.tar.gz -o cmake.tar.gz
-          tar -xzf cmake.tar.gz
-          # Move the entire CMake.app to /Applications
-          sudo mv cmake-3.31.7-macos-universal/CMake.app /Applications/
-          echo "/Applications/CMake.app/Contents/bin" >> $GITHUB_PATH
-          /Applications/CMake.app/Contents/bin/cmake --version
-      - name: Install ccache
-        run: brew install ccache
+      # To isolate environments for each Runner, instead of installing globally with brew,
+      # use mise to isolate environments for each Runner directory.
+      - name: Setup toolchain (mise)
+        uses: jdx/mise-action@v2
+        with:
+          install: true
+      - name: Install tools via mise
+        run: |
+          mise install
+          mise use cmake@3.23.1 python@3.12 pipx@latest conan@2 ninja@latest ccache@latest
+          mise reshim
+          echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"
       - name: Check environment
         run: |
@@ -98,6 +66,14 @@ jobs:
           echo "---- Full Environment ----"
           env
+      - name: Get commit message
+        id: get-commit-message
+        uses: ./.github/actions/xahau-ga-get-commit-message
+        with:
+          event-name: ${{ github.event_name }}
+          head-commit-message: ${{ github.event.head_commit.message }}
+          pr-head-sha: ${{ github.event.pull_request.head.sha }}
       - name: Detect compiler version
         id: detect-compiler
         run: |
@@ -129,6 +105,7 @@ jobs:
           cache_version: ${{ env.CACHE_VERSION }}
           main_branch: ${{ env.MAIN_BRANCH_NAME }}
           stdlib: libcxx
+          ccache_max_size: '100G'
       - name: Test
         run: |

View File

@@ -14,7 +14,7 @@ concurrency:
 jobs:
   matrix-setup:
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, generic, 20.04]
     container: python:3-slim
     outputs:
       matrix: ${{ steps.set-matrix.outputs.matrix }}
@@ -176,7 +176,15 @@ jobs:
   build:
     needs: matrix-setup
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, generic, 20.04]
+    container:
+      image: ubuntu:24.04
+      volumes:
+        - /home/runner/.conan-cache:/.conan-cache
+        - /home/runner/.ccache-cache:/github/home/.ccache-cache
+    defaults:
+      run:
+        shell: bash
     outputs:
       artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
     strategy:
@@ -191,23 +199,22 @@ jobs:
      - name: Checkout
         uses: actions/checkout@v4
-      - name: Get commit message
-        id: get-commit-message
-        uses: ./.github/actions/xahau-ga-get-commit-message
-        with:
-          event-name: ${{ github.event_name }}
-          head-commit-message: ${{ github.event.head_commit.message }}
-          pr-head-sha: ${{ github.event.pull_request.head.sha }}
       - name: Install build dependencies
         run: |
-          sudo apt-get update
-          sudo apt-get install -y ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
+          apt-get update
+          apt-get install -y software-properties-common
+          add-apt-repository ppa:ubuntu-toolchain-r/test -y
+          apt-get update
+          apt-get install -y python3 python-is-python3 pipx
+          pipx ensurepath
+          apt-get install -y cmake ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
+          apt-get install -y perl # for openssl build
+          apt-get install -y libsqlite3-dev # for xahaud build
           # Install the specific GCC version needed for Clang
           if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
             echo "=== Installing GCC ${{ matrix.clang_gcc_toolchain }} for Clang ==="
-            sudo apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev
+            apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev
             echo "=== GCC versions available after installation ==="
             ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d"
@@ -238,7 +245,7 @@ jobs:
             echo "Hiding GCC $version -> renaming to $counter (will be seen as GCC version $counter)"
             # Safety check: ensure target doesn't already exist
             if [ ! -e "/usr/lib/gcc/x86_64-linux-gnu/$counter" ]; then
-              sudo mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
+              mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
             else
               echo "ERROR: Cannot rename GCC $version - /usr/lib/gcc/x86_64-linux-gnu/$counter already exists"
               exit 1
@@ -262,11 +269,12 @@ jobs:
           # Install libc++ dev packages if using libc++ (not needed for libstdc++)
           if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
-            sudo apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
+            apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
           fi
           # Install Conan 2
-          pip install --upgrade "conan>=2.0,<3"
+          pipx install "conan>=2.0,<3"
+          echo "$HOME/.local/bin" >> $GITHUB_PATH
       - name: Check environment
         run: |
@@ -280,6 +288,14 @@ jobs:
           echo "---- Full Environment ----"
           env
+      - name: Get commit message
+        id: get-commit-message
+        uses: ./.github/actions/xahau-ga-get-commit-message
+        with:
+          event-name: ${{ github.event_name }}
+          head-commit-message: ${{ github.event.head_commit.message }}
+          pr-head-sha: ${{ github.event.pull_request.head.sha }}
       - name: Install dependencies
         uses: ./.github/actions/xahau-ga-dependencies
         with:
@@ -293,6 +309,7 @@ jobs:
           cc: ${{ matrix.cc }}
           cxx: ${{ matrix.cxx }}
          stdlib: ${{ matrix.stdlib }}
+          gha_cache_enabled: 'false' # Disable caching for self hosted runner
       - name: Build
         uses: ./.github/actions/xahau-ga-build
@@ -307,6 +324,7 @@ jobs:
           main_branch: ${{ env.MAIN_BRANCH_NAME }}
           stdlib: ${{ matrix.stdlib }}
           clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
+          ccache_max_size: '100G'
       - name: Set artifact name
         id: set-artifact-name

View File

@@ -48,13 +48,9 @@ target_sources (xrpl_core PRIVATE
   src/ripple/beast/net/impl/IPAddressV6.cpp
   src/ripple/beast/net/impl/IPEndpoint.cpp
   src/ripple/beast/utility/src/beast_Journal.cpp
-  src/ripple/beast/utility/src/beast_PropertyStream.cpp)
-
-# Conditionally add enhanced logging source when BEAST_ENHANCED_LOGGING is enabled
-if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
-  target_sources(xrpl_core PRIVATE
-    src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
-endif()
+  src/ripple/beast/utility/src/beast_PropertyStream.cpp
+  # Enhanced logging - compiles to empty when BEAST_ENHANCED_LOGGING is not defined
+  src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
 #[===============================[
     core sources
@@ -162,12 +158,16 @@ target_link_libraries (xrpl_core
     date::date
     Ripple::opts)
-# Link date-tz library when enhanced logging is enabled
-if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
-  if(TARGET date::date-tz)
-    target_link_libraries(xrpl_core PUBLIC date::date-tz)
-  endif()
+# date-tz for enhanced logging (always linked, code is #ifdef guarded)
+if(TARGET date::date-tz)
+  target_link_libraries(xrpl_core PUBLIC date::date-tz)
 endif()
+
+# BEAST_ENHANCED_LOGGING: enable for Debug builds OR when explicitly requested
+# Uses generator expression so it works with multi-config generators (Xcode, VS, Ninja Multi-Config)
+target_compile_definitions(xrpl_core PUBLIC
+    $<$<OR:$<CONFIG:Debug>,$<BOOL:${BEAST_ENHANCED_LOGGING}>>:BEAST_ENHANCED_LOGGING=1>
+)
 #[=================================[
     main/core headers installation
 #]=================================]

View File

@@ -37,20 +37,11 @@ endif() #git
 set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/")
 add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}")
-# BEAST_ENHANCED_LOGGING option - adds file:line numbers and formatting to logs
-# Default to ON for Debug builds, OFF for Release
-if(CMAKE_BUILD_TYPE STREQUAL "Debug")
-    option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" ON)
-else()
-    option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" OFF)
-endif()
-
-if(BEAST_ENHANCED_LOGGING)
-    add_definitions(-DBEAST_ENHANCED_LOGGING=1)
-    message(STATUS "Log line numbers enabled")
-else()
-    message(STATUS "Log line numbers disabled")
-endif()
+# BEAST_ENHANCED_LOGGING - adds file:line numbers and formatting to logs
+# Automatically enabled for Debug builds via generator expression
+# Can be explicitly controlled with -DBEAST_ENHANCED_LOGGING=ON/OFF
+option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages (auto: Debug=ON, Release=OFF)" OFF)
+message(STATUS "BEAST_ENHANCED_LOGGING option: ${BEAST_ENHANCED_LOGGING}")
 if(thread_safety_analysis)
   add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)

View File

@@ -192,7 +192,7 @@ ENV PATH=/usr/local/bin:$PATH
 # Configure ccache and Conan 2
 # NOTE: Using echo commands instead of heredocs because heredocs in Docker RUN commands are finnicky
-RUN /hbb_exe/activate-exec bash -c "ccache -M 10G && \
+RUN /hbb_exe/activate-exec bash -c "ccache -M 100G && \
     ccache -o cache_dir=/cache/ccache && \
     ccache -o compiler_check=content && \
     mkdir -p ~/.conan2 /cache/conan2 /cache/conan2_download /cache/conan2_sources && \

View File

@@ -350,7 +350,10 @@ enum hook_return_code : int64_t {
     MEM_OVERLAP = -43,  // one or more specified buffers are the same memory
     TOO_MANY_STATE_MODIFICATIONS = -44,  // more than 5000 modified state
                                          // entires in the combined hook chains
-    TOO_MANY_NAMESPACES = -45
+    TOO_MANY_NAMESPACES = -45,
+    EXPORT_FAILURE = -46,
+    TOO_MANY_EXPORTED_TXN = -47,
 };
 enum ExitType : uint8_t {
@@ -364,93 +367,121 @@ const uint16_t max_state_modifications = 256;
 const uint8_t max_slots = 255;
 const uint8_t max_nonce = 255;
 const uint8_t max_emit = 255;
+const uint8_t max_export = 4;
 const uint8_t max_params = 16;
 const double fee_base_multiplier = 1.1f;
+
+#define I32 0x7FU
+#define I64 0x7EU
+#define HOOK_WRAP_PARAMS(...) __VA_ARGS__
+#define HOOK_API_DEFINITION(RETURN_TYPE, FUNCTION_NAME, PARAMS_TUPLE) \
+    {                                                                 \
+        #FUNCTION_NAME,                                               \
+        {                                                             \
+            RETURN_TYPE, HOOK_WRAP_PARAMS PARAMS_TUPLE                \
+        }                                                             \
+    }
+
+using APIWhitelist = std::map<std::string, std::vector<uint8_t>>;
+
 // RH NOTE: Find descriptions of api functions in ./impl/applyHook.cpp and
 // hookapi.h (include for hooks) this is a map of the api name to its return
 // code (vec[0] and its parameters vec[>0]) as wasm type codes
-static const std::map<std::string, std::vector<uint8_t>> import_whitelist{
-    {"_g", {0x7FU, 0x7FU, 0x7FU}},
-    {"accept", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
-    {"rollback", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
-    {"util_raddr", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"util_accid", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"util_verify", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"util_sha512h", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"util_keylet",
-     {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"sto_validate", {0x7EU, 0x7FU, 0x7FU}},
-    {"sto_subfield", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"sto_subarray", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"sto_emplace", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"sto_erase", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"etxn_burden", {0x7EU}},
-    {"etxn_details", {0x7EU, 0x7FU, 0x7FU}},
-    {"etxn_fee_base", {0x7EU, 0x7FU, 0x7FU}},
-    {"etxn_reserve", {0x7EU, 0x7FU}},
-    {"etxn_generation", {0x7EU}},
-    {"etxn_nonce", {0x7EU, 0x7FU, 0x7FU}},
-    {"emit", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"float_set", {0x7EU, 0x7FU, 0x7EU}},
-    {"float_multiply", {0x7EU, 0x7EU, 0x7EU}},
-    {"float_mulratio", {0x7EU, 0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"float_negate", {0x7EU, 0x7EU}},
-    {"float_compare", {0x7EU, 0x7EU, 0x7EU, 0x7FU}},
-    {"float_sum", {0x7EU, 0x7EU, 0x7EU}},
-    {"float_sto",
-     {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7EU, 0x7FU}},
-    {"float_sto_set", {0x7EU, 0x7FU, 0x7FU}},
-    {"float_invert", {0x7EU, 0x7EU}},
-    {"float_divide", {0x7EU, 0x7EU, 0x7EU}},
-    {"float_one", {0x7EU}},
-    {"float_mantissa", {0x7EU, 0x7EU}},
-    {"float_sign", {0x7EU, 0x7EU}},
-    {"float_int", {0x7EU, 0x7EU, 0x7FU, 0x7FU}},
-    {"float_log", {0x7EU, 0x7EU}},
-    {"float_root", {0x7EU, 0x7EU, 0x7FU}},
-    {"fee_base", {0x7EU}},
-    {"ledger_seq", {0x7EU}},
-    {"ledger_last_time", {0x7EU}},
-    {"ledger_last_hash", {0x7EU, 0x7FU, 0x7FU}},
-    {"ledger_nonce", {0x7EU, 0x7FU, 0x7FU}},
-    {"ledger_keylet", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"hook_account", {0x7EU, 0x7FU, 0x7FU}},
-    {"hook_hash", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"hook_param_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"hook_param", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"hook_again", {0x7EU}},
-    {"hook_skip", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"hook_pos", {0x7EU}},
-    {"slot", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"slot_clear", {0x7EU, 0x7FU}},
-    {"slot_count", {0x7EU, 0x7FU}},
-    {"slot_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"slot_size", {0x7EU, 0x7FU}},
-    {"slot_subarray", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"slot_subfield", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"slot_type", {0x7EU, 0x7FU, 0x7FU}},
-    {"slot_float", {0x7EU, 0x7FU}},
-    {"state_set", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"state_foreign_set",
-     {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"state", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"state_foreign",
-     {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"trace", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"trace_num", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
-    {"trace_float", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
-    {"otxn_burden", {0x7EU}},
-    {"otxn_field", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"otxn_generation", {0x7EU}},
-    {"otxn_id", {0x7EU, 0x7FU, 0x7FU, 0x7FU}},
-    {"otxn_type", {0x7EU}},
-    {"otxn_slot", {0x7EU, 0x7FU}},
-    {"otxn_param", {0x7EU, 0x7FU, 0x7FU, 0x7FU, 0x7FU}},
-    {"meta_slot", {0x7EU, 0x7FU}}};
+static const APIWhitelist import_whitelist{
+    // clang-format off
+    HOOK_API_DEFINITION(I32, _g, (I32, I32)),
+    HOOK_API_DEFINITION(I64, accept, (I32, I32, I64)),
+    HOOK_API_DEFINITION(I64, rollback, (I32, I32, I64)),
+    HOOK_API_DEFINITION(I64, util_raddr, (I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, util_accid, (I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, util_verify, (I32, I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, util_sha512h, (I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, util_keylet, (I32, I32, I32, I32, I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, sto_validate, (I32, I32)),
+    HOOK_API_DEFINITION(I64, sto_subfield, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, sto_subarray, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, sto_emplace, (I32, I32, I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, sto_erase, (I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, etxn_burden, ()),
+    HOOK_API_DEFINITION(I64, etxn_details, (I32, I32)),
+    HOOK_API_DEFINITION(I64, etxn_fee_base, (I32, I32)),
+    HOOK_API_DEFINITION(I64, etxn_reserve, (I32)),
+    HOOK_API_DEFINITION(I64, etxn_generation, ()),
+    HOOK_API_DEFINITION(I64, etxn_nonce, (I32, I32)),
+    HOOK_API_DEFINITION(I64, emit, (I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, float_set, (I32, I64)),
+    HOOK_API_DEFINITION(I64, float_multiply, (I64, I64)),
+    HOOK_API_DEFINITION(I64, float_mulratio, (I64, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, float_negate, (I64)),
+    HOOK_API_DEFINITION(I64, float_compare, (I64, I64, I32)),
+    HOOK_API_DEFINITION(I64, float_sum, (I64, I64)),
+    HOOK_API_DEFINITION(I64, float_sto, (I32, I32, I32, I32, I32, I32, I64, I32)),
+    HOOK_API_DEFINITION(I64, float_sto_set, (I32, I32)),
+    HOOK_API_DEFINITION(I64, float_invert, (I64)),
+    HOOK_API_DEFINITION(I64, float_divide, (I64, I64)),
+    HOOK_API_DEFINITION(I64, float_one, ()),
+    HOOK_API_DEFINITION(I64, float_mantissa, (I64)),
+    HOOK_API_DEFINITION(I64, float_sign, (I64)),
+    HOOK_API_DEFINITION(I64, float_int, (I64, I32, I32)),
+    HOOK_API_DEFINITION(I64, float_log, (I64)),
+    HOOK_API_DEFINITION(I64, float_root, (I64, I32)),
+    HOOK_API_DEFINITION(I64, fee_base, ()),
+    HOOK_API_DEFINITION(I64, ledger_seq, ()),
+    HOOK_API_DEFINITION(I64, ledger_last_time, ()),
+    HOOK_API_DEFINITION(I64, ledger_last_hash, (I32, I32)),
+    HOOK_API_DEFINITION(I64, ledger_nonce, (I32, I32)),
+    HOOK_API_DEFINITION(I64, ledger_keylet, (I32, I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, hook_account, (I32, I32)),
+    HOOK_API_DEFINITION(I64, hook_hash, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, hook_param_set, (I32, I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, hook_param, (I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, hook_again, ()),
+    HOOK_API_DEFINITION(I64, hook_skip, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, hook_pos, ()),
+    HOOK_API_DEFINITION(I64, slot, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, slot_clear, (I32)),
+    HOOK_API_DEFINITION(I64, slot_count, (I32)),
+    HOOK_API_DEFINITION(I64, slot_set, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, slot_size, (I32)),
+    HOOK_API_DEFINITION(I64, slot_subarray, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, slot_subfield, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, slot_type, (I32, I32)),
+    HOOK_API_DEFINITION(I64, slot_float, (I32)),
+    HOOK_API_DEFINITION(I64, state_set, (I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, state_foreign_set, (I32, I32, I32, I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, state, (I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, state_foreign, (I32, I32, I32, I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, trace, (I32, I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, trace_num, (I32, I32, I64)),
+    HOOK_API_DEFINITION(I64, trace_float, (I32, I32, I64)),
+    HOOK_API_DEFINITION(I64, otxn_burden, ()),
+    HOOK_API_DEFINITION(I64, otxn_field, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, otxn_generation, ()),
+    HOOK_API_DEFINITION(I64, otxn_id, (I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, otxn_type, ()),
+    HOOK_API_DEFINITION(I64, otxn_slot, (I32)),
+    HOOK_API_DEFINITION(I64, otxn_param, (I32, I32, I32, I32)),
+    HOOK_API_DEFINITION(I64, meta_slot, (I32)),
+    // clang-format on
+};
 // featureHooks1
-static const std::map<std::string, std::vector<uint8_t>> import_whitelist_1{
-    {"xpop_slot", {0x7EU, 0x7FU, 0x7FU}}};
+static const APIWhitelist import_whitelist_1{
+    // clang-format off
+    HOOK_API_DEFINITION(I64, xpop_slot, (I32, I32)),
+    // clang-format on
+};
+
+static const APIWhitelist import_whitelist_2{
+    // clang-format off
+    HOOK_API_DEFINITION(I64, xport, (I32, I32)),
+    HOOK_API_DEFINITION(I64, xport_reserve, (I32)),
+    // clang-format on
+};
+
+#undef HOOK_API_DEFINITION
+#undef I32
+#undef I64
 };  // namespace hook_api
 #endif
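
The refactor is mechanical: I64 and I32 are the wasm type codes 0x7EU and 0x7FU, and each HOOK_API_DEFINITION(return, name, params) entry expands to exactly the name/type-vector pair it replaces. A minimal sketch of the expansion (the demo map name is illustrative, not part of the diff):

    #include <cstdint>
    #include <map>
    #include <string>
    #include <vector>

    #define I32 0x7FU
    #define I64 0x7EU
    #define HOOK_WRAP_PARAMS(...) __VA_ARGS__
    // #FUNCTION_NAME stringizes the identifier; HOOK_WRAP_PARAMS strips the
    // parentheses from PARAMS_TUPLE so its elements splice into the vector.
    #define HOOK_API_DEFINITION(RETURN_TYPE, FUNCTION_NAME, PARAMS_TUPLE) \
        { #FUNCTION_NAME, { RETURN_TYPE, HOOK_WRAP_PARAMS PARAMS_TUPLE } }

    static const std::map<std::string, std::vector<uint8_t>> demo_whitelist{
        HOOK_API_DEFINITION(I64, accept, (I32, I32, I64)),
        // ...expands to the old-style entry:
        //     {"accept", {0x7EU, 0x7FU, 0x7FU, 0x7EU}},
    };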

View File

@@ -1034,6 +1034,12 @@ validateGuards(
         {
             // PASS, this is a version 1 api
         }
+        else if (rulesVersion & 0x04U &&
+                 hook_api::import_whitelist_2.find(import_name) !=
+                     hook_api::import_whitelist_2.end())
+        {
+            // PASS, this is an export api
+        }
         else
         {
             GUARDLOG(hook::log::IMPORT_ILLEGAL)
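
rulesVersion is the amendment bitmask assembled at the call sites shown later in this diff (SetHook::validateHookSetEntry and Change::activateXahauGenesis): bit 0 for featureHooksUpdate1 (import_whitelist_1), bit 1 for fix20250131, and now bit 2 for featureExport (import_whitelist_2); the standalone guard checker accordingly moves from 3 to 7 to accept all three. A small sketch of the composition (the helper name is hypothetical):

    #include <cstdint>

    // Mirrors the call sites in SetHook.cpp / Change.cpp shown below.
    std::uint64_t
    makeRulesVersion(bool hooksUpdate1, bool fix20250131, bool exportEnabled)
    {
        return (hooksUpdate1 ? 1 : 0)    // bit 0: allow import_whitelist_1 (xpop_slot)
             + (fix20250131 ? 2 : 0)     // bit 1: post-fix API behaviour
             + (exportEnabled ? 4 : 0);  // bit 2: allow import_whitelist_2 (xport APIs)
    }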

View File

@@ -406,6 +406,17 @@ DECLARE_HOOK_FUNCTION(
     uint32_t slot_no_tx,
     uint32_t slot_no_meta);
+DECLARE_HOOK_FUNCTION(
+    int64_t,
+    xport,
+    uint32_t write_ptr,
+    uint32_t write_len,
+    uint32_t read_ptr,
+    uint32_t read_len);
+DECLARE_HOOK_FUNCTION(
+    int64_t,
+    xport_reserve,
+    uint32_t count);
 /*
 DECLARE_HOOK_FUNCTION(int64_t, str_find, uint32_t hread_ptr,
     uint32_t hread_len, uint32_t nread_ptr, uint32_t nread_len, uint32_t mode,
@@ -485,6 +496,8 @@ struct HookResult
     std::queue<std::shared_ptr<ripple::Transaction>>
         emittedTxn{};  // etx stored here until accept/rollback
+    std::queue<std::shared_ptr<ripple::Transaction>>
+        exportedTxn{};
     HookStateMap& stateMap;
     uint16_t changedStateCount = 0;
     std::map<
@@ -541,6 +554,7 @@ struct HookContext
     uint16_t ledger_nonce_counter{0};
     int64_t expected_etxn_count{-1};  // make this a 64bit int so the uint32
                                       // from the hookapi cant overflow it
+    int64_t expected_export_count{-1};
     std::map<ripple::uint256, bool> nonce_used{};
     uint32_t generation =
         0;  // used for caching, only generated when txn_generation is called
@@ -877,6 +891,9 @@ public:
         ADD_HOOK_FUNCTION(meta_slot, ctx);
         ADD_HOOK_FUNCTION(xpop_slot, ctx);
+        ADD_HOOK_FUNCTION(xport, ctx);
+        ADD_HOOK_FUNCTION(xport_reserve, ctx);
         /*
         ADD_HOOK_FUNCTION(str_find, ctx);
         ADD_HOOK_FUNCTION(str_replace, ctx);

View File

@@ -79,7 +79,7 @@ main(int argc, char** argv)
     close(fd);
-    auto result = validateGuards(hook, std::cout, "", 3);
+    auto result = validateGuards(hook, std::cout, "", 7);
     if (!result)
     {

View File

@@ -1971,6 +1971,8 @@ hook::finalizeHookResult(
     // directory) if we are allowed to
     std::vector<std::pair<uint256 /* txnid */, uint256 /* emit nonce */>>
         emission_txnid;
+    std::vector<uint256 /* txnid */>
+        exported_txnid;
     if (doEmit)
     {
@@ -2026,6 +2028,58 @@ hook::finalizeHookResult(
                 }
             }
         }
+
+        DBG_PRINTF("exported txn count: %d\n", hookResult.exportedTxn.size());
+        for (; hookResult.exportedTxn.size() > 0; hookResult.exportedTxn.pop())
+        {
+            auto& tpTrans = hookResult.exportedTxn.front();
+            auto& id = tpTrans->getID();
+
+            JLOG(j.trace()) << "HookExport[" << HR_ACC() << "]: " << id;
+
+            // exported txns must be marked bad by the hash router to ensure under
+            // no circumstances they will enter consensus on *this* chain.
+            applyCtx.app.getHashRouter().setFlags(id, SF_BAD);
+
+            std::shared_ptr<const ripple::STTx> ptr =
+                tpTrans->getSTransaction();
+
+            auto exportedId = keylet::exportedTxn(id);
+
+            auto sleExported = applyCtx.view().peek(exportedId);
+            if (!sleExported)
+            {
+                exported_txnid.emplace_back(id);
+
+                sleExported = std::make_shared<SLE>(exportedId);
+                // RH TODO: add a new constructor to STObject to avoid this
+                // serder thing
+                ripple::Serializer s;
+                ptr->add(s);
+                SerialIter sit(s.slice());
+
+                sleExported->emplace_back(ripple::STObject(sit, sfExportedTxn));
+
+                auto page = applyCtx.view().dirInsert(
+                    keylet::exportedDir(), exportedId, [&](SLE::ref sle) {
+                        (*sle)[sfFlags] = lsfEmittedDir;
+                    });
+
+                if (page)
+                {
+                    (*sleExported)[sfOwnerNode] = *page;
+                    applyCtx.view().insert(sleExported);
+                }
+                else
+                {
+                    JLOG(j.warn())
+                        << "HookError[" << HR_ACC() << "]: "
+                        << "Export Directory full when trying to insert "
+                        << id;
+                    return tecDIR_FULL;
+                }
+            }
+        }
     }
 
     bool const fixV2 = applyCtx.view().rules().enabled(fixXahauV2);
@@ -2052,6 +2106,12 @@ hook::finalizeHookResult(
             meta.setFieldU16(
                 sfHookEmitCount,
                 emission_txnid.size());  // this will never wrap, hard limit
+            if (applyCtx.view().rules().enabled(featureExport))
+            {
+                meta.setFieldU16(
+                    sfHookExportCount,
+                    exported_txnid.size());
+            }
             meta.setFieldU16(sfHookExecutionIndex, exec_index);
             meta.setFieldU16(sfHookStateChangeCount, hookResult.changedStateCount);
             meta.setFieldH256(sfHookHash, hookResult.hookHash);
@@ -3888,6 +3948,27 @@ DEFINE_HOOK_FUNCTION(int64_t, etxn_reserve, uint32_t count)
     HOOK_TEARDOWN();
 }
+DEFINE_HOOK_FUNCTION(int64_t, xport_reserve, uint32_t count)
+{
+    HOOK_SETUP();  // populates memory_ctx, memory, memory_length, applyCtx,
+                   // hookCtx on current stack
+
+    if (hookCtx.expected_export_count > -1)
+        return ALREADY_SET;
+
+    if (count < 1)
+        return TOO_SMALL;
+
+    if (count > hook_api::max_export)
+        return TOO_BIG;
+
+    hookCtx.expected_export_count = count;
+
+    return count;
+
+    HOOK_TEARDOWN();
+}
+
 // Compute the burden of an emitted transaction based on a number of factors
 DEFINE_HOOK_FUNCNARG(int64_t, etxn_burden)
 {
@@ -6156,6 +6237,92 @@ DEFINE_HOOK_FUNCTION(
     HOOK_TEARDOWN();
 }
+DEFINE_HOOK_FUNCTION(
+    int64_t,
+    xport,
+    uint32_t write_ptr,
+    uint32_t write_len,
+    uint32_t read_ptr,
+    uint32_t read_len)
+{
+    HOOK_SETUP();
+
+    if (NOT_IN_BOUNDS(read_ptr, read_len, memory_length))
+        return OUT_OF_BOUNDS;
+
+    if (NOT_IN_BOUNDS(write_ptr, write_len, memory_length))
+        return OUT_OF_BOUNDS;
+
+    if (write_len < 32)
+        return TOO_SMALL;
+
+    auto& app = hookCtx.applyCtx.app;
+
+    if (hookCtx.expected_export_count < 0)
+        return PREREQUISITE_NOT_MET;
+
+    if (hookCtx.result.exportedTxn.size() >= hookCtx.expected_export_count)
+        return TOO_MANY_EXPORTED_TXN;
+
+    ripple::Blob blob{memory + read_ptr, memory + read_ptr + read_len};
+
+    std::shared_ptr<STTx const> stpTrans;
+    try
+    {
+        stpTrans = std::make_shared<STTx const>(
+            SerialIter{memory + read_ptr, read_len});
+    }
+    catch (std::exception& e)
+    {
+        JLOG(j.trace()) << "HookExport[" << HC_ACC() << "]: Failed " << e.what()
+                        << "\n";
+        return EXPORT_FAILURE;
+    }
+
+    if (!stpTrans->isFieldPresent(sfAccount) ||
+        stpTrans->getAccountID(sfAccount) != hookCtx.result.account)
+    {
+        JLOG(j.trace()) << "HookExport[" << HC_ACC()
+                        << "]: Attempted to export a txn that's not for this Hook's Account ID.";
+        return EXPORT_FAILURE;
+    }
+
+    std::string reason;
+    auto tpTrans = std::make_shared<Transaction>(stpTrans, reason, app);
+
+    // RHTODO: is this needed or wise? VVV
+    if (tpTrans->getStatus() != NEW)
+    {
+        JLOG(j.trace()) << "HookExport[" << HC_ACC()
+                        << "]: tpTrans->getStatus() != NEW";
+        return EXPORT_FAILURE;
+    }
+
+    auto const& txID = tpTrans->getID();
+
+    if (txID.size() > write_len)
+        return TOO_SMALL;
+
+    if (NOT_IN_BOUNDS(write_ptr, txID.size(), memory_length))
+        return OUT_OF_BOUNDS;
+
+    auto const write_txid = [&]() -> int64_t {
+        WRITE_WASM_MEMORY_AND_RETURN(
+            write_ptr,
+            txID.size(),
+            txID.data(),
+            txID.size(),
+            memory,
+            memory_length);
+    };
+
+    int64_t result = write_txid();
+    if (result == 32)
+        hookCtx.result.exportedTxn.push(tpTrans);
+
+    return result;
+
+    HOOK_TEARDOWN();
+}
+
 /*
 DEFINE_HOOK_FUNCTION(
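
From the hook's side, the new pair mirrors etxn_reserve/emit: xport_reserve declares how many exports will follow (hard-capped at max_export = 4), and each xport call passes a fully serialized transaction for the hook's own account, receiving the 32-byte transaction ID in return. A hedged sketch of a caller, written against the usual hookapi.h conventions (buffer contents and sizes are illustrative):

    #include "hookapi.h"

    int64_t hook(uint32_t reserved)
    {
        // 1. Reserve an export slot first, or xport returns PREREQUISITE_NOT_MET.
        if (xport_reserve(1) != 1)
            rollback(SBUF("export: reserve failed"), 1);

        // 2. tx_blob must hold a fully serialized txn whose sfAccount is this
        //    hook's account; anything else returns EXPORT_FAILURE.
        uint8_t tx_blob[1024];
        uint32_t tx_len = 0; /* build and serialize the txn here (illustrative) */

        // 3. On success the host writes the 32-byte txid and queues the txn;
        //    like emitted txns, it is finalized on accept/rollback.
        uint8_t txid[32];
        if (xport(txid, 32, tx_blob, tx_len) != 32)
            rollback(SBUF("export: failed"), 2);

        accept(SBUF("export: queued"), 0);
        return 0;
    }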

View File

@@ -599,6 +599,13 @@ public:
         return validatorKeys_.publicKey;
     }
 
+    ValidatorKeys const&
+    getValidatorKeys() const override
+    {
+        return validatorKeys_;
+    }
+
     NetworkOPs&
     getOPs() override
     {

View File

@@ -240,7 +240,8 @@ public:
     virtual PublicKey const&
     getValidationPublicKey() const = 0;
 
+    virtual ValidatorKeys const&
+    getValidatorKeys() const = 0;
+
     virtual Resource::Manager&
     getResourceManager() = 0;
 
     virtual PathRequests&

View File

@@ -27,6 +27,8 @@
 #include <ripple/protocol/Feature.h>
 #include <ripple/protocol/jss.h>
 #include <ripple/protocol/st.h>
+#include <ripple/app/misc/ValidatorKeys.h>
+#include <ripple/protocol/Sign.h>
 #include <algorithm>
 #include <limits>
 #include <numeric>
@@ -1539,6 +1541,247 @@ TxQ::accept(Application& app, OpenView& view)
         }
     }
 
+    // Inject exported transactions/signatures, if any
+    if (view.rules().enabled(featureExport))
+    {
+        do
+        {
+            // if we're not a validator we do nothing here
+            if (app.getValidationPublicKey().empty())
+                break;
+
+            auto const& keys = app.getValidatorKeys();
+            if (keys.configInvalid())
+                break;
+
+            // and if we're not on the UNLReport we also do nothing
+            auto const unlRep = view.read(keylet::UNLReport());
+            if (!unlRep || !unlRep->isFieldPresent(sfActiveValidators))
+            {
+                // nothing to do without a unlreport object
+                break;
+            }
+
+            bool found = false;
+            auto const& avs = unlRep->getFieldArray(sfActiveValidators);
+            for (auto const& av : avs)
+            {
+                if (PublicKey(av[sfPublicKey]) == keys.masterPublicKey)
+                {
+                    found = true;
+                    break;
+                }
+            }
+
+            if (!found)
+                break;
+
+            // execution to here means we're a validator and on the UNLReport
+            AccountID signingAcc = calcAccountID(keys.publicKey);
+
+            Keylet const exportedDirKeylet{keylet::exportedDir()};
+            if (dirIsEmpty(view, exportedDirKeylet))
+                break;
+
+            std::shared_ptr<SLE const> sleDirNode{};
+            unsigned int uDirEntry{0};
+            uint256 dirEntry{beast::zero};
+
+            if (!cdirFirst(
+                    view,
+                    exportedDirKeylet.key,
+                    sleDirNode,
+                    uDirEntry,
+                    dirEntry))
+                break;
+
+            do
+            {
+                Keylet const itemKeylet{ltCHILD, dirEntry};
+                auto sleItem = view.read(itemKeylet);
+
+                if (!sleItem)
+                {
+                    // Directory node has an invalid index. Bail out.
+                    JLOG(j_.warn())
+                        << "ExportedTxn processing: directory node in ledger "
+                        << view.seq()
+                        << " has index to object that is missing: "
+                        << to_string(dirEntry);
+                    // RH TODO: if this ever happens the entry should be
+                    // gracefully removed (somehow)
+                    continue;
+                }
+
+                LedgerEntryType const nodeType{
+                    safe_cast<LedgerEntryType>((*sleItem)[sfLedgerEntryType])};
+
+                if (nodeType != ltEXPORTED_TXN)
+                {
+                    JLOG(j_.warn())
+                        << "ExportedTxn processing: emitted directory contained "
+                           "non ltEMITTED_TXN type";
+                    // RH TODO: if this ever happens the entry should be
+                    // gracefully removed (somehow)
+                    continue;
+                }
+
+                JLOG(j_.info()) << "Processing exported txn: " << *sleItem;
+
+                auto const& exported =
+                    const_cast<ripple::STLedgerEntry&>(*sleItem)
+                        .getField(sfExportedTxn)
+                        .downcast<STObject>();
+
+                auto const& txnHash = sleItem->getFieldH256(sfTransactionHash);
+
+                auto exportedLgrSeq = exported.getFieldU32(sfLedgerSequence);
+                auto const seq = view.seq();
+
+                if (exportedLgrSeq == seq)
+                {
+                    // this shouldn't happen, but do nothing
+                    continue;
+                }
+
+                if (exportedLgrSeq < seq - 1)
+                {
+                    // all old entries need to be turned into Export transactions so they can be removed
+                    // from the directory
+                    // in the previous ledger all the ExportSign transactions were executed, and one-by-one
+                    // added the validators' signatures to the ltEXPORTED_TXN's sfSigners array.
+                    // now we need to collect these together and place them inside the ExportedTxn blob
+                    // and publish the blob in the Export transaction type.
+
+                    STArray signers = sleItem->getFieldArray(sfSigners);
+
+                    auto s = std::make_shared<ripple::Serializer>();
+                    exported.add(*s);
+                    SerialIter sitTrans(s->slice());
+                    try
+                    {
+                        auto stpTrans =
+                            std::make_shared<STTx>(std::ref(sitTrans));
+                        if (!stpTrans->isFieldPresent(sfAccount) ||
+                            stpTrans->getAccountID(sfAccount) == beast::zero)
+                        {
+                            JLOG(j_.warn()) << "Hook: Export failure: "
+                                            << "sfAccount missing or zero.";
+                            // RH TODO: if this ever happens the entry should be
+                            // gracefully removed (somehow)
+                            continue;
+                        }
+
+                        // RH TODO: should we force remove signingpubkey here?
+                        stpTrans->setFieldArray(sfSigners, signers);
+
+                        Blob const& blob = stpTrans->getSerializer().peekData();
+
+                        STTx exportTx(ttEXPORT, [&](auto& obj) {
+                            obj.setFieldVL(sfExportedTxn, blob);
+                            obj.setFieldU32(sfLedgerSequence, seq);
+                            obj.setFieldH256(sfTransactionHash, txnHash);
+                            obj.setFieldArray(sfSigners, signers);
+                        });
+
+                        // submit to the ledger
+                        {
+                            uint256 txID = exportTx.getTransactionID();
+                            auto s = std::make_shared<ripple::Serializer>();
+                            exportTx.add(*s);
+                            app.getHashRouter().setFlags(txID, SF_PRIVATE2);
+                            app.getHashRouter().setFlags(txID, SF_EMITTED);
+                            view.rawTxInsert(txID, std::move(s), nullptr);
+                            ledgerChanged = true;
+                        }
+                    }
+                    catch (std::exception& e)
+                    {
+                        JLOG(j_.warn())
+                            << "ExportedTxn Processing: Failure: " << e.what()
+                            << "\n";
+                    }
+                    continue;
+                }
+
+                // this ledger is the one after the exported txn was added to the directory
+                // so generate the export sign txns
+                auto s = std::make_shared<ripple::Serializer>();
+                exported.add(*s);
+                SerialIter sitTrans(s->slice());
+
+                try
+                {
+                    auto const& stpTrans =
+                        std::make_shared<STTx const>(std::ref(sitTrans));
+                    if (!stpTrans->isFieldPresent(sfAccount) ||
+                        stpTrans->getAccountID(sfAccount) == beast::zero)
+                    {
+                        JLOG(j_.warn()) << "Hook: Export failure: "
+                                        << "sfAccount missing or zero.";
+                        // RH TODO: if this ever happens the entry should be
+                        // gracefully removed (somehow)
+                        continue;
+                    }
+
+                    auto seq = view.info().seq;
+                    auto txnHash = stpTrans->getTransactionID();
+
+                    Serializer s =
+                        buildMultiSigningData(*stpTrans, signingAcc);
+
+                    auto multisig = ripple::sign(keys.publicKey, keys.secretKey, s.slice());
+
+                    STTx exportSignTx(ttEXPORT_SIGN, [&](auto& obj) {
+                        obj.set(([&]() {
+                            auto inner = std::make_unique<STObject>(sfSigner);
+                            inner->setFieldVL(sfSigningPubKey, keys.publicKey);
+                            inner->setAccountID(sfAccount, signingAcc);
+                            inner->setFieldVL(sfTxnSignature, multisig);
+                            return inner;
+                        })());
+                        obj.setFieldU32(sfLedgerSequence, seq);
+                        obj.setFieldH256(sfTransactionHash, txnHash);
+                    });
+
+                    // submit to the ledger
+                    {
+                        uint256 txID = exportSignTx.getTransactionID();
+                        auto s = std::make_shared<ripple::Serializer>();
+                        exportSignTx.add(*s);
+                        app.getHashRouter().setFlags(txID, SF_PRIVATE2);
+                        app.getHashRouter().setFlags(txID, SF_EMITTED);
+                        view.rawTxInsert(txID, std::move(s), nullptr);
+                        ledgerChanged = true;
+                    }
+                }
+                catch (std::exception& e)
+                {
+                    JLOG(j_.warn())
+                        << "ExportedTxn Processing: Failure: " << e.what()
+                        << "\n";
+                }
+            } while (cdirNext(
+                view, exportedDirKeylet.key, sleDirNode, uDirEntry, dirEntry));
+        } while (0);
+    }
+
     // Inject emitted transactions if any
     if (view.rules().enabled(featureHooks))
         do
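
The injection logic is a three-ledger pipeline keyed off the sfLedgerSequence stored in the ltEXPORTED_TXN entry. A simplified dispatch sketch (hypothetical helper; the real code above additionally gates on validator keys and UNL-Report membership):

    #include <cstdint>

    enum class ExportPhase {
        Created,  // entry written this ledger: do nothing yet
        Sign,     // next ledger: each listed validator injects a ttEXPORT_SIGN
                  // pseudo-txn carrying its multisignature
        Publish   // later ledgers: collected sfSigners are bundled with the
                  // blob into a single ttEXPORT pseudo-txn, which also cleans
                  // the entry out of the directory
    };

    ExportPhase
    phaseFor(std::uint32_t exportedLgrSeq, std::uint32_t currentSeq)
    {
        if (exportedLgrSeq == currentSeq)
            return ExportPhase::Created;
        if (exportedLgrSeq < currentSeq - 1)
            return ExportPhase::Publish;
        return ExportPhase::Sign;  // exportedLgrSeq == currentSeq - 1
    }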

View File

@@ -96,6 +96,13 @@ Change::preflight(PreflightContext const& ctx)
         }
     }
 
+    if ((ctx.tx.getTxnType() == ttEXPORT_SIGN || ctx.tx.getTxnType() == ttEXPORT) &&
+        !ctx.rules.enabled(featureExport))
+    {
+        JLOG(ctx.j.warn()) << "Change: Export not enabled";
+        return temDISABLED;
+    }
+
     return tesSUCCESS;
 }
@@ -154,6 +161,8 @@ Change::preclaim(PreclaimContext const& ctx)
         case ttAMENDMENT:
         case ttUNL_MODIFY:
         case ttEMIT_FAILURE:
+        case ttEXPORT:
+        case ttEXPORT_SIGN:
            return tesSUCCESS;
         case ttUNL_REPORT: {
             if (!ctx.tx.isFieldPresent(sfImportVLKey) ||
@@ -209,6 +218,11 @@ Change::doApply()
             return applyEmitFailure();
         case ttUNL_REPORT:
             return applyUNLReport();
+        case ttEXPORT:
+            return applyExport();
+        case ttEXPORT_SIGN:
+            return applyExportSign();
         default:
             assert(0);
             return tefFAILURE;
@@ -606,7 +620,8 @@ Change::activateXahauGenesis()
         loggerStream,
         "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
         (ctx_.view().rules().enabled(featureHooksUpdate1) ? 1 : 0) +
-            (ctx_.view().rules().enabled(fix20250131) ? 2 : 0));
+            (ctx_.view().rules().enabled(fix20250131) ? 2 : 0) +
+            (ctx_.view().rules().enabled(featureExport) ? 4 : 0));
     if (!result)
     {
@@ -1072,6 +1087,80 @@ Change::applyEmitFailure()
     return tesSUCCESS;
 }
+TER
+Change::applyExport()
+{
+    uint256 txnID(ctx_.tx.getFieldH256(sfTransactionHash));
+    do
+    {
+        JLOG(j_.info()) << "HookExport[" << txnID
+                        << "]: ttExport exporting transaction";
+
+        auto key = keylet::exportedTxn(txnID);
+        auto const& sle = view().peek(key);
+
+        if (!sle)
+        {
+            // most likely explanation is that this was somehow a double-up, so just ignore
+            JLOG(j_.warn())
+                << "HookError[" << txnID << "]: ttExport could not find exported txn in ledger";
+            break;
+        }
+
+        if (!view().dirRemove(
+                keylet::exportedDir(),
+                sle->getFieldU64(sfOwnerNode),
+                key,
+                false))
+        {
+            JLOG(j_.fatal()) << "HookError[" << txnID
+                             << "]: ttExport (Change) tefBAD_LEDGER";
+            return tefBAD_LEDGER;
+        }
+
+        view().erase(sle);
+    } while (0);
+
+    return tesSUCCESS;
+}
+
+TER
+Change::applyExportSign()
+{
+    uint256 txnID(ctx_.tx.getFieldH256(sfTransactionHash));
+    do
+    {
+        JLOG(j_.info()) << "HookExport[" << txnID
+                        << "]: ttExportSign adding signature to transaction";
+
+        auto key = keylet::exportedTxn(txnID);
+        auto const& sle = view().peek(key);
+
+        if (!sle)
+        {
+            // most likely explanation is that this was somehow a double-up, so just ignore
+            JLOG(j_.warn())
+                << "HookError[" << txnID << "]: ttExportSign could not find exported txn in ledger";
+            break;
+        }
+
+        // grab the signer object off the txn
+        STObject signerObj = const_cast<ripple::STTx&>(ctx_.tx)
+                                 .getField(sfSigner)
+                                 .downcast<STObject>();
+
+        // append it to the signers field in the ledger object
+        STArray signers = sle->getFieldArray(sfSigners);
+        signers.push_back(signerObj);
+        sle->setFieldArray(sfSigners, signers);
+
+        // done
+        view().update(sle);
+    } while (0);
+
+    return tesSUCCESS;
+}
+
 TER
 Change::applyUNLModify()
 {

View File

@@ -74,6 +74,12 @@ private:
     TER
     applyEmitFailure();
 
+    TER
+    applyExport();
+
+    TER
+    applyExportSign();
+
     TER
     applyUNLReport();
 };

View File

@@ -37,9 +37,12 @@
 #include <charconv>
 #include <iostream>
 #include <vector>
+#include <ripple/app/hook/applyHook.h>
 namespace ripple {
 
+static const uint256 shadowTicketNamespace = uint256::fromVoid("RESERVED NAMESPACE SHADOW TICKET");
+
 TxConsequences
 Import::makeTxConsequences(PreflightContext const& ctx)
 {
@@ -197,7 +200,7 @@ Import::preflight(PreflightContext const& ctx)
     if (!stpTrans || !meta)
         return temMALFORMED;
-    if (stpTrans->isFieldPresent(sfTicketSequence))
+    if (stpTrans->isFieldPresent(sfTicketSequence) && !ctx.rules.enabled(featureExport))
     {
         JLOG(ctx.j.warn()) << "Import: cannot use TicketSequence XPOP.";
         return temMALFORMED;
@@ -888,6 +891,26 @@ Import::preclaim(PreclaimContext const& ctx)
         return tefINTERNAL;
     }
 
+    bool const hasTicket = stpTrans->isFieldPresent(sfTicketSequence);
+    if (hasTicket)
+    {
+        if (!ctx.view.rules().enabled(featureExport))
+            return tefINTERNAL;
+
+        auto const acc = stpTrans->getAccountID(sfAccount);
+        uint256 const seq = uint256(stpTrans->getFieldU32(sfTicketSequence));
+
+        // check if there is a shadow ticket, and if not we won't allow
+        // the txn to pass into consensus
+        if (!ctx.view.exists(keylet::hookState(acc, seq, shadowTicketNamespace)))
+        {
+            JLOG(ctx.j.warn()) << "Import: attempted to import a txn without shadow ticket.";
+            return telSHADOW_TICKET_REQUIRED;  // tel code to avoid consensus/forward without SF_BAD
+        }
+    }
+
     auto const& sle = ctx.view.read(keylet::account(ctx.tx[sfAccount]));
     auto const tt = stpTrans->getTxnType();
@@ -928,13 +951,17 @@ Import::preclaim(PreclaimContext const& ctx)
         } while (0);
     }
 
-    if (sle && sle->isFieldPresent(sfImportSequence))
+    if (!hasTicket)
     {
-        uint32_t sleImportSequence = sle->getFieldU32(sfImportSequence);
-
-        // replay attempt
-        if (sleImportSequence >= stpTrans->getFieldU32(sfSequence))
-            return tefPAST_IMPORT_SEQ;
+        if (sle && sle->isFieldPresent(sfImportSequence))
+        {
+            uint32_t sleImportSequence = sle->getFieldU32(sfImportSequence);
+
+            // replay attempt
+            if (sleImportSequence >= stpTrans->getFieldU32(sfSequence))
+                return tefPAST_IMPORT_SEQ;
+        }
     }
 
     // when importing for the first time the fee must be zero
@@ -1242,7 +1269,11 @@ Import::doApply()
     auto const id = ctx_.tx[sfAccount];
     auto sle = view().peek(keylet::account(id));
 
-    if (sle && sle->getFieldU32(sfImportSequence) >= importSequence)
+    std::optional<uint256> ticket;
+    if (stpTrans->isFieldPresent(sfTicketSequence))
+        ticket = uint256(stpTrans->getFieldU32(sfTicketSequence));
+
+    if (sle && !ticket.has_value() && sle->getFieldU32(sfImportSequence) >= importSequence)
     {
         // make double sure import seq hasn't passed
         JLOG(ctx_.journal.warn()) << "Import: ImportSequence passed";
@@ -1335,8 +1366,24 @@ Import::doApply()
         }
     }
 
-    sle->setFieldU32(sfImportSequence, importSequence);
+    if (!ticket.has_value())
+        sle->setFieldU32(sfImportSequence, importSequence);
+
     sle->setFieldAmount(sfBalance, finalBal);
+
+    if (ticket.has_value())
+    {
+        auto sleTicket = view().peek(keylet::hookState(id, *ticket, shadowTicketNamespace));
+        if (!sleTicket)
+            return tefINTERNAL;
+
+        TER result = hook::setHookState(ctx_, id, shadowTicketNamespace, *ticket, {});
+
+        if (result != tesSUCCESS)
+            return result;
+
+        // RHUPTO: ticketseq billing?
+    }
+
     if (create)
     {
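
Ticketed imports replace the monotonic sfImportSequence replay guard with a one-shot "shadow ticket": a hook-state entry under a reserved namespace that must exist at preclaim and is consumed (setHookState with an empty blob) in doApply, so the same ticketed XPOP can never apply twice. A sketch of the guard, using the names from the diff (the helper itself is hypothetical):

    // Replay protection for ticketed imports (mirrors the diff above).
    // - preclaim: the shadow ticket must exist or the txn is rejected with a
    //   tel code, so it is never forwarded into consensus.
    // - doApply: consuming the import deletes the shadow ticket, making any
    //   replay fail this check.
    bool
    shadowTicketExists(
        ReadView const& view,
        AccountID const& acc,
        std::uint32_t ticketSeq)
    {
        uint256 const seq{ticketSeq};  // ticket sequence widened to a state key
        return view.exists(keylet::hookState(acc, seq, shadowTicketNamespace));
    }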

View File

@@ -491,7 +491,8 @@ SetHook::validateHookSetEntry(SetHookCtx& ctx, STObject const& hookSetObj)
                 logger,
                 hsacc,
                 (ctx.rules.enabled(featureHooksUpdate1) ? 1 : 0) +
-                    (ctx.rules.enabled(fix20250131) ? 2 : 0));
+                    (ctx.rules.enabled(fix20250131) ? 2 : 0) +
+                    (ctx.rules.enabled(featureExport) ? 4 : 0));
             if (ctx.j.trace())
             {

View File

@@ -374,6 +374,8 @@ invoke_calculateBaseFee(ReadView const& view, STTx const& tx)
         case ttUNL_MODIFY:
         case ttUNL_REPORT:
         case ttEMIT_FAILURE:
+        case ttEXPORT_SIGN:
+        case ttEXPORT:
            return Change::calculateBaseFee(view, tx);
         case ttNFTOKEN_MINT:
             return NFTokenMint::calculateBaseFee(view, tx);
@@ -544,6 +546,8 @@ invoke_apply(ApplyContext& ctx)
         case ttFEE:
         case ttUNL_MODIFY:
         case ttUNL_REPORT:
+        case ttEXPORT:
+        case ttEXPORT_SIGN:
         case ttEMIT_FAILURE: {
             Change p(ctx);
             return p();

View File

@@ -360,7 +360,8 @@ Logs::format(
     if (!partition.empty())
     {
 #ifdef BEAST_ENHANCED_LOGGING
-        output += beast::detail::get_log_highlight_color();
+        if (beast::detail::should_log_use_colors())
+            output += beast::detail::get_log_highlight_color();
 #endif
         output += partition + ":";
     }
@@ -392,7 +393,8 @@ Logs::format(
     }
 #ifdef BEAST_ENHANCED_LOGGING
-    output += "\033[0m";
+    if (beast::detail::should_log_use_colors())
+        output += "\033[0m";
 #endif
     output += message;

View File

@@ -41,6 +41,14 @@ get_log_highlight_color();
constexpr const char*
strip_source_root(const char* file)
{
+    // Handle relative paths from build/ directory (common with ccache)
+    // e.g., "../src/ripple/..." -> "ripple/..."
+    if (file && file[0] == '.' && file[1] == '.' && file[2] == '/' &&
+        file[3] == 's' && file[4] == 'r' && file[5] == 'c' && file[6] == '/')
+    {
+        return file + 7;  // skip "../src/"
+    }
+
#ifdef SOURCE_ROOT_PATH
    constexpr const char* sourceRoot = SOURCE_ROOT_PATH;
    constexpr auto strlen_constexpr = [](const char* s) constexpr
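The new early return handles compiler-reported paths like "../src/ripple/..." that appear when compiling from a build/ directory, before the SOURCE_ROOT_PATH logic runs. The same check written as a generic constexpr prefix strip, for illustration only (the committed code spells the comparison out character by character):

```cpp
// Generic constexpr prefix check equivalent to the char-by-char test above.
constexpr bool
starts_with(const char* s, const char* prefix)
{
    while (*prefix)
        if (*s++ != *prefix++)
            return false;
    return true;
}

constexpr const char*
strip_build_relative(const char* file)
{
    // "../src/ripple/x.cpp" -> "ripple/x.cpp"; anything else passes through
    return (file && starts_with(file, "../src/")) ? file + 7 : file;
}

static_assert(*strip_build_relative("../src/ripple/x.cpp") == 'r');
```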

View File

@@ -17,6 +17,8 @@
*/
//==============================================================================

+#ifdef BEAST_ENHANCED_LOGGING
+
#include <ripple/beast/utility/EnhancedLogging.h>
#include <cstdlib>
#include <cstring>
@@ -112,3 +114,5 @@ log_write_location_string(std::ostream& os, const char* file, int line)
} // namespace detail
} // namespace beast
+
+#endif  // BEAST_ENHANCED_LOGGING

View File

@@ -155,14 +155,43 @@ Journal::ScopedStream::~ScopedStream()
#ifdef BEAST_ENHANCED_LOGGING
    // Add suffix if location is enabled
-   if (file_ && detail::should_show_location() && !s.empty() && s != "\n")
+   if (file_ && detail::should_show_location() && !s.empty())
    {
-       std::ostringstream combined;
-       combined << s;
-       if (!s.empty() && s.back() != ' ')
-           combined << " ";
-       detail::log_write_location_string(combined, file_, line_);
-       s = combined.str();
+       // Single optimized scan from the end
+       size_t const lastNonWhitespace = s.find_last_not_of(" \n\r\t");
+
+       // Skip if message is only whitespace (e.g., just "\n" or " \n\n")
+       if (lastNonWhitespace != std::string::npos)
+       {
+           // Count only the trailing newlines (tiny range)
+           size_t trailingNewlines = 0;
+           for (size_t i = lastNonWhitespace + 1; i < s.length(); ++i)
+           {
+               if (s[i] == '\n')
+                   ++trailingNewlines;
+           }
+
+           // Build location string once
+           std::ostringstream locStream;
+           detail::log_write_location_string(locStream, file_, line_);
+           std::string const location = locStream.str();
+
+           // Pre-allocate exact size → zero reallocations
+           size_t const finalSize = lastNonWhitespace + 1 + 1 +
+               location.length() + trailingNewlines;
+
+           std::string result;
+           result.reserve(finalSize);
+
+           // Direct string ops (no ostringstream overhead)
+           result.append(s, 0, lastNonWhitespace + 1);
+           result.push_back(' ');
+           result += location;
+           if (trailingNewlines > 0)
+               result.append(trailingNewlines, '\n');
+
+           s = std::move(result);  // Move, no copy
+       }
    }
#endif
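The rewrite splices the location suffix in before any trailing newlines instead of appending after them, skips whitespace-only messages, and pre-sizes the result to avoid reallocation. The transformation in isolation, with the location string passed in directly (assertions show the intended behavior):

```cpp
#include <cassert>
#include <string>

// Model of the splice above: insert " <location>" after the last
// non-whitespace character, preserving any trailing newlines.
std::string
appendLocation(std::string const& s, std::string const& location)
{
    size_t const last = s.find_last_not_of(" \n\r\t");
    if (last == std::string::npos)
        return s;  // whitespace-only message: leave untouched

    size_t newlines = 0;
    for (size_t i = last + 1; i < s.size(); ++i)
        if (s[i] == '\n')
            ++newlines;

    std::string out;
    out.reserve(last + 2 + location.size() + newlines);
    out.append(s, 0, last + 1);
    out.push_back(' ');
    out += location;
    out.append(newlines, '\n');
    return out;
}

int main()
{
    assert(appendLocation("open ledger\n", "[Ledger.cpp:42]") ==
           "open ledger [Ledger.cpp:42]\n");
    assert(appendLocation("\n", "[x:1]") == "\n");  // old code special-cased this
}
```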

View File

@@ -74,7 +74,7 @@ namespace detail {
// Feature.cpp. Because it's only used to reserve storage, and determine how
// large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than
// the actual number of amendments. A LogicError on startup will verify this.
-static constexpr std::size_t numFeatures = 90;
+static constexpr std::size_t numFeatures = 91;

/** Amendments that this server supports and the default voting behavior.
    Whether they are enabled depends on the Rules defined in the validated
@@ -378,6 +378,7 @@ extern uint256 const fixInvalidTxFlags;
extern uint256 const featureExtendedHookState;
extern uint256 const fixCronStacking;
extern uint256 const fixHookAPI20251128;
+extern uint256 const featureExport;

} // namespace ripple
#endif

View File

@@ -56,9 +56,15 @@ namespace keylet {
Keylet const&
emittedDir() noexcept;

+Keylet const&
+exportedDir() noexcept;
+
Keylet
emittedTxn(uint256 const& id) noexcept;

+Keylet
+exportedTxn(uint256 const& id) noexcept;
+
Keylet
hookDefinition(uint256 const& hash) noexcept;

View File

@@ -260,6 +260,8 @@ enum LedgerEntryType : std::uint16_t
        \sa keylet::emitted
    */
    ltEMITTED_TXN = 'E',
+
+   ltEXPORTED_TXN = 0x4578,  // Ex (exported transaction)
};

// clang-format off
@@ -318,7 +320,8 @@ enum LedgerSpecificFlags {
    // ltDIR_NODE
    lsfNFTokenBuyOffers = 0x00000001,
    lsfNFTokenSellOffers = 0x00000002,
    lsfEmittedDir = 0x00000004,
+   lsfExportedDir = 0x00000008,

    // ltNFTOKEN_OFFER
    lsfSellNFToken = 0x00000001,

View File

@@ -355,6 +355,7 @@ extern SF_UINT16 const sfHookEmitCount;
extern SF_UINT16 const sfHookExecutionIndex;
extern SF_UINT16 const sfHookApiVersion;
extern SF_UINT16 const sfHookStateScale;
+extern SF_UINT16 const sfHookExportCount;

// 32-bit integers (common)
extern SF_UINT32 const sfNetworkID;
@@ -595,6 +596,7 @@ extern SField const sfSigner;
extern SField const sfMajority;
extern SField const sfDisabledValidator;
extern SField const sfEmittedTxn;
+extern SField const sfExportedTxn;
extern SField const sfHookExecution;
extern SField const sfHookDefinition;
extern SField const sfHookParameter;

View File

@@ -67,6 +67,7 @@ enum TELcodes : TERUnderlyingType {
    telNON_LOCAL_EMITTED_TXN,
    telIMPORT_VL_KEY_NOT_RECOGNISED,
    telCAN_NOT_QUEUE_IMPORT,
+   telSHADOW_TICKET_REQUIRED,
};

//------------------------------------------------------------------------------

View File

@@ -149,6 +149,12 @@ enum TxType : std::uint16_t
    ttURITOKEN_CREATE_SELL_OFFER = 48,
    ttURITOKEN_CANCEL_SELL_OFFER = 49,

+   /* A pseudo-txn containing an exported transaction plus signatures from the validators */
+   ttEXPORT = 90,
+
+   /* A pseudo-txn containing a validator's signature for an export transaction */
+   ttEXPORT_SIGN = 91,
+
    /* A pseudo-txn alarm signal for invoking a hook, emitted by validators after alarm set conditions are met */
    ttCRON = 92,

View File

@@ -484,6 +484,7 @@ REGISTER_FIX (fixInvalidTxFlags, Supported::yes, VoteBehavior::De
REGISTER_FEATURE(ExtendedHookState,  Supported::yes, VoteBehavior::DefaultNo);
REGISTER_FIX    (fixCronStacking,    Supported::yes, VoteBehavior::DefaultYes);
REGISTER_FIX    (fixHookAPI20251128, Supported::yes, VoteBehavior::DefaultYes);
+REGISTER_FEATURE(Export,             Supported::yes, VoteBehavior::DefaultNo);

// The following amendments are obsolete, but must remain supported
// because they could potentially get enabled.
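Once registered with VoteBehavior::DefaultNo, the amendment stays off until voted in, and code paths consult the active Rules before using any of the new machinery. The usual gate, shown as a hypothetical helper (in this branch the Export pseudo-txns actually route through the Change transactor, per the dispatch hunks above):

```cpp
// Hypothetical helper illustrating the standard amendment gate; sketch only.
NotTEC
checkExportEnabled(PreflightContext const& ctx)
{
    if (!ctx.rules.enabled(featureExport))
        return temDISABLED;  // rejected until the amendment activates
    return tesSUCCESS;
}
```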

View File

@@ -66,6 +66,8 @@ enum class LedgerNameSpace : std::uint16_t {
    HOOK_DEFINITION = 'D',
    EMITTED_TXN = 'E',
    EMITTED_DIR = 'F',
+   EXPORTED_TXN = 0x4578,  // Ex
+   EXPORTED_DIR = 0x4564,  // Ed
    NFTOKEN_OFFER = 'q',
    NFTOKEN_BUY_OFFERS = 'h',
    NFTOKEN_SELL_OFFERS = 'i',
@@ -147,6 +149,14 @@ emittedDir() noexcept
    return ret;
}

+Keylet const&
+exportedDir() noexcept
+{
+    static Keylet const ret{
+        ltDIR_NODE, indexHash(LedgerNameSpace::EXPORTED_DIR)};
+    return ret;
+}
+
Keylet
hookStateDir(AccountID const& id, uint256 const& ns) noexcept
{
@@ -159,6 +169,12 @@ emittedTxn(uint256 const& id) noexcept
    return {ltEMITTED_TXN, indexHash(LedgerNameSpace::EMITTED_TXN, id)};
}

+Keylet
+exportedTxn(uint256 const& id) noexcept
+{
+    return {ltEXPORTED_TXN, indexHash(LedgerNameSpace::EXPORTED_TXN, id)};
+}
+
Keylet
hook(AccountID const& id) noexcept
{
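Lookup mirrors the emitted-txn pattern: a namespaced hash yields the keylet, which is then read from a view. A hedged usage sketch, with `view` (any ReadView) and `txid` assumed in scope:

```cpp
// Sketch: reading an exported-txn entry and the shared directory.
auto const k = keylet::exportedTxn(txid);
if (auto const sle = view.read(k))
{
    if (sle->isFieldPresent(sfExportedTxn))  // soeOPTIONAL in the ledger format
    {
        auto const& inner = sle->getFieldObject(sfExportedTxn);
        (void)inner;
    }
    auto const seq = sle->getFieldU32(sfLedgerSequence);
    (void)seq;
}

// All exported txns are indexed under one well-known directory:
auto const dir = keylet::exportedDir();
```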

View File

@@ -380,6 +380,15 @@ LedgerFormats::LedgerFormats()
            {sfPreviousTxnLgrSeq, soeREQUIRED}
        },
        commonFields);

+   add(jss::ExportedTxn,
+       ltEXPORTED_TXN,
+       {
+           {sfExportedTxn, soeOPTIONAL},
+           {sfOwnerNode, soeREQUIRED},
+           {sfLedgerSequence, soeREQUIRED},
+       },
+       commonFields);
+
    // clang-format on
}

View File

@@ -103,6 +103,7 @@ CONSTRUCT_TYPED_SFIELD(sfHookEmitCount, "HookEmitCount", UINT16,
CONSTRUCT_TYPED_SFIELD(sfHookExecutionIndex, "HookExecutionIndex", UINT16, 19);
CONSTRUCT_TYPED_SFIELD(sfHookApiVersion,     "HookApiVersion",     UINT16, 20);
CONSTRUCT_TYPED_SFIELD(sfHookStateScale,     "HookStateScale",     UINT16, 21);
+CONSTRUCT_TYPED_SFIELD(sfHookExportCount,    "HookExportCount",    UINT16, 22);

// 32-bit integers (common)
CONSTRUCT_TYPED_SFIELD(sfNetworkID,          "NetworkID",          UINT32, 1);
@@ -361,6 +362,7 @@ CONSTRUCT_UNTYPED_SFIELD(sfImportVLKey, "ImportVLKey", OBJECT,
CONSTRUCT_UNTYPED_SFIELD(sfHookEmission, "HookEmission", OBJECT, 93);
CONSTRUCT_UNTYPED_SFIELD(sfMintURIToken, "MintURIToken", OBJECT, 92);
CONSTRUCT_UNTYPED_SFIELD(sfAmountEntry,  "AmountEntry",  OBJECT, 91);
+CONSTRUCT_UNTYPED_SFIELD(sfExportedTxn,  "ExportedTxn",  OBJECT, 90);

// array of objects
// ARRAY/1 is reserved for end of array

View File

@@ -141,6 +141,7 @@ transResults()
        MAKE_ERROR(telNON_LOCAL_EMITTED_TXN,        "Emitted transaction cannot be applied because it was not generated locally."),
        MAKE_ERROR(telIMPORT_VL_KEY_NOT_RECOGNISED, "Import vl key was not recognized."),
        MAKE_ERROR(telCAN_NOT_QUEUE_IMPORT,         "Import transaction was not able to be directly applied and cannot be queued."),
+       MAKE_ERROR(telSHADOW_TICKET_REQUIRED,       "The imported transaction uses a TicketSequence but no shadow ticket exists."),
        MAKE_ERROR(temMALFORMED,                    "Malformed transaction."),
        MAKE_ERROR(temBAD_AMOUNT,                   "Can only send positive amounts."),
        MAKE_ERROR(temBAD_CURRENCY,                 "Malformed: Bad currency."),

View File

@@ -490,6 +490,26 @@ TxFormats::TxFormats()
            {sfStartTime, soeOPTIONAL},
        },
        commonFields);

+   add(jss::ExportSign,
+       ttEXPORT_SIGN,
+       {
+           {sfSigner, soeREQUIRED},
+           {sfLedgerSequence, soeREQUIRED},
+           {sfTransactionHash, soeREQUIRED},
+       },
+       commonFields);
+
+   add(jss::Export,
+       ttEXPORT,
+       {
+           {sfTransactionHash, soeREQUIRED},
+           {sfExportedTxn, soeREQUIRED},
+           {sfSigners, soeREQUIRED},
+           {sfLedgerSequence, soeREQUIRED},
+       },
+       commonFields);
}

TxFormats const&
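Every field in both new formats is soeREQUIRED, so format validation guarantees presence before any transactor logic runs, and reads can be unconditional. A hedged sketch of pulling an Export pseudo-txn apart (function name hypothetical):

```cpp
// Reading the new pseudo-txn's fields (sketch; runs after format validation,
// so the soeREQUIRED fields are guaranteed present).
void
inspectExport(STTx const& tx)
{
    assert(tx.getTxnType() == ttEXPORT);
    uint256 const hash = tx.getFieldH256(sfTransactionHash);   // exported txn's hash
    std::uint32_t const seq = tx.getFieldU32(sfLedgerSequence);
    auto const& inner = tx.getFieldObject(sfExportedTxn);      // the wrapped txn
    auto const& signers = tx.getFieldArray(sfSigners);         // validator signatures
    (void)hash; (void)seq; (void)inner; (void)signers;
}
```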

View File

@@ -140,6 +140,9 @@ JSS(HookState); // ledger type.
JSS(HookStateData);  // field.
JSS(HookStateKey);   // field.
JSS(EmittedTxn);     // ledger type.
+JSS(ExportedTxn);    // ledger type.
+JSS(Export);         // transaction type.
+JSS(ExportSign);     // transaction type.
JSS(SignerList);     // ledger type.
JSS(SignerListSet);  // transaction type.
JSS(SigningPubKey);  // field.