Mirror of https://github.com/Xahau/xahaud.git (synced 2026-01-11 18:25:16 +00:00)
Compare commits: export...ccache-100
4 commits
| Author | SHA1 | Date |
|---|---|---|
| | 9b1ac4b7b3 | |
| | 10cf8d8a01 | |
| | 693b42a530 | |
| | 6e78104e3d | |
.github/actions/xahau-ga-build/action.yml (vendored, 63 changes)
@@ -75,17 +75,37 @@ runs:
          SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
          echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Restore ccache directory for main branch
      if: inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true'
      id: ccache-restore
      uses: ./.github/actions/xahau-ga-cache-restore
      with:
        path: ~/.ccache-main
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
        restore-keys: |
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
        cache-type: ccache-main

    - name: Restore ccache directory for current branch
      if: inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
      id: ccache-restore-current-branch
      uses: ./.github/actions/xahau-ga-cache-restore
      with:
        path: ~/.ccache-current
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
        restore-keys: |
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
        cache-type: ccache-current

    - name: Configure ccache
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: |
        # Create cache directories
        mkdir -p ~/.ccache-cache

        # Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
        mkdir -p ~/.config/ccache
        export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
        echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
        mkdir -p ~/.ccache-main ~/.ccache-current

        # Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
        mkdir -p ~/.config/ccache
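A note on the `SAFE_BRANCH` sanitisation at the top of this hunk: `tr -c` translates every character *not* in the allowed set, and that includes the trailing newline `echo` emits, so sanitised names pick up a trailing `-`. A quick local check (a sketch; the branch names are hypothetical):

```bash
# Sketch: what the SAFE_BRANCH sanitisation produces for a few refs.
# Note that tr -c also translates echo's trailing newline into '-'.
for ref in feature/foo fix@2024 release-1.2; do
  printf '%s -> %s\n' "$ref" "$(echo "$ref" | tr -c 'a-zA-Z0-9_.-' '-')"
done
# feature/foo -> feature-foo-
# fix@2024    -> fix-2024-
# release-1.2 -> release-1.2-
```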
@@ -96,9 +116,20 @@ runs:
        ccache --set-config=max_size=${{ inputs.ccache_max_size }}
        ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
        ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
        ccache --set-config=cache_dir="$HOME/.ccache-cache"
        echo "CCACHE_DIR=$HOME/.ccache-cache" >> $GITHUB_ENV
        echo "📦 using ~/.ccache-cache as ccache cache directory"

        # Determine if we're on the main branch
        if [ "${{ steps.safe-branch.outputs.name }}" = "${{ inputs.main_branch }}" ]; then
          # Main branch: use main branch cache only
          ccache --set-config=cache_dir="$HOME/.ccache-main"
          echo "CCACHE_DIR=$HOME/.ccache-main" >> $GITHUB_ENV
          echo "📦 Main branch: using ~/.ccache-main"
        else
          # Feature branch: use current branch cache with main as secondary (read-only fallback)
          ccache --set-config=cache_dir="$HOME/.ccache-current"
          ccache --set-config=secondary_storage="file:$HOME/.ccache-main"
          echo "CCACHE_DIR=$HOME/.ccache-current" >> $GITHUB_ENV
          echo "📦 Feature branch: using ~/.ccache-current with ~/.ccache-main as secondary"
        fi

        # Print config for verification
        echo "=== ccache configuration ==="

@@ -213,3 +244,17 @@ runs:
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: ccache -s

    - name: Save ccache directory for main branch
      if: success() && inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
      uses: actions/cache/save@v4
      with:
        path: ~/.ccache-main
        key: ${{ steps.ccache-restore.outputs.cache-primary-key }}

    - name: Save ccache directory for current branch
      if: success() && inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
      uses: actions/cache/save@v4
      with:
        path: ~/.ccache-current
        key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}
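The two-tier layout configured above can be reproduced locally when debugging cache behaviour outside CI. A minimal sketch, assuming ccache 4.x (where `secondary_storage` and the `file:` backend exist); note the comment in the diff calls the fallback "read-only", but ccache's file backend is read-write unless a `|read-only` attribute is appended, which is worth verifying against the ccache version in use:

```bash
# Sketch: branch cache as primary, main-branch cache as fallback.
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
mkdir -p ~/.config/ccache ~/.ccache-main ~/.ccache-current

ccache --set-config=cache_dir="$HOME/.ccache-current"
ccache --set-config=secondary_storage="file:$HOME/.ccache-main"

ccache -z                            # zero statistics
ccache gcc -c example.c -o example.o # example.c is a placeholder source file
ccache -s                            # inspect primary/secondary hit counters
```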
.github/actions/xahau-ga-dependencies/action.yml (vendored, 31 changes)
@@ -17,6 +17,10 @@ inputs:
    description: 'Cache version for invalidation'
    required: false
    default: '1'
  gha_cache_enabled:
    description: 'Whether to use actions/cache (disable for self-hosted with volume mounts)'
    required: false
    default: 'true'
  main_branch:
    description: 'Main branch name for restore keys'
    required: false

@@ -59,14 +63,18 @@ outputs:
runs:
  using: 'composite'
  steps:
    - name: Configure Conan cache paths
      if: inputs.os == 'Linux'
      shell: bash
      run: |
        mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
        echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
        echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
        echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
    - name: Restore Conan cache
      if: inputs.gha_cache_enabled == 'true'
      id: cache-restore-conan
      uses: ./.github/actions/xahau-ga-cache-restore
      with:
        path: ~/.conan2
        # Note: compiler-id format is compiler-version-stdlib[-gccversion]
        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-${{ inputs.configuration }}
        restore-keys: |
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
        cache-type: Conan

    - name: Configure Conan cache paths
      if: inputs.gha_cache_enabled == 'false'

@@ -153,3 +161,10 @@ runs:
          --build missing \
          --settings build_type=${{ inputs.configuration }} \
          ..

    - name: Save Conan cache
      if: success() && inputs.gha_cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
      uses: actions/cache/save@v4
      with:
        path: ~/.conan2
        key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
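The `global.conf` redirection above can be sanity-checked on a runner with Conan 2's own commands. A sketch, assuming Conan 2.x is on PATH and the default home of `~/.conan2`:

```bash
# Sketch: confirm Conan 2 is reading the redirected cache locations.
conan config home                       # prints the Conan home directory
cat "$(conan config home)/global.conf"  # should list the /.conan-cache paths
ls /.conan-cache                        # conan2, conan2_download, conan2_sources
```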
.github/workflows/xahau-ga-macos.yml (vendored, 71 changes)
@@ -20,7 +20,7 @@ jobs:
          - Ninja
        configuration:
          - Debug
    runs-on: [self-hosted, macOS]
    runs-on: macos-15
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.

@@ -30,29 +30,61 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Add Homebrew to PATH
      - name: Get commit message
        id: get-commit-message
        uses: ./.github/actions/xahau-ga-get-commit-message
        with:
          event-name: ${{ github.event_name }}
          head-commit-message: ${{ github.event.head_commit.message }}
          pr-head-sha: ${{ github.event.pull_request.head.sha }}

      - name: Install Conan
        run: |
          echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
          echo "/opt/homebrew/sbin" >> "$GITHUB_PATH"
          brew install conan
          # Verify Conan 2 is installed
          conan --version

      - name: Install Coreutils
        run: |
          brew install coreutils
          echo "Num proc: $(nproc)"

      # To isolate environments for each Runner, instead of installing globally with brew,
      # use mise to isolate environments for each Runner directory.
      - name: Setup toolchain (mise)
        uses: jdx/mise-action@v2
        with:
          install: true
      - name: Install Ninja
        if: matrix.generator == 'Ninja'
        run: brew install ninja

      - name: Install tools via mise
      - name: Install Python
        run: |
          mise install
          mise use cmake@3.23.1 python@3.12 pipx@latest conan@2 ninja@latest ccache@latest
          mise reshim
          echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"
          if which python3 > /dev/null 2>&1; then
            echo "Python 3 executable exists"
            python3 --version
          else
            brew install python@3.12
          fi
          # Create 'python' symlink if it doesn't exist (for tools expecting 'python')
          if ! which python > /dev/null 2>&1; then
            sudo ln -sf $(which python3) /usr/local/bin/python
          fi

      - name: Install CMake
        run: |
          # Install CMake 3.x to match local dev environments
          # With Conan 2 and the policy args passed to CMake, newer versions
          # can have issues with dependencies that require cmake_minimum_required < 3.5
          brew uninstall cmake --ignore-dependencies 2>/dev/null || true

          # Download and install CMake 3.31.7 directly
          curl -L https://github.com/Kitware/CMake/releases/download/v3.31.7/cmake-3.31.7-macos-universal.tar.gz -o cmake.tar.gz
          tar -xzf cmake.tar.gz

          # Move the entire CMake.app to /Applications
          sudo mv cmake-3.31.7-macos-universal/CMake.app /Applications/

          echo "/Applications/CMake.app/Contents/bin" >> $GITHUB_PATH
          /Applications/CMake.app/Contents/bin/cmake --version

      - name: Install ccache
        run: brew install ccache

      - name: Check environment
        run: |

@@ -66,14 +98,6 @@ jobs:
          echo "---- Full Environment ----"
          env

      - name: Get commit message
        id: get-commit-message
        uses: ./.github/actions/xahau-ga-get-commit-message
        with:
          event-name: ${{ github.event_name }}
          head-commit-message: ${{ github.event.head_commit.message }}
          pr-head-sha: ${{ github.event.pull_request.head.sha }}

      - name: Detect compiler version
        id: detect-compiler
        run: |

@@ -105,7 +129,6 @@ jobs:
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: libcxx
          ccache_max_size: '100G'

      - name: Test
        run: |
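Because this workflow mixes Homebrew installs, mise shims, and a directly unpacked CMake.app, it can be worth asserting which copy of each tool actually wins on PATH when a build misbehaves. A sketch (tool names taken from the steps above; no particular versions asserted):

```bash
# Sketch: print which binary each build tool resolves to on the runner.
for t in cmake python3 conan ninja ccache; do
  printf '%-8s -> %s\n' "$t" "$(command -v "$t" || echo 'not found')"
done
cmake --version | head -1
conan --version
```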
.github/workflows/xahau-ga-nix.yml (vendored, 4 changes)
@@ -181,7 +181,8 @@ jobs:
      image: ubuntu:24.04
      volumes:
        - /home/runner/.conan-cache:/.conan-cache
        - /home/runner/.ccache-cache:/github/home/.ccache-cache
        - /home/runner/.ccache-main:/github/home/.ccache-main
        - /home/runner/.ccache-current:/github/home/.ccache-current
    defaults:
      run:
        shell: bash

@@ -324,6 +325,7 @@ jobs:
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: ${{ matrix.stdlib }}
          clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
          gha_cache_enabled: 'false' # Disable caching for self hosted runner
          ccache_max_size: '100G'

      - name: Set artifact name
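The container volume mounts above assume the backing directories already exist on the self-hosted host. Provisioning is not part of this diff, but a one-time setup step would look roughly like this (a sketch, assuming the runner's home is /home/runner as in the mounts):

```bash
# Sketch: host-side directories backing the container mounts above.
mkdir -p /home/runner/.conan-cache \
         /home/runner/.ccache-cache \
         /home/runner/.ccache-main \
         /home/runner/.ccache-current
```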
@@ -48,9 +48,13 @@ target_sources (xrpl_core PRIVATE
  src/ripple/beast/net/impl/IPAddressV6.cpp
  src/ripple/beast/net/impl/IPEndpoint.cpp
  src/ripple/beast/utility/src/beast_Journal.cpp
  src/ripple/beast/utility/src/beast_PropertyStream.cpp
  # Enhanced logging - compiles to empty when BEAST_ENHANCED_LOGGING is not defined
  src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
  src/ripple/beast/utility/src/beast_PropertyStream.cpp)

# Conditionally add enhanced logging source when BEAST_ENHANCED_LOGGING is enabled
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
  target_sources(xrpl_core PRIVATE
    src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
endif()

#[===============================[
    core sources

@@ -158,16 +162,12 @@ target_link_libraries (xrpl_core
    date::date
    Ripple::opts)

# date-tz for enhanced logging (always linked, code is #ifdef guarded)
if(TARGET date::date-tz)
  target_link_libraries(xrpl_core PUBLIC date::date-tz)
# Link date-tz library when enhanced logging is enabled
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
  if(TARGET date::date-tz)
    target_link_libraries(xrpl_core PUBLIC date::date-tz)
  endif()
endif()

# BEAST_ENHANCED_LOGGING: enable for Debug builds OR when explicitly requested
# Uses generator expression so it works with multi-config generators (Xcode, VS, Ninja Multi-Config)
target_compile_definitions(xrpl_core PUBLIC
  $<$<OR:$<CONFIG:Debug>,$<BOOL:${BEAST_ENHANCED_LOGGING}>>:BEAST_ENHANCED_LOGGING=1>
)
#[=================================[
    main/core headers installation
#]=================================]
@@ -37,11 +37,20 @@ endif() #git
set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/")
add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}")

# BEAST_ENHANCED_LOGGING - adds file:line numbers and formatting to logs
# Automatically enabled for Debug builds via generator expression
# Can be explicitly controlled with -DBEAST_ENHANCED_LOGGING=ON/OFF
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages (auto: Debug=ON, Release=OFF)" OFF)
message(STATUS "BEAST_ENHANCED_LOGGING option: ${BEAST_ENHANCED_LOGGING}")
# BEAST_ENHANCED_LOGGING option - adds file:line numbers and formatting to logs
# Default to ON for Debug builds, OFF for Release
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
  option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" ON)
else()
  option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" OFF)
endif()

if(BEAST_ENHANCED_LOGGING)
  add_definitions(-DBEAST_ENHANCED_LOGGING=1)
  message(STATUS "Log line numbers enabled")
else()
  message(STATUS "Log line numbers disabled")
endif()

if(thread_safety_analysis)
  add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
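Either variant of the option wiring above is driven the same way from the command line. A sketch of a standard out-of-source configure (the build directory name is arbitrary; `xrpl_core` is the target named in the hunks above):

```bash
# Sketch: force-enable enhanced logging in a Release build.
cmake -B build -DCMAKE_BUILD_TYPE=Release -DBEAST_ENHANCED_LOGGING=ON
cmake --build build --target xrpl_core
```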
@@ -350,10 +350,7 @@ enum hook_return_code : int64_t {
    MEM_OVERLAP = -43,  // one or more specified buffers are the same memory
    TOO_MANY_STATE_MODIFICATIONS = -44,  // more than 5000 modified state
                                         // entires in the combined hook chains
    TOO_MANY_NAMESPACES = -45,
    EXPORT_FAILURE = -46,
    TOO_MANY_EXPORTED_TXN = -47,

    TOO_MANY_NAMESPACES = -45
};

enum ExitType : uint8_t {

@@ -367,7 +364,6 @@ const uint16_t max_state_modifications = 256;
const uint8_t max_slots = 255;
const uint8_t max_nonce = 255;
const uint8_t max_emit = 255;
const uint8_t max_export = 4;
const uint8_t max_params = 16;
const double fee_base_multiplier = 1.1f;

@@ -473,13 +469,6 @@ static const APIWhitelist import_whitelist_1{
    // clang-format on
};

static const APIWhitelist import_whitelist_2{
    // clang-format off
    HOOK_API_DEFINITION(I64, xport, (I32, I32)),
    HOOK_API_DEFINITION(I64, xport_reserve, (I32)),
    // clang-format on
};

#undef HOOK_API_DEFINITION
#undef I32
#undef I64
@@ -1034,12 +1034,6 @@ validateGuards(
        {
            // PASS, this is a version 1 api
        }
        else if (rulesVersion & 0x04U &&
            hook_api::import_whitelist_2.find(import_name) !=
                hook_api::import_whitelist_2.end())
        {
            // PASS, this is an export api
        }
        else
        {
            GUARDLOG(hook::log::IMPORT_ILLEGAL)
@@ -406,17 +406,6 @@ DECLARE_HOOK_FUNCTION(
    uint32_t slot_no_tx,
    uint32_t slot_no_meta);

DECLARE_HOOK_FUNCTION(
    int64_t,
    xport,
    uint32_t write_ptr,
    uint32_t write_len,
    uint32_t read_ptr,
    uint32_t read_len);
DECLARE_HOOK_FUNCTION(
    int64_t,
    xport_reserve,
    uint32_t count);
/*
DECLARE_HOOK_FUNCTION(int64_t, str_find, uint32_t hread_ptr,
    uint32_t hread_len, uint32_t nread_ptr, uint32_t nread_len, uint32_t mode,

@@ -496,8 +485,6 @@ struct HookResult

    std::queue<std::shared_ptr<ripple::Transaction>>
        emittedTxn{};  // etx stored here until accept/rollback
    std::queue<std::shared_ptr<ripple::Transaction>>
        exportedTxn{};
    HookStateMap& stateMap;
    uint16_t changedStateCount = 0;
    std::map<

@@ -554,7 +541,6 @@ struct HookContext
    uint16_t ledger_nonce_counter{0};
    int64_t expected_etxn_count{-1};  // make this a 64bit int so the uint32
                                      // from the hookapi cant overflow it
    int64_t expected_export_count{-1};
    std::map<ripple::uint256, bool> nonce_used{};
    uint32_t generation =
        0;  // used for caching, only generated when txn_generation is called

@@ -891,9 +877,6 @@ public:
        ADD_HOOK_FUNCTION(meta_slot, ctx);
        ADD_HOOK_FUNCTION(xpop_slot, ctx);

        ADD_HOOK_FUNCTION(xport, ctx);
        ADD_HOOK_FUNCTION(xport_reserve, ctx);

        /*
        ADD_HOOK_FUNCTION(str_find, ctx);
        ADD_HOOK_FUNCTION(str_replace, ctx);
@@ -79,7 +79,7 @@ main(int argc, char** argv)

    close(fd);

    auto result = validateGuards(hook, std::cout, "", 7);
    auto result = validateGuards(hook, std::cout, "", 3);

    if (!result)
    {
@@ -1971,8 +1971,6 @@ hook::finalizeHookResult(
    // directory) if we are allowed to
    std::vector<std::pair<uint256 /* txnid */, uint256 /* emit nonce */>>
        emission_txnid;
    std::vector<uint256 /* txnid */>
        exported_txnid;

    if (doEmit)
    {

@@ -2028,58 +2026,6 @@ hook::finalizeHookResult(
            }
        }
    }

    DBG_PRINTF("exported txn count: %d\n", hookResult.exportedTxn.size());
    for (; hookResult.exportedTxn.size() > 0; hookResult.exportedTxn.pop())
    {
        auto& tpTrans = hookResult.exportedTxn.front();
        auto& id = tpTrans->getID();
        JLOG(j.trace()) << "HookExport[" << HR_ACC() << "]: " << id;

        // exported txns must be marked bad by the hash router to ensure under
        // no circumstances they will enter consensus on *this* chain.
        applyCtx.app.getHashRouter().setFlags(id, SF_BAD);

        std::shared_ptr<const ripple::STTx> ptr =
            tpTrans->getSTransaction();

        auto exportedId = keylet::exportedTxn(id);
        auto sleExported = applyCtx.view().peek(exportedId);

        if (!sleExported)
        {
            exported_txnid.emplace_back(id);

            sleExported = std::make_shared<SLE>(exportedId);

            // RH TODO: add a new constructor to STObject to avoid this
            // serder thing
            ripple::Serializer s;
            ptr->add(s);
            SerialIter sit(s.slice());

            sleExported->emplace_back(ripple::STObject(sit, sfExportedTxn));
            auto page = applyCtx.view().dirInsert(
                keylet::exportedDir(), exportedId, [&](SLE::ref sle) {
                    (*sle)[sfFlags] = lsfEmittedDir;
                });

            if (page)
            {
                (*sleExported)[sfOwnerNode] = *page;
                applyCtx.view().insert(sleExported);
            }
            else
            {
                JLOG(j.warn())
                    << "HookError[" << HR_ACC() << "]: "
                    << "Export Directory full when trying to insert "
                    << id;
                return tecDIR_FULL;
            }
        }
    }
    }

    bool const fixV2 = applyCtx.view().rules().enabled(fixXahauV2);

@@ -2106,12 +2052,6 @@ hook::finalizeHookResult(
        meta.setFieldU16(
            sfHookEmitCount,
            emission_txnid.size());  // this will never wrap, hard limit
        if (applyCtx.view().rules().enabled(featureExport))
        {
            meta.setFieldU16(
                sfHookExportCount,
                exported_txnid.size());
        }
        meta.setFieldU16(sfHookExecutionIndex, exec_index);
        meta.setFieldU16(sfHookStateChangeCount, hookResult.changedStateCount);
        meta.setFieldH256(sfHookHash, hookResult.hookHash);

@@ -3948,27 +3888,6 @@ DEFINE_HOOK_FUNCTION(int64_t, etxn_reserve, uint32_t count)
    HOOK_TEARDOWN();
}

DEFINE_HOOK_FUNCTION(int64_t, xport_reserve, uint32_t count)
{
    HOOK_SETUP();  // populates memory_ctx, memory, memory_length, applyCtx,
                   // hookCtx on current stack

    if (hookCtx.expected_export_count > -1)
        return ALREADY_SET;

    if (count < 1)
        return TOO_SMALL;

    if (count > hook_api::max_export)
        return TOO_BIG;

    hookCtx.expected_export_count = count;

    return count;

    HOOK_TEARDOWN();
}

// Compute the burden of an emitted transaction based on a number of factors
DEFINE_HOOK_FUNCNARG(int64_t, etxn_burden)
{
@@ -6237,92 +6156,6 @@ DEFINE_HOOK_FUNCTION(

    HOOK_TEARDOWN();
}

DEFINE_HOOK_FUNCTION(
    int64_t,
    xport,
    uint32_t write_ptr,
    uint32_t write_len,
    uint32_t read_ptr,
    uint32_t read_len)
{
    HOOK_SETUP();

    if (NOT_IN_BOUNDS(read_ptr, read_len, memory_length))
        return OUT_OF_BOUNDS;

    if (NOT_IN_BOUNDS(write_ptr, write_len, memory_length))
        return OUT_OF_BOUNDS;

    if (write_len < 32)
        return TOO_SMALL;

    auto& app = hookCtx.applyCtx.app;

    if (hookCtx.expected_export_count < 0)
        return PREREQUISITE_NOT_MET;

    if (hookCtx.result.exportedTxn.size() >= hookCtx.expected_export_count)
        return TOO_MANY_EXPORTED_TXN;

    ripple::Blob blob{memory + read_ptr, memory + read_ptr + read_len};

    std::shared_ptr<STTx const> stpTrans;
    try
    {
        stpTrans = std::make_shared<STTx const>(
            SerialIter{memory + read_ptr, read_len});
    }
    catch (std::exception& e)
    {
        JLOG(j.trace()) << "HookExport[" << HC_ACC() << "]: Failed " << e.what()
                        << "\n";
        return EXPORT_FAILURE;
    }

    if (!stpTrans->isFieldPresent(sfAccount) ||
        stpTrans->getAccountID(sfAccount) != hookCtx.result.account)
    {
        JLOG(j.trace()) << "HookExport[" << HC_ACC()
            << "]: Attempted to export a txn that's not for this Hook's Account ID.";
        return EXPORT_FAILURE;
    }

    std::string reason;
    auto tpTrans = std::make_shared<Transaction>(stpTrans, reason, app);
    // RHTODO: is this needed or wise? VVV
    if (tpTrans->getStatus() != NEW)
    {
        JLOG(j.trace()) << "HookExport[" << HC_ACC()
                        << "]: tpTrans->getStatus() != NEW";
        return EXPORT_FAILURE;
    }
    auto const& txID = tpTrans->getID();

    if (txID.size() > write_len)
        return TOO_SMALL;

    if (NOT_IN_BOUNDS(write_ptr, txID.size(), memory_length))
        return OUT_OF_BOUNDS;

    auto const write_txid = [&]() -> int64_t {
        WRITE_WASM_MEMORY_AND_RETURN(
            write_ptr,
            txID.size(),
            txID.data(),
            txID.size(),
            memory,
            memory_length);
    };

    int64_t result = write_txid();

    if (result == 32)
        hookCtx.result.exportedTxn.push(tpTrans);

    return result;
    HOOK_TEARDOWN();
}
/*

DEFINE_HOOK_FUNCTION(
@@ -599,13 +599,6 @@ public:
        return validatorKeys_.publicKey;
    }

    ValidatorKeys const&
    getValidatorKeys() const override
    {
        return validatorKeys_;
    }

    NetworkOPs&
    getOPs() override
    {

@@ -240,8 +240,7 @@ public:

    virtual PublicKey const&
    getValidationPublicKey() const = 0;
    virtual ValidatorKeys const&
    getValidatorKeys() const = 0;

    virtual Resource::Manager&
    getResourceManager() = 0;
    virtual PathRequests&
@@ -27,8 +27,6 @@
#include <ripple/protocol/Feature.h>
#include <ripple/protocol/jss.h>
#include <ripple/protocol/st.h>
#include <ripple/app/misc/ValidatorKeys.h>
#include <ripple/protocol/Sign.h>
#include <algorithm>
#include <limits>
#include <numeric>

@@ -1541,247 +1539,6 @@ TxQ::accept(Application& app, OpenView& view)
        }
    }

    // Inject exported transactions/signatures, if any
    if (view.rules().enabled(featureExport))
    {
        do
        {
            // if we're not a validator we do nothing here
            if (app.getValidationPublicKey().empty())
                break;

            auto const& keys = app.getValidatorKeys();

            if (keys.configInvalid())
                break;

            // and if we're not on the UNLReport we also do nothing

            auto const unlRep = view.read(keylet::UNLReport());
            if (!unlRep || !unlRep->isFieldPresent(sfActiveValidators))
            {
                // nothing to do without a unlreport object
                break;
            }

            bool found = false;
            auto const& avs = unlRep->getFieldArray(sfActiveValidators);
            for (auto const& av : avs)
            {
                if (PublicKey(av[sfPublicKey]) == keys.masterPublicKey)
                {
                    found = true;
                    break;
                }
            }

            if (!found)
                break;

            // execution to here means we're a validator and on the UNLReport

            AccountID signingAcc = calcAccountID(keys.publicKey);

            Keylet const exportedDirKeylet{keylet::exportedDir()};
            if (dirIsEmpty(view, exportedDirKeylet))
                break;

            std::shared_ptr<SLE const> sleDirNode{};
            unsigned int uDirEntry{0};
            uint256 dirEntry{beast::zero};

            if (!cdirFirst(
                    view,
                    exportedDirKeylet.key,
                    sleDirNode,
                    uDirEntry,
                    dirEntry))
                break;

            do
            {
                Keylet const itemKeylet{ltCHILD, dirEntry};
                auto sleItem = view.read(itemKeylet);
                if (!sleItem)
                {
                    // Directory node has an invalid index. Bail out.
                    JLOG(j_.warn())
                        << "ExportedTxn processing: directory node in ledger "
                        << view.seq()
                        << " has index to object that is missing: "
                        << to_string(dirEntry);

                    // RH TODO: if this ever happens the entry should be
                    // gracefully removed (somehow)
                    continue;
                }

                LedgerEntryType const nodeType{
                    safe_cast<LedgerEntryType>((*sleItem)[sfLedgerEntryType])};

                if (nodeType != ltEXPORTED_TXN)
                {
                    JLOG(j_.warn())
                        << "ExportedTxn processing: emitted directory contained "
                           "non ltEMITTED_TXN type";
                    // RH TODO: if this ever happens the entry should be
                    // gracefully removed (somehow)
                    continue;
                }

                JLOG(j_.info()) << "Processing exported txn: " << *sleItem;

                auto const& exported =
                    const_cast<ripple::STLedgerEntry&>(*sleItem)
                        .getField(sfExportedTxn)
                        .downcast<STObject>();

                auto const& txnHash = sleItem->getFieldH256(sfTransactionHash);

                auto exportedLgrSeq = exported.getFieldU32(sfLedgerSequence);

                auto const seq = view.seq();

                if (exportedLgrSeq == seq)
                {
                    // this shouldn't happen, but do nothing
                    continue;
                }

                if (exportedLgrSeq < seq - 1)
                {
                    // all old entries need to be turned into Export transactions so they can be removed
                    // from the directory

                    // in the previous ledger all the ExportSign transactions were executed, and one-by-one
                    // added the validators' signatures to the ltEXPORTED_TXN's sfSigners array.
                    // now we need to collect these together and place them inside the ExportedTxn blob
                    // and publish the blob in the Export transaction type.

                    STArray signers = sleItem->getFieldArray(sfSigners);

                    auto s = std::make_shared<ripple::Serializer>();
                    exported.add(*s);
                    SerialIter sitTrans(s->slice());
                    try
                    {
                        auto stpTrans =
                            std::make_shared<STTx>(std::ref(sitTrans));

                        if (!stpTrans->isFieldPresent(sfAccount) ||
                            stpTrans->getAccountID(sfAccount) == beast::zero)
                        {
                            JLOG(j_.warn()) << "Hook: Export failure: "
                                            << "sfAccount missing or zero.";
                            // RH TODO: if this ever happens the entry should be
                            // gracefully removed (somehow)
                            continue;
                        }

                        // RH TODO: should we force remove signingpubkey here?

                        stpTrans->setFieldArray(sfSigners, signers);

                        Blob const& blob = stpTrans->getSerializer().peekData();

                        STTx exportTx(ttEXPORT, [&](auto& obj) {
                            obj.setFieldVL(sfExportedTxn, blob);
                            obj.setFieldU32(sfLedgerSequence, seq);
                            obj.setFieldH256(sfTransactionHash, txnHash);
                            obj.setFieldArray(sfSigners, signers);
                        });

                        // submit to the ledger
                        {
                            uint256 txID = exportTx.getTransactionID();
                            auto s = std::make_shared<ripple::Serializer>();
                            exportTx.add(*s);
                            app.getHashRouter().setFlags(txID, SF_PRIVATE2);
                            app.getHashRouter().setFlags(txID, SF_EMITTED);
                            view.rawTxInsert(txID, std::move(s), nullptr);
                            ledgerChanged = true;
                        }
                    }
                    catch (std::exception& e)
                    {
                        JLOG(j_.warn())
                            << "ExportedTxn Processing: Failure: " << e.what()
                            << "\n";
                    }

                    continue;
                }

                // this ledger is the one after the exported txn was added to the directory
                // so generate the export sign txns

                auto s = std::make_shared<ripple::Serializer>();
                exported.add(*s);
                SerialIter sitTrans(s->slice());
                try
                {
                    auto const& stpTrans =
                        std::make_shared<STTx const>(std::ref(sitTrans));

                    if (!stpTrans->isFieldPresent(sfAccount) ||
                        stpTrans->getAccountID(sfAccount) == beast::zero)
                    {
                        JLOG(j_.warn()) << "Hook: Export failure: "
                                        << "sfAccount missing or zero.";
                        // RH TODO: if this ever happens the entry should be
                        // gracefully removed (somehow)
                        continue;
                    }

                    auto seq = view.info().seq;
                    auto txnHash = stpTrans->getTransactionID();

                    Serializer s =
                        buildMultiSigningData(*stpTrans, signingAcc);

                    auto multisig = ripple::sign(keys.publicKey, keys.secretKey, s.slice());

                    STTx exportSignTx(ttEXPORT_SIGN, [&](auto& obj) {
                        obj.set(([&]() {
                            auto inner = std::make_unique<STObject>(sfSigner);
                            inner->setFieldVL(sfSigningPubKey, keys.publicKey);
                            inner->setAccountID(sfAccount, signingAcc);
                            inner->setFieldVL(sfTxnSignature, multisig);
                            return inner;
                        })());
                        obj.setFieldU32(sfLedgerSequence, seq);
                        obj.setFieldH256(sfTransactionHash, txnHash);
                    });

                    // submit to the ledger
                    {
                        uint256 txID = exportSignTx.getTransactionID();
                        auto s = std::make_shared<ripple::Serializer>();
                        exportSignTx.add(*s);
                        app.getHashRouter().setFlags(txID, SF_PRIVATE2);
                        app.getHashRouter().setFlags(txID, SF_EMITTED);
                        view.rawTxInsert(txID, std::move(s), nullptr);
                        ledgerChanged = true;
                    }
                }
                catch (std::exception& e)
                {
                    JLOG(j_.warn())
                        << "ExportedTxn Processing: Failure: " << e.what()
                        << "\n";
                }

            } while (cdirNext(
                view, exportedDirKeylet.key, sleDirNode, uDirEntry, dirEntry));

        } while (0);
    }

    // Inject emitted transactions if any
    if (view.rules().enabled(featureHooks))
        do
@@ -96,13 +96,6 @@ Change::preflight(PreflightContext const& ctx)
        }
    }

    if ((ctx.tx.getTxnType() == ttEXPORT_SIGN || ctx.tx.getTxnType() == ttEXPORT) &&
        !ctx.rules.enabled(featureExport))
    {
        JLOG(ctx.j.warn()) << "Change: Export not enabled";
        return temDISABLED;
    }

    return tesSUCCESS;
}

@@ -161,8 +154,6 @@ Change::preclaim(PreclaimContext const& ctx)
        case ttAMENDMENT:
        case ttUNL_MODIFY:
        case ttEMIT_FAILURE:
        case ttEXPORT:
        case ttEXPORT_SIGN:
            return tesSUCCESS;
        case ttUNL_REPORT: {
            if (!ctx.tx.isFieldPresent(sfImportVLKey) ||

@@ -218,11 +209,6 @@ Change::doApply()
            return applyEmitFailure();
        case ttUNL_REPORT:
            return applyUNLReport();
        case ttEXPORT:
            return applyExport();
        case ttEXPORT_SIGN:
            return applyExportSign();

        default:
            assert(0);
            return tefFAILURE;

@@ -620,8 +606,7 @@ Change::activateXahauGenesis()
        loggerStream,
        "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
        (ctx_.view().rules().enabled(featureHooksUpdate1) ? 1 : 0) +
            (ctx_.view().rules().enabled(fix20250131) ? 2 : 0) +
            (ctx_.view().rules().enabled(featureExport) ? 4 : 0));
            (ctx_.view().rules().enabled(fix20250131) ? 2 : 0));

    if (!result)
    {

@@ -1087,80 +1072,6 @@ Change::applyEmitFailure()
    return tesSUCCESS;
}

TER
Change::applyExport()
{
    uint256 txnID(ctx_.tx.getFieldH256(sfTransactionHash));
    do
    {
        JLOG(j_.info()) << "HookExport[" << txnID
                        << "]: ttExport exporting transaction";

        auto key = keylet::exportedTxn(txnID);

        auto const& sle = view().peek(key);

        if (!sle)
        {
            // most likely explanation is that this was somehow a double-up, so just ignore
            JLOG(j_.warn())
                << "HookError[" << txnID << "]: ttExport could not find exported txn in ledger";
            break;
        }

        if (!view().dirRemove(
                keylet::exportedDir(),
                sle->getFieldU64(sfOwnerNode),
                key,
                false))
        {
            JLOG(j_.fatal()) << "HookError[" << txnID
                             << "]: ttExport (Change) tefBAD_LEDGER";
            return tefBAD_LEDGER;
        }

        view().erase(sle);
    } while (0);
    return tesSUCCESS;
}

TER
Change::applyExportSign()
{
    uint256 txnID(ctx_.tx.getFieldH256(sfTransactionHash));
    do
    {
        JLOG(j_.info()) << "HookExport[" << txnID
                        << "]: ttExportSign adding signature to transaction";

        auto key = keylet::exportedTxn(txnID);

        auto const& sle = view().peek(key);

        if (!sle)
        {
            // most likely explanation is that this was somehow a double-up, so just ignore
            JLOG(j_.warn())
                << "HookError[" << txnID << "]: ttExportSign could not find exported txn in ledger";
            break;
        }

        // grab the signer object off the txn
        STObject signerObj = const_cast<ripple::STTx&>(ctx_.tx)
                                 .getField(sfSigner)
                                 .downcast<STObject>();

        // append it to the signers field in the ledger object
        STArray signers = sle->getFieldArray(sfSigners);
        signers.push_back(signerObj);
        sle->setFieldArray(sfSigners, signers);

        // done
        view().update(sle);
    } while (0);
    return tesSUCCESS;
}

TER
Change::applyUNLModify()
{

@@ -74,12 +74,6 @@ private:
    TER
    applyEmitFailure();

    TER
    applyExport();

    TER
    applyExportSign();

    TER
    applyUNLReport();
};
@@ -37,12 +37,9 @@
#include <charconv>
#include <iostream>
#include <vector>
#include <ripple/app/hook/applyHook.h>

namespace ripple {

static const uint256 shadowTicketNamespace = uint256::fromVoid("RESERVED NAMESPACE SHADOW TICKET");

TxConsequences
Import::makeTxConsequences(PreflightContext const& ctx)
{

@@ -200,7 +197,7 @@ Import::preflight(PreflightContext const& ctx)
    if (!stpTrans || !meta)
        return temMALFORMED;

    if (stpTrans->isFieldPresent(sfTicketSequence) && !ctx.rules.enabled(featureExport))
    if (stpTrans->isFieldPresent(sfTicketSequence))
    {
        JLOG(ctx.j.warn()) << "Import: cannot use TicketSequence XPOP.";
        return temMALFORMED;

@@ -891,26 +888,6 @@ Import::preclaim(PreclaimContext const& ctx)
        return tefINTERNAL;
    }

    bool const hasTicket = stpTrans->isFieldPresent(sfTicketSequence);

    if (hasTicket)
    {
        if (!ctx.view.rules().enabled(featureExport))
            return tefINTERNAL;

        auto const acc = stpTrans->getAccountID(sfAccount);
        uint256 const seq = uint256(stpTrans->getFieldU32(sfTicketSequence));

        // check if there is a shadow ticket, and if not we won't allow
        // the txn to pass into consensus

        if (!ctx.view.exists(keylet::hookState(acc, seq, shadowTicketNamespace)))
        {
            JLOG(ctx.j.warn()) << "Import: attempted to import a txn without shadow ticket.";
            return telSHADOW_TICKET_REQUIRED;  // tel code to avoid consensus/forward without SF_BAD
        }
    }

    auto const& sle = ctx.view.read(keylet::account(ctx.tx[sfAccount]));

    auto const tt = stpTrans->getTxnType();

@@ -951,17 +928,13 @@ Import::preclaim(PreclaimContext const& ctx)
        } while (0);
    }

    if (!hasTicket)
    if (sle && sle->isFieldPresent(sfImportSequence))
    {
        uint32_t sleImportSequence = sle->getFieldU32(sfImportSequence);

        if (sle && sle->isFieldPresent(sfImportSequence))
        {
            uint32_t sleImportSequence = sle->getFieldU32(sfImportSequence);

            // replay attempt
            if (sleImportSequence >= stpTrans->getFieldU32(sfSequence))
                return tefPAST_IMPORT_SEQ;
        }
        // replay attempt
        if (sleImportSequence >= stpTrans->getFieldU32(sfSequence))
            return tefPAST_IMPORT_SEQ;
    }

    // when importing for the first time the fee must be zero

@@ -1269,11 +1242,7 @@ Import::doApply()
    auto const id = ctx_.tx[sfAccount];
    auto sle = view().peek(keylet::account(id));

    std::optional<uint256> ticket;
    if (stpTrans->isFieldPresent(sfTicketSequence))
        ticket = uint256(stpTrans->getFieldU32(sfTicketSequence));

    if (sle && !ticket.has_value() && sle->getFieldU32(sfImportSequence) >= importSequence)
    if (sle && sle->getFieldU32(sfImportSequence) >= importSequence)
    {
        // make double sure import seq hasn't passed
        JLOG(ctx_.journal.warn()) << "Import: ImportSequence passed";

@@ -1366,24 +1335,8 @@ Import::doApply()
        }
    }

    if (!ticket.has_value())
        sle->setFieldU32(sfImportSequence, importSequence);

    sle->setFieldU32(sfImportSequence, importSequence);
    sle->setFieldAmount(sfBalance, finalBal);

    if (ticket.has_value())
    {
        auto sleTicket = view().peek(keylet::hookState(id, *ticket, shadowTicketNamespace));
        if (!sleTicket)
            return tefINTERNAL;

        TER result = hook::setHookState(ctx_, id, shadowTicketNamespace, *ticket, {});
        if (result != tesSUCCESS)
            return result;

        // RHUPTO: ticketseq billing?
    }

    if (create)
    {

@@ -491,8 +491,7 @@ SetHook::validateHookSetEntry(SetHookCtx& ctx, STObject const& hookSetObj)
        logger,
        hsacc,
        (ctx.rules.enabled(featureHooksUpdate1) ? 1 : 0) +
            (ctx.rules.enabled(fix20250131) ? 2 : 0) +
            (ctx.rules.enabled(featureExport) ? 4 : 0));
            (ctx.rules.enabled(fix20250131) ? 2 : 0));

    if (ctx.j.trace())
    {
@@ -374,8 +374,6 @@ invoke_calculateBaseFee(ReadView const& view, STTx const& tx)
        case ttUNL_MODIFY:
        case ttUNL_REPORT:
        case ttEMIT_FAILURE:
        case ttEXPORT_SIGN:
        case ttEXPORT:
            return Change::calculateBaseFee(view, tx);
        case ttNFTOKEN_MINT:
            return NFTokenMint::calculateBaseFee(view, tx);

@@ -546,8 +544,6 @@ invoke_apply(ApplyContext& ctx)
        case ttFEE:
        case ttUNL_MODIFY:
        case ttUNL_REPORT:
        case ttEXPORT:
        case ttEXPORT_SIGN:
        case ttEMIT_FAILURE: {
            Change p(ctx);
            return p();
@@ -360,8 +360,7 @@ Logs::format(
    if (!partition.empty())
    {
#ifdef BEAST_ENHANCED_LOGGING
        if (beast::detail::should_log_use_colors())
            output += beast::detail::get_log_highlight_color();
        output += beast::detail::get_log_highlight_color();
#endif
        output += partition + ":";
    }

@@ -393,8 +392,7 @@ Logs::format(
    }

#ifdef BEAST_ENHANCED_LOGGING
    if (beast::detail::should_log_use_colors())
        output += "\033[0m";
    output += "\033[0m";
#endif

    output += message;

@@ -41,14 +41,6 @@ get_log_highlight_color();
constexpr const char*
strip_source_root(const char* file)
{
    // Handle relative paths from build/ directory (common with ccache)
    // e.g., "../src/ripple/..." -> "ripple/..."
    if (file && file[0] == '.' && file[1] == '.' && file[2] == '/' &&
        file[3] == 's' && file[4] == 'r' && file[5] == 'c' && file[6] == '/')
    {
        return file + 7;  // skip "../src/"
    }

#ifdef SOURCE_ROOT_PATH
    constexpr const char* sourceRoot = SOURCE_ROOT_PATH;
    constexpr auto strlen_constexpr = [](const char* s) constexpr

@@ -17,8 +17,6 @@
 */
//==============================================================================

#ifdef BEAST_ENHANCED_LOGGING

#include <ripple/beast/utility/EnhancedLogging.h>
#include <cstdlib>
#include <cstring>

@@ -114,5 +112,3 @@ log_write_location_string(std::ostream& os, const char* file, int line)

}  // namespace detail
}  // namespace beast

#endif  // BEAST_ENHANCED_LOGGING
@@ -155,43 +155,14 @@ Journal::ScopedStream::~ScopedStream()

#ifdef BEAST_ENHANCED_LOGGING
    // Add suffix if location is enabled
    if (file_ && detail::should_show_location() && !s.empty())
    if (file_ && detail::should_show_location() && !s.empty() && s != "\n")
    {
        // Single optimized scan from the end
        size_t const lastNonWhitespace = s.find_last_not_of(" \n\r\t");

        // Skip if message is only whitespace (e.g., just "\n" or " \n\n")
        if (lastNonWhitespace != std::string::npos)
        {
            // Count only the trailing newlines (tiny range)
            size_t trailingNewlines = 0;
            for (size_t i = lastNonWhitespace + 1; i < s.length(); ++i)
            {
                if (s[i] == '\n')
                    ++trailingNewlines;
            }

            // Build location string once
            std::ostringstream locStream;
            detail::log_write_location_string(locStream, file_, line_);
            std::string const location = locStream.str();

            // Pre-allocate exact size → zero reallocations
            size_t const finalSize = lastNonWhitespace + 1 + 1 +
                location.length() + trailingNewlines;

            std::string result;
            result.reserve(finalSize);

            // Direct string ops (no ostringstream overhead)
            result.append(s, 0, lastNonWhitespace + 1);
            result.push_back(' ');
            result += location;
            if (trailingNewlines > 0)
                result.append(trailingNewlines, '\n');

            s = std::move(result);  // Move, no copy
        }
        std::ostringstream combined;
        combined << s;
        if (!s.empty() && s.back() != ' ')
            combined << " ";
        detail::log_write_location_string(combined, file_, line_);
        s = combined.str();
    }
#endif
@@ -74,7 +74,7 @@ namespace detail {
// Feature.cpp. Because it's only used to reserve storage, and determine how
// large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than
// the actual number of amendments. A LogicError on startup will verify this.
static constexpr std::size_t numFeatures = 91;
static constexpr std::size_t numFeatures = 90;

/** Amendments that this server supports and the default voting behavior.
    Whether they are enabled depends on the Rules defined in the validated

@@ -378,7 +378,6 @@ extern uint256 const fixInvalidTxFlags;
extern uint256 const featureExtendedHookState;
extern uint256 const fixCronStacking;
extern uint256 const fixHookAPI20251128;
extern uint256 const featureExport;
}  // namespace ripple

#endif

@@ -56,15 +56,9 @@ namespace keylet {
Keylet const&
emittedDir() noexcept;

Keylet const&
exportedDir() noexcept;

Keylet
emittedTxn(uint256 const& id) noexcept;

Keylet
exportedTxn(uint256 const& id) noexcept;

Keylet
hookDefinition(uint256 const& hash) noexcept;

@@ -260,8 +260,6 @@ enum LedgerEntryType : std::uint16_t
        \sa keylet::emitted
    */
    ltEMITTED_TXN = 'E',

    ltEXPORTED_TXN = 0x4578,  // Ex (exported transaction)
};
// clang-format off

@@ -320,8 +318,7 @@ enum LedgerSpecificFlags {
    // ltDIR_NODE
    lsfNFTokenBuyOffers = 0x00000001,
    lsfNFTokenSellOffers = 0x00000002,
    lsfEmittedDir = 0x00000004,
    lsfExportedDir = 0x00000008,
    lsfEmittedDir = 0x00000004,

    // ltNFTOKEN_OFFER
    lsfSellNFToken = 0x00000001,

@@ -355,7 +355,6 @@ extern SF_UINT16 const sfHookEmitCount;
extern SF_UINT16 const sfHookExecutionIndex;
extern SF_UINT16 const sfHookApiVersion;
extern SF_UINT16 const sfHookStateScale;
extern SF_UINT16 const sfHookExportCount;

// 32-bit integers (common)
extern SF_UINT32 const sfNetworkID;

@@ -596,7 +595,6 @@ extern SField const sfSigner;
extern SField const sfMajority;
extern SField const sfDisabledValidator;
extern SField const sfEmittedTxn;
extern SField const sfExportedTxn;
extern SField const sfHookExecution;
extern SField const sfHookDefinition;
extern SField const sfHookParameter;

@@ -67,7 +67,6 @@ enum TELcodes : TERUnderlyingType {
    telNON_LOCAL_EMITTED_TXN,
    telIMPORT_VL_KEY_NOT_RECOGNISED,
    telCAN_NOT_QUEUE_IMPORT,
    telSHADOW_TICKET_REQUIRED,
};

//------------------------------------------------------------------------------

@@ -149,12 +149,6 @@ enum TxType : std::uint16_t
    ttURITOKEN_CREATE_SELL_OFFER = 48,
    ttURITOKEN_CANCEL_SELL_OFFER = 49,

    /* A pseudo-txn containing an exported transaction plus signatures from the validators */
    ttEXPORT = 90,

    /* A pseudo-txn containing a validator's signature for an export transaction */
    ttEXPORT_SIGN = 91,

    /* A pseudo-txn alarm signal for invoking a hook, emitted by validators after alarm set conditions are met */
    ttCRON = 92,

@@ -484,7 +484,6 @@ REGISTER_FIX (fixInvalidTxFlags, Supported::yes, VoteBehavior::De
REGISTER_FEATURE(ExtendedHookState, Supported::yes, VoteBehavior::DefaultNo);
REGISTER_FIX (fixCronStacking, Supported::yes, VoteBehavior::DefaultYes);
REGISTER_FIX (fixHookAPI20251128, Supported::yes, VoteBehavior::DefaultYes);
REGISTER_FEATURE(Export, Supported::yes, VoteBehavior::DefaultNo);

// The following amendments are obsolete, but must remain supported
// because they could potentially get enabled.

@@ -66,8 +66,6 @@ enum class LedgerNameSpace : std::uint16_t {
    HOOK_DEFINITION = 'D',
    EMITTED_TXN = 'E',
    EMITTED_DIR = 'F',
    EXPORTED_TXN = 0x4578,  // Ex
    EXPORTED_DIR = 0x4564,  // Ed
    NFTOKEN_OFFER = 'q',
    NFTOKEN_BUY_OFFERS = 'h',
    NFTOKEN_SELL_OFFERS = 'i',

@@ -149,14 +147,6 @@ emittedDir() noexcept
    return ret;
}

Keylet const&
exportedDir() noexcept
{
    static Keylet const ret{
        ltDIR_NODE, indexHash(LedgerNameSpace::EXPORTED_DIR)};
    return ret;
}

Keylet
hookStateDir(AccountID const& id, uint256 const& ns) noexcept
{

@@ -169,12 +159,6 @@ emittedTxn(uint256 const& id) noexcept
    return {ltEMITTED_TXN, indexHash(LedgerNameSpace::EMITTED_TXN, id)};
}

Keylet
exportedTxn(uint256 const& id) noexcept
{
    return {ltEXPORTED_TXN, indexHash(LedgerNameSpace::EXPORTED_TXN, id)};
}

Keylet
hook(AccountID const& id) noexcept
{

@@ -380,15 +380,6 @@ LedgerFormats::LedgerFormats()
        {sfPreviousTxnLgrSeq, soeREQUIRED}
        },
        commonFields);

    add(jss::ExportedTxn,
        ltEXPORTED_TXN,
        {
            {sfExportedTxn, soeOPTIONAL},
            {sfOwnerNode, soeREQUIRED},
            {sfLedgerSequence, soeREQUIRED},
        },
        commonFields);

    // clang-format on
}

@@ -103,7 +103,6 @@ CONSTRUCT_TYPED_SFIELD(sfHookEmitCount, "HookEmitCount", UINT16,
CONSTRUCT_TYPED_SFIELD(sfHookExecutionIndex, "HookExecutionIndex", UINT16, 19);
CONSTRUCT_TYPED_SFIELD(sfHookApiVersion, "HookApiVersion", UINT16, 20);
CONSTRUCT_TYPED_SFIELD(sfHookStateScale, "HookStateScale", UINT16, 21);
CONSTRUCT_TYPED_SFIELD(sfHookExportCount, "HookExportCount", UINT16, 22);

// 32-bit integers (common)
CONSTRUCT_TYPED_SFIELD(sfNetworkID, "NetworkID", UINT32, 1);

@@ -362,7 +361,6 @@ CONSTRUCT_UNTYPED_SFIELD(sfImportVLKey, "ImportVLKey", OBJECT,
CONSTRUCT_UNTYPED_SFIELD(sfHookEmission, "HookEmission", OBJECT, 93);
CONSTRUCT_UNTYPED_SFIELD(sfMintURIToken, "MintURIToken", OBJECT, 92);
CONSTRUCT_UNTYPED_SFIELD(sfAmountEntry, "AmountEntry", OBJECT, 91);
CONSTRUCT_UNTYPED_SFIELD(sfExportedTxn, "ExportedTxn", OBJECT, 90);

// array of objects
// ARRAY/1 is reserved for end of array

@@ -141,7 +141,6 @@ transResults()
    MAKE_ERROR(telNON_LOCAL_EMITTED_TXN, "Emitted transaction cannot be applied because it was not generated locally."),
    MAKE_ERROR(telIMPORT_VL_KEY_NOT_RECOGNISED, "Import vl key was not recognized."),
    MAKE_ERROR(telCAN_NOT_QUEUE_IMPORT, "Import transaction was not able to be directly applied and cannot be queued."),
    MAKE_ERROR(telSHADOW_TICKET_REQUIRED, "The imported transaction uses a TicketSequence but no shadow ticket exists."),
    MAKE_ERROR(temMALFORMED, "Malformed transaction."),
    MAKE_ERROR(temBAD_AMOUNT, "Can only send positive amounts."),
    MAKE_ERROR(temBAD_CURRENCY, "Malformed: Bad currency."),

@@ -490,26 +490,6 @@ TxFormats::TxFormats()
        {sfStartTime, soeOPTIONAL},
        },
        commonFields);

    add(jss::ExportSign,
        ttEXPORT_SIGN,
        {
            {sfSigner, soeREQUIRED},
            {sfLedgerSequence, soeREQUIRED},
            {sfTransactionHash, soeREQUIRED},
        },
        commonFields);

    add(jss::Export,
        ttEXPORT,
        {
            {sfTransactionHash, soeREQUIRED},
            {sfExportedTxn, soeREQUIRED},
            {sfSigners, soeREQUIRED},
            {sfLedgerSequence, soeREQUIRED},
        },
        commonFields);

}

TxFormats const&

@@ -140,9 +140,6 @@ JSS(HookState);  // ledger type.
JSS(HookStateData);  // field.
JSS(HookStateKey);   // field.
JSS(EmittedTxn);     // ledger type.
JSS(ExportedTxn);
JSS(Export);
JSS(ExportSign);
JSS(SignerList);     // ledger type.
JSS(SignerListSet);  // transaction type.
JSS(SigningPubKey);  // field.