Compare commits

..

9 Commits

17 changed files with 2087 additions and 477 deletions


@@ -75,37 +75,17 @@ runs:
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
- name: Restore ccache directory for main branch
if: inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true'
id: ccache-restore
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.ccache-main
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
restore-keys: |
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: ccache-main
- name: Restore ccache directory for current branch
if: inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
id: ccache-restore-current-branch
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.ccache-current
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
restore-keys: |
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: ccache-current
- name: Configure ccache
if: inputs.ccache_enabled == 'true'
shell: bash
run: |
# Create cache directories
mkdir -p ~/.ccache-main ~/.ccache-current
mkdir -p ~/.ccache-cache
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
@@ -116,20 +96,9 @@ runs:
ccache --set-config=max_size=${{ inputs.ccache_max_size }}
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
# Determine if we're on the main branch
if [ "${{ steps.safe-branch.outputs.name }}" = "${{ inputs.main_branch }}" ]; then
# Main branch: use main branch cache only
ccache --set-config=cache_dir="$HOME/.ccache-main"
echo "CCACHE_DIR=$HOME/.ccache-main" >> $GITHUB_ENV
echo "📦 Main branch: using ~/.ccache-main"
else
# Feature branch: use current branch cache with main as secondary (read-only fallback)
ccache --set-config=cache_dir="$HOME/.ccache-current"
ccache --set-config=secondary_storage="file:$HOME/.ccache-main"
echo "CCACHE_DIR=$HOME/.ccache-current" >> $GITHUB_ENV
echo "📦 Feature branch: using ~/.ccache-current with ~/.ccache-main as secondary"
fi
ccache --set-config=cache_dir="$HOME/.ccache-cache"
echo "CCACHE_DIR=$HOME/.ccache-cache" >> $GITHUB_ENV
echo "📦 using ~/.ccache-cache as ccache cache directory"
# Print config for verification
echo "=== ccache configuration ==="
@@ -244,17 +213,3 @@ runs:
if: inputs.ccache_enabled == 'true'
shell: bash
run: ccache -s
- name: Save ccache directory for main branch
if: success() && inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
uses: actions/cache/save@v4
with:
path: ~/.ccache-main
key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
- name: Save ccache directory for current branch
if: success() && inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
uses: actions/cache/save@v4
with:
path: ~/.ccache-current
key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}


@@ -17,10 +17,6 @@ inputs:
description: 'Cache version for invalidation'
required: false
default: '1'
gha_cache_enabled:
description: 'Whether to use actions/cache (disable for self-hosted with volume mounts)'
required: false
default: 'true'
main_branch:
description: 'Main branch name for restore keys'
required: false
@@ -63,18 +59,14 @@ outputs:
runs:
using: 'composite'
steps:
- name: Restore Conan cache
if: inputs.gha_cache_enabled == 'true'
id: cache-restore-conan
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.conan2
# Note: compiler-id format is compiler-version-stdlib[-gccversion]
key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-${{ inputs.configuration }}
restore-keys: |
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: Conan
- name: Configure Conan cache paths
if: inputs.os == 'Linux'
shell: bash
run: |
mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
- name: Configure Conan cache paths
if: inputs.gha_cache_enabled == 'false'
@@ -161,10 +153,3 @@ runs:
--build missing \
--settings build_type=${{ inputs.configuration }} \
..
- name: Save Conan cache
if: success() && inputs.gha_cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: ~/.conan2
key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}


@@ -20,7 +20,7 @@ jobs:
- Ninja
configuration:
- Debug
runs-on: macos-15
runs-on: [self-hosted, macOS]
env:
build_dir: .build
# Bump this number to invalidate all caches globally.
@@ -30,61 +30,29 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install Conan
- name: Add Homebrew to PATH
run: |
brew install conan
# Verify Conan 2 is installed
conan --version
echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
echo "/opt/homebrew/sbin" >> "$GITHUB_PATH"
- name: Install Coreutils
run: |
brew install coreutils
echo "Num proc: $(nproc)"
- name: Install Ninja
if: matrix.generator == 'Ninja'
run: brew install ninja
# Instead of installing tools globally with brew, use mise to isolate
# the toolchain per runner directory.
- name: Setup toolchain (mise)
uses: jdx/mise-action@v2
with:
install: true
- name: Install Python
- name: Install tools via mise
run: |
if which python3 > /dev/null 2>&1; then
echo "Python 3 executable exists"
python3 --version
else
brew install python@3.12
fi
# Create 'python' symlink if it doesn't exist (for tools expecting 'python')
if ! which python > /dev/null 2>&1; then
sudo ln -sf $(which python3) /usr/local/bin/python
fi
- name: Install CMake
run: |
# Install CMake 3.x to match local dev environments
# With Conan 2 and the policy args passed to CMake, newer versions
# can have issues with dependencies that require cmake_minimum_required < 3.5
brew uninstall cmake --ignore-dependencies 2>/dev/null || true
# Download and install CMake 3.31.7 directly
curl -L https://github.com/Kitware/CMake/releases/download/v3.31.7/cmake-3.31.7-macos-universal.tar.gz -o cmake.tar.gz
tar -xzf cmake.tar.gz
# Move the entire CMake.app to /Applications
sudo mv cmake-3.31.7-macos-universal/CMake.app /Applications/
echo "/Applications/CMake.app/Contents/bin" >> $GITHUB_PATH
/Applications/CMake.app/Contents/bin/cmake --version
- name: Install ccache
run: brew install ccache
mise install
mise use cmake@3.23.1 python@3.12 pipx@latest conan@2 ninja@latest ccache@latest
mise reshim
echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"
- name: Check environment
run: |
@@ -98,6 +66,14 @@ jobs:
echo "---- Full Environment ----"
env
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Detect compiler version
id: detect-compiler
run: |
@@ -129,6 +105,7 @@ jobs:
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: libcxx
ccache_max_size: '100G'
- name: Test
run: |


@@ -181,8 +181,7 @@ jobs:
image: ubuntu:24.04
volumes:
- /home/runner/.conan-cache:/.conan-cache
- /home/runner/.ccache-main:/github/home/.ccache-main
- /home/runner/.ccache-current:/github/home/.ccache-current
- /home/runner/.ccache-cache:/github/home/.ccache-cache
defaults:
run:
shell: bash
@@ -325,7 +324,6 @@ jobs:
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: ${{ matrix.stdlib }}
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
gha_cache_enabled: 'false' # Disable caching for self hosted runner
ccache_max_size: '100G'
- name: Set artifact name

.gitignore

@@ -121,3 +121,5 @@ CMakeUserPresets.json
bld.rippled/
generated
guard_checker
guard_checker.dSYM


@@ -48,13 +48,9 @@ target_sources (xrpl_core PRIVATE
src/ripple/beast/net/impl/IPAddressV6.cpp
src/ripple/beast/net/impl/IPEndpoint.cpp
src/ripple/beast/utility/src/beast_Journal.cpp
src/ripple/beast/utility/src/beast_PropertyStream.cpp)
# Conditionally add enhanced logging source when BEAST_ENHANCED_LOGGING is enabled
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
target_sources(xrpl_core PRIVATE
src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
endif()
src/ripple/beast/utility/src/beast_PropertyStream.cpp
# Enhanced logging - compiles to empty when BEAST_ENHANCED_LOGGING is not defined
src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
#[===============================[
core sources
@@ -162,12 +158,16 @@ target_link_libraries (xrpl_core
date::date
Ripple::opts)
# Link date-tz library when enhanced logging is enabled
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
if(TARGET date::date-tz)
target_link_libraries(xrpl_core PUBLIC date::date-tz)
endif()
# date-tz for enhanced logging (always linked, code is #ifdef guarded)
if(TARGET date::date-tz)
target_link_libraries(xrpl_core PUBLIC date::date-tz)
endif()
# BEAST_ENHANCED_LOGGING: enable for Debug builds OR when explicitly requested
# Uses generator expression so it works with multi-config generators (Xcode, VS, Ninja Multi-Config)
target_compile_definitions(xrpl_core PUBLIC
$<$<OR:$<CONFIG:Debug>,$<BOOL:${BEAST_ENHANCED_LOGGING}>>:BEAST_ENHANCED_LOGGING=1>
)
#[=================================[
main/core headers installation
#]=================================]
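The always-compiled source relies on the whole translation unit being #ifdef-guarded, so a build without BEAST_ENHANCED_LOGGING compiles it to an empty object file. A minimal sketch of that idiom (hypothetical helper, not the real file contents):

// sketch_enhanced_logging.cpp -- illustrative only
#ifdef BEAST_ENHANCED_LOGGING

#include <cstdio>

namespace beast {
namespace detail {

// Hypothetical helper: only exists when enhanced logging is compiled in.
void
print_location(char const* file, int line)
{
    std::printf("%s:%d\n", file, line);
}

}  // namespace detail
}  // namespace beast

#endif  // BEAST_ENHANCED_LOGGING
// When the macro is undefined, the preprocessor leaves nothing behind and the
// object file is effectively empty, so the sources list can include the file
// unconditionally.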


@@ -37,20 +37,11 @@ endif() #git
set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/")
add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}")
# BEAST_ENHANCED_LOGGING option - adds file:line numbers and formatting to logs
# Default to ON for Debug builds, OFF for Release
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" ON)
else()
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" OFF)
endif()
if(BEAST_ENHANCED_LOGGING)
add_definitions(-DBEAST_ENHANCED_LOGGING=1)
message(STATUS "Log line numbers enabled")
else()
message(STATUS "Log line numbers disabled")
endif()
# BEAST_ENHANCED_LOGGING - adds file:line numbers and formatting to logs
# Automatically enabled for Debug builds via generator expression
# Can be explicitly controlled with -DBEAST_ENHANCED_LOGGING=ON/OFF
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages (auto: Debug=ON, Release=OFF)" OFF)
message(STATUS "BEAST_ENHANCED_LOGGING option: ${BEAST_ENHANCED_LOGGING}")
if(thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)


@@ -5,6 +5,7 @@
#include <memory>
#include <optional>
#include <ostream>
#include <set>
#include <stack>
#include <string>
#include <string_view>
@@ -282,7 +283,8 @@ check_guard(
* might have unforeseen consequences, without also rolling back further
* changes that are fine.
*/
uint64_t rulesVersion = 0
uint64_t rulesVersion = 0,
std::set<int>* out_callees = nullptr
)
{
@@ -492,17 +494,27 @@ check_guard(
{
REQUIRE(1);
uint64_t callee_idx = LEB();
// disallow calling of user defined functions inside a hook
// record user-defined function calls if tracking is enabled
if (callee_idx > last_import_idx)
{
GUARDLOG(hook::log::CALL_ILLEGAL)
<< "GuardCheck "
<< "Hook calls a function outside of the whitelisted "
"imports "
<< "codesec: " << codesec << " hook byte offset: " << i
<< "\n";
if (out_callees != nullptr)
{
// record the callee for call graph analysis
out_callees->insert(callee_idx);
}
else
{
// if not tracking, maintain original behavior: reject
GUARDLOG(hook::log::CALL_ILLEGAL)
<< "GuardCheck "
<< "Hook calls a function outside of the whitelisted "
"imports "
<< "codesec: " << codesec << " hook byte offset: " << i
<< "\n";
return {};
return {};
}
}
// enforce guard call limit
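The record-or-reject branch above reduces to a small pattern: with no output set the original strict behavior is kept, with one the callee indices are collected for the call-graph pass. A self-contained sketch under illustrative names:

#include <iostream>
#include <optional>
#include <set>

// Stand-in for the branch above: a call target beyond last_import_idx is
// either recorded (tracking mode) or treated as an error (strict mode).
std::optional<int>
scan_calls(std::set<int> const& call_targets, int last_import_idx, std::set<int>* out_callees)
{
    int recorded = 0;
    for (int callee_idx : call_targets)
    {
        if (callee_idx > last_import_idx)
        {
            if (out_callees == nullptr)
                return std::nullopt;          // strict: reject, as before
            out_callees->insert(callee_idx);  // tracking: remember for later analysis
            ++recorded;
        }
    }
    return recorded;
}

int main()
{
    std::set<int> targets{2, 5, 9};
    std::cout << scan_calls(targets, 7, nullptr).has_value() << "\n";  // 0: rejected in strict mode
    std::set<int> callees;
    std::cout << *scan_calls(targets, 7, &callees) << "\n";            // 1: callee 9 recorded
}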
@@ -837,6 +849,42 @@ validateGuards(
*/
uint64_t rulesVersion = 0)
{
// Structure to track function call graph information
struct FunctionInfo
{
int func_idx;
std::set<int> callees; // functions this function calls
std::set<int> callers; // functions that call this function
bool has_loops; // whether this function contains loops
uint64_t local_wce; // local worst-case execution count
uint64_t total_wce; // total WCE including callees
bool wce_calculated; // whether total_wce has been computed
bool in_calculation; // for cycle detection in WCE calculation
FunctionInfo()
: func_idx(-1)
, has_loops(false)
, local_wce(0)
, total_wce(0)
, wce_calculated(false)
, in_calculation(false)
{
}
FunctionInfo(int idx, uint64_t local_wce_val, bool has_loops_val)
: func_idx(idx)
, has_loops(has_loops_val)
, local_wce(local_wce_val)
, total_wce(0)
, wce_calculated(false)
, in_calculation(false)
{
}
};
// Call graph: maps function index to its information
std::map<int, FunctionInfo> call_graph;
uint64_t byteCount = wasm.size();
// 63 bytes is the smallest possible valid hook wasm
@@ -1176,6 +1224,12 @@ validateGuards(
if (DEBUG_GUARD)
printf("Function map: func %d -> type %d\n", j, type_idx);
func_type_map[j] = type_idx;
// Step 4: Initialize FunctionInfo for each user-defined function.
// func_idx starts from last_import_number + 1
int actual_func_idx = last_import_number + 1 + j;
call_graph[actual_func_idx] = FunctionInfo();
call_graph[actual_func_idx].func_idx = actual_func_idx;
}
}
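In isolation, the bookkeeping above (together with the later Step 5 linking) just maps post-import function indices to their relationships. A toy sketch with made-up indices:

#include <iostream>
#include <map>
#include <set>

// Simplified stand-in for FunctionInfo: only the two relationship sets.
struct Info
{
    std::set<int> callees;
    std::set<int> callers;
};

int main()
{
    int const last_import_number = 7;  // toy value: imports occupy indices 0..7
    std::map<int, Info> call_graph;

    // Step 4: one entry per user-defined function, numbered after the imports.
    for (int j = 0; j < 3; ++j)
        call_graph[last_import_number + 1 + j] = Info{};

    // Step 5: function 8 was seen calling 9 and 10; record both directions.
    call_graph[8].callees = {9, 10};
    for (int callee : call_graph[8].callees)
        call_graph[callee].callers.insert(8);

    std::cout << "function 9 is called by " << *call_graph[9].callers.begin() << "\n";  // 8
}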
@@ -1217,9 +1271,6 @@ validateGuards(
return {};
}
int64_t maxInstrCountHook = 0;
int64_t maxInstrCountCbak = 0;
// second pass... where we check all the guard function calls follow the
// guard rules minimal other validation in this pass because first pass
// caught most of it
@@ -1253,6 +1304,7 @@ validateGuards(
std::optional<
std::reference_wrapper<std::vector<uint8_t> const>>
first_signature;
bool helper_function = false;
if (auto const& usage = import_type_map.find(j);
usage != import_type_map.end())
{
@@ -1288,7 +1340,7 @@ validateGuards(
}
}
}
else if (j == hook_type_idx)
else if (j == hook_type_idx) // hook() or cbak() function type
{
// pass
}
@@ -1301,7 +1353,8 @@ validateGuards(
<< "Codesec: " << section_type << " "
<< "Local: " << j << " "
<< "Offset: " << i << "\n";
return {};
// return {};
helper_function = true;
}
int param_count = parseLeb128(wasm, i, &i);
@@ -1318,12 +1371,19 @@ validateGuards(
return {};
}
}
else if (helper_function)
{
// pass
}
else if (param_count != (*first_signature).get().size() - 1)
{
GUARDLOG(hook::log::FUNC_TYPE_INVALID)
<< "Malformed transaction. "
<< "Hook API: " << *first_name
<< " has the wrong number of parameters.\n";
<< " has the wrong number of parameters.\n"
<< "param_count: " << param_count << " "
<< "first_signature: "
<< (*first_signature).get().size() - 1 << "\n";
return {};
}
@@ -1370,6 +1430,10 @@ validateGuards(
return {};
}
}
else if (helper_function)
{
// pass
}
else if ((*first_signature).get()[k + 1] != param_type)
{
GUARDLOG(hook::log::FUNC_PARAM_INVALID)
@@ -1446,6 +1510,10 @@ validateGuards(
return {};
}
}
else if (helper_function)
{
// pass
}
else if ((*first_signature).get()[0] != result_type)
{
GUARDLOG(hook::log::FUNC_RETURN_INVALID)
@@ -1497,6 +1565,17 @@ validateGuards(
// execution to here means we are up to the actual expr for the
// codesec/function
// Step 5: Calculate actual function index and prepare callees
// tracking
int actual_func_idx = last_import_number + 1 + j;
std::set<int>* out_callees_ptr = nullptr;
// Only track callees if this function is in the call_graph
if (call_graph.find(actual_func_idx) != call_graph.end())
{
out_callees_ptr = &call_graph[actual_func_idx].callees;
}
auto valid = check_guard(
wasm,
j,
@@ -1506,33 +1585,188 @@ validateGuards(
last_import_number,
guardLog,
guardLogAccStr,
rulesVersion);
rulesVersion,
out_callees_ptr);
if (!valid)
return {};
if (hook_func_idx && *hook_func_idx == j)
maxInstrCountHook = *valid;
else if (cbak_func_idx && *cbak_func_idx == j)
maxInstrCountCbak = *valid;
else
// Step 5: Store local WCE and build bidirectional call
// relationships
if (call_graph.find(actual_func_idx) != call_graph.end())
{
if (DEBUG_GUARD)
printf(
"code section: %d not hook_func_idx: %d or "
"cbak_func_idx: %d\n",
j,
*hook_func_idx,
(cbak_func_idx ? *cbak_func_idx : -1));
// assert(false);
call_graph[actual_func_idx].local_wce = *valid;
// Build bidirectional relationships: for each callee, add
// this function as a caller
for (int callee_idx : call_graph[actual_func_idx].callees)
{
if (call_graph.find(callee_idx) != call_graph.end())
{
call_graph[callee_idx].callers.insert(
actual_func_idx);
}
}
}
// Note: We will calculate total WCE later after processing all
// functions
i = code_end;
}
}
i = next_section;
}
// execution to here means guards are installed correctly
// Step 6: Cycle detection using DFS
// Lambda function for DFS-based cycle detection
std::set<int> visited;
std::set<int> rec_stack;
std::function<bool(int)> detect_cycles_dfs = [&](int func_idx) -> bool {
if (rec_stack.find(func_idx) != rec_stack.end())
{
// Found a cycle: func_idx is already in the recursion stack
return true;
}
return std::pair<uint64_t, uint64_t>{maxInstrCountHook, maxInstrCountCbak};
if (visited.find(func_idx) != visited.end())
{
// Already visited and no cycle found from this node
return false;
}
visited.insert(func_idx);
rec_stack.insert(func_idx);
// Check all callees
if (call_graph.find(func_idx) != call_graph.end())
{
for (int callee_idx : call_graph[func_idx].callees)
{
if (detect_cycles_dfs(callee_idx))
{
return true;
}
}
}
rec_stack.erase(func_idx);
return false;
};
// Run cycle detection on all user-defined functions
for (const auto& [func_idx, func_info] : call_graph)
{
if (detect_cycles_dfs(func_idx))
{
GUARDLOG(hook::log::CALL_ILLEGAL)
<< "GuardCheck: Recursive function calls detected. "
<< "Hooks cannot contain recursive or mutually recursive "
"functions.\n";
return {};
}
}
// Step 7: Calculate total WCE for each function using bottom-up approach
// Lambda function for recursive WCE calculation with memoization
std::function<uint64_t(int)> calculate_function_wce =
[&](int func_idx) -> uint64_t {
// Check if function exists in call graph
if (call_graph.find(func_idx) == call_graph.end())
{
// This is an imported function, WCE = 0 (already accounted for)
return 0;
}
FunctionInfo& func_info = call_graph[func_idx];
// If already calculated, return cached result
if (func_info.wce_calculated)
{
return func_info.total_wce;
}
// Detect circular dependency in WCE calculation (should not happen
// after cycle detection)
if (func_info.in_calculation)
{
GUARDLOG(hook::log::CALL_ILLEGAL)
<< "GuardCheck: Internal error - circular dependency detected "
"during WCE calculation.\n";
return 0xFFFFFFFFU; // Return large value to trigger overflow error
}
func_info.in_calculation = true;
// Start with local WCE
uint64_t total = func_info.local_wce;
// Add WCE of all callees
for (int callee_idx : func_info.callees)
{
uint64_t callee_wce = calculate_function_wce(callee_idx);
// Check for overflow
if (total > 0xFFFFU || callee_wce > 0xFFFFU ||
(total + callee_wce) > 0xFFFFU)
{
func_info.in_calculation = false;
return 0xFFFFFFFFU; // Signal overflow
}
total += callee_wce;
}
func_info.total_wce = total;
func_info.wce_calculated = true;
func_info.in_calculation = false;
return total;
};
// Calculate WCE for hook and cbak functions
int64_t hook_wce_actual = 0;
int64_t cbak_wce_actual = 0;
if (hook_func_idx)
{
int actual_hook_idx = last_import_number + 1 + *hook_func_idx;
hook_wce_actual = calculate_function_wce(actual_hook_idx);
if (hook_wce_actual >= 0xFFFFU)
{
GUARDLOG(hook::log::INSTRUCTION_EXCESS)
<< "GuardCheck: hook() function exceeds maximum instruction "
"count (65535). "
<< "Total WCE including called functions: " << hook_wce_actual
<< "\n";
return {};
}
if (DEBUG_GUARD)
printf("hook() total WCE: %ld\n", hook_wce_actual);
}
if (cbak_func_idx)
{
int actual_cbak_idx = last_import_number + 1 + *cbak_func_idx;
cbak_wce_actual = calculate_function_wce(actual_cbak_idx);
if (cbak_wce_actual >= 0xFFFFU)
{
GUARDLOG(hook::log::INSTRUCTION_EXCESS)
<< "GuardCheck: cbak() function exceeds maximum instruction "
"count (65535). "
<< "Total WCE including called functions: " << cbak_wce_actual
<< "\n";
return {};
}
if (DEBUG_GUARD)
printf("cbak() total WCE: %ld\n", cbak_wce_actual);
}
// execution to here means guards are installed correctly and WCE is within
// limits
return std::pair<uint64_t, uint64_t>{hook_wce_actual, cbak_wce_actual};
}
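Taken together, the new steps build a callee graph per function, reject any cycle via DFS, then sum worst-case execution counts bottom-up with memoization. A self-contained sketch of that flow on a toy graph (simplified types; the 0xFFFF overflow ceiling is noted but not enforced here):

#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <set>

struct Node
{
    uint64_t local_wce = 0;       // worst-case count of the function body itself
    std::set<int> callees;        // user-defined functions it calls
    uint64_t total_wce = 0;       // memoized result: local + all callees
    bool wce_calculated = false;
};

int main()
{
    // Toy call graph: hook(0) calls helper(1) and helper(2); helper(1) calls helper(2).
    std::map<int, Node> g;
    g[0] = {100, {1, 2}};
    g[1] = {40, {2}};
    g[2] = {10, {}};

    // Cycle detection with an explicit recursion stack, mirroring detect_cycles_dfs.
    std::set<int> visited, rec_stack;
    std::function<bool(int)> has_cycle = [&](int f) -> bool {
        if (rec_stack.count(f))
            return true;   // back edge: recursion or mutual recursion
        if (visited.count(f))
            return false;  // already explored, no cycle through here
        visited.insert(f);
        rec_stack.insert(f);
        for (int c : g[f].callees)
            if (has_cycle(c))
                return true;
        rec_stack.erase(f);
        return false;
    };
    for (auto const& entry : g)
        if (has_cycle(entry.first))
        {
            std::cout << "recursive calls detected, reject\n";
            return 1;
        }

    // Bottom-up WCE with memoization, mirroring calculate_function_wce.
    std::function<uint64_t(int)> wce = [&](int f) -> uint64_t {
        Node& n = g[f];
        if (n.wce_calculated)
            return n.total_wce;
        uint64_t total = n.local_wce;
        for (int c : n.callees)
            total += wce(c);  // the real code also checks the 0xFFFF ceiling here
        n.total_wce = total;
        n.wce_calculated = true;
        return total;
    };
    std::cout << "hook() total WCE: " << wce(0) << "\n";  // 100 + 40 + 10 + 10 = 160
}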


@@ -471,6 +471,10 @@ ManifestCache::applyManifest(Manifest m)
auto masterKey = m.masterKey;
map_.emplace(std::move(masterKey), std::move(m));
// Increment sequence to invalidate cached manifest messages
seq_++;
return ManifestDisposition::accepted;
}
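A minimal sketch of the invalidation this sequence bump enables, assuming a consumer that caches an expensive-to-build message together with the sequence it was built at (illustrative names, not the actual rippled API):

#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical consumer-side cache keyed by the manifest cache sequence.
struct CachedManifestMessage
{
    std::uint32_t builtAtSeq = 0;
    std::string message;

    std::string const&
    get(std::uint32_t currentSeq)
    {
        if (message.empty() || builtAtSeq != currentSeq)
        {
            // Rebuild: the stored copy predates the last accepted applyManifest().
            message = "serialized manifests @seq " + std::to_string(currentSeq);
            builtAtSeq = currentSeq;
        }
        return message;
    }
};

int main()
{
    CachedManifestMessage cache;
    std::uint32_t seq = 1;
    std::cout << cache.get(seq) << "\n";  // builds
    std::cout << cache.get(seq) << "\n";  // reuses cached copy
    ++seq;                                // applyManifest() accepted a manifest: seq_++
    std::cout << cache.get(seq) << "\n";  // detects the stale cache and rebuilds
}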


@@ -360,7 +360,8 @@ Logs::format(
if (!partition.empty())
{
#ifdef BEAST_ENHANCED_LOGGING
output += beast::detail::get_log_highlight_color();
if (beast::detail::should_log_use_colors())
output += beast::detail::get_log_highlight_color();
#endif
output += partition + ":";
}
@@ -392,7 +393,8 @@ Logs::format(
}
#ifdef BEAST_ENHANCED_LOGGING
output += "\033[0m";
if (beast::detail::should_log_use_colors())
output += "\033[0m";
#endif
output += message;
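The color writes are now gated on should_log_use_colors(); one plausible shape for such a check, honoring NO_COLOR and only coloring interactive terminals, is sketched below as an assumption rather than the actual implementation:

#include <cstdlib>
#include <unistd.h>  // isatty, STDERR_FILENO (POSIX)

namespace example {

// Assumption: color only when logging to an interactive terminal, and let
// the conventional NO_COLOR environment variable switch it off entirely.
inline bool
should_log_use_colors()
{
    static bool const useColors = [] {
        if (std::getenv("NO_COLOR") != nullptr)
            return false;
        return isatty(STDERR_FILENO) != 0;
    }();
    return useColors;
}

}  // namespace example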


@@ -41,6 +41,14 @@ get_log_highlight_color();
constexpr const char*
strip_source_root(const char* file)
{
// Handle relative paths from build/ directory (common with ccache)
// e.g., "../src/ripple/..." -> "ripple/..."
if (file && file[0] == '.' && file[1] == '.' && file[2] == '/' &&
file[3] == 's' && file[4] == 'r' && file[5] == 'c' && file[6] == '/')
{
return file + 7; // skip "../src/"
}
#ifdef SOURCE_ROOT_PATH
constexpr const char* sourceRoot = SOURCE_ROOT_PATH;
constexpr auto strlen_constexpr = [](const char* s) constexpr
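What the new branch buys, checked in isolation: the hypothetical strip_build_relative below copies only the "../src/" case (the real strip_source_root then falls through to the SOURCE_ROOT_PATH handling shown above):

// Self-contained check of the relative-path case added above.
constexpr char const*
strip_build_relative(char const* file)
{
    if (file && file[0] == '.' && file[1] == '.' && file[2] == '/' &&
        file[3] == 's' && file[4] == 'r' && file[5] == 'c' && file[6] == '/')
        return file + 7;  // skip "../src/"
    return file;
}

constexpr bool
same_str(char const* a, char const* b)
{
    while (*a && *a == *b)
    {
        ++a;
        ++b;
    }
    return *a == *b;
}

// Paths emitted by a build from a build/ subdirectory lose the "../src/" prefix...
static_assert(same_str(
    strip_build_relative("../src/ripple/basics/Log.cpp"), "ripple/basics/Log.cpp"));
// ...and anything else is passed through unchanged.
static_assert(same_str(
    strip_build_relative("/abs/path/other.cpp"), "/abs/path/other.cpp"));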


@@ -17,6 +17,8 @@
*/
//==============================================================================
#ifdef BEAST_ENHANCED_LOGGING
#include <ripple/beast/utility/EnhancedLogging.h>
#include <cstdlib>
#include <cstring>
@@ -112,3 +114,5 @@ log_write_location_string(std::ostream& os, const char* file, int line)
} // namespace detail
} // namespace beast
#endif // BEAST_ENHANCED_LOGGING


@@ -155,14 +155,43 @@ Journal::ScopedStream::~ScopedStream()
#ifdef BEAST_ENHANCED_LOGGING
// Add suffix if location is enabled
if (file_ && detail::should_show_location() && !s.empty() && s != "\n")
if (file_ && detail::should_show_location() && !s.empty())
{
std::ostringstream combined;
combined << s;
if (!s.empty() && s.back() != ' ')
combined << " ";
detail::log_write_location_string(combined, file_, line_);
s = combined.str();
// Single optimized scan from the end
size_t const lastNonWhitespace = s.find_last_not_of(" \n\r\t");
// Skip if message is only whitespace (e.g., just "\n" or " \n\n")
if (lastNonWhitespace != std::string::npos)
{
// Count only the trailing newlines (tiny range)
size_t trailingNewlines = 0;
for (size_t i = lastNonWhitespace + 1; i < s.length(); ++i)
{
if (s[i] == '\n')
++trailingNewlines;
}
// Build location string once
std::ostringstream locStream;
detail::log_write_location_string(locStream, file_, line_);
std::string const location = locStream.str();
// Pre-allocate exact size → zero reallocations
size_t const finalSize = lastNonWhitespace + 1 + 1 +
location.length() + trailingNewlines;
std::string result;
result.reserve(finalSize);
// Direct string ops (no ostringstream overhead)
result.append(s, 0, lastNonWhitespace + 1);
result.push_back(' ');
result += location;
if (trailingNewlines > 0)
result.append(trailingNewlines, '\n');
s = std::move(result); // Move, no copy
}
}
#endif
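The effect of the rewritten suffix logic on plain strings (the exact location text comes from log_write_location_string; "[file.cpp:42]" is only a stand-in):

#include <iostream>
#include <string>

// Standalone re-creation of the suffix placement: append the location after
// the last non-whitespace character and keep any trailing newlines behind it.
std::string
append_location(std::string s, std::string const& location)
{
    size_t const lastNonWhitespace = s.find_last_not_of(" \n\r\t");
    if (lastNonWhitespace == std::string::npos)
        return s;  // whitespace-only message: leave untouched

    size_t trailingNewlines = 0;
    for (size_t i = lastNonWhitespace + 1; i < s.length(); ++i)
        if (s[i] == '\n')
            ++trailingNewlines;

    std::string result;
    result.reserve(lastNonWhitespace + 2 + location.length() + trailingNewlines);
    result.append(s, 0, lastNonWhitespace + 1);
    result.push_back(' ');
    result += location;
    result.append(trailingNewlines, '\n');
    return result;
}

int main()
{
    std::cout << append_location("Applying ledger 123\n", "[file.cpp:42]");
    // -> "Applying ledger 123 [file.cpp:42]\n"  (location lands before the newline)
    std::cout << append_location("\n", "[file.cpp:42]");
    // -> "\n"  (whitespace-only messages are not decorated)
}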


@@ -484,44 +484,61 @@ OverlayImpl::start()
m_peerFinder->setConfig(config);
m_peerFinder->start();
auto addIps = [&](std::vector<std::string> bootstrapIps) -> void {
auto addIps = [this](std::vector<std::string> ips, bool fixed) {
beast::Journal const& j = app_.journal("Overlay");
for (auto& ip : bootstrapIps)
for (auto& ip : ips)
{
std::size_t pos = ip.find('#');
if (pos != std::string::npos)
ip.erase(pos);
JLOG(j.trace()) << "Found boostrap IP: " << ip;
JLOG(j.trace())
<< "Found " << (fixed ? "fixed" : "bootstrap") << " IP: " << ip;
}
m_resolver.resolve(
bootstrapIps,
[&](std::string const& name,
ips,
[this, fixed](
std::string const& name,
std::vector<beast::IP::Endpoint> const& addresses) {
std::vector<std::string> ips;
ips.reserve(addresses.size());
beast::Journal const& j = app_.journal("Overlay");
std::string const base("config: ");
std::vector<beast::IP::Endpoint> eps;
eps.reserve(addresses.size());
for (auto const& addr : addresses)
{
std::string addrStr = addr.port() == 0
? to_string(addr.at_port(DEFAULT_PEER_PORT))
: to_string(addr);
JLOG(j.trace()) << "Parsed boostrap IP: " << addrStr;
ips.push_back(addrStr);
auto ep = addr.port() == 0 ? addr.at_port(DEFAULT_PEER_PORT)
: addr;
JLOG(j.trace())
<< "Parsed " << (fixed ? "fixed" : "bootstrap")
<< " IP: " << ep;
eps.push_back(ep);
}
std::string const base("config: ");
if (!ips.empty())
m_peerFinder->addFallbackStrings(base + name, ips);
if (eps.empty())
return;
if (fixed)
{
m_peerFinder->addFixedPeer(base + name, eps);
}
else
{
std::vector<std::string> strs;
strs.reserve(eps.size());
for (auto const& ep : eps)
strs.push_back(to_string(ep));
m_peerFinder->addFallbackStrings(base + name, strs);
}
});
};
if (!app_.config().IPS.empty())
addIps(app_.config().IPS);
addIps(app_.config().IPS, false);
if (!app_.config().IPS_FIXED.empty())
addIps(app_.config().IPS_FIXED);
addIps(app_.config().IPS_FIXED, true);
auto const timer = std::make_shared<Timer>(*this);
std::lock_guard lock(mutex_);

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -58,8 +58,21 @@ cat $INPUT_FILE | tr '\n' '\f' |
then
echo '#include "api.h"' > "$WASM_DIR/test-$COUNTER-gen.c"
tr '\f' '\n' <<< $line >> "$WASM_DIR/test-$COUNTER-gen.c"
DECLARED="`tr '\f' '\n' <<< $line | grep -E '(extern|define) ' | grep -Eo '[a-z\-\_]+ *\(' | grep -v 'sizeof' | sed -E 's/[^a-z\-\_]//g' | sort | uniq`"
USED="`tr '\f' '\n' <<< $line | grep -vE '(extern|define) ' | grep -Eo '[a-z\-\_]+\(' | grep -v 'sizeof' | sed -E 's/[^a-z\-\_]//g' | grep -vE '^(hook|cbak)' | sort | uniq`"
DECLARED="`tr '\f' '\n' <<< $line \
| grep -E '(extern|static|define) ' \
| grep -Eo '[a-z\-\_]+ *\(' \
| grep -v 'sizeof' \
| sed -E 's/[^a-z\-\_]//g' \
| grep -vE '^__attribute__$' \
| sort | uniq`"
USED="`tr '\f' '\n' <<< $line \
| grep -vE '(extern|static|define) ' \
| grep -Eo '[a-z\-\_]+\(' \
| grep -v 'sizeof' \
| sed -E 's/[^a-z\-\_]//g' \
| grep -vE '^(__attribute__|hook|cbak)$' \
| sort | uniq`"
ONCE="`echo $DECLARED $USED | tr ' ' '\n' | sort | uniq -c | grep '1 ' | sed -E 's/^ *1 //g'`"
FILTER="`echo $DECLARED | tr ' ' '|' | sed -E 's/\|$//g'`"
UNDECL="`echo $ONCE | grep -v -E $FILTER 2>/dev/null || echo ''`"
@@ -69,7 +82,7 @@ cat $INPUT_FILE | tr '\n' '\f' |
echo "$line"
exit 1
fi
wasmcc -x c /dev/stdin -o /dev/stdout -O2 -Wl,--allow-undefined <<< "`tr '\f' '\n' <<< $line`" |
wasmcc -x c /dev/stdin -o /dev/stdout -O2 -Wl,--allow-undefined,--export=hook,--export=cbak <<< "`tr '\f' '\n' <<< $line`" |
hook-cleaner - - 2>/dev/null |
xxd -p -u -c 10 |
sed -E 's/../0x&U,/g' | sed -E 's/^/ /g' >> $OUTPUT_FILE