Compare commits

..

14 Commits

Author SHA1 Message Date
Nicholas Dudfield
8e2c69deb2 Scope hook test include path to external sources 2026-04-01 12:29:25 +07:00
Nicholas Dudfield
ff763a500c feat: log transform for test output (r-address → Account(name), prefix)
- Log.h: add setTransform/applyTransform on Logs for message rewriting
- SuiteJournal.h: wire transform into SuiteJournalSink so test output
  goes through it (previously bypassed Logs::write entirely)
- Env.h: pass Logs* to SuiteJournalSink
- TestEnv.h: add setPrefix() for per-phase log labels, prepend prefix
  in transform

usage:
  auto env = makeEnv(features);
  auto const& alice = env.account("alice");
  env.setPrefix("deposit phase");
  // logs: TRC:HooksTrace [deposit phase] HookTrace[Account(alice)-...]: ...

  TESTENV_LOGGING="HooksTrace=trace,View=debug"
2026-03-31 17:45:44 +07:00
Nicholas Dudfield
a605aec57a chore: remove unused SuiteLogsWithOverrides.h 2026-03-31 16:44:26 +07:00
Nicholas Dudfield
bfcbbc3c5e feat: migrate coverage from sancov to hookz __on_source_line
replace sancov-based coverage instrumentation with hookz's DWARF-based
__on_source_line(line, col) approach. line/col arrive as direct arguments
so no post-processing symbolication step is needed.

- Guard.h: allow result_count == 0 for void-returning whitelisted imports
- Enum.h: replace sancov whitelist entries with __on_source_line
- applyHook.h: replace sancov callbacks with onSourceLine, emit line:col
- SetHook.cpp: re-enable guard validation (was disabled for sancov testing)
- CMake: use hookz build-test-hooks, add HOOKS_FORCE_RECOMPILE support
- remove obsolete HookCoverage sancov test files
2026-03-31 16:38:28 +07:00
Nicholas Dudfield
d782f8cab4 feat: snapshot cmake change 2026-03-31 13:11:17 +07:00
Nicholas Dudfield
8a61dd44e0 feat: chorse: 2026-03-27 22:54:03 +07:00
Nicholas Dudfield
a8ca62a148 feat: add TestEnv with named accounts, log transform, and env-var logging
TestEnv wraps Env with:
- account("name"): auto-registers r-address → Account(name) in logs
- TESTENV_LOGGING env var: "HooksTrace=trace,View=debug" sets
  per-partition log levels without code changes
2026-03-27 22:19:13 +07:00
Nicholas Dudfield
b7aeff95a9 feat: add log transform to Logs for test-time message rewriting
Logs::setTransform(fn) installs a function that transforms every log
message before output. Useful in tests to replace raw r-addresses
with human-readable account names.

Usage:
  env.app().logs().setTransform([&](std::string const& text) {
      std::string out = text;
      // replace rG1QQv2... with Account(alice)
      boost::algorithm::replace_all(out, toBase58(alice.id()), "Account(alice)");
      return out;
  });
  // Pass nullptr to clear:
  env.app().logs().setTransform(nullptr);
2026-03-27 21:59:31 +07:00
tequ
b880c80c2b Fix BEAST_ENHANCED_LOGGING not working and restore original behavior 2026-03-27 21:27:38 +07:00
Nicholas Dudfield
8666cdfb71 fix: remove stdout duplicate from StderrJournalSink 2026-03-27 20:53:22 +07:00
Nicholas Dudfield
6d2a0b4e8b feat: also write overridden journal output to stdout with prefix 2026-03-27 20:43:47 +07:00
Nicholas Dudfield
739ebfaba4 rename: HooksApi journal → HooksTrace 2026-03-27 20:28:38 +07:00
Nicholas Dudfield
65166a9329 feat: route hook trace output to dedicated HooksApi journal
- Macro.h: add `jh` journal for HooksApi partition in HOOK_SETUP()
- applyHook.cpp: trace, trace_num, trace_float now use jh + JLOG macro
  for line numbers and separate partition filtering
- SuiteLogsWithOverrides.h: per-partition severity overrides for tests

Usage in tests:
  Env env{*this, envconfig(), features,
      std::make_unique<SuiteLogsWithOverrides>(*this,
          SuiteLogsWithOverrides::Overrides{{"HooksApi", Sev::kTrace}})};
2026-03-27 20:10:56 +07:00
Nicholas Dudfield
ca469b5d22 feat: wasm hook coverage instrumentation support
- Enum.h: add sancov callbacks to import whitelist with void_t return
- applyHook.h: sancov host callbacks (trace guard + init), global
  coverage accumulator with label support, coverageReset/Hits/Dump API
- SetHook.cpp: bypass guard validation for coverage-instrumented hooks
- RippledCore.cmake: HOOKS_TEST_DIR, HOOKS_C_DIR, HOOKS_COVERAGE,
  HOOKS_TEST_ONLY env vars for external hook test compilation
2026-03-27 19:32:43 +07:00
17 changed files with 660 additions and 337 deletions

View File

@@ -10,7 +10,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Check levelization
run: python Builds/levelization/levelization.py
run: Builds/levelization/levelization.sh
- name: Check for differences
id: assert
run: |
@@ -40,7 +40,7 @@ jobs:
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run 'python Builds/levelization/levelization.py' in your repo,
2. Run './Builds/levelization/levelization.sh' in your repo,
commit, and push.
See Builds/levelization/README.md for more info.

3
.gitignore vendored
View File

@@ -53,9 +53,6 @@ Builds/levelization/results/paths.txt
Builds/levelization/results/includes/
Builds/levelization/results/includedby/
# Python
__pycache__
# Ignore tmp directory.
tmp

View File

@@ -50,7 +50,7 @@ that `test` code should *never* be included in `ripple` code.)
## Validation
The [levelization.py](levelization.py) script takes no parameters,
The [levelization.sh](levelization.sh) script takes no parameters,
reads no environment variables, and can be run from any directory,
as long as it is in the expected location in the rippled repo.
It can be run at any time from within a checked out repo, and will
@@ -84,7 +84,7 @@ It generates many files of [results](results):
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or
done anything else to improve levelization, run `levelization.py`,
done anything else to improve levelization, run `levelization.sh`,
and commit the updated results.
The `loops.txt` and `ordering.txt` files relate the modules
@@ -108,7 +108,7 @@ The committed files hide the detailed values intentionally, to
prevent false alarms and merging issues, and because it's easy to
get those details locally.
1. Run `levelization.py`
1. Run `levelization.sh`
2. Grep the modules in `paths.txt`.
* For example, if a cycle is found `A ~= B`, simply `grep -w
A Builds/levelization/results/paths.txt | grep -w B`

View File

@@ -1,283 +0,0 @@
#!/usr/bin/env python3
"""
Usage: levelization.py
This script takes no parameters, and can be called from any directory in the file system.
"""
import os
import re
import sys
from collections import defaultdict
from pathlib import Path
# Compile regex patterns once at module level
# Matches an #include directive whose path contains a '/' and names a .h file.
INCLUDE_PATTERN = re.compile(r"^\s*#include.*/.*\.h")
# Captures the path between <...> or "..." in an #include directive.
INCLUDE_PATH_PATTERN = re.compile(r'[<"]([^>"]+)[>"]')
def dictionary_sort_key(s):
    """
    Build a sort key emulating `sort -d` (dictionary order), which
    compares using only blanks and alphanumeric characters.
    """
    kept = [ch for ch in s if ch.isalnum() or ch.isspace()]
    return "".join(kept)
def get_level(file_path):
    """
    Reduce a file path to its "level": the second and third directory
    components (bash equivalent: cut -d/ -f 2,3), joined with '.'.

    Examples:
        src/ripple/app/main.cpp -> ripple.app
        src/test/app/Import_test.cpp -> test.app
    """
    segments = file_path.split("/")
    if len(segments) >= 3:
        level = "/".join(segments[1:3])
    elif len(segments) >= 2:
        level = segments[1] + "/toplevel"
    else:
        level = file_path
    # A dot in the last component means it's a filename, not a directory;
    # collapse it to the "toplevel" label (workaround for `sort`
    # inconsistencies between different utility versions).
    tail = level.split("/")[-1]
    if "." in tail:
        level = level.rsplit("/", 1)[0] + "/toplevel"
    return level.replace("/", ".")
def extract_include_level(include_line):
    """
    Pull the include path out of an #include directive and reduce it to
    its first two directory components (bash equivalent: cut -d/ -f 1,2),
    joined with '.'.

    Examples:
        #include <ripple/basics/base_uint.h> -> ripple.basics
        #include "ripple/app/main/Application.h" -> ripple.app

    Returns None when no <...> or "..." path is present.
    """
    # Same pattern as the module-level INCLUDE_PATH_PATTERN constant.
    match = re.search(r'[<"]([^>"]+)[>"]', include_line)
    if match is None:
        return None
    segments = match.group(1).split("/")
    if len(segments) >= 2:
        include_level = "/".join(segments[:2])
    else:
        include_level = match.group(1)
    # A filename as the last component collapses to the "toplevel" label.
    if "." in include_level.split("/")[-1]:
        include_level = include_level.rsplit("/", 1)[0] + "/toplevel"
    return include_level.replace("/", ".")
def find_repository_directories(start_path, depth_limit=10):
    """
    Locate the repository root by walking up from start_path looking for
    a directory that contains 'src' and/or 'include'.

    Returns (root, scan_dirs) where scan_dirs lists the existing ones of
    root/src and root/include, in that order. Raises RuntimeError when no
    such ancestor is found within depth_limit steps.
    """
    current = start_path.resolve()
    for _ in range(depth_limit):
        candidates = [current / "src", current / "include"]
        present = [d for d in candidates if d.exists()]
        if present:
            return current, present
        parent = current.parent
        if parent == current:
            # Reached the filesystem root.
            break
        current = parent
    raise RuntimeError(
        "Could not find repository root. "
        "Expected to find a directory containing 'src' and/or 'include' folders."
    )
def main():
"""
Entry point: scan the repository for #include directives, build the
levelization paths database under results/, split it into per-level
flat files, then report include loops and a partial ordering between
module levels (a Python port of the levelization.sh flow).
"""
script_dir = Path(__file__).parent.resolve()
os.chdir(script_dir)
# Clean up and create results directory.
results_dir = script_dir / "results"
if results_dir.exists():
import shutil
shutil.rmtree(results_dir)
results_dir.mkdir()
# Find the repository root.
try:
repo_root, scan_dirs = find_repository_directories(script_dir)
print(f"Found repository root: {repo_root}")
for scan_dir in scan_dirs:
print(f" Scanning: {scan_dir.relative_to(repo_root)}")
except RuntimeError as e:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
# Find all #include directives.
print("\nScanning for raw includes...")
raw_includes = []
rawincludes_file = results_dir / "rawincludes.txt"
# Stream matches to rawincludes.txt while also keeping them in memory
# for the counting passes below.
with open(rawincludes_file, "w", buffering=8192) as raw_f:
for dir_path in scan_dirs:
for file_path in dir_path.rglob("*"):
if not file_path.is_file():
continue
try:
rel_path_str = str(file_path.relative_to(repo_root))
with open(
file_path, "r", encoding="utf-8", errors="ignore", buffering=8192
) as f:
for line in f:
# Cheap substring pre-filter before the regex; boost
# includes are excluded from levelization entirely.
if "#include" not in line or "boost" in line:
continue
if INCLUDE_PATTERN.match(line):
line_stripped = line.strip()
entry = f"{rel_path_str}:{line_stripped}\n"
print(entry, end="")
raw_f.write(entry)
raw_includes.append((rel_path_str, line_stripped))
except Exception as e:
# Best-effort: unreadable files are reported but don't abort.
print(f"Error reading {file_path}: {e}", file=sys.stderr)
# Build levelization paths and count directly.
print("Build levelization paths")
path_counts = defaultdict(int)
for file_path, include_line in raw_includes:
include_level = extract_include_level(include_line)
if not include_level:
continue
level = get_level(file_path)
# Only cross-level includes are interesting.
if level != include_level:
path_counts[(level, include_level)] += 1
# Sort and deduplicate paths.
print("Sort and deduplicate paths")
sorted_items = sorted(
path_counts.items(),
key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
)
paths_file = results_dir / "paths.txt"
with open(paths_file, "w") as f:
for (level, include_level), count in sorted_items:
# Width-7 count column mimics `uniq -c` output formatting.
line = f"{count:7} {level} {include_level}\n"
print(line.rstrip())
f.write(line)
# Split into flat-file database.
print("Split into flat-file database")
includes_dir = results_dir / "includes"
includedby_dir = results_dir / "includedby"
includes_dir.mkdir()
includedby_dir.mkdir()
includes_data = defaultdict(list)
includedby_data = defaultdict(list)
for (level, include_level), count in sorted_items:
includes_data[level].append((include_level, count))
includedby_data[include_level].append((level, count))
for level in sorted(includes_data.keys(), key=dictionary_sort_key):
with open(includes_dir / level, "w") as f:
for include_level, count in includes_data[level]:
line = f"{include_level} {count}\n"
print(line.rstrip())
f.write(line)
for include_level in sorted(includedby_data.keys(), key=dictionary_sort_key):
with open(includedby_dir / include_level, "w") as f:
for level, count in includedby_data[include_level]:
line = f"{level} {count}\n"
print(line.rstrip())
f.write(line)
# Search for loops.
print("Search for loops")
loops_file = results_dir / "loops.txt"
ordering_file = results_dir / "ordering.txt"
# Pre-load all include files into memory for fast lookup.
includes_cache = {}
includes_lookup = {}
for include_file in sorted(includes_dir.iterdir(), key=lambda p: p.name):
if not include_file.is_file():
continue
includes_cache[include_file.name] = []
includes_lookup[include_file.name] = {}
with open(include_file, "r") as f:
for line in f:
parts = line.strip().split()
if len(parts) >= 2:
name, count = parts[0], int(parts[1])
includes_cache[include_file.name].append((name, count))
includes_lookup[include_file.name][name] = count
# Track unordered pairs so each loop is reported only once.
loops_found = set()
with open(loops_file, "w", buffering=8192) as loops_f, open(
ordering_file, "w", buffering=8192
) as ordering_f:
for source in sorted(includes_cache.keys()):
for include, include_freq in includes_cache[source]:
if include not in includes_lookup:
continue
source_freq = includes_lookup[include].get(source)
if source_freq is not None:
loop_key = tuple(sorted([source, include]))
if loop_key in loops_found:
continue
loops_found.add(loop_key)
loops_f.write(f"Loop: {source} {include}\n")
# Heuristic: a count gap > 3 suggests a dominant direction;
# close counts mean the two modules sit on the same level.
diff = include_freq - source_freq
if diff > 3:
loops_f.write(f" {source} > {include}\n\n")
elif diff < -3:
loops_f.write(f" {include} > {source}\n\n")
elif source_freq == include_freq:
loops_f.write(f" {include} == {source}\n\n")
else:
loops_f.write(f" {include} ~= {source}\n\n")
else:
# No reverse edge: a clean one-way ordering.
ordering_f.write(f"{source} > {include}\n")
# Print results.
print("\nOrdering:")
with open(ordering_file, "r") as f:
print(f.read(), end="")
print("\nLoops:")
with open(loops_file, "r") as f:
print(f.read(), end="")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,130 @@
#!/bin/bash
# Usage: levelization.sh
# This script takes no parameters, reads no environment variables,
# and can be run from any directory, as long as it is in the expected
# location in the repo.
pushd $( dirname $0 )
if [ -v PS1 ]
then
# if the shell is interactive, clean up any flotsam before analyzing
git clean -ix
fi
# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C
rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"
# Scan the repo (two levels up) for non-boost #include directives.
pushd ../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
grep -v boost | tee ${includes}
popd
pushd results
# rawincludes.txt lines look like "path:#include ...", so read splits
# fields on ':'. Save IFS to restore it afterwards.
oldifs=${IFS}
IFS=:
mkdir includes
mkdir includedby
echo Build levelization paths
exec 3< ${includes} # open rawincludes.txt for input
while read -r -u 3 file include
do
level=$( echo ${file} | cut -d/ -f 2,3 )
# If the "level" indicates a file, cut off the filename
if [[ "${level##*.}" != "${level}" ]]
then
# Use the "toplevel" label as a workaround for `sort`
# inconsistencies between different utility versions
level="$( dirname ${level} )/toplevel"
fi
level=$( echo ${level} | tr '/' '.' )
includelevel=$( echo ${include} | sed 's/.*["<]//; s/[">].*//' | \
cut -d/ -f 1,2 )
if [[ "${includelevel##*.}" != "${includelevel}" ]]
then
# Use the "toplevel" label as a workaround for `sort`
# inconsistencies between different utility versions
includelevel="$( dirname ${includelevel} )/toplevel"
fi
includelevel=$( echo ${includelevel} | tr '/' '.' )
# Only record cross-level includes.
if [[ "$level" != "$includelevel" ]]
then
echo $level $includelevel | tee -a paths.txt
fi
done
echo Sort and dedup paths
# -d: dictionary order (blanks/alphanumerics only); uniq -c prefixes
# each line with its occurrence count.
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
mv sortedpaths.txt paths.txt
exec 3>&- #close fd 3
IFS=${oldifs}
unset oldifs
echo Split into flat-file database
exec 4<paths.txt # open paths.txt for input
while read -r -u 4 count level include
do
echo ${include} ${count} | tee -a includes/${level}
echo ${level} ${count} | tee -a includedby/${include}
done
exec 4>&- #close fd 4
loops="$( pwd )/loops.txt"
ordering="$( pwd )/ordering.txt"
pushd includes
echo Search for loops
# Redirect stdout to a file
# (fd 4 saves the original stdout so it can be restored afterwards)
exec 4>&1
exec 1>"${loops}"
for source in *
do
if [[ -f "$source" ]]
then
exec 5<"${source}" # open for input
while read -r -u 5 include includefreq
do
if [[ -f $include ]]
then
# A loop exists when the included level's file also lists the
# source level. Skip pairs already reported in the other order.
if grep -q -w $source $include
then
if grep -q -w "Loop: $include $source" "${loops}"
then
continue
fi
sourcefreq=$( grep -w $source $include | cut -d\ -f2 )
echo "Loop: $source $include"
# If the counts are close, indicate that the two modules are
# on the same level, though they shouldn't be
if [[ $(( $includefreq - $sourcefreq )) -gt 3 ]]
then
echo -e " $source > $include\n"
elif [[ $(( $sourcefreq - $includefreq )) -gt 3 ]]
then
echo -e " $include > $source\n"
elif [[ $sourcefreq -eq $includefreq ]]
then
echo -e " $include == $source\n"
else
echo -e " $include ~= $source\n"
fi
else
echo "$source > $include" >> "${ordering}"
fi
fi
done
exec 5>&- #close fd 5
fi
done
exec 1>&4 #close fd 1
exec 4>&- #close fd 4
cat "${ordering}"
cat "${loops}"
popd
popd
popd

View File

@@ -68,6 +68,17 @@ target_link_libraries(xrpl.imports.main
$<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
)
# date-tz for enhanced logging (always linked, code is #ifdef guarded)
if(TARGET date::date-tz)
target_link_libraries(xrpl.imports.main INTERFACE date::date-tz)
endif()
# BEAST_ENHANCED_LOGGING: enable for Debug builds OR when explicitly requested
# Uses generator expression so it works with multi-config generators (Xcode, VS, Ninja Multi-Config)
target_compile_definitions(xrpl.imports.main INTERFACE
$<$<OR:$<CONFIG:Debug>,$<BOOL:${BEAST_ENHANCED_LOGGING}>>:BEAST_ENHANCED_LOGGING=1>
)
include(add_module)
include(target_link_modules)
@@ -167,7 +178,108 @@ if(xrpld)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
)
if(HOOKS_TEST_ONLY OR DEFINED ENV{HOOKS_TEST_ONLY})
# Keep test infra but drop the individual *_test.cpp files
list(FILTER sources EXCLUDE REGEX "_test\\.cpp$")
message(STATUS "HOOKS_TEST_ONLY: excluded *_test.cpp from src/test/")
endif()
target_sources(rippled PRIVATE ${sources})
# Optional: include external hook test sources from another directory.
# Set via -DHOOKS_TEST_DIR=/path/to/tests or env HOOKS_TEST_DIR.
# Optionally set HOOKS_C_DIR to pass --hooks-c-dir args to the compiler
# (e.g. "tipbot=/path/to/hooks" — multiple values separated by ";").
#
# hookz build-test-hooks must be on PATH. It auto-compiles hooks referenced
# in each *_test.cpp and generates *_test_hooks.h next to the test file.
if(NOT HOOKS_TEST_DIR AND DEFINED ENV{HOOKS_TEST_DIR})
set(HOOKS_TEST_DIR $ENV{HOOKS_TEST_DIR})
endif()
if(NOT HOOKS_C_DIR AND DEFINED ENV{HOOKS_C_DIR})
set(HOOKS_C_DIR $ENV{HOOKS_C_DIR})
endif()
if(HOOKS_TEST_DIR AND EXISTS "${HOOKS_TEST_DIR}")
file(GLOB EXTERNAL_HOOK_TESTS CONFIGURE_DEPENDS
"${HOOKS_TEST_DIR}/*_test.cpp"
)
if(EXTERNAL_HOOK_TESTS)
# Build extra args for hookz build-test-hooks
set(_hooks_extra_args "")
set(_hooks_source_deps "")
if(HOOKS_C_DIR)
foreach(_dir ${HOOKS_C_DIR})
list(APPEND _hooks_extra_args "--hooks-c-dir" "${_dir}")
string(REGEX REPLACE "^[^=]+=" "" _hook_dir "${_dir}")
if(EXISTS "${_hook_dir}")
file(GLOB_RECURSE _hook_dir_deps CONFIGURE_DEPENDS
"${_hook_dir}/*.c"
"${_hook_dir}/*.h"
)
if(HOOKS_TEST_DIR)
list(FILTER _hook_dir_deps EXCLUDE REGEX "^${HOOKS_TEST_DIR}/")
endif()
list(APPEND _hooks_source_deps ${_hook_dir_deps})
endif()
endforeach()
list(REMOVE_DUPLICATES _hooks_source_deps)
endif()
if(HOOKS_COVERAGE OR DEFINED ENV{HOOKS_COVERAGE})
list(APPEND _hooks_extra_args "--hook-coverage")
message(STATUS "Hook coverage enabled: compiling hooks with hookz")
endif()
if(HOOKS_FORCE_RECOMPILE OR DEFINED ENV{HOOKS_FORCE_RECOMPILE})
list(APPEND _hooks_extra_args "--force-write" "--no-cache")
message(STATUS "Hook force recompile enabled (cache bypassed)")
endif()
# Run hookz build-test-hooks on each test file before compilation
foreach(_test_file ${EXTERNAL_HOOK_TESTS})
get_filename_component(_stem ${_test_file} NAME_WE)
set(_hooks_header "${HOOKS_TEST_DIR}/${_stem}_hooks.h")
if(HOOKS_FORCE_RECOMPILE OR DEFINED ENV{HOOKS_FORCE_RECOMPILE})
# Always run — no DEPENDS, no OUTPUT caching
add_custom_target(compile_hooks_${_stem} ALL
COMMAND hookz build-test-hooks "${_test_file}" ${_hooks_extra_args}
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
COMMENT "Compiling hooks for ${_stem} (forced)"
VERBATIM
)
list(APPEND EXTERNAL_HOOK_TARGETS compile_hooks_${_stem})
else()
add_custom_command(
OUTPUT "${_hooks_header}"
COMMAND hookz build-test-hooks "${_test_file}" ${_hooks_extra_args}
DEPENDS "${_test_file}" ${_hooks_source_deps}
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
COMMENT "Compiling hooks for ${_stem}"
VERBATIM
)
list(APPEND EXTERNAL_HOOK_HEADERS "${_hooks_header}")
endif()
endforeach()
# Ensure headers are generated before rippled compiles
if(HOOKS_FORCE_RECOMPILE OR DEFINED ENV{HOOKS_FORCE_RECOMPILE})
foreach(_tgt ${EXTERNAL_HOOK_TARGETS})
add_dependencies(rippled ${_tgt})
endforeach()
else()
add_custom_target(compile_external_hooks DEPENDS ${EXTERNAL_HOOK_HEADERS})
add_dependencies(rippled compile_external_hooks)
endif()
target_sources(rippled PRIVATE ${EXTERNAL_HOOK_TESTS})
# Keep the generated hook-header include path scoped to the external
# test sources so changing HOOKS_TEST_DIR doesn't invalidate the
# compile command for the rest of rippled.
set_property(
SOURCE ${EXTERNAL_HOOK_TESTS}
APPEND PROPERTY INCLUDE_DIRECTORIES "${HOOKS_TEST_DIR}"
)
message(STATUS "Including external hook tests from: ${HOOKS_TEST_DIR}")
endif()
endif()
endif()
target_link_libraries(rippled

View File

@@ -27,6 +27,7 @@
#include <fstream>
#include <map>
#include <memory>
#include <functional>
#include <mutex>
#include <utility>
@@ -165,6 +166,7 @@ private:
beast::severities::Severity thresh_;
File file_;
bool silent_ = false;
std::function<std::string(std::string const&)> transform_;
public:
Logs(beast::severities::Severity level);
@@ -203,6 +205,33 @@ public:
std::string const& text,
bool console);
/** Set a transform applied to every log message before output.
* Useful in tests to replace raw account IDs with human-readable names.
* Pass nullptr to clear.
*
* TODO: This is test-only infrastructure (used by TestEnv). Consider
* moving to SuiteLogs or a test-specific subclass if the Logs interface
* needs to stay clean for production.
*/
void
setTransform(std::function<std::string(std::string const&)> fn)
{
// mutex_ serializes assignment against concurrent setters/writers.
// NOTE(review): applyTransform() reads transform_ without this lock —
// confirm the transform is only installed/cleared while no other
// threads are logging (true for single-threaded test setup/teardown).
std::lock_guard lock(mutex_);
transform_ = std::move(fn);
}
/** Apply the current transform to text (or return as-is if none set). */
std::string const&
applyTransform(std::string const& text) const
{
if (!transform_)
return text;
// Store in thread_local to return a const ref
thread_local std::string buf;
buf = transform_(text);
return buf;
}
std::string
rotate();

View File

@@ -416,6 +416,7 @@ getImportWhitelist(Rules const& rules)
#define int64_t 0x7EU
#define int32_t 0x7FU
#define uint32_t 0x7FU
#define void_t 0x00U
#define HOOK_WRAP_PARAMS(...) __VA_ARGS__
@@ -427,11 +428,15 @@ getImportWhitelist(Rules const& rules)
#include "hook_api.macro"
// Coverage callback: void __on_source_line(uint32_t line, uint32_t col)
whitelist["__on_source_line"] = {void_t, uint32_t, uint32_t};
#undef HOOK_API_DEFINITION
#undef HOOK_WRAP_PARAMS
#undef int64_t
#undef int32_t
#undef uint32_t
#undef void_t
#pragma pop_macro("HOOK_API_DEFINITION")
return whitelist;

View File

@@ -1374,21 +1374,52 @@ validateGuards(
int result_count = parseLeb128(wasm, i, &i);
CHECK_SHORT_HOOK();
// this needs a reliable hook cleaner otherwise it will catch
// most compilers out
if (result_count != 1)
if (j == hook_type_idx)
{
GUARDLOG(hook::log::FUNC_RETURN_COUNT)
<< "Malformed transaction. "
<< "Hook declares a function type that returns fewer "
"or more than one value. "
<< "\n";
return {};
// hook/cbak must return exactly one value (i64)
if (result_count != 1)
{
GUARDLOG(hook::log::FUNC_RETURN_COUNT)
<< "Malformed transaction. "
<< "hook/cbak function type must return exactly "
"one value. "
<< "\n";
return {};
}
}
else if (first_signature)
{
// For whitelisted imports, check expected return count.
// void_t (0x00) means 0 return values.
uint8_t expected_return =
(*first_signature).get()[0];
int expected_result_count =
(expected_return == 0x00U) ? 0 : 1;
if (result_count != expected_result_count)
{
GUARDLOG(hook::log::FUNC_RETURN_COUNT)
<< "Malformed transaction. "
<< "Hook API: " << *first_name
<< " has wrong return count "
<< "(expected " << expected_result_count
<< ", got " << result_count << ")."
<< "\n";
return {};
}
}
else
{
if (result_count != 1)
{
GUARDLOG(hook::log::FUNC_RETURN_COUNT)
<< "Malformed transaction. "
<< "Hook declares a function type that returns "
"fewer or more than one value. "
<< "\n";
return {};
}
}
// this can only ever be 1 in production, but in testing it may
// also be 0 or >1 so for completeness this loop is here but can
// be taken out in prod
for (int k = 0; k < result_count; ++k)
{
int result_type = parseLeb128(wasm, i, &i);

View File

@@ -146,6 +146,7 @@
[[maybe_unused]] ApplyContext& applyCtx = hookCtx.applyCtx; \
[[maybe_unused]] auto& view = applyCtx.view(); \
[[maybe_unused]] auto j = applyCtx.app.journal("View"); \
[[maybe_unused]] auto jh = applyCtx.app.journal("HooksTrace"); \
[[maybe_unused]] WasmEdge_MemoryInstanceContext* memoryCtx = \
WasmEdge_CallingFrameGetMemoryInstance(&frameCtx, 0); \
[[maybe_unused]] unsigned char* memory = \

View File

@@ -196,9 +196,10 @@ Logs::write(
std::string const& text,
bool console)
{
std::string s;
format(s, text, level, partition);
std::lock_guard lock(mutex_);
std::string const& transformed = transform_ ? transform_(text) : text;
std::string s;
format(s, transformed, level, partition);
file_.writeln(s);
if (!silent_)
std::cerr << s << '\n';

View File

@@ -106,7 +106,8 @@ public:
std::string const& partition,
beast::severities::Severity threshold) override
{
return std::make_unique<SuiteJournalSink>(partition, threshold, suite_);
return std::make_unique<SuiteJournalSink>(
partition, threshold, suite_, this);
}
};

148
src/test/jtx/TestEnv.h Normal file
View File

@@ -0,0 +1,148 @@
#ifndef TEST_JTX_TESTENV_H_INCLUDED
#define TEST_JTX_TESTENV_H_INCLUDED
#include <test/jtx/Env.h>
#include <xrpl/basics/Log.h>
#include <xrpl/protocol/AccountID.h>
#include <cstdlib>
#include <cstring>
#include <map>
#include <sstream>
#include <string>
namespace ripple {
namespace test {
namespace jtx {
/**
* TestEnv wraps Env with:
* - Named account registry: env.account("alice")
* - Auto log transform: replaces r-addresses with Account(name) in log output
* - Env-var driven per-partition log levels via TESTENV_LOGGING
*
* Usage:
* TestEnv env{suite, features};
* auto const& alice = env.account("alice");
* auto const& bob = env.account("bob");
* env.fund(XRP(10000), alice, bob);
* // Logs now show Account(alice), Account(bob) instead of r-addresses
*
* Log levels via env var:
* TESTENV_LOGGING="HooksTrace=trace,View=debug"
*
* Valid levels: trace, debug, info, warning, error, fatal
*/
class TestEnv : public Env
{
// Named accounts created via account(); std::map nodes give stable
// references for the lifetime of the TestEnv.
std::map<std::string, Account> accounts_;
// Current log-line prefix, already wrapped as "[...] " (empty if unset).
std::string prefix_;
public:
TestEnv(beast::unit_test::suite& suite, FeatureBitset features)
: Env(suite, features)
{
installTransform();
applyLoggingEnvVar();
}
TestEnv(
beast::unit_test::suite& suite,
std::unique_ptr<Config> config,
FeatureBitset features,
std::unique_ptr<Logs> logs = nullptr,
beast::severities::Severity thresh = beast::severities::kError)
: Env(suite, std::move(config), features, std::move(logs), thresh)
{
installTransform();
applyLoggingEnvVar();
}
~TestEnv()
{
// The installed transform lambda captures `this`; clear it before this
// object's members are destroyed so no dangling callback remains.
app().logs().setTransform(nullptr);
}
/// Get or create a named account.
/// First call creates the Account; subsequent calls return the same one.
Account const&
account(std::string const& name)
{
auto [it, inserted] = accounts_.try_emplace(name, name);
return it->second;
}
/// Set a prefix that appears at the start of every log line.
/// Useful for visually separating test phases in trace output.
/// Pass empty string to clear.
/// NOTE(review): prefix_ is read by the log transform without
/// synchronization — confirm setPrefix is only called while no other
/// threads are logging.
void
setPrefix(std::string const& prefix)
{
prefix_ = prefix.empty() ? "" : "[" + prefix + "] ";
}
private:
// Map a TESTENV_LOGGING level token to a severity; unknown tokens fall
// back to kError.
static beast::severities::Severity
parseSeverity(std::string const& s)
{
if (s == "trace")
return beast::severities::kTrace;
if (s == "debug")
return beast::severities::kDebug;
if (s == "info")
return beast::severities::kInfo;
if (s == "warning")
return beast::severities::kWarning;
if (s == "error")
return beast::severities::kError;
if (s == "fatal")
return beast::severities::kFatal;
return beast::severities::kError;
}
// Read TESTENV_LOGGING and apply per-partition thresholds. Entries
// without '=' are silently skipped.
void
applyLoggingEnvVar()
{
// Parse TESTENV_LOGGING="Partition1=level,Partition2=level"
auto const* envVal = std::getenv("TESTENV_LOGGING");
if (!envVal || !envVal[0])
return;
std::istringstream ss(envVal);
std::string pair;
while (std::getline(ss, pair, ','))
{
auto eq = pair.find('=');
if (eq == std::string::npos)
continue;
auto partition = pair.substr(0, eq);
auto level = pair.substr(eq + 1);
app().logs().get(partition).threshold(parseSeverity(level));
}
}
// Install a Logs transform that prepends prefix_ and rewrites every
// registered account's r-address to "Account(name)". The lambda
// captures `this`, so the destructor must clear it (see ~TestEnv).
void
installTransform()
{
app().logs().setTransform([this](std::string const& text) {
std::string out = prefix_ + text;
for (auto const& [name, acc] : accounts_)
{
auto raddr = toBase58(acc.id());
std::string::size_type pos = 0;
std::string replacement = "Account(" + name + ")";
// Advance past each replacement so the inserted text is never
// rescanned.
while ((pos = out.find(raddr, pos)) != std::string::npos)
{
out.replace(pos, raddr.size(), replacement);
pos += replacement.size();
}
}
return out;
});
}
};
} // namespace jtx
} // namespace test
} // namespace ripple
#endif

View File

@@ -19,6 +19,7 @@
#ifndef TEST_UNIT_TEST_SUITE_JOURNAL_H
#define TEST_UNIT_TEST_SUITE_JOURNAL_H
#include <xrpl/basics/Log.h>
#include <xrpl/beast/unit_test.h>
#include <xrpl/beast/utility/Journal.h>
#include <mutex>
@@ -31,13 +32,18 @@ class SuiteJournalSink : public beast::Journal::Sink
{
std::string partition_;
beast::unit_test::suite& suite_;
Logs* logs_ = nullptr;
public:
// Construct a sink for one log partition. `logs` is optional: when
// non-null, writeAlways() routes messages through logs->applyTransform()
// before printing; when null, messages pass through unchanged.
SuiteJournalSink(
std::string const& partition,
beast::severities::Severity threshold,
beast::unit_test::suite& suite,
Logs* logs = nullptr)
: Sink(threshold, false)
, partition_(partition + " ")
, suite_(suite)
, logs_(logs)
{
}
@@ -97,11 +103,12 @@ SuiteJournalSink::writeAlways(
// Only write the string if the level at least equals the threshold.
if (level >= threshold())
{
std::string const& output = logs_ ? logs_->applyTransform(text) : text;
// std::endl flushes → sync() → str()/str("") race in shared buffer →
// crashes
static std::mutex log_mutex;
std::lock_guard lock(log_mutex);
suite_.log << s << partition_ << text << std::endl;
suite_.log << s << partition_ << output << std::endl;
}
}

View File

@@ -12,9 +12,11 @@
#include <xrpl/protocol/TER.h>
#include <xrpl/protocol/digest.h>
#include <any>
#include <fstream>
#include <memory>
#include <optional>
#include <queue>
#include <set>
#include <vector>
#include <wasmedge/wasmedge.h>
@@ -302,6 +304,130 @@ static WasmEdge_String hookFunctionName =
// see: lib/system/allocator.cpp
#define WasmEdge_kPageSize 65536ULL
// --- Coverage infrastructure ---
//
// Global coverage accumulator keyed by hook hash. Persists across all hook
// executions in the process. Each __on_source_line call records a (line, col)
// pair under the executing hook's hash.
//
// Test API:
// hook::coverageReset() — clear all accumulated data
// hook::coverageHits(hookHash) — get hits for a specific hook
// hook::coverageLabel(hash, label) — register a human-readable label
// hook::coverageDump(path) — write all data to a file
//
// The dump file format is:
// [label or hash]
// hits=<line:col>,<line:col>,...
struct CoverageData
{
// Packed (line << 16 | col) keys recorded for one hook — see
// onSourceLine() for the packing and coverageDump() for decoding.
std::set<uint32_t> hits{};
};
// Global accumulator — survives across HookContext lifetimes.
inline std::map<ripple::uint256, CoverageData>&
coverageMap()
{
    // Function-local static: constructed on first use, lives for the
    // remainder of the process.
    static std::map<ripple::uint256, CoverageData> instance;
    return instance;
}
// Hash → human-readable label mapping (e.g. hash → "file:tipbot/tip.c").
inline std::map<ripple::uint256, std::string>&
coverageLabels()
{
    // Function-local static: constructed on first use, lives for the
    // remainder of the process.
    static std::map<ripple::uint256, std::string> instance;
    return instance;
}
// Drop all accumulated coverage state: both the per-hook hit sets and
// the registered labels. Intended for test isolation between runs.
inline void
coverageReset()
{
    coverageLabels().clear();
    coverageMap().clear();
}
// Register (or replace) the human-readable label reported for hookHash
// in coverageDump output.
inline void
coverageLabel(ripple::uint256 const& hookHash, std::string const& label)
{
    coverageLabels().insert_or_assign(hookHash, label);
}
// Look up the recorded hits for one hook. Returns nullptr when no
// coverage has been recorded for hookHash; the pointer stays valid
// until the next coverageReset().
inline std::set<uint32_t> const*
coverageHits(ripple::uint256 const& hookHash)
{
    auto const& all = coverageMap();
    auto const found = all.find(hookHash);
    return found == all.end() ? nullptr : &found->second.hits;
}
// Write all accumulated coverage data to the file at `path`.
//
// Format, one section per hook:
//   [label or hash]
//   hits=<line:col>,<line:col>,...
//
// Returns false when there is no data to write, the file cannot be
// opened, or any write fails; true only on a fully successful dump.
inline bool
coverageDump(std::string const& path)
{
    auto& map = coverageMap();
    if (map.empty())
        return false;  // nothing recorded — caller can skip the file
    auto& labels = coverageLabels();
    std::ofstream out(path);
    if (!out)
        return false;
    for (auto const& [hash, data] : map)
    {
        // Prefer the registered label; fall back to the raw hash.
        if (auto it = labels.find(hash); it != labels.end())
            out << "[" << it->second << "]\n";
        else
            out << "[" << to_string(hash) << "]\n";
        out << "hits=";
        bool first = true;
        for (auto key : data.hits)
        {
            if (!first)
                out << ",";
            // Unpack the (line << 16) | col key back into line:col.
            out << (key >> 16) << ":" << (key & 0xFFFF);
            first = false;
        }
        out << "\n\n";
    }
    // Bug fix: the original returned true unconditionally here, hiding
    // write failures (disk full, I/O error). Flush and report the
    // stream's final state instead.
    out.flush();
    return static_cast<bool>(out);
}
// --- Coverage host callback ---
// Host callback invoked by hookz-instrumented WASM at each DWARF source
// location: in[0] = line, in[1] = column. Records the location under the
// executing hook's hash in the global coverage accumulator.
inline WasmEdge_Result
onSourceLine(
    void* data_ptr,
    const WasmEdge_CallingFrameContext* frameCtx,
    const WasmEdge_Value* in,
    WasmEdge_Value* out)
{
    (void)frameCtx;
    (void)out;
    auto* hookCtx = reinterpret_cast<HookContext*>(data_ptr);
    if (hookCtx == nullptr)
        return WasmEdge_Result_Success;
    uint32_t const line = WasmEdge_ValueGetI32(in[0]);
    uint32_t const col = WasmEdge_ValueGetI32(in[1]);
    // Pack (line, col) into one 32-bit key: line in the high half,
    // column in the low. Values >= 65536 wrap/alias — far beyond any
    // realistic hook source file.
    uint32_t const packed = (line << 16) | (col & 0xFFFF);
    coverageMap()[hookCtx->result.hookHash].hits.insert(packed);
    return WasmEdge_Result_Success;
}
/**
* HookExecutor is effectively a two-part function:
* The first part sets up the Hook Api inside the wasm import, ready for use
@@ -480,6 +606,22 @@ public:
#undef HOOK_WRAP_PARAMS
#pragma pop_macro("HOOK_API_DEFINITION")
// Coverage callback: void __on_source_line(i32 line, i32 col)
// Registered unconditionally — production hooks don't import it,
// so it's harmless. Instrumented hooks call it at each DWARF
// source location to record line:col coverage hits.
{
// Parameter signature (i32 line, i32 col) matching the instrumented call.
static WasmEdge_ValType paramsOSL[] = {
WasmEdge_ValType_I32, WasmEdge_ValType_I32};
// The function type is identical for every registration, so it is
// created once and shared process-wide (static).
static auto* ftOSL =
WasmEdge_FunctionTypeCreate(paramsOSL, 2, nullptr, 0);
// A fresh function instance per registration: it captures &ctx (the
// per-execution HookContext) as the callback's user data.
// NOTE(review): presumably importObj takes ownership of hfOSL via
// AddFunction below — confirm against the WasmEdge C API docs.
auto* hfOSL = WasmEdge_FunctionInstanceCreate(
ftOSL, hook::onSourceLine, (void*)(&ctx), 0);
static auto nameOSL =
WasmEdge_StringCreateByCString("__on_source_line");
WasmEdge_ModuleInstanceAddFunction(importObj, nameOSL, hfOSL);
}
WasmEdge_TableInstanceContext* hostTable =
WasmEdge_TableInstanceCreate(tableType);
WasmEdge_ModuleInstanceAddTable(importObj, tableName, hostTable);

View File

@@ -1267,7 +1267,7 @@ DEFINE_HOOK_FUNCTION(
if (NOT_IN_BOUNDS(read_ptr, read_len, memory_length))
return OUT_OF_BOUNDS;
if (!j.trace())
if (!jh.trace())
return 0;
if (read_len > 128)
@@ -1281,16 +1281,16 @@ DEFINE_HOOK_FUNCTION(
if (read_len > 0)
{
j.trace() << "HookTrace[" << HC_ACC() << "]: "
<< std::string_view(
(const char*)memory + read_ptr, read_len)
<< ": " << number;
JLOG(jh.trace()) << "HookTrace[" << HC_ACC() << "]: "
<< std::string_view(
(const char*)memory + read_ptr, read_len)
<< ": " << number;
return 0;
}
}
j.trace() << "HookTrace[" << HC_ACC() << "]: " << number;
JLOG(jh.trace()) << "HookTrace[" << HC_ACC() << "]: " << number;
return 0;
HOOK_TEARDOWN();
}
@@ -1310,7 +1310,7 @@ DEFINE_HOOK_FUNCTION(
NOT_IN_BOUNDS(dread_ptr, dread_len, memory_length))
return OUT_OF_BOUNDS;
if (!j.trace())
if (!jh.trace())
return 0;
if (mread_len > 128)
@@ -1370,8 +1370,8 @@ DEFINE_HOOK_FUNCTION(
if (out_len > 0)
{
j.trace() << "HookTrace[" << HC_ACC() << "]: "
<< std::string_view((const char*)output_storage, out_len);
JLOG(jh.trace()) << "HookTrace[" << HC_ACC() << "]: "
<< std::string_view((const char*)output_storage, out_len);
}
return 0;
@@ -3547,7 +3547,7 @@ DEFINE_HOOK_FUNCTION(
if (NOT_IN_BOUNDS(read_ptr, read_len, memory_length))
return OUT_OF_BOUNDS;
if (!j.trace())
if (!jh.trace())
return 0;
if (read_len > 128)
@@ -3560,12 +3560,12 @@ DEFINE_HOOK_FUNCTION(
if (float1 == 0)
{
j.trace() << "HookTrace[" << HC_ACC() << "]: "
<< (read_len == 0
? ""
: std::string_view(
(const char*)memory + read_ptr, read_len))
<< ": Float 0*10^(0) <ZERO>";
JLOG(jh.trace()) << "HookTrace[" << HC_ACC() << "]: "
<< (read_len == 0
? ""
: std::string_view(
(const char*)memory + read_ptr, read_len))
<< ": Float 0*10^(0) <ZERO>";
return 0;
}
@@ -3575,20 +3575,22 @@ DEFINE_HOOK_FUNCTION(
if (man < minMantissa || man > maxMantissa || exp < minExponent ||
exp > maxExponent)
{
j.trace() << "HookTrace[" << HC_ACC() << "]:"
<< (read_len == 0
? ""
: std::string_view(
(const char*)memory + read_ptr, read_len))
<< ": Float <INVALID>";
JLOG(jh.trace()) << "HookTrace[" << HC_ACC() << "]:"
<< (read_len == 0
? ""
: std::string_view(
(const char*)memory + read_ptr, read_len))
<< ": Float <INVALID>";
return 0;
}
j.trace() << "HookTrace[" << HC_ACC() << "]:"
<< (read_len == 0 ? ""
: std::string_view(
(const char*)memory + read_ptr, read_len))
<< ": Float " << (neg ? "-" : "") << man << "*10^(" << exp << ")";
JLOG(jh.trace()) << "HookTrace[" << HC_ACC() << "]:"
<< (read_len == 0
? ""
: std::string_view(
(const char*)memory + read_ptr, read_len))
<< ": Float " << (neg ? "-" : "") << man << "*10^(" << exp
<< ")";
return 0;
HOOK_TEARDOWN();

View File

@@ -534,7 +534,7 @@ SetHook::validateHookSetEntry(SetHookCtx& ctx, STObject const& hookSetObj)
}
auto result = validateGuards(
hook, // wasm to verify
hook,
logger,
hsacc,
hook_api::getImportWhitelist(ctx.rules),