Compare commits

..

36 Commits

Author SHA1 Message Date
tequ 4a9fcb49f8 Merge branch 'dev' into self-hosted 2025-12-12 16:15:32 +09:00
tequ 68cccde1ad Merge branch 'dev' into self-hosted 2025-12-01 20:17:56 +09:00
tequ 1b86cdc256 fix 2025-11-28 13:36:01 +09:00
tequ 01bacf90a6 remove --privileged flag 2025-11-28 13:13:41 +09:00
tequ 2162674931 rename cache_enabled to gha_cache_enabled 2025-11-28 13:13:14 +09:00
tequ 8be811c7d1 change runner [ci-nix-full-matrix] 2025-11-27 18:22:29 +09:00
tequ 09c00851b4 fix ccache config dir [ci-nix-full-matrix] 2025-11-27 17:30:21 +09:00
tequ b8622e9312 check [ci-nix-full-matrix] 2025-11-27 16:43:15 +09:00
tequ 4510d9c6f2 some changes 2025-11-27 16:11:18 +09:00
tequ 699765041f Enhance Conan configuration in xahau-ga-dependencies action by adding cache path setup for self-hosted runners and separating cache configuration into distinct steps. 2025-11-27 15:56:01 +09:00
tequ 1d9eed58b5 default shell bash 2025-11-27 14:46:03 +09:00
tequ 7d1622e54d conan cache 2025-11-27 13:41:52 +09:00
tequ be3c3d2bde echo conan profile path 2025-11-27 13:27:47 +09:00
tequ 0fba76363a debug containers 2025-11-27 13:17:02 +09:00
tequ 466491e3f5 try container name 2025-11-27 12:59:49 +09:00
tequ 177a9f4c91 Add conan_profiles_path input to xahau-ga-dependencies action and update workflow to use it 2025-11-27 12:13:41 +09:00
tequ 8f272aa950 revert matrix conf 2025-11-27 11:44:31 +09:00
tequ 73b78625c0 test 2025-11-27 10:59:16 +09:00
tequ 5cc7e6dc19 fix clang_gcc_toolchain to 13 2025-11-27 10:40:27 +09:00
tequ 95f753b1d3 Merge branch 'dev' into self-hosted 2025-11-27 10:32:46 +09:00
tequ f5857b4e4f try comment out clang_gcc_toolchain 2025-11-27 10:30:11 +09:00
tequ 02112928ec add cache_enabled option to xahau-ga-build 2025-11-27 01:40:00 +09:00
tequ c815533e6a disabled action/cache in self hosted / ccache 100GB 2025-11-27 01:34:07 +09:00
tequ 915fcaaa95 install sqlite3 2025-11-27 01:18:37 +09:00
tequ 55a938a9b3 fix container volume path 2025-11-27 01:12:10 +09:00
tequ 1d34d880ec . 2025-11-27 00:58:14 +09:00
tequ 97be8fa41e try container 2025-11-27 00:17:29 +09:00
tequ 94e0cef62f use pipx instead pip3 2025-11-27 00:08:21 +09:00
tequ f6f96865c1 use only 24.04 2025-11-26 23:35:20 +09:00
tequ 2e4e5eaff1 install cmake 2025-11-26 02:18:26 +09:00
tequ 2e96ae905d install python if not installed (ubuntu/self-hosted) 2025-11-26 02:11:40 +09:00
tequ d02af3c891 rever macos runner 2025-11-26 01:37:36 +09:00
tequ 6f559a6032 use hosted runner in light workflow 2025-11-26 00:18:08 +09:00
tequ 9980e8f9be fix to use self-hosted for subsequent jobs. 2025-11-26 00:17:38 +09:00
Wietse Wind fe17dde005 Update workers to self hosted 2025-11-26 00:14:15 +09:00
tequ 4eb1e4105a Revert "Update workers to self hosted" 2025-11-26 00:12:45 +09:00
    This reverts commit c42cb0df62.
24 changed files with 186 additions and 297 deletions

View File

@@ -75,17 +75,37 @@ runs:
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
- name: Restore ccache directory for main branch
if: inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true'
id: ccache-restore
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.ccache-main
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
restore-keys: |
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: ccache-main
- name: Restore ccache directory for current branch
if: inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
id: ccache-restore-current-branch
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.ccache-current
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
restore-keys: |
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: ccache-current
- name: Configure ccache
if: inputs.ccache_enabled == 'true'
shell: bash
run: |
# Create cache directories
mkdir -p ~/.ccache-cache
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
export CCACHE_CONFIGPATH="$HOME/.config/ccache/ccache.conf"
echo "CCACHE_CONFIGPATH=$CCACHE_CONFIGPATH" >> $GITHUB_ENV
mkdir -p ~/.ccache-main ~/.ccache-current
# Keep config separate from cache_dir so configs aren't swapped when CCACHE_DIR changes between steps
mkdir -p ~/.config/ccache
@@ -96,9 +116,20 @@ runs:
ccache --set-config=max_size=${{ inputs.ccache_max_size }}
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
ccache --set-config=cache_dir="$HOME/.ccache-cache"
echo "CCACHE_DIR=$HOME/.ccache-cache" >> $GITHUB_ENV
echo "📦 using ~/.ccache-cache as ccache cache directory"
# Determine if we're on the main branch
if [ "${{ steps.safe-branch.outputs.name }}" = "${{ inputs.main_branch }}" ]; then
# Main branch: use main branch cache only
ccache --set-config=cache_dir="$HOME/.ccache-main"
echo "CCACHE_DIR=$HOME/.ccache-main" >> $GITHUB_ENV
echo "📦 Main branch: using ~/.ccache-main"
else
# Feature branch: use current branch cache with main as secondary (read-only fallback)
ccache --set-config=cache_dir="$HOME/.ccache-current"
ccache --set-config=secondary_storage="file:$HOME/.ccache-main"
echo "CCACHE_DIR=$HOME/.ccache-current" >> $GITHUB_ENV
echo "📦 Feature branch: using ~/.ccache-current with ~/.ccache-main as secondary"
fi
# Print config for verification
echo "=== ccache configuration ==="
@@ -213,3 +244,17 @@ runs:
if: inputs.ccache_enabled == 'true'
shell: bash
run: ccache -s
- name: Save ccache directory for main branch
if: success() && inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
uses: actions/cache/save@v4
with:
path: ~/.ccache-main
key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
- name: Save ccache directory for current branch
if: success() && inputs.gha_cache_enabled == 'true' && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
uses: actions/cache/save@v4
with:
path: ~/.ccache-current
key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}

View File

@@ -17,6 +17,10 @@ inputs:
description: 'Cache version for invalidation'
required: false
default: '1'
gha_cache_enabled:
description: 'Whether to use actions/cache (disable for self-hosted with volume mounts)'
required: false
default: 'true'
main_branch:
description: 'Main branch name for restore keys'
required: false
@@ -59,14 +63,18 @@ outputs:
runs:
using: 'composite'
steps:
- name: Configure Conan cache paths
if: inputs.os == 'Linux'
shell: bash
run: |
mkdir -p /.conan-cache/conan2 /.conan-cache/conan2_download /.conan-cache/conan2_sources
echo 'core.cache:storage_path=/.conan-cache/conan2' > ~/.conan2/global.conf
echo 'core.download:download_cache=/.conan-cache/conan2_download' >> ~/.conan2/global.conf
echo 'core.sources:download_cache=/.conan-cache/conan2_sources' >> ~/.conan2/global.conf
- name: Restore Conan cache
if: inputs.gha_cache_enabled == 'true'
id: cache-restore-conan
uses: ./.github/actions/xahau-ga-cache-restore
with:
path: ~/.conan2
# Note: compiler-id format is compiler-version-stdlib[-gccversion]
key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-${{ inputs.configuration }}
restore-keys: |
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.py') }}-
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
cache-type: Conan
- name: Configure Conan cache paths
if: inputs.gha_cache_enabled == 'false'
@@ -153,3 +161,10 @@ runs:
--build missing \
--settings build_type=${{ inputs.configuration }} \
..
- name: Save Conan cache
if: success() && inputs.gha_cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: ~/.conan2
key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
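For the self-hosted path (gha_cache_enabled: 'false'), the same effect is achieved by pointing Conan 2 at a persistent volume instead of actions/cache. A rough sketch, assuming /.conan-cache is a host directory mounted into the job container:

#!/usr/bin/env bash
# Sketch: relocate the Conan 2 package, download, and source caches onto a
# persistent mount so they survive across jobs on a self-hosted runner.
set -euo pipefail

CACHE_ROOT="/.conan-cache"
mkdir -p "$CACHE_ROOT/conan2" "$CACHE_ROOT/conan2_download" "$CACHE_ROOT/conan2_sources" ~/.conan2

cat > ~/.conan2/global.conf <<EOF
core.cache:storage_path=$CACHE_ROOT/conan2
core.download:download_cache=$CACHE_ROOT/conan2_download
core.sources:download_cache=$CACHE_ROOT/conan2_sources
EOF

conan config show 'core.cache:storage_path'   # confirm the override took effect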

View File

@@ -20,7 +20,7 @@ jobs:
- Ninja
configuration:
- Debug
runs-on: [self-hosted, macOS]
runs-on: macos-15
env:
build_dir: .build
# Bump this number to invalidate all caches globally.
@@ -30,29 +30,61 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Add Homebrew to PATH
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Install Conan
run: |
echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
echo "/opt/homebrew/sbin" >> "$GITHUB_PATH"
brew install conan
# Verify Conan 2 is installed
conan --version
- name: Install Coreutils
run: |
brew install coreutils
echo "Num proc: $(nproc)"
# To isolate environments for each Runner, instead of installing globally with brew,
# use mise to isolate environments for each Runner directory.
- name: Setup toolchain (mise)
uses: jdx/mise-action@v2
with:
install: true
- name: Install Ninja
if: matrix.generator == 'Ninja'
run: brew install ninja
- name: Install tools via mise
- name: Install Python
run: |
mise install
mise use cmake@3.23.1 python@3.12 pipx@latest conan@2 ninja@latest ccache@latest
mise reshim
echo "$HOME/.local/share/mise/shims" >> "$GITHUB_PATH"
if which python3 > /dev/null 2>&1; then
echo "Python 3 executable exists"
python3 --version
else
brew install python@3.12
fi
# Create 'python' symlink if it doesn't exist (for tools expecting 'python')
if ! which python > /dev/null 2>&1; then
sudo ln -sf $(which python3) /usr/local/bin/python
fi
- name: Install CMake
run: |
# Install CMake 3.x to match local dev environments
# With Conan 2 and the policy args passed to CMake, newer versions
# can have issues with dependencies that require cmake_minimum_required < 3.5
brew uninstall cmake --ignore-dependencies 2>/dev/null || true
# Download and install CMake 3.31.7 directly
curl -L https://github.com/Kitware/CMake/releases/download/v3.31.7/cmake-3.31.7-macos-universal.tar.gz -o cmake.tar.gz
tar -xzf cmake.tar.gz
# Move the entire CMake.app to /Applications
sudo mv cmake-3.31.7-macos-universal/CMake.app /Applications/
echo "/Applications/CMake.app/Contents/bin" >> $GITHUB_PATH
/Applications/CMake.app/Contents/bin/cmake --version
- name: Install ccache
run: brew install ccache
- name: Check environment
run: |
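The per-runner isolation described in the comment above amounts to pinning the toolchain in the workspace with mise rather than installing it machine-wide with brew. A plausible sketch (tool versions are illustrative, not the workflow's exact pins):

#!/usr/bin/env bash
# Sketch: pin build tools per workspace with mise instead of global brew installs.
set -euo pipefail

cd "$GITHUB_WORKSPACE"
mise use cmake@3.31 python@3.12 conan@2 ninja@latest ccache@latest   # writes a local mise.toml
mise install                                        # ensure the pinned versions are present
export PATH="$HOME/.local/share/mise/shims:$PATH"   # expose the shims to later steps

cmake --version
conan --version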
@@ -66,14 +98,6 @@ jobs:
echo "---- Full Environment ----"
env
- name: Get commit message
id: get-commit-message
uses: ./.github/actions/xahau-ga-get-commit-message
with:
event-name: ${{ github.event_name }}
head-commit-message: ${{ github.event.head_commit.message }}
pr-head-sha: ${{ github.event.pull_request.head.sha }}
- name: Detect compiler version
id: detect-compiler
run: |
@@ -105,7 +129,6 @@ jobs:
cache_version: ${{ env.CACHE_VERSION }}
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: libcxx
ccache_max_size: '100G'
- name: Test
run: |

View File

@@ -181,7 +181,8 @@ jobs:
image: ubuntu:24.04
volumes:
- /home/runner/.conan-cache:/.conan-cache
- /home/runner/.ccache-cache:/github/home/.ccache-cache
- /home/runner/.ccache-main:/github/home/.ccache-main
- /home/runner/.ccache-current:/github/home/.ccache-current
defaults:
run:
shell: bash
@@ -324,6 +325,7 @@ jobs:
main_branch: ${{ env.MAIN_BRANCH_NAME }}
stdlib: ${{ matrix.stdlib }}
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
gha_cache_enabled: 'false' # Disable caching for self hosted runner
ccache_max_size: '100G'
- name: Set artifact name
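In other words, the self-hosted job swaps actions/cache for host directories bind-mounted into the ubuntu:24.04 container, which is why gha_cache_enabled is forced to 'false' here. A sketch of the host-side provisioning that the volume mappings above imply (the ownership step is an assumption about the runner host, not something shown in the workflow):

#!/usr/bin/env bash
# Sketch: create persistent cache directories on the self-hosted runner host
# so the container volume mounts have something to bind to.
set -euo pipefail

sudo mkdir -p /home/runner/.conan-cache /home/runner/.ccache-main /home/runner/.ccache-current
# Make sure the user the job container runs as can write to the mounts
# (assumed to be the 'runner' user here).
sudo chown -R runner:runner /home/runner/.conan-cache /home/runner/.ccache-main /home/runner/.ccache-current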

View File

@@ -48,9 +48,13 @@ target_sources (xrpl_core PRIVATE
src/ripple/beast/net/impl/IPAddressV6.cpp
src/ripple/beast/net/impl/IPEndpoint.cpp
src/ripple/beast/utility/src/beast_Journal.cpp
src/ripple/beast/utility/src/beast_PropertyStream.cpp
# Enhanced logging - compiles to empty when BEAST_ENHANCED_LOGGING is not defined
src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
src/ripple/beast/utility/src/beast_PropertyStream.cpp)
# Conditionally add enhanced logging source when BEAST_ENHANCED_LOGGING is enabled
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
target_sources(xrpl_core PRIVATE
src/ripple/beast/utility/src/beast_EnhancedLogging.cpp)
endif()
#[===============================[
core sources
@@ -158,16 +162,12 @@ target_link_libraries (xrpl_core
date::date
Ripple::opts)
# date-tz for enhanced logging (always linked, code is #ifdef guarded)
if(TARGET date::date-tz)
target_link_libraries(xrpl_core PUBLIC date::date-tz)
# Link date-tz library when enhanced logging is enabled
if(DEFINED BEAST_ENHANCED_LOGGING AND BEAST_ENHANCED_LOGGING)
if(TARGET date::date-tz)
target_link_libraries(xrpl_core PUBLIC date::date-tz)
endif()
endif()
# BEAST_ENHANCED_LOGGING: enable for Debug builds OR when explicitly requested
# Uses generator expression so it works with multi-config generators (Xcode, VS, Ninja Multi-Config)
target_compile_definitions(xrpl_core PUBLIC
$<$<OR:$<CONFIG:Debug>,$<BOOL:${BEAST_ENHANCED_LOGGING}>>:BEAST_ENHANCED_LOGGING=1>
)
#[=================================[
main/core headers installation
#]=================================]

View File

@@ -37,11 +37,20 @@ endif() #git
set(SOURCE_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/")
add_definitions(-DSOURCE_ROOT_PATH="${SOURCE_ROOT_PATH}")
# BEAST_ENHANCED_LOGGING - adds file:line numbers and formatting to logs
# Automatically enabled for Debug builds via generator expression
# Can be explicitly controlled with -DBEAST_ENHANCED_LOGGING=ON/OFF
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages (auto: Debug=ON, Release=OFF)" OFF)
message(STATUS "BEAST_ENHANCED_LOGGING option: ${BEAST_ENHANCED_LOGGING}")
# BEAST_ENHANCED_LOGGING option - adds file:line numbers and formatting to logs
# Default to ON for Debug builds, OFF for Release
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" ON)
else()
option(BEAST_ENHANCED_LOGGING "Include file and line numbers in log messages" OFF)
endif()
if(BEAST_ENHANCED_LOGGING)
add_definitions(-DBEAST_ENHANCED_LOGGING=1)
message(STATUS "Log line numbers enabled")
else()
message(STATUS "Log line numbers disabled")
endif()
if(thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
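As a usage note, the option can still be overridden per build regardless of the Debug/Release default; a plausible configure invocation (build directory and generator are illustrative):

#!/usr/bin/env bash
# Sketch: force file:line log decoration on in a Release build by overriding
# the BEAST_ENHANCED_LOGGING default at configure time.
set -euo pipefail

cmake -S . -B build -G Ninja \
  -DCMAKE_BUILD_TYPE=Release \
  -DBEAST_ENHANCED_LOGGING=ON
cmake --build build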

View File

@@ -192,7 +192,6 @@
#define sfNFTokenMinter ((8U << 16U) + 9U)
#define sfEmitCallback ((8U << 16U) + 10U)
#define sfHookAccount ((8U << 16U) + 16U)
#define sfHookAdministrator ((8U << 16U) + 98U)
#define sfInform ((8U << 16U) + 99U)
#define sfIndexes ((19U << 16U) + 1U)
#define sfHashes ((19U << 16U) + 2U)

View File

@@ -192,7 +192,7 @@ ENV PATH=/usr/local/bin:$PATH
# Configure ccache and Conan 2
# NOTE: Using echo commands instead of heredocs because heredocs in Docker RUN commands are finnicky
RUN /hbb_exe/activate-exec bash -c "ccache -M 100G && \
RUN /hbb_exe/activate-exec bash -c "ccache -M 10G && \
ccache -o cache_dir=/cache/ccache && \
ccache -o compiler_check=content && \
mkdir -p ~/.conan2 /cache/conan2 /cache/conan2_download /cache/conan2_sources && \

View File

@@ -471,10 +471,6 @@ ManifestCache::applyManifest(Manifest m)
auto masterKey = m.masterKey;
map_.emplace(std::move(masterKey), std::move(m));
// Increment sequence to invalidate cached manifest messages
seq_++;
return ManifestDisposition::accepted;
}

View File

@@ -267,9 +267,6 @@ DeleteAccount::preclaim(PreclaimContext const& ctx)
if (sleAccount->isFieldPresent(sfHookNamespaces) ||
sleAccount->isFieldPresent(sfHooks))
return tecHAS_OBLIGATIONS;
if (sleAccount->isFieldPresent(sfHookAdministrator))
return tecHAS_OBLIGATIONS;
}
// When fixNFTokenRemint is enabled, we don't allow an account to be

View File

@@ -897,9 +897,7 @@ ValidNewAccountRoot::finalize(
}
if ((tt == ttPAYMENT || tt == ttIMPORT || tt == ttGENESIS_MINT ||
tt == ttREMIT ||
(tt == ttHOOK_SET &&
view.rules().enabled(featureHookAdministrator))) &&
tt == ttREMIT) &&
isTesSuccess(result))
{
std::uint32_t const startingSeq{

View File

@@ -628,21 +628,6 @@ SetHook::calculateBaseFee(ReadView const& view, STTx const& tx)
TER
SetHook::preclaim(ripple::PreclaimContext const& ctx)
{
if (ctx.tx.isFieldPresent(sfHookAdministrator))
{
auto const& administrator = ctx.tx.getAccountID(sfHookAdministrator);
auto const& sle = ctx.view.read(keylet::account(administrator));
if (!sle)
return tecNO_DST;
if (!sle->isFieldPresent(sfHookAdministrator))
return tecNO_PERMISSION;
if (sle->getAccountID(sfHookAdministrator) !=
ctx.tx.getAccountID(sfAccount))
return tecNO_PERMISSION;
}
auto const& hookSets = ctx.tx.getFieldArray(sfHooks);
for (auto const& hookSetObj : hookSets)
@@ -682,46 +667,12 @@ SetHook::preflight(PreflightContext const& ctx)
return ret;
if (ctx.rules.enabled(fixInvalidTxFlags) &&
ctx.tx.getFlags() & tfSetHookMask)
ctx.tx.getFlags() & tfUniversalMask)
{
JLOG(ctx.j.trace()) << "SetHook: Invalid flags set.";
return temINVALID_FLAG;
}
if (ctx.tx.isFlag(tfNewAccount) &&
!ctx.rules.enabled(featureHookAdministrator))
{
JLOG(ctx.j.trace()) << "SetHook: New account flag set but hook "
"administrator amendment is not enabled.";
return temDISABLED;
}
if (ctx.tx.isFieldPresent(sfDestination))
{
if (!ctx.rules.enabled(featureHookAdministrator))
{
JLOG(ctx.j.trace())
<< "HookSet: Hook administrator amendment not enabled.";
return temDISABLED;
}
if (ctx.tx.isFlag(tfNewAccount))
{
JLOG(ctx.j.trace())
<< "HookSet: Both new account flag and destination set. "
"New account flag and destination cannot be set at the same "
"time.";
return temMALFORMED;
}
if (ctx.tx.getAccountID(sfDestination) ==
ctx.tx.getAccountID(sfAccount))
{
JLOG(ctx.j.trace()) << "HookSet: Redundant hook administrator.";
return temREDUNDANT;
}
}
if (!ctx.tx.isFieldPresent(sfHooks))
{
JLOG(ctx.j.trace())
@@ -1232,23 +1183,6 @@ struct KeyletComparator
}
};
AccountID
randomAccountAddress(ReadView const& view, uint256 const& pseudoOwnerKey)
{
// This number must not be changed without an amendment
constexpr std::uint16_t maxAccountAttempts = 256;
for (std::uint16_t i = 0; i < maxAccountAttempts; ++i)
{
ripesha_hasher rsh;
auto const hash = sha512Half(i, view.info().parentHash, pseudoOwnerKey);
rsh(hash.data(), hash.size());
AccountID const ret{static_cast<ripesha_hasher::result_type>(rsh)};
if (!view.read(keylet::account(ret)))
return ret;
}
return beast::zero;
}
TER
SetHook::setHook()
{
@@ -1268,69 +1202,11 @@ SetHook::setHook()
.app = ctx_.app,
.rules = ctx_.view().rules()};
auto targetAccount = ctx.tx[~sfDestination].value_or(account_);
if (ctx_.tx.isFlag(tfNewAccount))
{
// create the new account
auto const newAccount = randomAccountAddress(ctx_.view(), uint256{});
if (newAccount == beast::zero)
return tecDUPLICATE;
auto sleNewAccount = std::make_shared<SLE>(keylet::account(newAccount));
sleNewAccount->setAccountID(sfAccount, newAccount);
sleNewAccount->setFieldAmount(sfBalance, STAmount{});
sleNewAccount->setFieldU32(sfOwnerCount, 1); // ltHook
std::uint32_t const seqno{
ctx_.view().rules().enabled(featureXahauGenesis)
? ctx_.view().info().parentCloseTime.time_since_epoch().count()
: ctx_.view().rules().enabled(featureDeletableAccounts)
? ctx_.view().seq()
: 1};
sleNewAccount->setFieldU32(sfSequence, seqno);
sleNewAccount->setFieldU32(sfFlags, lsfDisableMaster);
sleNewAccount->setAccountID(sfHookAdministrator, account_);
auto sleFees = view().peek(keylet::fees());
if (sleFees && view().rules().enabled(featureXahauGenesis))
{
auto actIdx = sleFees->isFieldPresent(sfAccountCount)
? sleFees->getFieldU64(sfAccountCount)
: 0;
sleNewAccount->setFieldU64(sfAccountIndex, actIdx);
sleFees->setFieldU64(sfAccountCount, actIdx + 1);
view().update(sleFees);
}
// fund AccountReserve + ObjectReserve (ltHook)
auto const requiredDrops = ctx_.view().fees().accountReserve(1);
auto sourceSle = ctx_.view().peek(keylet::account(account_));
if (!sourceSle)
return tefINTERNAL;
auto const sourceCurrentReserve = ctx_.view().fees().accountReserve(
sourceSle->getFieldU32(sfOwnerCount));
auto const sourceBalance = sourceSle->getFieldAmount(sfBalance).xrp();
if (sourceBalance < sourceCurrentReserve + requiredDrops)
return tecUNFUNDED;
sourceSle->setFieldAmount(sfBalance, sourceBalance - requiredDrops);
ctx_.view().update(sourceSle);
sleNewAccount->setFieldAmount(sfBalance, requiredDrops);
ctx_.view().insert(sleNewAccount);
targetAccount = newAccount;
}
const int blobMax = hook::maxHookWasmSize();
auto const accountKeylet = keylet::account(account_);
auto const hookKeylet = keylet::hook(account_);
auto const hookKeylet = keylet::hook(targetAccount);
auto accountSLE = view().peek(keylet::account(targetAccount));
auto accountSLE = view().peek(accountKeylet);
ripple::STArray newHooks{sfHooks, 8};
auto newHookSLE = std::make_shared<SLE>(hookKeylet);

View File

@@ -360,8 +360,7 @@ Logs::format(
if (!partition.empty())
{
#ifdef BEAST_ENHANCED_LOGGING
if (beast::detail::should_log_use_colors())
output += beast::detail::get_log_highlight_color();
output += beast::detail::get_log_highlight_color();
#endif
output += partition + ":";
}
@@ -393,8 +392,7 @@ Logs::format(
}
#ifdef BEAST_ENHANCED_LOGGING
if (beast::detail::should_log_use_colors())
output += "\033[0m";
output += "\033[0m";
#endif
output += message;

View File

@@ -41,14 +41,6 @@ get_log_highlight_color();
constexpr const char*
strip_source_root(const char* file)
{
// Handle relative paths from build/ directory (common with ccache)
// e.g., "../src/ripple/..." -> "ripple/..."
if (file && file[0] == '.' && file[1] == '.' && file[2] == '/' &&
file[3] == 's' && file[4] == 'r' && file[5] == 'c' && file[6] == '/')
{
return file + 7; // skip "../src/"
}
#ifdef SOURCE_ROOT_PATH
constexpr const char* sourceRoot = SOURCE_ROOT_PATH;
constexpr auto strlen_constexpr = [](const char* s) constexpr

View File

@@ -17,8 +17,6 @@
*/
//==============================================================================
#ifdef BEAST_ENHANCED_LOGGING
#include <ripple/beast/utility/EnhancedLogging.h>
#include <cstdlib>
#include <cstring>
@@ -114,5 +112,3 @@ log_write_location_string(std::ostream& os, const char* file, int line)
} // namespace detail
} // namespace beast
#endif // BEAST_ENHANCED_LOGGING

View File

@@ -155,43 +155,14 @@ Journal::ScopedStream::~ScopedStream()
#ifdef BEAST_ENHANCED_LOGGING
// Add suffix if location is enabled
if (file_ && detail::should_show_location() && !s.empty())
if (file_ && detail::should_show_location() && !s.empty() && s != "\n")
{
// Single optimized scan from the end
size_t const lastNonWhitespace = s.find_last_not_of(" \n\r\t");
// Skip if message is only whitespace (e.g., just "\n" or " \n\n")
if (lastNonWhitespace != std::string::npos)
{
// Count only the trailing newlines (tiny range)
size_t trailingNewlines = 0;
for (size_t i = lastNonWhitespace + 1; i < s.length(); ++i)
{
if (s[i] == '\n')
++trailingNewlines;
}
// Build location string once
std::ostringstream locStream;
detail::log_write_location_string(locStream, file_, line_);
std::string const location = locStream.str();
// Pre-allocate exact size → zero reallocations
size_t const finalSize = lastNonWhitespace + 1 + 1 +
location.length() + trailingNewlines;
std::string result;
result.reserve(finalSize);
// Direct string ops (no ostringstream overhead)
result.append(s, 0, lastNonWhitespace + 1);
result.push_back(' ');
result += location;
if (trailingNewlines > 0)
result.append(trailingNewlines, '\n');
s = std::move(result); // Move, no copy
}
std::ostringstream combined;
combined << s;
if (!s.empty() && s.back() != ' ')
combined << " ";
detail::log_write_location_string(combined, file_, line_);
s = combined.str();
}
#endif

View File

@@ -484,61 +484,44 @@ OverlayImpl::start()
m_peerFinder->setConfig(config);
m_peerFinder->start();
auto addIps = [this](std::vector<std::string> ips, bool fixed) {
auto addIps = [&](std::vector<std::string> bootstrapIps) -> void {
beast::Journal const& j = app_.journal("Overlay");
for (auto& ip : ips)
for (auto& ip : bootstrapIps)
{
std::size_t pos = ip.find('#');
if (pos != std::string::npos)
ip.erase(pos);
JLOG(j.trace())
<< "Found " << (fixed ? "fixed" : "bootstrap") << " IP: " << ip;
JLOG(j.trace()) << "Found boostrap IP: " << ip;
}
m_resolver.resolve(
ips,
[this, fixed](
std::string const& name,
bootstrapIps,
[&](std::string const& name,
std::vector<beast::IP::Endpoint> const& addresses) {
std::vector<std::string> ips;
ips.reserve(addresses.size());
beast::Journal const& j = app_.journal("Overlay");
std::string const base("config: ");
std::vector<beast::IP::Endpoint> eps;
eps.reserve(addresses.size());
for (auto const& addr : addresses)
{
auto ep = addr.port() == 0 ? addr.at_port(DEFAULT_PEER_PORT)
: addr;
JLOG(j.trace())
<< "Parsed " << (fixed ? "fixed" : "bootstrap")
<< " IP: " << ep;
eps.push_back(ep);
std::string addrStr = addr.port() == 0
? to_string(addr.at_port(DEFAULT_PEER_PORT))
: to_string(addr);
JLOG(j.trace()) << "Parsed boostrap IP: " << addrStr;
ips.push_back(addrStr);
}
if (eps.empty())
return;
if (fixed)
{
m_peerFinder->addFixedPeer(base + name, eps);
}
else
{
std::vector<std::string> strs;
strs.reserve(eps.size());
for (auto const& ep : eps)
strs.push_back(to_string(ep));
m_peerFinder->addFallbackStrings(base + name, strs);
}
std::string const base("config: ");
if (!ips.empty())
m_peerFinder->addFallbackStrings(base + name, ips);
});
};
if (!app_.config().IPS.empty())
addIps(app_.config().IPS, false);
addIps(app_.config().IPS);
if (!app_.config().IPS_FIXED.empty())
addIps(app_.config().IPS_FIXED, true);
addIps(app_.config().IPS_FIXED);
auto const timer = std::make_shared<Timer>(*this);
std::lock_guard lock(mutex_);

View File

@@ -74,7 +74,7 @@ namespace detail {
// Feature.cpp. Because it's only used to reserve storage, and determine how
// large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than
// the actual number of amendments. A LogicError on startup will verify this.
static constexpr std::size_t numFeatures = 91;
static constexpr std::size_t numFeatures = 90;
/** Amendments that this server supports and the default voting behavior.
Whether they are enabled depends on the Rules defined in the validated
@@ -378,7 +378,6 @@ extern uint256 const fixInvalidTxFlags;
extern uint256 const featureExtendedHookState;
extern uint256 const fixCronStacking;
extern uint256 const fixHookAPI20251128;
extern uint256 const featureHookAdministrator;
} // namespace ripple
#endif

View File

@@ -563,7 +563,6 @@ extern SF_ACCOUNT const sfEmitCallback;
extern SF_ACCOUNT const sfHookAccount;
extern SF_ACCOUNT const sfNFTokenMinter;
extern SF_ACCOUNT const sfInform;
extern SF_ACCOUNT const sfHookAdministrator;
// path set
extern SField const sfPaths;

View File

@@ -184,11 +184,6 @@ constexpr std::uint32_t const tfNFTokenCancelOfferMask = ~(tfUniversal);
// NFTokenAcceptOffer flags:
constexpr std::uint32_t const tfNFTokenAcceptOfferMask = ~tfUniversal;
enum SetHookFlags : uint32_t {
tfNewAccount = 0x00000001,
};
constexpr std::uint32_t const tfSetHookMask = ~(tfUniversal | tfNewAccount);
// URIToken mask
constexpr std::uint32_t const tfURITokenMintMask = ~(tfUniversal | tfBurnable);
constexpr std::uint32_t const tfURITokenNonMintMask = ~tfUniversal;

View File

@@ -484,7 +484,6 @@ REGISTER_FIX (fixInvalidTxFlags, Supported::yes, VoteBehavior::De
REGISTER_FEATURE(ExtendedHookState, Supported::yes, VoteBehavior::DefaultNo);
REGISTER_FIX (fixCronStacking, Supported::yes, VoteBehavior::DefaultYes);
REGISTER_FIX (fixHookAPI20251128, Supported::yes, VoteBehavior::DefaultYes);
REGISTER_FEATURE(HookAdministrator, Supported::yes, VoteBehavior::DefaultNo);
// The following amendments are obsolete, but must remain supported
// because they could potentially get enabled.

View File

@@ -70,7 +70,6 @@ LedgerFormats::LedgerFormats()
{sfTouchCount, soeOPTIONAL},
{sfHookStateScale, soeOPTIONAL},
{sfCron, soeOPTIONAL},
{sfHookAdministrator, soeOPTIONAL},
},
commonFields);

View File

@@ -315,7 +315,6 @@ CONSTRUCT_TYPED_SFIELD(sfEmitCallback, "EmitCallback", ACCOUNT,
// account (uncommon)
CONSTRUCT_TYPED_SFIELD(sfHookAccount, "HookAccount", ACCOUNT, 16);
CONSTRUCT_TYPED_SFIELD(sfHookAdministrator, "HookAdministrator", ACCOUNT, 98);
CONSTRUCT_TYPED_SFIELD(sfInform, "Inform", ACCOUNT, 99);
// vector of 256-bit

View File

@@ -324,7 +324,6 @@ TxFormats::TxFormats()
{
{sfHooks, soeREQUIRED},
{sfTicketSequence, soeOPTIONAL},
{sfDestination, soeOPTIONAL},
},
commonFields);