Mirror of https://github.com/XRPLF/rippled.git (synced 2026-02-20 05:42:38 +00:00)

Compare commits: ximinez/le... → bthomee/no... (13 commits)

| SHA1 |
|---|
| 554df631c6 |
| 5e704bfdfb |
| fe8cc02bfa |
| 061c033f52 |
| 832a7e7e4a |
| b2371c4c02 |
| b94a7c4b44 |
| 9b9027112d |
| 8e7889c66e |
| d836c3788d |
| 1cb7c0293f |
| 52dabc1f79 |
| 2d78d41f7b |
.clang-tidy (191 lines changed)

@@ -1,191 +0,0 @@
---
Checks: "-*,
bugprone-argument-comment
"
# bugprone-assert-side-effect,
# bugprone-bad-signal-to-kill-thread,
# bugprone-bool-pointer-implicit-conversion,
# bugprone-casting-through-void,
# bugprone-chained-comparison,
# bugprone-compare-pointer-to-member-virtual-function,
# bugprone-copy-constructor-init,
# bugprone-crtp-constructor-accessibility,
# bugprone-dangling-handle,
# bugprone-dynamic-static-initializers,
# bugprone-empty-catch,
# bugprone-fold-init-type,
# bugprone-forward-declaration-namespace,
# bugprone-inaccurate-erase,
# bugprone-inc-dec-in-conditions,
# bugprone-incorrect-enable-if,
# bugprone-incorrect-roundings,
# bugprone-infinite-loop,
# bugprone-integer-division,
# bugprone-lambda-function-name,
# bugprone-macro-parentheses,
# bugprone-macro-repeated-side-effects,
# bugprone-misplaced-operator-in-strlen-in-alloc,
# bugprone-misplaced-pointer-arithmetic-in-alloc,
# bugprone-misplaced-widening-cast,
# bugprone-move-forwarding-reference,
# bugprone-multi-level-implicit-pointer-conversion,
# bugprone-multiple-new-in-one-expression,
# bugprone-multiple-statement-macro,
# bugprone-no-escape,
# bugprone-non-zero-enum-to-bool-conversion,
# bugprone-optional-value-conversion,
# bugprone-parent-virtual-call,
# bugprone-pointer-arithmetic-on-polymorphic-object,
# bugprone-posix-return,
# bugprone-redundant-branch-condition,
# bugprone-reserved-identifier,
# bugprone-return-const-ref-from-parameter,
# bugprone-shared-ptr-array-mismatch,
# bugprone-signal-handler,
# bugprone-signed-char-misuse,
# bugprone-sizeof-container,
# bugprone-sizeof-expression,
# bugprone-spuriously-wake-up-functions,
# bugprone-standalone-empty,
# bugprone-string-constructor,
# bugprone-string-integer-assignment,
# bugprone-string-literal-with-embedded-nul,
# bugprone-stringview-nullptr,
# bugprone-suspicious-enum-usage,
# bugprone-suspicious-include,
# bugprone-suspicious-memory-comparison,
# bugprone-suspicious-memset-usage,
# bugprone-suspicious-missing-comma,
# bugprone-suspicious-realloc-usage,
# bugprone-suspicious-semicolon,
# bugprone-suspicious-string-compare,
# bugprone-suspicious-stringview-data-usage,
# bugprone-swapped-arguments,
# bugprone-switch-missing-default-case,
# bugprone-terminating-continue,
# bugprone-throw-keyword-missing,
# bugprone-too-small-loop-variable,
# bugprone-undefined-memory-manipulation,
# bugprone-undelegated-constructor,
# bugprone-unhandled-exception-at-new,
# bugprone-unhandled-self-assignment,
# bugprone-unique-ptr-array-mismatch,
# bugprone-unsafe-functions,
# bugprone-unused-local-non-trivial-variable,
# bugprone-unused-raii,
# bugprone-unused-return-value,
# bugprone-use-after-move,
# bugprone-virtual-near-miss,
# cppcoreguidelines-init-variables,
# cppcoreguidelines-misleading-capture-default-by-value,
# cppcoreguidelines-no-suspend-with-lock,
# cppcoreguidelines-pro-type-member-init,
# cppcoreguidelines-pro-type-static-cast-downcast,
# cppcoreguidelines-rvalue-reference-param-not-moved,
# cppcoreguidelines-use-default-member-init,
# cppcoreguidelines-virtual-class-destructor,
# hicpp-ignored-remove-result,
# llvm-namespace-comment,
# misc-const-correctness,
# misc-definitions-in-headers,
# misc-header-include-cycle,
# misc-include-cleaner,
# misc-misplaced-const,
# misc-redundant-expression,
# misc-static-assert,
# misc-throw-by-value-catch-by-reference,
# misc-unused-alias-decls,
# misc-unused-using-decls,
# modernize-concat-nested-namespaces,
# modernize-deprecated-headers,
# modernize-make-shared,
# modernize-make-unique,
# modernize-pass-by-value,
# modernize-type-traits,
# modernize-use-designated-initializers,
# modernize-use-emplace,
# modernize-use-equals-default,
# modernize-use-equals-delete,
# modernize-use-override,
# modernize-use-ranges,
# modernize-use-starts-ends-with,
# modernize-use-std-numbers,
# modernize-use-using,
# performance-faster-string-find,
# performance-for-range-copy,
# performance-implicit-conversion-in-loop,
# performance-inefficient-vector-operation,
# performance-move-const-arg,
# performance-move-constructor-init,
# performance-no-automatic-move,
# performance-trivially-destructible,
# readability-avoid-nested-conditional-operator,
# readability-avoid-return-with-void-value,
# readability-braces-around-statements,
# readability-const-return-type,
# readability-container-contains,
# readability-container-size-empty,
# readability-convert-member-functions-to-static,
# readability-duplicate-include,
# readability-else-after-return,
# readability-enum-initial-value,
# readability-implicit-bool-conversion,
# readability-inconsistent-declaration-parameter-name,
# readability-identifier-naming,
# readability-make-member-function-const,
# readability-math-missing-parentheses,
# readability-misleading-indentation,
# readability-non-const-parameter,
# readability-redundant-casting,
# readability-redundant-declaration,
# readability-redundant-inline-specifier,
# readability-redundant-member-init,
# readability-redundant-string-init,
# readability-reference-to-constructed-temporary,
# readability-simplify-boolean-expr,
# readability-static-accessed-through-instance,
# readability-static-definition-in-anonymous-namespace,
# readability-suspicious-call-argument,
# readability-use-std-min-max
#
# CheckOptions:
# readability-braces-around-statements.ShortStatementLines: 2
# readability-identifier-naming.MacroDefinitionCase: UPPER_CASE
# readability-identifier-naming.ClassCase: CamelCase
# readability-identifier-naming.StructCase: CamelCase
# readability-identifier-naming.UnionCase: CamelCase
# readability-identifier-naming.EnumCase: CamelCase
# readability-identifier-naming.EnumConstantCase: CamelCase
# readability-identifier-naming.ScopedEnumConstantCase: CamelCase
# readability-identifier-naming.GlobalConstantCase: UPPER_CASE
# readability-identifier-naming.GlobalConstantPrefix: "k"
# readability-identifier-naming.GlobalVariableCase: CamelCase
# readability-identifier-naming.GlobalVariablePrefix: "g"
# readability-identifier-naming.ConstexprFunctionCase: camelBack
# readability-identifier-naming.ConstexprMethodCase: camelBack
# readability-identifier-naming.ClassMethodCase: camelBack
# readability-identifier-naming.ClassMemberCase: camelBack
# readability-identifier-naming.ClassConstantCase: UPPER_CASE
# readability-identifier-naming.ClassConstantPrefix: "k"
# readability-identifier-naming.StaticConstantCase: UPPER_CASE
# readability-identifier-naming.StaticConstantPrefix: "k"
# readability-identifier-naming.StaticVariableCase: UPPER_CASE
# readability-identifier-naming.StaticVariablePrefix: "k"
# readability-identifier-naming.ConstexprVariableCase: UPPER_CASE
# readability-identifier-naming.ConstexprVariablePrefix: "k"
# readability-identifier-naming.LocalConstantCase: camelBack
# readability-identifier-naming.LocalVariableCase: camelBack
# readability-identifier-naming.TemplateParameterCase: CamelCase
# readability-identifier-naming.ParameterCase: camelBack
# readability-identifier-naming.FunctionCase: camelBack
# readability-identifier-naming.MemberCase: camelBack
# readability-identifier-naming.PrivateMemberSuffix: _
# readability-identifier-naming.ProtectedMemberSuffix: _
# readability-identifier-naming.PublicMemberSuffix: ""
# readability-identifier-naming.FunctionIgnoredRegexp: ".*tag_invoke.*"
# bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
# bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
# misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h;__chrono/.*;fmt/chrono.h;boost/uuid/uuid_hash.hpp'
#
# HeaderFilterRegex: '^.*/(src|tests)/.*\.(h|hpp)$'
WarningsAsErrors: "*"

@@ -29,7 +29,7 @@ format:
disable: false
_help_line_width:
- How wide to allow formatted cmake files
line_width: 100
line_width: 120
_help_tab_size:
- How many spaces to tab for indent
tab_size: 4

@@ -5,7 +5,7 @@ Loop: test.jtx test.unit_test
test.unit_test == test.jtx

Loop: xrpld.app xrpld.overlay
xrpld.overlay ~= xrpld.app
xrpld.overlay == xrpld.app

Loop: xrpld.app xrpld.peerfinder
xrpld.peerfinder == xrpld.app

.github/workflows/on-pr.yml (15 lines changed)

@@ -65,12 +65,9 @@ jobs:
.github/workflows/reusable-build.yml
.github/workflows/reusable-build-test-config.yml
.github/workflows/reusable-build-test.yml
.github/workflows/reusable-clang-tidy.yml
.github/workflows/reusable-clang-tidy-files.yml
.github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml
.github/workflows/reusable-upload-recipe.yml
.clang-tidy
.codecov.yml
cmake/**
conan/**

@@ -110,17 +107,6 @@ jobs:
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-rename.yml

clang-tidy:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-clang-tidy.yml
permissions:
issues: write
contents: read
with:
check_only_changed: true
create_issue_on_failure: false

build-test:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}

@@ -170,7 +156,6 @@ jobs:
needs:
- check-levelization
- check-rename
- clang-tidy
- build-test
- upload-recipe
- notify-clio

.github/workflows/on-trigger.yml (12 lines changed)

@@ -22,12 +22,9 @@ on:
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/reusable-clang-tidy.yml"
- ".github/workflows/reusable-clang-tidy-files.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".github/workflows/reusable-upload-recipe.yml"
- ".clang-tidy"
- ".codecov.yml"
- "cmake/**"
- "conan/**"

@@ -63,15 +60,6 @@ defaults:
shell: bash

jobs:
clang-tidy:
uses: ./.github/workflows/reusable-clang-tidy.yml
permissions:
issues: write
contents: read
with:
check_only_changed: false
create_issue_on_failure: ${{ github.event_name == 'schedule' }}

build-test:
uses: ./.github/workflows/reusable-build-test.yml
strategy:

.github/workflows/reusable-clang-tidy-files.yml (162 lines changed)

@@ -1,162 +0,0 @@
name: Run clang-tidy on files

on:
workflow_call:
inputs:
files:
description: "List of files to check (empty means check all files)"
type: string
default: ""
create_issue_on_failure:
description: "Whether to create an issue if the check failed"
type: boolean
default: false

defaults:
run:
shell: bash

env:
# Conan installs the generators in the build/generators directory, see the
# layout() method in conanfile.py. We then run CMake from the build directory.
BUILD_DIR: build
BUILD_TYPE: Release

jobs:
run-clang-tidy:
name: Run clang tidy
runs-on: ["self-hosted", "Linux", "X64", "heavy"]
container: "ghcr.io/xrplf/ci/debian-trixie:clang-21-sha-53033a2"
permissions:
issues: write
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

- name: Prepare runner
uses: XRPLF/actions/prepare-runner@2cbf481018d930656e9276fcc20dc0e3a0be5b6d
with:
enable_ccache: false

- name: Print build environment
uses: ./.github/actions/print-env

- name: Get number of processors
uses: XRPLF/actions/get-nproc@cf0433aa74563aead044a1e395610c96d65a37cf
id: nproc

- name: Setup Conan
uses: ./.github/actions/setup-conan

- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ env.BUILD_TYPE }}
log_verbosity: verbose

- name: Configure CMake
working-directory: ${{ env.BUILD_DIR }}
run: |
cmake \
-G 'Ninja' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
-Dtests=ON \
-Dwerr=ON \
-Dxrpld=ON \
..

# clang-tidy needs headers generated from proto files
- name: Build libxrpl.libpb
working-directory: ${{ env.BUILD_DIR }}
run: |
ninja -j ${{ steps.nproc.outputs.nproc }} xrpl.libpb

- name: Run clang tidy
id: run_clang_tidy
continue-on-error: true
env:
FILES: ${{ inputs.files }}
run: |
run-clang-tidy -j ${{ steps.nproc.outputs.nproc }} -p "$BUILD_DIR" $FILES 2>&1 | tee clang-tidy-output.txt

- name: Upload clang-tidy output
if: steps.run_clang_tidy.outcome != 'success'
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: clang-tidy-results
path: clang-tidy-output.txt
retention-days: 30

- name: Create an issue
if: steps.run_clang_tidy.outcome != 'success' && inputs.create_issue_on_failure
id: create_issue
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
# Prepare issue body with clang-tidy output
cat > issue.md <<EOF
## Clang-tidy Check Failed

**Workflow:** ${{ github.workflow }}
**Run ID:** ${{ github.run_id }}
**Commit:** ${{ github.sha }}
**Branch/Ref:** ${{ github.ref }}
**Triggered by:** ${{ github.actor }}

### Clang-tidy Output:
\`\`\`
EOF

# Append clang-tidy output (filter for errors and warnings)
if [ -f clang-tidy-output.txt ]; then
# Extract lines containing 'error:', 'warning:', or 'note:'
grep -E '(error:|warning:|note:)' clang-tidy-output.txt > filtered-output.txt || true

# If filtered output is empty, use original (might be a different error format)
if [ ! -s filtered-output.txt ]; then
cp clang-tidy-output.txt filtered-output.txt
fi

# Truncate if too large
head -c 60000 filtered-output.txt >> issue.md
if [ "$(wc -c < filtered-output.txt)" -gt 60000 ]; then
echo "" >> issue.md
echo "... (output truncated, see artifacts for full output)" >> issue.md
fi

rm filtered-output.txt
else
echo "No output file found" >> issue.md
fi

cat >> issue.md <<EOF
\`\`\`

**Workflow run:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}

---
*This issue was automatically created by the clang-tidy workflow.*
EOF

# Create the issue
gh issue create \
--label "Bug,Clang-tidy" \
--title "Clang-tidy check failed" \
--body-file ./issue.md \
> create_issue.log

created_issue="$(sed 's|.*/||' create_issue.log)"
echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
echo "Created issue #$created_issue"

rm -f create_issue.log issue.md clang-tidy-output.txt

- name: Fail the workflow if clang-tidy failed
if: steps.run_clang_tidy.outcome != 'success'
run: |
echo "Clang-tidy check failed!"
exit 1

.github/workflows/reusable-clang-tidy.yml (47 lines changed)

@@ -1,47 +0,0 @@
name: Clang-tidy check

on:
workflow_call:
inputs:
check_only_changed:
description: "Check only changed files in PR. If false, checks all files in the repository."
type: boolean
default: false
create_issue_on_failure:
description: "Whether to create an issue if the check failed"
type: boolean
default: false

defaults:
run:
shell: bash

jobs:
determine-files:
name: Determine files to check
if: ${{ inputs.check_only_changed }}
runs-on: ubuntu-latest
outputs:
any_changed: ${{ steps.changed_files.outputs.any_changed }}
all_changed_files: ${{ steps.changed_files.outputs.all_changed_files }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

- name: Get changed C++ files
id: changed_files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
**/*.cpp
**/*.h
**/*.ipp
separator: " "

run-clang-tidy:
needs: [determine-files]
if: ${{ always() && !cancelled() && (!inputs.check_only_changed || needs.determine-files.outputs.any_changed == 'true') }}
uses: ./.github/workflows/reusable-clang-tidy-files.yml
with:
files: ${{ inputs.check_only_changed && needs.determine-files.outputs.all_changed_files || '' }}
create_issue_on_failure: ${{ inputs.create_issue_on_failure }}

@@ -17,7 +17,6 @@ project(xrpl)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

include(CompilationEnv)

@@ -39,16 +38,16 @@ include(Ccache)
# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if (Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse
HEAD OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if (gch)
set(GIT_COMMIT_HASH "${gch}")
message(STATUS gch: ${GIT_COMMIT_HASH})
add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif ()

execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse
--abbrev-ref HEAD OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
if (gb)
set(GIT_BRANCH "${gb}")
message(STATUS gb: ${GIT_BRANCH})

@@ -68,8 +67,7 @@ include(FetchContent)
include(ExternalProject)
include(CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
if (target)
message(FATAL_ERROR "The target option has been removed - use native cmake options to control build"
)
message(FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()

include(XrplSanity)

@@ -78,8 +76,7 @@ include(XrplSettings)
# this check has to remain in the top-level cmake because of the early return statement
if (packages_only)
if (NOT TARGET rpm)
message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?"
)
message(FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?")
endif ()
return()
endif ()

@@ -121,8 +118,7 @@ target_link_libraries(
option(rocksdb "Enable RocksDB" ON)
if (rocksdb)
find_package(RocksDB REQUIRED)
set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS
XRPL_ROCKSDB_AVAILABLE=1)
set_target_properties(RocksDB::rocksdb PROPERTIES INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1)
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
endif ()

@@ -43,8 +43,7 @@ set(CMAKE_VS_GLOBALS "CLToolExe=cl.exe" "CLToolPath=${CMAKE_BINARY_DIR}" "TrackF
# By default Visual Studio generators will use /Zi to capture debug information, which is not compatible with ccache, so
# tell it to use /Z7 instead.
if (MSVC)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG
CMAKE_CXX_FLAGS_RELEASE)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
string(REPLACE "/Zi" "/Z7" ${var_} "${${var_}}")
endforeach ()
endif ()

@@ -180,8 +180,7 @@ elseif (DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if (APPLE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH
OUTPUT_STRIP_TRAILING_WHITESPACE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program(LLVMCOV_PATH llvm-cov)
endif ()

@@ -200,8 +199,8 @@ foreach (LANG ${LANGUAGES})
if ("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif ()
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}"
MATCHES "(LLVM)?[Ff]lang")
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES
"(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif ()
endforeach ()

@@ -322,16 +321,14 @@ function (setup_target_for_coverage_gcovr)
endif ()

if ("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting..."
)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else ()
if ((Coverage_FORMAT STREQUAL "html-details") OR (Coverage_FORMAT STREQUAL "html-nested"))
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif (Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL
"json-details")
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls"))
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif (Coverage_FORMAT STREQUAL "txt")

@@ -455,10 +452,8 @@ function (setup_target_for_coverage_gcovr)
COMMENT "Running gcovr to produce code coverage report.")

# Show info where to find the report
add_custom_command(
TARGET ${Coverage_NAME} POST_BUILD COMMAND echo
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD COMMAND echo
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}")
endfunction () # setup_target_for_coverage_gcovr

function (add_code_coverage_to_target name scope)

@@ -468,9 +463,8 @@ function (add_code_coverage_to_target name scope)
separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")

# Add compiler options to the target
target_compile_options(
${name} ${scope} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
target_compile_options(${name} ${scope} $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)

target_link_libraries(${name} ${scope} $<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS}>
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS}>)

@@ -17,8 +17,7 @@ link_libraries(Xrpl::common)
if (NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE)
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
endif ()
set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE
${CMAKE_POSITION_INDEPENDENT_CODE})
set_target_properties(common PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ${CMAKE_POSITION_INDEPENDENT_CODE})
set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions(
common

@@ -38,8 +37,7 @@ if (MSVC)
# remove existing exception flag since we set it to -EHa
string(REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")

foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG
CMAKE_CXX_FLAGS_RELEASE)
foreach (var_ CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)

# also remove dynamic runtime
string(REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")

@@ -145,23 +143,20 @@ if (voidstar)
elseif (NOT is_linux)
message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting..."
)
message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
endif ()
endif ()

if (use_mold)
# use mold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "mold")
target_link_libraries(common INTERFACE -fuse-ld=mold)
endif ()
unset(LD_VERSION)
elseif (use_gold AND is_gcc)
# use gold linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
#[=========================================================[
NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
default instead of DT_RPATH, so you might have slightly

@@ -191,8 +186,7 @@ elseif (use_gold AND is_gcc)
unset(LD_VERSION)
elseif (use_lld)
# use lld linker if available
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET
OUTPUT_VARIABLE LD_VERSION)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
if ("${LD_VERSION}" MATCHES "LLD")
target_link_libraries(common INTERFACE -fuse-ld=lld)
endif ()

@@ -14,8 +14,7 @@ target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/x
PROTOS include/xrpl/proto/xrpl.proto)

file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto
PROTOS "${protos}")
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto PROTOS "${protos}")
target_protobuf_sources(
xrpl.libpb xrpl/proto
LANGUAGE grpc

@@ -25,9 +24,8 @@ target_protobuf_sources(
GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc)

target_compile_options(
xrpl.libpb
PUBLIC $<$<BOOL:${is_msvc}>:-wd4996> $<$<BOOL:${is_xcode}>:
--system-header-prefix="google/protobuf" -Wno-deprecated-dynamic-exception-spec >
xrpl.libpb PUBLIC $<$<BOOL:${is_msvc}>:-wd4996> $<$<BOOL:${is_xcode}>: --system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec >
PRIVATE $<$<BOOL:${is_msvc}>:-wd4065> $<$<NOT:$<BOOL:${is_msvc}>>:-Wno-deprecated-declarations>)

target_link_libraries(xrpl.libpb PUBLIC protobuf::libprotobuf gRPC::grpc++)

@@ -75,8 +73,7 @@ target_link_libraries(xrpl.libxrpl.protocol PUBLIC xrpl.libxrpl.crypto xrpl.libx

# Level 05
add_module(xrpl core)
target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.core PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)

# Level 06
add_module(xrpl resource)

@@ -84,23 +81,22 @@ target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)

# Level 07
add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol xrpl.libxrpl.resource)
target_link_libraries(xrpl.libxrpl.net PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol
xrpl.libxrpl.resource)

add_module(xrpl nodestore)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json
xrpl.libxrpl.protocol)
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.json xrpl.libxrpl.protocol)

add_module(xrpl shamap)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto
xrpl.libxrpl.protocol xrpl.libxrpl.nodestore)
target_link_libraries(xrpl.libxrpl.shamap PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.crypto xrpl.libxrpl.protocol
xrpl.libxrpl.nodestore)

add_module(xrpl rdb)
target_link_libraries(xrpl.libxrpl.rdb PUBLIC xrpl.libxrpl.basics xrpl.libxrpl.core)

add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol xrpl.libxrpl.core
xrpl.libxrpl.rdb xrpl.libxrpl.resource)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol xrpl.libxrpl.core xrpl.libxrpl.rdb
xrpl.libxrpl.resource)

add_module(xrpl conditions)
target_link_libraries(xrpl.libxrpl.conditions PUBLIC xrpl.libxrpl.server)

@@ -65,8 +65,8 @@ add_custom_command(
OUTPUT "${doxygen_index_file}"
COMMAND "${CMAKE_COMMAND}" -E env "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}"
"DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" "DOXYGEN_TAGFILES=${doxygen_tagfiles}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}"
"DOXYGEN_DOT_PATH=${doxygen_dot_path}" "${DOXYGEN_EXECUTABLE}" "${doxyfile}"
"DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" "DOXYGEN_DOT_PATH=${doxygen_dot_path}"
"${DOXYGEN_EXECUTABLE}" "${doxyfile}"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
DEPENDS "${dependencies}" "${tagfile}")
add_custom_target(docs DEPENDS "${doxygen_index_file}" SOURCES "${dependencies}")

@@ -41,13 +41,11 @@ install(TARGETS common
INCLUDES
DESTINATION include)

install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")

install(EXPORT XrplExports FILE XrplTargets.cmake NAMESPACE Xrpl:: DESTINATION lib/cmake/xrpl)
include(CMakePackageConfigHelpers)
write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version}
COMPATIBILITY SameMajorVersion)
write_basic_package_version_file(XrplConfigVersion.cmake VERSION ${xrpld_version} COMPATIBILITY SameMajorVersion)

if (is_root_project AND TARGET xrpld)
install(TARGETS xrpld RUNTIME DESTINATION bin)

@@ -74,5 +72,5 @@ if (is_root_project AND TARGET xrpld)
")
endif ()

install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake DESTINATION lib/cmake/xrpl)
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake ${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
DESTINATION lib/cmake/xrpl)

@@ -33,13 +33,10 @@ target_compile_definitions(
target_compile_options(
opts
INTERFACE $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized> $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
$<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

target_link_libraries(opts INTERFACE $<$<BOOL:${profile}>:-pg>
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
target_link_libraries(opts INTERFACE $<$<BOOL:${profile}>:-pg> $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

if (jemalloc)
find_package(jemalloc REQUIRED)

@@ -19,8 +19,7 @@ if (NOT is_multiconfig)
endif ()

if (is_clang) # both Clang and AppleClang
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS
16.0)
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
message(FATAL_ERROR "This project requires clang 16 or later")
endif ()
elseif (is_gcc)

@@ -33,8 +32,7 @@ endif ()
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message(FATAL_ERROR "Builds (in-source) are not allowed in "
"${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles "
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory."
)
"directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.")
endif ()

if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")

@@ -70,8 +70,7 @@ if (is_linux AND NOT SANITIZER)
else ()
set(TRUNCATED_LOGS_DEFAULT OFF)
endif ()
option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux."
${TRUNCATED_LOGS_DEFAULT})
option(TRUNCATED_THREAD_NAME_LOGS "Show warnings about truncated thread names on Linux." ${TRUNCATED_LOGS_DEFAULT})
if (TRUNCATED_THREAD_NAME_LOGS)
add_compile_definitions(TRUNCATED_THREAD_NAME_LOGS)
endif ()

@@ -93,13 +92,11 @@ endif ()

option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
"Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_protobuf "Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc "Force a local build of gRPC instead of looking for an installed version." OFF)

# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
"Prevents unit test definitions from being inserted into global table" OFF)
option(beast_no_unit_test_inline "Prevents unit test definitions from being inserted into global table" OFF)
option(single_io_service_thread "Restricts the number of threads calling io_context::run to one. \
This can be useful when debugging." OFF)
option(boost_show_deprecated "Allow boost to fail on deprecated usage. Only useful if you're trying\

@@ -1,6 +1,4 @@
option(validator_keys
"Enables building of validator-keys tool as a separate target (imported via FetchContent)"
OFF)
option(validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)

if (validator_keys)
git_branch(current_branch)

@@ -10,9 +8,8 @@ if (validator_keys)
endif ()
message(STATUS "Tracking ValidatorKeys branch: ${current_branch}")

FetchContent_Declare(
validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}")
FetchContent_Declare(validator_keys GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}")
FetchContent_MakeAvailable(validator_keys)
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})

@@ -15,12 +15,11 @@ include(isolate_headers)
function (add_module parent name)
set(target ${PROJECT_NAME}.lib${parent}.${name})
add_library(${target} OBJECT)
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp")
file(GLOB_RECURSE sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp")
target_sources(${target} PRIVATE ${sources})
target_include_directories(${target} PUBLIC "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>")
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/include"
"${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}" PUBLIC)
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src"
"${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}" PRIVATE)
isolate_headers(${target} "${CMAKE_CURRENT_SOURCE_DIR}/src" "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
PRIVATE)
endfunction ()

@@ -39,7 +39,6 @@ if (SANITIZERS_ENABLED AND is_clang)
endif ()
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
target_compile_options(
opts INTERFACE # ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
target_compile_options(opts INTERFACE # ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif ()

@@ -6,7 +6,6 @@ ignorePaths:
- docs/**/*.puml
- cmake/**
- LICENSE.md
- .clang-tidy
language: en
allowCompoundWords: true # TODO (#6334)
ignoreRandomStrings: true

@@ -244,7 +244,15 @@ message TMGetObjectByHash {

message TMLedgerNode {
required bytes nodedata = 1;

// Used when fixLedgerNodeID is disabled.
optional bytes nodeid = 2; // missing for ledger base data

// Used when fixLedgerNodeID is enabled. Neither value is set for ledger base data.
oneof reference {
bytes id = 3; // Set for inner nodes.
uint32 depth = 4; // Set for leaf nodes.
}
}
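
Editor's note on consuming the new field: with fixLedgerNodeID enabled, a peer sends either an explicit node ID (inner nodes) or a depth (leaf nodes), and neither for ledger base data. The sketch below is a minimal, hypothetical illustration of branching on that `oneof`; it assumes the standard accessors protoc generates for a proto2 oneof, that the message lives in the `protocol` package, and an assumed generated-header path. The `processInner`/`processLeaf`/`processLedgerBase` helpers are placeholders, not code from this repository.

```cpp
#include <xrpl/proto/xrpl.pb.h>  // assumed path of the generated header

#include <cstdint>
#include <string>

// Placeholder helpers, not part of rippled.
void processInner(std::string const& nodeData, std::string const& nodeID);
void processLeaf(std::string const& nodeData, std::uint32_t depth);
void processLedgerBase(std::string const& nodeData);

void
handleLedgerNode(protocol::TMLedgerNode const& node)
{
    switch (node.reference_case())
    {
        case protocol::TMLedgerNode::kId:
            // Inner node: an explicit node ID travels with the data.
            processInner(node.nodedata(), node.id());
            break;
        case protocol::TMLedgerNode::kDepth:
            // Leaf node: only the depth is sent.
            processLeaf(node.nodedata(), node.depth());
            break;
        case protocol::TMLedgerNode::REFERENCE_NOT_SET:
            // Ledger base data: neither value is set.
            processLedgerBase(node.nodedata());
            break;
    }
}
```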

enum TMLedgerInfoType {

@@ -15,6 +15,7 @@

// Add new amendments to the top of this list.
// Keep it sorted in reverse chronological order.
XRPL_FIX (LedgerNodeID, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FIX (PermissionedDomainInvariant, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (ExpiredNFTokenOfferRemoval, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (BatchInnerSigs, Supported::yes, VoteBehavior::DefaultNo)
@@ -252,7 +252,7 @@ public:
|
||||
bool
|
||||
getNodeFat(
|
||||
SHAMapNodeID const& wanted,
|
||||
std::vector<std::pair<SHAMapNodeID, Blob>>& data,
|
||||
std::vector<std::tuple<SHAMapNodeID, Blob, bool>>& data,
|
||||
bool fatLeaves,
|
||||
std::uint32_t depth) const;
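
Editor's illustration of the call-site impact: `getNodeFat()` now fills a vector of tuples instead of pairs, so callers receive a third per-node element. The diff does not show what that `bool` encodes, so the sketch below simply unpacks it with structured bindings and treats it as an opaque flag. It assumes rippled's `SHAMap`, `SHAMapNodeID`, and `Blob` types are in scope (header includes omitted) and is not the repository's actual caller.

```cpp
// Hypothetical caller of the new signature; requires <tuple> and <vector>.
bool
collectNodes(SHAMap const& map, SHAMapNodeID const& wanted)
{
    std::vector<std::tuple<SHAMapNodeID, Blob, bool>> data;
    if (!map.getNodeFat(wanted, data, /*fatLeaves=*/true, /*depth=*/2))
        return false;

    for (auto const& [nodeID, blob, flag] : data)
    {
        // Forward (nodeID, blob) to the peer; `flag` distinguishes the two
        // per-node cases the caller must now handle (assumption).
        (void)nodeID;
        (void)blob;
        (void)flag;
    }
    return true;
}
```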

@@ -280,9 +280,12 @@ public:
serializeRoot(Serializer& s) const;

SHAMapAddNode
addRootNode(SHAMapHash const& hash, Slice const& rootNode, SHAMapSyncFilter* filter);
addRootNode(SHAMapHash const& hash, intr_ptr::SharedPtr<SHAMapTreeNode> rootNode, SHAMapSyncFilter const* filter);
SHAMapAddNode
addKnownNode(SHAMapNodeID const& nodeID, Slice const& rawNode, SHAMapSyncFilter* filter);
addKnownNode(
SHAMapNodeID const& nodeID,
intr_ptr::SharedPtr<SHAMapTreeNode> treeNode,
SHAMapSyncFilter const* filter);

// status functions
void

@@ -333,7 +336,7 @@ private:
cacheLookup(SHAMapHash const& hash) const;

void
canonicalize(SHAMapHash const& hash, intr_ptr::SharedPtr<SHAMapTreeNode>&) const;
canonicalize(SHAMapHash const& hash, intr_ptr::SharedPtr<SHAMapTreeNode>& node) const;

// database operations
intr_ptr::SharedPtr<SHAMapTreeNode>

@@ -341,11 +344,11 @@ private:
intr_ptr::SharedPtr<SHAMapTreeNode>
fetchNodeNT(SHAMapHash const& hash) const;
intr_ptr::SharedPtr<SHAMapTreeNode>
fetchNodeNT(SHAMapHash const& hash, SHAMapSyncFilter* filter) const;
fetchNodeNT(SHAMapHash const& hash, SHAMapSyncFilter const* filter) const;
intr_ptr::SharedPtr<SHAMapTreeNode>
fetchNode(SHAMapHash const& hash) const;
intr_ptr::SharedPtr<SHAMapTreeNode>
checkFilter(SHAMapHash const& hash, SHAMapSyncFilter* filter) const;
checkFilter(SHAMapHash const& hash, SHAMapSyncFilter const* filter) const;

/** Update hashes up to the root */
void

@@ -405,10 +408,11 @@ private:
// If pending, callback is called as if it called fetchNodeNT
using descendCallback = std::function<void(intr_ptr::SharedPtr<SHAMapTreeNode>, SHAMapHash const&)>;
SHAMapTreeNode*
descendAsync(SHAMapInnerNode* parent, int branch, SHAMapSyncFilter* filter, bool& pending, descendCallback&&) const;
descendAsync(SHAMapInnerNode* parent, int branch, SHAMapSyncFilter const* filter, bool& pending, descendCallback&&)
const;

std::pair<SHAMapTreeNode*, SHAMapNodeID>
descend(SHAMapInnerNode* parent, SHAMapNodeID const& parentID, int branch, SHAMapSyncFilter* filter) const;
descend(SHAMapInnerNode* parent, SHAMapNodeID const& parentID, int branch, SHAMapSyncFilter const* filter) const;

// Non-storing
// Does not hook the returned node to its parent

@@ -1289,14 +1289,14 @@ addEmptyHolding(
dstId,
index.key,
sleDst,
/*bAuth=*/false,
/*bNoRipple=*/true,
/*bFreeze=*/false,
/*auth=*/false,
/*noRipple=*/true,
/*freeze=*/false,
/*deepFreeze*/ false,
/*saBalance=*/STAmount{Issue{currency, noAccount()}},
/*saLimit=*/STAmount{Issue{currency, dstId}},
/*uSrcQualityIn=*/0,
/*uSrcQualityOut=*/0,
/*balance=*/STAmount{Issue{currency, noAccount()}},
/*limit=*/STAmount{Issue{currency, dstId}},
/*qualityIn=*/0,
/*qualityOut=*/0,
journal);
}

@@ -1893,93 +1893,6 @@ rippleSendIOU(
return terResult;
}

template <class TAsset>
static TER
doSendMulti(
std::string const& name,
ApplyView& view,
AccountID const& senderID,
TAsset const& issue,
MultiplePaymentDestinations const& receivers,
STAmount& actual,
beast::Journal j,
WaiveTransferFee waiveFee,
// Don't pass back parameters that the caller already has
std::function<TER(AccountID const& senderID, AccountID const& receiverID, STAmount const& amount, bool checkIssuer)>
doCredit,
std::function<TER(AccountID const& issuer, STAmount const& takeFromSender, STAmount const& amount)> preMint = {})
{
// Use the same pattern for all the SendMulti functions to help avoid
// divergence and copy/paste errors.
auto const& issuer = issue.getIssuer();

// These values may not stay in sync
STAmount takeFromSender{issue};
actual = takeFromSender;

// Failures return immediately.
for (auto const& r : receivers)
{
auto const& receiverID = r.first;
STAmount amount{issue, r.second};

if (amount < beast::zero)
{
return tecINTERNAL; // LCOV_EXCL_LINE
}

/* If we aren't sending anything or if the sender is the same as the
* receiver then we don't need to do anything.
*/
if (!amount || (senderID == receiverID))
continue;

using namespace std::string_literals;
XRPL_ASSERT(!isXRP(receiverID), ("xrpl::"s + name + " : receiver is not XRP").c_str());

if (senderID == issuer || receiverID == issuer || issuer == noAccount())
{
if (preMint)
{
if (auto const ter = preMint(issuer, takeFromSender, amount))
return ter;
}
// Direct send: redeeming IOUs and/or sending own IOUs.
if (auto const ter = doCredit(senderID, receiverID, amount, false))
return ter;
actual += amount;
// Do not add amount to takeFromSender, because doCredit took
// it.

continue;
}

// Sending 3rd party: transit.

// Calculate the amount to transfer accounting
// for any transfer fees if the fee is not waived:
STAmount actualSend = (waiveFee == WaiveTransferFee::Yes || issue.native())
? amount
: multiply(amount, transferRate(view, amount));
actual += actualSend;
takeFromSender += actualSend;

JLOG(j.debug()) << name << "> " << to_string(senderID) << " - > " << to_string(receiverID)
<< " : deliver=" << amount.getFullText() << " cost=" << actualSend.getFullText();

if (TER const terResult = doCredit(issuer, receiverID, amount, true))
return terResult;
}

if (senderID != issuer && takeFromSender)
{
if (TER const terResult = doCredit(senderID, issuer, takeFromSender, true))
return terResult;
}

return tesSUCCESS;
}
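
Editor's note on the pattern: the template keeps the receiver loop, transfer-fee handling, and issuer bookkeeping in one place, so each asset-specific wrapper only has to supply a `doCredit` callback (plus an optional `preMint` hook), exactly as the updated `rippleSendMultiIOU` and `rippleSendMultiMPT` further down in this diff do. The wrapper below is a hypothetical, compact illustration of that wiring, modelled on `rippleSendMultiIOU`; it is not additional code from the repository.

```cpp
// Sketch of the wiring pattern only (hypothetical wrapper, mirrors
// rippleSendMultiIOU shown later in this diff).
static TER
sendMultiSketch(
    ApplyView& view,
    AccountID const& senderID,
    Issue const& issue,
    MultiplePaymentDestinations const& receivers,
    STAmount& actual,
    beast::Journal j,
    WaiveTransferFee waiveFee)
{
    auto doCredit = [&view, j](
                        AccountID const& from,
                        AccountID const& to,
                        STAmount const& amount,
                        bool checkIssuer) {
        // All asset-specific balance movement lives in the credit helper.
        return rippleCreditIOU(view, from, to, amount, checkIssuer, j);
    };

    return doSendMulti("sendMultiSketch", view, senderID, issue, receivers, actual, j, waiveFee, doCredit);
}
```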
|
||||
|
||||
// Send regardless of limits.
|
||||
// --> receivers: Amount/currency/issuer to deliver to receivers.
|
||||
// <-- saActual: Amount actually cost to sender. Sender pays fees.
|
||||
@@ -1993,14 +1906,63 @@ rippleSendMultiIOU(
|
||||
beast::Journal j,
|
||||
WaiveTransferFee waiveFee)
|
||||
{
|
||||
auto const& issuer = issue.getIssuer();
|
||||
|
||||
XRPL_ASSERT(!isXRP(senderID), "xrpl::rippleSendMultiIOU : sender is not XRP");
|
||||
|
||||
auto doCredit =
|
||||
[&view, j](AccountID const& senderID, AccountID const& receiverID, STAmount const& amount, bool checkIssuer) {
|
||||
return rippleCreditIOU(view, senderID, receiverID, amount, checkIssuer, j);
|
||||
};
|
||||
// These may diverge
|
||||
STAmount takeFromSender{issue};
|
||||
actual = takeFromSender;
|
||||
|
||||
return doSendMulti("rippleSendMultiIOU", view, senderID, issue, receivers, actual, j, waiveFee, doCredit);
|
||||
// Failures return immediately.
|
||||
for (auto const& r : receivers)
|
||||
{
|
||||
auto const& receiverID = r.first;
|
||||
STAmount amount{issue, r.second};
|
||||
|
||||
/* If we aren't sending anything or if the sender is the same as the
|
||||
* receiver then we don't need to do anything.
|
||||
*/
|
||||
if (!amount || (senderID == receiverID))
|
||||
continue;
|
||||
|
||||
XRPL_ASSERT(!isXRP(receiverID), "xrpl::rippleSendMultiIOU : receiver is not XRP");
|
||||
|
||||
if (senderID == issuer || receiverID == issuer || issuer == noAccount())
|
||||
{
|
||||
// Direct send: redeeming IOUs and/or sending own IOUs.
|
||||
if (auto const ter = rippleCreditIOU(view, senderID, receiverID, amount, false, j))
|
||||
return ter;
|
||||
actual += amount;
|
||||
// Do not add amount to takeFromSender, because rippleCreditIOU took
|
||||
// it.
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// Sending 3rd party IOUs: transit.
|
||||
|
||||
// Calculate the amount to transfer accounting
|
||||
// for any transfer fees if the fee is not waived:
|
||||
STAmount actualSend =
|
||||
(waiveFee == WaiveTransferFee::Yes) ? amount : multiply(amount, transferRate(view, issuer));
|
||||
actual += actualSend;
|
||||
takeFromSender += actualSend;
|
||||
|
||||
JLOG(j.debug()) << "rippleSendMultiIOU> " << to_string(senderID) << " - > " << to_string(receiverID)
|
||||
<< " : deliver=" << amount.getFullText() << " cost=" << actual.getFullText();
|
||||
|
||||
if (TER const terResult = rippleCreditIOU(view, issuer, receiverID, amount, true, j))
|
||||
return terResult;
|
||||
}
|
||||
|
||||
if (senderID != issuer && takeFromSender)
|
||||
{
|
||||
if (TER const terResult = rippleCreditIOU(view, senderID, issuer, takeFromSender, true, j))
|
||||
return terResult;
|
||||
}
|
||||
|
||||
return tesSUCCESS;
|
||||
}
|
||||
|
||||
static TER
|
||||
@@ -2129,9 +2091,9 @@ accountSendMultiIOU(
|
||||
{
|
||||
XRPL_ASSERT_PARTS(receivers.size() > 1, "xrpl::accountSendMultiIOU", "multiple recipients provided");
|
||||
|
||||
STAmount actual;
|
||||
if (!issue.native())
|
||||
{
|
||||
STAmount actual;
|
||||
JLOG(j.trace()) << "accountSendMultiIOU: " << to_string(senderID) << " sending " << receivers.size() << " IOUs";
|
||||
|
||||
return rippleSendMultiIOU(view, senderID, issue, receivers, actual, j, waiveFee);
|
||||
@@ -2156,81 +2118,6 @@ accountSendMultiIOU(
|
||||
<< " receivers.";
|
||||
}
|
||||
|
||||
auto doCredit = [&view, &sender, &receivers, j](
|
||||
AccountID const& senderID,
|
||||
AccountID const& receiverID,
|
||||
STAmount const& amount,
|
||||
bool /*checkIssuer*/) -> TER {
|
||||
if (!senderID)
|
||||
{
|
||||
SLE::pointer receiver = receiverID != beast::zero ? view.peek(keylet::account(receiverID)) : SLE::pointer();
|
||||
|
||||
if (auto stream = j.trace())
|
||||
{
|
||||
std::string receiver_bal("-");
|
||||
|
||||
if (receiver)
|
||||
receiver_bal = receiver->getFieldAmount(sfBalance).getFullText();
|
||||
|
||||
stream << "accountSendMultiIOU> " << to_string(senderID) << " -> " << to_string(receiverID) << " ("
|
||||
<< receiver_bal << ") : " << amount.getFullText();
|
||||
}
|
||||
|
||||
if (receiver)
|
||||
{
|
||||
// Increment XRP balance.
|
||||
auto const rcvBal = receiver->getFieldAmount(sfBalance);
|
||||
receiver->setFieldAmount(sfBalance, rcvBal + amount);
|
||||
view.creditHook(xrpAccount(), receiverID, amount, -rcvBal);
|
||||
|
||||
view.update(receiver);
|
||||
}
|
||||
|
||||
if (auto stream = j.trace())
|
||||
{
|
||||
std::string receiver_bal("-");
|
||||
|
||||
if (receiver)
|
||||
receiver_bal = receiver->getFieldAmount(sfBalance).getFullText();
|
||||
|
||||
stream << "accountSendMultiIOU< " << to_string(senderID) << " -> " << to_string(receiverID) << " ("
|
||||
<< receiver_bal << ") : " << amount.getFullText();
|
||||
}
|
||||
return tesSUCCESS;
|
||||
}
|
||||
// Sender
|
||||
if (sender)
|
||||
{
|
||||
if (sender->getFieldAmount(sfBalance) < amount)
|
||||
{
|
||||
return TER{tecFAILED_PROCESSING};
|
||||
}
|
||||
else
|
||||
{
|
||||
auto const sndBal = sender->getFieldAmount(sfBalance);
|
||||
view.creditHook(senderID, xrpAccount(), amount, sndBal);
|
||||
|
||||
// Decrement XRP balance.
|
||||
sender->setFieldAmount(sfBalance, sndBal - amount);
|
||||
view.update(sender);
|
||||
}
|
||||
}
|
||||
|
||||
if (auto stream = j.trace())
|
||||
{
|
||||
std::string sender_bal("-");
|
||||
|
||||
if (sender)
|
||||
sender_bal = sender->getFieldAmount(sfBalance).getFullText();
|
||||
|
||||
stream << "accountSendMultiIOU< " << to_string(senderID) << " (" << sender_bal << ") -> "
|
||||
<< receivers.size() << " receivers.";
|
||||
}
|
||||
|
||||
return tesSUCCESS;
|
||||
};
|
||||
return doSendMulti("accountSendMultiIOU", view, senderID, issue, receivers, actual, j, waiveFee, doCredit);
|
||||
|
||||
// Failures return immediately.
|
||||
STAmount takeFromSender{issue};
|
||||
for (auto const& r : receivers)
|
@@ -2443,33 +2330,80 @@ rippleSendMultiMPT(
beast::Journal j,
WaiveTransferFee waiveFee)
{
// Safe to get MPT since rippleSendMultiMPT is only called by
// accountSendMultiMPT
auto const& issuer = mptIssue.getIssuer();

auto const sle = view.read(keylet::mptIssuance(mptIssue.getMptID()));
if (!sle)
return tecOBJECT_NOT_FOUND;

auto preMint = [&](AccountID const& issuer, STAmount const& takeFromSender, STAmount const& amount) -> TER {
// if sender is issuer, check that the new OutstandingAmount will
// not exceed MaximumAmount
if (senderID == issuer)
// These may diverge
STAmount takeFromSender{mptIssue};
actual = takeFromSender;

for (auto const& r : receivers)
{
auto const& receiverID = r.first;
STAmount amount{mptIssue, r.second};

if (amount < beast::zero)
{
XRPL_ASSERT_PARTS(
takeFromSender == beast::zero,
"rippler::rippleSendMultiMPT",
"sender == issuer, takeFromSender == zero");
auto const sendAmount = amount.mpt().value();
auto const maximumAmount = sle->at(~sfMaximumAmount).value_or(maxMPTokenAmount);
if (sendAmount > maximumAmount || sle->getFieldU64(sfOutstandingAmount) > maximumAmount - sendAmount)
return tecPATH_DRY;
return tecINTERNAL; // LCOV_EXCL_LINE
}

return tesSUCCESS;
};
auto doCredit = [&view, j](AccountID const& senderID, AccountID const& receiverID, STAmount const& amount, bool) {
return rippleCreditMPT(view, senderID, receiverID, amount, j);
};
/* If we aren't sending anything or if the sender is the same as the
* receiver then we don't need to do anything.
*/
if (!amount || (senderID == receiverID))
continue;

return doSendMulti(
"rippleSendMultiMPT", view, senderID, mptIssue, receivers, actual, j, waiveFee, doCredit, preMint);
if (senderID == issuer || receiverID == issuer)
{
// if sender is issuer, check that the new OutstandingAmount will
// not exceed MaximumAmount
if (senderID == issuer)
{
XRPL_ASSERT_PARTS(
takeFromSender == beast::zero,
"rippler::rippleSendMultiMPT",
"sender == issuer, takeFromSender == zero");
auto const sendAmount = amount.mpt().value();
auto const maximumAmount = sle->at(~sfMaximumAmount).value_or(maxMPTokenAmount);
if (sendAmount > maximumAmount || sle->getFieldU64(sfOutstandingAmount) > maximumAmount - sendAmount)
return tecPATH_DRY;
}

// Direct send: redeeming MPTs and/or sending own MPTs.
if (auto const ter = rippleCreditMPT(view, senderID, receiverID, amount, j))
return ter;
actual += amount;
// Do not add amount to takeFromSender, because rippleCreditMPT took
// it

continue;
}

// Sending 3rd party MPTs: transit.
STAmount actualSend = (waiveFee == WaiveTransferFee::Yes)
? amount
: multiply(amount, transferRate(view, amount.get<MPTIssue>().getMptID()));
actual += actualSend;
takeFromSender += actualSend;

JLOG(j.debug()) << "rippleSendMultiMPT> " << to_string(senderID) << " - > " << to_string(receiverID)
<< " : deliver=" << amount.getFullText() << " cost=" << actualSend.getFullText();

if (auto const terResult = rippleCreditMPT(view, issuer, receiverID, amount, j))
return terResult;
}
if (senderID != issuer && takeFromSender)
{
if (TER const terResult = rippleCreditMPT(view, senderID, issuer, takeFromSender, j))
return terResult;
}

return tesSUCCESS;
}

static TER

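The issuer-side cap check above is written to avoid unsigned overflow: rather than computing OutstandingAmount + sendAmount (which could wrap for values near the 64-bit limit), it compares OutstandingAmount against MaximumAmount - sendAmount, which is well defined once sendAmount itself has been checked against the cap. A minimal standalone sketch of that pattern, with illustrative values only (the real code reads sfOutstandingAmount and sfMaximumAmount from the MPT issuance entry):

#include <cassert>
#include <cstdint>

// Sketch: true when crediting sendAmount would push the outstanding amount past the cap.
// The subtraction form avoids the 64-bit wraparound that outstanding + sendAmount could produce.
bool exceedsMaximum(std::uint64_t outstanding, std::uint64_t sendAmount, std::uint64_t maximumAmount)
{
    if (sendAmount > maximumAmount)
        return true;
    return outstanding > maximumAmount - sendAmount;
}

int main()
{
    assert(!exceedsMaximum(10, 5, 20));         // 15 <= 20: allowed
    assert(exceedsMaximum(18, 5, 20));          // 23 > 20: rejected
    assert(exceedsMaximum(1, UINT64_MAX, 20));  // the send alone exceeds the cap
    return 0;
}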
@@ -172,7 +172,7 @@ SHAMap::finishFetch(SHAMapHash const& hash, std::shared_ptr<NodeObject> const& o

// See if a sync filter has a node
intr_ptr::SharedPtr<SHAMapTreeNode>
SHAMap::checkFilter(SHAMapHash const& hash, SHAMapSyncFilter* filter) const
SHAMap::checkFilter(SHAMapHash const& hash, SHAMapSyncFilter const* filter) const
{
if (auto nodeData = filter->getNode(hash))
{
@@ -198,7 +198,7 @@ SHAMap::checkFilter(SHAMapHash const& hash, SHAMapSyncFilter* filter) const
// Get a node without throwing
// Used on maps where missing nodes are expected
intr_ptr::SharedPtr<SHAMapTreeNode>
SHAMap::fetchNodeNT(SHAMapHash const& hash, SHAMapSyncFilter* filter) const
SHAMap::fetchNodeNT(SHAMapHash const& hash, SHAMapSyncFilter const* filter) const
{
auto node = cacheLookup(hash);
if (node)
@@ -307,7 +307,7 @@ SHAMap::descendNoStore(SHAMapInnerNode& parent, int branch) const
}

std::pair<SHAMapTreeNode*, SHAMapNodeID>
SHAMap::descend(SHAMapInnerNode* parent, SHAMapNodeID const& parentID, int branch, SHAMapSyncFilter* filter) const
SHAMap::descend(SHAMapInnerNode* parent, SHAMapNodeID const& parentID, int branch, SHAMapSyncFilter const* filter) const
{
XRPL_ASSERT(parent->isInner(), "xrpl::SHAMap::descend : valid parent input");
XRPL_ASSERT((branch >= 0) && (branch < branchFactor), "xrpl::SHAMap::descend : valid branch input");
@@ -334,7 +334,7 @@ SHAMapTreeNode*
SHAMap::descendAsync(
SHAMapInnerNode* parent,
int branch,
SHAMapSyncFilter* filter,
SHAMapSyncFilter const* filter,
bool& pending,
descendCallback&& callback) const
{

@@ -113,7 +113,7 @@ selectBranch(SHAMapNodeID const& id, uint256 const& hash)
SHAMapNodeID
SHAMapNodeID::createID(int depth, uint256 const& key)
{
XRPL_ASSERT((depth >= 0) && (depth < 65), "xrpl::SHAMapNodeID::createID : valid branch input");
XRPL_ASSERT(depth >= 0 && depth <= SHAMap::leafDepth, "xrpl::SHAMapNodeID::createID : valid branch input");
return SHAMapNodeID(depth, key & depthMask(depth));
}

@@ -374,7 +374,7 @@ SHAMap::getMissingNodes(int max, SHAMapSyncFilter* filter)
bool
SHAMap::getNodeFat(
SHAMapNodeID const& wanted,
std::vector<std::pair<SHAMapNodeID, Blob>>& data,
std::vector<std::tuple<SHAMapNodeID, Blob, bool>>& data,
bool fatLeaves,
std::uint32_t depth) const
{
@@ -419,7 +419,7 @@ SHAMap::getNodeFat(
// Add this node to the reply
s.erase();
node->serializeForWire(s);
data.emplace_back(std::make_pair(nodeID, s.getData()));
data.emplace_back(std::make_tuple(nodeID, s.getData(), node->isLeaf()));

if (node->isInner())
{
@@ -449,7 +449,7 @@ SHAMap::getNodeFat(
// Just include this node
s.erase();
childNode->serializeForWire(s);
data.emplace_back(std::make_pair(childID, s.getData()));
data.emplace_back(std::make_tuple(childID, s.getData(), childNode->isLeaf()));
}
}
}
@@ -467,7 +467,10 @@ SHAMap::serializeRoot(Serializer& s) const
}

SHAMapAddNode
SHAMap::addRootNode(SHAMapHash const& hash, Slice const& rootNode, SHAMapSyncFilter* filter)
SHAMap::addRootNode(
SHAMapHash const& hash,
intr_ptr::SharedPtr<SHAMapTreeNode> rootNode,
SHAMapSyncFilter const* filter)
{
// we already have a root_ node
if (root_->getHash().isNonZero())
@@ -478,14 +481,13 @@ SHAMap::addRootNode(SHAMapHash const& hash, Slice const& rootNode, SHAMapSyncFil
}

XRPL_ASSERT(cowid_ >= 1, "xrpl::SHAMap::addRootNode : valid cowid");
auto node = SHAMapTreeNode::makeFromWire(rootNode);
if (!node || node->getHash() != hash)
if (rootNode->getHash() != hash)
return SHAMapAddNode::invalid();

if (backed_)
canonicalize(hash, node);
canonicalize(hash, rootNode);

root_ = node;
root_ = rootNode;

if (root_->isLeaf())
clearSynching();
@@ -501,9 +503,12 @@ SHAMap::addRootNode(SHAMapHash const& hash, Slice const& rootNode, SHAMapSyncFil
}

SHAMapAddNode
SHAMap::addKnownNode(SHAMapNodeID const& node, Slice const& rawNode, SHAMapSyncFilter* filter)
SHAMap::addKnownNode(
SHAMapNodeID const& nodeID,
intr_ptr::SharedPtr<SHAMapTreeNode> treeNode,
SHAMapSyncFilter const* filter)
{
XRPL_ASSERT(!node.isRoot(), "xrpl::SHAMap::addKnownNode : valid node input");
XRPL_ASSERT(!nodeID.isRoot(), "xrpl::SHAMap::addKnownNode : valid node input");

if (!isSynching())
{
@@ -516,14 +521,14 @@ SHAMap::addKnownNode(SHAMapNodeID const& node, Slice const& rawNode, SHAMapSyncF
auto currNode = root_.get();

while (currNode->isInner() && !static_cast<SHAMapInnerNode*>(currNode)->isFullBelow(generation) &&
(currNodeID.getDepth() < node.getDepth()))
(currNodeID.getDepth() < nodeID.getDepth()))
{
int const branch = selectBranch(currNodeID, node.getNodeID());
int const branch = selectBranch(currNodeID, nodeID.getNodeID());
XRPL_ASSERT(branch >= 0, "xrpl::SHAMap::addKnownNode : valid branch");
auto inner = static_cast<SHAMapInnerNode*>(currNode);
if (inner->isEmptyBranch(branch))
{
JLOG(journal_.warn()) << "Add known node for empty branch" << node;
JLOG(journal_.warn()) << "Add known node for empty branch" << nodeID;
return SHAMapAddNode::invalid();
}

@@ -539,62 +544,41 @@ SHAMap::addKnownNode(SHAMapNodeID const& node, Slice const& rawNode, SHAMapSyncF
if (currNode != nullptr)
continue;

auto newNode = SHAMapTreeNode::makeFromWire(rawNode);

if (!newNode || childHash != newNode->getHash())
if (childHash != treeNode->getHash())
{
JLOG(journal_.warn()) << "Corrupt node received";
return SHAMapAddNode::invalid();
}

// In rare cases, a node can still be corrupt even after hash
// validation. For leaf nodes, we perform an additional check to
// ensure the node's position in the tree is consistent with its
// content to prevent inconsistencies that could
// propagate further down the line.
if (newNode->isLeaf())
{
auto const& actualKey = static_cast<SHAMapLeafNode const*>(newNode.get())->peekItem()->key();

// Validate that this leaf belongs at the target position
auto const expectedNodeID = SHAMapNodeID::createID(node.getDepth(), actualKey);
if (expectedNodeID.getNodeID() != node.getNodeID())
{
JLOG(journal_.debug()) << "Leaf node position mismatch: "
<< "expected=" << expectedNodeID.getNodeID() << ", actual=" << node.getNodeID();
return SHAMapAddNode::invalid();
}
}

// Inner nodes must be at a level strictly less than 64
// but leaf nodes (while notionally at level 64) can be
// at any depth up to and including 64:
if ((currNodeID.getDepth() > leafDepth) || (newNode->isInner() && currNodeID.getDepth() == leafDepth))
if ((currNodeID.getDepth() > leafDepth) || (treeNode->isInner() && currNodeID.getDepth() == leafDepth))
{
// Map is provably invalid
state_ = SHAMapState::Invalid;
return SHAMapAddNode::useful();
}

if (currNodeID != node)
if (currNodeID != nodeID)
{
// Either this node is broken or we didn't request it (yet)
JLOG(journal_.warn()) << "unable to hook node " << node;
JLOG(journal_.warn()) << "unable to hook node " << nodeID;
JLOG(journal_.info()) << " stuck at " << currNodeID;
JLOG(journal_.info()) << "got depth=" << node.getDepth() << ", walked to= " << currNodeID.getDepth();
JLOG(journal_.info()) << "got depth=" << nodeID.getDepth() << ", walked to= " << currNodeID.getDepth();
return SHAMapAddNode::useful();
}

if (backed_)
canonicalize(childHash, newNode);
canonicalize(childHash, treeNode);

newNode = prevNode->canonicalizeChild(branch, std::move(newNode));
treeNode = prevNode->canonicalizeChild(branch, std::move(treeNode));

if (filter)
{
Serializer s;
newNode->serializeWithPrefix(s);
filter->gotNode(false, childHash, ledgerSeq_, std::move(s.modData()), newNode->getType());
treeNode->serializeWithPrefix(s);
filter->gotNode(false, childHash, ledgerSeq_, std::move(s.modData()), treeNode->getType());
}

return SHAMapAddNode::useful();

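For reference, the leaf-position check that this hunk removes from addKnownNode (and that getSHAMapNodeID keeps for the legacy nodeid path) can be condensed into a small predicate: recompute the node ID from the claimed depth and the leaf's own key, and accept the node only if that reproduces the claimed ID. A sketch distilled from the code above, inside the same namespace; leafPositionMatches is a hypothetical helper name, not part of the codebase:

#include <xrpl/shamap/SHAMapLeafNode.h>
#include <xrpl/shamap/SHAMapNodeID.h>

namespace xrpl {

// Sketch: does the leaf's key, masked to the claimed depth, land on the claimed position?
static bool
leafPositionMatches(SHAMapNodeID const& claimed, SHAMapLeafNode const& leaf)
{
    auto const& key = leaf.peekItem()->key();
    auto const expected = SHAMapNodeID::createID(static_cast<int>(claimed.getDepth()), key);
    return expected.getNodeID() == claimed.getNodeID();
}

} // namespace xrpl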
src/test/app/LedgerNodeHelpers_test.cpp (new file, 352 lines)
@@ -0,0 +1,352 @@
#include <test/jtx.h>
#include <test/jtx/Env.h>
#include <test/shamap/common.h>
#include <test/unit_test/SuiteJournal.h>

#include <xrpld/app/ledger/detail/LedgerNodeHelpers.h>

#include <xrpl/basics/random.h>
#include <xrpl/beast/unit_test.h>
#include <xrpl/proto/xrpl.pb.h>
#include <xrpl/shamap/SHAMap.h>
#include <xrpl/shamap/SHAMapItem.h>
#include <xrpl/shamap/SHAMapLeafNode.h>

namespace xrpl {
namespace tests {

class LedgerNodeHelpers_test : public beast::unit_test::suite
{
// Helper to create a simple SHAMapItem for testing
boost::intrusive_ptr<SHAMapItem>
makeTestItem(std::uint32_t seed)
{
Serializer s;
s.add32(seed);
s.add32(seed + 1);
s.add32(seed + 2);
return make_shamapitem(s.getSHA512Half(), s.slice());
}

// Helper to serialize a tree node to wire format
std::string
serializeNode(intr_ptr::SharedPtr<SHAMapTreeNode> const& node)
{
Serializer s;
node->serializeWithPrefix(s);
auto const slice = s.slice();
return std::string(reinterpret_cast<char const*>(slice.data()), slice.size());
}

void
testValidateLedgerNode()
{
testcase("validateLedgerNode");

using namespace test::jtx;

/*// Test with amendment disabled.
{
Env env{*this, testable_amendments() - fixLedgerNodeID};
auto& app = env.app();

// Valid node with nodeid.
protocol::TMLedgerNode node1;
node1.set_nodedata("test_data");
node1.set_nodeid("test_nodeid");
BEAST_EXPECT(validateLedgerNode(app, node1));

// Invalid: missing nodedata.
protocol::TMLedgerNode node2;
node2.set_nodeid("test_nodeid");
BEAST_EXPECT(!validateLedgerNode(app, node2));

// Invalid: has new field (id).
protocol::TMLedgerNode node3;
node3.set_nodedata("test_data");
node3.set_nodeid("test_nodeid");
node3.set_id("test_id");
BEAST_EXPECT(!validateLedgerNode(app, node3));

// Invalid: has new field (depth).
protocol::TMLedgerNode node4;
node4.set_nodedata("test_data");
node4.set_nodeid("test_nodeid");
node4.set_depth(5);
BEAST_EXPECT(!validateLedgerNode(app, node4));
}

// Test with amendment enabled.
{
Env env{*this, testable_amendments() | fixLedgerNodeID};
auto& app = env.app();

// Valid inner node with id.
protocol::TMLedgerNode node1;
node1.set_nodedata("test_data");
node1.set_id("test_id");
BEAST_EXPECT(validateLedgerNode(app, node1));

// Valid leaf node with depth.
protocol::TMLedgerNode node2;
node2.set_nodedata("test_data");
node2.set_depth(5);
BEAST_EXPECT(validateLedgerNode(app, node2));

// Valid leaf node at max depth.
protocol::TMLedgerNode node3;
node3.set_nodedata("test_data");
node3.set_depth(SHAMap::leafDepth);
BEAST_EXPECT(validateLedgerNode(app, node3));

// Invalid: depth exceeds max depth.
protocol::TMLedgerNode node4;
node4.set_nodedata("test_data");
node4.set_depth(SHAMap::leafDepth + 1);
BEAST_EXPECT(!validateLedgerNode(app, node4));

// Invalid: has legacy field (nodeid).
protocol::TMLedgerNode node5;
node5.set_nodedata("test_data");
node5.set_nodeid("test_nodeid");
BEAST_EXPECT(!validateLedgerNode(app, node5));

// Invalid: missing both id and depth.
protocol::TMLedgerNode node6;
node6.set_nodedata("test_data");
BEAST_EXPECT(!validateLedgerNode(app, node6));

// Invalid: missing nodedata.
protocol::TMLedgerNode node7;
node7.set_id("test_id");
BEAST_EXPECT(!validateLedgerNode(app, node7));
}*/
}

/*void
testGetTreeNode()
{
testcase("getTreeNode");

using namespace test::jtx;
test::SuiteJournal journal("LedgerNodeHelpers_test", *this);
TestNodeFamily f(journal);

// Test with valid inner node
{
auto innerNode = std::make_shared<SHAMapInnerNode>(1);
auto serialized = serializeNode(innerNode);
auto result = getTreeNode(serialized);
BEAST_EXPECT(result.has_value());
BEAST_EXPECT((*result)->isInner());
}

// Test with valid leaf node
{
auto item = makeTestItem(12345);
auto leafNode =
std::make_shared<SHAMapLeafNode>(item, SHAMapNodeType::tnACCOUNT_STATE);
auto serialized = serializeNode(leafNode);
auto result = getTreeNode(serialized);
BEAST_EXPECT(result.has_value());
BEAST_EXPECT((*result)->isLeaf());
}

// Test with invalid data - empty string
{
auto result = getTreeNode("");
BEAST_EXPECT(!result.has_value());
}

// Test with invalid data - garbage data
{
std::string garbage = "This is not a valid serialized node!";
auto result = getTreeNode(garbage);
BEAST_EXPECT(!result.has_value());
}

// Test with malformed data - truncated
{
auto item = makeTestItem(54321);
auto leafNode =
std::make_shared<SHAMapLeafNode>(item, SHAMapNodeType::tnACCOUNT_STATE);
auto serialized = serializeNode(leafNode);
// Truncate the data
serialized = serialized.substr(0, 5);
auto result = getTreeNode(serialized);
BEAST_EXPECT(!result.has_value());
}
}

void
testGetSHAMapNodeID()
{
testcase("getSHAMapNodeID");

using namespace test::jtx;
test::SuiteJournal journal("LedgerNodeHelpers_test", *this);
TestNodeFamily f(journal);

auto item = makeTestItem(99999);
auto leafNode =
std::make_shared<SHAMapLeafNode>(item, SHAMapNodeType::tnACCOUNT_STATE);
auto innerNode = std::make_shared<SHAMapInnerNode>(1);

// Test with amendment disabled
{
Env env{*this};
auto& app = env.app();

// Test with leaf node - valid nodeid
{
auto nodeID = SHAMapNodeID::createID(5, item->key());
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");
ledgerNode.set_nodeid(nodeID.getRawString());

auto result = getSHAMapNodeID(app, ledgerNode, leafNode);
BEAST_EXPECT(result.has_value());
BEAST_EXPECT(*result == nodeID);
}

// Test with leaf node - invalid nodeid (wrong depth)
{
auto nodeID = SHAMapNodeID::createID(7, item->key());
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");
ledgerNode.set_nodeid(nodeID.getRawString());

auto wrongNodeID = SHAMapNodeID::createID(5, item->key());
ledgerNode.set_nodeid(wrongNodeID.getRawString());

auto result = getSHAMapNodeID(app, ledgerNode, leafNode);
// Should fail because nodeid doesn't match the leaf's key at the
// given depth
BEAST_EXPECT(!result.has_value());
}

// Test with inner node
{
auto nodeID = SHAMapNodeID::createID(3, uint256{});
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");
ledgerNode.set_nodeid(nodeID.getRawString());

auto result = getSHAMapNodeID(app, ledgerNode, innerNode);
BEAST_EXPECT(result.has_value());
BEAST_EXPECT(*result == nodeID);
}

// Test missing nodeid
{
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");

auto result = getSHAMapNodeID(app, ledgerNode, leafNode);
BEAST_EXPECT(!result.has_value());
}
}

// Test with amendment enabled
{
Env env{*this, testable_amendments() | fixLedgerNodeID};
auto& app = env.app();

// Test with leaf node - valid depth
{
std::uint32_t depth = 5;
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");
ledgerNode.set_depth(depth);

auto result = getSHAMapNodeID(app, ledgerNode, leafNode);
BEAST_EXPECT(result.has_value());
auto expectedID = SHAMapNodeID::createID(depth, item->key());
BEAST_EXPECT(*result == expectedID);
}

// Test with leaf node - missing depth
{
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");

auto result = getSHAMapNodeID(app, ledgerNode, leafNode);
BEAST_EXPECT(!result.has_value());
}

// Test with inner node - valid id
{
auto nodeID = SHAMapNodeID::createID(3, uint256{});
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");
ledgerNode.set_id(nodeID.getRawString());

auto result = getSHAMapNodeID(app, ledgerNode, innerNode);
BEAST_EXPECT(result.has_value());
BEAST_EXPECT(*result == nodeID);
}

// Test with inner node - missing id
{
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");

auto result = getSHAMapNodeID(app, ledgerNode, innerNode);
BEAST_EXPECT(!result.has_value());
}

// Test that nodeid is rejected when amendment is enabled
{
auto nodeID = SHAMapNodeID::createID(5, item->key());
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");
ledgerNode.set_nodeid(nodeID.getRawString());

// Should fail validation before getSHAMapNodeID is called,
// but let's verify getSHAMapNodeID behavior
// Note: validateLedgerNode should be called first in practice
BEAST_EXPECT(!validateLedgerNode(app, ledgerNode));
}

// Test with root node (inner node at depth 0)
{
auto rootNode = std::make_shared<SHAMapInnerNode>(1);
auto nodeID = SHAMapNodeID{}; // root node ID
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");
ledgerNode.set_id(nodeID.getRawString());

auto result = getSHAMapNodeID(app, ledgerNode, rootNode);
BEAST_EXPECT(result.has_value());
BEAST_EXPECT(*result == nodeID);
}

// Test with leaf at maximum depth
{
std::uint32_t depth = SHAMap::leafDepth;
protocol::TMLedgerNode ledgerNode;
ledgerNode.set_nodedata("test_data");
ledgerNode.set_depth(depth);

auto result = getSHAMapNodeID(app, ledgerNode, leafNode);
BEAST_EXPECT(result.has_value());
auto expectedID = SHAMapNodeID::createID(depth, item->key());
BEAST_EXPECT(*result == expectedID);
}
}
}*/

public:
void
run() override
{
testValidateLedgerNode();
// testGetTreeNode();
// testGetSHAMapNodeID();
}
};

BEAST_DEFINE_TESTSUITE(LedgerNodeHelpers, app, xrpl);

} // namespace tests
} // namespace xrpl
@@ -1010,7 +1010,7 @@ public:
// Charlie - queue a transaction, with a higher fee
// than default
env(noop(charlie), fee(15), queued);
checkMetrics(*this, env, 6, initQueueMax, 4, 3, 257);
checkMetrics(*this, env, 6, initQueueMax, 4, 3, 256);

BEAST_EXPECT(env.seq(alice) == aliceSeq);
BEAST_EXPECT(env.seq(bob) == bobSeq);

@@ -103,13 +103,14 @@ public:
destination.setSynching();

{
std::vector<std::pair<SHAMapNodeID, Blob>> a;
std::vector<std::tuple<SHAMapNodeID, Blob, bool>> a;

BEAST_EXPECT(source.getNodeFat(SHAMapNodeID(), a, rand_bool(eng_), rand_int(eng_, 2)));

unexpected(a.size() < 1, "NodeSize");

BEAST_EXPECT(destination.addRootNode(source.getHash(), makeSlice(a[0].second), nullptr).isGood());
auto node = SHAMapTreeNode::makeFromWire(makeSlice(std::get<1>(a[0])));
BEAST_EXPECT(destination.addRootNode(source.getHash(), std::move(node), nullptr).isGood());
}

do
@@ -123,7 +124,7 @@ public:
break;

// get as many nodes as possible based on this information
std::vector<std::pair<SHAMapNodeID, Blob>> b;
std::vector<std::tuple<SHAMapNodeID, Blob, bool>> b;

for (auto& it : nodesMissing)
{
@@ -145,7 +146,8 @@ public:
// Don't use BEAST_EXPECT here b/c it will be called a
// non-deterministic number of times and the number of tests run
// should be deterministic
if (!destination.addKnownNode(b[i].first, makeSlice(b[i].second), nullptr).isUseful())
auto node = SHAMapTreeNode::makeFromWire(makeSlice(std::get<1>(b[i])));
if (!destination.addKnownNode(std::get<0>(b[i]), std::move(node), nullptr).isUseful())
fail("", __FILE__, __LINE__);
}
} while (true);

@@ -137,10 +137,10 @@ private:
receiveNode(protocol::TMLedgerData& packet, SHAMapAddNode&);

bool
takeTxRootNode(Slice const& data, SHAMapAddNode&);
takeTxRootNode(std::string const& data, SHAMapAddNode&);

bool
takeAsRootNode(Slice const& data, SHAMapAddNode&);
takeAsRootNode(std::string const& data, SHAMapAddNode&);

std::vector<uint256>
neededTxHashes(int max, SHAMapSyncFilter* filter) const;

@@ -3,6 +3,7 @@
#include <xrpld/app/ledger/InboundLedgers.h>
#include <xrpld/app/ledger/LedgerMaster.h>
#include <xrpld/app/ledger/TransactionStateSF.h>
#include <xrpld/app/ledger/detail/LedgerNodeHelpers.h>
#include <xrpld/app/main/Application.h>
#include <xrpld/overlay/Overlay.h>

@@ -815,38 +816,43 @@ InboundLedger::receiveNode(protocol::TMLedgerData& packet, SHAMapAddNode& san)
std::make_unique<AccountStateSF>(mLedger->stateMap().family().db(), app_.getLedgerMaster())};
}();

try
{
auto const f = filter.get();
auto const f = filter.get();

for (auto const& node : packet.nodes())
for (auto const& ledger_node : packet.nodes())
{
if (!validateLedgerNode(app_, ledger_node))
{
auto const nodeID = deserializeSHAMapNodeID(node.nodeid());

if (!nodeID)
throw std::runtime_error("data does not properly deserialize");

if (nodeID->isRoot())
{
san += map.addRootNode(rootHash, makeSlice(node.nodedata()), f);
}
else
{
san += map.addKnownNode(*nodeID, makeSlice(node.nodedata()), f);
}

if (!san.isGood())
{
JLOG(journal_.warn()) << "Received bad node data";
return;
}
JLOG(journal_.warn()) << "Got malformed ledger node";
san.incInvalid();
return;
}

auto treeNode = getTreeNode(ledger_node.nodedata());
if (!treeNode)
{
JLOG(journal_.warn()) << "Got invalid node data";
san.incInvalid();
return;
}

auto const nodeID = getSHAMapNodeID(app_, ledger_node, *treeNode);
if (!nodeID)
{
JLOG(journal_.warn()) << "Got invalid node id";
san.incInvalid();
return;
}

if (nodeID->isRoot())
san += map.addRootNode(rootHash, std::move(*treeNode), f);
else
san += map.addKnownNode(*nodeID, std::move(*treeNode), f);

if (!san.isGood())
{
JLOG(journal_.warn()) << "Received bad node data";
return;
}
}
catch (std::exception const& e)
{
JLOG(journal_.error()) << "Received bad node data: " << e.what();
san.incInvalid();
return;
}

if (!map.isSynching())
@@ -868,7 +874,7 @@ InboundLedger::receiveNode(protocol::TMLedgerData& packet, SHAMapAddNode& san)
Call with a lock
*/
bool
InboundLedger::takeAsRootNode(Slice const& data, SHAMapAddNode& san)
InboundLedger::takeAsRootNode(std::string const& data, SHAMapAddNode& san)
{
if (failed_ || mHaveState)
{
@@ -884,8 +890,16 @@ InboundLedger::takeAsRootNode(Slice const& data, SHAMapAddNode& san)
// LCOV_EXCL_STOP
}

auto treeNode = getTreeNode(data);
if (!treeNode)
{
JLOG(journal_.warn()) << "Got invalid node data";
san.incInvalid();
return false;
}

AccountStateSF filter(mLedger->stateMap().family().db(), app_.getLedgerMaster());
san += mLedger->stateMap().addRootNode(SHAMapHash{mLedger->header().accountHash}, data, &filter);
san += mLedger->stateMap().addRootNode(SHAMapHash{mLedger->header().accountHash}, *treeNode, &filter);
return san.isGood();
}

@@ -893,7 +907,7 @@ InboundLedger::takeAsRootNode(Slice const& data, SHAMapAddNode& san)
Call with a lock
*/
bool
InboundLedger::takeTxRootNode(Slice const& data, SHAMapAddNode& san)
InboundLedger::takeTxRootNode(std::string const& data, SHAMapAddNode& san)
{
if (failed_ || mHaveTransactions)
{
@@ -909,8 +923,16 @@ InboundLedger::takeTxRootNode(Slice const& data, SHAMapAddNode& san)
// LCOV_EXCL_STOP
}

auto treeNode = getTreeNode(data);
if (!treeNode)
{
JLOG(journal_.warn()) << "Got invalid node data";
san.incInvalid();
return false;
}

TransactionStateSF filter(mLedger->txMap().family().db(), app_.getLedgerMaster());
san += mLedger->txMap().addRootNode(SHAMapHash{mLedger->header().txHash}, data, &filter);
san += mLedger->txMap().addRootNode(SHAMapHash{mLedger->header().txHash}, *treeNode, &filter);
return san.isGood();
}

@@ -1004,14 +1026,12 @@ InboundLedger::processData(std::shared_ptr<Peer> peer, protocol::TMLedgerData& p
san.incUseful();
}

if (!mHaveState && (packet.nodes().size() > 1) &&
!takeAsRootNode(makeSlice(packet.nodes(1).nodedata()), san))
if (!mHaveState && (packet.nodes().size() > 1) && !takeAsRootNode(packet.nodes(1).nodedata(), san))
{
JLOG(journal_.warn()) << "Included AS root invalid";
}

if (!mHaveTransactions && (packet.nodes().size() > 2) &&
!takeTxRootNode(makeSlice(packet.nodes(2).nodedata()), san))
if (!mHaveTransactions && (packet.nodes().size() > 2) && !takeTxRootNode(packet.nodes(2).nodedata(), san))
{
JLOG(journal_.warn()) << "Included TX root invalid";
}
@@ -1044,13 +1064,13 @@ InboundLedger::processData(std::shared_ptr<Peer> peer, protocol::TMLedgerData& p

ScopedLockType sl(mtx_);

// Verify node IDs and data are complete
for (auto const& node : packet.nodes())
// Verify nodes are complete
for (auto const& ledger_node : packet.nodes())
{
if (!node.has_nodeid() || !node.has_nodedata())
if (!validateLedgerNode(app_, ledger_node))
{
JLOG(journal_.warn()) << "Got bad node";
peer->charge(Resource::feeMalformedRequest, "ledger_data bad node");
JLOG(journal_.warn()) << "Got malformed ledger node";
peer->charge(Resource::feeMalformedRequest, "ledger_node");
return -1;
}
}

@@ -1,5 +1,6 @@
#include <xrpld/app/ledger/InboundLedgers.h>
#include <xrpld/app/ledger/LedgerMaster.h>
#include <xrpld/app/ledger/detail/LedgerNodeHelpers.h>
#include <xrpld/app/main/Application.h>

#include <xrpl/basics/DecayingSample.h>
@@ -213,29 +214,20 @@ public:
gotStaleData(std::shared_ptr<protocol::TMLedgerData> packet_ptr) override
{
Serializer s;
try
for (auto const& ledger_node : packet_ptr->nodes())
{
for (int i = 0; i < packet_ptr->nodes().size(); ++i)
{
auto const& node = packet_ptr->nodes(i);
if (!validateLedgerNode(app_, ledger_node))
return;

if (!node.has_nodeid() || !node.has_nodedata())
return;
auto const treeNode = getTreeNode(ledger_node.nodedata());
if (!treeNode)
return;
auto const tn = *treeNode;

auto newNode = SHAMapTreeNode::makeFromWire(makeSlice(node.nodedata()));
s.erase();
tn->serializeWithPrefix(s);

if (!newNode)
return;

s.erase();
newNode->serializeWithPrefix(s);

app_.getLedgerMaster().addFetchPack(
newNode->getHash().as_uint256(), std::make_shared<Blob>(s.begin(), s.end()));
}
}
catch (std::exception const&)
{
app_.getLedgerMaster().addFetchPack(tn->getHash().as_uint256(), std::make_shared<Blob>(s.begin(), s.end()));
}
}


@@ -1,5 +1,6 @@
#include <xrpld/app/ledger/InboundLedgers.h>
#include <xrpld/app/ledger/InboundTransactions.h>
#include <xrpld/app/ledger/detail/LedgerNodeHelpers.h>
#include <xrpld/app/ledger/detail/TransactionAcquire.h>
#include <xrpld/app/main/Application.h>

@@ -127,26 +128,35 @@ public:
return;
}

std::vector<std::pair<SHAMapNodeID, Slice>> data;
std::vector<std::pair<SHAMapNodeID, intr_ptr::SharedPtr<SHAMapTreeNode>>> data;
data.reserve(packet.nodes().size());

for (auto const& node : packet.nodes())
for (auto const& ledger_node : packet.nodes())
{
if (!node.has_nodeid() || !node.has_nodedata())
if (!validateLedgerNode(app_, ledger_node))
{
peer->charge(Resource::feeMalformedRequest, "ledger_data");
JLOG(j_.warn()) << "Got malformed ledger node";
peer->charge(Resource::feeMalformedRequest, "ledger_node");
return;
}

auto const id = deserializeSHAMapNodeID(node.nodeid());

if (!id)
auto const treeNode = getTreeNode(ledger_node.nodedata());
if (!treeNode)
{
peer->charge(Resource::feeInvalidData, "ledger_data");
JLOG(j_.warn()) << "Got invalid node data";
peer->charge(Resource::feeInvalidData, "node_data");
return;
}

data.emplace_back(std::make_pair(*id, makeSlice(node.nodedata())));
auto const nodeID = getSHAMapNodeID(app_, ledger_node, *treeNode);
if (!nodeID)
{
JLOG(j_.warn()) << "Got invalid node id";
peer->charge(Resource::feeInvalidData, "node_id");
return;
}

data.emplace_back(std::make_pair(*nodeID, *treeNode));
}

if (!ta->takeNodes(data, peer).isUseful())

src/xrpld/app/ledger/detail/LedgerNodeHelpers.cpp (new file, 163 lines)
@@ -0,0 +1,163 @@
#include <xrpld/app/ledger/detail/LedgerNodeHelpers.h>
#include <xrpld/app/main/Application.h>

#include <xrpl/basics/IntrusivePointer.h>
#include <xrpl/beast/utility/instrumentation.h>
#include <xrpl/ledger/AmendmentTable.h>
#include <xrpl/shamap/SHAMapLeafNode.h>
#include <xrpl/shamap/SHAMapNodeID.h>
#include <xrpl/shamap/SHAMapTreeNode.h>

namespace xrpl {

/**
* @brief Validates a ledger node based on the fixLedgerNodeID amendment status.
*
* This function checks whether a ledger node has the expected fields based on
* whether the fixLedgerNodeID amendment is enabled. The validation rules differ
* depending on the amendment state:
*
* When the amendment is enabled:
* - The node must have `nodedata`.
* - The legacy `nodeid` field must NOT be present.
* - For inner nodes: the `id` field must be present.
* - For leaf nodes: the `depth` field must be present and <= SHAMap::leafDepth.
*
* When the amendment is disabled:
* - The node must have `nodedata`.
* - The `nodeid` field must be present.
* - The new `id` and `depth` fields must NOT be present.
*
* @param app The application instance used to check amendment status.
* @param ledger_node The ledger node to validate.
* @return true if the ledger node has the expected fields, false otherwise.
*/
bool
validateLedgerNode(Application& app, protocol::TMLedgerNode const& ledger_node)
{
if (!ledger_node.has_nodedata())
return false;

if (app.getAmendmentTable().isEnabled(fixLedgerNodeID))
{
// Note that we cannot confirm here whether the node is actually an
// inner or leaf node, and will need to perform additional checks
// separately.
if (ledger_node.has_nodeid())
return false;
if (ledger_node.has_id())
return true;
return ledger_node.has_depth() && ledger_node.depth() <= SHAMap::leafDepth;
}

if (ledger_node.has_id() || ledger_node.has_depth())
return false;
return ledger_node.has_nodeid();
}

/**
* @brief Deserializes a SHAMapTreeNode from wire format data.
*
* This function attempts to create a SHAMapTreeNode from the provided data
* string. If the data is malformed or deserialization fails, the function
* returns std::nullopt instead of throwing an exception.
*
* @param data The serialized node data in wire format.
* @return An optional containing the deserialized tree node if successful, or
* std::nullopt if deserialization fails.
*/
std::optional<intr_ptr::SharedPtr<SHAMapTreeNode>>
getTreeNode(std::string const& data)
{
auto const slice = makeSlice(data);
try
{
return SHAMapTreeNode::makeFromWire(slice);
}
catch (std::exception const&)
{
return std::nullopt;
}
}

/**
* @brief Extracts or reconstructs the SHAMapNodeID from a ledger node.
*
* This function retrieves the SHAMapNodeID for a tree node, with behavior that
* depends on the fixLedgerNodeID amendment status and the node type (inner vs.
* leaf).
*
* When the fixLedgerNodeID amendment is enabled:
* - For inner nodes: Deserializes the node ID from the `ledger_node.id` field.
* Note that root nodes are also inner nodes.
* - For leaf nodes: Reconstructs the node ID using both the depth from the
* `ledger_node.depth` field and the key from the leaf node's item.
*
* When the amendment is disabled:
* - For all nodes: Deserializes the node ID from the `ledger_node.nodeid`
* field.
* - For leaf nodes: Validates that the node ID is consistent with the leaf's
* key.
*
* @param app The application instance used to check amendment status.
* @param ledger_node The protocol message containing the ledger node data.
* @param treeNode The deserialized tree node (inner or leaf node).
* @return An optional containing the node ID if extraction/reconstruction
* succeeds, or std::nullopt if the required fields are missing or
* validation fails.
*/
std::optional<SHAMapNodeID>
getSHAMapNodeID(
Application& app,
protocol::TMLedgerNode const& ledger_node,
intr_ptr::SharedPtr<SHAMapTreeNode> const& treeNode)
{
// When the amendment is enabled and a node depth is present, we can calculate the node ID.
if (app.getAmendmentTable().isEnabled(fixLedgerNodeID))
{
if (treeNode->isInner())
{
XRPL_ASSERT(ledger_node.has_id(), "xrpl::getSHAMapNodeID : node ID is present");
if (!ledger_node.has_id())
return std::nullopt;

return deserializeSHAMapNodeID(ledger_node.id());
}

if (treeNode->isLeaf())
{
XRPL_ASSERT(ledger_node.has_depth(), "xrpl::getSHAMapNodeID : node depth is present");
if (!ledger_node.has_depth())
return std::nullopt;

auto const key = static_cast<SHAMapLeafNode const*>(treeNode.get())->peekItem()->key();
return SHAMapNodeID::createID(ledger_node.depth(), key);
}

UNREACHABLE("xrpl::getSHAMapNodeID : tree node is neither inner nor leaf");
return std::nullopt;
}

// When the amendment is disabled, we expect the node ID to always be present. For leaf nodes
// we perform an extra check to ensure the node's position in the tree is consistent with its
// content.
XRPL_ASSERT(ledger_node.has_nodeid(), "xrpl::getSHAMapNodeID : node ID is present");
if (!ledger_node.has_nodeid())
return std::nullopt;

auto const nodeID = deserializeSHAMapNodeID(ledger_node.nodeid());
if (!nodeID)
return std::nullopt;

if (treeNode->isLeaf())
{
auto const key = static_cast<SHAMapLeafNode const*>(treeNode.get())->peekItem()->key();
auto const expected_id = SHAMapNodeID::createID(static_cast<int>(nodeID->getDepth()), key);
if (nodeID->getNodeID() != expected_id.getNodeID())
return std::nullopt;
}

return nodeID;
}

} // namespace xrpl
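Taken together, the three helpers above are meant to be called in sequence on each TMLedgerNode received from a peer, which is what the updated call sites in InboundLedger and InboundTransactions do. A condensed consumer sketch of that flow; decodeLedgerNode is a hypothetical name, and it assumes an Application reference plus a node from a TMLedgerData packet, as in the call sites shown earlier in this diff:

#include <xrpld/app/ledger/detail/LedgerNodeHelpers.h>

#include <optional>
#include <utility>

namespace xrpl {

// Sketch: validate, deserialize, and locate one ledger node, or bail out with nullopt.
std::optional<std::pair<SHAMapNodeID, intr_ptr::SharedPtr<SHAMapTreeNode>>>
decodeLedgerNode(Application& app, protocol::TMLedgerNode const& ledger_node)
{
    // 1. Field-level sanity check; which fields are required depends on fixLedgerNodeID.
    if (!validateLedgerNode(app, ledger_node))
        return std::nullopt;

    // 2. Deserialize the wire payload into a tree node (returns nullopt instead of throwing).
    auto treeNode = getTreeNode(ledger_node.nodedata());
    if (!treeNode)
        return std::nullopt;

    // 3. Recover the node's position: id/depth under the amendment, legacy nodeid otherwise.
    auto nodeID = getSHAMapNodeID(app, ledger_node, *treeNode);
    if (!nodeID)
        return std::nullopt;

    return std::make_pair(*nodeID, std::move(*treeNode));
}

} // namespace xrpl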
src/xrpld/app/ledger/detail/LedgerNodeHelpers.h (new file, 83 lines)
@@ -0,0 +1,83 @@
#pragma once

#include <xrpld/app/main/Application.h>

#include <xrpl/basics/IntrusivePointer.h>
#include <xrpl/shamap/SHAMapLeafNode.h>
#include <xrpl/shamap/SHAMapNodeID.h>
#include <xrpl/shamap/SHAMapTreeNode.h>

namespace xrpl {

/**
* @brief Validates a ledger node based on the fixLedgerNodeID amendment status.
*
* This function checks whether a ledger node has the expected fields based on
* whether the fixLedgerNodeID amendment is enabled. The validation rules differ
* depending on the amendment state:
*
* When the amendment is enabled:
* - The node must have `nodedata`.
* - The legacy `nodeid` field must NOT be present.
* - For inner nodes: the `id` field must be present.
* - For leaf nodes: the `depth` field must be present and <= SHAMap::leafDepth.
*
* When the amendment is disabled:
* - The node must have `nodedata`.
* - The `nodeid` field must be present.
* - The new `id` and `depth` fields must NOT be present.
*
* @param app The application instance used to check amendment status.
* @param ledger_node The ledger node to validate.
* @return true if the ledger node has the expected fields, false otherwise.
*/
bool
validateLedgerNode(Application& app, protocol::TMLedgerNode const& ledger_node);

/**
* @brief Deserializes a SHAMapTreeNode from wire format data.
*
* This function attempts to create a SHAMapTreeNode from the provided data
* string. If the data is malformed or deserialization fails, the function
* returns std::nullopt instead of throwing an exception.
*
* @param data The serialized node data in wire format.
* @return An optional containing the deserialized tree node if successful, or
* std::nullopt if deserialization fails.
*/
std::optional<intr_ptr::SharedPtr<SHAMapTreeNode>>
getTreeNode(std::string const& data);

/**
* @brief Extracts or reconstructs the SHAMapNodeID from a ledger node.
*
* This function retrieves the SHAMapNodeID for a tree node, with behavior that
* depends on the fixLedgerNodeID amendment status and the node type (inner vs.
* leaf).
*
* When the fixLedgerNodeID amendment is enabled:
* - For inner nodes: Deserializes the node ID from the `ledger_node.id` field.
* Note that root nodes are also inner nodes.
* - For leaf nodes: Reconstructs the node ID using both the depth from the
* `ledger_node.depth` field and the key from the leaf node's item.
*
* When the amendment is disabled:
* - For all nodes: Deserializes the node ID from the `ledger_node.nodeid`
* field.
* - For leaf nodes: Validates that the node ID is consistent with the leaf's
* key.
*
* @param app The application instance used to check amendment status.
* @param ledger_node The protocol message containing the ledger node data.
* @param treeNode The deserialized tree node (inner or leaf node).
* @return An optional containing the node ID if extraction/reconstruction
* succeeds, or std::nullopt if the required fields are missing or
* validation fails.
*/
std::optional<SHAMapNodeID>
getSHAMapNodeID(
Application& app,
protocol::TMLedgerNode const& ledger_node,
intr_ptr::SharedPtr<SHAMapTreeNode> const& treeNode);

} // namespace xrpl
@@ -144,7 +144,7 @@ TransactionAcquire::trigger(std::shared_ptr<Peer> const& peer)

SHAMapAddNode
TransactionAcquire::takeNodes(
std::vector<std::pair<SHAMapNodeID, Slice>> const& data,
std::vector<std::pair<SHAMapNodeID, intr_ptr::SharedPtr<SHAMapTreeNode>>>& data,
std::shared_ptr<Peer> const& peer)
{
ScopedLockType sl(mtx_);
@@ -168,20 +168,22 @@ TransactionAcquire::takeNodes(

ConsensusTransSetSF sf(app_, app_.getTempNodeCache());

for (auto const& d : data)
for (auto& d : data)
{
if (d.first.isRoot() && mHaveRoot)
{
JLOG(journal_.debug()) << "Got root TXS node, already have it";
continue;
}

if (d.first.isRoot())
{
if (mHaveRoot)
JLOG(journal_.debug()) << "Got root TXS node, already have it";
else if (!mMap->addRootNode(SHAMapHash{hash_}, d.second, nullptr).isGood())
{
if (!mMap->addRootNode(SHAMapHash{hash_}, std::move(d.second), nullptr).isGood())
JLOG(journal_.warn()) << "TX acquire got bad root node";
}
else
mHaveRoot = true;
}
else if (!mMap->addKnownNode(d.first, d.second, &sf).isGood())
else if (!mMap->addKnownNode(d.first, std::move(d.second), &sf).isGood())
{
JLOG(journal_.warn()) << "TX acquire got bad non-root node";
return SHAMapAddNode::invalid();

@@ -19,7 +19,9 @@ public:
~TransactionAcquire() = default;

SHAMapAddNode
takeNodes(std::vector<std::pair<SHAMapNodeID, Slice>> const& data, std::shared_ptr<Peer> const&);
takeNodes(
std::vector<std::pair<SHAMapNodeID, intr_ptr::SharedPtr<SHAMapTreeNode>>>& data,
std::shared_ptr<Peer> const&);

void
init(int startPeers);

@@ -15,6 +15,7 @@
#include <xrpl/basics/safe_cast.h>
#include <xrpl/core/HashRouter.h>
#include <xrpl/core/PerfLog.h>
#include <xrpl/ledger/AmendmentTable.h>
#include <xrpl/protocol/TxFlags.h>
#include <xrpl/protocol/digest.h>
#include <xrpl/server/LoadFeeTrack.h>
@@ -3131,7 +3132,7 @@ PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m)
{
auto const queryDepth{m->has_querydepth() ? m->querydepth() : (isHighLatency() ? 2 : 1)};

std::vector<std::pair<SHAMapNodeID, Blob>> data;
std::vector<std::tuple<SHAMapNodeID, Blob, bool>> data;

for (int i = 0; i < m->nodeids_size() && ledgerData.nodes_size() < Tuning::softMaxReplyNodes; ++i)
{
@@ -3150,9 +3151,21 @@ PeerImp::processLedgerRequest(std::shared_ptr<protocol::TMGetLedger> const& m)
{
if (ledgerData.nodes_size() >= Tuning::hardMaxReplyNodes)
break;

protocol::TMLedgerNode* node{ledgerData.add_nodes()};
node->set_nodeid(d.first.getRawString());
node->set_nodedata(d.second.data(), d.second.size());

auto const& node_data = std::get<1>(d);
node->set_nodedata(node_data.data(), node_data.size());

// When the amendment is disabled, we always set the node ID. However, when the amendment is
// enabled, we only set it for inner nodes, while for leaf nodes we set the node depth instead.
auto const& nodeID = std::get<0>(d);
if (!app_.getAmendmentTable().isEnabled(fixLedgerNodeID))
node->set_nodeid(nodeID.getRawString());
else if (std::get<2>(d))
node->set_depth(nodeID.getDepth());
else
node->set_id(nodeID.getRawString());
}
}
else
