Compare commits

..

50 Commits

Author SHA1 Message Date
Ed Hennis
e5b276d41a Merge branch 'develop' into ximinez/vault-test 2026-03-10 13:39:04 -04:00
Ed Hennis
b74ac47d0d Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  chore: Add custom cmake definitions for gersemi (6491)
  refactor: Update transaction folder structure (6483)
  chore: Apply gersemi changes (6486)
  chore: Use gersemi instead of ancient cmake-format (6486)
  Add Formats and Flags to `server_definitions` (6321)
  fix: Fix docs deployment for pull requests (6482)
  fix: Stop committing generated docs to prevent repo bloat (6474)
2026-03-06 12:30:29 -05:00
Ed Hennis
dc86ba0235 Merge branch 'develop' into ximinez/vault-test 2026-03-04 17:11:59 -04:00
Ed Hennis
5e7eca38da Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  chore: Update pre-commit hooks (6460)
  ci: [DEPENDABOT] bump actions/upload-artifact from 6.0.0 to 7.0.0 (6450)
  tests: Improve stability of Subscribe tests (6420)
  refactor: Fix clang-tidy `bugprone-empty-catch` check (6419)
  refactor: Splits invariant checks into multiple classes (6440)
  chore: Make nix hook optional (6431)
  test: Grep for failures in CI (6339)
  chore: Enable clang-tidy checks without issues (6414)
  refactor: Use uint256 directly as key instead of void pointer (6313)
  chore: Update cleanup-workspace to delete old .conan2 dir on macOS (6412)
2026-03-03 20:19:39 -05:00
Ed Hennis
df986c2c51 Merge branch 'develop' into ximinez/vault-test 2026-02-24 17:43:28 -04:00
Ed Hennis
14ffeb31e6 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  ci: [DEPENDABOT] bump actions/upload-artifact from 4.6.2 to 6.0.0 (6396)
2026-02-20 17:50:22 -05:00
Ed Hennis
89f4af5b99 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  ci: [DEPENDABOT] bump actions/checkout from 4.3.0 to 6.0.2 (6397)
2026-02-20 17:26:39 -05:00
Ed Hennis
720f82c8cd Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  ci: [DEPENDABOT] bump actions/setup-python from 5.6.0 to 6.2.0 (6395)
  ci: [DEPENDABOT] bump tj-actions/changed-files from 46.0.5 to 47.0.4 (6394)
  ci: [DEPENDABOT] bump codecov/codecov-action from 5.4.3 to 5.5.2 (6398)
  ci: Build docs in PRs and in private repos (6400)
  ci: Add dependabot config (6379)
  Fix tautological assertion (6393)
2026-02-20 17:23:59 -05:00
Ed Hennis
6c7b07eea6 Merge commit '2c1fad1023' into ximinez/vault-test
* commit '2c1fad1023':
  chore: Apply clang-format width 100 (6387)
2026-02-20 17:23:47 -05:00
Ed Hennis
621c99f8b8 Update formatting 2026-02-20 17:08:51 -05:00
Ed Hennis
120892edb2 Merge commit '25cca465538a56cce501477f9e5e2c1c7ea2d84c' into ximinez/vault-test
* commit '25cca465538a56cce501477f9e5e2c1c7ea2d84c':
  chore: Set clang-format width to 100 in config file (6387)
2026-02-20 17:08:30 -05:00
Ed Hennis
af8ce79add Merge branch 'develop' into ximinez/vault-test 2026-02-19 16:25:08 -05:00
Ed Hennis
c8eaa6f39e Merge branch 'develop' into ximinez/vault-test 2026-02-18 21:11:39 -04:00
Ed Hennis
740a22aeaf Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  chore: Update secp256k1 and openssl (6327)
  chore: Remove unnecessary script (6326)
  refactor: Replace include guards by '#pragma once' (6322)
  chore: Remove unity builds (6300)
  refactor: Add ServiceRegistry to help modularization (6222)
  fix: Deletes expired NFToken offers from ledger (5707)
  chore: Add .zed editor config directory to .gitignore (6317)
  docs: Update API changelog, add APIv2+APIv3 version documentation (6308)
  fix: Restore config changes that broke standalone mode (6301)
  chore: Add upper-case match for ARM64 in CompilationEnv (6315)
  ci: Update hashes of XRPLF/actions (6316)
  chore: Format all cmake files without comments (6294)
  chore: Add cmake-format pre-commit hook (6279)
2026-02-04 16:09:38 -05:00
Ed Hennis
149d9803e2 Fix formatting 2026-01-28 19:36:31 -05:00
Ed Hennis
304b5a1516 Merge branch 'develop' into ximinez/vault-test 2026-01-28 19:09:27 -04:00
Ed Hennis
78ccd2de31 Merge commit '5f638f55536def0d88b970d1018a465a238e55f4' into ximinez/vault-test
* commit '5f638f55536def0d88b970d1018a465a238e55f4':
  chore: Set ColumnLimit to 120 in clang-format (6288)
2026-01-28 18:07:23 -05:00
Ed Hennis
abfef33493 Merge commit '92046785d1fea5f9efe5a770d636792ea6cab78b' into ximinez/vault-test
* commit '92046785d1fea5f9efe5a770d636792ea6cab78b':
  test: Fix the `xrpl.net` unit test using async read (6241)
  ci: Upload Conan recipes for develop, release candidates, and releases (6286)
  fix: Stop embedded tests from hanging on ARM by using `atomic_flag` (6248)
  fix:  Remove DEFAULT fields that change to the default in associateAsset (6259) (6273)
  refactor: Update Boost to 1.90 (6280)
  refactor: clean up uses of `std::source_location` (6272)
  ci: Pass missing sanitizers input to actions (6266)
  ci: Properly propagate Conan credentials (6265)
  ci: Explicitly set version when exporting the Conan recipe (6264)
  ci: Use plus instead of hyphen for Conan recipe version suffix (6261)
  chore: Detect uninitialized variables in CMake files (6247)
  ci: Run on-trigger and on-pr when generate-version is modified (6257)
  refactor: Enforce 15-char limit and simplify labels for thread naming (6212)
  docs: Update Ripple Bug Bounty public key (6258)
  ci: Add missing commit hash to Conan recipe version (6256)
  fix: Include `<functional>` header in `Number.h` (6254)
  ci: Upload Conan recipe for merges into develop and commits to release (6235)
  Limit reply size on `TMGetObjectByHash` queries (6110)
  ci: remove 'master' branch as a trigger (6234)
  Improve ledger_entry lookups for fee, amendments, NUNL, and hashes (5644)
2026-01-28 18:07:13 -05:00
Ed Hennis
cdcc6b6a1a Avoid input interpolation 2026-01-15 12:15:08 -05:00
Ed Hennis
79058660ce Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  ci: Add sanitizers to CI builds (5996)
2026-01-15 12:12:04 -05:00
Ed Hennis
21315d64fd Merge branch 'develop' into ximinez/vault-test 2026-01-15 12:06:14 -04:00
Ed Hennis
a0a8a15ea7 Merge branch 'develop' into ximinez/vault-test 2026-01-13 18:19:23 -04:00
Ed Hennis
339ef7a9b9 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  fix: Update Conan lock file with changed OpenSSL recipe (6211)
  ci: Update actions/images to use cmake 4.2.1 and conan 2.24.0 (6209)
  refactor: Remove unnecessary version number and options in cmake find_package (6169)
2026-01-13 14:28:30 -05:00
Ed Hennis
cb230ea5a1 Merge branch 'develop' into ximinez/vault-test 2026-01-12 14:52:29 -04:00
Ed Hennis
09912f7da2 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  fix: Inner batch transactions never have valid signatures (6069)
  chore: Change `/Zi` to `/Z7` for ccache, remove debug symbols in CI (6198)
  VaultClawback: Burn shares of an empty vault (6120)
  fix: Truncate thread name to 15 chars on Linux (5758)
  docs: Fix minor spelling issues in comments (6194)
2026-01-10 23:55:20 -05:00
Ed Hennis
5251673b86 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  ci: Use updated prepare-runner in actions and worfklows (6188)
2026-01-08 16:06:35 -05:00
Ed Hennis
33bda40b59 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  refactor: Fix typos, enable cspell pre-commit (5719)
  fix: Reorder Batch Preflight Errors (6176)
  refactor: Remove unused credentials signature hash prefix (6186)
  refactor: Fix spelling issues in all variables/functions (6184)
  refactor: Fix spelling issues in private/local variables and functions (6182)
  refactor: Fix typos in comments, configure cspell (6164)
  ci: Move variable into right place (6179)
  ci: Use ccache to cache build objects for speeding up building (6104)
2026-01-08 12:04:45 -05:00
Ed Hennis
bd35dee693 Merge branch 'develop' into ximinez/vault-test 2026-01-06 14:02:29 -05:00
Ed Hennis
8da0e34746 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  ci: Remove superfluous build directory creation (6159)
2025-12-22 17:40:26 -05:00
Ed Hennis
642eff03c3 Merge branch 'develop' into ximinez/vault-test 2025-12-18 20:00:04 -05:00
Ed Hennis
82d25b4bbd Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  ci: Update shared actions (6147)
  chore: Fix some typos in comments (6082)
2025-12-12 20:35:24 -05:00
Ed Hennis
9bd8e516df Fix formatting 2025-12-12 14:00:12 -05:00
Ed Hennis
8ee703ed05 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  refactor: Rename `ripple` namespace to `xrpl` (5982)
  refactor: Move JobQueue and related classes into xrpl.core module (6121)
  refactor: Rename `rippled` binary to `xrpld` (5983)
  refactor: rename info() to header() (6138)
  refactor: rename `LedgerInfo` to `LedgerHeader` (6136)
  refactor: clean up `RPCHelpers` (5684)
  chore: Fix docs readme and cmake (6122)
  chore: Clean up .gitignore and .gitattributes (6001)
  chore: Use updated secp256k1 recipe (6118)
2025-12-11 16:05:27 -05:00
Ed Hennis
3427b2db16 Merge branch 'develop' into ximinez/vault-test 2025-12-05 21:13:21 -05:00
Ed Hennis
a5de779453 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  Implement Lending Protocol (unsupported) (5270)
2025-12-02 18:40:55 -05:00
Ed Hennis
e9609e0078 Merge branch 'develop' into ximinez/vault-test 2025-12-01 14:40:57 -05:00
Ed Hennis
933e645f85 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  refactor: Replaces ed25519-donna source by Conan package (6088)
  chore: Add black pre-commit hook (6086)
  chore: Make conan lockfile generation commands into a script (6085)
  chore: Update lockfile (6083)
  docs: Update instructions how to (re)generate conan.lock file (6070)
2025-11-28 15:51:15 -05:00
Ed Hennis
7f139d8474 Merge branch 'develop' into ximinez/vault-test 2025-11-26 00:25:30 -05:00
Ed Hennis
3bcf656446 Merge branch 'develop' into ximinez/vault-test 2025-11-25 14:55:19 -05:00
Ed Hennis
5d6ab7e970 Merge branch 'develop' into ximinez/vault-test 2025-11-24 21:49:23 -05:00
Ed Hennis
33f5017c55 Merge branch 'develop' into ximinez/vault-test 2025-11-24 21:30:34 -05:00
Ed Hennis
26ca6c1828 Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  chore: Clean up comment in NetworkOps_test.cpp (6066)
  refactor: Retire DisallowIncoming amendment (6045)
  refactor: Retire Checks amendment (6055)
  ci: Only upload artifacts in XRPLF repo owner (6060)
  fix: Set correct index for limit in `book_offers` CLI (6043)
  Fix: Perform array size check (6030)
2025-11-21 14:33:40 -05:00
Ed Hennis
768049c076 Merge branch 'develop' into ximinez/vault-test 2025-11-18 22:51:17 -05:00
Ed Hennis
58e8f5c032 Merge branch 'develop' into ximinez/vault-test 2025-11-15 03:08:53 -05:00
Ed Hennis
4e3cb34bb5 Merge branch 'develop' into ximinez/vault-test 2025-11-13 12:19:54 -05:00
Ed Hennis
957f4b15eb Merge remote-tracking branch 'XRPLF/develop' into ximinez/vault-test
* XRPLF/develop:
  chore: Set version 3.1.0-b0 (5986)
  ci: Clean workspace on Windows self-hosted runners (6024)
  docs: fix spelling in comments (6002)
  fix: floating point representation errors in vault (5997)
  ci: Specify bash as the default shell in workflows (6021)
  refactor: Add `XRPL_RETIRE_FIX` and `XRPL_RETIRE_FEATURE` macros (6014)
  refactor: Retire DepositPreAuth and DepositAuth amendments (5978)
  chore: Move running of unit tests out of coverage target (6018)
  refactor: Retire PayChanRecipientOwnerDir amendment (5946)
2025-11-12 14:16:09 -05:00
Ed Hennis
bae5a9057f Fix formatting 2025-11-10 19:53:35 -05:00
Ed Hennis
330728eb2d Remove the cmake UNIT_TEST_REFERENCE_FEE setting
- Replaced by --unittest-fee runtime param
2025-11-10 19:53:35 -05:00
Ed Hennis
7dcc839d98 Fix tests when the runner isn't available for the referencefee 2025-11-10 19:53:35 -05:00
Ed Hennis
18a62f4b27 Add command line unittest-fee 2025-11-10 19:53:35 -05:00
229 changed files with 4018 additions and 4619 deletions

View File

@@ -8,14 +8,12 @@ Checks: "-*,
bugprone-chained-comparison,
bugprone-compare-pointer-to-member-virtual-function,
bugprone-copy-constructor-init,
bugprone-crtp-constructor-accessibility,
bugprone-dangling-handle,
bugprone-dynamic-static-initializers,
bugprone-empty-catch,
bugprone-fold-init-type,
bugprone-forward-declaration-namespace,
bugprone-inaccurate-erase,
bugprone-inc-dec-in-conditions,
bugprone-incorrect-enable-if,
bugprone-incorrect-roundings,
bugprone-infinite-loop,
@@ -60,19 +58,14 @@ Checks: "-*,
bugprone-suspicious-string-compare,
bugprone-suspicious-stringview-data-usage,
bugprone-swapped-arguments,
bugprone-switch-missing-default-case,
bugprone-terminating-continue,
bugprone-throw-keyword-missing,
bugprone-too-small-loop-variable,
# bugprone-unchecked-optional-access, # see https://github.com/XRPLF/rippled/pull/6502
bugprone-undefined-memory-manipulation,
bugprone-undelegated-constructor,
bugprone-unhandled-exception-at-new,
bugprone-unhandled-self-assignment,
bugprone-unique-ptr-array-mismatch,
bugprone-unsafe-functions,
bugprone-unused-raii,
bugprone-unused-return-value,
bugprone-unused-local-non-trivial-variable,
bugprone-virtual-near-miss,
cppcoreguidelines-no-suspend-with-lock,
@@ -85,24 +78,30 @@ Checks: "-*,
misc-throw-by-value-catch-by-reference,
misc-unused-alias-decls,
misc-unused-using-decls,
modernize-deprecated-headers,
modernize-make-shared,
modernize-make-unique,
performance-implicit-conversion-in-loop,
performance-move-constructor-init,
performance-trivially-destructible,
readability-duplicate-include,
readability-enum-initial-value,
readability-misleading-indentation,
readability-non-const-parameter,
readability-redundant-declaration,
readability-reference-to-constructed-temporary
readability-reference-to-constructed-temporary,
modernize-deprecated-headers,
modernize-make-shared,
modernize-make-unique,
performance-implicit-conversion-in-loop,
performance-move-constructor-init,
performance-trivially-destructible
"
# ---
# checks that have some issues that need to be resolved:
#
# bugprone-crtp-constructor-accessibility,
# bugprone-inc-dec-in-conditions,
# bugprone-move-forwarding-reference,
# bugprone-switch-missing-default-case,
# bugprone-unused-return-value,
# bugprone-use-after-move,
# bugprone-unhandled-self-assignment,
# bugprone-unused-raii,
#
# cppcoreguidelines-misleading-capture-default-by-value,
# cppcoreguidelines-init-variables,
@@ -195,7 +194,7 @@ CheckOptions:
# readability-identifier-naming.PublicMemberSuffix: ""
# readability-identifier-naming.FunctionIgnoredRegexp: ".*tag_invoke.*"
bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
# bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
# misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h;__chrono/.*;fmt/chrono.h;boost/uuid/uuid_hash.hpp'
#
# HeaderFilterRegex: '^.*/(src|tests)/.*\.(h|hpp)$'

View File

@@ -29,6 +29,22 @@ If a refactor, how is this better than the previous implementation?
If there is a spec or design document for this feature, please link it here.
-->
### Type of Change
<!--
Please check [x] relevant options, delete irrelevant ones.
-->
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Refactor (non-breaking change that only restructures code)
- [ ] Performance (increase or change in throughput and/or latency)
- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation update
- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
- [ ] Release
### API Impact
<!--

View File

@@ -70,7 +70,7 @@ that `test` code should _never_ be included in `xrpl` or `xrpld` code.)
## Validation
The [levelization](generate.py) script takes no parameters,
The [levelization](generate.sh) script takes no parameters,
reads no environment variables, and can be run from any directory,
as long as it is in the expected location in the rippled repo.
It can be run at any time from within a checked out repo, and will
@@ -104,7 +104,7 @@ It generates many files of [results](results):
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or
done anything else to improve levelization, run `generate.py`,
done anything else to improve levelization, run `levelization.sh`,
and commit the updated results.
The `loops.txt` and `ordering.txt` files relate the modules
@@ -128,7 +128,7 @@ The committed files hide the detailed values intentionally, to
prevent false alarms and merging issues, and because it's easy to
get those details locally.
1. Run `generate.py`
1. Run `levelization.sh`
2. Grep the modules in `paths.txt`.
- For example, if a cycle is found `A ~= B`, simply `grep -w
A .github/scripts/levelization/results/paths.txt | grep -w B`

View File

@@ -1,335 +0,0 @@
#!/usr/bin/env python3
"""
Usage: generate.py
This script takes no parameters, and can be called from any directory in the file system.
"""
import os
import re
import subprocess
import sys
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Tuple, Set, Optional
# Compile regex patterns once at module level so every scanned line reuses them.
# Matches an "#include" directive whose path contains a directory component
# and ends in a .h header (mirrors the bash grep '^[ ]*#include.*/.*\.h').
INCLUDE_PATTERN = re.compile(r"^\s*#include.*/.*\.h")
# Captures the path between the quotes or angle brackets of an #include.
INCLUDE_PATH_PATTERN = re.compile(r'[<"]([^>"]+)[>"]')
def dictionary_sort_key(s: str) -> str:
    """
    Create a sort key that mimics 'sort -d' (dictionary order).

    Dictionary order only considers blanks and alphanumeric characters.
    This means punctuation like '.' is ignored during sorting.

    :param s: the string to build a sort key for.
    :return: ``s`` with every character that is neither alphanumeric nor
        whitespace removed.
    """
    # Keep only alphanumeric characters and spaces.
    return "".join(c for c in s if c.isalnum() or c.isspace())
def get_level(file_path: str) -> str:
    """
    Extract the level from a file path (second and third directory components).

    Equivalent to bash: cut -d/ -f 2,3

    Examples:
        src/xrpld/app/main.cpp -> xrpld.app
        src/libxrpl/protocol/STObject.cpp -> libxrpl.protocol
        include/xrpl/basics/base_uint.h -> xrpl.basics

    :param file_path: repo-relative path, '/'-separated.
    :return: the dotted module level, using "toplevel" when the path
        points at a file directly under a single-component level.
    """
    parts = file_path.split("/")
    # Get fields 2 and 3 (indices 1 and 2 in 0-based indexing).
    if len(parts) >= 3:
        level = f"{parts[1]}/{parts[2]}"
    elif len(parts) >= 2:
        level = f"{parts[1]}/toplevel"
    else:
        level = file_path
    # If the "level" ends in a filename (contains a '.'), cut it off.
    if "." in level.split("/")[-1]:  # Avoid Path object creation
        # Use the "toplevel" label as a workaround for `sort`
        # inconsistencies between different utility versions.
        level = level.rsplit("/", 1)[0] + "/toplevel"
    return level.replace("/", ".")
def extract_include_level(include_line: str) -> Optional[str]:
    """
    Extract the include path from an #include directive.

    Gets the first two directory components from the include path.
    Equivalent to bash: cut -d/ -f 1,2

    Examples:
        #include <xrpl/basics/base_uint.h> -> xrpl.basics
        #include "xrpld/app/main/Application.h" -> xrpld.app

    :param include_line: a single source line containing an #include.
    :return: the dotted include level, or None when the line has no
        quoted or angle-bracketed path.
    """
    # Pull out the text between the quotes or angle brackets. The `re`
    # module caches compiled patterns, so searching with a literal pattern
    # here costs the same as a module-level precompiled one.
    match = re.search(r'[<"]([^>"]+)[>"]', include_line)
    if not match:
        return None
    include_path = match.group(1)
    parts = include_path.split("/")
    # Get first two fields (indices 0 and 1).
    if len(parts) >= 2:
        include_level = f"{parts[0]}/{parts[1]}"
    else:
        include_level = include_path
    # If the "includelevel" ends in a filename, cut it off.
    if "." in include_level.split("/")[-1]:  # Avoid Path object creation
        include_level = include_level.rsplit("/", 1)[0] + "/toplevel"
    return include_level.replace("/", ".")
def find_repository_directories(
    start_path: Path, depth_limit: int = 10
) -> Tuple[Path, List[Path]]:
    """
    Find the repository root by looking for src or include folders.

    Walks up the directory tree from the start path.

    :param start_path: directory to begin the upward search from.
    :param depth_limit: maximum number of directories to inspect, which
        prevents an unbounded walk on unexpected layouts.
    :return: (repository root, [src path, include path]). Either returned
        scan directory may not exist; callers must check before use.
    :raises RuntimeError: if no directory within ``depth_limit`` levels
        contains a 'src' or 'include' folder.
    """
    current = start_path.resolve()
    # Walk up the directory tree; the range bound limits search depth to
    # prevent effectively-infinite loops on pathological layouts.
    for _ in range(depth_limit):
        src_path = current / "src"
        include_path = current / "include"
        # Either folder marks this directory as the repository root.
        if src_path.exists() or include_path.exists():
            return current, [src_path, include_path]
        # Move up one level; stop once the filesystem root is reached.
        parent = current.parent
        if parent == current:
            break
        current = parent
    # If we couldn't find it, raise an error.
    raise RuntimeError(
        "Could not find repository root. "
        "Expected to find a directory containing 'src' and/or 'include' folders."
    )
def main():
    """
    Entry point: scan the repo for #include directives and write the
    levelization results under ./results:

    - rawincludes.txt: every matching #include line found
    - paths.txt: counted module-to-module dependency edges
    - includes/, included_by/: per-module flat-file database
    - loops.txt, ordering.txt: circular vs. clean dependency reports
    """
    # Change to the script's directory so relative output paths are stable.
    script_dir = Path(__file__).parent.resolve()
    os.chdir(script_dir)
    # Clean up and create results directory.
    results_dir = script_dir / "results"
    if results_dir.exists():
        # Lazy import: shutil is only needed when a stale results dir exists.
        import shutil
        shutil.rmtree(results_dir)
    results_dir.mkdir()
    # Find the repository root by searching for src and include directories.
    try:
        repo_root, scan_dirs = find_repository_directories(script_dir)
        print(f"Found repository root: {repo_root}")
        print(f"Scanning directories:")
        for scan_dir in scan_dirs:
            print(f" - {scan_dir.relative_to(repo_root)}")
    except RuntimeError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    print("\nScanning for raw includes...")
    # Find all #include directives, collected as (file path, include line).
    raw_includes: List[Tuple[str, str]] = []
    rawincludes_file = results_dir / "rawincludes.txt"
    # Write to file as we go to avoid storing everything in memory.
    with open(rawincludes_file, "w", buffering=8192) as raw_f:
        for dir_path in scan_dirs:
            print(f" Scanning {dir_path.relative_to(repo_root)}...")
            for file_path in dir_path.rglob("*"):
                if not file_path.is_file():
                    continue
                try:
                    rel_path_str = str(file_path.relative_to(repo_root))
                    # Read file with a large buffer for performance.
                    with open(
                        file_path,
                        "r",
                        encoding="utf-8",
                        errors="ignore",
                        buffering=8192,
                    ) as f:
                        for line in f:
                            # Quick check before regex; boost includes are
                            # excluded, matching the bash `grep -v boost`.
                            if "#include" not in line or "boost" in line:
                                continue
                            if INCLUDE_PATTERN.match(line):
                                line_stripped = line.strip()
                                entry = f"{rel_path_str}:{line_stripped}\n"
                                print(entry, end="")
                                raw_f.write(entry)
                                raw_includes.append((rel_path_str, line_stripped))
                except Exception as e:
                    # Best-effort: report unreadable files but keep scanning.
                    print(f"Error reading {file_path}: {e}", file=sys.stderr)
    # Build levelization paths and count directly (no need to sort first).
    print("Build levelization paths")
    path_counts: Dict[Tuple[str, str], int] = defaultdict(int)
    for file_path, include_line in raw_includes:
        include_level = extract_include_level(include_line)
        if not include_level:
            continue
        level = get_level(file_path)
        # Self-references within one module are not dependency edges.
        if level != include_level:
            path_counts[(level, include_level)] += 1
    # Sort and deduplicate paths (using dictionary order like bash 'sort -d').
    print("Sort and deduplicate paths")
    paths_file = results_dir / "paths.txt"
    with open(paths_file, "w") as f:
        # Sort using dictionary order: only alphanumeric and spaces matter
        sorted_items = sorted(
            path_counts.items(),
            key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
        )
        for (level, include_level), count in sorted_items:
            # Width-7 count column mimics `uniq -c` output formatting.
            line = f"{count:7} {level} {include_level}\n"
            print(line.rstrip())
            f.write(line)
    # Split into flat-file database
    print("Split into flat-file database")
    includes_dir = results_dir / "includes"
    included_by_dir = results_dir / "included_by"
    includes_dir.mkdir()
    included_by_dir.mkdir()
    # Batch writes by grouping data first to avoid repeated file opens.
    includes_data: Dict[str, List[Tuple[str, int]]] = defaultdict(list)
    included_by_data: Dict[str, List[Tuple[str, int]]] = defaultdict(list)
    # Process in sorted order to match bash script behaviour (dictionary order).
    sorted_items = sorted(
        path_counts.items(),
        key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
    )
    for (level, include_level), count in sorted_items:
        includes_data[level].append((include_level, count))
        included_by_data[include_level].append((level, count))
    # Write all includes files in sorted order (dictionary order).
    for level in sorted(includes_data.keys(), key=dictionary_sort_key):
        entries = includes_data[level]
        with open(includes_dir / level, "w") as f:
            for include_level, count in entries:
                line = f"{include_level} {count}\n"
                print(line.rstrip())
                f.write(line)
    # Write all included_by files in sorted order (dictionary order).
    for include_level in sorted(included_by_data.keys(), key=dictionary_sort_key):
        entries = included_by_data[include_level]
        with open(included_by_dir / include_level, "w") as f:
            for level, count in entries:
                line = f"{level} {count}\n"
                print(line.rstrip())
                f.write(line)
    # Search for loops
    print("Search for loops")
    loops_file = results_dir / "loops.txt"
    ordering_file = results_dir / "ordering.txt"
    loops_found: Set[Tuple[str, str]] = set()
    # Pre-load all include files into memory to avoid repeated I/O.
    # This is the biggest optimisation - we were reading files repeatedly in nested loops.
    # Use list of tuples to preserve file order.
    includes_cache: Dict[str, List[Tuple[str, int]]] = {}
    includes_lookup: Dict[str, Dict[str, int]] = {}  # For fast lookup
    # Note: bash script uses 'for source in *' which uses standard glob sorting,
    # NOT dictionary order. So we use standard sorted() here, not dictionary_sort_key.
    for include_file in sorted(includes_dir.iterdir(), key=lambda p: p.name):
        if not include_file.is_file():
            continue
        includes_cache[include_file.name] = []
        includes_lookup[include_file.name] = {}
        with open(include_file, "r") as f:
            for line in f:
                parts = line.strip().split()
                if len(parts) >= 2:
                    include_name = parts[0]
                    include_count = int(parts[1])
                    includes_cache[include_file.name].append(
                        (include_name, include_count)
                    )
                    includes_lookup[include_file.name][include_name] = include_count
    with open(loops_file, "w", buffering=8192) as loops_f, open(
        ordering_file, "w", buffering=8192
    ) as ordering_f:
        # Use standard sorting to match bash glob expansion 'for source in *'.
        for source in sorted(includes_cache.keys()):
            source_includes = includes_cache[source]
            for include, include_freq in source_includes:
                # Check if include file exists and references source
                if include not in includes_lookup:
                    continue
                source_freq = includes_lookup[include].get(source)
                if source_freq is not None:
                    # Found a loop; report each unordered pair only once.
                    loop_key = tuple(sorted([source, include]))
                    if loop_key in loops_found:
                        continue
                    loops_found.add(loop_key)
                    loops_f.write(f"Loop: {source} {include}\n")
                    # If the counts are close, indicate that the two modules are
                    # on the same level, though they shouldn't be.
                    diff = include_freq - source_freq
                    if diff > 3:
                        loops_f.write(f" {source} > {include}\n\n")
                    elif diff < -3:
                        loops_f.write(f" {include} > {source}\n\n")
                    elif source_freq == include_freq:
                        loops_f.write(f" {include} == {source}\n\n")
                    else:
                        loops_f.write(f" {include} ~= {source}\n\n")
                else:
                    # No back-reference: a clean one-way dependency edge.
                    ordering_f.write(f"{source} > {include}\n")
    # Print results
    print("\nOrdering:")
    with open(ordering_file, "r") as f:
        print(f.read(), end="")
    print("\nLoops:")
    with open(loops_file, "r") as f:
        print(f.read(), end="")
# Allow importing this module (e.g. for tests) without side effects;
# run the analysis only when executed as a script.
if __name__ == "__main__":
    main()

130
.github/scripts/levelization/generate.sh vendored Executable file
View File

@@ -0,0 +1,130 @@
#!/bin/bash
# Usage: generate.sh
# This script takes no parameters, reads no environment variables,
# and can be run from any directory, as long as it is in the expected
# location in the repo.
#
# It scans the repo for #include directives, reduces them to
# module-level "levelization" edges, writes a flat-file database of
# who-includes-whom under ./results, and reports dependency loops.

# Work from the directory containing this script.
pushd $( dirname $0 )

if [ -v PS1 ]
then
    # if the shell is interactive, clean up any flotsam before analyzing
    git clean -ix
fi

# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C

# Start from a clean results directory.
rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"

# Collect the raw #include lines from the repo root (three levels up),
# excluding boost headers.
pushd ../../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
    grep -v boost | tee ${includes}
popd
pushd results

# grep output lines look like "file:include", so split fields on ':'.
oldifs=${IFS}
IFS=:
mkdir includes
mkdir included_by
echo Build levelization paths
exec 3< ${includes} # open rawincludes.txt for input
while read -r -u 3 file include
do
    # The "level" is the second and third path components (e.g. xrpld/app).
    level=$( echo ${file} | cut -d/ -f 2,3 )
    # If the "level" indicates a file, cut off the filename
    if [[ "${level##*.}" != "${level}" ]]
    then
        # Use the "toplevel" label as a workaround for `sort`
        # inconsistencies between different utility versions
        level="$( dirname ${level} )/toplevel"
    fi
    level=$( echo ${level} | tr '/' '.' )
    # Strip the quotes/angle brackets, then keep the first two components.
    includelevel=$( echo ${include} | sed 's/.*["<]//; s/[">].*//' | \
        cut -d/ -f 1,2 )
    if [[ "${includelevel##*.}" != "${includelevel}" ]]
    then
        # Use the "toplevel" label as a workaround for `sort`
        # inconsistencies between different utility versions
        includelevel="$( dirname ${includelevel} )/toplevel"
    fi
    includelevel=$( echo ${includelevel} | tr '/' '.' )
    # Self-references are not interesting; record only cross-module edges.
    if [[ "$level" != "$includelevel" ]]
    then
        echo $level $includelevel | tee -a paths.txt
    fi
done
echo Sort and deduplicate paths
# -d: dictionary order, -s: stable; `uniq -c` prefixes each edge with its count.
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
mv sortedpaths.txt paths.txt
exec 3>&- #close fd 3

# Restore the default field separator for the remaining reads.
IFS=${oldifs}
unset oldifs

echo Split into flat-file database
exec 4<paths.txt # open paths.txt for input
while read -r -u 4 count level include
do
    # One file per module: what it includes, and what includes it.
    echo ${include} ${count} | tee -a includes/${level}
    echo ${level} ${count} | tee -a included_by/${include}
done
exec 4>&- #close fd 4

loops="$( pwd )/loops.txt"
ordering="$( pwd )/ordering.txt"
pushd includes
echo Search for loops
# Redirect stdout to a file
exec 4>&1
exec 1>"${loops}"
for source in *
do
    if [[ -f "$source" ]]
    then
        exec 5<"${source}" # open for input
        while read -r -u 5 include includefreq
        do
            if [[ -f $include ]]
            then
                if grep -q -w $source $include
                then
                    # Loop already reported in the other direction? Skip it.
                    if grep -q -w "Loop: $include $source" "${loops}"
                    then
                        continue
                    fi
                    sourcefreq=$( grep -w $source $include | cut -d\ -f2 )
                    echo "Loop: $source $include"
                    # If the counts are close, indicate that the two modules are
                    # on the same level, though they shouldn't be
                    if [[ $(( $includefreq - $sourcefreq )) -gt 3 ]]
                    then
                        echo -e " $source > $include\n"
                    elif [[ $(( $sourcefreq - $includefreq )) -gt 3 ]]
                    then
                        echo -e " $include > $source\n"
                    elif [[ $sourcefreq -eq $includefreq ]]
                    then
                        echo -e " $include == $source\n"
                    else
                        echo -e " $include ~= $source\n"
                    fi
                else
                    # No back-reference: record a clean ordering edge.
                    echo "$source > $include" >> "${ordering}"
                fi
            fi
        done
        exec 5>&- #close fd 5
    fi
done
exec 1>&4 #close fd 4
exec 4>&- #close fd 4

# Show the results on the restored stdout.
cat "${ordering}"
cat "${loops}"
popd
popd
popd

View File

@@ -134,7 +134,6 @@ test.peerfinder > xrpld.core
test.peerfinder > xrpld.peerfinder
test.peerfinder > xrpl.protocol
test.protocol > test.toplevel
test.protocol > test.unit_test
test.protocol > xrpl.basics
test.protocol > xrpl.json
test.protocol > xrpl.protocol
@@ -172,7 +171,6 @@ test.shamap > xrpl.shamap
test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
test.unit_test > xrpl.protocol
tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.net

View File

@@ -40,6 +40,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# The default CMake target is 'all' for Linux and MacOS and 'install'
# for Windows, but it can get overridden for certain configurations.
cmake_target = "install" if os["distro_name"] == "windows" else "all"
unittest_args = ""
# We build and test all configurations by default, except for Windows in
# Debug, because it is too slow, as well as when code coverage is
@@ -55,7 +56,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# fee to 500.
# - Bookworm using GCC 15: Debug on linux/amd64, enable code
# coverage (which will be done below).
# - Bookworm using Clang 16: Debug on linux/amd64, enable voidstar.
# - Bookworm using Clang 16: Debug on linux/arm64, enable voidstar.
# - Bookworm using Clang 17: Release on linux/amd64, set the
# reference fee to 1000.
# - Bookworm using Clang 20: Debug on linux/amd64.
@@ -67,7 +68,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
and build_type == "Release"
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}"
unittest_args = f"{unittest_args} --unittest-fee=500"
skip = False
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
@@ -78,7 +79,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16"
and build_type == "Debug"
and architecture["platform"] == "linux/amd64"
and architecture["platform"] == "linux/arm64"
):
cmake_args = f"-Dvoidstar=ON {cmake_args}"
skip = False
@@ -87,7 +88,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
and build_type == "Release"
and architecture["platform"] == "linux/amd64"
):
cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}"
unittest_args = f"{unittest_args} --unittest-fee=1000"
skip = False
if (
f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
@@ -245,6 +246,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
{
"config_name": config_name + "-asan-ubsan",
"cmake_args": cmake_args,
"unittest_args": unittest_args,
"cmake_target": cmake_target,
"build_only": build_only,
"build_type": build_type,
@@ -260,6 +262,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
{
"config_name": config_name + "-tsan-ubsan",
"cmake_args": cmake_args,
"unittest_args": unittest_args,
"cmake_target": cmake_target,
"build_only": build_only,
"build_type": build_type,
@@ -273,6 +276,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
{
"config_name": config_name,
"cmake_args": cmake_args,
"unittest_args": unittest_args,
"cmake_target": cmake_target,
"build_only": build_only,
"build_type": build_type,

View File

@@ -1,17 +0,0 @@
name: Check signed commits
# The use of `pull_request_target` allows the action to post comments on
# external PRs created from forks.
on: pull_request_target
jobs:
check-signed-commits:
name: Check signed commits in PR
runs-on: ubuntu-latest
# The action needs write permissions to post comments on the PR.
permissions:
contents: read
pull-requests: write
steps:
- name: Check signed commits in PR
uses: 1Password/check-signed-commits-action@ed2885f3ed2577a4f5d3c3fe895432a557d23d52 # v1.2.0

View File

@@ -141,8 +141,9 @@ jobs:
needs:
- should-run
- build-test
# Only run when committing to a PR that targets a release branch.
if: ${{ github.repository == 'XRPLF/rippled' && needs.should-run.outputs.go == 'true' && github.event_name == 'pull_request' && startsWith(github.event.pull_request.base.ref, 'release') }}
# Only run when committing to a PR that targets a release branch in the
# XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' && needs.should-run.outputs.go == 'true' && startsWith(github.ref, 'refs/heads/release') }}
uses: ./.github/workflows/reusable-upload-recipe.yml
secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}

View File

@@ -17,7 +17,8 @@ defaults:
jobs:
upload-recipe:
if: ${{ github.repository == 'XRPLF/rippled' }}
# Only run when a tag is pushed to the XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' }}
uses: ./.github/workflows/reusable-upload-recipe.yml
secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}

View File

@@ -92,8 +92,8 @@ jobs:
upload-recipe:
needs: build-test
# Only run when pushing to the develop branch.
if: ${{ github.repository == 'XRPLF/rippled' && github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
# Only run when pushing to the develop branch in the XRPLF repository.
if: ${{ github.repository_owner == 'XRPLF' && github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
uses: ./.github/workflows/reusable-upload-recipe.yml
secrets:
remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}

View File

@@ -25,6 +25,12 @@ on:
type: string
default: ""
unittest_args:
description: "Additional arguments to pass to rippled when running tests"
required: false
type: string
default: ""
cmake_target:
description: "The CMake target to build."
required: true
@@ -76,7 +82,7 @@ jobs:
name: ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: ${{ inputs.sanitizers != '' && 360 || 60 }}
timeout-minutes: 60
env:
# Use a namespace to keep the objects separate for each configuration.
CCACHE_NAMESPACE: ${{ inputs.config_name }}
@@ -176,7 +182,7 @@ jobs:
fi
- name: Upload the binary (Linux)
if: ${{ github.repository == 'XRPLF/rippled' && runner.os == 'Linux' }}
if: ${{ github.repository_owner == 'XRPLF' && runner.os == 'Linux' }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: xrpld-${{ inputs.config_name }}
@@ -204,17 +210,11 @@ jobs:
- name: Set sanitizer options
if: ${{ !inputs.build_only && env.SANITIZERS_ENABLED == 'true' }}
env:
CONFIG_NAME: ${{ inputs.config_name }}
run: |
ASAN_OPTS="include=${GITHUB_WORKSPACE}/sanitizers/suppressions/runtime-asan-options.txt:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/asan.supp"
if [[ "${CONFIG_NAME}" == *gcc* ]]; then
ASAN_OPTS="${ASAN_OPTS}:alloc_dealloc_mismatch=0"
fi
echo "ASAN_OPTIONS=${ASAN_OPTS}" >> ${GITHUB_ENV}
echo "TSAN_OPTIONS=include=${GITHUB_WORKSPACE}/sanitizers/suppressions/runtime-tsan-options.txt:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/tsan.supp" >> ${GITHUB_ENV}
echo "UBSAN_OPTIONS=include=${GITHUB_WORKSPACE}/sanitizers/suppressions/runtime-ubsan-options.txt:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/ubsan.supp" >> ${GITHUB_ENV}
echo "LSAN_OPTIONS=include=${GITHUB_WORKSPACE}/sanitizers/suppressions/runtime-lsan-options.txt:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/lsan.supp" >> ${GITHUB_ENV}
echo "ASAN_OPTIONS=print_stacktrace=1:detect_container_overflow=0:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/asan.supp" >> ${GITHUB_ENV}
echo "TSAN_OPTIONS=second_deadlock_stack=1:halt_on_error=0:suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/tsan.supp" >> ${GITHUB_ENV}
echo "UBSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/ubsan.supp" >> ${GITHUB_ENV}
echo "LSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/sanitizers/suppressions/lsan.supp" >> ${GITHUB_ENV}
- name: Run the separate tests
if: ${{ !inputs.build_only }}
@@ -234,11 +234,11 @@ jobs:
working-directory: ${{ runner.os == 'Windows' && format('{0}/{1}', env.BUILD_DIR, inputs.build_type) || env.BUILD_DIR }}
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
UNITTEST_ARGS: ${{ inputs.unittest_args }}
run: |
set -o pipefail
# Coverage builds are slower due to instrumentation; use fewer parallel jobs to avoid flakiness
[ "$COVERAGE_ENABLED" = "true" ] && BUILD_NPROC=$(( BUILD_NPROC - 2 ))
./xrpld --unittest --unittest-jobs "${BUILD_NPROC}" 2>&1 | tee unittest.log
./xrpld --unittest --unittest-jobs "${BUILD_NPROC}" \
${UNITTEST_ARGS} 2>&1 | tee unittest.log
- name: Show test failure summary
if: ${{ failure() && !inputs.build_only }}
@@ -274,7 +274,7 @@ jobs:
--target coverage
- name: Upload coverage report
if: ${{ github.repository == 'XRPLF/rippled' && !inputs.build_only && env.COVERAGE_ENABLED == 'true' }}
if: ${{ github.repository_owner == 'XRPLF' && !inputs.build_only && env.COVERAGE_ENABLED == 'true' }}
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
with:
disable_search: true

View File

@@ -53,6 +53,7 @@ jobs:
build_type: ${{ matrix.build_type }}
ccache_enabled: ${{ inputs.ccache_enabled }}
cmake_args: ${{ matrix.cmake_args }}
unittest_args: ${{ matrix.unittest_args }}
cmake_target: ${{ matrix.cmake_target }}
runs_on: ${{ toJSON(matrix.architecture.runner) }}
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}

View File

@@ -20,7 +20,7 @@ jobs:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Check levelization
run: python .github/scripts/levelization/generate.py
run: .github/scripts/levelization/generate.sh
- name: Check for differences
env:
MESSAGE: |
@@ -32,7 +32,7 @@ jobs:
removed from loops.txt, it's probably an improvement, while if
something was added, it's probably a regression.
Run '.github/scripts/levelization/generate.py' in your repo, commit
Run '.github/scripts/levelization/generate.sh' in your repo, commit
and push the changes. See .github/scripts/levelization/README.md for
more info.
run: |

View File

@@ -51,5 +51,5 @@ jobs:
if: ${{ always() && !cancelled() && (!inputs.check_only_changed || needs.determine-files.outputs.any_cpp_changed == 'true' || needs.determine-files.outputs.clang_tidy_config_changed == 'true') }}
uses: ./.github/workflows/reusable-clang-tidy-files.yml
with:
files: ${{ needs.determine-files.outputs.clang_tidy_config_changed == 'true' && '' || (inputs.check_only_changed && needs.determine-files.outputs.all_changed_files || '') }}
files: ${{ (needs.determine-files.outputs.clang_tidy_config_changed == 'true' && '') || (inputs.check_only_changed && needs.determine-files.outputs.all_changed_files || '') }}
create_issue_on_failure: ${{ inputs.create_issue_on_failure }}

View File

@@ -69,28 +69,22 @@ jobs:
conan export . --version=${{ steps.version.outputs.version }}
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/${{ steps.version.outputs.version }}
# When this workflow is triggered by a push event, it will always be when merging into the
# 'develop' branch, see on-trigger.yml.
- name: Upload Conan recipe (develop)
if: ${{ github.event_name == 'push' }}
if: ${{ github.ref == 'refs/heads/develop' }}
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=develop
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/develop
# When this workflow is triggered by a pull request event, it will always be when merging into
# one of the 'release' branches, see on-pr.yml.
- name: Upload Conan recipe (rc)
if: ${{ github.event_name == 'pull_request' }}
if: ${{ startsWith(github.ref, 'refs/heads/release') }}
env:
REMOTE_NAME: ${{ inputs.remote_name }}
run: |
conan export . --version=rc
conan upload --confirm --check --remote="${REMOTE_NAME}" xrpl/rc
# When this workflow is triggered by a tag event, it will always be when tagging a final
# release, see on-tag.yml.
- name: Upload Conan recipe (release)
if: ${{ github.event_name == 'tag' }}
env:

View File

@@ -103,11 +103,11 @@ jobs:
sanitizers: ${{ matrix.sanitizers }}
- name: Log into Conan remote
if: ${{ github.repository == 'XRPLF/rippled' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
- name: Upload Conan packages
if: ${{ github.repository == 'XRPLF/rippled' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
env:
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}

3
.gitignore vendored
View File

@@ -75,9 +75,6 @@ DerivedData
/.claude
/CLAUDE.md
# Python
__pycache__
# Direnv's directory
/.direnv

View File

@@ -131,6 +131,7 @@ if(coverage)
include(XrplCov)
endif()
set(PROJECT_EXPORT_SET XrplExports)
include(XrplCore)
include(XrplInstall)
include(XrplValidatorKeys)

View File

@@ -127,6 +127,26 @@ tl;dr
> 6. Wrap the body at 72 characters.
> 7. Use the body to explain what and why vs. how.
In addition to those guidelines, please add one of the following
prefixes to the subject line if appropriate.
- `fix:` - The primary purpose is to fix an existing bug.
- `perf:` - The primary purpose is performance improvements.
- `refactor:` - The changes refactor code without affecting
functionality.
- `test:` - The changes _only_ affect unit tests.
- `docs:` - The changes _only_ affect documentation. This can
include code comments in addition to `.md` files like this one.
- `build:` - The changes _only_ affect the build process,
including CMake and/or Conan settings.
- `chore:` - Other tasks that don't affect the binary, but don't fit
any of the other cases. e.g. formatting, git settings, updating
Github Actions jobs.
Whenever possible, when updating commits after the PR is open, please
add the PR number to the end of the subject line. e.g. `test: Add
unit tests for Feature X (#1234)`.
## Pull requests
In general, pull requests use `develop` as the base branch.
@@ -160,23 +180,6 @@ credibility of the existing approvals is insufficient.
Pull requests must be merged by [squash-and-merge][squash]
to preserve a linear history for the `develop` branch.
### Type of Change
In addition to those guidelines, please start your PR title with one of the following:
- `build:` - The changes _only_ affect the build process, including CMake and/or Conan settings.
- `feat`: New feature (change which adds functionality).
- `fix:` - The primary purpose is to fix an existing bug.
- `docs:` - The changes _only_ affect documentation.
- `test:` - The changes _only_ affect unit tests.
- `ci`: Continuous Integration (changes to our CI configuration files and scripts).
- `style`: Code style (formatting).
- `refactor:` - The changes refactor code without affecting functionality.
- `perf:` - The primary purpose is performance improvements.
- `chore:` - Other tasks that don't affect the binary, but don't fit any of the other cases. e.g. `git` settings, `clang-tidy`, removing dead code, dropping support for older tooling.
First letter after the type prefix should be capitalized, and the type prefix should be followed by a colon and a space. e.g. `feat: Add support for Borrowing Protocol`.
### "Ready to merge"
A pull request should only have the "Ready to merge" label added when it

60
cmake/XrplConfig.cmake Normal file
View File

@@ -0,0 +1,60 @@
include(CMakeFindDependencyMacro)
# need to represent system dependencies of the lib here
#[=========================================================[
Boost
#]=========================================================]
if(static OR APPLE OR MSVC)
set(Boost_USE_STATIC_LIBS ON)
endif()
set(Boost_USE_MULTITHREADED ON)
if(static OR MSVC)
set(Boost_USE_STATIC_RUNTIME ON)
else()
set(Boost_USE_STATIC_RUNTIME OFF)
endif()
find_dependency(
Boost
COMPONENTS
chrono
container
context
coroutine
date_time
filesystem
program_options
regex
system
thread
)
#[=========================================================[
OpenSSL
#]=========================================================]
if(NOT DEFINED OPENSSL_ROOT_DIR)
if(DEFINED ENV{OPENSSL_ROOT})
set(OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
elseif(APPLE)
find_program(homebrew brew)
if(homebrew)
execute_process(
COMMAND ${homebrew} --prefix openssl
OUTPUT_VARIABLE OPENSSL_ROOT_DIR
OUTPUT_STRIP_TRAILING_WHITESPACE
)
endif()
endif()
file(TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
endif()
if(static OR APPLE OR MSVC)
set(OPENSSL_USE_STATIC_LIBS ON)
endif()
set(OPENSSL_MSVC_STATIC_RT ON)
find_dependency(OpenSSL REQUIRED)
find_dependency(ZLIB)
find_dependency(date)
if(TARGET ZLIB::ZLIB)
set_target_properties(
OpenSSL::Crypto
PROPERTIES INTERFACE_LINK_LIBRARIES ZLIB::ZLIB
)
endif()

View File

@@ -226,10 +226,6 @@ if(xrpld)
add_executable(xrpld)
if(tests)
target_compile_definitions(xrpld PUBLIC ENABLE_TESTS)
target_compile_definitions(
xrpld
PRIVATE UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
)
endif()
target_include_directories(
xrpld

View File

@@ -2,38 +2,100 @@
install stuff
#]===================================================================]
include(GNUInstallDirs)
include(create_symbolic_link)
if(is_root_project AND TARGET xrpld)
install(
TARGETS xrpld
RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT runtime
)
install(
FILES "${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg"
DESTINATION "${CMAKE_INSTALL_SYSCONFDIR}/xrpld"
RENAME xrpld.cfg
COMPONENT runtime
)
install(
FILES "${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt"
DESTINATION "${CMAKE_INSTALL_SYSCONFDIR}/xrpld"
RENAME validators.txt
COMPONENT runtime
)
# If no suffix is defined for executables (e.g. Windows uses .exe but Linux
# and macOS use none), then explicitly set it to the empty string.
if(NOT DEFINED suffix)
set(suffix "")
endif()
install(
TARGETS xrpl.libpb xrpl.libxrpl
LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT development
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT development
RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT development
TARGETS
common
opts
xrpl_boost
xrpl_libs
xrpl_syslibs
xrpl.imports.main
xrpl.libpb
xrpl.libxrpl
xrpl.libxrpl.basics
xrpl.libxrpl.beast
xrpl.libxrpl.conditions
xrpl.libxrpl.core
xrpl.libxrpl.crypto
xrpl.libxrpl.git
xrpl.libxrpl.json
xrpl.libxrpl.rdb
xrpl.libxrpl.ledger
xrpl.libxrpl.net
xrpl.libxrpl.nodestore
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
xrpl.libxrpl.server
xrpl.libxrpl.shamap
xrpl.libxrpl.tx
antithesis-sdk-cpp
EXPORT XrplExports
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin
INCLUDES DESTINATION include
)
install(
DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
COMPONENT development
)
install(
EXPORT XrplExports
FILE XrplTargets.cmake
NAMESPACE Xrpl::
DESTINATION lib/cmake/xrpl
)
include(CMakePackageConfigHelpers)
write_basic_package_version_file(
XrplConfigVersion.cmake
VERSION ${xrpld_version}
COMPATIBILITY SameMajorVersion
)
if(is_root_project AND TARGET xrpld)
install(TARGETS xrpld RUNTIME DESTINATION bin)
set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
# sample configs should not overwrite existing files
# install if-not-exists workaround as suggested by
# https://cmake.org/Bug/view.php?id=12646
install(
CODE
"
macro (copy_if_not_exists SRC DEST NEWNAME)
if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
file (INSTALL FILE_PERMISSIONS OWNER_READ OWNER_WRITE DESTINATION \"\${CMAKE_INSTALL_PREFIX}/\${DEST}\" FILES \"\${SRC}\" RENAME \"\${NEWNAME}\")
else ()
message (\"-- Skipping : \$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
endif ()
endmacro()
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg\" etc xrpld.cfg)
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
"
)
install(
CODE
"
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link)
create_symbolic_link(xrpld${suffix} \
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/rippled${suffix})
"
)
endif()
install(
FILES
${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
DESTINATION lib/cmake/xrpl
)

View File

@@ -23,7 +23,7 @@ target_compile_definitions(
BOOST_FILESYSTEM_NO_DEPRECATED
>
$<$<NOT:$<BOOL:${boost_show_deprecated}>>:
BOOST_COROUTINES2_NO_DEPRECATION_WARNING
BOOST_COROUTINES_NO_DEPRECATION_WARNING
BOOST_BEAST_ALLOW_DEPRECATED
BOOST_FILESYSTEM_DEPRECATED
>

View File

@@ -50,13 +50,6 @@ if(MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
message(FATAL_ERROR "Visual Studio 32-bit build is not supported.")
endif()
if(voidstar AND NOT is_amd64)
message(
FATAL_ERROR
"The voidstar library only supported on amd64/x86_64. Detected archictecture was: ${CMAKE_SYSTEM_PROCESSOR}"
)
endif()
if(APPLE AND NOT HOMEBREW)
find_program(HOMEBREW brew)
endif()

View File

@@ -23,12 +23,6 @@ option(assert "Enables asserts, even in release builds" OFF)
option(xrpld "Build xrpld" ON)
option(tests "Build tests" ON)
if(tests)
# This setting allows making a separate workflow to test fees other than default 10
if(NOT UNIT_TEST_REFERENCE_FEE)
set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "")
endif()
endif()
option(unity "Creates a build using UNITY support in cmake." OFF)
if(unity)

View File

@@ -7,7 +7,7 @@ find_package(
COMPONENTS
chrono
container
context
coroutine
date_time
filesystem
json
@@ -26,7 +26,7 @@ target_link_libraries(
Boost::headers
Boost::chrono
Boost::container
Boost::context
Boost::coroutine
Boost::date_time
Boost::filesystem
Boost::json
@@ -38,26 +38,23 @@ target_link_libraries(
if(Boost_COMPILER)
target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
endif()
# GCC 14+ has a false positive -Wuninitialized warning in Boost.Coroutine2's
# state.hpp when compiled with -O3. This is due to GCC's intentional behavior
# change (Bug #98871, #119388) where warnings from inlined system header code
# are no longer suppressed by -isystem. The warning occurs in operator|= in
# boost/coroutine2/detail/state.hpp when inlined from push_control_block::destroy().
# See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=119388
if(is_gcc AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 14)
target_compile_options(xrpl_boost INTERFACE -Wno-uninitialized)
endif()
# Boost.Context's ucontext backend has ASAN fiber-switching annotations
# (start/finish_switch_fiber) that are compiled in when BOOST_USE_ASAN is defined.
# This tells ASAN about coroutine stack switches, preventing false positive
# stack-use-after-scope errors. BOOST_USE_UCONTEXT ensures the ucontext backend
# is selected (fcontext does not support ASAN annotations).
# These defines must match what Boost was compiled with (see conan/profiles/sanitizers).
if(enable_asan)
target_compile_definitions(
xrpl_boost
INTERFACE BOOST_USE_ASAN BOOST_USE_UCONTEXT
if(SANITIZERS_ENABLED AND is_clang)
# TODO: gcc does not support -fsanitize-blacklist...can we do something else for gcc ?
if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
get_target_property(
Boost_INCLUDE_DIRS
Boost::headers
INTERFACE_INCLUDE_DIRECTORIES
)
endif()
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
file(
WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt
"src:${Boost_INCLUDE_DIRS}/*"
)
target_compile_options(
opts
INTERFACE # ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt
)
endif()

View File

@@ -7,21 +7,16 @@ include(default)
{% if compiler == "gcc" %}
{% if "address" in sanitizers or "thread" in sanitizers or "undefinedbehavior" in sanitizers %}
{% set sanitizer_list = [] %}
{% set defines = [] %}
{% set model_code = "" %}
{% set extra_cxxflags = ["-fno-omit-frame-pointer", "-O1", "-Wno-stringop-overflow"] %}
{% if "address" in sanitizers %}
{% set _ = sanitizer_list.append("address") %}
{% set model_code = "-mcmodel=large" %}
{% set _ = defines.append("BOOST_USE_ASAN")%}
{% set _ = defines.append("BOOST_USE_UCONTEXT")%}
{% elif "thread" in sanitizers %}
{% set _ = sanitizer_list.append("thread") %}
{% set model_code = "-mcmodel=medium" %}
{% set _ = extra_cxxflags.append("-Wno-tsan") %}
{% set _ = defines.append("BOOST_USE_TSAN")%}
{% set _ = defines.append("BOOST_USE_UCONTEXT")%}
{% endif %}
{% if "undefinedbehavior" in sanitizers %}
@@ -34,22 +29,16 @@ include(default)
tools.build:cxxflags+=['{{sanitizer_flags}} {{" ".join(extra_cxxflags)}}']
tools.build:sharedlinkflags+=['{{sanitizer_flags}}']
tools.build:exelinkflags+=['{{sanitizer_flags}}']
tools.build:defines+={{defines}}
{% endif %}
{% elif compiler == "apple-clang" or compiler == "clang" %}
{% if "address" in sanitizers or "thread" in sanitizers or "undefinedbehavior" in sanitizers %}
{% set sanitizer_list = [] %}
{% set defines = [] %}
{% set extra_cxxflags = ["-fno-omit-frame-pointer", "-O1"] %}
{% if "address" in sanitizers %}
{% set _ = sanitizer_list.append("address") %}
{% set _ = defines.append("BOOST_USE_ASAN")%}
{% set _ = defines.append("BOOST_USE_UCONTEXT")%}
{% elif "thread" in sanitizers %}
{% set _ = sanitizer_list.append("thread") %}
{% set _ = defines.append("BOOST_USE_TSAN")%}
{% set _ = defines.append("BOOST_USE_UCONTEXT")%}
{% endif %}
{% if "undefinedbehavior" in sanitizers %}
@@ -63,24 +52,8 @@ include(default)
tools.build:cxxflags+=['{{sanitizer_flags}} {{" ".join(extra_cxxflags)}}']
tools.build:sharedlinkflags+=['{{sanitizer_flags}}']
tools.build:exelinkflags+=['{{sanitizer_flags}}']
tools.build:defines+={{defines}}
{% endif %}
{% endif %}
{% endif %}
tools.info.package_id:confs+=["tools.build:cxxflags", "tools.build:exelinkflags", "tools.build:sharedlinkflags", "tools.build:defines"]
[options]
{% if sanitizers %}
{% if "address" in sanitizers %}
# Build Boost.Context with ucontext backend (not fcontext) so that
# ASAN fiber-switching annotations (__sanitizer_start/finish_switch_fiber)
# are compiled into the library. fcontext (assembly) has no ASAN support.
# define=BOOST_USE_ASAN=1 is critical: it must be defined when building
# Boost.Context itself so the ucontext backend compiles in the ASAN annotations.
boost/*:extra_b2_flags=context-impl=ucontext address-sanitizer=on define=BOOST_USE_ASAN=1
boost/*:without_context=False
# Boost stacktrace fails to build with some sanitizers
boost/*:without_stacktrace=True
{% endif %}
{% endif %}
tools.info.package_id:confs+=["tools.build:cxxflags", "tools.build:exelinkflags", "tools.build:sharedlinkflags"]

View File

@@ -1,5 +1,4 @@
import re
import os
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
@@ -58,9 +57,6 @@ class Xrpl(ConanFile):
"tests": False,
"unity": False,
"xrpld": False,
"boost/*:without_context": False,
"boost/*:without_coroutine": True,
"boost/*:without_coroutine2": False,
"date/*:header_only": True,
"ed25519/*:shared": False,
"grpc/*:shared": False,
@@ -130,12 +126,6 @@ class Xrpl(ConanFile):
if self.settings.compiler in ["clang", "gcc"]:
self.options["boost"].without_cobalt = True
# Check if environment variable exists
if "SANITIZERS" in os.environ:
sanitizers = os.environ["SANITIZERS"]
if "address" in sanitizers.lower():
self.default_options["fPIC"] = False
def requirements(self):
# Conan 2 requires transitive headers to be specified
transitive_headers_opt = (
@@ -206,8 +196,7 @@ class Xrpl(ConanFile):
"boost::headers",
"boost::chrono",
"boost::container",
"boost::context",
"boost::coroutine2",
"boost::coroutine",
"boost::date_time",
"boost::filesystem",
"boost::json",

View File

@@ -99,7 +99,6 @@ words:
- endmacro
- exceptioned
- Falco
- fcontext
- finalizers
- firewalled
- fmtdur
@@ -112,7 +111,6 @@ words:
- gpgcheck
- gpgkey
- hotwallet
- hwaddress
- hwrap
- ifndef
- inequation

View File

@@ -89,8 +89,8 @@ cmake --build . --parallel 4
**IMPORTANT**: ASAN with Boost produces many false positives. Use these options:
```bash
export ASAN_OPTIONS="include=sanitizers/suppressions/runtime-asan-options.txt:suppressions=sanitizers/suppressions/asan.supp"
export LSAN_OPTIONS="include=sanitizers/suppressions/runtime-lsan-options.txt:suppressions=sanitizers/suppressions/lsan.supp"
export ASAN_OPTIONS="print_stacktrace=1:detect_container_overflow=0:suppressions=path/to/asan.supp:halt_on_error=0:log_path=asan.log"
export LSAN_OPTIONS="suppressions=path/to/lsan.supp:halt_on_error=0:log_path=lsan.log"
# Run tests
./xrpld --unittest --unittest-jobs=5
@@ -108,7 +108,7 @@ export LSAN_OPTIONS="include=sanitizers/suppressions/runtime-lsan-options.txt:su
### ThreadSanitizer (TSan)
```bash
export TSAN_OPTIONS="include=sanitizers/suppressions/runtime-tsan-options.txt:suppressions=sanitizers/suppressions/tsan.supp"
export TSAN_OPTIONS="suppressions=path/to/tsan.supp halt_on_error=0 log_path=tsan.log"
# Run tests
./xrpld --unittest --unittest-jobs=5
@@ -129,7 +129,7 @@ More details [here](https://github.com/google/sanitizers/wiki/AddressSanitizerLe
### UndefinedBehaviorSanitizer (UBSan)
```bash
export UBSAN_OPTIONS="include=sanitizers/suppressions/runtime-ubsan-options.txt:suppressions=sanitizers/suppressions/ubsan.supp"
export UBSAN_OPTIONS="suppressions=path/to/ubsan.supp:print_stacktrace=1:halt_on_error=0:log_path=ubsan.log"
# Run tests
./xrpld --unittest --unittest-jobs=5

View File

@@ -1,6 +1,5 @@
#pragma once
#include <xrpl/basics/sanitizers.h>
#include <xrpl/beast/type_name.h>
#include <exception>
@@ -24,28 +23,16 @@ LogThrow(std::string const& title);
When called from within a catch block, it will pass
control to the next matching exception handler, if any.
Otherwise, std::terminate will be called.
ASAN can't handle sudden jumps in control flow very well. This
function is marked as XRPL_NO_SANITIZE_ADDRESS to prevent it from
triggering false positives, since it throws.
*/
[[noreturn]] XRPL_NO_SANITIZE_ADDRESS inline void
[[noreturn]] inline void
Rethrow()
{
LogThrow("Re-throwing exception");
throw;
}
/*
Logs and throws an exception of type E.
ASAN can't handle sudden jumps in control flow very well. This
function is marked as XRPL_NO_SANITIZE_ADDRESS to prevent it from
triggering false positives, since it throws.
*/
template <class E, class... Args>
[[noreturn]] XRPL_NO_SANITIZE_ADDRESS inline void
[[noreturn]] inline void
Throw(Args&&... args)
{
static_assert(

View File

@@ -1,13 +0,0 @@
#pragma once
// Helper to disable ASan/HwASan for specific functions
/*
ASAN flags some false positives with sudden jumps in control flow, like
exceptions, or when encountering coroutine stack switches. This macro can be used to disable ASAN
instrumentation for specific functions.
*/
#if defined(__GNUC__) || defined(__clang__)
#define XRPL_NO_SANITIZE_ADDRESS __attribute__((no_sanitize("address", "hwaddress")))
#else
#define XRPL_NO_SANITIZE_ADDRESS
#endif

View File

@@ -9,6 +9,7 @@
#include <boost/assert.hpp>
#include <mutex>
#include <optional>
#include <string>
namespace beast {
@@ -22,6 +23,7 @@ namespace unit_test {
class runner
{
std::string arg_;
std::optional<std::int64_t> referenceFee_;
bool default_ = false;
bool failed_ = false;
bool cond_ = false;
@@ -54,6 +56,24 @@ public:
return arg_;
}
/** Set the reference fee (in drops) for tests.
If provided, this value is used in every suite that
does not override it.
*/
void
referenceFee(std::int64_t fee)
{
referenceFee_ = fee;
}
/** Returns the reference fee, if any. */
std::optional<std::int64_t> const&
referenceFee() const
{
return referenceFee_;
}
/** Run the specified suite.
@return `true` if any conditions failed.
*/

View File

@@ -281,6 +281,16 @@ public:
return runner_->arg();
}
/** Return the reference fee associated with the runner. */
std::optional<std::int64_t>
referenceFee() const
{
assert(runner_);
if (!runner_)
return {};
return runner_->referenceFee();
}
// DEPRECATED
// @return `true` if the test condition indicates success(a false value)
template <class Condition, class String>

View File

@@ -1,5 +1,7 @@
#pragma once
#include <xrpl/basics/ByteUtilities.h>
namespace xrpl {
template <class F>
@@ -9,18 +11,16 @@ JobQueue::Coro::Coro(Coro_create_t, JobQueue& jq, JobType type, std::string cons
, name_(name)
, running_(false)
, coro_(
// Stack size of 1MB wasn't sufficient for deep calls. ASAN tests flagged the issue. Hence
// increasing the size to 1.5MB.
boost::context::protected_fixedsize_stack(1536 * 1024),
[this, fn = std::forward<F>(f)](
boost::coroutines2::asymmetric_coroutine<void>::push_type& do_yield) {
boost::coroutines::asymmetric_coroutine<void>::push_type& do_yield) {
yield_ = &do_yield;
yield();
fn(shared_from_this());
#ifndef NDEBUG
finished_ = true;
#endif
})
},
boost::coroutines::attributes(megabytes(1)))
{
}

View File

@@ -7,8 +7,7 @@
#include <xrpl/core/detail/Workers.h>
#include <xrpl/json/json_value.h>
#include <boost/context/protected_fixedsize_stack.hpp>
#include <boost/coroutine2/all.hpp>
#include <boost/coroutine/all.hpp>
#include <set>
@@ -49,8 +48,8 @@ public:
std::mutex mutex_;
std::mutex mutex_run_;
std::condition_variable cv_;
boost::coroutines2::coroutine<void>::pull_type coro_;
boost::coroutines2::coroutine<void>::push_type* yield_;
boost::coroutines::asymmetric_coroutine<void>::pull_type coro_;
boost::coroutines::asymmetric_coroutine<void>::push_type* yield_;
#ifndef NDEBUG
bool finished_ = false;
#endif

View File

@@ -29,18 +29,6 @@ public:
bool sslVerify,
beast::Journal j);
/** Destroys the global SSL context created by initializeSSLContext().
*
* This releases the underlying boost::asio::ssl::context and any
* associated OpenSSL resources. Must not be called while any
* HTTPClient requests are in flight.
*
* @note Currently only called from tests during teardown. In production,
* the SSL context lives for the lifetime of the process.
*/
static void
cleanupSSLContext();
static void
get(bool bSSL,
boost::asio::io_context& io_context,

View File

@@ -64,49 +64,6 @@
namespace xrpl {
// Feature names must not exceed this length (in characters, excluding the null terminator).
static constexpr std::size_t maxFeatureNameSize = 63;
// Reserve this exact feature-name length (in characters/bytes, excluding the null terminator)
// so that a 32-byte uint256 (for example, in WASM or other interop contexts) can be used
// as a compact, fixed-size feature selector without conflicting with human-readable names.
static constexpr std::size_t reservedFeatureNameSize = 32;
// Both validFeatureNameSize and validFeatureName are consteval functions that can be used in
// static_asserts to validate feature names at compile time. They are only used inside
// enforceValidFeatureName in Feature.cpp, but are exposed here for testing. The expected
// parameter `auto fn` is a constexpr lambda which returns a const char*, making it available
// for compile-time evaluation. Read more in https://accu.org/journals/overload/30/172/wu/
consteval auto
validFeatureNameSize(auto fn) -> bool
{
constexpr char const* n = fn();
// Note, std::strlen is not constexpr, we need to implement our own here.
constexpr std::size_t N = [](auto n) {
std::size_t ret = 0;
for (auto ptr = n; *ptr != '\0'; ret++, ++ptr)
;
return ret;
}(n);
return N != reservedFeatureNameSize && //
N <= maxFeatureNameSize;
}
consteval auto
validFeatureName(auto fn) -> bool
{
constexpr char const* n = fn();
// Prevent the use of visually confusable characters and enforce that feature names
// are always valid ASCII. This is needed because C++ allows Unicode identifiers.
// Characters below 0x20 are nonprintable control characters, and characters with the 0x80 bit
// set are non-ASCII (e.g. UTF-8 encoding of Unicode), so both are disallowed.
for (auto ptr = n; *ptr != '\0'; ++ptr)
{
if (*ptr & 0x80 || *ptr < 0x20)
return false;
}
return true;
}
enum class VoteBehavior : int { Obsolete = -1, DefaultNo = 0, DefaultYes };
enum class AmendmentSupport : int { Retired = -1, Supported = 0, Unsupported };

View File

@@ -209,7 +209,7 @@ std::size_t constexpr maxDIDDocumentLength = 256;
std::size_t constexpr maxDIDURILength = 256;
/** The maximum length of an Attestation inside a DID */
std::size_t constexpr maxDIDDataLength = 256;
std::size_t constexpr maxDIDAttestationLength = 256;
/** The maximum length of a domain */
std::size_t constexpr maxDomainLength = 256;

View File

@@ -15,7 +15,7 @@
namespace xrpl {
enum class TxnSql : char {
enum TxnSql : char {
txnSqlNew = 'N',
txnSqlConflict = 'C',
txnSqlHeld = 'H',
@@ -83,9 +83,6 @@ public:
std::uint32_t
getSeqValue() const;
AccountID
getFeePayer() const;
boost::container::flat_set<AccountID>
getMentionedAccounts() const;
@@ -125,7 +122,7 @@ public:
getMetaSQL(
Serializer rawTxn,
std::uint32_t inLedger,
TxnSql status,
char status,
std::string const& escapedMetaData) const;
std::vector<uint256> const&

View File

@@ -16,11 +16,8 @@ namespace xrpl {
/** A secret key. */
class SecretKey
{
public:
static constexpr std::size_t size_ = 32;
private:
std::uint8_t buf_[size_];
std::uint8_t buf_[32];
public:
using const_iterator = std::uint8_t const*;
@@ -30,14 +27,9 @@ public:
SecretKey&
operator=(SecretKey const&) = default;
bool
operator==(SecretKey const&) = delete;
bool
operator!=(SecretKey const&) = delete;
~SecretKey();
SecretKey(std::array<std::uint8_t, size_> const& data);
SecretKey(std::array<std::uint8_t, 32> const& data);
SecretKey(Slice const& slice);
std::uint8_t const*
@@ -86,10 +78,16 @@ public:
};
inline bool
operator==(SecretKey const& lhs, SecretKey const& rhs) = delete;
operator==(SecretKey const& lhs, SecretKey const& rhs)
{
return lhs.size() == rhs.size() && std::memcmp(lhs.data(), rhs.data(), rhs.size()) == 0;
}
inline bool
operator!=(SecretKey const& lhs, SecretKey const& rhs) = delete;
operator!=(SecretKey const& lhs, SecretKey const& rhs)
{
return !(lhs == rhs);
}
//------------------------------------------------------------------------------

View File

@@ -14,7 +14,7 @@ namespace xrpl {
static inline std::string const&
systemName()
{
static std::string const name = "xrpld";
static std::string const name = "ripple";
return name;
}

View File

@@ -42,7 +42,7 @@ TRANSACTION(ttPAYMENT, 0, Payment,
/** This transaction type creates an escrow object. */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/escrow/EscrowCreate.h>
# include <xrpl/tx/transactors/escrow/Escrow.h>
#endif
TRANSACTION(ttESCROW_CREATE, 1, EscrowCreate,
Delegation::delegable,
@@ -58,9 +58,6 @@ TRANSACTION(ttESCROW_CREATE, 1, EscrowCreate,
}))
/** This transaction type completes an existing escrow. */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/escrow/EscrowFinish.h>
#endif
TRANSACTION(ttESCROW_FINISH, 2, EscrowFinish,
Delegation::delegable,
uint256{},
@@ -97,7 +94,7 @@ TRANSACTION(ttACCOUNT_SET, 3, AccountSet,
/** This transaction type cancels an existing escrow. */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/escrow/EscrowCancel.h>
# include <xrpl/tx/transactors/escrow/Escrow.h>
#endif
TRANSACTION(ttESCROW_CANCEL, 4, EscrowCancel,
Delegation::delegable,
@@ -183,7 +180,7 @@ TRANSACTION(ttSIGNER_LIST_SET, 12, SignerListSet,
/** This transaction type creates a new unidirectional XRP payment channel. */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/payment_channel/PayChanCreate.h>
# include <xrpl/tx/transactors/payment_channel/PayChan.h>
#endif
TRANSACTION(ttPAYCHAN_CREATE, 13, PaymentChannelCreate,
Delegation::delegable,
@@ -199,9 +196,6 @@ TRANSACTION(ttPAYCHAN_CREATE, 13, PaymentChannelCreate,
}))
/** This transaction type funds an existing unidirectional XRP payment channel. */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/payment_channel/PayChanFund.h>
#endif
TRANSACTION(ttPAYCHAN_FUND, 14, PaymentChannelFund,
Delegation::delegable,
uint256{},
@@ -213,9 +207,6 @@ TRANSACTION(ttPAYCHAN_FUND, 14, PaymentChannelFund,
}))
/** This transaction type submits a claim against an existing unidirectional payment channel. */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/payment_channel/PayChanClaim.h>
#endif
TRANSACTION(ttPAYCHAN_CLAIM, 15, PaymentChannelClaim,
Delegation::delegable,
uint256{},
@@ -626,7 +617,7 @@ TRANSACTION(ttXCHAIN_CREATE_BRIDGE, 48, XChainCreateBridge,
/** This transaction type creates or updates a DID */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/did/DIDSet.h>
# include <xrpl/tx/transactors/did/DID.h>
#endif
TRANSACTION(ttDID_SET, 49, DIDSet,
Delegation::delegable,
@@ -639,9 +630,6 @@ TRANSACTION(ttDID_SET, 49, DIDSet,
}))
/** This transaction type deletes a DID */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/did/DIDDelete.h>
#endif
TRANSACTION(ttDID_DELETE, 50, DIDDelete,
Delegation::delegable,
featureDID,
@@ -751,7 +739,7 @@ TRANSACTION(ttMPTOKEN_AUTHORIZE, 57, MPTokenAuthorize,
/** This transaction type create an Credential instance */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/credentials/CredentialCreate.h>
# include <xrpl/tx/transactors/credentials/Credentials.h>
#endif
TRANSACTION(ttCREDENTIAL_CREATE, 58, CredentialCreate,
Delegation::delegable,
@@ -765,9 +753,6 @@ TRANSACTION(ttCREDENTIAL_CREATE, 58, CredentialCreate,
}))
/** This transaction type accept an Credential object */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/credentials/CredentialAccept.h>
#endif
TRANSACTION(ttCREDENTIAL_ACCEPT, 59, CredentialAccept,
Delegation::delegable,
featureCredentials,
@@ -778,9 +763,6 @@ TRANSACTION(ttCREDENTIAL_ACCEPT, 59, CredentialAccept,
}))
/** This transaction type delete an Credential object */
#if TRANSACTION_INCLUDE
# include <xrpl/tx/transactors/credentials/CredentialDelete.h>
#endif
TRANSACTION(ttCREDENTIAL_DELETE, 60, CredentialDelete,
Delegation::delegable,
featureCredentials,
@@ -848,7 +830,7 @@ TRANSACTION(ttDELEGATE_SET, 64, DelegateSet,
# include <xrpl/tx/transactors/vault/VaultCreate.h>
#endif
TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
Delegation::notDelegable,
Delegation::delegable,
featureSingleAssetVault,
createPseudoAcct | createMPTIssuance | mustModifyVault,
({
@@ -866,7 +848,7 @@ TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
# include <xrpl/tx/transactors/vault/VaultSet.h>
#endif
TRANSACTION(ttVAULT_SET, 66, VaultSet,
Delegation::notDelegable,
Delegation::delegable,
featureSingleAssetVault,
mustModifyVault,
({
@@ -881,7 +863,7 @@ TRANSACTION(ttVAULT_SET, 66, VaultSet,
# include <xrpl/tx/transactors/vault/VaultDelete.h>
#endif
TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
Delegation::notDelegable,
Delegation::delegable,
featureSingleAssetVault,
mustDeleteAcct | destroyMPTIssuance | mustModifyVault,
({
@@ -893,7 +875,7 @@ TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
# include <xrpl/tx/transactors/vault/VaultDeposit.h>
#endif
TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
Delegation::notDelegable,
Delegation::delegable,
featureSingleAssetVault,
mayAuthorizeMPT | mustModifyVault,
({
@@ -906,7 +888,7 @@ TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
# include <xrpl/tx/transactors/vault/VaultWithdraw.h>
#endif
TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
Delegation::notDelegable,
Delegation::delegable,
featureSingleAssetVault,
mayDeleteMPT | mayAuthorizeMPT | mustModifyVault,
({
@@ -921,7 +903,7 @@ TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
# include <xrpl/tx/transactors/vault/VaultClawback.h>
#endif
TRANSACTION(ttVAULT_CLAWBACK, 70, VaultClawback,
Delegation::notDelegable,
Delegation::delegable,
featureSingleAssetVault,
mayDeleteMPT | mustModifyVault,
({
@@ -950,7 +932,7 @@ TRANSACTION(ttBATCH, 71, Batch,
# include <xrpl/tx/transactors/lending/LoanBrokerSet.h>
#endif
TRANSACTION(ttLOAN_BROKER_SET, 74, LoanBrokerSet,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
createPseudoAcct | mayAuthorizeMPT, ({
{sfVaultID, soeREQUIRED},
@@ -967,7 +949,7 @@ TRANSACTION(ttLOAN_BROKER_SET, 74, LoanBrokerSet,
# include <xrpl/tx/transactors/lending/LoanBrokerDelete.h>
#endif
TRANSACTION(ttLOAN_BROKER_DELETE, 75, LoanBrokerDelete,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
mustDeleteAcct | mayAuthorizeMPT, ({
{sfLoanBrokerID, soeREQUIRED},
@@ -978,7 +960,7 @@ TRANSACTION(ttLOAN_BROKER_DELETE, 75, LoanBrokerDelete,
# include <xrpl/tx/transactors/lending/LoanBrokerCoverDeposit.h>
#endif
TRANSACTION(ttLOAN_BROKER_COVER_DEPOSIT, 76, LoanBrokerCoverDeposit,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
noPriv, ({
{sfLoanBrokerID, soeREQUIRED},
@@ -990,7 +972,7 @@ TRANSACTION(ttLOAN_BROKER_COVER_DEPOSIT, 76, LoanBrokerCoverDeposit,
# include <xrpl/tx/transactors/lending/LoanBrokerCoverWithdraw.h>
#endif
TRANSACTION(ttLOAN_BROKER_COVER_WITHDRAW, 77, LoanBrokerCoverWithdraw,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
mayAuthorizeMPT, ({
{sfLoanBrokerID, soeREQUIRED},
@@ -1005,7 +987,7 @@ TRANSACTION(ttLOAN_BROKER_COVER_WITHDRAW, 77, LoanBrokerCoverWithdraw,
# include <xrpl/tx/transactors/lending/LoanBrokerCoverClawback.h>
#endif
TRANSACTION(ttLOAN_BROKER_COVER_CLAWBACK, 78, LoanBrokerCoverClawback,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
noPriv, ({
{sfLoanBrokerID, soeOPTIONAL},
@@ -1017,7 +999,7 @@ TRANSACTION(ttLOAN_BROKER_COVER_CLAWBACK, 78, LoanBrokerCoverClawback,
# include <xrpl/tx/transactors/lending/LoanSet.h>
#endif
TRANSACTION(ttLOAN_SET, 80, LoanSet,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
mayAuthorizeMPT | mustModifyVault, ({
{sfLoanBrokerID, soeREQUIRED},
@@ -1044,7 +1026,7 @@ TRANSACTION(ttLOAN_SET, 80, LoanSet,
# include <xrpl/tx/transactors/lending/LoanDelete.h>
#endif
TRANSACTION(ttLOAN_DELETE, 81, LoanDelete,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
noPriv, ({
{sfLoanID, soeREQUIRED},
@@ -1055,7 +1037,7 @@ TRANSACTION(ttLOAN_DELETE, 81, LoanDelete,
# include <xrpl/tx/transactors/lending/LoanManage.h>
#endif
TRANSACTION(ttLOAN_MANAGE, 82, LoanManage,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
// All of the LoanManage options will modify the vault, but the
// transaction can succeed without options, essentially making it
@@ -1069,7 +1051,7 @@ TRANSACTION(ttLOAN_MANAGE, 82, LoanManage,
# include <xrpl/tx/transactors/lending/LoanPay.h>
#endif
TRANSACTION(ttLOAN_PAY, 84, LoanPay,
Delegation::notDelegable,
Delegation::delegable,
featureLendingProtocol,
mayAuthorizeMPT | mustModifyVault, ({
{sfLoanID, soeREQUIRED},

View File

@@ -114,7 +114,8 @@ protected:
beast::Journal const j_;
AccountID const account_;
XRPAmount preFeeBalance_; // Balance before fees.
XRPAmount mPriorBalance; // Balance before fees.
XRPAmount mSourceBalance; // Balance after fees.
virtual ~Transactor() = default;
Transactor(Transactor const&) = delete;

View File

@@ -27,33 +27,6 @@ namespace xrpl {
* communicate the interface required of any invariant checker. Any invariant
* check implementation should implement the public methods documented here.
*
* ## Rules for implementing `finalize`
*
* ### Invariants must run regardless of transaction result
*
* An invariant's `finalize` method MUST perform meaningful checks even when
* the transaction has failed (i.e., `!isTesSuccess(tec)`). The following
* pattern is almost certainly wrong and must never be used:
*
* @code
* // WRONG: skipping all checks on failure defeats the purpose of invariants
* if (!isTesSuccess(tec))
* return true;
* @endcode
*
* The entire purpose of invariants is to detect and prevent the impossible.
* A bug or exploit could cause a failed transaction to mutate ledger state in
* unexpected ways. Invariants are the last line of defense against such
* scenarios.
*
* In general: an invariant that expects a domain-specific state change to
* occur (e.g., a new object being created) should only expect that change
* when the transaction succeeded. A failed VaultCreate must not have created
* a Vault. A failed LoanSet must not have created a Loan.
*
* Also be aware that failed transactions, regardless of type, carry no
* Privileges. Any privilege-gated checks must therefore also be applied to
* failed transactions.
*/
class InvariantChecker_PROTOTYPE
{
@@ -75,11 +48,7 @@ public:
/**
* @brief called after all ledger entries have been visited to determine
* the final status of the check.
*
* This method MUST perform meaningful checks even when `tec` indicates a
* failed transaction. See the class-level documentation for the rules
* governing how failed transactions must be handled.
* the final status of the check
*
* @param tx the transaction being applied
* @param tec the current TER result of the transaction

View File

@@ -1,29 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class CredentialAccept : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit CredentialAccept(ApplyContext& ctx) : Transactor(ctx)
{
}
static std::uint32_t
getFlagsMask(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -1,29 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class CredentialCreate : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit CredentialCreate(ApplyContext& ctx) : Transactor(ctx)
{
}
static std::uint32_t
getFlagsMask(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -1,29 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class CredentialDelete : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit CredentialDelete(ApplyContext& ctx) : Transactor(ctx)
{
}
static std::uint32_t
getFlagsMask(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -0,0 +1,77 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class CredentialCreate : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit CredentialCreate(ApplyContext& ctx) : Transactor(ctx)
{
}
static std::uint32_t
getFlagsMask(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
//------------------------------------------------------------------------------
class CredentialDelete : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit CredentialDelete(ApplyContext& ctx) : Transactor(ctx)
{
}
static std::uint32_t
getFlagsMask(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
//------------------------------------------------------------------------------
class CredentialAccept : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit CredentialAccept(ApplyContext& ctx) : Transactor(ctx)
{
}
static std::uint32_t
getFlagsMask(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -370,7 +370,7 @@ changeSpotPriceQuality(
if (!amounts)
{
JLOG(j.trace()) << "changeSpotPrice calc failed: " << to_string(pool.in) << " "
<< to_string(pool.out) << " " << quality << " " << tfee;
<< to_string(pool.out) << " " << quality << " " << tfee << std::endl;
return std::nullopt;
}

View File

@@ -4,6 +4,24 @@
namespace xrpl {
class DIDSet : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit DIDSet(ApplyContext& ctx) : Transactor(ctx)
{
}
static NotTEC
preflight(PreflightContext const& ctx);
TER
doApply() override;
};
//------------------------------------------------------------------------------
class DIDDelete : public Transactor
{
public:

View File

@@ -1,23 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class DIDSet : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit DIDSet(ApplyContext& ctx) : Transactor(ctx)
{
}
static NotTEC
preflight(PreflightContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -0,0 +1,80 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class EscrowCreate : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Custom};
explicit EscrowCreate(ApplyContext& ctx) : Transactor(ctx)
{
}
static TxConsequences
makeTxConsequences(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
//------------------------------------------------------------------------------
class EscrowFinish : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit EscrowFinish(ApplyContext& ctx) : Transactor(ctx)
{
}
static bool
checkExtraFeatures(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static NotTEC
preflightSigValidated(PreflightContext const& ctx);
static XRPAmount
calculateBaseFee(ReadView const& view, STTx const& tx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
//------------------------------------------------------------------------------
class EscrowCancel : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit EscrowCancel(ApplyContext& ctx) : Transactor(ctx)
{
}
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -1,26 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class EscrowCancel : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit EscrowCancel(ApplyContext& ctx) : Transactor(ctx)
{
}
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -1,29 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class EscrowCreate : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Custom};
explicit EscrowCreate(ApplyContext& ctx) : Transactor(ctx)
{
}
static TxConsequences
makeTxConsequences(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -1,35 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class EscrowFinish : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit EscrowFinish(ApplyContext& ctx) : Transactor(ctx)
{
}
static bool
checkExtraFeatures(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static NotTEC
preflightSigValidated(PreflightContext const& ctx);
static XRPAmount
calculateBaseFee(ReadView const& view, STTx const& tx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
} // namespace xrpl

View File

@@ -0,0 +1,83 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class PayChanCreate : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Custom};
explicit PayChanCreate(ApplyContext& ctx) : Transactor(ctx)
{
}
static TxConsequences
makeTxConsequences(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
using PaymentChannelCreate = PayChanCreate;
//------------------------------------------------------------------------------
class PayChanFund : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Custom};
explicit PayChanFund(ApplyContext& ctx) : Transactor(ctx)
{
}
static TxConsequences
makeTxConsequences(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
TER
doApply() override;
};
using PaymentChannelFund = PayChanFund;
//------------------------------------------------------------------------------
class PayChanClaim : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit PayChanClaim(ApplyContext& ctx) : Transactor(ctx)
{
}
static bool
checkExtraFeatures(PreflightContext const& ctx);
static std::uint32_t
getFlagsMask(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
using PaymentChannelClaim = PayChanClaim;
} // namespace xrpl

View File

@@ -1,34 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class PayChanClaim : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Normal};
explicit PayChanClaim(ApplyContext& ctx) : Transactor(ctx)
{
}
static bool
checkExtraFeatures(PreflightContext const& ctx);
static std::uint32_t
getFlagsMask(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
using PaymentChannelClaim = PayChanClaim;
} // namespace xrpl

View File

@@ -1,31 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class PayChanCreate : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Custom};
explicit PayChanCreate(ApplyContext& ctx) : Transactor(ctx)
{
}
static TxConsequences
makeTxConsequences(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
static TER
preclaim(PreclaimContext const& ctx);
TER
doApply() override;
};
using PaymentChannelCreate = PayChanCreate;
} // namespace xrpl

View File

@@ -1,28 +0,0 @@
#pragma once
#include <xrpl/tx/Transactor.h>
namespace xrpl {
class PayChanFund : public Transactor
{
public:
static constexpr ConsequencesFactoryType ConsequencesFactory{Custom};
explicit PayChanFund(ApplyContext& ctx) : Transactor(ctx)
{
}
static TxConsequences
makeTxConsequences(PreflightContext const& ctx);
static NotTEC
preflight(PreflightContext const& ctx);
TER
doApply() override;
};
using PaymentChannelFund = PayChanFund;
} // namespace xrpl

View File

@@ -1,6 +1,24 @@
# The idea is to empty this file gradually by fixing the underlying issues and removing suppressions.
#
# ASAN_OPTIONS="print_stacktrace=1:detect_container_overflow=0:suppressions=sanitizers/suppressions/asan.supp:halt_on_error=0"
#
# The detect_container_overflow=0 option disables false positives from:
# - Boost intrusive containers (slist_iterator.hpp, hashtable.hpp, aged_unordered_container.h)
# - Boost context/coroutine stack switching (Workers.cpp, thread.h)
#
# See: https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow
# Boost
interceptor_name:boost/asio
# Leaks in Doctest tests: xrpl.test.*
interceptor_name:src/libxrpl/net/HTTPClient.cpp
interceptor_name:src/libxrpl/net/RegisterSSLCerts.cpp
interceptor_name:src/tests/libxrpl/net/HTTPClient.cpp
interceptor_name:xrpl/net/AutoSocket.h
interceptor_name:xrpl/net/HTTPClient.h
interceptor_name:xrpl/net/HTTPClientSSLContext.h
interceptor_name:xrpl/net/RegisterSSLCerts.h
# Suppress false positive stack-buffer errors in thread stack allocation
# Related to ASan's __asan_handle_no_return warnings (github.com/google/sanitizers/issues/189)

View File

@@ -1,5 +1,16 @@
# The idea is to empty this file gradually by fixing the underlying issues and removing suppresions.
# Suppress leaks detected by asan in rippled code.
leak:src/libxrpl/net/HTTPClient.cpp
leak:src/libxrpl/net/RegisterSSLCerts.cpp
leak:src/tests/libxrpl/net/HTTPClient.cpp
leak:xrpl/net/AutoSocket.h
leak:xrpl/net/HTTPClient.h
leak:xrpl/net/HTTPClientSSLContext.h
leak:xrpl/net/RegisterSSLCerts.h
leak:ripple::HTTPClient
leak:ripple::HTTPClientImp
# Suppress leaks detected by asan in boost code.
leak:boost::asio
leak:boost/asio

View File

@@ -1,8 +0,0 @@
detect_container_overflow=false
detect_stack_use_after_return=false
debug=true
halt_on_error=false
print_stats=true
print_cmdline=true
use_sigaltstack=0
print_stacktrace=1

View File

@@ -1 +0,0 @@
halt_on_error=false

View File

@@ -1,3 +0,0 @@
halt_on_error=false
verbosity=1
second_deadlock_stack=1

View File

@@ -1 +0,0 @@
halt_on_error=false

View File

@@ -27,11 +27,3 @@ src:core/JobQueue.cpp
src:libxrpl/beast/utility/beast_Journal.cpp
src:test/beast/beast_PropertyStream_test.cpp
src:src/test/app/Invariants_test.cpp
# ASan false positive: stack-use-after-scope in ErrorCodes.h inline functions.
# When Clang inlines the StaticString overloads (e.g. invalid_field_error(StaticString)),
# ASan scope-poisons the temporary std::string before the inlined callee finishes reading
# through the const ref. This corrupts the coroutine stack and crashes the Simulate test.
# See asan.supp comments for full explanation and planned fix.
[address]
src:*ErrorCodes.h

View File

@@ -182,17 +182,6 @@ signed-integer-overflow:src/test/beast/LexicalCast_test.cpp
# External library suppressions
unsigned-integer-overflow:nudb/detail/xxhash.hpp
# Loan_test.cpp intentional underflow in test arithmetic
unsigned-integer-overflow:src/test/app/Loan_test.cpp
undefined:src/test/app/Loan_test.cpp
# Source tree restructured paths (libxrpl/tx/transactors/)
# These duplicate the xrpld/app/tx/detail entries above for the new layout
unsigned-integer-overflow:src/libxrpl/tx/transactors/oracle/SetOracle.cpp
undefined:src/libxrpl/tx/transactors/oracle/SetOracle.cpp
unsigned-integer-overflow:src/libxrpl/tx/transactors/nft/NFTokenMint.cpp
undefined:src/libxrpl/tx/transactors/nft/NFTokenMint.cpp
# Protobuf intentional overflows in hash functions
# Protobuf uses intentional unsigned overflow for hash computation (stringpiece.h:393)
unsigned-integer-overflow:google/protobuf/stubs/stringpiece.h

View File

@@ -258,7 +258,7 @@ Number::Guard::doRoundUp(
}
bringIntoRange(negative, mantissa, exponent, minMantissa);
if (exponent > maxExponent)
Throw<std::overflow_error>(std::string(location));
throw std::overflow_error(location);
}
template <UnsignedMantissa T>
@@ -298,7 +298,7 @@ Number::Guard::doRound(rep& drops, std::string location)
// or "(maxRep + 1) / 10", neither of which will round up when
// converting to rep, though the latter might overflow _before_
// rounding.
Throw<std::overflow_error>(std::string(location)); // LCOV_EXCL_LINE
throw std::overflow_error(location); // LCOV_EXCL_LINE
}
++drops;
}

View File

@@ -35,6 +35,7 @@ namespace xrpl {
template <class Derived>
class AsyncObject
{
protected:
AsyncObject() : m_pending(0)
{
}
@@ -92,8 +93,6 @@ public:
private:
// The number of handlers pending.
std::atomic<int> m_pending;
friend Derived;
};
class ResolverAsioImpl : public ResolverAsio, public AsyncObject<ResolverAsioImpl>

View File

@@ -116,7 +116,6 @@ encode(void* dest, void const* src, std::size_t len)
in += 3;
}
// NOLINTNEXTLINE(bugprone-switch-missing-default-case)
switch (len % 3)
{
case 2:
@@ -169,7 +168,7 @@ decode(void* dest, char const* src, std::size_t len)
break;
++in;
c4[i] = v;
if (++i; i == 4)
if (++i == 4)
{
c3[0] = (c4[0] << 2) + ((c4[1] & 0x30) >> 4);
c3[1] = ((c4[1] & 0xf) << 4) + ((c4[2] & 0x3c) >> 2);

View File

@@ -239,7 +239,6 @@ initAuthenticated(
{
boost::system::error_code ec;
// NOLINTNEXTLINE(bugprone-unused-return-value)
context.use_certificate_file(cert_file, boost::asio::ssl::context::pem, ec);
if (ec)
@@ -299,7 +298,6 @@ initAuthenticated(
{
boost::system::error_code ec;
// NOLINTNEXTLINE(bugprone-unused-return-value)
context.use_private_key_file(key_file, boost::asio::ssl::context::pem, ec);
if (ec)

View File

@@ -470,7 +470,6 @@ public:
m_io_context.run();
// NOLINTNEXTLINE(bugprone-unused-return-value)
m_socket.shutdown(boost::asio::ip::udp::socket::shutdown_send, ec);
m_socket.close();

View File

@@ -729,18 +729,21 @@ Reader::decodeUnicodeCodePoint(Token& token, Location& current, Location end, un
unsigned int surrogatePair;
if (*current != '\\' || *(current + 1) != 'u')
if (*(current++) == '\\' && *(current++) == 'u')
{
if (decodeUnicodeEscapeSequence(token, current, end, surrogatePair))
{
unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
}
else
return false;
}
else
return addError(
"expecting another \\u token to begin the second half of a unicode surrogate pair",
"expecting another \\u token to begin the second half of a "
"unicode surrogate pair",
token,
current);
current += 2; // skip two characters checked above
if (!decodeUnicodeEscapeSequence(token, current, end, surrogatePair))
return false;
unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
}
return true;

View File

@@ -319,7 +319,7 @@ StyledWriter::writeValue(Value const& value)
document_ += " : ";
writeValue(childValue);
if (++it; it == members.end())
if (++it == members.end())
break;
document_ += ",";

View File

@@ -74,10 +74,8 @@ BookDirs::const_iterator::operator++()
XRPL_ASSERT(index_ != zero, "xrpl::BookDirs::const_iterator::operator++ : nonzero index");
if (!cdirNext(*view_, cur_key_, sle_, entry_, index_))
{
if (index_ == 0)
cur_key_ = view_->succ(++cur_key_, next_quality_).value_or(zero);
if (index_ != 0 || cur_key_ == zero)
if (index_ != 0 ||
(cur_key_ = view_->succ(++cur_key_, next_quality_).value_or(zero)) == zero)
{
cur_key_ = key_;
entry_ = 0;
@@ -86,7 +84,9 @@ BookDirs::const_iterator::operator++()
else if (!cdirFirst(*view_, cur_key_, sle_, entry_, index_))
{
// LCOV_EXCL_START
UNREACHABLE("xrpl::BookDirs::const_iterator::operator++ : directory is empty");
UNREACHABLE(
"xrpl::BookDirs::const_iterator::operator++ : directory is "
"empty");
// LCOV_EXCL_STOP
}
}

View File

@@ -26,12 +26,6 @@ HTTPClient::initializeSSLContext(
httpClientSSLContext.emplace(sslVerifyDir, sslVerifyFile, sslVerify, j);
}
// Tear down the process-wide HTTP client SSL context created by
// initializeSSLContext(). After this call the optional holding the context is
// empty, so it must be re-initialized before further HTTPS use.
void
HTTPClient::cleanupSSLContext()
{
    httpClientSSLContext.reset();
}
//------------------------------------------------------------------------------
//
// Fetch a web page via http or https.

View File

@@ -79,7 +79,6 @@ registerSSLCerts(boost::asio::ssl::context& ctx, boost::system::error_code& ec,
SSL_CTX_set_cert_store(ctx.native_handle(), store.release());
#else
// NOLINTNEXTLINE(bugprone-unused-return-value)
ctx.set_default_verify_paths(ec);
#endif
}

View File

@@ -3,7 +3,6 @@
#include <xrpl/beast/core/SemanticVersion.h>
#include <xrpl/git/Git.h>
#include <xrpl/protocol/BuildInfo.h>
#include <xrpl/protocol/SystemParameters.h>
#include <boost/preprocessor/stringize.hpp>
@@ -81,7 +80,7 @@ getVersionString()
std::string const&
getFullVersionString()
{
static std::string const value = systemName() + "-" + getVersionString();
static std::string const value = "rippled-" + getVersionString();
return value;
}

View File

@@ -395,20 +395,10 @@ featureToName(uint256 const& f)
#pragma push_macro("XRPL_RETIRE_FIX")
#undef XRPL_RETIRE_FIX
// Compile-time gate used by the XRPL_FEATURE / XRPL_FIX macros: `fn` is a
// lambda returning the candidate feature-name string. Both static_asserts must
// hold at compile time before the raw string is handed to registerFeature().
consteval auto
enforceValidFeatureName(auto fn) -> char const*
{
    static_assert(validFeatureName(fn), "Invalid feature name");
    static_assert(validFeatureNameSize(fn), "Invalid feature name size");
    return fn();
}
#define XRPL_FEATURE(name, supported, vote) \
uint256 const feature##name = \
registerFeature(enforceValidFeatureName([] { return #name; }), supported, vote);
uint256 const feature##name = registerFeature(#name, supported, vote);
#define XRPL_FIX(name, supported, vote) \
uint256 const fix##name = \
registerFeature(enforceValidFeatureName([] { return "fix" #name; }), supported, vote);
uint256 const fix##name = registerFeature("fix" #name, supported, vote);
// clang-format off
#define XRPL_RETIRE_FEATURE(name) \

View File

@@ -23,9 +23,6 @@ STBase::STBase(SField const& n) : fName(&n)
STBase&
STBase::operator=(STBase const& t)
{
if (this == &t)
return *this;
if (!fName->isUseful())
fName = t.fName;
return *this;

View File

@@ -211,20 +211,6 @@ STTx::getSeqValue() const
return getSeqProxy().value();
}
// Return the account that pays this transaction's fee: the delegate when
// sfDelegate is present, otherwise the transaction's own sfAccount.
AccountID
STTx::getFeePayer() const
{
    // If sfDelegate is present, the delegate account is the payer
    // note: if a delegate is specified, its authorization to act on behalf of the account is
    // enforced in `Transactor::checkPermission`
    // cryptographic signature validity is checked separately (e.g., in `Transactor::checkSign`)
    if (isFieldPresent(sfDelegate))
        return getAccountID(sfDelegate);
    // Default payer
    return getAccountID(sfAccount);
}
void
STTx::sign(
PublicKey const& publicKey,
@@ -365,7 +351,7 @@ STTx::getMetaSQL(std::uint32_t inLedger, std::string const& escapedMetaData) con
{
Serializer s;
add(s);
return getMetaSQL(s, inLedger, TxnSql::txnSqlValidated, escapedMetaData);
return getMetaSQL(s, inLedger, txnSqlValidated, escapedMetaData);
}
// VFALCO This could be a free function elsewhere
@@ -373,7 +359,7 @@ std::string
STTx::getMetaSQL(
Serializer rawTxn,
std::uint32_t inLedger,
TxnSql status,
char status,
std::string const& escapedMetaData) const
{
static boost::format bfTrans("('%s', '%s', '%s', '%d', '%d', '%c', %s, %s)");
@@ -384,8 +370,8 @@ STTx::getMetaSQL(
return str(
boost::format(bfTrans) % to_string(getTransactionID()) % format->getName() %
toBase58(getAccountID(sfAccount)) % getFieldU32(sfSequence) % inLedger %
safe_cast<char>(status) % rTxn % escapedMetaData);
toBase58(getAccountID(sfAccount)) % getFieldU32(sfSequence) % inLedger % status % rTxn %
escapedMetaData);
}
static Expected<void, std::string>

View File

@@ -39,9 +39,6 @@ Consumer::~Consumer()
Consumer&
Consumer::operator=(Consumer const& other)
{
if (this == &other)
return *this;
// remove old ref
if (m_logic && m_entry)
m_logic->release(*m_entry);

View File

@@ -68,7 +68,6 @@ HTTPReply(int nStatus, std::string const& content, Json::Output const& output, b
return;
}
// NOLINTNEXTLINE(bugprone-switch-missing-default-case)
switch (nStatus)
{
case 200:

View File

@@ -352,7 +352,8 @@ Transactor::checkFee(PreclaimContext const& ctx, XRPAmount baseFee)
if (feePaid == beast::zero)
return tesSUCCESS;
auto const id = ctx.tx.getFeePayer();
auto const id = ctx.tx.isFieldPresent(sfDelegate) ? ctx.tx.getAccountID(sfDelegate)
: ctx.tx.getAccountID(sfAccount);
auto const sle = ctx.view.read(keylet::account(id));
if (!sle)
return terNO_ACCOUNT;
@@ -381,18 +382,32 @@ Transactor::payFee()
{
auto const feePaid = ctx_.tx[sfFee].xrp();
auto const feePayer = ctx_.tx.getFeePayer();
auto const sle = view().peek(keylet::account(feePayer));
if (!sle)
return tefINTERNAL; // LCOV_EXCL_LINE
if (ctx_.tx.isFieldPresent(sfDelegate))
{
// Delegated transactions are paid by the delegated account.
auto const delegate = ctx_.tx.getAccountID(sfDelegate);
auto const delegatedSle = view().peek(keylet::account(delegate));
if (!delegatedSle)
return tefINTERNAL; // LCOV_EXCL_LINE
// Deduct the fee, so it's not available during the transaction.
// Will only write the account back if the transaction succeeds.
sle->setFieldAmount(sfBalance, sle->getFieldAmount(sfBalance) - feePaid);
if (feePayer != account_)
view().update(sle); // done in `apply()` for the account
delegatedSle->setFieldAmount(sfBalance, delegatedSle->getFieldAmount(sfBalance) - feePaid);
view().update(delegatedSle);
}
else
{
auto const sle = view().peek(keylet::account(account_));
if (!sle)
return tefINTERNAL; // LCOV_EXCL_LINE
// Deduct the fee, so it's not available during the transaction.
// Will only write the account back if the transaction succeeds.
mSourceBalance -= feePaid;
sle->setFieldAmount(sfBalance, mSourceBalance);
// VFALCO Should we call view().rawDestroyXRP() here as well?
}
// VFALCO Should we call view().rawDestroyXRP() here as well?
return tesSUCCESS;
}
@@ -591,7 +606,8 @@ Transactor::apply()
if (sle)
{
preFeeBalance_ = STAmount{(*sle)[sfBalance]}.xrp();
mPriorBalance = STAmount{(*sle)[sfBalance]}.xrp();
mSourceBalance = mPriorBalance;
TER result = consumeSeqProxy(sle);
if (result != tesSUCCESS)
@@ -1007,7 +1023,9 @@ Transactor::reset(XRPAmount fee)
if (!txnAcct)
return {tefINTERNAL, beast::zero};
auto const payerSle = view().peek(keylet::account(ctx_.tx.getFeePayer()));
auto const payerSle = ctx_.tx.isFieldPresent(sfDelegate)
? view().peek(keylet::account(ctx_.tx.getAccountID(sfDelegate)))
: txnAcct;
if (!payerSle)
return {tefINTERNAL, beast::zero}; // LCOV_EXCL_LINE

View File

@@ -1,4 +1,5 @@
#include <xrpl/basics/Log.h>
#include <xrpl/basics/mulDiv.h>
#include <xrpl/beast/utility/instrumentation.h>
#include <xrpl/ledger/CredentialHelpers.h>
#include <xrpl/ledger/View.h>
@@ -10,7 +11,7 @@
#include <xrpl/tx/transactors/account/DeleteAccount.h>
#include <xrpl/tx/transactors/account/SetSignerList.h>
#include <xrpl/tx/transactors/delegate/DelegateSet.h>
#include <xrpl/tx/transactors/did/DIDDelete.h>
#include <xrpl/tx/transactors/did/DID.h>
#include <xrpl/tx/transactors/nft/NFTokenUtils.h>
#include <xrpl/tx/transactors/oracle/DeleteOracle.h>
#include <xrpl/tx/transactors/payment/DepositPreauth.h>
@@ -371,10 +372,9 @@ DeleteAccount::doApply()
return ter;
// Transfer any XRP remaining after the fee is paid to the destination:
auto const remainingBalance = src->getFieldAmount(sfBalance).xrp();
(*dst)[sfBalance] = (*dst)[sfBalance] + remainingBalance;
(*src)[sfBalance] = (*src)[sfBalance] - remainingBalance;
ctx_.deliver(remainingBalance);
(*dst)[sfBalance] = (*dst)[sfBalance] + mSourceBalance;
(*src)[sfBalance] = (*src)[sfBalance] - mSourceBalance;
ctx_.deliver(mSourceBalance);
XRPL_ASSERT(
(*src)[sfBalance] == XRPAmount(0), "xrpl::DeleteAccount::doApply : source balance is zero");
@@ -388,7 +388,7 @@ DeleteAccount::doApply()
}
// Re-arm the password change fee if we can and need to.
if (remainingBalance > XRPAmount(0) && dst->isFlag(lsfPasswordSpent))
if (mSourceBalance > XRPAmount(0) && dst->isFlag(lsfPasswordSpent))
dst->clearFlag(lsfPasswordSpent);
view().update(dst);

View File

@@ -491,7 +491,7 @@ SetAccount::doApply()
if (messageKey.empty())
{
JLOG(j_.debug()) << "clear message key";
JLOG(j_.debug()) << "set message key";
sle->makeFieldAbsent(sfMessageKey);
}
else

View File

@@ -306,7 +306,7 @@ SetSignerList::replaceSignerList()
// We check the reserve against the starting balance because we want to
// allow dipping into the reserve to pay fees. This behavior is consistent
// with CreateTicket.
if (preFeeBalance_ < newReserve)
if (mPriorBalance < newReserve)
return tecINSUFFICIENT_RESERVE;
// Everything's ducky. Add the ltSIGNER_LIST to the ledger.

View File

@@ -337,7 +337,7 @@ enum class DepositAuthPolicy { normal, dstCanBypass };
struct TransferHelperSubmittingAccountInfo
{
AccountID account;
STAmount preFeeBalance_;
STAmount preFeeBalance;
STAmount postFeeBalance;
};
@@ -423,7 +423,7 @@ transferHelper(
if (!submittingAccountInfo || submittingAccountInfo->account != src ||
submittingAccountInfo->postFeeBalance != curBal)
return curBal;
return submittingAccountInfo->preFeeBalance_;
return submittingAccountInfo->preFeeBalance;
}();
if (availableBalance < amt + reserve)
@@ -1852,8 +1852,7 @@ XChainCommit::doApply()
auto const amount = ctx_.tx[sfAmount];
auto const bridgeSpec = ctx_.tx[sfXChainBridge];
auto const sleAccount = psb.read(keylet::account(account));
if (!sleAccount)
if (!psb.read(keylet::account(account)))
return tecINTERNAL; // LCOV_EXCL_LINE
auto const sleBridge = readBridge(psb, bridgeSpec);
@@ -1864,7 +1863,7 @@ XChainCommit::doApply()
// Support dipping into reserves to pay the fee
TransferHelperSubmittingAccountInfo submittingAccountInfo{
account_, preFeeBalance_, (*sleAccount)[sfBalance]};
account_, mPriorBalance, mSourceBalance};
auto const thTer = transferHelper(
psb,
@@ -2133,7 +2132,7 @@ XChainCreateAccountCommit::doApply()
// Support dipping into reserves to pay the fee
TransferHelperSubmittingAccountInfo submittingAccountInfo{
account_, preFeeBalance_, (*sle)[sfBalance]};
account_, mPriorBalance, mSourceBalance};
STAmount const toTransfer = amount + reward;
auto const thTer = transferHelper(
psb,

View File

@@ -25,11 +25,15 @@ CancelCheck::preclaim(PreclaimContext const& ctx)
return tecNO_ENTRY;
}
using duration = NetClock::duration;
using timepoint = NetClock::time_point;
auto const optExpiry = (*sleCheck)[~sfExpiration];
// Expiration is defined in terms of the close time of the parent
// ledger, because we definitively know the time that it closed but
// we do not know the closing time of the ledger that is under
// construction.
if (!hasExpired(ctx.view, (*sleCheck)[~sfExpiration]))
if (!optExpiry || (ctx.view.parentCloseTime() < timepoint{duration{*optExpiry}}))
{
// If the check is not yet expired, then only the creator or the
// destination may cancel the check.

View File

@@ -309,7 +309,7 @@ CashCheck::doApply()
// Can the account cover the trust line's reserve?
if (std::uint32_t const ownerCount = {sleDst->at(sfOwnerCount)};
preFeeBalance_ < psb.fees().accountReserve(ownerCount + 1))
mPriorBalance < psb.fees().accountReserve(ownerCount + 1))
{
JLOG(j_.trace()) << "Trust line does not exist. "
"Insufficent reserve to create line.";

View File

@@ -140,7 +140,7 @@ CreateCheck::doApply()
{
STAmount const reserve{view().fees().accountReserve(sle->getFieldU32(sfOwnerCount) + 1)};
if (preFeeBalance_ < reserve)
if (mPriorBalance < reserve)
return tecINSUFFICIENT_RESERVE;
}

View File

@@ -1,113 +0,0 @@
#include <xrpl/basics/Log.h>
#include <xrpl/ledger/ApplyView.h>
#include <xrpl/ledger/CredentialHelpers.h>
#include <xrpl/ledger/View.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/TxFlags.h>
#include <xrpl/tx/transactors/credentials/CredentialAccept.h>
#include <chrono>
namespace xrpl {
using namespace credentials;
// Flag mask for CredentialAccept: with fixInvalidTxFlags enabled only the
// universal flag bits are permitted; otherwise any flags are accepted.
std::uint32_t
CredentialAccept::getFlagsMask(PreflightContext const& ctx)
{
    // 0 means "Allow any flags"
    return ctx.rules.enabled(fixInvalidTxFlags) ? tfUniversalMask : 0;
}
// Stateless (preflight) validation: Issuer must be non-zero and
// CredentialType must be non-empty and no longer than maxCredentialTypeLength.
NotTEC
CredentialAccept::preflight(PreflightContext const& ctx)
{
    if (!ctx.tx[sfIssuer])
    {
        JLOG(ctx.j.trace()) << "Malformed transaction: Issuer field zeroed.";
        return temINVALID_ACCOUNT_ID;
    }
    auto const credType = ctx.tx[sfCredentialType];
    if (credType.empty() || (credType.size() > maxCredentialTypeLength))
    {
        JLOG(ctx.j.trace()) << "Malformed transaction: invalid size of CredentialType.";
        return temMALFORMED;
    }
    return tesSUCCESS;
}
// Ledger-state (preclaim) validation: the issuer account and the credential
// object must exist, and the credential must not already be accepted.
TER
CredentialAccept::preclaim(PreclaimContext const& ctx)
{
    AccountID const subject = ctx.tx[sfAccount];
    AccountID const issuer = ctx.tx[sfIssuer];
    auto const credType(ctx.tx[sfCredentialType]);
    if (!ctx.view.exists(keylet::account(issuer)))
    {
        JLOG(ctx.j.warn()) << "No issuer: " << to_string(issuer);
        return tecNO_ISSUER;
    }
    auto const sleCred = ctx.view.read(keylet::credential(subject, issuer, credType));
    if (!sleCred)
    {
        JLOG(ctx.j.warn()) << "No credential: " << to_string(subject) << ", " << to_string(issuer)
                           << ", " << credType;
        return tecNO_ENTRY;
    }
    if (sleCred->getFieldU32(sfFlags) & lsfAccepted)
    {
        JLOG(ctx.j.warn()) << "Credential already accepted: " << to_string(subject) << ", "
                           << to_string(issuer) << ", " << credType;
        return tecDUPLICATE;
    }
    return tesSUCCESS;
}
// Apply: set lsfAccepted on the credential and move the owner-count entry
// from the issuer to the subject. An expired credential is deleted instead
// and the transaction fails with tecEXPIRED.
TER
CredentialAccept::doApply()
{
    AccountID const issuer{ctx_.tx[sfIssuer]};
    // Both exist as credential object exist itself (checked in preclaim)
    auto const sleSubject = view().peek(keylet::account(account_));
    auto const sleIssuer = view().peek(keylet::account(issuer));
    if (!sleSubject || !sleIssuer)
        return tefINTERNAL; // LCOV_EXCL_LINE
    {
        // The subject's owner count is about to grow by one; check the
        // pre-fee balance against the correspondingly increased reserve.
        STAmount const reserve{
            view().fees().accountReserve(sleSubject->getFieldU32(sfOwnerCount) + 1)};
        if (preFeeBalance_ < reserve)
            return tecINSUFFICIENT_RESERVE;
    }
    auto const credType(ctx_.tx[sfCredentialType]);
    Keylet const credentialKey = keylet::credential(account_, issuer, credType);
    auto const sleCred = view().peek(credentialKey); // Checked in preclaim()
    if (checkExpired(sleCred, view().header().parentCloseTime))
    {
        JLOG(j_.trace()) << "Credential is expired: " << sleCred->getText();
        // delete expired credentials even if the transaction failed
        auto const err = credentials::deleteSLE(view(), sleCred, j_);
        return isTesSuccess(err) ? tecEXPIRED : err;
    }
    sleCred->setFieldU32(sfFlags, lsfAccepted);
    view().update(sleCred);
    // Ownership (and the associated reserve obligation) moves issuer -> subject.
    adjustOwnerCount(view(), sleIssuer, -1, j_);
    adjustOwnerCount(view(), sleSubject, 1, j_);
    return tesSUCCESS;
}
} // namespace xrpl

View File

@@ -1,164 +0,0 @@
#include <xrpl/basics/Log.h>
#include <xrpl/ledger/ApplyView.h>
#include <xrpl/ledger/CredentialHelpers.h>
#include <xrpl/ledger/View.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/TxFlags.h>
#include <xrpl/tx/transactors/credentials/CredentialCreate.h>
#include <chrono>
namespace xrpl {
/*
Credentials
======
A verifiable credentials (VC
https://en.wikipedia.org/wiki/Verifiable_credentials), as defined by the W3C
specification (https://www.w3.org/TR/vc-data-model-2.0/), is a
secure and tamper-evident way to represent information about a subject, such
as an individual, organization, or even an IoT device. These credentials are
issued by a trusted entity and can be verified by third parties without
directly involving the issuer at all.
*/
using namespace credentials;
// Flag mask for CredentialCreate: with fixInvalidTxFlags enabled only the
// universal flag bits are permitted; otherwise any flags are accepted.
std::uint32_t
CredentialCreate::getFlagsMask(PreflightContext const& ctx)
{
    // 0 means "Allow any flags"
    return ctx.rules.enabled(fixInvalidTxFlags) ? tfUniversalMask : 0;
}
// Stateless (preflight) validation: Subject must be non-zero, URI (when
// present) and CredentialType must be non-empty and within their maximums.
NotTEC
CredentialCreate::preflight(PreflightContext const& ctx)
{
    auto const& tx = ctx.tx;
    auto& j = ctx.j;
    if (!tx[sfSubject])
    {
        JLOG(j.trace()) << "Malformed transaction: Invalid Subject";
        return temMALFORMED;
    }
    auto const uri = tx[~sfURI];
    if (uri && (uri->empty() || (uri->size() > maxCredentialURILength)))
    {
        JLOG(j.trace()) << "Malformed transaction: invalid size of URI.";
        return temMALFORMED;
    }
    auto const credType = tx[sfCredentialType];
    if (credType.empty() || (credType.size() > maxCredentialTypeLength))
    {
        JLOG(j.trace()) << "Malformed transaction: invalid size of CredentialType.";
        return temMALFORMED;
    }
    return tesSUCCESS;
}
// Ledger-state (preclaim) validation: the subject account must exist and the
// same (subject, issuer, type) credential must not already exist.
TER
CredentialCreate::preclaim(PreclaimContext const& ctx)
{
    auto const credType(ctx.tx[sfCredentialType]);
    auto const subject = ctx.tx[sfSubject];
    if (!ctx.view.exists(keylet::account(subject)))
    {
        JLOG(ctx.j.trace()) << "Subject doesn't exist.";
        return tecNO_TARGET;
    }
    if (ctx.view.exists(keylet::credential(subject, ctx.tx[sfAccount], credType)))
    {
        JLOG(ctx.j.trace()) << "Credential already exists.";
        return tecDUPLICATE;
    }
    return tesSUCCESS;
}
// Apply: build the Credential SLE, validate any expiration against the parent
// ledger close time, check the issuer's reserve, link the entry into the
// issuer's (and, when distinct, the subject's) owner directory, and mark
// self-issued credentials accepted immediately.
TER
CredentialCreate::doApply()
{
    auto const subject = ctx_.tx[sfSubject];
    auto const credType(ctx_.tx[sfCredentialType]);
    Keylet const credentialKey = keylet::credential(subject, account_, credType);
    auto const sleCred = std::make_shared<SLE>(credentialKey);
    if (!sleCred)
        return tefINTERNAL; // LCOV_EXCL_LINE
    auto const optExp = ctx_.tx[~sfExpiration];
    if (optExp)
    {
        // Expiration is measured against the parent ledger's close time.
        std::uint32_t const closeTime =
            ctx_.view().header().parentCloseTime.time_since_epoch().count();
        if (closeTime > *optExp)
        {
            JLOG(j_.trace()) << "Malformed transaction: "
                                "Expiration time is in the past.";
            return tecEXPIRED;
        }
        sleCred->setFieldU32(sfExpiration, *optExp);
    }
    auto const sleIssuer = view().peek(keylet::account(account_));
    if (!sleIssuer)
        return tefINTERNAL; // LCOV_EXCL_LINE
    {
        // The issuer's owner count grows by one; check the pre-fee balance
        // against the correspondingly increased reserve.
        STAmount const reserve{
            view().fees().accountReserve(sleIssuer->getFieldU32(sfOwnerCount) + 1)};
        if (preFeeBalance_ < reserve)
            return tecINSUFFICIENT_RESERVE;
    }
    sleCred->setAccountID(sfSubject, subject);
    sleCred->setAccountID(sfIssuer, account_);
    sleCred->setFieldVL(sfCredentialType, credType);
    if (ctx_.tx.isFieldPresent(sfURI))
        sleCred->setFieldVL(sfURI, ctx_.tx.getFieldVL(sfURI));
    {
        auto const page =
            view().dirInsert(keylet::ownerDir(account_), credentialKey, describeOwnerDir(account_));
        JLOG(j_.trace()) << "Adding Credential to owner directory " << to_string(credentialKey.key)
                         << ": " << (page ? "success" : "failure");
        if (!page)
            return tecDIR_FULL;
        sleCred->setFieldU64(sfIssuerNode, *page);
        adjustOwnerCount(view(), sleIssuer, 1, j_);
    }
    if (subject == account_)
    {
        // Self-issued credentials need no separate acceptance step.
        sleCred->setFieldU32(sfFlags, lsfAccepted);
    }
    else
    {
        auto const page =
            view().dirInsert(keylet::ownerDir(subject), credentialKey, describeOwnerDir(subject));
        JLOG(j_.trace()) << "Adding Credential to owner directory " << to_string(credentialKey.key)
                         << ": " << (page ? "success" : "failure");
        if (!page)
            return tecDIR_FULL;
        sleCred->setFieldU64(sfSubjectNode, *page);
        view().update(view().peek(keylet::account(subject)));
    }
    view().insert(sleCred);
    return tesSUCCESS;
}
} // namespace xrpl

View File

@@ -1,90 +0,0 @@
#include <xrpl/basics/Log.h>
#include <xrpl/ledger/ApplyView.h>
#include <xrpl/ledger/CredentialHelpers.h>
#include <xrpl/ledger/View.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/TxFlags.h>
#include <xrpl/tx/transactors/credentials/CredentialDelete.h>
#include <chrono>
namespace xrpl {
using namespace credentials;
// Flag mask for CredentialDelete: with fixInvalidTxFlags enabled only the
// universal flag bits are permitted; otherwise any flags are accepted.
std::uint32_t
CredentialDelete::getFlagsMask(PreflightContext const& ctx)
{
    // 0 means "Allow any flags"
    return ctx.rules.enabled(fixInvalidTxFlags) ? tfUniversalMask : 0;
}
// Stateless (preflight) validation: at least one of Subject/Issuer must be
// present and non-zero, and CredentialType must be non-empty and within
// bounds.
NotTEC
CredentialDelete::preflight(PreflightContext const& ctx)
{
    auto const subject = ctx.tx[~sfSubject];
    auto const issuer = ctx.tx[~sfIssuer];
    if (!subject && !issuer)
    {
        // Neither field is present, the transaction is malformed.
        JLOG(ctx.j.trace()) << "Malformed transaction: "
                               "No Subject or Issuer fields.";
        return temMALFORMED;
    }
    // Make sure that the passed account is valid.
    if ((subject && subject->isZero()) || (issuer && issuer->isZero()))
    {
        JLOG(ctx.j.trace()) << "Malformed transaction: Subject or Issuer "
                               "field zeroed.";
        return temINVALID_ACCOUNT_ID;
    }
    auto const credType = ctx.tx[sfCredentialType];
    if (credType.empty() || (credType.size() > maxCredentialTypeLength))
    {
        JLOG(ctx.j.trace()) << "Malformed transaction: invalid size of CredentialType.";
        return temMALFORMED;
    }
    return tesSUCCESS;
}
// Ledger-state (preclaim) validation: the credential keyed by
// (subject, issuer, type) must exist. Absent Subject/Issuer default to the
// submitting account.
TER
CredentialDelete::preclaim(PreclaimContext const& ctx)
{
    AccountID const account{ctx.tx[sfAccount]};
    auto const subject = ctx.tx[~sfSubject].value_or(account);
    auto const issuer = ctx.tx[~sfIssuer].value_or(account);
    auto const credType(ctx.tx[sfCredentialType]);
    if (!ctx.view.exists(keylet::credential(subject, issuer, credType)))
        return tecNO_ENTRY;
    return tesSUCCESS;
}
// Apply: delete the credential. A third party (neither subject nor issuer)
// may only delete a credential that has already expired.
TER
CredentialDelete::doApply()
{
    auto const subject = ctx_.tx[~sfSubject].value_or(account_);
    auto const issuer = ctx_.tx[~sfIssuer].value_or(account_);
    auto const credType(ctx_.tx[sfCredentialType]);
    auto const sleCred = view().peek(keylet::credential(subject, issuer, credType));
    if (!sleCred)
        return tefINTERNAL; // LCOV_EXCL_LINE
    if ((subject != account_) && (issuer != account_) &&
        !checkExpired(sleCred, ctx_.view().header().parentCloseTime))
    {
        JLOG(j_.trace()) << "Can't delete non-expired credential.";
        return tecNO_PERMISSION;
    }
    return deleteSLE(view(), sleCred, j_);
}
} // namespace xrpl

View File

@@ -0,0 +1,341 @@
#include <xrpl/basics/Log.h>
#include <xrpl/ledger/ApplyView.h>
#include <xrpl/ledger/CredentialHelpers.h>
#include <xrpl/ledger/View.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/TxFlags.h>
#include <xrpl/tx/transactors/credentials/Credentials.h>
#include <chrono>
namespace xrpl {
/*
Credentials
======
A verifiable credentials (VC
https://en.wikipedia.org/wiki/Verifiable_credentials), as defined by the W3C
specification (https://www.w3.org/TR/vc-data-model-2.0/), is a
secure and tamper-evident way to represent information about a subject, such
as an individual, organization, or even an IoT device. These credentials are
issued by a trusted entity and can be verified by third parties without
directly involving the issuer at all.
*/
using namespace credentials;
// ------- CREATE --------------------------
// Flag mask for CredentialCreate: with fixInvalidTxFlags enabled only the
// universal flag bits are permitted; otherwise any flags are accepted.
std::uint32_t
CredentialCreate::getFlagsMask(PreflightContext const& ctx)
{
    // 0 means "Allow any flags"
    return ctx.rules.enabled(fixInvalidTxFlags) ? tfUniversalMask : 0;
}
// Stateless (preflight) validation: Subject must be non-zero, URI (when
// present) and CredentialType must be non-empty and within their maximums.
NotTEC
CredentialCreate::preflight(PreflightContext const& ctx)
{
    auto const& tx = ctx.tx;
    auto& j = ctx.j;
    if (!tx[sfSubject])
    {
        JLOG(j.trace()) << "Malformed transaction: Invalid Subject";
        return temMALFORMED;
    }
    auto const uri = tx[~sfURI];
    if (uri && (uri->empty() || (uri->size() > maxCredentialURILength)))
    {
        JLOG(j.trace()) << "Malformed transaction: invalid size of URI.";
        return temMALFORMED;
    }
    auto const credType = tx[sfCredentialType];
    if (credType.empty() || (credType.size() > maxCredentialTypeLength))
    {
        JLOG(j.trace()) << "Malformed transaction: invalid size of CredentialType.";
        return temMALFORMED;
    }
    return tesSUCCESS;
}
// Ledger-state (preclaim) validation: the subject account must exist and the
// same (subject, issuer, type) credential must not already exist.
TER
CredentialCreate::preclaim(PreclaimContext const& ctx)
{
    auto const credType(ctx.tx[sfCredentialType]);
    auto const subject = ctx.tx[sfSubject];
    if (!ctx.view.exists(keylet::account(subject)))
    {
        JLOG(ctx.j.trace()) << "Subject doesn't exist.";
        return tecNO_TARGET;
    }
    if (ctx.view.exists(keylet::credential(subject, ctx.tx[sfAccount], credType)))
    {
        JLOG(ctx.j.trace()) << "Credential already exists.";
        return tecDUPLICATE;
    }
    return tesSUCCESS;
}
// Apply: build the Credential SLE, validate any expiration against the parent
// ledger close time, check the issuer's reserve, link the entry into the
// issuer's (and, when distinct, the subject's) owner directory, and mark
// self-issued credentials accepted immediately.
TER
CredentialCreate::doApply()
{
    auto const subject = ctx_.tx[sfSubject];
    auto const credType(ctx_.tx[sfCredentialType]);
    Keylet const credentialKey = keylet::credential(subject, account_, credType);
    auto const sleCred = std::make_shared<SLE>(credentialKey);
    if (!sleCred)
        return tefINTERNAL; // LCOV_EXCL_LINE
    auto const optExp = ctx_.tx[~sfExpiration];
    if (optExp)
    {
        // Expiration is measured against the parent ledger's close time.
        std::uint32_t const closeTime =
            ctx_.view().header().parentCloseTime.time_since_epoch().count();
        if (closeTime > *optExp)
        {
            JLOG(j_.trace()) << "Malformed transaction: "
                                "Expiration time is in the past.";
            return tecEXPIRED;
        }
        sleCred->setFieldU32(sfExpiration, ctx_.tx.getFieldU32(sfExpiration));
    }
    auto const sleIssuer = view().peek(keylet::account(account_));
    if (!sleIssuer)
        return tefINTERNAL; // LCOV_EXCL_LINE
    {
        // The issuer's owner count grows by one; check the prior balance
        // against the correspondingly increased reserve.
        STAmount const reserve{
            view().fees().accountReserve(sleIssuer->getFieldU32(sfOwnerCount) + 1)};
        if (mPriorBalance < reserve)
            return tecINSUFFICIENT_RESERVE;
    }
    sleCred->setAccountID(sfSubject, subject);
    sleCred->setAccountID(sfIssuer, account_);
    sleCred->setFieldVL(sfCredentialType, credType);
    if (ctx_.tx.isFieldPresent(sfURI))
        sleCred->setFieldVL(sfURI, ctx_.tx.getFieldVL(sfURI));
    {
        auto const page =
            view().dirInsert(keylet::ownerDir(account_), credentialKey, describeOwnerDir(account_));
        JLOG(j_.trace()) << "Adding Credential to owner directory " << to_string(credentialKey.key)
                         << ": " << (page ? "success" : "failure");
        if (!page)
            return tecDIR_FULL;
        sleCred->setFieldU64(sfIssuerNode, *page);
        adjustOwnerCount(view(), sleIssuer, 1, j_);
    }
    if (subject == account_)
    {
        // Self-issued credentials need no separate acceptance step.
        sleCred->setFieldU32(sfFlags, lsfAccepted);
    }
    else
    {
        auto const page =
            view().dirInsert(keylet::ownerDir(subject), credentialKey, describeOwnerDir(subject));
        JLOG(j_.trace()) << "Adding Credential to owner directory " << to_string(credentialKey.key)
                         << ": " << (page ? "success" : "failure");
        if (!page)
            return tecDIR_FULL;
        sleCred->setFieldU64(sfSubjectNode, *page);
        view().update(view().peek(keylet::account(subject)));
    }
    view().insert(sleCred);
    return tesSUCCESS;
}
// ------- DELETE --------------------------
// Flag mask for CredentialDelete: with fixInvalidTxFlags enabled only the
// universal flag bits are permitted; otherwise any flags are accepted.
std::uint32_t
CredentialDelete::getFlagsMask(PreflightContext const& ctx)
{
    // 0 means "Allow any flags"
    return ctx.rules.enabled(fixInvalidTxFlags) ? tfUniversalMask : 0;
}
// Stateless (preflight) validation: at least one of Subject/Issuer must be
// present and non-zero, and CredentialType must be non-empty and within
// bounds.
NotTEC
CredentialDelete::preflight(PreflightContext const& ctx)
{
    auto const subject = ctx.tx[~sfSubject];
    auto const issuer = ctx.tx[~sfIssuer];
    if (!subject && !issuer)
    {
        // Neither field is present, the transaction is malformed.
        JLOG(ctx.j.trace()) << "Malformed transaction: "
                               "No Subject or Issuer fields.";
        return temMALFORMED;
    }
    // Make sure that the passed account is valid.
    if ((subject && subject->isZero()) || (issuer && issuer->isZero()))
    {
        JLOG(ctx.j.trace()) << "Malformed transaction: Subject or Issuer "
                               "field zeroed.";
        return temINVALID_ACCOUNT_ID;
    }
    auto const credType = ctx.tx[sfCredentialType];
    if (credType.empty() || (credType.size() > maxCredentialTypeLength))
    {
        JLOG(ctx.j.trace()) << "Malformed transaction: invalid size of CredentialType.";
        return temMALFORMED;
    }
    return tesSUCCESS;
}
// Ledger-state (preclaim) validation: the credential keyed by
// (subject, issuer, type) must exist. Absent Subject/Issuer default to the
// submitting account.
TER
CredentialDelete::preclaim(PreclaimContext const& ctx)
{
    AccountID const account{ctx.tx[sfAccount]};
    auto const subject = ctx.tx[~sfSubject].value_or(account);
    auto const issuer = ctx.tx[~sfIssuer].value_or(account);
    auto const credType(ctx.tx[sfCredentialType]);
    if (!ctx.view.exists(keylet::credential(subject, issuer, credType)))
        return tecNO_ENTRY;
    return tesSUCCESS;
}
// Apply: delete the credential. A third party (neither subject nor issuer)
// may only delete a credential that has already expired.
TER
CredentialDelete::doApply()
{
    auto const subject = ctx_.tx[~sfSubject].value_or(account_);
    auto const issuer = ctx_.tx[~sfIssuer].value_or(account_);
    auto const credType(ctx_.tx[sfCredentialType]);
    auto const sleCred = view().peek(keylet::credential(subject, issuer, credType));
    if (!sleCred)
        return tefINTERNAL; // LCOV_EXCL_LINE
    if ((subject != account_) && (issuer != account_) &&
        !checkExpired(sleCred, ctx_.view().header().parentCloseTime))
    {
        JLOG(j_.trace()) << "Can't delete non-expired credential.";
        return tecNO_PERMISSION;
    }
    return deleteSLE(view(), sleCred, j_);
}
// ------- APPLY --------------------------
// Flag mask for CredentialAccept: with fixInvalidTxFlags enabled only the
// universal flag bits are permitted; otherwise any flags are accepted.
std::uint32_t
CredentialAccept::getFlagsMask(PreflightContext const& ctx)
{
    // 0 means "Allow any flags"
    return ctx.rules.enabled(fixInvalidTxFlags) ? tfUniversalMask : 0;
}
// Stateless (preflight) validation: Issuer must be non-zero and
// CredentialType must be non-empty and no longer than maxCredentialTypeLength.
NotTEC
CredentialAccept::preflight(PreflightContext const& ctx)
{
    if (!ctx.tx[sfIssuer])
    {
        JLOG(ctx.j.trace()) << "Malformed transaction: Issuer field zeroed.";
        return temINVALID_ACCOUNT_ID;
    }
    auto const credType = ctx.tx[sfCredentialType];
    if (credType.empty() || (credType.size() > maxCredentialTypeLength))
    {
        JLOG(ctx.j.trace()) << "Malformed transaction: invalid size of CredentialType.";
        return temMALFORMED;
    }
    return tesSUCCESS;
}
// Ledger-state (preclaim) validation: the issuer account and the credential
// object must exist, and the credential must not already be accepted.
TER
CredentialAccept::preclaim(PreclaimContext const& ctx)
{
    AccountID const subject = ctx.tx[sfAccount];
    AccountID const issuer = ctx.tx[sfIssuer];
    auto const credType(ctx.tx[sfCredentialType]);
    if (!ctx.view.exists(keylet::account(issuer)))
    {
        JLOG(ctx.j.warn()) << "No issuer: " << to_string(issuer);
        return tecNO_ISSUER;
    }
    auto const sleCred = ctx.view.read(keylet::credential(subject, issuer, credType));
    if (!sleCred)
    {
        JLOG(ctx.j.warn()) << "No credential: " << to_string(subject) << ", " << to_string(issuer)
                           << ", " << credType;
        return tecNO_ENTRY;
    }
    if (sleCred->getFieldU32(sfFlags) & lsfAccepted)
    {
        JLOG(ctx.j.warn()) << "Credential already accepted: " << to_string(subject) << ", "
                           << to_string(issuer) << ", " << credType;
        return tecDUPLICATE;
    }
    return tesSUCCESS;
}
// Apply: set lsfAccepted on the credential and move the owner-count entry
// from the issuer to the subject. An expired credential is deleted instead
// and the transaction fails with tecEXPIRED.
TER
CredentialAccept::doApply()
{
    AccountID const issuer{ctx_.tx[sfIssuer]};
    // Both exist as credential object exist itself (checked in preclaim)
    auto const sleSubject = view().peek(keylet::account(account_));
    auto const sleIssuer = view().peek(keylet::account(issuer));
    if (!sleSubject || !sleIssuer)
        return tefINTERNAL; // LCOV_EXCL_LINE
    {
        // The subject's owner count is about to grow by one; check the
        // prior balance against the correspondingly increased reserve.
        STAmount const reserve{
            view().fees().accountReserve(sleSubject->getFieldU32(sfOwnerCount) + 1)};
        if (mPriorBalance < reserve)
            return tecINSUFFICIENT_RESERVE;
    }
    auto const credType(ctx_.tx[sfCredentialType]);
    Keylet const credentialKey = keylet::credential(account_, issuer, credType);
    auto const sleCred = view().peek(credentialKey); // Checked in preclaim()
    if (checkExpired(sleCred, view().header().parentCloseTime))
    {
        JLOG(j_.trace()) << "Credential is expired: " << sleCred->getText();
        // delete expired credentials even if the transaction failed
        auto const err = credentials::deleteSLE(view(), sleCred, j_);
        return isTesSuccess(err) ? tecEXPIRED : err;
    }
    sleCred->setFieldU32(sfFlags, lsfAccepted);
    view().update(sleCred);
    // Ownership (and the associated reserve obligation) moves issuer -> subject.
    adjustOwnerCount(view(), sleIssuer, -1, j_);
    adjustOwnerCount(view(), sleSubject, 1, j_);
    return tesSUCCESS;
}
} // namespace xrpl

View File

@@ -70,7 +70,7 @@ DelegateSet::doApply()
STAmount const reserve{
ctx_.view().fees().accountReserve(sleOwner->getFieldU32(sfOwnerCount) + 1)};
if (preFeeBalance_ < reserve)
if (mPriorBalance < reserve)
return tecINSUFFICIENT_RESERVE;
auto const& permissions = ctx_.tx.getFieldArray(sfPermissions);

View File

@@ -172,7 +172,7 @@ AMMClawback::applyGuts(Sandbox& sb)
0,
FreezeHandling::fhIGNORE_FREEZE,
WithdrawAll::Yes,
preFeeBalance_,
mPriorBalance,
ctx_.journal);
else
std::tie(result, newLPTokenBalance, amountWithdraw, amount2Withdraw) =
@@ -251,7 +251,7 @@ AMMClawback::equalWithdrawMatchingOneAmount(
0,
FreezeHandling::fhIGNORE_FREEZE,
WithdrawAll::Yes,
preFeeBalance_,
mPriorBalance,
ctx_.journal);
auto const& rules = sb.rules();
@@ -282,7 +282,7 @@ AMMClawback::equalWithdrawMatchingOneAmount(
0,
FreezeHandling::fhIGNORE_FREEZE,
WithdrawAll::No,
preFeeBalance_,
mPriorBalance,
ctx_.journal);
}
@@ -301,7 +301,7 @@ AMMClawback::equalWithdrawMatchingOneAmount(
0,
FreezeHandling::fhIGNORE_FREEZE,
WithdrawAll::No,
preFeeBalance_,
mPriorBalance,
ctx_.journal);
}

View File

@@ -17,6 +17,7 @@ AMMDeposit::checkExtraFeatures(PreflightContext const& ctx)
// Flag mask for AMMDeposit: only the bits in tfAMMDepositMask are valid,
// regardless of the rules in `ctx` (the parameter is unused here).
std::uint32_t
AMMDeposit::getFlagsMask(PreflightContext const& ctx)
{
    return tfAMMDepositMask;
}

Some files were not shown because too many files have changed in this diff Show More