Mirror of https://github.com/XRPLF/rippled.git (synced 2025-12-02 08:55:53 +00:00)

Compare commits: ab9644267d ... ripple/wam (26 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 337358a065 | |
| | 5c3b1f6f29 | |
| | 6733689102 | |
| | c14bc53aa3 | |
| | ce3eec85ee | |
| | 817f9c4f8c | |
| | 6be8f2124c | |
| | edfed06001 | |
| | 1c646dba91 | |
| | 6781068058 | |
| | cfe57c1dfe | |
| | c34d09a971 | |
| | ebd90c4742 | |
| | ba52d34828 | |
| | 1b6312afb3 | |
| | bf32dc2e72 | |
| | a15d65f7a2 | |
| | 2de8488855 | |
| | 129aa4bfaa | |
| | b1d70db63b | |
| | f03c3aafe4 | |
| | 51a9f106d1 | |
| | bfc048e3fe | |
| | 83418644f7 | |
| | dbc9dd5bfc | |
| | 45ab15d4b5 | |
.github/CODEOWNERS (vendored, 6 lines changed)
@@ -1,8 +1,2 @@
# Allow anyone to review any change by default.
*

# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers

.github/actions/build-deps/action.yml (vendored, 28 lines changed)
@@ -10,40 +10,24 @@ inputs:
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
build_nproc:
description: "The number of processors to use for building."
required: true
force_build:
description: 'Force building of all dependencies ("true", "false").'
required: false
default: "false"
log_verbosity:
description: "The logging verbosity."
required: false
default: "verbose"

runs:
using: composite
steps:
- name: Install Conan dependencies
shell: bash
env:
BUILD_DIR: ${{ inputs.build_dir }}
BUILD_NPROC: ${{ inputs.build_nproc }}
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
BUILD_TYPE: ${{ inputs.build_type }}
LOG_VERBOSITY: ${{ inputs.log_verbosity }}
run: |
echo 'Installing dependencies.'
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
mkdir -p ${{ inputs.build_dir }}
cd ${{ inputs.build_dir }}
conan install \
--output-folder . \
--build="${BUILD_OPTION}" \
--options:host='&:tests=True' \
--options:host='&:xrpld=True' \
--settings:all build_type="${BUILD_TYPE}" \
--conf:all tools.build:jobs=${BUILD_NPROC} \
--conf:all tools.build:verbosity="${LOG_VERBOSITY}" \
--conf:all tools.compilation:verbosity="${LOG_VERBOSITY}" \
--build ${{ inputs.force_build == 'true' && '"*"' || 'missing' }} \
--options:host '&:tests=True' \
--options:host '&:xrpld=True' \
--settings:all build_type=${{ inputs.build_type }} \
..

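Taken together, the env-var and inline-expression variants above describe the same `conan install` invocation. As a rough local equivalent, and only as a sketch (the `.build` directory, Debug build type, and job count are assumptions here, not values taken from the action inputs), the step boils down to:

```bash
#!/usr/bin/env bash
# Hypothetical local reproduction of the build-deps step; paths and the
# build type are assumptions for local use.
set -euo pipefail

mkdir -p .build
cd .build
conan install \
  --output-folder . \
  --build=missing \
  --options:host '&:tests=True' \
  --options:host '&:xrpld=True' \
  --settings:all build_type=Debug \
  --conf:all tools.build:jobs="$(nproc)" \
  ..
```
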
.github/actions/build-test/action.yml (vendored, new file, 96 lines)
@@ -0,0 +1,96 @@
# This action builds and tests the binary. The Conan dependencies must have
# already been installed (see the build-deps action).
name: Build and Test
description: "Build and test the binary."

# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
build_dir:
description: "The directory where to build."
required: true
build_only:
description: 'Whether to only build or to build and test the code ("true", "false").'
required: false
default: "false"
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
default: ""
cmake_target:
description: "The CMake target to build."
required: true
codecov_token:
description: "The Codecov token to use for uploading coverage reports."
required: false
default: ""
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: true

runs:
using: composite
steps:
- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Configuring CMake.'
cmake \
-G '${{ inputs.os == 'windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
${{ inputs.cmake_args }} \
..
- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Building binary.'
cmake \
--build . \
--config ${{ inputs.build_type }} \
--parallel $(nproc) \
--target ${{ inputs.cmake_target }}
- name: Check linking
if: ${{ inputs.os == 'linux' }}
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Checking linking.'
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Verify voidstar
if: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Verifying presence of instrumentation.'
./rippled --version | grep libvoidstar
- name: Test the binary
if: ${{ inputs.build_only == 'false' }}
shell: bash
working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'windows' && inputs.build_type || '' }}
run: |
echo 'Testing binary.'
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
- name: Upload coverage report
if: ${{ inputs.cmake_target == 'coverage' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true
fail_ci_if_error: true
files: ${{ inputs.build_dir }}/coverage.xml
plugins: noop
token: ${{ inputs.codecov_token }}
verbose: true

.github/actions/print-env/action.yml (vendored, deleted, 43 lines)
@@ -1,43 +0,0 @@
name: Print build environment
description: "Print environment and some tooling versions"

runs:
using: composite
steps:
- name: Check configuration (Windows)
if: ${{ runner.os == 'Windows' }}
shell: bash
run: |
echo 'Checking environment variables.'
set

echo 'Checking CMake version.'
cmake --version

echo 'Checking Conan version.'
conan --version

- name: Check configuration (Linux and macOS)
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
shell: bash
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'

echo 'Checking environment variables.'
env | sort

echo 'Checking CMake version.'
cmake --version

echo 'Checking compiler version.'
${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version

echo 'Checking Conan version.'
conan --version

echo 'Checking Ninja version.'
ninja --version

echo 'Checking nproc version.'
nproc --version

.github/actions/setup-conan/action.yml (vendored, 7 lines changed)
@@ -35,12 +35,9 @@ runs:

- name: Set up Conan remote
shell: bash
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
run: |
echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"
echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}

echo 'Listing Conan remotes.'
conan remote list

.github/scripts/levelization/README.md (vendored, 6 lines changed)
@@ -72,15 +72,15 @@ It generates many files of [results](results):
desired as described above. In a perfect repo, this file will be
empty.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
Github workflow](../../workflows/check-levelization.yml) to validate
that nothing changed.
- [`ordering.txt`](results/ordering.txt): A list showing relationships
between modules where there are no loops as they actually exist, as
opposed to how they are desired as described above.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
Github workflow](../../workflows/check-levelization.yml) to validate
that nothing changed.
- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
- [`levelization.yml`](../../workflows/check-levelization.yml)
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or

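The README text above describes how the committed levelization results are used to detect changes. The check-levelization workflow itself is not part of this diff, so the following is only an assumed sketch of what the validation amounts to: regenerate the results (regeneration command omitted here) and fail if the committed files under `results/` changed.

```bash
# Hypothetical check, not the actual workflow: after regenerating the
# levelization results, fail if files such as loops.txt or ordering.txt
# differ from what is committed.
git diff --exit-code .github/scripts/levelization/results/ \
  || { echo 'Levelization results changed.'; exit 1; }
```
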
@@ -17,7 +17,7 @@ Loop: xrpld.app xrpld.rpc
xrpld.rpc > xrpld.app

Loop: xrpld.app xrpld.shamap
xrpld.shamap ~= xrpld.app
xrpld.app > xrpld.shamap

Loop: xrpld.core xrpld.perflog
xrpld.perflog == xrpld.core

@@ -8,10 +8,6 @@ libxrpl.ledger > xrpl.ledger
|
||||
libxrpl.ledger > xrpl.protocol
|
||||
libxrpl.net > xrpl.basics
|
||||
libxrpl.net > xrpl.net
|
||||
libxrpl.nodestore > xrpl.basics
|
||||
libxrpl.nodestore > xrpl.json
|
||||
libxrpl.nodestore > xrpl.nodestore
|
||||
libxrpl.nodestore > xrpl.protocol
|
||||
libxrpl.protocol > xrpl.basics
|
||||
libxrpl.protocol > xrpl.json
|
||||
libxrpl.protocol > xrpl.protocol
|
||||
@@ -22,9 +18,6 @@ libxrpl.server > xrpl.basics
|
||||
libxrpl.server > xrpl.json
|
||||
libxrpl.server > xrpl.protocol
|
||||
libxrpl.server > xrpl.server
|
||||
libxrpl.shamap > xrpl.basics
|
||||
libxrpl.shamap > xrpl.protocol
|
||||
libxrpl.shamap > xrpl.shamap
|
||||
test.app > test.jtx
|
||||
test.app > test.rpc
|
||||
test.app > test.toplevel
|
||||
@@ -32,11 +25,11 @@ test.app > test.unit_test
|
||||
test.app > xrpl.basics
|
||||
test.app > xrpld.app
|
||||
test.app > xrpld.core
|
||||
test.app > xrpld.nodestore
|
||||
test.app > xrpld.overlay
|
||||
test.app > xrpld.rpc
|
||||
test.app > xrpl.json
|
||||
test.app > xrpl.ledger
|
||||
test.app > xrpl.nodestore
|
||||
test.app > xrpl.protocol
|
||||
test.app > xrpl.resource
|
||||
test.basics > test.jtx
|
||||
@@ -93,7 +86,8 @@ test.nodestore > test.toplevel
|
||||
test.nodestore > test.unit_test
|
||||
test.nodestore > xrpl.basics
|
||||
test.nodestore > xrpld.core
|
||||
test.nodestore > xrpl.nodestore
|
||||
test.nodestore > xrpld.nodestore
|
||||
test.nodestore > xrpld.unity
|
||||
test.overlay > test.jtx
|
||||
test.overlay > test.toplevel
|
||||
test.overlay > test.unit_test
|
||||
@@ -101,8 +95,8 @@ test.overlay > xrpl.basics
|
||||
test.overlay > xrpld.app
|
||||
test.overlay > xrpld.overlay
|
||||
test.overlay > xrpld.peerfinder
|
||||
test.overlay > xrpld.shamap
|
||||
test.overlay > xrpl.protocol
|
||||
test.overlay > xrpl.shamap
|
||||
test.peerfinder > test.beast
|
||||
test.peerfinder > test.unit_test
|
||||
test.peerfinder > xrpl.basics
|
||||
@@ -137,22 +131,17 @@ test.server > xrpl.json
|
||||
test.server > xrpl.server
|
||||
test.shamap > test.unit_test
|
||||
test.shamap > xrpl.basics
|
||||
test.shamap > xrpl.nodestore
|
||||
test.shamap > xrpld.nodestore
|
||||
test.shamap > xrpld.shamap
|
||||
test.shamap > xrpl.protocol
|
||||
test.shamap > xrpl.shamap
|
||||
test.toplevel > test.csf
|
||||
test.toplevel > xrpl.json
|
||||
test.unit_test > xrpl.basics
|
||||
tests.libxrpl > xrpl.basics
|
||||
tests.libxrpl > xrpl.ledger
|
||||
tests.libxrpl > xrpl.json
|
||||
tests.libxrpl > xrpl.net
|
||||
xrpl.json > xrpl.basics
|
||||
xrpl.ledger > xrpl.basics
|
||||
xrpl.ledger > xrpl.protocol
|
||||
xrpl.net > xrpl.basics
|
||||
xrpl.nodestore > xrpl.basics
|
||||
xrpl.nodestore > xrpl.protocol
|
||||
xrpl.protocol > xrpl.basics
|
||||
xrpl.protocol > xrpl.json
|
||||
xrpl.resource > xrpl.basics
|
||||
@@ -161,21 +150,17 @@ xrpl.resource > xrpl.protocol
|
||||
xrpl.server > xrpl.basics
|
||||
xrpl.server > xrpl.json
|
||||
xrpl.server > xrpl.protocol
|
||||
xrpl.shamap > xrpl.basics
|
||||
xrpl.shamap > xrpl.nodestore
|
||||
xrpl.shamap > xrpl.protocol
|
||||
xrpld.app > test.unit_test
|
||||
xrpld.app > xrpl.basics
|
||||
xrpld.app > xrpld.conditions
|
||||
xrpld.app > xrpld.consensus
|
||||
xrpld.app > xrpld.nodestore
|
||||
xrpld.app > xrpld.perflog
|
||||
xrpld.app > xrpl.json
|
||||
xrpld.app > xrpl.ledger
|
||||
xrpld.app > xrpl.net
|
||||
xrpld.app > xrpl.nodestore
|
||||
xrpld.app > xrpl.protocol
|
||||
xrpld.app > xrpl.resource
|
||||
xrpld.app > xrpl.shamap
|
||||
xrpld.conditions > xrpl.basics
|
||||
xrpld.conditions > xrpl.protocol
|
||||
xrpld.consensus > xrpl.basics
|
||||
@@ -185,6 +170,11 @@ xrpld.core > xrpl.basics
|
||||
xrpld.core > xrpl.json
|
||||
xrpld.core > xrpl.net
|
||||
xrpld.core > xrpl.protocol
|
||||
xrpld.nodestore > xrpl.basics
|
||||
xrpld.nodestore > xrpld.core
|
||||
xrpld.nodestore > xrpld.unity
|
||||
xrpld.nodestore > xrpl.json
|
||||
xrpld.nodestore > xrpl.protocol
|
||||
xrpld.overlay > xrpl.basics
|
||||
xrpld.overlay > xrpld.core
|
||||
xrpld.overlay > xrpld.peerfinder
|
||||
@@ -200,11 +190,13 @@ xrpld.perflog > xrpl.basics
|
||||
xrpld.perflog > xrpl.json
|
||||
xrpld.rpc > xrpl.basics
|
||||
xrpld.rpc > xrpld.core
|
||||
xrpld.rpc > xrpld.nodestore
|
||||
xrpld.rpc > xrpl.json
|
||||
xrpld.rpc > xrpl.ledger
|
||||
xrpld.rpc > xrpl.net
|
||||
xrpld.rpc > xrpl.nodestore
|
||||
xrpld.rpc > xrpl.protocol
|
||||
xrpld.rpc > xrpl.resource
|
||||
xrpld.rpc > xrpl.server
|
||||
xrpld.shamap > xrpl.shamap
|
||||
xrpld.shamap > xrpl.basics
|
||||
xrpld.shamap > xrpld.nodestore
|
||||
xrpld.shamap > xrpl.protocol
|
||||
|
||||
.github/scripts/strategy-matrix/generate.py (vendored, 10 lines changed)
@@ -74,14 +74,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue

# RHEL:
# - 9 using GCC 12: Debug and Unity on linux/amd64.
# - 10 using Clang: Release and no Unity on linux/amd64.
# - 9.4 using GCC 12: Debug and Unity on linux/amd64.
# - 9.6 using Clang: Release and no Unity on linux/amd64.
if os['distro_name'] == 'rhel':
skip = True
if os['distro_version'] == '9':
if os['distro_version'] == '9.4':
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
skip = False
elif os['distro_version'] == '10':
elif os['distro_version'] == '9.6':
if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
skip = False
if skip:

@@ -162,7 +162,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
'config_name': config_name,
'cmake_args': cmake_args,
'cmake_target': cmake_target,
'build_only': build_only,
'build_only': 'true' if build_only else 'false',
'build_type': build_type,
'os': os,
'architecture': architecture,

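For context, the workflow step that consumes this script (shown in the reusable-strategy-matrix hunk further down) runs `./generate.py [--all] [--config=<os>.json]` from `.github/scripts/strategy-matrix` and appends the output to `$GITHUB_OUTPUT`. A local dry run might look like the sketch below; the scratch output path is an assumption for local use.

```bash
# Generate the full Linux strategy matrix locally.
set -euo pipefail

cd .github/scripts/strategy-matrix
./generate.py --all --config=linux.json > /tmp/strategy-matrix.out
```
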
122
.github/scripts/strategy-matrix/linux.json
vendored
122
.github/scripts/strategy-matrix/linux.json
vendored
@@ -14,169 +14,139 @@
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "12"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "13"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "14"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "15",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "15"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "16",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "16"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "17",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "17"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "18",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "18"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "19",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "19"
|
||||
},
|
||||
{
|
||||
"distro_name": "debian",
|
||||
"distro_version": "bookworm",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "20",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "20"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "8",
|
||||
"distro_version": "9.4",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "10e69b4"
|
||||
"compiler_version": "12"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "8",
|
||||
"distro_version": "9.4",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9.4",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9.6",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9.6",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9.4",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "10e69b4"
|
||||
"compiler_version": "any"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "10e69b4"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "10e69b4"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "10e69b4"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "9",
|
||||
"distro_version": "9.6",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "10e69b4"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "10",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "10e69b4"
|
||||
},
|
||||
{
|
||||
"distro_name": "rhel",
|
||||
"distro_version": "10",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "any",
|
||||
"image_sha": "10e69b4"
|
||||
"compiler_version": "any"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "jammy",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "12",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "12"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "13",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "13"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "gcc",
|
||||
"compiler_version": "14",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "14"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "16",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "16"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "17",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "17"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "18",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "18"
|
||||
},
|
||||
{
|
||||
"distro_name": "ubuntu",
|
||||
"distro_version": "noble",
|
||||
"compiler_name": "clang",
|
||||
"compiler_version": "19",
|
||||
"image_sha": "6948666"
|
||||
"compiler_version": "19"
|
||||
}
|
||||
],
|
||||
"build_type": ["Debug", "Release"],
|
||||
|
||||
.github/scripts/strategy-matrix/macos.json (vendored, 3 lines changed)
@@ -10,8 +10,7 @@
"distro_name": "macos",
"distro_version": "",
"compiler_name": "",
"compiler_version": "",
"image_sha": ""
"compiler_version": ""
}
],
"build_type": ["Debug", "Release"],

.github/scripts/strategy-matrix/windows.json (vendored, 3 lines changed)
@@ -10,8 +10,7 @@
"distro_name": "windows",
"distro_version": "",
"compiler_name": "",
"compiler_version": "",
"image_sha": ""
"compiler_version": ""
}
],
"build_type": ["Debug", "Release"],

.github/workflows/build-test.yml (vendored, new file, 147 lines)
@@ -0,0 +1,147 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: false
type: string
default: ".build"
dependencies_force_build:
description: "Force building of all dependencies."
required: false
type: boolean
default: false
dependencies_force_upload:
description: "Force uploading of all dependencies."
required: false
type: boolean
default: false
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: true
type: string
strategy_matrix:
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
required: false
type: string
default: "minimal"
secrets:
codecov_token:
description: "The Codecov token to use for uploading coverage reports."
required: false

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.os }}
cancel-in-progress: true

defaults:
run:
shell: bash

jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
os: ${{ inputs.os }}
strategy_matrix: ${{ inputs.strategy_matrix }}

# Build and test the binary.
build-test:
needs:
- generate-matrix
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
runs-on: ${{ matrix.architecture.runner }}
container: ${{ inputs.os == 'linux' && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
steps:
- name: Check strategy matrix
run: |
echo 'Operating system distro name: ${{ matrix.os.distro_name }}'
echo 'Operating system distro version: ${{ matrix.os.distro_version }}'
echo 'Operating system compiler name: ${{ matrix.os.compiler_name }}'
echo 'Operating system compiler version: ${{ matrix.os.compiler_version }}'
echo 'Architecture platform: ${{ matrix.architecture.platform }}'
echo 'Architecture runner: ${{ toJson(matrix.architecture.runner) }}'
echo 'Build type: ${{ matrix.build_type }}'
echo 'Build only: ${{ matrix.build_only }}'
echo 'CMake arguments: ${{ matrix.cmake_args }}'
echo 'CMake target: ${{ matrix.cmake_target }}'
echo 'Config name: ${{ matrix.config_name }}'

- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
with:
disable_ccache: false

- name: Check configuration (Windows)
if: ${{ inputs.os == 'windows' }}
run: |
echo 'Checking environment variables.'
set

echo 'Checking CMake version.'
cmake --version

echo 'Checking Conan version.'
conan --version
- name: Check configuration (Linux and MacOS)
if: ${{ inputs.os == 'linux' || inputs.os == 'macos' }}
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'

echo 'Checking environment variables.'
env | sort

echo 'Checking CMake version.'
cmake --version

echo 'Checking compiler version.'
${{ inputs.os == 'linux' && '${CC}' || 'clang' }} --version

echo 'Checking Conan version.'
conan --version

echo 'Checking Ninja version.'
ninja --version

echo 'Checking nproc version.'
nproc --version

- name: Setup Conan
uses: ./.github/actions/setup-conan

- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
force_build: ${{ inputs.dependencies_force_build }}

- name: Build and test binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_only: ${{ matrix.build_only }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_target: ${{ matrix.cmake_target }}
codecov_token: ${{ secrets.codecov_token }}
os: ${{ inputs.os }}

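The `container:` expression above derives the CI image name from the matrix entry via `format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', ...)`. As an example, assuming the `debian`/`bookworm`/`gcc`/`12` entry from `linux.json`, the resulting image could be pulled locally as shown below (treat the exact tag as an assumption):

```bash
# Image name produced for distro_name=debian, distro_version=bookworm,
# compiler_name=gcc, compiler_version=12.
docker pull ghcr.io/xrplf/ci/debian-bookworm:gcc-12
```
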
@@ -40,52 +40,47 @@ jobs:
upload:
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Generate outputs
id: generate
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
echo 'Generating user and channel.'
echo "user=clio" >> "${GITHUB_OUTPUT}"
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
echo "channel=pr_${{ github.event.pull_request.number }}" >> "${GITHUB_OUTPUT}"
echo 'Extracting version.'
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
- name: Calculate conan reference
id: conan_ref
run: |
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"

- name: Set up Conan
uses: ./.github/actions/setup-conan
with:
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}

- name: Log into Conan remote
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
run: conan remote login ${{ inputs.conan_remote_name }} "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
- name: Upload package
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: |
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.conan_ref.outputs.conan_ref }}
outputs:
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}

notify:
needs: upload
runs-on: ubuntu-latest
env:
GH_TOKEN: ${{ secrets.clio_notify_token }}
steps:
- name: Notify Clio
env:
GH_TOKEN: ${{ secrets.clio_notify_token }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
-F "client_payload[pr_url]=${PR_URL}"
-F "client_payload[pr_url]=${{ github.event.pull_request.html_url }}"

.github/workflows/on-pr.yml (vendored, 18 lines changed)
@@ -50,8 +50,8 @@ jobs:
files: |
# These paths are unique to `on-pr.yml`.
.github/scripts/levelization/**
.github/workflows/reusable-check-levelization.yml
.github/workflows/reusable-notify-clio.yml
.github/workflows/check-levelization.yml
.github/workflows/notify-clio.yml
.github/workflows/on-pr.yml

# Keep the paths below in sync with those in `on-trigger.yml`.

@@ -59,11 +59,8 @@ jobs:
.github/actions/build-test/**
.github/actions/setup-conan/**
.github/scripts/strategy-matrix/**
.github/workflows/reusable-build.yml
.github/workflows/reusable-build-test-config.yml
.github/workflows/reusable-build-test.yml
.github/workflows/build-test.yml
.github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml
.codecov.yml
cmake/**
conan/**

@@ -96,27 +93,26 @@ jobs:
check-levelization:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-levelization.yml
uses: ./.github/workflows/check-levelization.yml

build-test:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-build-test.yml
uses: ./.github/workflows/build-test.yml
strategy:
fail-fast: false
matrix:
os: [linux, macos, windows]
with:
os: ${{ matrix.os }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
codecov_token: ${{ secrets.CODECOV_TOKEN }}

notify-clio:
needs:
- should-run
- build-test
if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
uses: ./.github/workflows/reusable-notify-clio.yml
uses: ./.github/workflows/notify-clio.yml
secrets:
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}

.github/workflows/on-trigger.yml (vendored, 43 lines changed)
@@ -9,12 +9,12 @@ name: Trigger
on:
push:
branches:
- "develop"
- "release*"
- "master"
- develop
- release
- master
paths:
# These paths are unique to `on-trigger.yml`.
- ".github/workflows/reusable-check-missing-commits.yml"
- ".github/workflows/check-missing-commits.yml"
- ".github/workflows/on-trigger.yml"
- ".github/workflows/publish-docs.yml"

@@ -23,11 +23,8 @@ on:
- ".github/actions/build-test/**"
- ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**"
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/build-test.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".codecov.yml"
- "cmake/**"
- "conan/**"

@@ -46,16 +43,25 @@ on:
schedule:
- cron: "32 6 * * 1-5"

# Run when manually triggered via the GitHub UI or API.
# Run when manually triggered via the GitHub UI or API. If `force_upload` is
# true, then the dependencies that were missing (`force_rebuild` is false) or
# rebuilt (`force_rebuild` is true) will be uploaded, overwriting existing
# dependencies if needed.
workflow_dispatch:
inputs:
dependencies_force_build:
description: "Force building of all dependencies."
required: false
type: boolean
default: false
dependencies_force_upload:
description: "Force uploading of all dependencies."
required: false
type: boolean
default: false

concurrency:
# When a PR is merged into the develop branch it will be assigned a unique
# group identifier, so execution will continue even if another PR is merged
# while it is still running. In all other cases the group identifier is shared
# per branch, so that any in-progress runs are cancelled when a new commit is
# pushed.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

defaults:

@@ -65,16 +71,15 @@ defaults:
jobs:
check-missing-commits:
if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
uses: ./.github/workflows/reusable-check-missing-commits.yml
uses: ./.github/workflows/check-missing-commits.yml

build-test:
uses: ./.github/workflows/reusable-build-test.yml
uses: ./.github/workflows/build-test.yml
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
os: [linux, macos, windows]
with:
os: ${{ matrix.os }}
strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
codecov_token: ${{ secrets.CODECOV_TOKEN }}

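The workflow_dispatch comment above explains the force-build and force-upload behavior. A hedged sketch of triggering this workflow manually with those inputs, using the GitHub CLI (the branch name and flag values are only examples):

```bash
# Manually dispatch on-trigger.yml, forcing dependencies to be rebuilt
# and re-uploaded.
gh workflow run on-trigger.yml \
  --ref develop \
  -f dependencies_force_build=true \
  -f dependencies_force_upload=true
```
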
.github/workflows/pre-commit.yml (vendored, 5 lines changed)
@@ -7,9 +7,8 @@ on:
workflow_dispatch:

jobs:
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
uses: XRPLF/actions/.github/workflows/pre-commit.yml@af1b0f0d764cda2e5435f5ac97b240d4bd4d95d3
with:
runs_on: ubuntu-latest
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit" }'

.github/workflows/publish-docs.yml (vendored, 20 lines changed)
@@ -23,24 +23,16 @@ defaults:

env:
BUILD_DIR: .build
NPROC_SUBTRACT: 2

jobs:
publish:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
container: ghcr.io/xrplf/ci/tools-rippled-documentation
permissions:
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ env.NPROC_SUBTRACT }}

- name: Check configuration
run: |
echo 'Checking path.'

@@ -54,16 +46,12 @@ jobs:

echo 'Checking Doxygen version.'
doxygen --version

- name: Build documentation
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
run: |
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
mkdir -p ${{ env.BUILD_DIR }}
cd ${{ env.BUILD_DIR }}
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel ${BUILD_NPROC}

cmake --build . --target docs --parallel $(nproc)
- name: Publish documentation
if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0

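The documentation build in this step can be reproduced locally with essentially the same commands, assuming Doxygen and CMake are installed and the repository root is the current directory:

```bash
# Local equivalent of the "Build documentation" step above.
set -euo pipefail

mkdir -p .build
cd .build
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel "$(nproc)"
```
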
77
.github/workflows/reusable-build-test-config.yml
vendored
77
.github/workflows/reusable-build-test-config.yml
vendored
@@ -1,77 +0,0 @@
|
||||
name: Build and test configuration
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
build_dir:
|
||||
description: "The directory where to build."
|
||||
required: true
|
||||
type: string
|
||||
build_only:
|
||||
description: 'Whether to only build or to build and test the code ("true", "false").'
|
||||
required: true
|
||||
type: boolean
|
||||
build_type:
|
||||
description: 'The build type to use ("Debug", "Release").'
|
||||
type: string
|
||||
required: true
|
||||
cmake_args:
|
||||
description: "Additional arguments to pass to CMake."
|
||||
required: false
|
||||
type: string
|
||||
default: ""
|
||||
cmake_target:
|
||||
description: "The CMake target to build."
|
||||
type: string
|
||||
required: true
|
||||
|
||||
runs_on:
|
||||
description: Runner to run the job on as a JSON string
|
||||
required: true
|
||||
type: string
|
||||
image:
|
||||
description: "The image to run in (leave empty to run natively)"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
config_name:
|
||||
description: "The configuration string (used for naming artifacts and such)."
|
||||
required: true
|
||||
type: string
|
||||
|
||||
nproc_subtract:
|
||||
description: "The number of processors to subtract when calculating parallelism."
|
||||
required: false
|
||||
type: number
|
||||
default: 2
|
||||
|
||||
secrets:
|
||||
CODECOV_TOKEN:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
uses: ./.github/workflows/reusable-build.yml
|
||||
with:
|
||||
build_dir: ${{ inputs.build_dir }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
cmake_args: ${{ inputs.cmake_args }}
|
||||
cmake_target: ${{ inputs.cmake_target }}
|
||||
runs_on: ${{ inputs.runs_on }}
|
||||
image: ${{ inputs.image }}
|
||||
config_name: ${{ inputs.config_name }}
|
||||
nproc_subtract: ${{ inputs.nproc_subtract }}
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
test:
|
||||
needs: build
|
||||
uses: ./.github/workflows/reusable-test.yml
|
||||
with:
|
||||
run_tests: ${{ !inputs.build_only }}
|
||||
verify_voidstar: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
|
||||
runs_on: ${{ inputs.runs_on }}
|
||||
image: ${{ inputs.image }}
|
||||
config_name: ${{ inputs.config_name }}
|
||||
nproc_subtract: ${{ inputs.nproc_subtract }}
|
||||
58
.github/workflows/reusable-build-test.yml
vendored
58
.github/workflows/reusable-build-test.yml
vendored
@@ -1,58 +0,0 @@
|
||||
# This workflow builds and tests the binary for various configurations.
|
||||
name: Build and test
|
||||
|
||||
# This workflow can only be triggered by other workflows. Note that the
|
||||
# workflow_call event does not support the 'choice' input type, see
|
||||
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
|
||||
# so we use 'string' instead.
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
build_dir:
|
||||
description: "The directory where to build."
|
||||
required: false
|
||||
type: string
|
||||
default: ".build"
|
||||
os:
|
||||
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||
required: true
|
||||
type: string
|
||||
strategy_matrix:
|
||||
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
|
||||
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
|
||||
required: false
|
||||
type: string
|
||||
default: "minimal"
|
||||
secrets:
|
||||
CODECOV_TOKEN:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
# Generate the strategy matrix to be used by the following job.
|
||||
generate-matrix:
|
||||
uses: ./.github/workflows/reusable-strategy-matrix.yml
|
||||
with:
|
||||
os: ${{ inputs.os }}
|
||||
strategy_matrix: ${{ inputs.strategy_matrix }}
|
||||
|
||||
# Build and test the binary for each configuration.
|
||||
build-test-config:
|
||||
needs:
|
||||
- generate-matrix
|
||||
uses: ./.github/workflows/reusable-build-test-config.yml
|
||||
strategy:
|
||||
fail-fast: ${{ github.event_name == 'merge_group' }}
|
||||
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
||||
max-parallel: 10
|
||||
with:
|
||||
build_dir: ${{ inputs.build_dir }}
|
||||
build_only: ${{ matrix.build_only }}
|
||||
build_type: ${{ matrix.build_type }}
|
||||
cmake_args: ${{ matrix.cmake_args }}
|
||||
cmake_target: ${{ matrix.cmake_target }}
|
||||
runs_on: ${{ toJSON(matrix.architecture.runner) }}
|
||||
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
|
||||
config_name: ${{ matrix.config_name }}
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
154
.github/workflows/reusable-build.yml
vendored
154
.github/workflows/reusable-build.yml
vendored
@@ -1,154 +0,0 @@
|
||||
name: Build rippled
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
build_dir:
|
||||
description: "The directory where to build."
|
||||
required: true
|
||||
type: string
|
||||
build_type:
|
||||
description: 'The build type to use ("Debug", "Release").'
|
||||
required: true
|
||||
type: string
|
||||
cmake_args:
|
||||
description: "Additional arguments to pass to CMake."
|
||||
required: true
|
||||
type: string
|
||||
cmake_target:
|
||||
description: "The CMake target to build."
|
||||
required: true
|
||||
type: string
|
||||
|
||||
runs_on:
|
||||
description: Runner to run the job on as a JSON string
|
||||
required: true
|
||||
type: string
|
||||
image:
|
||||
description: "The image to run in (leave empty to run natively)"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
config_name:
|
||||
description: "The name of the configuration."
|
||||
required: true
|
||||
type: string
|
||||
|
||||
nproc_subtract:
|
||||
description: "The number of processors to subtract when calculating parallelism."
|
||||
required: true
|
||||
type: number
|
||||
|
||||
secrets:
|
||||
CODECOV_TOKEN:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
required: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build ${{ inputs.config_name }}
|
||||
runs-on: ${{ fromJSON(inputs.runs_on) }}
|
||||
container: ${{ inputs.image != '' && inputs.image || null }}
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- name: Cleanup workspace
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
|
||||
- name: Prepare runner
|
||||
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
|
||||
with:
|
||||
disable_ccache: false
|
||||
|
||||
- name: Print build environment
|
||||
uses: ./.github/actions/print-env
|
||||
|
||||
- name: Get number of processors
|
||||
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
|
||||
id: nproc
|
||||
with:
|
||||
subtract: ${{ inputs.nproc_subtract }}
|
||||
|
||||
- name: Setup Conan
|
||||
uses: ./.github/actions/setup-conan
|
||||
|
||||
- name: Build dependencies
|
||||
uses: ./.github/actions/build-deps
|
||||
with:
|
||||
build_dir: ${{ inputs.build_dir }}
|
||||
build_nproc: ${{ steps.nproc.outputs.nproc }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
# Set the verbosity to "quiet" for Windows to avoid an excessive
|
||||
# amount of logs. For other OSes, the "verbose" logs are more useful.
|
||||
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
|
||||
|
||||
- name: Configure CMake
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}
|
||||
env:
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
CMAKE_ARGS: ${{ inputs.cmake_args }}
|
||||
run: |
|
||||
cmake \
|
||||
-G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
|
||||
${CMAKE_ARGS} \
|
||||
..
|
||||
|
||||
- name: Build the binary
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}
|
||||
env:
|
||||
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
CMAKE_TARGET: ${{ inputs.cmake_target }}
|
||||
run: |
|
||||
cmake \
|
||||
--build . \
|
||||
--config "${BUILD_TYPE}" \
|
||||
--parallel ${BUILD_NPROC} \
|
||||
--target "${CMAKE_TARGET}"
|
||||
|
||||
- name: Put built binaries in one location
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.build_dir }}
|
||||
env:
|
||||
BUILD_TYPE_DIR: ${{ runner.os == 'Windows' && inputs.build_type || '' }}
|
||||
CMAKE_TARGET: ${{ inputs.cmake_target }}
|
||||
run: |
|
||||
mkdir -p ./binaries/doctest/
|
||||
|
||||
cp ./${BUILD_TYPE_DIR}/rippled* ./binaries/
|
||||
if [ "${CMAKE_TARGET}" != 'coverage' ]; then
|
||||
cp ./src/tests/libxrpl/${BUILD_TYPE_DIR}/xrpl.test.* ./binaries/doctest/
|
||||
fi
|
||||
|
||||
- name: Upload rippled artifact
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
env:
|
||||
BUILD_DIR: ${{ inputs.build_dir }}
|
||||
with:
|
||||
name: rippled-${{ inputs.config_name }}
|
||||
path: ${{ env.BUILD_DIR }}/binaries/
|
||||
retention-days: 3
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Upload coverage report
|
||||
if: ${{ github.repository_owner == 'XRPLF' && inputs.cmake_target == 'coverage' }}
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
with:
|
||||
disable_search: true
|
||||
disable_telem: true
|
||||
fail_ci_if_error: true
|
||||
files: ${{ inputs.build_dir }}/coverage.xml
|
||||
plugins: noop
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
@@ -35,7 +35,4 @@ jobs:
- name: Generate strategy matrix
working-directory: .github/scripts/strategy-matrix
id: generate
env:
GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
run: ./generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }} >> "${GITHUB_OUTPUT}"

111
.github/workflows/reusable-test.yml
vendored
111
.github/workflows/reusable-test.yml
vendored
@@ -1,111 +0,0 @@
|
||||
name: Test rippled
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
verify_voidstar:
|
||||
description: "Whether to verify the presence of voidstar instrumentation."
|
||||
required: true
|
||||
type: boolean
|
||||
run_tests:
|
||||
description: "Whether to run unit tests"
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
runs_on:
|
||||
description: Runner to run the job on as a JSON string
|
||||
required: true
|
||||
type: string
|
||||
image:
|
||||
description: "The image to run in (leave empty to run natively)"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
config_name:
|
||||
description: "The name of the configuration."
|
||||
required: true
|
||||
type: string
|
||||
|
||||
nproc_subtract:
|
||||
description: "The number of processors to subtract when calculating parallelism."
|
||||
required: true
|
||||
type: number
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Test ${{ inputs.config_name }}
|
||||
runs-on: ${{ fromJSON(inputs.runs_on) }}
|
||||
container: ${{ inputs.image != '' && inputs.image || null }}
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- name: Cleanup workspace
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
|
||||
|
||||
- name: Get number of processors
|
||||
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
|
||||
id: nproc
|
||||
with:
|
||||
subtract: ${{ inputs.nproc_subtract }}
|
||||
|
||||
- name: Download rippled artifact
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
with:
|
||||
name: rippled-${{ inputs.config_name }}
|
||||
|
||||
- name: Make binary executable (Linux and macOS)
|
||||
shell: bash
|
||||
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
|
||||
run: |
|
||||
chmod +x ./rippled
|
||||
|
||||
- name: Check linking (Linux)
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
shell: bash
|
||||
run: |
|
||||
ldd ./rippled
|
||||
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
|
||||
echo 'The binary is statically linked.'
|
||||
else
|
||||
echo 'The binary is dynamically linked.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Verifying presence of instrumentation
|
||||
if: ${{ inputs.verify_voidstar }}
|
||||
shell: bash
|
||||
run: |
|
||||
./rippled --version | grep libvoidstar
|
||||
|
||||
- name: Run the embedded tests
|
||||
if: ${{ inputs.run_tests }}
|
||||
shell: bash
|
||||
env:
|
||||
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||
run: |
|
||||
./rippled --unittest --unittest-jobs ${BUILD_NPROC}
|
||||
|
||||
- name: Run the separate tests
|
||||
if: ${{ inputs.run_tests }}
|
||||
env:
|
||||
EXT: ${{ runner.os == 'Windows' && '.exe' || '' }}
|
||||
shell: bash
|
||||
run: |
|
||||
for test_file in ./doctest/*${EXT}; do
|
||||
echo "Executing $test_file"
|
||||
chmod +x "$test_file"
|
||||
if [[ "${{ runner.os }}" == "Windows" && "$test_file" == "./doctest/xrpl.test.net.exe" ]]; then
|
||||
echo "Skipping $test_file on Windows"
|
||||
else
|
||||
"$test_file"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Debug failure (Linux)
|
||||
if: ${{ failure() && runner.os == 'Linux' && inputs.run_tests }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "IPv4 local port range:"
|
||||
cat /proc/sys/net/ipv4/ip_local_port_range
|
||||
echo "Netstat:"
|
||||
netstat -an
|
||||
.github/workflows/upload-conan-deps.yml (vendored, 38 lines changed)
@@ -24,30 +24,30 @@ on:
branches: [develop]
paths:
- .github/workflows/upload-conan-deps.yml

- .github/workflows/reusable-strategy-matrix.yml

- .github/actions/build-deps/action.yml
- .github/actions/setup-conan/action.yml
- ".github/scripts/strategy-matrix/**"

- conanfile.py
- conan.lock

env:
CONAN_REMOTE_NAME: xrplf
CONAN_REMOTE_URL: https://conan.ripplex.io
NPROC_SUBTRACT: 2

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}

# Build and upload the dependencies for each configuration.
run-upload-conan-deps:
needs:
- generate-matrix

@@ -56,29 +56,19 @@ jobs:
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
runs-on: ${{ matrix.architecture.runner }}
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}

steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
with:
disable_ccache: false

- name: Print build environment
uses: ./.github/actions/print-env

- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ env.NPROC_SUBTRACT }}

- name: Setup Conan
uses: ./.github/actions/setup-conan
with:

@@ -89,19 +79,13 @@ jobs:
uses: ./.github/actions/build-deps
with:
build_dir: .build
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ matrix.build_type }}
force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
# Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}

- name: Log into Conan remote
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' }}
run: conan remote login ${{ env.CONAN_REMOTE_NAME }} "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"

- name: Upload Conan packages
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
env:
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule' }}
run: conan upload "*" -r=${{ env.CONAN_REMOTE_NAME }} --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}

23 BUILD.md

@@ -39,12 +39,17 @@ found here](./docs/build/environment.md).

- [Python 3.11](https://www.python.org/downloads/), or higher
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
- [CMake 3.22](https://cmake.org/download/), or higher
- [CMake 3.22](https://cmake.org/download/)[^2], or higher

[^1]:
It is possible to build with Conan 1.60+, but the instructions are
significantly different, which is why we are not recommending it.

[^2]:
CMake 4 is not yet supported by all dependencies required by this project.
If you are affected by this issue, follow [conan workaround for cmake
4](#workaround-for-cmake-4)

`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

@@ -142,6 +147,7 @@ git sparse-checkout set recipes/snappy
git sparse-checkout add recipes/soci
git fetch origin master
git checkout master
conan export --version 2.4.1 external/wamr # TODO: needs to be added to the conan center index
conan export --version 1.1.10 recipes/snappy/all
conan export --version 4.0.3 recipes/soci/all
rm -rf .git

@@ -277,6 +283,21 @@ sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
```

#### Workaround for CMake 4

If your system CMake is version 4 rather than 3, you may have to configure Conan
profile to use CMake version 3 for dependencies, by adding the following two
lines to your profile:

```text
[tool_requires]
!cmake/*: cmake/[>=3 <4]
```

This will force Conan to download and use a locally cached CMake 3 version, and
is needed because some of the dependencies used by this project do not support
CMake 4.

#### Clang workaround for grpc

If your compiler is clang, version 19 or later, or apple-clang, version 17 or
@@ -120,6 +120,7 @@ endif()
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(date REQUIRED)
|
||||
find_package(xxHash REQUIRED)
|
||||
find_package(wamr REQUIRED)
|
||||
|
||||
target_link_libraries(ripple_libs INTERFACE
|
||||
ed25519::ed25519
|
||||
|
||||
@@ -975,47 +975,6 @@
|
||||
# number of ledger records online. Must be greater
|
||||
# than or equal to ledger_history.
|
||||
#
|
||||
# Optional keys for NuDB only:
|
||||
#
|
||||
# nudb_block_size EXPERIMENTAL: Block size in bytes for NuDB storage.
|
||||
# Must be a power of 2 between 4096 and 32768. Default is 4096.
|
||||
#
|
||||
# This parameter controls the fundamental storage unit
|
||||
# size for NuDB's internal data structures. The choice
|
||||
# of block size can significantly impact performance
|
||||
# depending on your storage hardware and filesystem:
|
||||
#
|
||||
# - 4096 bytes: Optimal for most standard SSDs and
|
||||
# traditional filesystems (ext4, NTFS, HFS+).
|
||||
# Provides good balance of performance and storage
|
||||
# efficiency. Recommended for most deployments.
|
||||
# Minimizes memory footprint and provides consistent
|
||||
# low-latency access patterns across diverse hardware.
|
||||
#
|
||||
# - 8192-16384 bytes: May improve performance on
|
||||
# high-end NVMe SSDs and copy-on-write filesystems
|
||||
# like ZFS or Btrfs that benefit from larger block
|
||||
# alignment. Can reduce metadata overhead for large
|
||||
# databases. Offers better sequential throughput and
|
||||
# reduced I/O operations at the cost of higher memory
|
||||
# usage per operation.
|
||||
#
|
||||
# - 32768 bytes (32K): Maximum supported block size
|
||||
# for high-performance scenarios with very fast
|
||||
# storage. May increase memory usage and reduce
|
||||
# efficiency for smaller databases. Best suited for
|
||||
# enterprise environments with abundant RAM.
|
||||
#
|
||||
# Performance testing is recommended before deploying
|
||||
# any non-default block size in production environments.
|
||||
#
|
||||
# Note: This setting cannot be changed after database
|
||||
# creation without rebuilding the entire database.
|
||||
# Choose carefully based on your hardware and expected
|
||||
# database size.
|
||||
#
|
||||
# Example: nudb_block_size=4096
|
||||
#
|
||||
# These keys modify the behavior of online_delete, and thus are only
|
||||
# relevant if online_delete is defined and non-zero:
|
||||
#
|
||||
@@ -1512,7 +1471,6 @@ secure_gateway = 127.0.0.1
|
||||
[node_db]
|
||||
type=NuDB
|
||||
path=/var/lib/rippled/db/nudb
|
||||
nudb_block_size=4096
|
||||
online_delete=512
|
||||
advisory_delete=0
|
||||
|
||||
|
||||
@@ -45,7 +45,7 @@ if (static OR APPLE OR MSVC)
|
||||
set (OPENSSL_USE_STATIC_LIBS ON)
|
||||
endif ()
|
||||
set (OPENSSL_MSVC_STATIC_RT ON)
|
||||
find_dependency (OpenSSL REQUIRED)
|
||||
find_dependency (OpenSSL 1.1.1 REQUIRED)
|
||||
find_dependency (ZLIB)
|
||||
find_dependency (date)
|
||||
if (TARGET ZLIB::ZLIB)
|
||||
|
||||
@@ -53,21 +53,26 @@ add_library(xrpl.imports.main INTERFACE)
|
||||
|
||||
target_link_libraries(xrpl.imports.main
|
||||
INTERFACE
|
||||
absl::random_random
|
||||
date::date
|
||||
ed25519::ed25519
|
||||
LibArchive::LibArchive
|
||||
OpenSSL::Crypto
|
||||
Ripple::boost
|
||||
Ripple::libs
|
||||
Ripple::opts
|
||||
Ripple::syslibs
|
||||
absl::random_random
|
||||
date::date
|
||||
ed25519::ed25519
|
||||
secp256k1::secp256k1
|
||||
xrpl.libpb
|
||||
xxHash::xxhash
|
||||
$<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
|
||||
wamr::wamr
|
||||
)
|
||||
|
||||
if (WIN32)
|
||||
target_link_libraries(xrpl.imports.main INTERFACE ntdll)
|
||||
endif()
|
||||
|
||||
|
||||
include(add_module)
|
||||
include(target_link_modules)
|
||||
|
||||
@@ -112,21 +117,6 @@ target_link_libraries(xrpl.libxrpl.net PUBLIC
|
||||
add_module(xrpl server)
|
||||
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
|
||||
|
||||
add_module(xrpl nodestore)
|
||||
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.json
|
||||
xrpl.libxrpl.protocol
|
||||
)
|
||||
|
||||
add_module(xrpl shamap)
|
||||
target_link_libraries(xrpl.libxrpl.shamap PUBLIC
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.crypto
|
||||
xrpl.libxrpl.protocol
|
||||
xrpl.libxrpl.nodestore
|
||||
)
|
||||
|
||||
add_module(xrpl ledger)
|
||||
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
|
||||
xrpl.libxrpl.basics
|
||||
@@ -152,8 +142,6 @@ target_link_modules(xrpl PUBLIC
|
||||
protocol
|
||||
resource
|
||||
server
|
||||
nodestore
|
||||
shamap
|
||||
net
|
||||
ledger
|
||||
)
|
||||
|
||||
@@ -8,23 +8,20 @@ install (
|
||||
TARGETS
|
||||
common
|
||||
opts
|
||||
ripple_boost
|
||||
ripple_libs
|
||||
ripple_syslibs
|
||||
ripple_boost
|
||||
xrpl.imports.main
|
||||
xrpl.libpb
|
||||
xrpl.libxrpl
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.beast
|
||||
xrpl.libxrpl.crypto
|
||||
xrpl.libxrpl.json
|
||||
xrpl.libxrpl.ledger
|
||||
xrpl.libxrpl.net
|
||||
xrpl.libxrpl.nodestore
|
||||
xrpl.libxrpl.protocol
|
||||
xrpl.libxrpl.resource
|
||||
xrpl.libxrpl.ledger
|
||||
xrpl.libxrpl.server
|
||||
xrpl.libxrpl.shamap
|
||||
xrpl.libxrpl.net
|
||||
xrpl.libxrpl
|
||||
antithesis-sdk-cpp
|
||||
EXPORT RippleExports
|
||||
LIBRARY DESTINATION lib
|
||||
@@ -41,7 +38,7 @@ install(CODE "
|
||||
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
||||
include(create_symbolic_link)
|
||||
create_symbolic_link(xrpl \
|
||||
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
|
||||
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
|
||||
")
|
||||
|
||||
install (EXPORT RippleExports
|
||||
@@ -75,7 +72,7 @@ if (is_root_project AND TARGET rippled)
|
||||
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
||||
include(create_symbolic_link)
|
||||
create_symbolic_link(rippled${suffix} \
|
||||
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
|
||||
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
|
||||
")
|
||||
endif ()
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
|
||||
option (validator_keys "Enables building of validator-keys-tool as a separate target (imported via FetchContent)" OFF)
|
||||
|
||||
if (validator_keys)
|
||||
git_branch (current_branch)
|
||||
@@ -6,15 +6,17 @@ if (validator_keys)
|
||||
if (NOT (current_branch STREQUAL "release"))
|
||||
set (current_branch "master")
|
||||
endif ()
|
||||
message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")
|
||||
message (STATUS "tracking ValidatorKeys branch: ${current_branch}")
|
||||
|
||||
FetchContent_Declare (
|
||||
validator_keys
|
||||
validator_keys_src
|
||||
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
|
||||
GIT_TAG "${current_branch}"
|
||||
)
|
||||
FetchContent_MakeAvailable(validator_keys)
|
||||
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
|
||||
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
|
||||
|
||||
FetchContent_GetProperties (validator_keys_src)
|
||||
if (NOT validator_keys_src_POPULATED)
|
||||
message (STATUS "Pausing to download ValidatorKeys...")
|
||||
FetchContent_Populate (validator_keys_src)
|
||||
endif ()
|
||||
add_subdirectory (${validator_keys_src_SOURCE_DIR} ${CMAKE_BINARY_DIR}/validator-keys)
|
||||
endif ()
|
||||
|
||||
@@ -7,7 +7,7 @@ function(xrpl_add_test name)
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
|
||||
)
|
||||
add_executable(${target} ${ARGN} ${sources})
|
||||
add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})
|
||||
|
||||
isolate_headers(
|
||||
${target}
|
||||
|
||||
@@ -3,13 +3,14 @@
|
||||
"requires": [
|
||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
||||
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
|
||||
"wamr/2.4.1#731b101bc8fa06d84e5c84edb4dc41a5%1756223745.11",
|
||||
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
|
||||
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
|
||||
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
|
||||
"rocksdb/10.0.1#85537f46e538974d67da0c3977de48ac%1756234304.347",
|
||||
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
|
||||
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
|
||||
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1759746684.671",
|
||||
"openssl/3.5.2#0c5a5e15ae569f45dff57adcf1770cf7%1756234259.61",
|
||||
"nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
|
||||
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
|
||||
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
|
||||
@@ -53,4 +54,4 @@
|
||||
]
|
||||
},
|
||||
"config_requires": []
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,9 @@
# Global configuration for Conan. This is used to set the number of parallel
# downloads and uploads.
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
core:non_interactive=True
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose

@@ -21,11 +21,14 @@ compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}

[conf]
{% if compiler == "clang" and compiler_version >= 19 %}
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "apple-clang" and compiler_version >= 17 %}
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags+=['-Wno-restrict']
tools.build:cxxflags=['-Wno-restrict']
{% endif %}

[tool_requires]
!cmake/*: cmake/[>=3 <4]
@@ -2,6 +2,7 @@ from conan import ConanFile, __version__ as conan_version
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
import re

class Xrpl(ConanFile):
name = 'xrpl'

@@ -27,9 +28,10 @@ class Xrpl(ConanFile):
'grpc/1.50.1',
'libarchive/3.8.1',
'nudb/2.0.9',
'openssl/3.5.4',
'openssl/3.5.2',
'soci/4.0.3',
'zlib/1.3.1',
'wamr/2.4.1',
]

test_requires = [

@@ -133,6 +135,7 @@ class Xrpl(ConanFile):
self.folders.generators = 'build/generators'

generators = 'CMakeDeps'

def generate(self):
tc = CMakeToolchain(self)
tc.variables['tests'] = self.options.tests

@@ -190,6 +193,7 @@ class Xrpl(ConanFile):
'protobuf::libprotobuf',
'soci::soci',
'sqlite3::sqlite',
'wamr::wamr',
'xxhash::xxhash',
'zlib::zlib',
]

6 external/wamr/conandata.yml vendored Normal file

@@ -0,0 +1,6 @@
patches:
2.4.1:
- patch_description: add metering to iwasm interpreter
patch_file: patches/ripp_metering.patch
patch_type: conan
92 external/wamr/conanfile.py vendored Normal file
@@ -0,0 +1,92 @@
|
||||
from conan import ConanFile, tools
|
||||
from conan.tools.cmake import CMake, CMakeToolchain, CMakeDeps, cmake_layout
|
||||
from conan.tools.files import (
|
||||
apply_conandata_patches,
|
||||
export_conandata_patches,
|
||||
# get,
|
||||
)
|
||||
from conan.tools.scm import Git
|
||||
|
||||
# import os
|
||||
|
||||
required_conan_version = ">=1.55.0"
|
||||
|
||||
|
||||
class WamrConan(ConanFile):
|
||||
name = "wamr"
|
||||
version = "2.4.1"
|
||||
license = "Apache License v2.0"
|
||||
url = "https://github.com/bytecodealliance/wasm-micro-runtime.git"
|
||||
description = "Webassembly micro runtime"
|
||||
package_type = "library"
|
||||
settings = "os", "compiler", "build_type", "arch"
|
||||
options = {"shared": [True, False], "fPIC": [True, False]}
|
||||
default_options = {"shared": False, "fPIC": True}
|
||||
# requires = [("llvm/20.1.1@")]
|
||||
|
||||
def export_sources(self):
|
||||
export_conandata_patches(self)
|
||||
pass
|
||||
|
||||
# def build_requirements(self):
|
||||
# self.tool_requires("llvm/20.1.1")
|
||||
|
||||
def config_options(self):
|
||||
if self.settings.os == "Windows":
|
||||
del self.options.fPIC
|
||||
|
||||
def layout(self):
|
||||
cmake_layout(self, src_folder="src")
|
||||
|
||||
def source(self):
|
||||
git = Git(self)
|
||||
git.fetch_commit(
|
||||
url="https://github.com/bytecodealliance/wasm-micro-runtime.git",
|
||||
commit="b124f70345d712bead5c0c2393acb2dc583511de",
|
||||
)
|
||||
# get(self, **self.conan_data["sources"][self.version], strip_root=True)
|
||||
|
||||
def generate(self):
|
||||
tc = CMakeToolchain(self)
|
||||
|
||||
tc.variables["WAMR_BUILD_INTERP"] = 1
|
||||
tc.variables["WAMR_BUILD_FAST_INTERP"] = 1
|
||||
tc.variables["WAMR_BUILD_INSTRUCTION_METERING"] = 1
|
||||
tc.variables["WAMR_BUILD_AOT"] = 0
|
||||
tc.variables["WAMR_BUILD_JIT"] = 0
|
||||
tc.variables["WAMR_BUILD_FAST_JIT"] = 0
|
||||
tc.variables["WAMR_BUILD_SIMD"] = 0
|
||||
tc.variables["WAMR_BUILD_LIB_PTHREAD"] = 0
|
||||
tc.variables["WAMR_BUILD_LIB_WASI_THREADS"] = 0
|
||||
tc.variables["WAMR_BUILD_TAIL_CALL"] = 1
|
||||
tc.variables["WAMR_BUILD_BULK_MEMORY"] = 0
|
||||
tc.variables["WAMR_DISABLE_HW_BOUND_CHECK"] = 1
|
||||
tc.variables["WAMR_DISABLE_STACK_HW_BOUND_CHECK"] = 1
|
||||
tc.variables["WAMR_BH_LOG"] = "wamr_log_to_rippled"
|
||||
|
||||
tc.generate()
|
||||
|
||||
# This generates "foo-config.cmake" and "bar-config.cmake" in self.generators_folder
|
||||
deps = CMakeDeps(self)
|
||||
deps.generate()
|
||||
|
||||
def build(self):
|
||||
apply_conandata_patches(self)
|
||||
cmake = CMake(self)
|
||||
cmake.verbose = True
|
||||
cmake.configure()
|
||||
cmake.build()
|
||||
# self.run(f'echo {self.source_folder}')
|
||||
# Explicit way:
|
||||
# self.run('cmake %s/hello %s' % (self.source_folder, cmake.command_line))
|
||||
# self.run("cmake --build . %s" % cmake.build_config)
|
||||
|
||||
def package(self):
|
||||
cmake = CMake(self)
|
||||
cmake.verbose = True
|
||||
cmake.install()
|
||||
|
||||
def package_info(self):
|
||||
self.cpp_info.libs = ["iwasm"]
|
||||
self.cpp_info.names["cmake_find_package"] = "wamr"
|
||||
self.cpp_info.names["cmake_find_package_multi"] = "wamr"
|
||||
901 external/wamr/patches/ripp_metering.patch vendored Normal file
@@ -0,0 +1,901 @@
|
||||
diff --git a/CMakeLists.txt b/CMakeLists.txt
|
||||
index 4b28fa89..7d523a3d 100644
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -1,7 +1,7 @@
|
||||
# Copyright (C) 2019 Intel Corporation. All rights reserved.
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
|
||||
-cmake_minimum_required (VERSION 3.14)
|
||||
+cmake_minimum_required (VERSION 3.20)
|
||||
|
||||
option(BUILD_SHARED_LIBS "Build using shared libraries" OFF)
|
||||
|
||||
@@ -170,7 +170,7 @@ if (MINGW)
|
||||
endif ()
|
||||
|
||||
if (WIN32)
|
||||
- target_link_libraries(vmlib PRIVATE ntdll)
|
||||
+ target_link_libraries(vmlib PUBLIC ntdll)
|
||||
endif()
|
||||
|
||||
set (WAMR_PUBLIC_HEADERS
|
||||
diff --git a/core/iwasm/aot/aot_runtime.c b/core/iwasm/aot/aot_runtime.c
|
||||
index d2621fb2..6c96a844 100644
|
||||
--- a/core/iwasm/aot/aot_runtime.c
|
||||
+++ b/core/iwasm/aot/aot_runtime.c
|
||||
@@ -5611,7 +5611,7 @@ aot_resolve_import_func(AOTModule *module, AOTImportFunc *import_func)
|
||||
import_func->func_ptr_linked = wasm_native_resolve_symbol(
|
||||
import_func->module_name, import_func->func_name,
|
||||
import_func->func_type, &import_func->signature,
|
||||
- &import_func->attachment, &import_func->call_conv_raw);
|
||||
+ &import_func->attachment, NULL, &import_func->call_conv_raw);
|
||||
#if WASM_ENABLE_MULTI_MODULE != 0
|
||||
if (!import_func->func_ptr_linked) {
|
||||
if (!wasm_runtime_is_built_in_module(import_func->module_name)) {
|
||||
diff --git a/core/iwasm/common/wasm_c_api.c b/core/iwasm/common/wasm_c_api.c
|
||||
index 269ec577..34eb7c34 100644
|
||||
--- a/core/iwasm/common/wasm_c_api.c
|
||||
+++ b/core/iwasm/common/wasm_c_api.c
|
||||
@@ -3242,10 +3242,20 @@ wasm_func_copy(const wasm_func_t *func)
|
||||
|
||||
cloned->func_idx_rt = func->func_idx_rt;
|
||||
cloned->inst_comm_rt = func->inst_comm_rt;
|
||||
+ cloned->gas = func->gas;
|
||||
|
||||
RETURN_OBJ(cloned, wasm_func_delete)
|
||||
}
|
||||
|
||||
+uint32_t
|
||||
+wasm_func_set_gas(wasm_func_t *func, uint32_t gas)
|
||||
+{
|
||||
+ if(!func) return 0;
|
||||
+
|
||||
+ func->gas = gas;
|
||||
+ return gas;
|
||||
+}
|
||||
+
|
||||
own wasm_functype_t *
|
||||
wasm_func_type(const wasm_func_t *func)
|
||||
{
|
||||
@@ -4998,11 +5008,11 @@ wasm_instance_new_with_args_ex(wasm_store_t *store, const wasm_module_t *module,
|
||||
goto failed;
|
||||
}
|
||||
|
||||
+ WASMModuleInstance *wasm_module_inst = NULL;
|
||||
/* create the c-api func import list */
|
||||
#if WASM_ENABLE_INTERP != 0
|
||||
if (instance->inst_comm_rt->module_type == Wasm_Module_Bytecode) {
|
||||
- WASMModuleInstance *wasm_module_inst =
|
||||
- (WASMModuleInstance *)instance->inst_comm_rt;
|
||||
+ wasm_module_inst = (WASMModuleInstance *)instance->inst_comm_rt;
|
||||
p_func_imports = &(wasm_module_inst->c_api_func_imports);
|
||||
import_func_count = MODULE_INTERP(module)->import_function_count;
|
||||
}
|
||||
@@ -5052,6 +5062,13 @@ wasm_instance_new_with_args_ex(wasm_store_t *store, const wasm_module_t *module,
|
||||
}
|
||||
bh_assert(func_import->func_ptr_linked);
|
||||
|
||||
+ // fill gas
|
||||
+ if(wasm_module_inst) {
|
||||
+ WASMFunctionInstance *fi = wasm_module_inst->e->functions + func_host->func_idx_rt;
|
||||
+ if(fi) fi->gas = func_host->gas;
|
||||
+ }
|
||||
+
|
||||
+
|
||||
func_import++;
|
||||
}
|
||||
|
||||
@@ -5389,3 +5406,8 @@ wasm_instance_get_wasm_func_exec_time(const wasm_instance_t *instance,
|
||||
return -1.0;
|
||||
#endif
|
||||
}
|
||||
+
|
||||
+wasm_exec_env_t wasm_instance_exec_env(const wasm_instance_t *instance)
|
||||
+{
|
||||
+ return wasm_runtime_get_exec_env_singleton(instance->inst_comm_rt);
|
||||
+}
|
||||
diff --git a/core/iwasm/common/wasm_c_api_internal.h b/core/iwasm/common/wasm_c_api_internal.h
|
||||
index 49a17a96..19a85980 100644
|
||||
--- a/core/iwasm/common/wasm_c_api_internal.h
|
||||
+++ b/core/iwasm/common/wasm_c_api_internal.h
|
||||
@@ -142,6 +142,10 @@ struct wasm_func_t {
|
||||
void (*finalizer)(void *);
|
||||
} cb_env;
|
||||
} u;
|
||||
+
|
||||
+ // gas cost for import func
|
||||
+ uint32 gas;
|
||||
+
|
||||
/*
|
||||
* an index in both functions runtime instance lists
|
||||
* of interpreter mode and aot mode
|
||||
diff --git a/core/iwasm/common/wasm_exec_env.c b/core/iwasm/common/wasm_exec_env.c
|
||||
index 47752950..5f26d886 100644
|
||||
--- a/core/iwasm/common/wasm_exec_env.c
|
||||
+++ b/core/iwasm/common/wasm_exec_env.c
|
||||
@@ -86,7 +86,7 @@ wasm_exec_env_create_internal(struct WASMModuleInstanceCommon *module_inst,
|
||||
#endif
|
||||
|
||||
#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
- exec_env->instructions_to_execute = -1;
|
||||
+ exec_env->instructions_to_execute = INT64_MAX;
|
||||
#endif
|
||||
|
||||
return exec_env;
|
||||
diff --git a/core/iwasm/common/wasm_exec_env.h b/core/iwasm/common/wasm_exec_env.h
|
||||
index 5d80312f..b2ecce2e 100644
|
||||
--- a/core/iwasm/common/wasm_exec_env.h
|
||||
+++ b/core/iwasm/common/wasm_exec_env.h
|
||||
@@ -89,7 +89,7 @@ typedef struct WASMExecEnv {
|
||||
|
||||
#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
/* instructions to execute */
|
||||
- int instructions_to_execute;
|
||||
+ int64 instructions_to_execute;
|
||||
#endif
|
||||
|
||||
#if WASM_ENABLE_FAST_JIT != 0
|
||||
diff --git a/core/iwasm/common/wasm_native.c b/core/iwasm/common/wasm_native.c
|
||||
index 060bb2c3..9221c36a 100644
|
||||
--- a/core/iwasm/common/wasm_native.c
|
||||
+++ b/core/iwasm/common/wasm_native.c
|
||||
@@ -180,9 +180,9 @@ native_symbol_cmp(const void *native_symbol1, const void *native_symbol2)
|
||||
((const NativeSymbol *)native_symbol2)->symbol);
|
||||
}
|
||||
|
||||
-static void *
|
||||
+static NativeSymbol *
|
||||
lookup_symbol(NativeSymbol *native_symbols, uint32 n_native_symbols,
|
||||
- const char *symbol, const char **p_signature, void **p_attachment)
|
||||
+ const char *symbol)
|
||||
{
|
||||
NativeSymbol *native_symbol, key = { 0 };
|
||||
|
||||
@@ -190,9 +190,7 @@ lookup_symbol(NativeSymbol *native_symbols, uint32 n_native_symbols,
|
||||
|
||||
if ((native_symbol = bsearch(&key, native_symbols, n_native_symbols,
|
||||
sizeof(NativeSymbol), native_symbol_cmp))) {
|
||||
- *p_signature = native_symbol->signature;
|
||||
- *p_attachment = native_symbol->attachment;
|
||||
- return native_symbol->func_ptr;
|
||||
+ return native_symbol;
|
||||
}
|
||||
|
||||
return NULL;
|
||||
@@ -205,25 +203,36 @@ lookup_symbol(NativeSymbol *native_symbols, uint32 n_native_symbols,
|
||||
void *
|
||||
wasm_native_resolve_symbol(const char *module_name, const char *field_name,
|
||||
const WASMFuncType *func_type,
|
||||
- const char **p_signature, void **p_attachment,
|
||||
+ const char **p_signature, void **p_attachment, uint32_t *gas,
|
||||
bool *p_call_conv_raw)
|
||||
{
|
||||
NativeSymbolsNode *node, *node_next;
|
||||
const char *signature = NULL;
|
||||
void *func_ptr = NULL, *attachment = NULL;
|
||||
+ NativeSymbol *native_symbol = NULL;
|
||||
|
||||
node = g_native_symbols_list;
|
||||
while (node) {
|
||||
node_next = node->next;
|
||||
if (!strcmp(node->module_name, module_name)) {
|
||||
- if ((func_ptr =
|
||||
+ if ((native_symbol =
|
||||
lookup_symbol(node->native_symbols, node->n_native_symbols,
|
||||
- field_name, &signature, &attachment))
|
||||
+ field_name))
|
||||
|| (field_name[0] == '_'
|
||||
- && (func_ptr = lookup_symbol(
|
||||
+ && (native_symbol = lookup_symbol(
|
||||
node->native_symbols, node->n_native_symbols,
|
||||
- field_name + 1, &signature, &attachment))))
|
||||
- break;
|
||||
+ field_name + 1))))
|
||||
+ {
|
||||
+ func_ptr = native_symbol->func_ptr;
|
||||
+ if(func_ptr)
|
||||
+ {
|
||||
+ if(gas)
|
||||
+ *gas = native_symbol->gas;
|
||||
+ signature = native_symbol->signature;
|
||||
+ attachment = native_symbol->attachment;
|
||||
+ break;
|
||||
+ }
|
||||
+ }
|
||||
}
|
||||
node = node_next;
|
||||
}
|
||||
diff --git a/core/iwasm/common/wasm_native.h b/core/iwasm/common/wasm_native.h
|
||||
index 9a6afee1..0fe4739f 100644
|
||||
--- a/core/iwasm/common/wasm_native.h
|
||||
+++ b/core/iwasm/common/wasm_native.h
|
||||
@@ -52,7 +52,7 @@ wasm_native_lookup_libc_builtin_global(const char *module_name,
|
||||
void *
|
||||
wasm_native_resolve_symbol(const char *module_name, const char *field_name,
|
||||
const WASMFuncType *func_type,
|
||||
- const char **p_signature, void **p_attachment,
|
||||
+ const char **p_signature, void **p_attachment, uint32_t *gas,
|
||||
bool *p_call_conv_raw);
|
||||
|
||||
bool
|
||||
diff --git a/core/iwasm/common/wasm_runtime_common.c b/core/iwasm/common/wasm_runtime_common.c
|
||||
index 943b46fc..d026777e 100644
|
||||
--- a/core/iwasm/common/wasm_runtime_common.c
|
||||
+++ b/core/iwasm/common/wasm_runtime_common.c
|
||||
@@ -2344,10 +2344,18 @@ wasm_runtime_access_exce_check_guard_page()
|
||||
#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
void
|
||||
wasm_runtime_set_instruction_count_limit(WASMExecEnv *exec_env,
|
||||
- int instructions_to_execute)
|
||||
+ int64 instructions_to_execute)
|
||||
{
|
||||
+ if(instructions_to_execute == -1)
|
||||
+ instructions_to_execute = INT64_MAX;
|
||||
exec_env->instructions_to_execute = instructions_to_execute;
|
||||
}
|
||||
+
|
||||
+int64
|
||||
+wasm_runtime_get_instruction_count_limit(WASMExecEnv *exec_env)
|
||||
+{
|
||||
+ return exec_env->instructions_to_execute;
|
||||
+}
|
||||
#endif
|
||||
|
||||
WASMFuncType *
|
||||
@@ -7412,7 +7420,7 @@ wasm_runtime_is_import_func_linked(const char *module_name,
|
||||
const char *func_name)
|
||||
{
|
||||
return wasm_native_resolve_symbol(module_name, func_name, NULL, NULL, NULL,
|
||||
- NULL);
|
||||
+ NULL, NULL);
|
||||
}
|
||||
|
||||
bool
|
||||
@@ -7869,13 +7877,14 @@ wasm_runtime_get_module_name(wasm_module_t module)
|
||||
bool
|
||||
wasm_runtime_detect_native_stack_overflow(WASMExecEnv *exec_env)
|
||||
{
|
||||
+#if WASM_DISABLE_STACK_HW_BOUND_CHECK == 0
|
||||
uint8 *boundary = exec_env->native_stack_boundary;
|
||||
RECORD_STACK_USAGE(exec_env, (uint8 *)&boundary);
|
||||
if (boundary == NULL) {
|
||||
/* the platform doesn't support os_thread_get_stack_boundary */
|
||||
return true;
|
||||
}
|
||||
-#if defined(OS_ENABLE_HW_BOUND_CHECK) && WASM_DISABLE_STACK_HW_BOUND_CHECK == 0
|
||||
+#if defined(OS_ENABLE_HW_BOUND_CHECK)
|
||||
uint32 page_size = os_getpagesize();
|
||||
uint32 guard_page_count = STACK_OVERFLOW_CHECK_GUARD_PAGE_COUNT;
|
||||
boundary = boundary + page_size * guard_page_count;
|
||||
@@ -7885,6 +7894,7 @@ wasm_runtime_detect_native_stack_overflow(WASMExecEnv *exec_env)
|
||||
"native stack overflow");
|
||||
return false;
|
||||
}
|
||||
+#endif
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -7907,7 +7917,7 @@ wasm_runtime_detect_native_stack_overflow_size(WASMExecEnv *exec_env,
|
||||
boundary = boundary - WASM_STACK_GUARD_SIZE + requested_size;
|
||||
if ((uint8 *)&boundary < boundary) {
|
||||
wasm_runtime_set_exception(wasm_runtime_get_module_inst(exec_env),
|
||||
- "native stack overflow");
|
||||
+ "native s stack overflow");
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
diff --git a/core/iwasm/common/wasm_runtime_common.h b/core/iwasm/common/wasm_runtime_common.h
|
||||
index 324620be..54155a0c 100644
|
||||
--- a/core/iwasm/common/wasm_runtime_common.h
|
||||
+++ b/core/iwasm/common/wasm_runtime_common.h
|
||||
@@ -833,7 +833,10 @@ wasm_runtime_set_native_stack_boundary(WASMExecEnv *exec_env,
|
||||
/* See wasm_export.h for description */
|
||||
WASM_RUNTIME_API_EXTERN void
|
||||
wasm_runtime_set_instruction_count_limit(WASMExecEnv *exec_env,
|
||||
- int instructions_to_execute);
|
||||
+ int64 instructions_to_execute);
|
||||
+WASM_RUNTIME_API_EXTERN int64
|
||||
+wasm_runtime_get_instruction_count_limit(WASMExecEnv *exec_env);
|
||||
+
|
||||
#endif
|
||||
|
||||
#if WASM_CONFIGURABLE_BOUNDS_CHECKS != 0
|
||||
diff --git a/core/iwasm/include/lib_export.h b/core/iwasm/include/lib_export.h
|
||||
index 0ca668f5..93bcf807 100644
|
||||
--- a/core/iwasm/include/lib_export.h
|
||||
+++ b/core/iwasm/include/lib_export.h
|
||||
@@ -24,6 +24,8 @@ typedef struct NativeSymbol {
|
||||
/* attachment which can be retrieved in native API by
|
||||
calling wasm_runtime_get_function_attachment(exec_env) */
|
||||
void *attachment;
|
||||
+ // gas cost for import func
|
||||
+ uint32_t gas;
|
||||
} NativeSymbol;
|
||||
|
||||
/* clang-format off */
|
||||
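With the extra `gas` member added to `NativeSymbol` above, embedders that register host functions by hand supply a fifth initializer (the libc-builtin and libc-wasi wrappers in this patch pass `0`). Below is a minimal sketch of registering one host function with a non-zero per-call gas cost; the module name `env`, the function `host_double`, and the cost `25` are illustrative only, while `wasm_runtime_register_natives` is the stock WAMR registration call.

```cpp
#include "wasm_export.h"  // patched WAMR headers assumed on the include path

// Host implementation: signature "(i)i" -- takes an i32, returns an i32.
// The leading wasm_exec_env_t parameter is WAMR's calling convention and is
// not part of the signature string.
static int32_t
host_double(wasm_exec_env_t exec_env, int32_t x)
{
    (void)exec_env;
    return 2 * x;
}

// Fields: symbol, func_ptr, signature, attachment, gas (added by this patch).
static NativeSymbol host_symbols[] = {
    {"host_double", (void*)host_double, "(i)i", NULL, 25},
};

static bool
register_host_module()
{
    return wasm_runtime_register_natives(
        "env", host_symbols, sizeof(host_symbols) / sizeof(host_symbols[0]));
}
```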
diff --git a/core/iwasm/include/wasm_c_api.h b/core/iwasm/include/wasm_c_api.h
|
||||
index 241a0eec..1141744c 100644
|
||||
--- a/core/iwasm/include/wasm_c_api.h
|
||||
+++ b/core/iwasm/include/wasm_c_api.h
|
||||
@@ -19,8 +19,10 @@
|
||||
#if defined(_MSC_BUILD)
|
||||
#if defined(COMPILING_WASM_RUNTIME_API)
|
||||
#define WASM_API_EXTERN __declspec(dllexport)
|
||||
-#else
|
||||
+#elif defined(_DLL)
|
||||
#define WASM_API_EXTERN __declspec(dllimport)
|
||||
+#else
|
||||
+#define WASM_API_EXTERN
|
||||
#endif
|
||||
#else
|
||||
#define WASM_API_EXTERN
|
||||
@@ -592,6 +594,8 @@ WASM_API_EXTERN size_t wasm_func_result_arity(const wasm_func_t*);
|
||||
WASM_API_EXTERN own wasm_trap_t* wasm_func_call(
|
||||
const wasm_func_t*, const wasm_val_vec_t* args, wasm_val_vec_t* results);
|
||||
|
||||
+WASM_API_EXTERN own uint32_t wasm_func_set_gas(wasm_func_t*, uint32_t);
|
||||
+
|
||||
|
||||
// Global Instances
|
||||
|
||||
@@ -701,6 +705,11 @@ WASM_API_EXTERN double wasm_instance_sum_wasm_exec_time(const wasm_instance_t*);
|
||||
// func_name. If the function is not found, return 0.
|
||||
WASM_API_EXTERN double wasm_instance_get_wasm_func_exec_time(const wasm_instance_t*, const char *);
|
||||
|
||||
+struct WASMExecEnv;
|
||||
+typedef struct WASMExecEnv *wasm_exec_env_t;
|
||||
+
|
||||
+WASM_API_EXTERN wasm_exec_env_t wasm_instance_exec_env(const wasm_instance_t*);
|
||||
+
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// Convenience
|
||||
|
||||
diff --git a/core/iwasm/include/wasm_export.h b/core/iwasm/include/wasm_export.h
|
||||
index 81efb8f6..f752a970 100644
|
||||
--- a/core/iwasm/include/wasm_export.h
|
||||
+++ b/core/iwasm/include/wasm_export.h
|
||||
@@ -20,8 +20,10 @@
|
||||
#if defined(_MSC_BUILD)
|
||||
#if defined(COMPILING_WASM_RUNTIME_API)
|
||||
#define WASM_RUNTIME_API_EXTERN __declspec(dllexport)
|
||||
-#else
|
||||
+#elif defined(_DLL)
|
||||
#define WASM_RUNTIME_API_EXTERN __declspec(dllimport)
|
||||
+#else
|
||||
+#define WASM_RUNTIME_API_EXTERN
|
||||
#endif
|
||||
#elif defined(__GNUC__) || defined(__clang__)
|
||||
#define WASM_RUNTIME_API_EXTERN __attribute__((visibility("default")))
|
||||
@@ -1874,7 +1876,14 @@ wasm_runtime_set_native_stack_boundary(wasm_exec_env_t exec_env,
|
||||
*/
|
||||
WASM_RUNTIME_API_EXTERN void
|
||||
wasm_runtime_set_instruction_count_limit(wasm_exec_env_t exec_env,
|
||||
- int instruction_count);
|
||||
+ int64_t instruction_count);
|
||||
+
|
||||
+WASM_RUNTIME_API_EXTERN int64_t
|
||||
+wasm_runtime_get_instruction_count_limit(wasm_exec_env_t exec_env);
|
||||
+
|
||||
+WASM_RUNTIME_API_EXTERN void
|
||||
+wasm_runtime_set_instruction_schedule(wasm_exec_env_t exec_env,
|
||||
+ int64_t const *instructions_schedule);
|
||||
|
||||
/**
|
||||
* Dump runtime memory consumption, including:
|
||||
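Taken together, the `wasm_export.h` additions above let an embedder budget an execution and read back what remains. A rough usage sketch follows; `inst`, `func`, the budget value, and the helper name are illustrative, and the instance/function are assumed to have been created through the usual WAMR calls (`wasm_runtime_instantiate`, `wasm_runtime_lookup_function`). Only the instruction-limit getter/setter come from this patch; the other calls are stock WAMR API.

```cpp
#include "wasm_export.h"  // patched WAMR headers assumed

// Run `func` with an instruction budget and report whether it completed.
static bool
callWithBudget(
    wasm_module_inst_t inst,
    wasm_function_inst_t func,
    int64_t budget)
{
    wasm_exec_env_t env = wasm_runtime_get_exec_env_singleton(inst);
    if (!env)
        return false;

    // Budget is now a 64-bit count; -1 maps to "effectively unlimited".
    wasm_runtime_set_instruction_count_limit(env, budget);

    uint32_t args[1] = {0};
    bool const ok = wasm_runtime_call_wasm(env, func, 0, args);

    // The interpreter writes the remaining budget back on exit, so the
    // difference includes any per-import gas charged for native calls.
    int64_t const left = wasm_runtime_get_instruction_count_limit(env);
    int64_t const used = budget - left;
    (void)used;

    if (!ok)
    {
        // A depleted budget surfaces as the "instruction limit exceeded" trap.
        char const* ex = wasm_runtime_get_exception(inst);
        (void)ex;
    }
    return ok;
}
```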
diff --git a/core/iwasm/interpreter/wasm.h b/core/iwasm/interpreter/wasm.h
|
||||
index 0dd73958..b7cad5f2 100644
|
||||
--- a/core/iwasm/interpreter/wasm.h
|
||||
+++ b/core/iwasm/interpreter/wasm.h
|
||||
@@ -617,6 +617,9 @@ typedef struct WASMFunctionImport {
|
||||
WASMModule *import_module;
|
||||
WASMFunction *import_func_linked;
|
||||
#endif
|
||||
+ // gas cost for import func
|
||||
+ uint32 gas;
|
||||
+
|
||||
} WASMFunctionImport;
|
||||
|
||||
#if WASM_ENABLE_TAGS != 0
|
||||
diff --git a/core/iwasm/interpreter/wasm_interp_classic.c b/core/iwasm/interpreter/wasm_interp_classic.c
|
||||
index edc473f2..55071613 100644
|
||||
--- a/core/iwasm/interpreter/wasm_interp_classic.c
|
||||
+++ b/core/iwasm/interpreter/wasm_interp_classic.c
|
||||
@@ -1547,13 +1547,14 @@ get_global_addr(uint8 *global_data, WASMGlobalInstance *global)
|
||||
}
|
||||
|
||||
#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
-#define CHECK_INSTRUCTION_LIMIT() \
|
||||
- if (instructions_left == 0) { \
|
||||
- wasm_set_exception(module, "instruction limit exceeded"); \
|
||||
- goto got_exception; \
|
||||
- } \
|
||||
- else if (instructions_left > 0) \
|
||||
- instructions_left--;
|
||||
+#define CHECK_INSTRUCTION_LIMIT() \
|
||||
+ do { \
|
||||
+ --instructions_left; \
|
||||
+ if (instructions_left < 0) { \
|
||||
+ wasm_set_exception(module, "instruction limit exceeded"); \
|
||||
+ goto got_exception; \
|
||||
+ } \
|
||||
+ } while (0)
|
||||
#else
|
||||
#define CHECK_INSTRUCTION_LIMIT() (void)0
|
||||
#endif
|
||||
@@ -1603,10 +1604,9 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
uint32 cache_index, type_index, param_cell_num, cell_num;
|
||||
|
||||
#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
- int instructions_left = -1;
|
||||
- if (exec_env) {
|
||||
+ int64 instructions_left = INT64_MAX;
|
||||
+ if (exec_env)
|
||||
instructions_left = exec_env->instructions_to_execute;
|
||||
- }
|
||||
#endif
|
||||
|
||||
#if WASM_ENABLE_EXCE_HANDLING != 0
|
||||
@@ -6849,6 +6849,11 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
FREE_FRAME(exec_env, frame);
|
||||
wasm_exec_env_set_cur_frame(exec_env, prev_frame);
|
||||
|
||||
+#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
+ if(exec_env)
|
||||
+ exec_env->instructions_to_execute = instructions_left;
|
||||
+#endif
|
||||
+
|
||||
if (!prev_frame->ip) {
|
||||
/* Called from native. */
|
||||
return;
|
||||
@@ -6889,6 +6894,12 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
}
|
||||
#endif
|
||||
SYNC_ALL_TO_FRAME();
|
||||
+
|
||||
+#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
+ if(exec_env)
|
||||
+ exec_env->instructions_to_execute = instructions_left;
|
||||
+#endif
|
||||
+
|
||||
return;
|
||||
|
||||
#if WASM_ENABLE_LABELS_AS_VALUES == 0
|
||||
diff --git a/core/iwasm/interpreter/wasm_interp_fast.c b/core/iwasm/interpreter/wasm_interp_fast.c
|
||||
index 36d4538f..4d03603e 100644
|
||||
--- a/core/iwasm/interpreter/wasm_interp_fast.c
|
||||
+++ b/core/iwasm/interpreter/wasm_interp_fast.c
|
||||
@@ -90,14 +90,14 @@ typedef float64 CellType_F64;
|
||||
} while (0)
|
||||
|
||||
#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
-#define CHECK_INSTRUCTION_LIMIT() \
|
||||
- if (instructions_left == 0) { \
|
||||
- wasm_set_exception(module, "instruction limit exceeded"); \
|
||||
- goto got_exception; \
|
||||
- } \
|
||||
- else if (instructions_left > 0) \
|
||||
- instructions_left--;
|
||||
-
|
||||
+#define CHECK_INSTRUCTION_LIMIT() \
|
||||
+ do { \
|
||||
+ --instructions_left; \
|
||||
+ if (instructions_left < 0) { \
|
||||
+ wasm_set_exception(module, "instruction limit exceeded"); \
|
||||
+ goto got_exception; \
|
||||
+ } \
|
||||
+ } while (0)
|
||||
#else
|
||||
#define CHECK_INSTRUCTION_LIMIT() (void)0
|
||||
#endif
|
||||
@@ -1438,7 +1438,6 @@ wasm_interp_dump_op_count()
|
||||
do { \
|
||||
const void *p_label_addr = *(void **)frame_ip; \
|
||||
frame_ip += sizeof(void *); \
|
||||
- CHECK_INSTRUCTION_LIMIT(); \
|
||||
goto *p_label_addr; \
|
||||
} while (0)
|
||||
#else
|
||||
@@ -1450,7 +1449,6 @@ wasm_interp_dump_op_count()
|
||||
/* int32 relative offset was emitted in 64-bit target */ \
|
||||
p_label_addr = label_base + (int32)LOAD_U32_WITH_2U16S(frame_ip); \
|
||||
frame_ip += sizeof(int32); \
|
||||
- CHECK_INSTRUCTION_LIMIT(); \
|
||||
goto *p_label_addr; \
|
||||
} while (0)
|
||||
#else
|
||||
@@ -1461,17 +1459,18 @@ wasm_interp_dump_op_count()
|
||||
/* uint32 label address was emitted in 32-bit target */ \
|
||||
p_label_addr = (void *)(uintptr_t)LOAD_U32_WITH_2U16S(frame_ip); \
|
||||
frame_ip += sizeof(int32); \
|
||||
- CHECK_INSTRUCTION_LIMIT(); \
|
||||
goto *p_label_addr; \
|
||||
} while (0)
|
||||
#endif
|
||||
#endif /* end of WASM_CPU_SUPPORTS_UNALIGNED_ADDR_ACCESS */
|
||||
-#define HANDLE_OP_END() FETCH_OPCODE_AND_DISPATCH()
|
||||
+#define HANDLE_OP_END() CHECK_INSTRUCTION_LIMIT(); FETCH_OPCODE_AND_DISPATCH()
|
||||
|
||||
#else /* else of WASM_ENABLE_LABELS_AS_VALUES */
|
||||
|
||||
#define HANDLE_OP(opcode) case opcode:
|
||||
-#define HANDLE_OP_END() continue
|
||||
+#define HANDLE_OP_END() \
|
||||
+ CHECK_INSTRUCTION_LIMIT(); \
|
||||
+ continue
|
||||
|
||||
#endif /* end of WASM_ENABLE_LABELS_AS_VALUES */
|
||||
|
||||
@@ -1540,10 +1539,9 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
uint8 opcode = 0, local_type, *global_addr;
|
||||
|
||||
#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
- int instructions_left = -1;
|
||||
- if (exec_env) {
|
||||
+ int64 instructions_left = INT64_MAX;
|
||||
+ if (exec_env)
|
||||
instructions_left = exec_env->instructions_to_execute;
|
||||
- }
|
||||
#endif
|
||||
#if !defined(OS_ENABLE_HW_BOUND_CHECK) \
|
||||
|| WASM_CPU_SUPPORTS_UNALIGNED_ADDR_ACCESS == 0
|
||||
@@ -4012,7 +4010,15 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
}
|
||||
|
||||
/* constant instructions */
|
||||
+#ifdef ENABLE_FLOAT_POINT
|
||||
HANDLE_OP(WASM_OP_F64_CONST)
|
||||
+#else
|
||||
+ HANDLE_OP(WASM_OP_F64_CONST)
|
||||
+ {
|
||||
+ wasm_set_exception(module, "opcode disabled");
|
||||
+ goto got_exception;
|
||||
+ }
|
||||
+#endif
|
||||
HANDLE_OP(WASM_OP_I64_CONST)
|
||||
{
|
||||
uint8 *orig_ip = frame_ip;
|
||||
@@ -4025,7 +4031,15 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
|
||||
+#ifdef ENABLE_FLOAT_POINT
|
||||
+ HANDLE_OP(WASM_OP_F32_CONST)
|
||||
+#else
|
||||
HANDLE_OP(WASM_OP_F32_CONST)
|
||||
+ {
|
||||
+ wasm_set_exception(module, "opcode disabled");
|
||||
+ goto got_exception;
|
||||
+ }
|
||||
+#endif
|
||||
HANDLE_OP(WASM_OP_I32_CONST)
|
||||
{
|
||||
uint8 *orig_ip = frame_ip;
|
||||
@@ -4172,6 +4186,8 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
|
||||
+#ifdef ENABLE_FLOAT_POINT
|
||||
+
|
||||
/* comparison instructions of f32 */
|
||||
HANDLE_OP(WASM_OP_F32_EQ)
|
||||
{
|
||||
@@ -4245,6 +4261,24 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
DEF_OP_CMP(float64, F64, >=);
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
+#else
|
||||
+ HANDLE_OP(WASM_OP_F32_EQ)
|
||||
+ HANDLE_OP(WASM_OP_F32_NE)
|
||||
+ HANDLE_OP(WASM_OP_F32_LT)
|
||||
+ HANDLE_OP(WASM_OP_F32_GT)
|
||||
+ HANDLE_OP(WASM_OP_F32_LE)
|
||||
+ HANDLE_OP(WASM_OP_F32_GE)
|
||||
+ HANDLE_OP(WASM_OP_F64_EQ)
|
||||
+ HANDLE_OP(WASM_OP_F64_NE)
|
||||
+ HANDLE_OP(WASM_OP_F64_LT)
|
||||
+ HANDLE_OP(WASM_OP_F64_GT)
|
||||
+ HANDLE_OP(WASM_OP_F64_LE)
|
||||
+ HANDLE_OP(WASM_OP_F64_GE)
|
||||
+ {
|
||||
+ wasm_set_exception(module, "opcode disabled");
|
||||
+ goto got_exception;
|
||||
+ }
|
||||
+#endif
|
||||
|
||||
/* numeric instructions of i32 */
|
||||
HANDLE_OP(WASM_OP_I32_CLZ)
|
||||
@@ -4573,6 +4607,8 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
|
||||
+#ifdef ENABLE_FLOAT_POINT
|
||||
+
|
||||
/* numeric instructions of f32 */
|
||||
HANDLE_OP(WASM_OP_F32_ABS)
|
||||
{
|
||||
@@ -4784,6 +4820,43 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
|
||||
+#else
|
||||
+
|
||||
+ HANDLE_OP(WASM_OP_F32_ABS)
|
||||
+ HANDLE_OP(WASM_OP_F32_NEG)
|
||||
+ HANDLE_OP(WASM_OP_F32_CEIL)
|
||||
+ HANDLE_OP(WASM_OP_F32_FLOOR)
|
||||
+ HANDLE_OP(WASM_OP_F32_TRUNC)
|
||||
+ HANDLE_OP(WASM_OP_F32_NEAREST)
|
||||
+ HANDLE_OP(WASM_OP_F32_SQRT)
|
||||
+ HANDLE_OP(WASM_OP_F32_ADD)
|
||||
+ HANDLE_OP(WASM_OP_F32_SUB)
|
||||
+ HANDLE_OP(WASM_OP_F32_MUL)
|
||||
+ HANDLE_OP(WASM_OP_F32_DIV)
|
||||
+ HANDLE_OP(WASM_OP_F32_MIN)
|
||||
+ HANDLE_OP(WASM_OP_F32_MAX)
|
||||
+ HANDLE_OP(WASM_OP_F32_COPYSIGN)
|
||||
+ HANDLE_OP(WASM_OP_F64_ABS)
|
||||
+ HANDLE_OP(WASM_OP_F64_NEG)
|
||||
+ HANDLE_OP(WASM_OP_F64_CEIL)
|
||||
+ HANDLE_OP(WASM_OP_F64_FLOOR)
|
||||
+ HANDLE_OP(WASM_OP_F64_TRUNC)
|
||||
+ HANDLE_OP(WASM_OP_F64_NEAREST)
|
||||
+ HANDLE_OP(WASM_OP_F64_SQRT)
|
||||
+ HANDLE_OP(WASM_OP_F64_ADD)
|
||||
+ HANDLE_OP(WASM_OP_F64_SUB)
|
||||
+ HANDLE_OP(WASM_OP_F64_MUL)
|
||||
+ HANDLE_OP(WASM_OP_F64_DIV)
|
||||
+ HANDLE_OP(WASM_OP_F64_MIN)
|
||||
+ HANDLE_OP(WASM_OP_F64_MAX)
|
||||
+ HANDLE_OP(WASM_OP_F64_COPYSIGN)
|
||||
+ {
|
||||
+ wasm_set_exception(module, "opcode disabled");
|
||||
+ goto got_exception;
|
||||
+ }
|
||||
+
|
||||
+#endif //ENABLE_FLOAT_POINT
|
||||
+
|
||||
/* conversions of i32 */
|
||||
HANDLE_OP(WASM_OP_I32_WRAP_I64)
|
||||
{
|
||||
@@ -4792,6 +4865,8 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
|
||||
+
|
||||
+#ifdef ENABLE_FLOAT_POINT
|
||||
HANDLE_OP(WASM_OP_I32_TRUNC_S_F32)
|
||||
{
|
||||
/* We don't use INT32_MIN/INT32_MAX/UINT32_MIN/UINT32_MAX,
|
||||
@@ -4821,6 +4896,19 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
|
||||
+#else
|
||||
+
|
||||
+ HANDLE_OP(WASM_OP_I32_TRUNC_S_F32)
|
||||
+ HANDLE_OP(WASM_OP_I32_TRUNC_U_F32)
|
||||
+ HANDLE_OP(WASM_OP_I32_TRUNC_S_F64)
|
||||
+ HANDLE_OP(WASM_OP_I32_TRUNC_U_F64)
|
||||
+ {
|
||||
+ wasm_set_exception(module, "opcode disabled");
|
||||
+ goto got_exception;
|
||||
+ }
|
||||
+
|
||||
+#endif //ENABLE_FLOAT_POINT
|
||||
+
|
||||
/* conversions of i64 */
|
||||
HANDLE_OP(WASM_OP_I64_EXTEND_S_I32)
|
||||
{
|
||||
@@ -4834,6 +4922,8 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
|
||||
+#ifdef ENABLE_FLOAT_POINT
|
||||
+
|
||||
HANDLE_OP(WASM_OP_I64_TRUNC_S_F32)
|
||||
{
|
||||
DEF_OP_TRUNC_F32(-9223373136366403584.0f,
|
||||
@@ -4937,6 +5027,32 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
HANDLE_OP_END();
|
||||
}
|
||||
|
||||
+#else
|
||||
+ HANDLE_OP(WASM_OP_I64_TRUNC_S_F32)
|
||||
+ HANDLE_OP(WASM_OP_I64_TRUNC_U_F32)
|
||||
+ HANDLE_OP(WASM_OP_I64_TRUNC_S_F64)
|
||||
+ HANDLE_OP(WASM_OP_I64_TRUNC_U_F64)
|
||||
+ HANDLE_OP(WASM_OP_F32_CONVERT_S_I32)
|
||||
+ HANDLE_OP(WASM_OP_F32_CONVERT_U_I32)
|
||||
+ HANDLE_OP(WASM_OP_F32_CONVERT_S_I64)
|
||||
+ HANDLE_OP(WASM_OP_F32_CONVERT_U_I64)
|
||||
+ HANDLE_OP(WASM_OP_F32_DEMOTE_F64)
|
||||
+ HANDLE_OP(WASM_OP_F64_CONVERT_S_I32)
|
||||
+ HANDLE_OP(WASM_OP_F64_CONVERT_U_I32)
|
||||
+ HANDLE_OP(WASM_OP_F64_CONVERT_S_I64)
|
||||
+ HANDLE_OP(WASM_OP_F64_CONVERT_U_I64)
|
||||
+ HANDLE_OP(WASM_OP_F64_PROMOTE_F32)
|
||||
+ HANDLE_OP(WASM_OP_I32_REINTERPRET_F32)
|
||||
+ HANDLE_OP(WASM_OP_F32_REINTERPRET_I32)
|
||||
+ HANDLE_OP(WASM_OP_I64_REINTERPRET_F64)
|
||||
+ HANDLE_OP(WASM_OP_F64_REINTERPRET_I64)
|
||||
+ {
|
||||
+ wasm_set_exception(module, "opcode disabled");
|
||||
+ goto got_exception;
|
||||
+ }
|
||||
+
|
||||
+#endif //ENABLE_FLOAT_POINT
|
||||
+
|
||||
HANDLE_OP(EXT_OP_COPY_STACK_TOP)
|
||||
{
|
||||
addr1 = GET_OFFSET();
|
||||
@@ -5108,6 +5224,8 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
{
|
||||
GET_OPCODE();
|
||||
switch (opcode) {
|
||||
+
|
||||
+#ifdef ENABLE_FLOAT_POINT
|
||||
case WASM_OP_I32_TRUNC_SAT_S_F32:
|
||||
DEF_OP_TRUNC_SAT_F32(-2147483904.0f, 2147483648.0f,
|
||||
true, true);
|
||||
@@ -5140,6 +5258,9 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
DEF_OP_TRUNC_SAT_F64(-1.0, 18446744073709551616.0,
|
||||
false, false);
|
||||
break;
|
||||
+
|
||||
+#endif
|
||||
+
|
||||
#if WASM_ENABLE_BULK_MEMORY != 0
|
||||
case WASM_OP_MEMORY_INIT:
|
||||
{
|
||||
@@ -7672,6 +7793,7 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
{
|
||||
wasm_interp_call_func_native(module, exec_env, cur_func,
|
||||
prev_frame);
|
||||
+ instructions_left -= cur_func->gas;
|
||||
}
|
||||
|
||||
#if WASM_ENABLE_TAIL_CALL != 0 || WASM_ENABLE_GC != 0
|
||||
@@ -7784,6 +7906,11 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
FREE_FRAME(exec_env, frame);
|
||||
wasm_exec_env_set_cur_frame(exec_env, (WASMRuntimeFrame *)prev_frame);
|
||||
|
||||
+#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
+ if (exec_env)
|
||||
+ exec_env->instructions_to_execute = instructions_left;
|
||||
+#endif
|
||||
+
|
||||
if (!prev_frame->ip)
|
||||
/* Called from native. */
|
||||
return;
|
||||
@@ -7812,6 +7939,10 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
|
||||
|
||||
got_exception:
|
||||
SYNC_ALL_TO_FRAME();
|
||||
+#if WASM_ENABLE_INSTRUCTION_METERING != 0
|
||||
+ if (exec_env)
|
||||
+ exec_env->instructions_to_execute = instructions_left;
|
||||
+#endif
|
||||
return;
|
||||
|
||||
#if WASM_ENABLE_LABELS_AS_VALUES == 0
|
||||
diff --git a/core/iwasm/interpreter/wasm_mini_loader.c b/core/iwasm/interpreter/wasm_mini_loader.c
|
||||
index 771538a1..d6e6a6b8 100644
|
||||
--- a/core/iwasm/interpreter/wasm_mini_loader.c
|
||||
+++ b/core/iwasm/interpreter/wasm_mini_loader.c
|
||||
@@ -805,6 +805,7 @@ load_function_import(const uint8 **p_buf, const uint8 *buf_end,
|
||||
const char *linked_signature = NULL;
|
||||
void *linked_attachment = NULL;
|
||||
bool linked_call_conv_raw = false;
|
||||
+ uint32_t gas = 0;
|
||||
|
||||
read_leb_uint32(p, p_end, declare_type_index);
|
||||
*p_buf = p;
|
||||
@@ -816,7 +817,7 @@ load_function_import(const uint8 **p_buf, const uint8 *buf_end,
|
||||
/* check built-in modules */
|
||||
linked_func = wasm_native_resolve_symbol(
|
||||
sub_module_name, function_name, declare_func_type, &linked_signature,
|
||||
- &linked_attachment, &linked_call_conv_raw);
|
||||
+ &linked_attachment, &gas, &linked_call_conv_raw);
|
||||
|
||||
function->module_name = (char *)sub_module_name;
|
||||
function->field_name = (char *)function_name;
|
||||
@@ -825,6 +826,7 @@ load_function_import(const uint8 **p_buf, const uint8 *buf_end,
|
||||
function->signature = linked_signature;
|
||||
function->attachment = linked_attachment;
|
||||
function->call_conv_raw = linked_call_conv_raw;
|
||||
+ function->gas = gas;
|
||||
return true;
|
||||
}
|
||||
|
||||
diff --git a/core/iwasm/interpreter/wasm_runtime.c b/core/iwasm/interpreter/wasm_runtime.c
|
||||
index b4aa483d..2d74e469 100644
|
||||
--- a/core/iwasm/interpreter/wasm_runtime.c
|
||||
+++ b/core/iwasm/interpreter/wasm_runtime.c
|
||||
@@ -168,7 +168,7 @@ wasm_resolve_import_func(const WASMModule *module, WASMFunctionImport *function)
|
||||
#endif
|
||||
function->func_ptr_linked = wasm_native_resolve_symbol(
|
||||
function->module_name, function->field_name, function->func_type,
|
||||
- &function->signature, &function->attachment, &function->call_conv_raw);
|
||||
+ &function->signature, &function->attachment, &function->gas, &function->call_conv_raw);
|
||||
|
||||
if (function->func_ptr_linked) {
|
||||
return true;
|
||||
@@ -820,6 +820,7 @@ functions_instantiate(const WASMModule *module, WASMModuleInstance *module_inst,
|
||||
function->param_count =
|
||||
(uint16)function->u.func_import->func_type->param_count;
|
||||
function->param_types = function->u.func_import->func_type->types;
|
||||
+ function->gas = import->u.function.gas;
|
||||
function->local_cell_num = 0;
|
||||
function->local_count = 0;
|
||||
function->local_types = NULL;
|
||||
diff --git a/core/iwasm/interpreter/wasm_runtime.h b/core/iwasm/interpreter/wasm_runtime.h
|
||||
index 16c670f0..5ddac567 100644
|
||||
--- a/core/iwasm/interpreter/wasm_runtime.h
|
||||
+++ b/core/iwasm/interpreter/wasm_runtime.h
|
||||
@@ -237,6 +237,10 @@ struct WASMFunctionInstance {
|
||||
WASMFunctionImport *func_import;
|
||||
WASMFunction *func;
|
||||
} u;
|
||||
+
|
||||
+ // gas cost for import func
|
||||
+ uint32 gas;
|
||||
+
|
||||
#if WASM_ENABLE_MULTI_MODULE != 0
|
||||
WASMModuleInstance *import_module_inst;
|
||||
WASMFunctionInstance *import_func_inst;
|
||||
diff --git a/core/iwasm/libraries/libc-builtin/libc_builtin_wrapper.c b/core/iwasm/libraries/libc-builtin/libc_builtin_wrapper.c
|
||||
index a68c0749..cafb6915 100644
|
||||
--- a/core/iwasm/libraries/libc-builtin/libc_builtin_wrapper.c
|
||||
+++ b/core/iwasm/libraries/libc-builtin/libc_builtin_wrapper.c
|
||||
@@ -1038,16 +1038,16 @@ print_f64_wrapper(wasm_exec_env_t exec_env, double f64)
|
||||
|
||||
/* clang-format off */
|
||||
#define REG_NATIVE_FUNC(func_name, signature) \
|
||||
- { #func_name, func_name##_wrapper, signature, NULL }
|
||||
+ { #func_name, func_name##_wrapper, signature, NULL, 0 }
|
||||
/* clang-format on */
|
||||
|
||||
static NativeSymbol native_symbols_libc_builtin[] = {
|
||||
REG_NATIVE_FUNC(printf, "($*)i"),
|
||||
REG_NATIVE_FUNC(sprintf, "($$*)i"),
|
||||
REG_NATIVE_FUNC(snprintf, "(*~$*)i"),
|
||||
- { "vprintf", printf_wrapper, "($*)i", NULL },
|
||||
- { "vsprintf", sprintf_wrapper, "($$*)i", NULL },
|
||||
- { "vsnprintf", snprintf_wrapper, "(*~$*)i", NULL },
|
||||
+ { "vprintf", printf_wrapper, "($*)i", NULL, 0 },
|
||||
+ { "vsprintf", sprintf_wrapper, "($$*)i", NULL, 0 },
|
||||
+ { "vsnprintf", snprintf_wrapper, "(*~$*)i", NULL, 0 },
|
||||
REG_NATIVE_FUNC(puts, "($)i"),
|
||||
REG_NATIVE_FUNC(putchar, "(i)i"),
|
||||
REG_NATIVE_FUNC(memcmp, "(**~)i"),
|
||||
diff --git a/core/iwasm/libraries/libc-wasi/libc_wasi_wrapper.c b/core/iwasm/libraries/libc-wasi/libc_wasi_wrapper.c
|
||||
index f7dfea0b..c01e80a9 100644
|
||||
--- a/core/iwasm/libraries/libc-wasi/libc_wasi_wrapper.c
|
||||
+++ b/core/iwasm/libraries/libc-wasi/libc_wasi_wrapper.c
|
||||
@@ -2269,7 +2269,7 @@ wasi_sched_yield(wasm_exec_env_t exec_env)
|
||||
|
||||
/* clang-format off */
|
||||
#define REG_NATIVE_FUNC(func_name, signature) \
|
||||
- { #func_name, wasi_##func_name, signature, NULL }
|
||||
+ { #func_name, wasi_##func_name, signature, NULL, 0 }
|
||||
/* clang-format on */
|
||||
|
||||
static NativeSymbol native_symbols_libc_wasi[] = {
|
||||
diff --git a/core/shared/platform/include/platform_wasi_types.h b/core/shared/platform/include/platform_wasi_types.h
|
||||
index ac1a95ea..e23b500e 100644
|
||||
--- a/core/shared/platform/include/platform_wasi_types.h
|
||||
+++ b/core/shared/platform/include/platform_wasi_types.h
|
||||
@@ -36,7 +36,11 @@ extern "C" {
|
||||
#if WASM_ENABLE_UVWASI != 0 || WASM_ENABLE_LIBC_WASI == 0
|
||||
#define assert_wasi_layout(expr, message) /* nothing */
|
||||
#else
|
||||
-#define assert_wasi_layout(expr, message) _Static_assert(expr, message)
|
||||
+ #ifndef _MSC_VER
|
||||
+ #define assert_wasi_layout(expr, message) _Static_assert(expr, message)
|
||||
+ #else
|
||||
+ #define assert_wasi_layout(expr, message) static_assert(expr, message)
|
||||
+ #endif
|
||||
#endif
|
||||
|
||||
assert_wasi_layout(_Alignof(int8_t) == 1, "non-wasi data layout");
|
||||
@@ -654,14 +654,12 @@ SharedWeakUnion<T>::convertToWeak()
|
||||
break;
|
||||
case destroy:
|
||||
// We just added a weak ref. How could we destroy?
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::SharedWeakUnion::convertToWeak : destroying freshly "
|
||||
"added ref");
|
||||
delete p;
|
||||
unsafeSetRawPtr(nullptr);
|
||||
return true; // Should never happen
|
||||
// LCOV_EXCL_STOP
|
||||
case partialDestroy:
|
||||
// This is a weird case. We just converted the last strong
|
||||
// pointer to a weak pointer.
|
||||
|
||||
@@ -360,6 +360,10 @@ abs(Number x) noexcept
|
||||
Number
|
||||
power(Number const& f, unsigned n);
|
||||
|
||||
// logarithm with base 10
|
||||
Number
|
||||
lg(Number const& value);
|
||||
|
||||
// Returns f^(1/d)
|
||||
// Uses Newton–Raphson iterations until the result stops changing
|
||||
// to find the root of the polynomial g(x) = x^d - f
|
||||
|
||||
@@ -170,6 +170,9 @@ public:
|
||||
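For reference, a minimal sketch of the iteration the comment above describes, written on `double` rather than `ripple::Number`; the function name and iteration cap are illustrative. The scheme is Newton-Raphson on g(x) = x^d - f, i.e. x_{n+1} = x_n - (x_n^d - f) / (d * x_n^(d-1)), stopping once the iterate no longer changes.

```cpp
#include <algorithm>
#include <cmath>
#include <stdexcept>

// Returns f^(1/d) by Newton-Raphson on g(x) = x^d - f.
double
nthRoot(double f, unsigned d)
{
    if (d == 0 || f < 0)
        throw std::domain_error("nthRoot: invalid arguments");
    if (f == 0)
        return 0;
    double x = std::max(f, 1.0);  // start at or above the true root
    for (int i = 0; i < 200; ++i)  // cap iterations for safety
    {
        double const xd1 = std::pow(x, static_cast<double>(d - 1));  // x^(d-1)
        double const next = x - (x * xd1 - f) / (d * xd1);           // Newton step
        if (next == x)
            break;  // result stopped changing
        x = next;
    }
    return x;
}
```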
bool
|
||||
retrieve(key_type const& key, T& data);
|
||||
|
||||
mutex_type&
|
||||
peekMutex();
|
||||
|
||||
std::vector<key_type>
|
||||
getKeys() const;
|
||||
|
||||
|
||||
@@ -668,6 +668,29 @@ TaggedCache<
|
||||
return true;
|
||||
}
|
||||
|
||||
template <
|
||||
class Key,
|
||||
class T,
|
||||
bool IsKeyCache,
|
||||
class SharedWeakUnionPointer,
|
||||
class SharedPointerType,
|
||||
class Hash,
|
||||
class KeyEqual,
|
||||
class Mutex>
|
||||
inline auto
|
||||
TaggedCache<
|
||||
Key,
|
||||
T,
|
||||
IsKeyCache,
|
||||
SharedWeakUnionPointer,
|
||||
SharedPointerType,
|
||||
Hash,
|
||||
KeyEqual,
|
||||
Mutex>::peekMutex() -> mutex_type&
|
||||
{
|
||||
return m_mutex;
|
||||
}
|
||||
|
||||
template <
|
||||
class Key,
|
||||
class T,
|
||||
|
||||
@@ -94,11 +94,7 @@ hash_append(Hasher& h, beast::IP::Address const& addr) noexcept
|
||||
else if (addr.is_v6())
|
||||
hash_append(h, addr.to_v6().to_bytes());
|
||||
else
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE("beast::hash_append : invalid address type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
} // namespace beast
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ public:
|
||||
* without formatting (not human friendly).
|
||||
*
|
||||
* The JSON document is written in a single line. It is not intended for 'human'
|
||||
* consumption, but may be useful to support feature such as RPC where bandwidth
|
||||
* consumption, but may be useful to support feature such as RPC where bandwith
|
||||
* is limited. \sa Reader, Value
|
||||
*/
|
||||
|
||||
|
||||
@@ -284,14 +284,12 @@ public:
|
||||
{
|
||||
if (key.type != ltOFFER)
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::ApplyView::dirAppend : only Offers are appended to "
|
||||
"book directories");
|
||||
// Only Offers are appended to book directories. Call dirInsert()
|
||||
// instead
|
||||
return std::nullopt;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
return dirAdd(true, directory, key.key, describe);
|
||||
}
|
||||
|
||||
@@ -74,6 +74,18 @@ public:
|
||||
deliver_ = amount;
|
||||
}
|
||||
|
||||
void
|
||||
setGasUsed(std::optional<std::uint32_t> const gasUsed)
|
||||
{
|
||||
gasUsed_ = gasUsed;
|
||||
}
|
||||
|
||||
void
|
||||
setWasmReturnCode(std::int32_t const wasmReturnCode)
|
||||
{
|
||||
wasmReturnCode_ = wasmReturnCode;
|
||||
}
|
||||
|
||||
/** Get the number of modified entries
|
||||
*/
|
||||
std::size_t
|
||||
@@ -92,6 +104,8 @@ public:
|
||||
|
||||
private:
|
||||
std::optional<STAmount> deliver_;
|
||||
std::optional<std::uint32_t> gasUsed_;
|
||||
std::optional<std::int32_t> wasmReturnCode_;
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -1,138 +0,0 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2012, 2013 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef RIPPLE_APP_LEDGER_INDEX_MAP_H_INCLUDED
|
||||
#define RIPPLE_APP_LEDGER_INDEX_MAP_H_INCLUDED
|
||||
|
||||
#include <algorithm>
|
||||
#include <mutex>
|
||||
#include <unordered_map>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
template <class Key, class Mapped>
|
||||
class LedgerIndexMap
|
||||
{
|
||||
public:
|
||||
LedgerIndexMap() = default;
|
||||
explicit LedgerIndexMap(std::size_t reserve_capacity)
|
||||
{
|
||||
data_.reserve(reserve_capacity);
|
||||
}
|
||||
|
||||
LedgerIndexMap(LedgerIndexMap const&) = delete;
|
||||
LedgerIndexMap&
|
||||
operator=(LedgerIndexMap const&) = delete;
|
||||
LedgerIndexMap(LedgerIndexMap&&) = delete;
|
||||
LedgerIndexMap&
|
||||
operator=(LedgerIndexMap&&) = delete;
|
||||
|
||||
Mapped&
|
||||
operator[](Key const& k)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
return data_[k];
|
||||
}
|
||||
|
||||
Mapped&
|
||||
operator[](Key&& k)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
return data_[std::move(k)];
|
||||
}
|
||||
|
||||
[[nodiscard]] Mapped*
|
||||
get(Key const& k)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
auto it = data_.find(k);
|
||||
return it == data_.end() ? nullptr : &it->second;
|
||||
}
|
||||
|
||||
[[nodiscard]] Mapped const*
|
||||
get(Key const& k) const
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
auto it = data_.find(k);
|
||||
return it == data_.end() ? nullptr : &it->second;
|
||||
}
|
||||
|
||||
template <class... Args>
|
||||
Mapped&
|
||||
put(Key const& k, Args&&... args)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
auto [it, inserted] = data_.try_emplace(k, std::forward<Args>(args)...);
|
||||
if (!inserted)
|
||||
it->second = Mapped(std::forward<Args>(args)...);
|
||||
return it->second;
|
||||
}
|
||||
|
||||
bool
|
||||
contains(Key const& k) const
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
return data_.find(k) != data_.end();
|
||||
}
|
||||
|
||||
std::size_t
|
||||
size() const noexcept
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
return data_.size();
|
||||
}
|
||||
|
||||
bool
|
||||
empty() const noexcept
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
return data_.empty();
|
||||
}
|
||||
|
||||
void
|
||||
reserve(std::size_t n)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
data_.reserve(n);
|
||||
}
|
||||
|
||||
void
|
||||
rehash(std::size_t n)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
data_.rehash(n);
|
||||
}
|
||||
|
||||
std::size_t
|
||||
eraseBefore(Key const& cutoff)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
auto const before = data_.size();
|
||||
std::erase_if(data_, [&](auto const& kv) { return kv.first < cutoff; });
|
||||
return before - data_.size();
|
||||
}
|
||||
|
||||
private:
|
||||
std::unordered_map<Key, Mapped> data_;
|
||||
mutable std::mutex mutex_;
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif // RIPPLE_APP_LEDGER_INDEX_MAP_H_INCLUDED
|
||||
@@ -72,6 +72,8 @@ public:
|
||||
TER ter,
|
||||
std::optional<STAmount> const& deliver,
|
||||
std::optional<uint256 const> const& parentBatchId,
|
||||
std::optional<std::uint32_t> const& gasUsed,
|
||||
std::optional<std::int32_t> const& wasmReturnCode,
|
||||
bool isDryRun,
|
||||
beast::Journal j);
|
||||
|
||||
|
||||
@@ -24,6 +24,8 @@
|
||||
|
||||
namespace ripple {
|
||||
|
||||
constexpr std::uint32_t MICRO_DROPS_PER_DROP{1'000'000};
|
||||
|
||||
/** Reflects the fee settings for a particular ledger.
|
||||
|
||||
The fees are always the same for any transactions applied
|
||||
@@ -34,6 +36,10 @@ struct Fees
|
||||
XRPAmount base{0}; // Reference tx cost (drops)
|
||||
XRPAmount reserve{0}; // Reserve base (drops)
|
||||
XRPAmount increment{0}; // Reserve increment (drops)
|
||||
std::uint32_t extensionComputeLimit{
|
||||
0}; // Extension compute limit (instructions)
|
||||
std::uint32_t extensionSizeLimit{0}; // Extension size limit (bytes)
|
||||
std::uint32_t gasPrice{0}; // price of WASM gas (micro-drops)
|
||||
|
||||
explicit Fees() = default;
|
||||
Fees(Fees const&) = default;
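
Since Fees::gasPrice above is quoted in micro-drops, converting consumed gas into an XRP fee presumably involves a division by MICRO_DROPS_PER_DROP; the helper below and its round-up rule are assumptions for illustration only.

#include <cstdint>

constexpr std::uint32_t MICRO_DROPS_PER_DROP{1'000'000};

// Hypothetical helper: charge for gas at a price quoted in micro-drops,
// rounding any fractional drop up.
constexpr std::uint64_t
gasFeeInDrops(std::uint64_t gasUsed, std::uint32_t gasPriceMicroDrops)
{
    std::uint64_t const microDrops = gasUsed * gasPriceMicroDrops;
    return (microDrops + MICRO_DROPS_PER_DROP - 1) / MICRO_DROPS_PER_DROP;
}

// 20'000 gas at 50 micro-drops each is exactly 1'000'000 micro-drops = 1 drop.
static_assert(gasFeeInDrops(20'000, 50) == 1);
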
@@ -58,6 +58,13 @@ private:
|
||||
normalize();
|
||||
|
||||
public:
|
||||
/* The range for the mantissa when normalized */
|
||||
static std::int64_t constexpr minMantissa = 1000000000000000ull;
|
||||
static std::int64_t constexpr maxMantissa = 9999999999999999ull;
|
||||
/* The range for the exponent when normalized */
|
||||
static int constexpr minExponent = -96;
|
||||
static int constexpr maxExponent = 80;
|
||||
|
||||
IOUAmount() = default;
|
||||
explicit IOUAmount(Number const& other);
|
||||
IOUAmount(beast::Zero);
|
||||
|
||||
@@ -231,6 +231,12 @@ page(Keylet const& root, std::uint64_t index = 0) noexcept
|
||||
Keylet
|
||||
escrow(AccountID const& src, std::uint32_t seq) noexcept;
|
||||
|
||||
inline Keylet
|
||||
escrow(uint256 const& key) noexcept
|
||||
{
|
||||
return {ltESCROW, key};
|
||||
}
|
||||
|
||||
/** A PaymentChannel */
|
||||
Keylet
|
||||
payChan(AccountID const& src, AccountID const& dst, std::uint32_t seq) noexcept;
|
||||
|
||||
@@ -188,14 +188,14 @@ enum LedgerSpecificFlags {
|
||||
lsfMPTCanTransfer = 0x00000020,
|
||||
lsfMPTCanClawback = 0x00000040,
|
||||
|
||||
lsmfMPTCanMutateCanLock = 0x00000002,
|
||||
lsmfMPTCanMutateRequireAuth = 0x00000004,
|
||||
lsmfMPTCanMutateCanEscrow = 0x00000008,
|
||||
lsmfMPTCanMutateCanTrade = 0x00000010,
|
||||
lsmfMPTCanMutateCanTransfer = 0x00000020,
|
||||
lsmfMPTCanMutateCanClawback = 0x00000040,
|
||||
lsmfMPTCanMutateMetadata = 0x00010000,
|
||||
lsmfMPTCanMutateTransferFee = 0x00020000,
|
||||
lmfMPTCanMutateCanLock = 0x00000002,
|
||||
lmfMPTCanMutateRequireAuth = 0x00000004,
|
||||
lmfMPTCanMutateCanEscrow = 0x00000008,
|
||||
lmfMPTCanMutateCanTrade = 0x00000010,
|
||||
lmfMPTCanMutateCanTransfer = 0x00000020,
|
||||
lmfMPTCanMutateCanClawback = 0x00000040,
|
||||
lmfMPTCanMutateMetadata = 0x00010000,
|
||||
lmfMPTCanMutateTransferFee = 0x00020000,
|
||||
|
||||
// ltMPTOKEN
|
||||
lsfMPTAuthorized = 0x00000002,
|
||||
|
||||
@@ -74,9 +74,6 @@ public:
|
||||
Permission&
|
||||
operator=(Permission const&) = delete;
|
||||
|
||||
std::optional<std::string>
|
||||
getPermissionName(std::uint32_t const value) const;
|
||||
|
||||
std::optional<std::uint32_t>
|
||||
getGranularValue(std::string const& name) const;
|
||||
|
||||
@@ -86,9 +83,6 @@ public:
|
||||
std::optional<TxType>
|
||||
getGranularTxType(GranularPermissionType const& gpType) const;
|
||||
|
||||
std::optional<std::reference_wrapper<uint256 const>> const
|
||||
getTxFeature(TxType txType) const;
|
||||
|
||||
bool
|
||||
isDelegatable(std::uint32_t const& permissionValue, Rules const& rules)
|
||||
const;
|
||||
|
||||
@@ -55,10 +55,7 @@ std::size_t constexpr oversizeMetaDataCap = 5200;
|
||||
/** The maximum number of entries per directory page */
|
||||
std::size_t constexpr dirNodeMaxEntries = 32;
|
||||
|
||||
/** The maximum number of pages allowed in a directory
|
||||
|
||||
Made obsolete by fixDirectoryLimit amendment.
|
||||
*/
|
||||
/** The maximum number of pages allowed in a directory */
|
||||
std::uint64_t constexpr dirNodeMaxPages = 262144;
|
||||
|
||||
/** The maximum number of items in an NFT page */
|
||||
@@ -181,6 +178,13 @@ std::size_t constexpr permissionMaxSize = 10;
|
||||
/** The maximum number of transactions that can be in a batch. */
|
||||
std::size_t constexpr maxBatchTxCount = 8;
|
||||
|
||||
/** The maximum length of a Data field in an Escrow object that can be updated by
|
||||
* Wasm code */
|
||||
std::size_t constexpr maxWasmDataLength = 4 * 1024;
|
||||
|
||||
/** The maximum length of a parameter passed from Wasm code */
|
||||
std::size_t constexpr maxWasmParamLength = 1024;
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif
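
For scale, the directory constants above multiply out as follows; this is back-of-the-envelope arithmetic added for illustration, not code from the branch.

// 32 entries per page and 262144 (2^18) pages give 8,388,608 (2^23) entries
// in a directory that has grown to the dirNodeMaxPages cap.
static_assert(32ull * 262144ull == 8'388'608ull);
static_assert(262144 == (1 << 18) && 8'388'608 == (1 << 23));
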
@@ -21,7 +21,6 @@
|
||||
#define RIPPLE_PROTOCOL_PUBLICKEY_H_INCLUDED
|
||||
|
||||
#include <xrpl/basics/Slice.h>
|
||||
#include <xrpl/beast/net/IPEndpoint.h>
|
||||
#include <xrpl/protocol/KeyType.h>
|
||||
#include <xrpl/protocol/STExchange.h>
|
||||
#include <xrpl/protocol/UintTypes.h>
|
||||
@@ -265,24 +264,6 @@ calcNodeID(PublicKey const&);
|
||||
AccountID
|
||||
calcAccountID(PublicKey const& pk);
|
||||
|
||||
inline std::string
|
||||
getFingerprint(
|
||||
beast::IP::Endpoint const& address,
|
||||
std::optional<PublicKey> const& publicKey = std::nullopt,
|
||||
std::optional<std::string> const& id = std::nullopt)
|
||||
{
|
||||
std::stringstream ss;
|
||||
ss << "IP Address: " << address;
|
||||
if (publicKey.has_value())
|
||||
{
|
||||
ss << ", Public Key: " << toBase58(TokenType::NodePublic, *publicKey);
|
||||
}
|
||||
if (id.has_value())
|
||||
{
|
||||
ss << ", Id: " << id.value();
|
||||
}
|
||||
return ss.str();
|
||||
}
|
||||
} // namespace ripple
|
||||
|
||||
//------------------------------------------------------------------------------
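
A stand-alone rendition of the string format produced by getFingerprint above, using plain strings in place of beast::IP::Endpoint and PublicKey; the endpoint and key literals are made up for illustration.

#include <iostream>
#include <sstream>
#include <string>

int main()
{
    std::ostringstream ss;
    std::string const address = "203.0.113.7:51235";  // example endpoint
    std::string const nodePublic = "n9K...";           // abbreviated node key
    ss << "IP Address: " << address << ", Public Key: " << nodePublic;
    std::cout << ss.str() << '\n';
    // Prints: IP Address: 203.0.113.7:51235, Public Key: n9K...
}
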
@@ -709,6 +709,37 @@ canAdd(STAmount const& amt1, STAmount const& amt2);
|
||||
bool
|
||||
canSubtract(STAmount const& amt1, STAmount const& amt2);
|
||||
|
||||
// Since `canonicalize` does not have access to a ledger, this is needed to put
|
||||
// the low-level routine stAmountCanonicalize on an amendment switch. Only
|
||||
// transactions need to use this switchover. Outside of a transaction it's safe
|
||||
// to unconditionally use the new behavior.
|
||||
|
||||
bool
|
||||
getSTAmountCanonicalizeSwitchover();
|
||||
|
||||
void
|
||||
setSTAmountCanonicalizeSwitchover(bool v);
|
||||
|
||||
/** RAII class to set and restore the STAmount canonicalize switchover.
|
||||
*/
|
||||
|
||||
class STAmountSO
|
||||
{
|
||||
public:
|
||||
explicit STAmountSO(bool v) : saved_(getSTAmountCanonicalizeSwitchover())
|
||||
{
|
||||
setSTAmountCanonicalizeSwitchover(v);
|
||||
}
|
||||
|
||||
~STAmountSO()
|
||||
{
|
||||
setSTAmountCanonicalizeSwitchover(saved_);
|
||||
}
|
||||
|
||||
private:
|
||||
bool saved_;
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
//------------------------------------------------------------------------------
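
The save/restore pattern of STAmountSO above, mirrored as a self-contained sketch with a hypothetical global switch, to show what the constructor/destructor pair buys at a call site.

#include <cassert>

namespace sketch {
bool switchover = false;

class SwitchoverGuard
{
public:
    explicit SwitchoverGuard(bool v) : saved_(switchover)
    {
        switchover = v;
    }
    ~SwitchoverGuard()
    {
        switchover = saved_;
    }

private:
    bool saved_;
};
}  // namespace sketch

int main()
{
    {
        sketch::SwitchoverGuard guard(true);  // like STAmountSO{true}
        assert(sketch::switchover);
    }
    assert(!sketch::switchover);  // previous value restored on scope exit
}
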
@@ -82,6 +82,7 @@ using STUInt32 = STInteger<std::uint32_t>;
|
||||
using STUInt64 = STInteger<std::uint64_t>;
|
||||
|
||||
using STInt32 = STInteger<std::int32_t>;
|
||||
// using STInt64 = STInteger<std::int64_t>; // Can be added if&when needed
|
||||
|
||||
template <typename Integer>
|
||||
inline STInteger<Integer>::STInteger(Integer v) : value_(v)
|
||||
|
||||
@@ -244,9 +244,6 @@ public:
|
||||
getFieldPathSet(SField const& field) const;
|
||||
STVector256 const&
|
||||
getFieldV256(SField const& field) const;
|
||||
// If not found, returns an object constructed with the given field
|
||||
STObject
|
||||
getFieldObject(SField const& field) const;
|
||||
STArray const&
|
||||
getFieldArray(SField const& field) const;
|
||||
STCurrency const&
|
||||
@@ -393,8 +390,6 @@ public:
|
||||
setFieldV256(SField const& field, STVector256 const& v);
|
||||
void
|
||||
setFieldArray(SField const& field, STArray const& v);
|
||||
void
|
||||
setFieldObject(SField const& field, STObject const& v);
|
||||
|
||||
template <class Tag>
|
||||
void
|
||||
|
||||
@@ -54,6 +54,34 @@ public:
|
||||
Json::Value error;
|
||||
};
|
||||
|
||||
/** Holds the serialized result of parsing an input JSON array.
|
||||
This does validation and checking on the provided JSON.
|
||||
*/
|
||||
class STParsedJSONArray
|
||||
{
|
||||
public:
|
||||
/** Parses and creates an STParsedJSON array.
|
||||
The result of the parsing is stored in array and error.
|
||||
Exceptions:
|
||||
Does not throw.
|
||||
@param name The name of the JSON field, used in diagnostics.
|
||||
@param json The JSON-RPC to parse.
|
||||
*/
|
||||
STParsedJSONArray(std::string const& name, Json::Value const& json);
|
||||
|
||||
STParsedJSONArray() = delete;
|
||||
STParsedJSONArray(STParsedJSONArray const&) = delete;
|
||||
STParsedJSONArray&
|
||||
operator=(STParsedJSONArray const&) = delete;
|
||||
~STParsedJSONArray() = default;
|
||||
|
||||
/** The STArray if the parse was successful. */
|
||||
std::optional<STArray> array;
|
||||
|
||||
/** On failure, an appropriate set of error values. */
|
||||
Json::Value error;
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif
|
||||
|
||||
@@ -87,14 +87,8 @@ public:
|
||||
getFullText() const override;
|
||||
|
||||
// Outer transaction functions / signature functions.
|
||||
static Blob
|
||||
getSignature(STObject const& sigObject);
|
||||
|
||||
Blob
|
||||
getSignature() const
|
||||
{
|
||||
return getSignature(*this);
|
||||
}
|
||||
getSignature() const;
|
||||
|
||||
uint256
|
||||
getSigningHash() const;
|
||||
@@ -125,20 +119,13 @@ public:
|
||||
getJson(JsonOptions options, bool binary) const;
|
||||
|
||||
void
|
||||
sign(
|
||||
PublicKey const& publicKey,
|
||||
SecretKey const& secretKey,
|
||||
std::optional<std::reference_wrapper<SField const>> signatureTarget =
|
||||
{});
|
||||
|
||||
enum class RequireFullyCanonicalSig : bool { no, yes };
|
||||
sign(PublicKey const& publicKey, SecretKey const& secretKey);
|
||||
|
||||
/** Check the signature.
|
||||
@param requireCanonicalSig If `true`, check that the signature is fully
|
||||
canonical. If `false`, only check that the signature is valid.
|
||||
@param rules The current ledger rules.
|
||||
@return `true` if valid signature. If invalid, the error message string.
|
||||
*/
|
||||
enum class RequireFullyCanonicalSig : bool { no, yes };
|
||||
|
||||
Expected<void, std::string>
|
||||
checkSign(RequireFullyCanonicalSig requireCanonicalSig, Rules const& rules)
|
||||
const;
|
||||
@@ -163,34 +150,17 @@ public:
|
||||
char status,
|
||||
std::string const& escapedMetaData) const;
|
||||
|
||||
std::vector<uint256> const&
|
||||
std::vector<uint256>
|
||||
getBatchTransactionIDs() const;
|
||||
|
||||
private:
|
||||
/** Check the signature.
|
||||
@param requireCanonicalSig If `true`, check that the signature is fully
|
||||
canonical. If `false`, only check that the signature is valid.
|
||||
@param rules The current ledger rules.
|
||||
@param sigObject Reference to object that contains the signature fields.
|
||||
Will be *this more often than not.
|
||||
@return `true` if valid signature. If invalid, the error message string.
|
||||
*/
|
||||
Expected<void, std::string>
|
||||
checkSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
Rules const& rules,
|
||||
STObject const& sigObject) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkSingleSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
STObject const& sigObject) const;
|
||||
checkSingleSign(RequireFullyCanonicalSig requireCanonicalSig) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkMultiSign(
|
||||
RequireFullyCanonicalSig requireCanonicalSig,
|
||||
Rules const& rules,
|
||||
STObject const& sigObject) const;
|
||||
Rules const& rules) const;
|
||||
|
||||
Expected<void, std::string>
|
||||
checkBatchSingleSign(
|
||||
@@ -209,7 +179,7 @@ private:
|
||||
move(std::size_t n, void* buf) override;
|
||||
|
||||
friend class detail::STVar;
|
||||
mutable std::vector<uint256> batchTxnIds_;
|
||||
mutable std::vector<uint256> batch_txn_ids_;
|
||||
};
|
||||
|
||||
bool
|
||||
|
||||
@@ -73,8 +73,14 @@ static constexpr std::uint32_t XRP_LEDGER_EARLIEST_SEQ{32570u};
|
||||
* used in asserts and tests. */
|
||||
static constexpr std::uint32_t XRP_LEDGER_EARLIEST_FEES{562177u};
|
||||
|
||||
/** The minimum amount of support an amendment should have. */
|
||||
constexpr std::ratio<80, 100> amendmentMajorityCalcThreshold;
|
||||
/** The minimum amount of support an amendment should have.
|
||||
|
||||
@note This value is used by legacy code and will become obsolete
|
||||
once the fixAmendmentMajorityCalc amendment activates.
|
||||
*/
|
||||
constexpr std::ratio<204, 256> preFixAmendmentMajorityCalcThreshold;
|
||||
|
||||
constexpr std::ratio<80, 100> postFixAmendmentMajorityCalcThreshold;
|
||||
|
||||
/** The minimum amount of time an amendment must hold a majority */
|
||||
constexpr std::chrono::seconds const defaultAmendmentMajorityTime = weeks{2};
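
The two ratios above differ by less than half a percentage point; a quick numeric comparison (the 36-validator example and the round-up assumption are illustrative only, not taken from the code).

#include <ratio>

// 204/256 = 0.796875, just below the 80/100 = 0.80 threshold used once
// fixAmendmentMajorityCalc is active.
static_assert(std::ratio_less<std::ratio<204, 256>, std::ratio<80, 100>>::value);

// With 36 trusted validators, 0.796875 * 36 = 28.6875 and 0.80 * 36 = 28.8,
// so both thresholds end up requiring 29 validations if the required count
// is rounded up.
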
@@ -141,6 +141,8 @@ enum TEMcodes : TERUnderlyingType {
|
||||
temARRAY_TOO_LARGE,
|
||||
temBAD_TRANSFER_FEE,
|
||||
temINVALID_INNER_BATCH,
|
||||
|
||||
temBAD_WASM,
|
||||
};
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
@@ -185,6 +187,8 @@ enum TEFcodes : TERUnderlyingType {
|
||||
tefNO_TICKET,
|
||||
tefNFTOKEN_IS_NOT_TRANSFERABLE,
|
||||
tefINVALID_LEDGER_FIX_TYPE,
|
||||
tefNO_WASM,
|
||||
tefWASM_FIELD_NOT_INCLUDED,
|
||||
};
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
@@ -225,9 +229,8 @@ enum TERcodes : TERUnderlyingType {
|
||||
terQUEUED, // Transaction is being held in TxQ until fee drops
|
||||
terPRE_TICKET, // Ticket is not yet in ledger but might be on its way
|
||||
terNO_AMM, // AMM doesn't exist for the asset pair
|
||||
terADDRESS_COLLISION, // Failed to allocate AccountID when trying to
|
||||
// create a pseudo-account
|
||||
terNO_DELEGATE_PERMISSION, // Delegate does not have permission
|
||||
terADDRESS_COLLISION, // Failed to allocate AccountID when trying to
|
||||
// create a pseudo-account
|
||||
};
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
@@ -362,10 +365,8 @@ enum TECcodes : TERUnderlyingType {
|
||||
tecLIMIT_EXCEEDED = 195,
|
||||
tecPSEUDO_ACCOUNT = 196,
|
||||
tecPRECISION_LOSS = 197,
|
||||
// DEPRECATED: This error code tecNO_DELEGATE_PERMISSION is reserved for
|
||||
// backward compatibility with historical data on non-prod networks, can be
|
||||
// reclaimed after those networks reset.
|
||||
tecNO_DELEGATE_PERMISSION = 198,
|
||||
tecWASM_REJECTED = 199,
|
||||
};
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
@@ -677,8 +678,7 @@ isTerRetry(TER x) noexcept
|
||||
inline bool
|
||||
isTesSuccess(TER x) noexcept
|
||||
{
|
||||
// Makes use of TERSubset::operator bool()
|
||||
return !(x);
|
||||
return (x == tesSUCCESS);
|
||||
}
|
||||
|
||||
inline bool
|
||||
|
||||
@@ -156,14 +156,14 @@ constexpr std::uint32_t const tfMPTokenIssuanceCreateMask =
|
||||
|
||||
// MPTokenIssuanceCreate MutableFlags:
|
||||
// Indicating specific fields or flags may be changed after issuance.
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanLock = lsmfMPTCanMutateCanLock;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateRequireAuth = lsmfMPTCanMutateRequireAuth;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanEscrow = lsmfMPTCanMutateCanEscrow;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTrade = lsmfMPTCanMutateCanTrade;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTransfer = lsmfMPTCanMutateCanTransfer;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanClawback = lsmfMPTCanMutateCanClawback;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateMetadata = lsmfMPTCanMutateMetadata;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateTransferFee = lsmfMPTCanMutateTransferFee;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanLock = lmfMPTCanMutateCanLock;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateRequireAuth = lmfMPTCanMutateRequireAuth;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanEscrow = lmfMPTCanMutateCanEscrow;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTrade = lmfMPTCanMutateCanTrade;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanTransfer = lmfMPTCanMutateCanTransfer;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateCanClawback = lmfMPTCanMutateCanClawback;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateMetadata = lmfMPTCanMutateMetadata;
|
||||
constexpr std::uint32_t const tmfMPTCanMutateTransferFee = lmfMPTCanMutateTransferFee;
|
||||
constexpr std::uint32_t const tmfMPTokenIssuanceCreateMutableMask =
|
||||
~(tmfMPTCanMutateCanLock | tmfMPTCanMutateRequireAuth | tmfMPTCanMutateCanEscrow | tmfMPTCanMutateCanTrade
|
||||
| tmfMPTCanMutateCanTransfer | tmfMPTCanMutateCanClawback | tmfMPTCanMutateMetadata | tmfMPTCanMutateTransferFee);
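
Spelled out numerically (assuming a 32-bit unsigned int and the literal bit values from the lmf/lsmf flag definitions earlier in the diff):

// The eight mutable-flag bits OR together to 0x0003007E, so the
// "no other bits allowed" mask works out to ~0x0003007E == 0xFFFCFF81.
static_assert(
    (0x02u | 0x04u | 0x08u | 0x10u | 0x20u | 0x40u | 0x10000u | 0x20000u) ==
    0x0003007Eu);
static_assert(~0x0003007Eu == 0xFFFCFF81u);
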
@@ -33,35 +33,48 @@ namespace ripple {
|
||||
|
||||
class TxMeta
|
||||
{
|
||||
private:
|
||||
struct CtorHelper
|
||||
{
|
||||
explicit CtorHelper() = default;
|
||||
};
|
||||
template <class T>
|
||||
TxMeta(
|
||||
uint256 const& txID,
|
||||
std::uint32_t ledger,
|
||||
T const& data,
|
||||
CtorHelper);
|
||||
|
||||
public:
|
||||
TxMeta(uint256 const& transactionID, std::uint32_t ledger);
|
||||
TxMeta(uint256 const& txID, std::uint32_t ledger, Blob const&);
|
||||
TxMeta(uint256 const& txID, std::uint32_t ledger, std::string const&);
|
||||
TxMeta(uint256 const& txID, std::uint32_t ledger, STObject const&);
|
||||
|
||||
uint256 const&
|
||||
getTxID() const
|
||||
{
|
||||
return transactionID_;
|
||||
return mTransactionID;
|
||||
}
|
||||
std::uint32_t
|
||||
getLgrSeq() const
|
||||
{
|
||||
return ledgerSeq_;
|
||||
return mLedger;
|
||||
}
|
||||
int
|
||||
getResult() const
|
||||
{
|
||||
return result_;
|
||||
return mResult;
|
||||
}
|
||||
TER
|
||||
getResultTER() const
|
||||
{
|
||||
return TER::fromInt(result_);
|
||||
return TER::fromInt(mResult);
|
||||
}
|
||||
std::uint32_t
|
||||
getIndex() const
|
||||
{
|
||||
return index_;
|
||||
return mIndex;
|
||||
}
|
||||
|
||||
void
|
||||
@@ -88,52 +101,110 @@ public:
|
||||
STArray&
|
||||
getNodes()
|
||||
{
|
||||
return nodes_;
|
||||
return (mNodes);
|
||||
}
|
||||
STArray const&
|
||||
getNodes() const
|
||||
{
|
||||
return nodes_;
|
||||
return (mNodes);
|
||||
}
|
||||
|
||||
void
|
||||
setAdditionalFields(STObject const& obj)
|
||||
setDeliveredAmount(STAmount const& delivered)
|
||||
{
|
||||
if (obj.isFieldPresent(sfDeliveredAmount))
|
||||
deliveredAmount_ = obj.getFieldAmount(sfDeliveredAmount);
|
||||
|
||||
if (obj.isFieldPresent(sfParentBatchID))
|
||||
parentBatchID_ = obj.getFieldH256(sfParentBatchID);
|
||||
mDelivered = delivered;
|
||||
}
|
||||
|
||||
std::optional<STAmount> const&
|
||||
STAmount
|
||||
getDeliveredAmount() const
|
||||
{
|
||||
return deliveredAmount_;
|
||||
XRPL_ASSERT(
|
||||
hasDeliveredAmount(),
|
||||
"ripple::TxMeta::getDeliveredAmount : non-null delivered amount");
|
||||
return *mDelivered;
|
||||
}
|
||||
|
||||
bool
|
||||
hasDeliveredAmount() const
|
||||
{
|
||||
return static_cast<bool>(mDelivered);
|
||||
}
|
||||
|
||||
void
|
||||
setDeliveredAmount(std::optional<STAmount> const& amount)
|
||||
setParentBatchId(uint256 const& parentBatchId)
|
||||
{
|
||||
deliveredAmount_ = amount;
|
||||
parentBatchId_ = parentBatchId;
|
||||
}
|
||||
|
||||
uint256
|
||||
getParentBatchId() const
|
||||
{
|
||||
XRPL_ASSERT(
|
||||
hasParentBatchId(),
|
||||
"ripple::TxMeta::getParentBatchId : non-null batch id");
|
||||
return *parentBatchId_;
|
||||
}
|
||||
|
||||
bool
|
||||
hasParentBatchId() const
|
||||
{
|
||||
return static_cast<bool>(parentBatchId_);
|
||||
}
|
||||
|
||||
void
|
||||
setParentBatchID(std::optional<uint256> const& id)
|
||||
setGasUsed(std::uint32_t const& gasUsed)
|
||||
{
|
||||
parentBatchID_ = id;
|
||||
gasUsed_ = gasUsed;
|
||||
}
|
||||
|
||||
std::uint32_t
|
||||
getGasUsed() const
|
||||
{
|
||||
XRPL_ASSERT(
|
||||
hasGasUsed(),
|
||||
"ripple::TxMeta::getGasUsed : non-null gas used field");
|
||||
return *gasUsed_;
|
||||
}
|
||||
|
||||
bool
|
||||
hasGasUsed() const
|
||||
{
|
||||
return static_cast<bool>(gasUsed_);
|
||||
}
|
||||
|
||||
void
|
||||
setWasmReturnCode(std::int32_t const& wasmReturnCode)
|
||||
{
|
||||
wasmReturnCode_ = wasmReturnCode;
|
||||
}
|
||||
|
||||
std::int32_t
|
||||
getWasmReturnCode() const
|
||||
{
|
||||
XRPL_ASSERT(
|
||||
hasWasmReturnCode(),
|
||||
"ripple::TxMeta::getWasmReturnCode : non-null wasm return code");
|
||||
return *wasmReturnCode_;
|
||||
}
|
||||
|
||||
bool
|
||||
hasWasmReturnCode() const
|
||||
{
|
||||
return static_cast<bool>(wasmReturnCode_);
|
||||
}
|
||||
|
||||
private:
|
||||
uint256 transactionID_;
|
||||
std::uint32_t ledgerSeq_;
|
||||
std::uint32_t index_;
|
||||
int result_;
|
||||
uint256 mTransactionID;
|
||||
std::uint32_t mLedger;
|
||||
std::uint32_t mIndex;
|
||||
int mResult;
|
||||
|
||||
std::optional<STAmount> deliveredAmount_;
|
||||
std::optional<uint256> parentBatchID_;
|
||||
std::optional<STAmount> mDelivered;
|
||||
std::optional<uint256> parentBatchId_;
|
||||
std::optional<std::uint32_t> gasUsed_;
|
||||
std::optional<std::int32_t> wasmReturnCode_;
|
||||
|
||||
STArray nodes_;
|
||||
STArray mNodes;
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -129,12 +129,10 @@ inplace_bigint_div_rem(std::span<uint64_t> numerator, std::uint64_t divisor)
|
||||
{
|
||||
// should never happen, but if it does then it seems natural to define
// a null set of numbers to be zero, so the remainder is also zero.
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::b58_fast::detail::inplace_bigint_div_rem : empty "
|
||||
"numerator");
|
||||
return 0;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
auto to_u128 = [](std::uint64_t high,
|
||||
|
||||
@@ -29,14 +29,16 @@
|
||||
|
||||
// Add new amendments to the top of this list.
|
||||
// Keep it sorted in reverse chronological order.
|
||||
// If you add an amendment here, then do not forget to increment `numFeatures`
|
||||
// in include/xrpl/protocol/Feature.h.
|
||||
|
||||
XRPL_FEATURE(PermissionDelegationV1_1, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (DirectoryLimit, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (IncludeKeyletFields, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(SmartEscrow, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (IncludeKeyletFields, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(DynamicMPT, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (TokenEscrowV1, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (PriceOracleOrder, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (MPTDeliveredAmount, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (DelegateV1_1, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (PriceOracleOrder, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (MPTDeliveredAmount, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (AMMClawbackRounding, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(TokenEscrow, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (EnforceNFTokenTrustlineV2, Supported::yes, VoteBehavior::DefaultNo)
|
||||
@@ -44,8 +46,8 @@ XRPL_FIX (AMMv1_3, Supported::yes, VoteBehavior::DefaultNo
|
||||
XRPL_FEATURE(PermissionedDEX, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(Batch, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(SingleAssetVault, Supported::no, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(PermissionDelegation, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (PayChanCancelAfter, Supported::yes, VoteBehavior::DefaultNo)
|
||||
// Check flags in Credential transactions
|
||||
XRPL_FIX (InvalidTxFlags, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (FrozenLPTokenTransfer, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(DeepFreeze, Supported::yes, VoteBehavior::DefaultNo)
|
||||
@@ -77,6 +79,7 @@ XRPL_FIX (DisallowIncomingV1, Supported::yes, VoteBehavior::DefaultNo
|
||||
XRPL_FEATURE(XChainBridge, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(AMM, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(Clawback, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (ReducedOffersV1, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (NFTokenRemint, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (NonFungibleTokensV1_2, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (UniversalNumber, Supported::yes, VoteBehavior::DefaultNo)
|
||||
@@ -88,17 +91,33 @@ XRPL_FIX (TrustLinesToSelf, Supported::yes, VoteBehavior::DefaultNo
|
||||
XRPL_FEATURE(NonFungibleTokensV1_1, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(ExpandedSignerList, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FEATURE(CheckCashMakesTrustLine, Supported::yes, VoteBehavior::DefaultNo)
|
||||
XRPL_FIX (RmSmallIncreasedQOffers, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (STAmountCanonicalize, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(FlowSortStrands, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(TicketBatch, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(NegativeUNL, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (AmendmentMajorityCalc, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(HardenedValidations, Supported::yes, VoteBehavior::DefaultYes)
|
||||
// fix1781: XRPEndpointSteps should be included in the circular payment check
|
||||
XRPL_FIX (1781, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(RequireFullyCanonicalSig, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (QualityUpperBound, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(DeletableAccounts, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (PayChanRecipientOwnerDir, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (CheckThreading, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (MasterKeyAsRegularKey, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (TakerDryOfferRemoval, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(MultiSignReserve, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1578, Supported::yes, VoteBehavior::DefaultYes)
|
||||
// fix1515: Use liquidity from strands that consume max offers, but mark as dry
|
||||
XRPL_FIX (1515, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(DepositPreauth, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1623, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1543, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1571, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(Checks, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(DepositAuth, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FIX (1513, Supported::yes, VoteBehavior::DefaultYes)
|
||||
XRPL_FEATURE(Flow, Supported::yes, VoteBehavior::DefaultYes)
|
||||
|
||||
// The following amendments are obsolete, but must remain supported
|
||||
@@ -119,38 +138,21 @@ XRPL_FEATURE(CryptoConditionsSuite, Supported::yes, VoteBehavior::Obsolete)
|
||||
|
||||
// The following amendments have been active for at least two years. Their
|
||||
// pre-amendment code has been removed and the identifiers are deprecated.
|
||||
// All known amendments and amendments that may appear in a validated ledger
|
||||
// must be registered either here or above with the "active" amendments
|
||||
//
|
||||
// Please keep this list sorted alphabetically for convenience.
|
||||
XRPL_RETIRE(fix1201)
|
||||
// All known amendments and amendments that may appear in a validated
|
||||
// ledger must be registered either here or above with the "active" amendments
|
||||
XRPL_RETIRE(MultiSign)
|
||||
XRPL_RETIRE(TrustSetAuth)
|
||||
XRPL_RETIRE(FeeEscalation)
|
||||
XRPL_RETIRE(PayChan)
|
||||
XRPL_RETIRE(CryptoConditions)
|
||||
XRPL_RETIRE(TickSize)
|
||||
XRPL_RETIRE(fix1368)
|
||||
XRPL_RETIRE(Escrow)
|
||||
XRPL_RETIRE(fix1373)
|
||||
XRPL_RETIRE(EnforceInvariants)
|
||||
XRPL_RETIRE(SortedDirectories)
|
||||
XRPL_RETIRE(fix1201)
|
||||
XRPL_RETIRE(fix1512)
|
||||
XRPL_RETIRE(fix1513)
|
||||
XRPL_RETIRE(fix1515)
|
||||
XRPL_RETIRE(fix1523)
|
||||
XRPL_RETIRE(fix1528)
|
||||
XRPL_RETIRE(fix1543)
|
||||
XRPL_RETIRE(fix1571)
|
||||
XRPL_RETIRE(fix1578)
|
||||
XRPL_RETIRE(fix1623)
|
||||
XRPL_RETIRE(fix1781)
|
||||
XRPL_RETIRE(fixAmendmentMajorityCalc)
|
||||
XRPL_RETIRE(fixCheckThreading)
|
||||
XRPL_RETIRE(fixMasterKeyAsRegularKey)
|
||||
XRPL_RETIRE(fixQualityUpperBound)
|
||||
XRPL_RETIRE(fixReducedOffersV1)
|
||||
XRPL_RETIRE(fixRmSmallIncreasedQOffers)
|
||||
XRPL_RETIRE(fixSTAmountCanonicalize)
|
||||
XRPL_RETIRE(fixTakerDryOfferRemoval)
|
||||
XRPL_RETIRE(CryptoConditions)
|
||||
XRPL_RETIRE(Escrow)
|
||||
XRPL_RETIRE(EnforceInvariants)
|
||||
XRPL_RETIRE(FeeEscalation)
|
||||
XRPL_RETIRE(FlowCross)
|
||||
XRPL_RETIRE(MultiSign)
|
||||
XRPL_RETIRE(PayChan)
|
||||
XRPL_RETIRE(SortedDirectories)
|
||||
XRPL_RETIRE(TickSize)
|
||||
XRPL_RETIRE(TrustSetAuth)
|
||||
|
||||
@@ -350,6 +350,8 @@ LEDGER_ENTRY(ltESCROW, 0x0075, Escrow, escrow, ({
|
||||
{sfCondition, soeOPTIONAL},
|
||||
{sfCancelAfter, soeOPTIONAL},
|
||||
{sfFinishAfter, soeOPTIONAL},
|
||||
{sfFinishFunction, soeOPTIONAL},
|
||||
{sfData, soeOPTIONAL},
|
||||
{sfSourceTag, soeOPTIONAL},
|
||||
{sfDestinationTag, soeOPTIONAL},
|
||||
{sfOwnerNode, soeREQUIRED},
|
||||
@@ -457,7 +459,7 @@ LEDGER_ENTRY(ltCREDENTIAL, 0x0081, Credential, credential, ({
|
||||
{sfExpiration, soeOPTIONAL},
|
||||
{sfURI, soeOPTIONAL},
|
||||
{sfIssuerNode, soeREQUIRED},
|
||||
{sfSubjectNode, soeOPTIONAL},
|
||||
{sfSubjectNode, soeREQUIRED},
|
||||
{sfPreviousTxnID, soeREQUIRED},
|
||||
{sfPreviousTxnLgrSeq, soeREQUIRED},
|
||||
}))
|
||||
|
||||
@@ -115,6 +115,11 @@ TYPED_SFIELD(sfFirstNFTokenSequence, UINT32, 50)
|
||||
TYPED_SFIELD(sfOracleDocumentID, UINT32, 51)
|
||||
TYPED_SFIELD(sfPermissionValue, UINT32, 52)
|
||||
TYPED_SFIELD(sfMutableFlags, UINT32, 53)
|
||||
TYPED_SFIELD(sfExtensionComputeLimit, UINT32, 54)
|
||||
TYPED_SFIELD(sfExtensionSizeLimit, UINT32, 55)
|
||||
TYPED_SFIELD(sfGasPrice, UINT32, 56)
|
||||
TYPED_SFIELD(sfComputationAllowance, UINT32, 57)
|
||||
TYPED_SFIELD(sfGasUsed, UINT32, 58)
|
||||
|
||||
// 64-bit integers (common)
|
||||
TYPED_SFIELD(sfIndexNext, UINT64, 1)
|
||||
@@ -208,11 +213,8 @@ TYPED_SFIELD(sfAssetsMaximum, NUMBER, 3)
|
||||
TYPED_SFIELD(sfAssetsTotal, NUMBER, 4)
|
||||
TYPED_SFIELD(sfLossUnrealized, NUMBER, 5)
|
||||
|
||||
// int32
|
||||
// NOTE: Do not use `sfDummyInt32`. It's so far the only use of INT32
|
||||
// in this file and has been defined here for test only.
|
||||
// TODO: Replace `sfDummyInt32` with actually useful field.
|
||||
TYPED_SFIELD(sfDummyInt32, INT32, 1) // for tests only
|
||||
// 32-bit signed (common)
|
||||
TYPED_SFIELD(sfWasmReturnCode, INT32, 1)
|
||||
|
||||
// currency amount (common)
|
||||
TYPED_SFIELD(sfAmount, AMOUNT, 1)
|
||||
@@ -242,7 +244,7 @@ TYPED_SFIELD(sfBaseFeeDrops, AMOUNT, 22)
|
||||
TYPED_SFIELD(sfReserveBaseDrops, AMOUNT, 23)
|
||||
TYPED_SFIELD(sfReserveIncrementDrops, AMOUNT, 24)
|
||||
|
||||
// currency amount (AMM)
|
||||
// currency amount (more)
|
||||
TYPED_SFIELD(sfLPTokenOut, AMOUNT, 25)
|
||||
TYPED_SFIELD(sfLPTokenIn, AMOUNT, 26)
|
||||
TYPED_SFIELD(sfEPrice, AMOUNT, 27)
|
||||
@@ -250,6 +252,7 @@ TYPED_SFIELD(sfPrice, AMOUNT, 28)
|
||||
TYPED_SFIELD(sfSignatureReward, AMOUNT, 29)
|
||||
TYPED_SFIELD(sfMinAccountCreateAmount, AMOUNT, 30)
|
||||
TYPED_SFIELD(sfLPTokenBalance, AMOUNT, 31)
|
||||
TYPED_SFIELD(sfFinishFunction, VL, 32)
|
||||
|
||||
// variable length (common)
|
||||
TYPED_SFIELD(sfPublicKey, VL, 1)
|
||||
|
||||
@@ -69,11 +69,13 @@ TRANSACTION(ttESCROW_CREATE, 1, EscrowCreate,
|
||||
noPriv,
|
||||
({
|
||||
{sfDestination, soeREQUIRED},
|
||||
{sfDestinationTag, soeOPTIONAL},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
{sfCondition, soeOPTIONAL},
|
||||
{sfCancelAfter, soeOPTIONAL},
|
||||
{sfFinishAfter, soeOPTIONAL},
|
||||
{sfDestinationTag, soeOPTIONAL},
|
||||
{sfFinishFunction, soeOPTIONAL},
|
||||
{sfData, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
/** This transaction type completes an existing escrow. */
|
||||
@@ -87,6 +89,7 @@ TRANSACTION(ttESCROW_FINISH, 2, EscrowFinish,
|
||||
{sfFulfillment, soeOPTIONAL},
|
||||
{sfCondition, soeOPTIONAL},
|
||||
{sfCredentialIDs, soeOPTIONAL},
|
||||
{sfComputationAllowance, soeOPTIONAL},
|
||||
}))
|
||||
|
||||
|
||||
@@ -316,7 +319,7 @@ TRANSACTION(ttTRUST_SET, 20, TrustSet,
|
||||
#endif
|
||||
TRANSACTION(ttACCOUNT_DELETE, 21, AccountDelete,
|
||||
Delegation::notDelegatable,
|
||||
featureDeletableAccounts,
|
||||
uint256{},
|
||||
mustDeleteAcct,
|
||||
({
|
||||
{sfDestination, soeREQUIRED},
|
||||
@@ -837,7 +840,7 @@ TRANSACTION(ttPERMISSIONED_DOMAIN_DELETE, 63, PermissionedDomainDelete,
|
||||
#endif
|
||||
TRANSACTION(ttDELEGATE_SET, 64, DelegateSet,
|
||||
Delegation::notDelegatable,
|
||||
featurePermissionDelegationV1_1,
|
||||
featurePermissionDelegation,
|
||||
noPriv,
|
||||
({
|
||||
{sfAuthorize, soeREQUIRED},
|
||||
@@ -851,7 +854,7 @@ TRANSACTION(ttDELEGATE_SET, 64, DelegateSet,
|
||||
TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
createPseudoAcct | createMPTIssuance | mustModifyVault,
|
||||
createPseudoAcct | createMPTIssuance,
|
||||
({
|
||||
{sfAsset, soeREQUIRED, soeMPTSupported},
|
||||
{sfAssetsMaximum, soeOPTIONAL},
|
||||
@@ -869,7 +872,7 @@ TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
|
||||
TRANSACTION(ttVAULT_SET, 66, VaultSet,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mustModifyVault,
|
||||
noPriv,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAssetsMaximum, soeOPTIONAL},
|
||||
@@ -884,7 +887,7 @@ TRANSACTION(ttVAULT_SET, 66, VaultSet,
|
||||
TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mustDeleteAcct | destroyMPTIssuance | mustModifyVault,
|
||||
mustDeleteAcct | destroyMPTIssuance,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
}))
|
||||
@@ -896,7 +899,7 @@ TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
|
||||
TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayAuthorizeMPT | mustModifyVault,
|
||||
mayAuthorizeMPT,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
@@ -909,7 +912,7 @@ TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
|
||||
TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayDeleteMPT | mayAuthorizeMPT | mustModifyVault,
|
||||
mayDeleteMPT,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfAmount, soeREQUIRED, soeMPTSupported},
|
||||
@@ -924,7 +927,7 @@ TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
|
||||
TRANSACTION(ttVAULT_CLAWBACK, 70, VaultClawback,
|
||||
Delegation::delegatable,
|
||||
featureSingleAssetVault,
|
||||
mayDeleteMPT | mustModifyVault,
|
||||
mayDeleteMPT,
|
||||
({
|
||||
{sfVaultID, soeREQUIRED},
|
||||
{sfHolder, soeREQUIRED},
|
||||
|
||||
@@ -569,7 +569,6 @@ JSS(settle_delay); // out: AccountChannels
|
||||
JSS(severity); // in: LogLevel
|
||||
JSS(shares); // out: VaultInfo
|
||||
JSS(signature); // out: NetworkOPs, ChannelAuthorize
|
||||
JSS(signature_target); // in: TransactionSign
|
||||
JSS(signature_verified); // out: ChannelVerify
|
||||
JSS(signing_key); // out: NetworkOPs
|
||||
JSS(signing_keys); // out: ValidatorList
|
||||
|
||||
@@ -21,7 +21,6 @@
|
||||
#define RIPPLE_RESOURCE_CONSUMER_H_INCLUDED
|
||||
|
||||
#include <xrpl/basics/Log.h>
|
||||
#include <xrpl/protocol/PublicKey.h>
|
||||
#include <xrpl/resource/Charge.h>
|
||||
#include <xrpl/resource/Disposition.h>
|
||||
|
||||
@@ -88,9 +87,6 @@ public:
|
||||
Entry&
|
||||
entry();
|
||||
|
||||
void
|
||||
setPublicKey(PublicKey const& publicKey);
|
||||
|
||||
private:
|
||||
Logic* m_logic;
|
||||
Entry* m_entry;
|
||||
|
||||
@@ -53,7 +53,7 @@ struct Entry : public beast::List<Entry>::Node
|
||||
std::string
|
||||
to_string() const
|
||||
{
|
||||
return getFingerprint(key->address, publicKey);
|
||||
return key->address.to_string();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -82,9 +82,6 @@ struct Entry : public beast::List<Entry>::Node
|
||||
return local_balance.add(charge, now) + remote_balance;
|
||||
}
|
||||
|
||||
// The public key of the peer
|
||||
std::optional<PublicKey> publicKey;
|
||||
|
||||
// Back pointer to the map key (bit of a hack here)
|
||||
Key const* key;
|
||||
|
||||
|
||||
@@ -436,12 +436,10 @@ public:
|
||||
admin_.erase(admin_.iterator_to(entry));
|
||||
break;
|
||||
default:
|
||||
// LCOV_EXCL_START
|
||||
UNREACHABLE(
|
||||
"ripple::Resource::Logic::release : invalid entry "
|
||||
"kind");
|
||||
break;
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
inactive_.push_back(entry);
|
||||
entry.whenExpires = m_clock.now() + secondsUntilExpiration;
|
||||
|
||||
@@ -30,29 +30,15 @@
|
||||
#include <boost/asio/buffer.hpp>
|
||||
#include <boost/asio/io_context.hpp>
|
||||
#include <boost/asio/ip/tcp.hpp>
|
||||
#include <boost/asio/post.hpp>
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/asio/steady_timer.hpp>
|
||||
#include <boost/beast/core/detect_ssl.hpp>
|
||||
#include <boost/beast/core/multi_buffer.hpp>
|
||||
#include <boost/beast/core/tcp_stream.hpp>
|
||||
#include <boost/container/flat_map.hpp>
|
||||
#include <boost/predef.h>
|
||||
|
||||
#if !BOOST_OS_WINDOWS
|
||||
#include <sys/resource.h>
|
||||
|
||||
#include <dirent.h>
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
#include <algorithm>
|
||||
#include <chrono>
|
||||
#include <cstdint>
|
||||
#include <functional>
|
||||
#include <memory>
|
||||
#include <optional>
|
||||
#include <sstream>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
@@ -112,27 +98,10 @@ private:
|
||||
boost::asio::strand<boost::asio::io_context::executor_type> strand_;
|
||||
bool ssl_;
|
||||
bool plain_;
|
||||
static constexpr std::chrono::milliseconds INITIAL_ACCEPT_DELAY{50};
|
||||
static constexpr std::chrono::milliseconds MAX_ACCEPT_DELAY{2000};
|
||||
std::chrono::milliseconds accept_delay_{INITIAL_ACCEPT_DELAY};
|
||||
boost::asio::steady_timer backoff_timer_;
|
||||
static constexpr double FREE_FD_THRESHOLD = 0.70;
|
||||
|
||||
struct FDStats
|
||||
{
|
||||
std::uint64_t used{0};
|
||||
std::uint64_t limit{0};
|
||||
};
|
||||
|
||||
void
|
||||
reOpen();
|
||||
|
||||
std::optional<FDStats>
|
||||
query_fd_stats() const;
|
||||
|
||||
bool
|
||||
should_throttle_for_fds();
|
||||
|
||||
public:
|
||||
Door(
|
||||
Handler& handler,
|
||||
@@ -330,7 +299,6 @@ Door<Handler>::Door(
|
||||
, plain_(
|
||||
port_.protocol.count("http") > 0 || port_.protocol.count("ws") > 0 ||
|
||||
port_.protocol.count("ws2"))
|
||||
, backoff_timer_(io_context)
|
||||
{
|
||||
reOpen();
|
||||
}
|
||||
@@ -355,7 +323,6 @@ Door<Handler>::close()
|
||||
return boost::asio::post(
|
||||
strand_,
|
||||
std::bind(&Door<Handler>::close, this->shared_from_this()));
|
||||
backoff_timer_.cancel();
|
||||
error_code ec;
|
||||
acceptor_.close(ec);
|
||||
}
|
||||
@@ -401,17 +368,6 @@ Door<Handler>::do_accept(boost::asio::yield_context do_yield)
|
||||
{
|
||||
while (acceptor_.is_open())
|
||||
{
|
||||
if (should_throttle_for_fds())
|
||||
{
|
||||
backoff_timer_.expires_after(accept_delay_);
|
||||
boost::system::error_code tec;
|
||||
backoff_timer_.async_wait(do_yield[tec]);
|
||||
accept_delay_ = std::min(accept_delay_ * 2, MAX_ACCEPT_DELAY);
|
||||
JLOG(j_.warn()) << "Throttling do_accept for "
|
||||
<< accept_delay_.count() << "ms.";
|
||||
continue;
|
||||
}
|
||||
|
||||
error_code ec;
|
||||
endpoint_type remote_address;
|
||||
stream_type stream(ioc_);
|
||||
@@ -421,28 +377,15 @@ Door<Handler>::do_accept(boost::asio::yield_context do_yield)
|
||||
{
|
||||
if (ec == boost::asio::error::operation_aborted)
|
||||
break;
|
||||
|
||||
if (ec == boost::asio::error::no_descriptors ||
|
||||
ec == boost::asio::error::no_buffer_space)
|
||||
JLOG(j_.error()) << "accept: " << ec.message();
|
||||
if (ec == boost::asio::error::no_descriptors)
|
||||
{
|
||||
JLOG(j_.warn()) << "accept: Too many open files. Pausing for "
|
||||
<< accept_delay_.count() << "ms.";
|
||||
|
||||
backoff_timer_.expires_after(accept_delay_);
|
||||
boost::system::error_code tec;
|
||||
backoff_timer_.async_wait(do_yield[tec]);
|
||||
|
||||
accept_delay_ = std::min(accept_delay_ * 2, MAX_ACCEPT_DELAY);
|
||||
}
|
||||
else
|
||||
{
|
||||
JLOG(j_.error()) << "accept error: " << ec.message();
|
||||
JLOG(j_.info()) << "re-opening acceptor";
|
||||
reOpen();
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
accept_delay_ = INITIAL_ACCEPT_DELAY;
|
||||
|
||||
if (ssl_ && plain_)
|
||||
{
|
||||
if (auto sp = ios().template emplace<Detector>(
|
||||
@@ -465,60 +408,6 @@ Door<Handler>::do_accept(boost::asio::yield_context do_yield)
|
||||
}
|
||||
}
|
||||
|
||||
template <class Handler>
|
||||
std::optional<typename Door<Handler>::FDStats>
|
||||
Door<Handler>::query_fd_stats() const
|
||||
{
|
||||
#if BOOST_OS_WINDOWS
|
||||
return std::nullopt;
|
||||
#else
|
||||
FDStats s;
|
||||
struct rlimit rl;
|
||||
if (getrlimit(RLIMIT_NOFILE, &rl) != 0 || rl.rlim_cur == RLIM_INFINITY)
|
||||
return std::nullopt;
|
||||
s.limit = static_cast<std::uint64_t>(rl.rlim_cur);
|
||||
#if BOOST_OS_LINUX
|
||||
constexpr char const* kFdDir = "/proc/self/fd";
|
||||
#else
|
||||
constexpr char const* kFdDir = "/dev/fd";
|
||||
#endif
|
||||
if (DIR* d = ::opendir(kFdDir))
|
||||
{
|
||||
std::uint64_t cnt = 0;
|
||||
while (::readdir(d) != nullptr)
|
||||
++cnt;
|
||||
::closedir(d);
|
||||
// readdir counts '.', '..', and the DIR* itself shows in the list
|
||||
s.used = (cnt >= 3) ? (cnt - 3) : 0;
|
||||
return s;
|
||||
}
|
||||
return std::nullopt;
|
||||
#endif
|
||||
}
|
||||
|
||||
template <class Handler>
|
||||
bool
|
||||
Door<Handler>::should_throttle_for_fds()
|
||||
{
|
||||
#if BOOST_OS_WINDOWS
|
||||
return false;
|
||||
#else
|
||||
auto const stats = query_fd_stats();
|
||||
if (!stats || stats->limit == 0)
|
||||
return false;
|
||||
|
||||
auto const& s = *stats;
|
||||
auto const free = (s.limit > s.used) ? (s.limit - s.used) : 0ull;
|
||||
double const free_ratio =
|
||||
static_cast<double>(free) / static_cast<double>(s.limit);
|
||||
if (free_ratio < FREE_FD_THRESHOLD)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif
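
As written above, should_throttle_for_fds starts pausing accepts once less than 70% of the descriptor limit remains free (that is, once more than about 30% of the limit is in use), and do_accept then backs off exponentially. The stand-alone sketch below only prints the schedule implied by the 50ms/2000ms constants.

#include <algorithm>
#include <chrono>
#include <cstdio>

// Prints the accept back-off sequence implied by INITIAL_ACCEPT_DELAY = 50ms
// and MAX_ACCEPT_DELAY = 2000ms: 50, 100, 200, 400, 800, 1600, 2000 (cap).
int main()
{
    using std::chrono::milliseconds;
    milliseconds const initial{50}, cap{2000};
    for (milliseconds delay = initial; /* exits via break */;
         delay = std::min(delay * 2, cap))
    {
        std::printf("%lld ms\n", static_cast<long long>(delay.count()));
        if (delay == cap)
            break;
    }
}
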
@@ -239,11 +239,9 @@ Logs::fromSeverity(beast::severities::Severity level)
|
||||
case kError:
|
||||
return lsERROR;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::fromSeverity : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case kFatal:
|
||||
break;
|
||||
}
|
||||
@@ -267,11 +265,9 @@ Logs::toSeverity(LogSeverity level)
|
||||
return kWarning;
|
||||
case lsERROR:
|
||||
return kError;
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::toSeverity : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case lsFATAL:
|
||||
break;
|
||||
}
|
||||
@@ -296,11 +292,9 @@ Logs::toString(LogSeverity s)
|
||||
return "Error";
|
||||
case lsFATAL:
|
||||
return "Fatal";
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::toString : invalid severity");
|
||||
return "Unknown";
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -362,11 +356,9 @@ Logs::format(
|
||||
case kError:
|
||||
output += "ERR ";
|
||||
break;
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("ripple::Logs::format : invalid severity");
|
||||
[[fallthrough]];
|
||||
// LCOV_EXCL_STOP
|
||||
case kFatal:
|
||||
output += "FTL ";
|
||||
break;
|
||||
|
||||
@@ -623,6 +623,48 @@ power(Number const& f, unsigned n)
|
||||
return r;
|
||||
}
|
||||
|
||||
// Continued fraction approximation of ln(x)
|
||||
static Number
|
||||
ln(Number const& x, unsigned iterations = 50)
|
||||
{
|
||||
if (x <= 0)
|
||||
throw std::runtime_error("Not positive value");
|
||||
|
||||
Number const z = (x - 1) / (x + 1);
|
||||
Number const zz = z * z;
|
||||
Number denom = Number(1, -10);
|
||||
|
||||
// Construct the fraction from the bottom up
|
||||
for (int i = iterations; i > 0; --i)
|
||||
{
|
||||
Number k(2 * i - 1);
|
||||
denom = k - (i * i * zz / denom);
|
||||
}
|
||||
|
||||
auto const r = 2 * z / denom;
|
||||
return r;
|
||||
}
|
||||
|
||||
Number
|
||||
lg(Number const& x)
|
||||
{
|
||||
static Number const ln10 = ln(Number(10));
|
||||
|
||||
if (x <= Number(10))
|
||||
{
|
||||
auto const r = ln(x) / ln10;
|
||||
return r;
|
||||
}
|
||||
|
||||
// ln(x) = ln(normX * 10^norm) = ln(normX) + norm * ln(10)
|
||||
int diffExp = 15 + x.exponent();
|
||||
Number const normalX = x / Number(1, diffExp); // (1 <= normalX < 10)
|
||||
auto const lnX = ln(normalX) + diffExp * ln10;
|
||||
|
||||
auto const r = lnX / ln10;
|
||||
return r;
|
||||
}
|
||||
|
||||
// Returns f^(1/d)
|
||||
// Uses Newton–Raphson iterations until the result stops changing
|
||||
// to find the non-negative root of the polynomial g(x) = x^d - f
|
||||
@@ -36,7 +36,6 @@ LogThrow(std::string const& title)
|
||||
[[noreturn]] void
|
||||
LogicError(std::string const& s) noexcept
|
||||
{
|
||||
// LCOV_EXCL_START
|
||||
JLOG(debugLog().fatal()) << s;
|
||||
std::cerr << "Logic error: " << s << std::endl;
|
||||
// Use a non-standard contract naming here (without namespace) because
|
||||
@@ -46,7 +45,6 @@ LogicError(std::string const& s) noexcept
|
||||
// For the above reasons, we want this contract to stand out.
|
||||
UNREACHABLE("LogicError", {{"message", s}});
|
||||
std::abort();
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
@@ -174,7 +174,7 @@ Array::append(Json::Value const& v)
|
||||
return;
|
||||
}
|
||||
}
|
||||
UNREACHABLE("Json::Array::append : invalid type"); // LCOV_EXCL_LINE
|
||||
UNREACHABLE("Json::Array::append : invalid type");
|
||||
}
|
||||
|
||||
void
|
||||
@@ -209,7 +209,7 @@ Object::set(std::string const& k, Json::Value const& v)
|
||||
return;
|
||||
}
|
||||
}
|
||||
UNREACHABLE("Json::Object::set : invalid type"); // LCOV_EXCL_LINE
|
||||
UNREACHABLE("Json::Object::set : invalid type");
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
@@ -213,10 +213,8 @@ Value::Value(ValueType type) : type_(type), allocated_(0)
|
||||
value_.bool_ = false;
|
||||
break;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::Value(ValueType) : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -292,10 +290,8 @@ Value::Value(Value const& other) : type_(other.type_)
|
||||
value_.map_ = new ObjectValues(*other.value_.map_);
|
||||
break;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::Value(Value const&) : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -322,10 +318,8 @@ Value::~Value()
|
||||
delete value_.map_;
|
||||
break;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::~Value : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
}
|
||||
|
||||
@@ -425,10 +419,8 @@ operator<(Value const& x, Value const& y)
|
||||
return *x.value_.map_ < *y.value_.map_;
|
||||
}
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::operator<(Value, Value) : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable
|
||||
@@ -473,10 +465,8 @@ operator==(Value const& x, Value const& y)
|
||||
return x.value_.map_->size() == y.value_.map_->size() &&
|
||||
*x.value_.map_ == *y.value_.map_;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::operator==(Value, Value) : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable
|
||||
@@ -516,10 +506,8 @@ Value::asString() const
|
||||
case objectValue:
|
||||
JSON_ASSERT_MESSAGE(false, "Type is not convertible to string");
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asString : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return ""; // unreachable
|
||||
@@ -560,10 +548,8 @@ Value::asInt() const
|
||||
case objectValue:
|
||||
JSON_ASSERT_MESSAGE(false, "Type is not convertible to int");
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asInt : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable;
|
||||
@@ -604,10 +590,8 @@ Value::asUInt() const
|
||||
case objectValue:
|
||||
JSON_ASSERT_MESSAGE(false, "Type is not convertible to uint");
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asUInt : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable;
|
||||
@@ -638,10 +622,8 @@ Value::asDouble() const
|
||||
case objectValue:
|
||||
JSON_ASSERT_MESSAGE(false, "Type is not convertible to double");
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asDouble : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable;
|
||||
@@ -672,10 +654,8 @@ Value::asBool() const
|
||||
case objectValue:
|
||||
return value_.map_->size() != 0;
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::asBool : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return false; // unreachable;
|
||||
@@ -730,10 +710,8 @@ Value::isConvertibleTo(ValueType other) const
|
||||
return other == objectValue ||
|
||||
(other == nullValue && value_.map_->size() == 0);
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::isConvertible : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return false; // unreachable;
|
||||
@@ -766,10 +744,8 @@ Value::size() const
|
||||
case objectValue:
|
||||
return Int(value_.map_->size());
|
||||
|
||||
// LCOV_EXCL_START
|
||||
default:
|
||||
UNREACHABLE("Json::Value::size : invalid type");
|
||||
// LCOV_EXCL_STOP
|
||||
}
|
||||
|
||||
return 0; // unreachable;
|
||||
|
||||
@@ -116,6 +116,8 @@ ApplyStateTable::apply(
TER ter,
std::optional<STAmount> const& deliver,
std::optional<uint256 const> const& parentBatchId,
std::optional<std::uint32_t> const& gasUsed,
std::optional<std::int32_t> const& wasmReturnCode,
bool isDryRun,
beast::Journal j)
{
@@ -128,9 +130,14 @@ ApplyStateTable::apply(
{
TxMeta meta(tx.getTransactionID(), to.seq());

meta.setDeliveredAmount(deliver);
meta.setParentBatchID(parentBatchId);

if (deliver)
meta.setDeliveredAmount(*deliver);
if (parentBatchId)
meta.setParentBatchId(*parentBatchId);
if (gasUsed)
meta.setGasUsed(*gasUsed);
if (wasmReturnCode)
meta.setWasmReturnCode(*wasmReturnCode);
Mods newMod;
for (auto& item : items_)
{
@@ -259,11 +266,9 @@ ApplyStateTable::apply(
}
else
{
// LCOV_EXCL_START
UNREACHABLE(
"ripple::detail::ApplyStateTable::apply : unsupported "
"operation type");
// LCOV_EXCL_STOP
}
}

@@ -682,6 +687,12 @@ ApplyStateTable::threadOwners(
if (auto const optSleAcct{(*sle)[~sfAccount]})
threadTx(base, meta, *optSleAcct, mods, j);

// Don't thread a check's sfDestination unless the amendment is
// enabled
if (ledgerType == ltCHECK &&
!base.rules().enabled(fixCheckThreading))
break;

// If sfDestination is present, thread to that account
if (auto const optSleDest{(*sle)[~sfDestination]})
threadTx(base, meta, *optSleDest, mods, j);
@@ -22,9 +22,6 @@
#include <xrpl/ledger/ApplyView.h>
#include <xrpl/protocol/Protocol.h>

#include <limits>
#include <type_traits>

namespace ripple {

std::optional<std::uint64_t>
@@ -94,21 +91,8 @@ ApplyView::dirAdd(
return page;
}

// We rely on modulo arithmetic of unsigned integers (guaranteed in
// [basic.fundamental] paragraph 2) to detect page representation overflow.
// For signed integers this would be UB, hence static_assert here.
static_assert(std::is_unsigned_v<decltype(page)>);
// Defensive check against breaking changes in compiler.
static_assert([]<typename T>(std::type_identity<T>) constexpr -> T {
T tmp = std::numeric_limits<T>::max();
return ++tmp;
}(std::type_identity<decltype(page)>{}) == 0);
++page;
// Check whether we're out of pages.
if (page == 0)
return std::nullopt;
if (!rules().enabled(fixDirectoryLimit) &&
page >= dirNodeMaxPages) // Old pages limit
if (++page >= dirNodeMaxPages)
return std::nullopt;

// We are about to create a new node; we'll link it to
@@ -149,10 +133,8 @@ ApplyView::emptyDirDelete(Keylet const& directory)
if (directory.type != ltDIR_NODE ||
node->getFieldH256(sfRootIndex) != directory.key)
{
// LCOV_EXCL_START
UNREACHABLE("ripple::ApplyView::emptyDirDelete : invalid node type");
return false;
// LCOV_EXCL_STOP
}

// The directory still contains entries and so it cannot be removed

@@ -35,7 +35,16 @@ ApplyViewImpl::apply(
bool isDryRun,
beast::Journal j)
{
return items_.apply(to, tx, ter, deliver_, parentBatchId, isDryRun, j);
return items_.apply(
to,
tx,
ter,
deliver_,
parentBatchId,
gasUsed_,
wasmReturnCode_,
isDryRun,
j);
}

std::size_t

@@ -36,9 +36,7 @@ BookDirs::BookDirs(ReadView const& view, Book const& book)
{
if (!cdirFirst(*view_, key_, sle_, entry_, index_))
{
// LCOV_EXCL_START
UNREACHABLE("ripple::BookDirs::BookDirs : directory is empty");
// LCOV_EXCL_STOP
}
}
}
@@ -112,11 +110,9 @@ BookDirs::const_iterator::operator++()
}
else if (!cdirFirst(*view_, cur_key_, sle_, entry_, index_))
{
// LCOV_EXCL_START
UNREACHABLE(
"ripple::BookDirs::const_iterator::operator++ : directory is "
"empty");
// LCOV_EXCL_STOP
}
}

@@ -77,23 +77,19 @@ deleteSLE(
AccountID const& account, SField const& node, bool isOwner) -> TER {
auto const sleAccount = view.peek(keylet::account(account));
if (!sleAccount)
{
// LCOV_EXCL_START
{ // LCOV_EXCL_START
JLOG(j.fatal()) << "Internal error: can't retrieve Owner account.";
return tecINTERNAL;
// LCOV_EXCL_STOP
}
} // LCOV_EXCL_STOP

// Remove object from owner directory
std::uint64_t const page = sleCredential->getFieldU64(node);
if (!view.dirRemove(
keylet::ownerDir(account), page, sleCredential->key(), false))
{
// LCOV_EXCL_START
{ // LCOV_EXCL_START
JLOG(j.fatal()) << "Unable to delete Credential from owner.";
return tefBAD_LEDGER;
// LCOV_EXCL_STOP
}
} // LCOV_EXCL_STOP

if (isOwner)
adjustOwnerCount(view, sleAccount, -1, j);

@@ -324,12 +324,10 @@ isVaultPseudoAccountFrozen(
auto const issuer = mptIssuance->getAccountID(sfIssuer);
auto const mptIssuer = view.read(keylet::account(issuer));
if (mptIssuer == nullptr)
{
// LCOV_EXCL_START
{ // LCOV_EXCL_START
UNREACHABLE("ripple::isVaultPseudoAccountFrozen : null MPToken issuer");
return false;
// LCOV_EXCL_STOP
}
} // LCOV_EXCL_STOP

if (!mptIssuer->isFieldPresent(sfVaultID))
return false; // not a Vault pseudo-account, common case
@@ -340,8 +338,7 @@ isVaultPseudoAccountFrozen(
{ // LCOV_EXCL_START
UNREACHABLE("ripple::isVaultPseudoAccountFrozen : null vault");
return false;
// LCOV_EXCL_STOP
}
} // LCOV_EXCL_STOP

return isAnyFrozen(view, {issuer, account}, vault->at(sfAsset), depth + 1);
}
@@ -1134,7 +1131,7 @@ createPseudoAccount(
uint256 const& pseudoOwnerKey,
SField const& ownerField)
{
[[maybe_unused]] auto const& fields = getPseudoAccountFields();
auto const& fields = getPseudoAccountFields();
XRPL_ASSERT(
std::count_if(
fields.begin(),
@@ -1242,12 +1239,6 @@ addEmptyHolding(
// If the line already exists, don't create it again.
if (view.read(index))
return tecDUPLICATE;

// Can the account cover the trust line reserve ?
std::uint32_t const ownerCount = sleDst->at(sfOwnerCount);
if (priorBalance < view.fees().accountReserve(ownerCount + 1))
return tecNO_LINE_INSUF_RESERVE;

return trustCreate(
view,
high,
@@ -1298,7 +1289,7 @@ authorizeMPToken(
{
auto const sleAcct = view.peek(keylet::account(account));
if (!sleAcct)
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;

// If the account that submitted the tx is a holder
// Note: `account_` is holder's account
@@ -1363,17 +1354,17 @@ authorizeMPToken(

auto const sleMptIssuance = view.read(keylet::mptIssuance(mptIssuanceID));
if (!sleMptIssuance)
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;

// If the account that submitted this tx is the issuer of the MPT
// Note: `account_` is issuer's account
// `holderID` is holder's account
if (account != (*sleMptIssuance)[sfIssuer])
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;

auto const sleMpt = view.peek(keylet::mptoken(mptIssuanceID, *holderID));
if (!sleMpt)
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;

std::uint32_t const flagsIn = sleMpt->getFieldU32(sfFlags);
std::uint32_t flagsOut = flagsIn;
@@ -1430,7 +1421,7 @@ trustCreate(
describeOwnerDir(uLowAccountID));

if (!lowNode)
return tecDIR_FULL; // LCOV_EXCL_LINE
return tecDIR_FULL;

auto highNode = view.dirInsert(
keylet::ownerDir(uHighAccountID),
@@ -1438,14 +1429,14 @@ trustCreate(
describeOwnerDir(uHighAccountID));

if (!highNode)
return tecDIR_FULL; // LCOV_EXCL_LINE
return tecDIR_FULL;

bool const bSetDst = saLimit.getIssuer() == uDstAccountID;
bool const bSetHigh = bSrcHigh ^ bSetDst;

XRPL_ASSERT(sleAccount, "ripple::trustCreate : non-null SLE");
if (!sleAccount)
return tefINTERNAL; // LCOV_EXCL_LINE
return tefINTERNAL;

XRPL_ASSERT(
sleAccount->getAccountID(sfAccount) ==
@@ -1524,12 +1515,10 @@ removeEmptyHolding(
{
auto const sle = view.read(keylet::account(accountID));
if (!sle)
return tecINTERNAL; // LCOV_EXCL_LINE

return tecINTERNAL;
auto const balance = sle->getFieldAmount(sfBalance);
if (balance.xrp() != 0)
return tecHAS_OBLIGATIONS;

return tesSUCCESS;
}

@@ -1547,8 +1536,7 @@ removeEmptyHolding(
auto sleLowAccount =
view.peek(keylet::account(line->at(sfLowLimit)->getIssuer()));
if (!sleLowAccount)
return tecINTERNAL; // LCOV_EXCL_LINE

return tecINTERNAL;
adjustOwnerCount(view, sleLowAccount, -1, journal);
// It's not really necessary to clear the reserve flag, since the line
// is about to be deleted, but this will make the metadata reflect an
@@ -1562,8 +1550,7 @@ removeEmptyHolding(
auto sleHighAccount =
view.peek(keylet::account(line->at(sfHighLimit)->getIssuer()));
if (!sleHighAccount)
return tecINTERNAL; // LCOV_EXCL_LINE

return tecINTERNAL;
adjustOwnerCount(view, sleHighAccount, -1, journal);
// It's not really necessary to clear the reserve flag, since the line
// is about to be deleted, but this will make the metadata reflect an
@@ -1623,7 +1610,7 @@ trustDelete(
sleRippleState->key(),
false))
{
return tefBAD_LEDGER; // LCOV_EXCL_LINE
return tefBAD_LEDGER;
}

JLOG(j.trace()) << "trustDelete: Deleting ripple line: high";
@@ -1634,7 +1621,7 @@ trustDelete(
sleRippleState->key(),
false))
{
return tefBAD_LEDGER; // LCOV_EXCL_LINE
return tefBAD_LEDGER;
}

JLOG(j.trace()) << "trustDelete: Deleting ripple line: state";
@@ -1660,7 +1647,7 @@ offerDelete(ApplyView& view, std::shared_ptr<SLE> const& sle, beast::Journal j)
offerIndex,
false))
{
return tefBAD_LEDGER; // LCOV_EXCL_LINE
return tefBAD_LEDGER;
}

if (!view.dirRemove(
@@ -1669,7 +1656,7 @@ offerDelete(ApplyView& view, std::shared_ptr<SLE> const& sle, beast::Journal j)
offerIndex,
false))
{
return tefBAD_LEDGER; // LCOV_EXCL_LINE
return tefBAD_LEDGER;
}

if (sle->isFieldPresent(sfAdditionalBooks))
@@ -1833,7 +1820,7 @@ rippleCreditIOU(

auto const sleAccount = view.peek(keylet::account(uReceiverID));
if (!sleAccount)
return tefINTERNAL; // LCOV_EXCL_LINE
return tefINTERNAL;

bool const noRipple = (sleAccount->getFlags() & lsfDefaultRipple) == 0;

@@ -1923,16 +1910,14 @@ accountSendIOU(
{
if (saAmount < beast::zero || saAmount.holds<MPTIssue>())
{
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;
}
}
else
{
// LCOV_EXCL_START
XRPL_ASSERT(
saAmount >= beast::zero && !saAmount.holds<MPTIssue>(),
"ripple::accountSendIOU : minimum amount and not MPT");
// LCOV_EXCL_STOP
}

/* If we aren't sending anything or if the sender is the same as the
@@ -1989,10 +1974,8 @@ accountSendIOU(
{
// VFALCO Its laborious to have to mutate the
// TER based on params everywhere
// LCOV_EXCL_START
terResult = view.open() ? TER{telFAILED_PROCESSING}
: TER{tecFAILED_PROCESSING};
// LCOV_EXCL_STOP
}
else
{
@@ -2079,7 +2062,7 @@ rippleCreditMPT(
view.update(sleIssuance);
}
else
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;
}
else
{
@@ -2339,7 +2322,7 @@ issueIOU(

auto const receiverAccount = view.peek(keylet::account(account));
if (!receiverAccount)
return tefINTERNAL; // LCOV_EXCL_LINE
return tefINTERNAL;

bool noRipple = (receiverAccount->getFlags() & lsfDefaultRipple) == 0;

@@ -2427,13 +2410,11 @@ redeemIOU(
// In order to hold an IOU, a trust line *MUST* exist to track the
// balance. If it doesn't, then something is very wrong. Don't try
// to continue.
// LCOV_EXCL_START
JLOG(j.fatal()) << "redeemIOU: " << to_string(account)
<< " attempts to redeem " << amount.getFullText()
<< " but no trust line exists!";

return tefINTERNAL;
// LCOV_EXCL_STOP
}

TER
@@ -2453,7 +2434,7 @@ transferXRP(
SLE::pointer const sender = view.peek(keylet::account(from));
SLE::pointer const receiver = view.peek(keylet::account(to));
if (!sender || !receiver)
return tefINTERNAL; // LCOV_EXCL_LINE
return tefINTERNAL;

JLOG(j.trace()) << "transferXRP: " << to_string(from) << " -> "
<< to_string(to) << ") : " << amount.getFullText();
@@ -2463,10 +2444,8 @@ transferXRP(
// VFALCO Its unfortunate we have to keep
// mutating these TER everywhere
// FIXME: this logic should be moved to callers maybe?
// LCOV_EXCL_START
return view.open() ? TER{telFAILED_PROCESSING}
: TER{tecFAILED_PROCESSING};
// LCOV_EXCL_STOP
}

// Decrement XRP balance.
@@ -2697,8 +2676,7 @@ enforceMPTokenAuthorization(
UNREACHABLE(
"ripple::enforceMPTokenAuthorization : condition list is incomplete");
return tefINTERNAL;
// LCOV_EXCL_STOP
}
} // LCOV_EXCL_STOP

TER
canTransfer(
@@ -2747,13 +2725,11 @@ cleanupOnAccountDelete(
if (!sleItem)
{
// Directory node has an invalid index. Bail out.
// LCOV_EXCL_START
JLOG(j.fatal())
<< "DeleteAccount: Directory node in ledger " << view.seq()
<< " has index to object that is missing: "
<< to_string(dirEntry);
return tefBAD_LEDGER;
// LCOV_EXCL_STOP
}

LedgerEntryType const nodeType{safe_cast<LedgerEntryType>(
@@ -2786,11 +2762,9 @@ cleanupOnAccountDelete(
"ripple::cleanupOnAccountDelete : minimum dir entries");
if (uDirEntry == 0)
{
// LCOV_EXCL_START
JLOG(j.error())
<< "DeleteAccount iterator re-validation failed.";
return tefBAD_LEDGER;
// LCOV_EXCL_STOP
}
if (skipEntry == SkipEntry::No)
uDirEntry--;
@@ -2810,7 +2784,7 @@ deleteAMMTrustLine(
beast::Journal j)
{
if (!sleState || sleState->getType() != ltRIPPLE_STATE)
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;

auto const& [low, high] = std::minmax(
sleState->getFieldAmount(sfLowLimit).getIssuer(),
@@ -2818,14 +2792,13 @@ deleteAMMTrustLine(
auto sleLow = view.peek(keylet::account(low));
auto sleHigh = view.peek(keylet::account(high));
if (!sleLow || !sleHigh)
return tecINTERNAL; // LCOV_EXCL_LINE

return tecINTERNAL;
bool const ammLow = sleLow->isFieldPresent(sfAMMID);
bool const ammHigh = sleHigh->isFieldPresent(sfAMMID);

// can't both be AMM
if (ammLow && ammHigh)
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;

// at least one must be
if (!ammLow && !ammHigh)
@@ -2845,7 +2818,7 @@ deleteAMMTrustLine(

auto const uFlags = !ammLow ? lsfLowReserve : lsfHighReserve;
if (!(sleState->getFlags() & uFlags))
return tecINTERNAL; // LCOV_EXCL_LINE
return tecINTERNAL;

adjustOwnerCount(view, !ammLow ? sleLow : sleHigh, -1, j);

@@ -3153,7 +3126,7 @@ rippleUnlockEscrowMPT(
{ // LCOV_EXCL_START
JLOG(j.error())
<< "rippleUnlockEscrowMPT: MPToken not found for " << receiver;
return tecOBJECT_NOT_FOUND;
return tecOBJECT_NOT_FOUND; // LCOV_EXCL_LINE
} // LCOV_EXCL_STOP

auto current = sle->getFieldU64(sfMPTAmount);

@@ -383,7 +383,7 @@ public:
static boost::regex reStatus{
"\\`HTTP/1\\S+ (\\d{3}) .*\\'"}; // HTTP/1.1 200 OK
static boost::regex reSize{
"\\`.*\\r\\nContent-Length:\\s+([0-9]+).*\\'", boost::regex::icase};
"\\`.*\\r\\nContent-Length:\\s+([0-9]+).*\\'"};
static boost::regex reBody{"\\`.*\\r\\n\\r\\n(.*)\\'"};

boost::smatch smMatch;

@@ -36,7 +36,7 @@ namespace BuildInfo {
// and follow the format described at http://semver.org/
//------------------------------------------------------------------------------
// clang-format off
char const* const versionString = "3.0.0-b1"
char const* const versionString = "2.6.1-rc1"
// clang-format on

#if defined(DEBUG) || defined(SANITIZER)

@@ -58,13 +58,6 @@ setSTNumberSwitchover(bool v)
*getStaticSTNumberSwitchover() = v;
}

/* The range for the mantissa when normalized */
static std::int64_t constexpr minMantissa = 1000000000000000ull;
static std::int64_t constexpr maxMantissa = 9999999999999999ull;
/* The range for the exponent when normalized */
static int constexpr minExponent = -96;
static int constexpr maxExponent = 80;

IOUAmount
IOUAmount::minPositiveAmount()
{
@@ -312,7 +305,8 @@ mulRatio(
{
if (!result)
{
return IOUAmount(-minMantissa, minExponent);
return IOUAmount(
-IOUAmount::minMantissa, IOUAmount::minExponent);
}
// This subtraction cannot underflow because `result` is not zero
return IOUAmount(result.mantissa() - 1, result.exponent());

@@ -101,22 +101,6 @@ Permission::getInstance()
return instance;
}

std::optional<std::string>
Permission::getPermissionName(std::uint32_t const value) const
{
auto const permissionValue = static_cast<GranularPermissionType>(value);
if (auto const granular = getGranularName(permissionValue))
return *granular;

// not a granular permission, check if it maps to a transaction type
auto const txType = permissionToTxType(value);
if (auto const* item = TxFormats::getInstance().findByType(txType);
item != nullptr)
return item->getName();

return std::nullopt;
}

std::optional<std::uint32_t>
Permission::getGranularValue(std::string const& name) const
{
@@ -147,19 +131,6 @@ Permission::getGranularTxType(GranularPermissionType const& gpType) const
return std::nullopt;
}

std::optional<std::reference_wrapper<uint256 const>> const
Permission::getTxFeature(TxType txType) const
{
auto const txFeaturesIt = txFeatureMap_.find(txType);
XRPL_ASSERT(
txFeaturesIt != txFeatureMap_.end(),
"ripple::Permissions::getTxFeature : tx exists in txFeatureMap_");

if (txFeaturesIt->second == uint256{})
return std::nullopt;
return txFeaturesIt->second;
}

bool
Permission::isDelegatable(
std::uint32_t const& permissionValue,
@@ -174,22 +145,25 @@ Permission::isDelegatable(
auto const txType = permissionToTxType(permissionValue);
auto const it = delegatableTx_.find(txType);

if (it == delegatableTx_.end())
return false;
if (rules.enabled(fixDelegateV1_1))
{
if (it == delegatableTx_.end())
return false;

auto const txFeaturesIt = txFeatureMap_.find(txType);
XRPL_ASSERT(
txFeaturesIt != txFeatureMap_.end(),
"ripple::Permissions::isDelegatable : tx exists in txFeatureMap_");
auto const txFeaturesIt = txFeatureMap_.find(txType);
XRPL_ASSERT(
txFeaturesIt != txFeatureMap_.end(),
"ripple::Permissions::isDelegatable : tx exists in txFeatureMap_");

// Delegation is only allowed if the required amendment for the transaction
// is enabled. For transactions that do not require an amendment, delegation
// is always allowed.
if (txFeaturesIt->second != uint256{} &&
!rules.enabled(txFeaturesIt->second))
return false;
// fixDelegateV1_1: Delegation is only allowed if the required amendment
// for the transaction is enabled. For transactions that do not require
// an amendment, delegation is always allowed.
if (txFeaturesIt->second != uint256{} &&
!rules.enabled(txFeaturesIt->second))
return false;
}

if (it->second == Delegation::notDelegatable)
if (it != delegatableTx_.end() && it->second == Delegation::notDelegatable)
return false;

return true;

@@ -68,6 +68,29 @@

namespace ripple {

namespace {

// Use a static inside a function to help prevent order-of-initialzation issues
LocalValue<bool>&
getStaticSTAmountCanonicalizeSwitchover()
{
static LocalValue<bool> r{true};
return r;
}
} // namespace

bool
getSTAmountCanonicalizeSwitchover()
{
return *getStaticSTAmountCanonicalizeSwitchover();
}

void
setSTAmountCanonicalizeSwitchover(bool v)
{
*getStaticSTAmountCanonicalizeSwitchover() = v;
}

static std::uint64_t const tenTo14 = 100000000000000ull;
static std::uint64_t const tenTo14m1 = tenTo14 - 1;
static std::uint64_t const tenTo17 = tenTo14 * 1000;
@@ -861,14 +884,18 @@ STAmount::canonicalize()
return;
}

// log(cMaxNativeN, 10) == 17
if (native() && mOffset > 17)
Throw<std::runtime_error>("Native currency amount out of range");
// log(maxMPTokenAmount, 10) ~ 18.96
if (mAsset.holds<MPTIssue>() && mOffset > 18)
Throw<std::runtime_error>("MPT amount out of range");
if (getSTAmountCanonicalizeSwitchover())
{
// log(cMaxNativeN, 10) == 17
if (native() && mOffset > 17)
Throw<std::runtime_error>(
"Native currency amount out of range");
// log(maxMPTokenAmount, 10) ~ 18.96
if (mAsset.holds<MPTIssue>() && mOffset > 18)
Throw<std::runtime_error>("MPT amount out of range");
}

if (getSTNumberSwitchover())
if (getSTNumberSwitchover() && getSTAmountCanonicalizeSwitchover())
{
Number num(
mIsNegative ? -mValue : mValue, mOffset, Number::unchecked{});
@@ -892,14 +919,16 @@ STAmount::canonicalize()

while (mOffset > 0)
{
// N.B. do not move the overflow check to after the
// multiplication
if (native() && mValue > cMaxNativeN)
Throw<std::runtime_error>(
"Native currency amount out of range");
else if (!native() && mValue > maxMPTokenAmount)
Throw<std::runtime_error>("MPT amount out of range");

if (getSTAmountCanonicalizeSwitchover())
{
// N.B. do not move the overflow check to after the
// multiplication
if (native() && mValue > cMaxNativeN)
Throw<std::runtime_error>(
"Native currency amount out of range");
else if (!native() && mValue > maxMPTokenAmount)
Throw<std::runtime_error>("MPT amount out of range");
}
mValue *= 10;
--mOffset;
}

@@ -112,9 +112,7 @@ void
STBase::add(Serializer& s) const
{
// Should never be called
// LCOV_EXCL_START
UNREACHABLE("ripple::STBase::add : not implemented");
// LCOV_EXCL_STOP
}

bool

@@ -62,10 +62,8 @@ STUInt8::getText() const
if (transResultInfo(TER::fromInt(value_), token, human))
return human;

// LCOV_EXCL_START
JLOG(debugLog().error())
<< "Unknown result code in metadata: " << value_;
// LCOV_EXCL_STOP
}

return std::to_string(value_);
@@ -82,10 +80,8 @@ STUInt8::getJson(JsonOptions) const
if (transResultInfo(TER::fromInt(value_), token, human))
return token;

// LCOV_EXCL_START
JLOG(debugLog().error())
<< "Unknown result code in metadata: " << value_;
// LCOV_EXCL_STOP
}

return value_;
@@ -175,13 +171,6 @@ template <>
std::string
STUInt32::getText() const
{
if (getFName() == sfPermissionValue)
{
auto const permissionName =
Permission::getInstance().getPermissionName(value_);
if (permissionName)
return *permissionName;
}
return std::to_string(value_);
}

@@ -191,10 +180,23 @@ STUInt32::getJson(JsonOptions) const
{
if (getFName() == sfPermissionValue)
{
auto const permissionName =
Permission::getInstance().getPermissionName(value_);
if (permissionName)
return *permissionName;
auto const permissionValue =
static_cast<GranularPermissionType>(value_);
auto const granular =
Permission::getInstance().getGranularName(permissionValue);

if (granular)
{
return *granular;
}
else
{
auto const txType =
Permission::getInstance().permissionToTxType(value_);
auto item = TxFormats::getInstance().findByType(txType);
if (item != nullptr)
return item->getName();
}
}

return value_;

@@ -688,16 +688,6 @@ STObject::getFieldV256(SField const& field) const
return getFieldByConstRef<STVector256>(field, empty);
}

STObject
STObject::getFieldObject(SField const& field) const
{
STObject const empty{field};
auto ret = getFieldByConstRef<STObject>(field, empty);
if (ret != empty)
ret.applyTemplateFromSField(field);
return ret;
}

STArray const&
STObject::getFieldArray(SField const& field) const
{
@@ -843,12 +833,6 @@ STObject::setFieldArray(SField const& field, STArray const& v)
setFieldUsingAssignment(field, v);
}

void
STObject::setFieldObject(SField const& field, STObject const& v)
{
setFieldUsingAssignment(field, v);
}

Json::Value
STObject::getJson(JsonOptions options) const
{

@@ -83,8 +83,7 @@ constexpr std::
return static_cast<U1>(value);
}

// LCOV_EXCL_START
static inline std::string
static std::string
make_name(std::string const& object, std::string const& field)
{
if (field.empty())
@@ -93,7 +92,7 @@ make_name(std::string const& object, std::string const& field)
return object + "." + field;
}

static inline Json::Value
static Json::Value
not_an_object(std::string const& object, std::string const& field)
{
return RPC::make_error(
@@ -101,20 +100,20 @@ not_an_object(std::string const& object, std::string const& field)
"Field '" + make_name(object, field) + "' is not a JSON object.");
}

static inline Json::Value
static Json::Value
not_an_object(std::string const& object)
{
return not_an_object(object, "");
}

static inline Json::Value
static Json::Value
not_an_array(std::string const& object)
{
return RPC::make_error(
rpcINVALID_PARAMS, "Field '" + object + "' is not a JSON array.");
}

static inline Json::Value
static Json::Value
unknown_field(std::string const& object, std::string const& field)
{
return RPC::make_error(
@@ -122,7 +121,7 @@ unknown_field(std::string const& object, std::string const& field)
"Field '" + make_name(object, field) + "' is unknown.");
}

static inline Json::Value
static Json::Value
out_of_range(std::string const& object, std::string const& field)
{
return RPC::make_error(
@@ -130,7 +129,7 @@ out_of_range(std::string const& object, std::string const& field)
"Field '" + make_name(object, field) + "' is out of range.");
}

static inline Json::Value
static Json::Value
bad_type(std::string const& object, std::string const& field)
{
return RPC::make_error(
@@ -138,7 +137,7 @@ bad_type(std::string const& object, std::string const& field)
"Field '" + make_name(object, field) + "' has bad type.");
}

static inline Json::Value
static Json::Value
invalid_data(std::string const& object, std::string const& field)
{
return RPC::make_error(
@@ -146,13 +145,13 @@ invalid_data(std::string const& object, std::string const& field)
"Field '" + make_name(object, field) + "' has invalid data.");
}

static inline Json::Value
static Json::Value
invalid_data(std::string const& object)
{
return invalid_data(object, "");
}

static inline Json::Value
static Json::Value
array_expected(std::string const& object, std::string const& field)
{
return RPC::make_error(
@@ -160,7 +159,7 @@ array_expected(std::string const& object, std::string const& field)
"Field '" + make_name(object, field) + "' must be a JSON array.");
}

static inline Json::Value
static Json::Value
string_expected(std::string const& object, std::string const& field)
{
return RPC::make_error(
@@ -168,7 +167,7 @@ string_expected(std::string const& object, std::string const& field)
"Field '" + make_name(object, field) + "' must be a string.");
}

static inline Json::Value
static Json::Value
too_deep(std::string const& object)
{
return RPC::make_error(
@@ -176,7 +175,7 @@ too_deep(std::string const& object)
"Field '" + object + "' exceeds nesting depth limit.");
}

static inline Json::Value
static Json::Value
singleton_expected(std::string const& object, unsigned int index)
{
return RPC::make_error(
@@ -185,7 +184,7 @@ singleton_expected(std::string const& object, unsigned int index)
"]' must be an object with a single key/object value.");
}

static inline Json::Value
static Json::Value
template_mismatch(SField const& sField)
{
return RPC::make_error(
@@ -194,7 +193,7 @@ template_mismatch(SField const& sField)
"' contents did not meet requirements for that type.");
}

static inline Json::Value
static Json::Value
non_object_in_array(std::string const& item, Json::UInt index)
{
return RPC::make_error(
@@ -202,7 +201,6 @@ non_object_in_array(std::string const& item, Json::UInt index)
"Item '" + item + "' at index " + std::to_string(index) +
" is not an object. Arrays may only contain objects.");
}
// LCOV_EXCL_STOP

template <class STResult, class Integer>
static std::optional<detail::STVar>
@@ -387,13 +385,10 @@ parseLeaf(

auto const& field = SField::getField(fieldName);

// checked in parseObject
if (field == sfInvalid)
{
// LCOV_EXCL_START
error = unknown_field(json_name, fieldName);
return ret;
// LCOV_EXCL_STOP
}

switch (field.fieldType)
@@ -647,17 +642,11 @@ parseLeaf(
}
else if (value.isInt())
{
// future-proofing - a static assert failure if the JSON
// library ever supports larger ints
// In such case, we will need additional bounds checks here
static_assert(
std::is_same_v<decltype(value.asInt()), std::int32_t>);
ret = detail::make_stvar<STInt32>(field, value.asInt());
}
else if (value.isUInt())
{
auto const uintValue = value.asUInt();
if (uintValue >
if (value.asUInt() >
static_cast<std::uint32_t>(
std::numeric_limits<std::int32_t>::max()))
{
@@ -665,7 +654,7 @@ parseLeaf(
return ret;
}
ret = detail::make_stvar<STInt32>(
field, static_cast<std::int32_t>(uintValue));
field, safe_cast<std::int32_t>(value.asInt()));
}
else
{
@@ -811,12 +800,6 @@ parseLeaf(
AccountID uAccount, uIssuer;
Currency uCurrency;

if (!account && !currency && !issuer)
{
error = invalid_data(element_name);
return ret;
}

if (account)
{
// human account id
@@ -1209,4 +1192,24 @@ STParsedJSONObject::STParsedJSONObject(
object = parseObject(name, json, sfGeneric, 0, error);
}

//------------------------------------------------------------------------------

STParsedJSONArray::STParsedJSONArray(
std::string const& name,
Json::Value const& json)
{
using namespace STParsedJSONDetail;
auto arr = parseArray(name, json, sfGeneric, 0, error);
if (!arr)
array.reset();
else
{
auto p = dynamic_cast<STArray*>(&arr->get());
if (p == nullptr)
array.reset();
else
array = std::move(*p);
}
}

} // namespace ripple

@@ -200,11 +200,11 @@ STTx::getSigningHash() const
}

Blob
STTx::getSignature(STObject const& sigObject)
STTx::getSignature() const
{
try
{
return sigObject.getFieldVL(sfTxnSignature);
return getFieldVL(sfTxnSignature);
}
catch (std::exception const&)
{
@@ -234,68 +234,35 @@ STTx::getSeqValue() const
}

void
STTx::sign(
PublicKey const& publicKey,
SecretKey const& secretKey,
std::optional<std::reference_wrapper<SField const>> signatureTarget)
STTx::sign(PublicKey const& publicKey, SecretKey const& secretKey)
{
auto const data = getSigningData(*this);

auto const sig = ripple::sign(publicKey, secretKey, makeSlice(data));

if (signatureTarget)
{
auto& target = peekFieldObject(*signatureTarget);
target.setFieldVL(sfTxnSignature, sig);
}
else
{
setFieldVL(sfTxnSignature, sig);
}
setFieldVL(sfTxnSignature, sig);
tid_ = getHash(HashPrefix::transactionID);
}

Expected<void, std::string>
STTx::checkSign(
RequireFullyCanonicalSig requireCanonicalSig,
Rules const& rules,
STObject const& sigObject) const
{
try
{
// Determine whether we're single- or multi-signing by looking
// at the SigningPubKey. If it's empty we must be
// multi-signing. Otherwise we're single-signing.

Blob const& signingPubKey = sigObject.getFieldVL(sfSigningPubKey);
return signingPubKey.empty()
? checkMultiSign(requireCanonicalSig, rules, sigObject)
: checkSingleSign(requireCanonicalSig, sigObject);
}
catch (std::exception const&)
{
}
return Unexpected("Internal signature check failure.");
}

Expected<void, std::string>
STTx::checkSign(
RequireFullyCanonicalSig requireCanonicalSig,
Rules const& rules) const
{
if (auto const ret = checkSign(requireCanonicalSig, rules, *this); !ret)
return ret;

/* Placeholder for field that will be added by Lending Protocol
if (isFieldPresent(sfCounterpartySignature))
try
{
auto const counterSig = getFieldObject(sfCounterpartySignature);
if (auto const ret = checkSign(requireCanonicalSig, rules, counterSig);
!ret)
return Unexpected("Counterparty: " + ret.error());
// Determine whether we're single- or multi-signing by looking
// at the SigningPubKey. If it's empty we must be
// multi-signing. Otherwise we're single-signing.
Blob const& signingPubKey = getFieldVL(sfSigningPubKey);
return signingPubKey.empty()
? checkMultiSign(requireCanonicalSig, rules)
: checkSingleSign(requireCanonicalSig);
}
*/
return {};
catch (std::exception const&)
{
}
return Unexpected("Internal signature check failure.");
}

Expected<void, std::string>
@@ -415,23 +382,23 @@ STTx::getMetaSQL(

static Expected<void, std::string>
singleSignHelper(
STObject const& sigObject,
STObject const& signer,
Slice const& data,
bool const fullyCanonical)
{
// We don't allow both a non-empty sfSigningPubKey and an sfSigners.
// That would allow the transaction to be signed two ways. So if both
// fields are present the signature is invalid.
if (sigObject.isFieldPresent(sfSigners))
if (signer.isFieldPresent(sfSigners))
return Unexpected("Cannot both single- and multi-sign.");

bool validSig = false;
try
{
auto const spk = sigObject.getFieldVL(sfSigningPubKey);
auto const spk = signer.getFieldVL(sfSigningPubKey);
if (publicKeyType(makeSlice(spk)))
{
Blob const signature = sigObject.getFieldVL(sfTxnSignature);
Blob const signature = signer.getFieldVL(sfTxnSignature);
validSig = verify(
PublicKey(makeSlice(spk)),
data,
@@ -451,14 +418,12 @@ singleSignHelper(
}

Expected<void, std::string>
STTx::checkSingleSign(
RequireFullyCanonicalSig requireCanonicalSig,
STObject const& sigObject) const
STTx::checkSingleSign(RequireFullyCanonicalSig requireCanonicalSig) const
{
auto const data = getSigningData(*this);
bool const fullyCanonical = (getFlags() & tfFullyCanonicalSig) ||
(requireCanonicalSig == STTx::RequireFullyCanonicalSig::yes);
return singleSignHelper(sigObject, makeSlice(data), fullyCanonical);
return singleSignHelper(*this, makeSlice(data), fullyCanonical);
}

Expected<void, std::string>
@@ -475,29 +440,31 @@ STTx::checkBatchSingleSign(

Expected<void, std::string>
multiSignHelper(
STObject const& sigObject,
std::optional<AccountID> txnAccountID,
STObject const& signerObj,
bool const fullyCanonical,
std::function<Serializer(AccountID const&)> makeMsg,
Rules const& rules)
{
// Make sure the MultiSigners are present. Otherwise they are not
// attempting multi-signing and we just have a bad SigningPubKey.
if (!sigObject.isFieldPresent(sfSigners))
if (!signerObj.isFieldPresent(sfSigners))
return Unexpected("Empty SigningPubKey.");

// We don't allow both an sfSigners and an sfTxnSignature. Both fields
// being present would indicate that the transaction is signed both ways.
if (sigObject.isFieldPresent(sfTxnSignature))
if (signerObj.isFieldPresent(sfTxnSignature))
return Unexpected("Cannot both single- and multi-sign.");

STArray const& signers{sigObject.getFieldArray(sfSigners)};
STArray const& signers{signerObj.getFieldArray(sfSigners)};

// There are well known bounds that the number of signers must be within.
if (signers.size() < STTx::minMultiSigners ||
signers.size() > STTx::maxMultiSigners(&rules))
return Unexpected("Invalid Signers array size.");

// We also use the sfAccount field inside the loop. Get it once.
auto const txnAccountID = signerObj.getAccountID(sfAccount);

// Signers must be in sorted order by AccountID.
AccountID lastAccountID(beast::zero);

@@ -505,10 +472,8 @@ multiSignHelper(
{
auto const accountID = signer.getAccountID(sfAccount);

// The account owner may not usually multisign for themselves.
// If they can, txnAccountID will be unseated, which is not equal to any
// value.
if (txnAccountID == accountID)
// The account owner may not multisign for themselves.
if (accountID == txnAccountID)
return Unexpected("Invalid multisigner.");

// No duplicate signers allowed.
@@ -524,7 +489,6 @@ multiSignHelper(

// Verify the signature.
bool validSig = false;
std::optional<std::string> errorWhat;
try
{
auto spk = signer.getFieldVL(sfSigningPubKey);
@@ -538,16 +502,15 @@ multiSignHelper(
fullyCanonical);
}
}
catch (std::exception const& e)
catch (std::exception const&)
{
// We assume any problem lies with the signature.
validSig = false;
errorWhat = e.what();
}
if (!validSig)
return Unexpected(
std::string("Invalid signature on account ") +
toBase58(accountID) + errorWhat.value_or("") + ".");
toBase58(accountID) + ".");
}
// All signatures verified.
return {};
@@ -569,9 +532,8 @@ STTx::checkBatchMultiSign(
serializeBatch(dataStart, getFlags(), getBatchTransactionIDs());
return multiSignHelper(
batchSigner,
std::nullopt,
fullyCanonical,
[&dataStart](AccountID const& accountID) -> Serializer {
[&dataStart](AccountID const& accountID) mutable -> Serializer {
Serializer s = dataStart;
finishMultiSigningData(accountID, s);
return s;
@@ -582,27 +544,19 @@ STTx::checkBatchMultiSign(
Expected<void, std::string>
STTx::checkMultiSign(
RequireFullyCanonicalSig requireCanonicalSig,
Rules const& rules,
STObject const& sigObject) const
Rules const& rules) const
{
bool const fullyCanonical = (getFlags() & tfFullyCanonicalSig) ||
(requireCanonicalSig == RequireFullyCanonicalSig::yes);

// Used inside the loop in multiSignHelper to enforce that
// the account owner may not multisign for themselves.
auto const txnAccountID = &sigObject != this
? std::nullopt
: std::optional<AccountID>(getAccountID(sfAccount));

// We can ease the computational load inside the loop a bit by
// pre-constructing part of the data that we hash. Fill a Serializer
// with the stuff that stays constant from signature to signature.
Serializer dataStart = startMultiSigningData(*this);
return multiSignHelper(
sigObject,
txnAccountID,
*this,
fullyCanonical,
[&dataStart](AccountID const& accountID) -> Serializer {
[&dataStart](AccountID const& accountID) mutable -> Serializer {
Serializer s = dataStart;
finishMultiSigningData(accountID, s);
return s;
@@ -615,7 +569,7 @@ STTx::checkMultiSign(
*
* This function returns a vector of transaction IDs by extracting them from
* the field array `sfRawTransactions` within the STTx. If the batch
* transaction IDs have already been computed and cached in `batchTxnIds_`,
* transaction IDs have already been computed and cached in `batch_txn_ids_`,
* it returns the cached vector. Otherwise, it computes the transaction IDs,
* caches them, and then returns the vector.
*
@@ -625,7 +579,7 @@ STTx::checkMultiSign(
* empty and that the size of the computed batch transaction IDs matches the
* size of the `sfRawTransactions` field array.
*/
std::vector<uint256> const&
std::vector<uint256>
STTx::getBatchTransactionIDs() const
{
XRPL_ASSERT(
@@ -634,20 +588,16 @@ STTx::getBatchTransactionIDs() const
XRPL_ASSERT(
getFieldArray(sfRawTransactions).size() != 0,
"STTx::getBatchTransactionIDs : empty raw transactions");
if (batch_txn_ids_.size() != 0)
return batch_txn_ids_;

// The list of inner ids is built once, then reused on subsequent calls.
// After the list is built, it must always have the same size as the array
// `sfRawTransactions`. The assert below verifies that.
if (batchTxnIds_.size() == 0)
{
for (STObject const& rb : getFieldArray(sfRawTransactions))
batchTxnIds_.push_back(rb.getHash(HashPrefix::transactionID));
}
for (STObject const& rb : getFieldArray(sfRawTransactions))
batch_txn_ids_.push_back(rb.getHash(HashPrefix::transactionID));

XRPL_ASSERT(
batchTxnIds_.size() == getFieldArray(sfRawTransactions).size(),
batch_txn_ids_.size() == getFieldArray(sfRawTransactions).size(),
"STTx::getBatchTransactionIDs : batch transaction IDs size mismatch");
return batchTxnIds_;
return batch_txn_ids_;
}

//------------------------------------------------------------------------------

@@ -89,6 +89,12 @@ Serializer::addInteger(std::int32_t i)
{
return add32(i);
}
template <>
int
Serializer::addInteger(std::int64_t i)
{
return add64(i);
}

int
Serializer::addRaw(Blob const& vector)

@@ -127,6 +127,8 @@ transResults()
MAKE_ERROR(tecLIMIT_EXCEEDED, "Limit exceeded."),
MAKE_ERROR(tecPSEUDO_ACCOUNT, "This operation is not allowed against a pseudo-account."),
MAKE_ERROR(tecPRECISION_LOSS, "The amounts used by the transaction cannot interact."),
MAKE_ERROR(tecNO_DELEGATE_PERMISSION, "Delegated account lacks permission to perform this transaction."),
MAKE_ERROR(tecWASM_REJECTED, "The custom WASM code that was run rejected your transaction."),

MAKE_ERROR(tefALREADY, "The exact transaction was already in this ledger."),
MAKE_ERROR(tefBAD_ADD_AUTH, "Not authorized to add account."),
@@ -150,6 +152,8 @@ transResults()
MAKE_ERROR(tefNO_TICKET, "Ticket is not in ledger."),
MAKE_ERROR(tefNFTOKEN_IS_NOT_TRANSFERABLE, "The specified NFToken is not transferable."),
MAKE_ERROR(tefINVALID_LEDGER_FIX_TYPE, "The LedgerFixType field has an invalid value."),
MAKE_ERROR(tefNO_WASM, "There is no WASM code to run, but a WASM-specific field was included."),
MAKE_ERROR(tefWASM_FIELD_NOT_INCLUDED, "WASM code requires a field to be included that was not included."),

MAKE_ERROR(telLOCAL_ERROR, "Local failure."),
MAKE_ERROR(telBAD_DOMAIN, "Domain too long."),
@@ -219,6 +223,7 @@ transResults()
MAKE_ERROR(temARRAY_TOO_LARGE, "Malformed: Array is too large."),
MAKE_ERROR(temBAD_TRANSFER_FEE, "Malformed: Transfer fee is outside valid range."),
MAKE_ERROR(temINVALID_INNER_BATCH, "Malformed: Invalid inner batch transaction."),
MAKE_ERROR(temBAD_WASM, "Malformed: Provided WASM code is invalid."),

MAKE_ERROR(terRETRY, "Retry transaction."),
MAKE_ERROR(terFUNDS_SPENT, "DEPRECATED."),
@@ -234,7 +239,6 @@ transResults()
MAKE_ERROR(terPRE_TICKET, "Ticket is not yet in ledger."),
MAKE_ERROR(terNO_AMM, "AMM doesn't exist for the asset pair."),
MAKE_ERROR(terADDRESS_COLLISION, "Failed to allocate an unique account address."),
MAKE_ERROR(terNO_DELEGATE_PERMISSION, "Delegated account lacks permission to perform this transaction."),

MAKE_ERROR(tesSUCCESS, "The transaction was applied. Only final in a validated ledger."),
};
Some files were not shown because too many files have changed in this diff.