Merge branch 'develop' into ximinez/lending-XLS-66

Ed Hennis
2025-11-10 15:35:09 -05:00
committed by GitHub
24 changed files with 703 additions and 553 deletions

View File

@@ -138,8 +138,6 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# Unity on linux/amd64
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
cmake_args = f'-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}'
cmake_target = 'coverage'
build_only = True
# Generate a unique name for the configuration, e.g. macos-arm64-debug
# or debian-bookworm-gcc-12-amd64-release-unity.

View File

@@ -7,19 +7,23 @@ on:
description: "The directory where to build."
required: true
type: string
build_only:
description: 'Whether to only build or to build and test the code ("true", "false").'
required: true
type: boolean
build_type:
description: 'The build type to use ("Debug", "Release").'
type: string
required: true
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
type: string
default: ""
cmake_target:
description: "The CMake target to build."
type: string
@@ -29,6 +33,7 @@ on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
@@ -51,27 +56,162 @@ on:
required: true
jobs:
build:
uses: ./.github/workflows/reusable-build.yml
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ inputs.build_type }}
cmake_args: ${{ inputs.cmake_args }}
cmake_target: ${{ inputs.cmake_target }}
runs_on: ${{ inputs.runs_on }}
image: ${{ inputs.image }}
config_name: ${{ inputs.config_name }}
nproc_subtract: ${{ inputs.nproc_subtract }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
build-and-test:
name: ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: 60
env:
ENABLED_VOIDSTAR: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
ENABLED_COVERAGE: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }}
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
test:
needs: build
uses: ./.github/workflows/reusable-test.yml
with:
run_tests: ${{ !inputs.build_only }}
verify_voidstar: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
runs_on: ${{ inputs.runs_on }}
image: ${{ inputs.image }}
config_name: ${{ inputs.config_name }}
nproc_subtract: ${{ inputs.nproc_subtract }}
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
with:
disable_ccache: false
- name: Print build environment
uses: ./.github/actions/print-env
- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ inputs.nproc_subtract }}
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ inputs.build_dir }}
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ inputs.build_type }}
# Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_ARGS: ${{ inputs.cmake_args }}
run: |
cmake \
-G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
${CMAKE_ARGS} \
..
- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_TARGET: ${{ inputs.cmake_target }}
run: |
cmake \
--build . \
--config "${BUILD_TYPE}" \
--parallel "${BUILD_NPROC}" \
--target "${CMAKE_TARGET}"
- name: Upload rippled artifact (Linux)
if: ${{ runner.os == 'Linux' }}
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
env:
BUILD_DIR: ${{ inputs.build_dir }}
with:
name: rippled-${{ inputs.config_name }}
path: ${{ env.BUILD_DIR }}/rippled
retention-days: 3
if-no-files-found: error
- name: Check linking (Linux)
if: ${{ runner.os == 'Linux' }}
working-directory: ${{ inputs.build_dir }}
shell: bash
run: |
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Verify presence of instrumentation (Linux)
if: ${{ runner.os == 'Linux' && env.ENABLED_VOIDSTAR == 'true' }}
working-directory: ${{ inputs.build_dir }}
shell: bash
run: |
./rippled --version | grep libvoidstar
- name: Run the separate tests
if: ${{ !inputs.build_only }}
working-directory: ${{ inputs.build_dir }}
# Windows locks some of the build files while running tests, and parallel jobs can collide
env:
BUILD_TYPE: ${{ inputs.build_type }}
PARALLELISM: ${{ runner.os == 'Windows' && '1' || steps.nproc.outputs.nproc }}
shell: bash
run: |
ctest \
--output-on-failure \
-C "${BUILD_TYPE}" \
-j "${PARALLELISM}"
- name: Prepare coverage report
if: ${{ !inputs.build_only && env.ENABLED_COVERAGE == 'true' }}
working-directory: ${{ inputs.build_dir }}
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
BUILD_TYPE: ${{ inputs.build_type }}
shell: bash
run: |
cmake \
--build . \
--config "${BUILD_TYPE}" \
--parallel "${BUILD_NPROC}" \
--target coverage
- name: Upload coverage report
if: ${{ github.repository_owner == 'XRPLF' && !inputs.build_only && env.ENABLED_COVERAGE == 'true' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true
fail_ci_if_error: true
files: ${{ inputs.build_dir }}/coverage.xml
plugins: noop
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
- name: Run the embedded tests
if: ${{ !inputs.build_only && env.ENABLED_COVERAGE != 'true' }}
working-directory: ${{ runner.os == 'Windows' && format('{0}/{1}', inputs.build_dir, inputs.build_type) || inputs.build_dir }}
shell: bash
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
run: |
./rippled --unittest --unittest-jobs "${BUILD_NPROC}"
- name: Debug failure (Linux)
if: ${{ failure() && runner.os == 'Linux' && !inputs.build_only }}
shell: bash
run: |
echo "IPv4 local port range:"
cat /proc/sys/net/ipv4/ip_local_port_range
echo "Netstat:"
netstat -an

View File

@@ -1,154 +0,0 @@
name: Build rippled
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: true
type: string
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
type: string
cmake_args:
description: "Additional arguments to pass to CMake."
required: true
type: string
cmake_target:
description: "The CMake target to build."
required: true
type: string
runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string
config_name:
description: "The name of the configuration."
required: true
type: string
nproc_subtract:
description: "The number of processors to subtract when calculating parallelism."
required: true
type: number
secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true
defaults:
run:
shell: bash
jobs:
build:
name: Build ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: 60
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
with:
disable_ccache: false
- name: Print build environment
uses: ./.github/actions/print-env
- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ inputs.nproc_subtract }}
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ inputs.build_dir }}
build_nproc: ${{ steps.nproc.outputs.nproc }}
build_type: ${{ inputs.build_type }}
# Set the verbosity to "quiet" for Windows to avoid an excessive
# amount of logs. For other OSes, the "verbose" logs are more useful.
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_ARGS: ${{ inputs.cmake_args }}
run: |
cmake \
-G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
${CMAKE_ARGS} \
..
- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_TARGET: ${{ inputs.cmake_target }}
run: |
cmake \
--build . \
--config "${BUILD_TYPE}" \
--parallel ${BUILD_NPROC} \
--target "${CMAKE_TARGET}"
- name: Put built binaries in one location
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_TYPE_DIR: ${{ runner.os == 'Windows' && inputs.build_type || '' }}
CMAKE_TARGET: ${{ inputs.cmake_target }}
run: |
mkdir -p ./binaries/doctest/
cp ./${BUILD_TYPE_DIR}/rippled* ./binaries/
if [ "${CMAKE_TARGET}" != 'coverage' ]; then
cp ./src/tests/libxrpl/${BUILD_TYPE_DIR}/xrpl.test.* ./binaries/doctest/
fi
- name: Upload rippled artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
env:
BUILD_DIR: ${{ inputs.build_dir }}
with:
name: rippled-${{ inputs.config_name }}
path: ${{ env.BUILD_DIR }}/binaries/
retention-days: 3
if-no-files-found: error
- name: Upload coverage report
if: ${{ github.repository_owner == 'XRPLF' && inputs.cmake_target == 'coverage' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true
fail_ci_if_error: true
files: ${{ inputs.build_dir }}/coverage.xml
plugins: noop
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

View File

@@ -1,111 +0,0 @@
name: Test rippled
on:
workflow_call:
inputs:
verify_voidstar:
description: "Whether to verify the presence of voidstar instrumentation."
required: true
type: boolean
run_tests:
description: "Whether to run unit tests"
required: true
type: boolean
runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string
config_name:
description: "The name of the configuration."
required: true
type: string
nproc_subtract:
description: "The number of processors to subtract when calculating parallelism."
required: true
type: number
jobs:
test:
name: Test ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: 30
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
with:
subtract: ${{ inputs.nproc_subtract }}
- name: Download rippled artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: rippled-${{ inputs.config_name }}
- name: Make binary executable (Linux and macOS)
shell: bash
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
run: |
chmod +x ./rippled
- name: Check linking (Linux)
if: ${{ runner.os == 'Linux' }}
shell: bash
run: |
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Verifying presence of instrumentation
if: ${{ inputs.verify_voidstar }}
shell: bash
run: |
./rippled --version | grep libvoidstar
- name: Run the embedded tests
if: ${{ inputs.run_tests }}
shell: bash
env:
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
run: |
./rippled --unittest --unittest-jobs ${BUILD_NPROC}
- name: Run the separate tests
if: ${{ inputs.run_tests }}
env:
EXT: ${{ runner.os == 'Windows' && '.exe' || '' }}
shell: bash
run: |
for test_file in ./doctest/*${EXT}; do
echo "Executing $test_file"
chmod +x "$test_file"
if [[ "${{ runner.os }}" == "Windows" && "$test_file" == "./doctest/xrpl.test.net.exe" ]]; then
echo "Skipping $test_file on Windows"
else
"$test_file"
fi
done
- name: Debug failure (Linux)
if: ${{ failure() && runner.os == 'Linux' && inputs.run_tests }}
shell: bash
run: |
echo "IPv4 local port range:"
cat /proc/sys/net/ipv4/ip_local_port_range
echo "Netstat:"
netstat -an

View File

@@ -1,21 +1,3 @@
macro(group_sources_in source_dir curdir)
file(GLOB children RELATIVE ${source_dir}/${curdir}
${source_dir}/${curdir}/*)
foreach (child ${children})
if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
group_sources_in(${source_dir} ${curdir}/${child})
else()
string(REPLACE "/" "\\" groupname ${curdir})
source_group(${groupname} FILES
${source_dir}/${curdir}/${child})
endif()
endforeach()
endmacro()
macro(group_sources curdir)
group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
endmacro()
macro (exclude_from_default target_)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)

View File

@@ -22,20 +22,4 @@ function(xrpl_add_test name)
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
add_test(NAME ${target} COMMAND ${target})
set_tests_properties(
${target} PROPERTIES
FIXTURES_REQUIRED ${target}_fixture
)
add_test(
NAME ${target}.build
COMMAND
${CMAKE_COMMAND}
--build ${CMAKE_BINARY_DIR}
--config $<CONFIG>
--target ${target}
)
set_tests_properties(${target}.build PROPERTIES
FIXTURES_SETUP ${target}_fixture
)
endfunction()

View File

@@ -12,7 +12,7 @@ if (static OR MSVC)
else ()
set (Boost_USE_STATIC_RUNTIME OFF)
endif ()
find_dependency (Boost 1.70
find_dependency (Boost
COMPONENTS
chrono
container
@@ -52,5 +52,3 @@ if (TARGET ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif ()
include ("${CMAKE_CURRENT_LIST_DIR}/XrplTargets.cmake")

View File

@@ -73,10 +73,7 @@ include(target_link_modules)
# Level 01
add_module(xrpl beast)
target_link_libraries(xrpl.libxrpl.beast PUBLIC
xrpl.imports.main
xrpl.libpb
)
target_link_libraries(xrpl.libxrpl.beast PUBLIC xrpl.imports.main)
# Level 02
add_module(xrpl basics)

View File

@@ -1,5 +1,5 @@
#[===================================================================[
convenience variables and sanity checks
sanity checks
#]===================================================================]
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
@@ -16,39 +16,19 @@ if (NOT is_multiconfig)
endif ()
endif ()
get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
set (is_root_project OFF)
else ()
set (is_root_project ON)
endif ()
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
set (is_clang TRUE)
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "This project requires clang 8 or later")
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
message (FATAL_ERROR "This project requires clang 16 or later")
endif ()
# TODO min AppleClang version check ?
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
set (is_gcc TRUE)
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "This project requires GCC 8 or later")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
message (FATAL_ERROR "This project requires GCC 12 or later")
endif ()
endif ()
if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
set (is_linux TRUE)
else ()
set (is_linux FALSE)
endif ()
if ("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
set (is_ci TRUE)
else ()
set (is_ci FALSE)
endif ()
# check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message (FATAL_ERROR "Builds (in-source) are not allowed in "

View File

@@ -1,10 +1,25 @@
#[===================================================================[
declare user options/settings
declare options and variables
#]===================================================================]
include(ProcessorCount)
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
set (is_linux TRUE)
else()
set(is_linux FALSE)
endif()
ProcessorCount(PROCESSOR_COUNT)
if("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
set(is_ci TRUE)
else()
set(is_ci FALSE)
endif()
get_directory_property(has_parent PARENT_DIRECTORY)
if(has_parent)
set(is_root_project OFF)
else()
set(is_root_project ON)
endif()
option(assert "Enables asserts, even in release builds" OFF)
@@ -25,10 +40,15 @@ if(unity)
endif()
set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif()
if(is_clang AND is_linux)
option(voidstar "Enable Antithesis instrumentation." OFF)
endif()
if(is_gcc OR is_clang)
include(ProcessorCount)
ProcessorCount(PROCESSOR_COUNT)
option(coverage "Generates coverage info." OFF)
option(profile "Add profiling flags" OFF)
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
@@ -48,6 +68,7 @@ else()
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif()
if(is_linux)
option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
option(static "link protobuf, openssl, libc++, and boost statically" ON)
@@ -64,11 +85,13 @@ else()
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()
if(is_clang)
option(use_lld "enables detection of lld linker" ON)
else()
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
@@ -102,16 +125,6 @@ if(san)
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif()
endif()
set(container_label "" CACHE STRING "tag to use for package building containers")
option(packages_only
"ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option(have_package_container
"Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)
# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
@@ -125,15 +138,13 @@ option(boost_show_deprecated
"Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
OFF)
option(beast_hashers
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
OFF)
if(WIN32)
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()
if(coverage)
message(STATUS "coverage build requested - forcing Debug build")
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)

View File

@@ -6,10 +6,10 @@
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
"rocksdb/10.0.1#85537f46e538974d67da0c3977de48ac%1756234304.347",
"rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1759820024.194",
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1759746684.671",
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1760106486.594",
"nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
@@ -17,7 +17,7 @@
"libarchive/3.8.1#5cf685686322e906cb42706ab7e099a8%1756234256.696",
"jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244",
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1756234248.958",
"doctest/2.4.11#a4211dfc329a16ba9f280f9574025659%1756234220.819",
"doctest/2.4.12#eb9fb352fb2fdfc8abb17ec270945165%1749889324.069",
"date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
"bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1756234261.716",
@@ -30,7 +30,7 @@
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1756234232.901",
"msys2/cci.latest#5b73b10144f73cc5bfe0572ed9be39e1%1751977009.857",
"m4/1.4.19#b38ced39a01e31fef5435bc634461fd2%1700758725.451",
"m4/1.4.19#f119296e5c4772b3bb7ab060ae8f417b%1760707875.678",
"cmake/3.31.8#dde3bde00bb843687e55aea5afa0e220%1756234232.89",
"b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28",
"automake/1.16.5#b91b7c384c3deaa9d535be02da14d04f%1755524470.56",
@@ -48,9 +48,12 @@
"boost/1.83.0": [
"boost/1.88.0"
],
"sqlite3/3.44.2": [
"sqlite3/[>=3.44 <4]": [
"sqlite3/3.49.1"
],
"lz4/[>=1.9.4 <2]": [
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504"
]
},
"config_requires": []
}
}

View File

@@ -33,7 +33,7 @@ class Xrpl(ConanFile):
]
test_requires = [
'doctest/2.4.11',
'doctest/2.4.12',
]
tool_requires = [
@@ -114,7 +114,7 @@ class Xrpl(ConanFile):
if self.options.jemalloc:
self.requires('jemalloc/5.3.0')
if self.options.rocksdb:
self.requires('rocksdb/10.0.1')
self.requires('rocksdb/10.5.1')
self.requires('xxhash/0.8.3', **transitive_headers_opt)
exports_sources = (

View File

@@ -5,6 +5,7 @@
#include <xrpl/json/json_forwards.h>
#include <cstring>
#include <limits>
#include <map>
#include <string>
#include <vector>
@@ -139,9 +140,9 @@ public:
using ArrayIndex = UInt;
static Value const null;
static Int const minInt;
static Int const maxInt;
static UInt const maxUInt;
static constexpr Int minInt = std::numeric_limits<Int>::min();
static constexpr Int maxInt = std::numeric_limits<Int>::max();
static constexpr UInt maxUInt = std::numeric_limits<UInt>::max();
private:
class CZString
@@ -244,6 +245,10 @@ public:
bool
asBool() const;
/** Correct absolute value from int or unsigned int */
UInt
asAbsUInt() const;
// TODO: What is the "empty()" method this docstring mentions?
/** isNull() tests to see if this field is null. Don't use this method to
test for emptiness: use empty(). */
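The header change above replaces the out-of-line `static const` limit members with `constexpr` definitions and adds an `asAbsUInt()` accessor that returns the magnitude of a value as an unsigned integer. A minimal usage sketch, not part of this commit (the include path and linking against the json library are assumptions):

#include <xrpl/json/json_value.h> // assumed header exposing Json::Value

#include <limits>

// The limits are now usable in constant expressions; previously they were
// defined out of line in json_value.cpp and only available at run time.
static_assert(Json::Value::maxUInt == std::numeric_limits<Json::UInt>::max());
static_assert(Json::Value::minInt == std::numeric_limits<Json::Int>::min());

int main()
{
    Json::Value const positive{42};
    Json::Value const negative{-42};
    // asAbsUInt() yields the absolute value for signed, unsigned, real,
    // boolean, and numeric string values.
    return (positive.asAbsUInt() == 42u && negative.asAbsUInt() == 42u) ? 0 : 1;
}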

View File

@@ -66,7 +66,6 @@ XRPL_FEATURE(XRPFees, Supported::yes, VoteBehavior::DefaultNo
XRPL_FEATURE(DisallowIncoming, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (RemoveNFTokenAutoTrustLine, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(ExpandedSignerList, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FEATURE(CheckCashMakesTrustLine, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FEATURE(FlowSortStrands, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(TicketBatch, Supported::yes, VoteBehavior::DefaultYes)
XRPL_FEATURE(NegativeUNL, Supported::yes, VoteBehavior::DefaultYes)
@@ -122,6 +121,7 @@ XRPL_RETIRE(fixRmSmallIncreasedQOffers)
XRPL_RETIRE(fixSTAmountCanonicalize)
XRPL_RETIRE(fixTakerDryOfferRemoval)
XRPL_RETIRE(fixTrustLinesToSelf)
XRPL_RETIRE(CheckCashMakesTrustLine)
XRPL_RETIRE(CryptoConditions)
XRPL_RETIRE(Escrow)
XRPL_RETIRE(EnforceInvariants)

View File

@@ -14,9 +14,6 @@
namespace Json {
Value const Value::null;
Int const Value::minInt = Int(~(UInt(-1) / 2));
Int const Value::maxInt = Int(UInt(-1) / 2);
UInt const Value::maxUInt = UInt(-1);
class DefaultValueAllocator : public ValueAllocator
{
@@ -550,6 +547,69 @@ Value::asInt() const
return 0; // unreachable;
}
UInt
Value::asAbsUInt() const
{
switch (type_)
{
case nullValue:
return 0;
case intValue: {
// Doing this conversion through int64 avoids overflow error for
// value_.int_ = -1 * 2^31 i.e. numeric_limits<int>::min().
if (value_.int_ < 0)
return static_cast<std::int64_t>(value_.int_) * -1;
return value_.int_;
}
case uintValue:
return value_.uint_;
case realValue: {
if (value_.real_ < 0)
{
JSON_ASSERT_MESSAGE(
-1 * value_.real_ <= maxUInt,
"Real out of unsigned integer range");
return UInt(-1 * value_.real_);
}
JSON_ASSERT_MESSAGE(
value_.real_ <= maxUInt, "Real out of unsigned integer range");
return UInt(value_.real_);
}
case booleanValue:
return value_.bool_ ? 1 : 0;
case stringValue: {
char const* const str{value_.string_ ? value_.string_ : ""};
auto const temp = beast::lexicalCastThrow<std::int64_t>(str);
if (temp < 0)
{
JSON_ASSERT_MESSAGE(
-1 * temp <= maxUInt,
"String out of unsigned integer range");
return -1 * temp;
}
JSON_ASSERT_MESSAGE(
temp <= maxUInt, "String out of unsigned integer range");
return temp;
}
case arrayValue:
case objectValue:
JSON_ASSERT_MESSAGE(false, "Type is not convertible to int");
// LCOV_EXCL_START
default:
UNREACHABLE("Json::Value::asAbsInt : invalid type");
// LCOV_EXCL_STOP
}
return 0; // unreachable;
}
Value::UInt
Value::asUInt() const
{

View File

@@ -1108,7 +1108,7 @@ amountFromJson(SField const& name, Json::Value const& v)
}
else
{
parts.mantissa = -value.asInt();
parts.mantissa = value.asAbsUInt();
parts.negative = true;
}
}

View File

@@ -187,7 +187,7 @@ numberFromJson(SField const& field, Json::Value const& value)
}
else
{
parts.mantissa = -value.asInt();
parts.mantissa = value.asAbsUInt();
parts.negative = true;
}
}
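Both `amountFromJson` and `numberFromJson` previously computed the mantissa of a negative value as `-value.asInt()`. When the JSON value holds exactly `std::numeric_limits<int>::min()`, that negation overflows 32-bit signed arithmetic; `asAbsUInt()` instead widens to `int64_t` before negating (see the implementation above), so the magnitude fits in its unsigned return type. An illustrative sketch, not part of this commit (variable names and the include path are assumptions):

#include <xrpl/json/json_value.h> // assumed header exposing Json::Value

#include <cstdint>
#include <limits>

int main()
{
    constexpr int imin = std::numeric_limits<int>::min(); // -2147483648
    Json::Value const v{imin};

    // Old call-site pattern: negating asInt() is undefined behavior here,
    // because +2147483648 is not representable as a 32-bit int.
    //     auto mantissa = -v.asInt();

    // New call-site pattern: the magnitude is produced without overflow.
    std::uint64_t const mantissa = v.asAbsUInt();
    return mantissa == 2147483648u ? 0 : 1;
}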

View File

@@ -679,9 +679,6 @@ class Check_test : public beast::unit_test::suite
using namespace test::jtx;
bool const cashCheckMakesTrustLine =
features[featureCheckCashMakesTrustLine];
Account const gw{"gateway"};
Account const alice{"alice"};
Account const bob{"bob"};
@@ -714,26 +711,10 @@ class Check_test : public beast::unit_test::suite
// and fails because he hasn't got a trust line for USD.
env(pay(gw, alice, USD(0.5)));
env.close();
if (!cashCheckMakesTrustLine)
{
// If cashing a check automatically creates a trustline then
// this returns tesSUCCESS and the check is removed from the
// ledger which would mess up later tests.
env(check::cash(bob, chkId1, USD(10)), ter(tecNO_LINE));
env.close();
}
// bob sets up the trust line, but not at a high enough limit.
env(trust(bob, USD(9.5)));
env.close();
if (!cashCheckMakesTrustLine)
{
// If cashing a check is allowed to exceed the trust line
// limit then this returns tesSUCCESS and the check is
// removed from the ledger which would mess up later tests.
env(check::cash(bob, chkId1, USD(10)), ter(tecPATH_PARTIAL));
env.close();
}
// bob sets the trust line limit high enough but asks for more
// than the check's SendMax.
@@ -808,34 +789,31 @@ class Check_test : public beast::unit_test::suite
BEAST_EXPECT(ownerCount(env, alice) == 2);
BEAST_EXPECT(ownerCount(env, bob) == 1);
if (cashCheckMakesTrustLine)
{
// Automatic trust lines are enabled. But one aspect of
// automatic trust lines is that they allow the account
// cashing a check to exceed their trust line limit. Show
// that at work.
//
// bob's trust line limit is currently USD(10.5). Show that
// a payment to bob cannot exceed that trust line, but cashing
// a check can.
// Automatic trust lines are enabled. But one aspect of
// automatic trust lines is that they allow the account
// cashing a check to exceed their trust line limit. Show
// that at work.
//
// bob's trust line limit is currently USD(10.5). Show that
// a payment to bob cannot exceed that trust line, but cashing
// a check can.
// Payment of 20 USD fails.
env(pay(gw, bob, USD(20)), ter(tecPATH_PARTIAL));
env.close();
// Payment of 20 USD fails.
env(pay(gw, bob, USD(20)), ter(tecPATH_PARTIAL));
env.close();
uint256 const chkId20{getCheckIndex(gw, env.seq(gw))};
env(check::create(gw, bob, USD(20)));
env.close();
uint256 const chkId20{getCheckIndex(gw, env.seq(gw))};
env(check::create(gw, bob, USD(20)));
env.close();
// However cashing a check for 20 USD succeeds.
env(check::cash(bob, chkId20, USD(20)));
env.close();
env.require(balance(bob, USD(30)));
// However cashing a check for 20 USD succeeds.
env(check::cash(bob, chkId20, USD(20)));
env.close();
env.require(balance(bob, USD(30)));
// Clean up this most recent experiment so the rest of the
// tests work.
env(pay(bob, gw, USD(20)));
}
// Clean up this most recent experiment so the rest of the
// tests work.
env(pay(bob, gw, USD(20)));
// ... so bob cancels alice's remaining check.
env(check::cancel(bob, chkId3));
@@ -950,8 +928,7 @@ class Check_test : public beast::unit_test::suite
env(check::create(alice, bob, USD(7)));
env.close();
env(check::cash(bob, chkId, USD(7)),
ter(cashCheckMakesTrustLine ? tecNO_AUTH : tecNO_LINE));
env(check::cash(bob, chkId, USD(7)), ter(tecNO_AUTH));
env.close();
// Now give bob a trustline for USD. bob still can't cash the
@@ -966,17 +943,6 @@ class Check_test : public beast::unit_test::suite
env(trust(gw, bob["USD"](1)), txflags(tfSetfAuth));
env.close();
// bob tries to cash the check again but fails because his trust
// limit is too low.
if (!cashCheckMakesTrustLine)
{
// If cashing a check is allowed to exceed the trust line
// limit then this returns tesSUCCESS and the check is
// removed from the ledger which would mess up later tests.
env(check::cash(bob, chkId, USD(7)), ter(tecPATH_PARTIAL));
env.close();
}
// Two possible outcomes here depending on whether cashing a
// check can build a trust line:
// o If it can't build a trust line, then since bob set his
@@ -985,7 +951,7 @@ class Check_test : public beast::unit_test::suite
// o If it can build a trust line, then the check is allowed to
// exceed the trust limit and bob gets the full transfer.
env(check::cash(bob, chkId, check::DeliverMin(USD(4))));
STAmount const bobGot = cashCheckMakesTrustLine ? USD(7) : USD(5);
STAmount const bobGot = USD(7);
verifyDeliveredAmount(env, bobGot);
env.require(balance(alice, USD(8) - bobGot));
env.require(balance(bob, bobGot));
@@ -1368,23 +1334,6 @@ class Check_test : public beast::unit_test::suite
env(pay(gw, alice, USD(20)));
env.close();
// Before bob gets a trustline, have him try to cash a check.
// Should fail.
{
uint256 const chkId{getCheckIndex(alice, env.seq(alice))};
env(check::create(alice, bob, USD(20)));
env.close();
if (!features[featureCheckCashMakesTrustLine])
{
// If cashing a check automatically creates a trustline then
// this returns tesSUCCESS and the check is removed from the
// ledger which would mess up later tests.
env(check::cash(bob, chkId, USD(20)), ter(tecNO_LINE));
env.close();
}
}
// Now set up bob's trustline.
env(trust(bob, USD(20)));
env.close();
@@ -1984,10 +1933,6 @@ class Check_test : public beast::unit_test::suite
{
// Explore automatic trust line creation when a check is cashed.
//
// This capability is enabled by the featureCheckCashMakesTrustLine
// amendment. So this test executes only when that amendment is
// active.
assert(features[featureCheckCashMakesTrustLine]);
testcase("Trust Line Creation");
@@ -2688,11 +2633,9 @@ public:
{
using namespace test::jtx;
auto const sa = testable_amendments();
testWithFeats(sa - featureCheckCashMakesTrustLine);
testWithFeats(sa - disallowIncoming);
testWithFeats(sa);
testTrustLineCreation(sa); // Test with featureCheckCashMakesTrustLine
testTrustLineCreation(sa);
}
};

View File

@@ -572,7 +572,7 @@ public:
false,
true,
1,
std::chrono::seconds{Json::Value::maxInt + 1}}});
std::chrono::seconds{Json::Value::minInt}}});
// force an out-of-range validUntil value on the future list
// The first list is accepted. The second fails. The parser
// returns the "best" result, so this looks like a success.
@@ -608,7 +608,7 @@ public:
false,
true,
1,
std::chrono::seconds{Json::Value::maxInt + 1},
std::chrono::seconds{Json::Value::minInt},
std::chrono::seconds{Json::Value::maxInt - 6000}}});
// verify refresh intervals are properly clamped
testFetchList(

View File

@@ -3,6 +3,7 @@
#include <xrpl/basics/random.h>
#include <xrpl/beast/unit_test.h>
#include <xrpl/protocol/STAmount.h>
#include <xrpl/protocol/XRPAmount.h>
namespace ripple {
@@ -585,6 +586,216 @@ public:
#endif
}
void
testParseJson()
{
static_assert(!std::is_convertible_v<STAmount*, Number*>);
{
STAmount const stnum{sfNumber};
BEAST_EXPECT(stnum.getSType() == STI_AMOUNT);
BEAST_EXPECT(stnum.getText() == "0");
BEAST_EXPECT(stnum.isDefault() == true);
BEAST_EXPECT(stnum.value() == Number{0});
}
{
BEAST_EXPECT(
amountFromJson(sfNumber, Json::Value(42)) == XRPAmount(42));
BEAST_EXPECT(
amountFromJson(sfNumber, Json::Value(-42)) == XRPAmount(-42));
BEAST_EXPECT(
amountFromJson(sfNumber, Json::UInt(42)) == XRPAmount(42));
BEAST_EXPECT(amountFromJson(sfNumber, "-123") == XRPAmount(-123));
BEAST_EXPECT(amountFromJson(sfNumber, "123") == XRPAmount(123));
BEAST_EXPECT(amountFromJson(sfNumber, "-123") == XRPAmount(-123));
BEAST_EXPECT(amountFromJson(sfNumber, "3.14e2") == XRPAmount(314));
BEAST_EXPECT(
amountFromJson(sfNumber, "-3.14e2") == XRPAmount(-314));
BEAST_EXPECT(amountFromJson(sfNumber, "0") == XRPAmount(0));
BEAST_EXPECT(amountFromJson(sfNumber, "-0") == XRPAmount(0));
constexpr auto imin = std::numeric_limits<int>::min();
BEAST_EXPECT(amountFromJson(sfNumber, imin) == XRPAmount(imin));
BEAST_EXPECT(
amountFromJson(sfNumber, std::to_string(imin)) ==
XRPAmount(imin));
constexpr auto imax = std::numeric_limits<int>::max();
BEAST_EXPECT(amountFromJson(sfNumber, imax) == XRPAmount(imax));
BEAST_EXPECT(
amountFromJson(sfNumber, std::to_string(imax)) ==
XRPAmount(imax));
constexpr auto umax = std::numeric_limits<unsigned int>::max();
BEAST_EXPECT(amountFromJson(sfNumber, umax) == XRPAmount(umax));
BEAST_EXPECT(
amountFromJson(sfNumber, std::to_string(umax)) ==
XRPAmount(umax));
// XRP does not handle fractional part
try
{
auto _ = amountFromJson(sfNumber, "0.0");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected =
"XRP and MPT must be specified as integral amount.";
BEAST_EXPECT(e.what() == expected);
}
// XRP does not handle fractional part
try
{
auto _ = amountFromJson(sfNumber, "1000e-2");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected =
"XRP and MPT must be specified as integral amount.";
BEAST_EXPECT(e.what() == expected);
}
// Obvious non-numbers tested here
try
{
auto _ = amountFromJson(sfNumber, "");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'' is not a number";
BEAST_EXPECT(e.what() == expected);
}
try
{
auto _ = amountFromJson(sfNumber, "e");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'e' is not a number";
BEAST_EXPECT(e.what() == expected);
}
try
{
auto _ = amountFromJson(sfNumber, "1e");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'1e' is not a number";
BEAST_EXPECT(e.what() == expected);
}
try
{
auto _ = amountFromJson(sfNumber, "e2");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'e2' is not a number";
BEAST_EXPECT(e.what() == expected);
}
try
{
auto _ = amountFromJson(sfNumber, Json::Value());
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected =
"XRP may not be specified with a null Json value";
BEAST_EXPECT(e.what() == expected);
}
try
{
auto _ = amountFromJson(
sfNumber,
"123456789012345678901234567890123456789012345678901234"
"5678"
"901234567890123456789012345678901234567890123456789012"
"3456"
"78901234567890123456789012345678901234567890");
BEAST_EXPECT(false);
}
catch (std::bad_cast const& e)
{
BEAST_EXPECT(true);
}
// We do not handle leading zeros
try
{
auto _ = amountFromJson(sfNumber, "001");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'001' is not a number";
BEAST_EXPECT(e.what() == expected);
}
try
{
auto _ = amountFromJson(sfNumber, "000.0");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'000.0' is not a number";
BEAST_EXPECT(e.what() == expected);
}
// We do not handle dangling dot
try
{
auto _ = amountFromJson(sfNumber, ".1");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'.1' is not a number";
BEAST_EXPECT(e.what() == expected);
}
try
{
auto _ = amountFromJson(sfNumber, "1.");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'1.' is not a number";
BEAST_EXPECT(e.what() == expected);
}
try
{
auto _ = amountFromJson(sfNumber, "1.e3");
BEAST_EXPECT(false);
}
catch (std::runtime_error const& e)
{
std::string const expected = "'1.e3' is not a number";
BEAST_EXPECT(e.what() == expected);
}
}
}
void
testConvertXRP()
{
@@ -1022,6 +1233,7 @@ public:
testArithmetic();
testUnderflow();
testRounding();
testParseJson();
testConvertXRP();
testConvertIOU();
testCanAddXRP();

View File

@@ -2,6 +2,7 @@
#include <xrpl/beast/unit_test/suite.h>
#include <xrpl/json/json_forwards.h>
#include <xrpl/protocol/Issue.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STAmount.h>
#include <xrpl/protocol/STNumber.h>
@@ -126,6 +127,30 @@ struct STNumber_test : public beast::unit_test::suite
BEAST_EXPECT(
numberFromJson(sfNumber, "-0.000e6") == STNumber(sfNumber, 0));
constexpr auto imin = std::numeric_limits<int>::min();
BEAST_EXPECT(
numberFromJson(sfNumber, imin) ==
STNumber(sfNumber, Number(imin, 0)));
BEAST_EXPECT(
numberFromJson(sfNumber, std::to_string(imin)) ==
STNumber(sfNumber, Number(imin, 0)));
constexpr auto imax = std::numeric_limits<int>::max();
BEAST_EXPECT(
numberFromJson(sfNumber, imax) ==
STNumber(sfNumber, Number(imax, 0)));
BEAST_EXPECT(
numberFromJson(sfNumber, std::to_string(imax)) ==
STNumber(sfNumber, Number(imax, 0)));
constexpr auto umax = std::numeric_limits<unsigned int>::max();
BEAST_EXPECT(
numberFromJson(sfNumber, umax) ==
STNumber(sfNumber, Number(umax, 0)));
BEAST_EXPECT(
numberFromJson(sfNumber, std::to_string(umax)) ==
STNumber(sfNumber, Number(umax, 0)));
// Obvious non-numbers tested here
try
{

View File

@@ -14,5 +14,9 @@ xrpl_add_test(crypto)
target_link_libraries(xrpl.test.crypto PRIVATE xrpl.imports.test)
xrpl_add_test(json)
target_link_libraries(xrpl.test.json PRIVATE xrpl.imports.test)
xrpl_add_test(net)
target_link_libraries(xrpl.test.net PRIVATE xrpl.imports.test)
# Network unit tests are currently not supported on Windows
if(NOT WIN32)
xrpl_add_test(net)
target_link_libraries(xrpl.test.net PRIVATE xrpl.imports.test)
endif()

View File

@@ -7,6 +7,7 @@
#include <doctest/doctest.h>
#include <algorithm>
#include <cmath>
#include <regex>
#include <sstream>
#include <string>
@@ -15,6 +16,14 @@ namespace ripple {
TEST_SUITE_BEGIN("json_value");
TEST_CASE("limits")
{
using namespace Json;
static_assert(Value::minInt == Int(~(UInt(-1) / 2)));
static_assert(Value::maxInt == Int(UInt(-1) / 2));
static_assert(Value::maxUInt == UInt(-1));
}
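The bit-twiddling forms checked above match the new `numeric_limits`-based definitions; worked out for the 32-bit `Int`/`UInt` the surrounding tests assume:

UInt(-1)        = 2^32 - 1 = 4294967295                     (maxUInt)
UInt(-1) / 2    = 2^31 - 1 = 2147483647                     (maxInt)
~(UInt(-1) / 2) = bit pattern 0x80000000 = -2^31 as an Int  (minInt)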
TEST_CASE("construct and compare Json::StaticString")
{
static constexpr char sample[]{"Contents of a Json::StaticString"};
@@ -582,8 +591,6 @@ TEST_CASE("bad json")
TEST_CASE("edge cases")
{
std::string json;
std::uint32_t max_uint = std::numeric_limits<std::uint32_t>::max();
std::int32_t max_int = std::numeric_limits<std::int32_t>::max();
std::int32_t min_int = std::numeric_limits<std::int32_t>::min();
@@ -592,71 +599,145 @@ TEST_CASE("edge cases")
std::int32_t a_large_int = max_int - 1978;
std::int32_t a_small_int = min_int + 1978;
json = "{\"max_uint\":" + std::to_string(max_uint);
json += ",\"max_int\":" + std::to_string(max_int);
json += ",\"min_int\":" + std::to_string(min_int);
json += ",\"a_uint\":" + std::to_string(a_uint);
json += ",\"a_large_int\":" + std::to_string(a_large_int);
json += ",\"a_small_int\":" + std::to_string(a_small_int);
json += "}";
{
std::string json = "{\"max_uint\":" + std::to_string(max_uint);
json += ",\"max_int\":" + std::to_string(max_int);
json += ",\"min_int\":" + std::to_string(min_int);
json += ",\"a_uint\":" + std::to_string(a_uint);
json += ",\"a_large_int\":" + std::to_string(a_large_int);
json += ",\"a_small_int\":" + std::to_string(a_small_int);
json += "}";
Json::Value j1;
Json::Reader r1;
Json::Value j1;
Json::Reader r1;
CHECK(r1.parse(json, j1));
CHECK(j1["max_uint"].asUInt() == max_uint);
CHECK(j1["max_int"].asInt() == max_int);
CHECK(j1["min_int"].asInt() == min_int);
CHECK(j1["a_uint"].asUInt() == a_uint);
CHECK(j1["a_uint"] > a_large_int);
CHECK(j1["a_uint"] > a_small_int);
CHECK(j1["a_large_int"].asInt() == a_large_int);
CHECK(j1["a_large_int"].asUInt() == a_large_int);
CHECK(j1["a_large_int"] < a_uint);
CHECK(j1["a_small_int"].asInt() == a_small_int);
CHECK(j1["a_small_int"] < a_uint);
CHECK(r1.parse(json, j1));
CHECK(j1["max_uint"].asUInt() == max_uint);
CHECK(j1["max_uint"].asAbsUInt() == max_uint);
CHECK(j1["max_int"].asInt() == max_int);
CHECK(j1["max_int"].asAbsUInt() == max_int);
CHECK(j1["min_int"].asInt() == min_int);
CHECK(
j1["min_int"].asAbsUInt() ==
static_cast<std::int64_t>(min_int) * -1);
CHECK(j1["a_uint"].asUInt() == a_uint);
CHECK(j1["a_uint"].asAbsUInt() == a_uint);
CHECK(j1["a_uint"] > a_large_int);
CHECK(j1["a_uint"] > a_small_int);
CHECK(j1["a_large_int"].asInt() == a_large_int);
CHECK(j1["a_large_int"].asAbsUInt() == a_large_int);
CHECK(j1["a_large_int"].asUInt() == a_large_int);
CHECK(j1["a_large_int"] < a_uint);
CHECK(j1["a_small_int"].asInt() == a_small_int);
CHECK(
j1["a_small_int"].asAbsUInt() ==
static_cast<std::int64_t>(a_small_int) * -1);
CHECK(j1["a_small_int"] < a_uint);
}
json = "{\"overflow\":";
json += std::to_string(std::uint64_t(max_uint) + 1);
json += "}";
std::uint64_t overflow = std::uint64_t(max_uint) + 1;
{
std::string json = "{\"overflow\":";
json += std::to_string(overflow);
json += "}";
Json::Value j2;
Json::Reader r2;
Json::Value j2;
Json::Reader r2;
CHECK(!r2.parse(json, j2));
CHECK(!r2.parse(json, j2));
}
json = "{\"underflow\":";
json += std::to_string(std::int64_t(min_int) - 1);
json += "}";
std::int64_t underflow = std::int64_t(min_int) - 1;
{
std::string json = "{\"underflow\":";
json += std::to_string(underflow);
json += "}";
Json::Value j3;
Json::Reader r3;
Json::Value j3;
Json::Reader r3;
CHECK(!r3.parse(json, j3));
CHECK(!r3.parse(json, j3));
}
Json::Value intString{"4294967296"};
CHECK_THROWS_AS(intString.asUInt(), beast::BadLexicalCast);
{
Json::Value intString{std::to_string(overflow)};
CHECK_THROWS_AS(intString.asUInt(), beast::BadLexicalCast);
CHECK_THROWS_AS(intString.asAbsUInt(), Json::error);
intString = "4294967295";
CHECK(intString.asUInt() == 4294967295u);
intString = "4294967295";
CHECK(intString.asUInt() == 4294967295u);
CHECK(intString.asAbsUInt() == 4294967295u);
intString = "0";
CHECK(intString.asUInt() == 0);
intString = "0";
CHECK(intString.asUInt() == 0);
CHECK(intString.asAbsUInt() == 0);
intString = "-1";
CHECK_THROWS_AS(intString.asUInt(), beast::BadLexicalCast);
intString = "-1";
CHECK_THROWS_AS(intString.asUInt(), beast::BadLexicalCast);
CHECK(intString.asAbsUInt() == 1);
intString = "2147483648";
CHECK_THROWS_AS(intString.asInt(), beast::BadLexicalCast);
intString = "-4294967295";
CHECK(intString.asAbsUInt() == 4294967295);
intString = "2147483647";
CHECK(intString.asInt() == 2147483647);
intString = "-4294967296";
CHECK_THROWS_AS(intString.asAbsUInt(), Json::error);
intString = "-2147483648";
CHECK(intString.asInt() == -2147483648LL); // MSVC wants the LL
intString = "2147483648";
CHECK_THROWS_AS(intString.asInt(), beast::BadLexicalCast);
CHECK(intString.asAbsUInt() == 2147483648);
intString = "-2147483649";
CHECK_THROWS_AS(intString.asInt(), beast::BadLexicalCast);
intString = "2147483647";
CHECK(intString.asInt() == 2147483647);
CHECK(intString.asAbsUInt() == 2147483647);
intString = "-2147483648";
CHECK(intString.asInt() == -2147483648LL); // MSVC wants the LL
CHECK(intString.asAbsUInt() == 2147483648LL);
intString = "-2147483649";
CHECK_THROWS_AS(intString.asInt(), beast::BadLexicalCast);
CHECK(intString.asAbsUInt() == 2147483649);
}
{
Json::Value intReal{4294967297.0};
CHECK_THROWS_AS(intReal.asUInt(), Json::error);
CHECK_THROWS_AS(intReal.asAbsUInt(), Json::error);
intReal = 4294967295.0;
CHECK(intReal.asUInt() == 4294967295u);
CHECK(intReal.asAbsUInt() == 4294967295u);
intReal = 0.0;
CHECK(intReal.asUInt() == 0);
CHECK(intReal.asAbsUInt() == 0);
intReal = -1.0;
CHECK_THROWS_AS(intReal.asUInt(), Json::error);
CHECK(intReal.asAbsUInt() == 1);
intReal = -4294967295.0;
CHECK(intReal.asAbsUInt() == 4294967295);
intReal = -4294967296.0;
CHECK_THROWS_AS(intReal.asAbsUInt(), Json::error);
intReal = 2147483648.0;
CHECK_THROWS_AS(intReal.asInt(), Json::error);
CHECK(intReal.asAbsUInt() == 2147483648);
intReal = 2147483647.0;
CHECK(intReal.asInt() == 2147483647);
CHECK(intReal.asAbsUInt() == 2147483647);
intReal = -2147483648.0;
CHECK(intReal.asInt() == -2147483648LL); // MSVC wants the LL
CHECK(intReal.asAbsUInt() == 2147483648LL);
intReal = -2147483649.0;
CHECK_THROWS_AS(intReal.asInt(), Json::error);
CHECK(intReal.asAbsUInt() == 2147483649);
}
}
TEST_CASE("copy")
@@ -793,6 +874,7 @@ TEST_CASE("conversions")
CHECK(val.asString() == "");
CHECK(val.asInt() == 0);
CHECK(val.asUInt() == 0);
CHECK(val.asAbsUInt() == 0);
CHECK(val.asDouble() == 0.0);
CHECK(val.asBool() == false);
@@ -813,6 +895,7 @@ TEST_CASE("conversions")
CHECK(val.asString() == "-1234");
CHECK(val.asInt() == -1234);
CHECK_THROWS_AS(val.asUInt(), Json::error);
CHECK(val.asAbsUInt() == 1234u);
CHECK(val.asDouble() == -1234.0);
CHECK(val.asBool() == true);
@@ -833,6 +916,7 @@ TEST_CASE("conversions")
CHECK(val.asString() == "1234");
CHECK(val.asInt() == 1234);
CHECK(val.asUInt() == 1234u);
CHECK(val.asAbsUInt() == 1234u);
CHECK(val.asDouble() == 1234.0);
CHECK(val.asBool() == true);
@@ -853,6 +937,7 @@ TEST_CASE("conversions")
CHECK(std::regex_match(val.asString(), std::regex("^2\\.0*$")));
CHECK(val.asInt() == 2);
CHECK(val.asUInt() == 2u);
CHECK(val.asAbsUInt() == 2u);
CHECK(val.asDouble() == 2.0);
CHECK(val.asBool() == true);
@@ -873,6 +958,7 @@ TEST_CASE("conversions")
CHECK(val.asString() == "54321");
CHECK(val.asInt() == 54321);
CHECK(val.asUInt() == 54321u);
CHECK(val.asAbsUInt() == 54321);
CHECK_THROWS_AS(val.asDouble(), Json::error);
CHECK(val.asBool() == true);
@@ -893,6 +979,7 @@ TEST_CASE("conversions")
CHECK(val.asString() == "");
CHECK_THROWS_AS(val.asInt(), std::exception);
CHECK_THROWS_AS(val.asUInt(), std::exception);
CHECK_THROWS_AS(val.asAbsUInt(), std::exception);
CHECK_THROWS_AS(val.asDouble(), std::exception);
CHECK(val.asBool() == false);
@@ -913,6 +1000,7 @@ TEST_CASE("conversions")
CHECK(val.asString() == "false");
CHECK(val.asInt() == 0);
CHECK(val.asUInt() == 0);
CHECK(val.asAbsUInt() == 0);
CHECK(val.asDouble() == 0.0);
CHECK(val.asBool() == false);
@@ -933,6 +1021,7 @@ TEST_CASE("conversions")
CHECK(val.asString() == "true");
CHECK(val.asInt() == 1);
CHECK(val.asUInt() == 1);
CHECK(val.asAbsUInt() == 1);
CHECK(val.asDouble() == 1.0);
CHECK(val.asBool() == true);
@@ -953,6 +1042,7 @@ TEST_CASE("conversions")
CHECK_THROWS_AS(val.asString(), Json::error);
CHECK_THROWS_AS(val.asInt(), Json::error);
CHECK_THROWS_AS(val.asUInt(), Json::error);
CHECK_THROWS_AS(val.asAbsUInt(), Json::error);
CHECK_THROWS_AS(val.asDouble(), Json::error);
CHECK(val.asBool() == false); // empty or not
@@ -973,6 +1063,7 @@ TEST_CASE("conversions")
CHECK_THROWS_AS(val.asString(), Json::error);
CHECK_THROWS_AS(val.asInt(), Json::error);
CHECK_THROWS_AS(val.asUInt(), Json::error);
CHECK_THROWS_AS(val.asAbsUInt(), Json::error);
CHECK_THROWS_AS(val.asDouble(), Json::error);
CHECK(val.asBool() == false); // empty or not

View File

@@ -156,17 +156,6 @@ CashCheck::preclaim(PreclaimContext const& ctx)
// An issuer can always accept their own currency.
if (!value.native() && (value.getIssuer() != dstId))
{
auto const sleTrustLine =
ctx.view.read(keylet::line(dstId, issuerId, currency));
if (!sleTrustLine &&
!ctx.view.rules().enabled(featureCheckCashMakesTrustLine))
{
JLOG(ctx.j.warn())
<< "Cannot cash check for IOU without trustline.";
return tecNO_LINE;
}
auto const sleIssuer = ctx.view.read(keylet::account(issuerId));
if (!sleIssuer)
{
@@ -178,6 +167,9 @@ CashCheck::preclaim(PreclaimContext const& ctx)
if (sleIssuer->at(sfFlags) & lsfRequireAuth)
{
auto const sleTrustLine =
ctx.view.read(keylet::line(dstId, issuerId, currency));
if (!sleTrustLine)
{
// We can only create a trust line if the issuer does not
@@ -325,10 +317,7 @@ CashCheck::doApply()
Keylet const trustLineKey = keylet::line(truster, trustLineIssue);
bool const destLow = issuer > account_;
bool const checkCashMakesTrustLine =
psb.rules().enabled(featureCheckCashMakesTrustLine);
if (checkCashMakesTrustLine && !psb.exists(trustLineKey))
if (!psb.exists(trustLineKey))
{
// 1. Can the check casher meet the reserve for the trust line?
// 2. Create trust line between destination (this) account
@@ -401,14 +390,11 @@ CashCheck::doApply()
sleTrustLine->at(tweakedLimit) = savedLimit;
});
if (checkCashMakesTrustLine)
{
// Set the trust line limit to the highest possible value
// while flow runs.
STAmount const bigAmount(
trustLineIssue, STAmount::cMaxValue, STAmount::cMaxOffset);
sleTrustLine->at(tweakedLimit) = bigAmount;
}
// Set the trust line limit to the highest possible value
// while flow runs.
STAmount const bigAmount(
trustLineIssue, STAmount::cMaxValue, STAmount::cMaxOffset);
sleTrustLine->at(tweakedLimit) = bigAmount;
// Let flow() do the heavy lifting on a check for an IOU.
auto const result = flow(
@@ -441,15 +427,11 @@ CashCheck::doApply()
<< "flow did not produce DeliverMin.";
return tecPATH_PARTIAL;
}
if (!checkCashMakesTrustLine)
// Set the delivered_amount metadata.
ctx_.deliver(result.actualAmountOut);
}
// Set the delivered amount metadata in all cases, not just
// for DeliverMin.
if (checkCashMakesTrustLine)
ctx_.deliver(result.actualAmountOut);
ctx_.deliver(result.actualAmountOut);
sleCheck = psb.peek(keylet::check(ctx_.tx[sfCheckID]));
}