Compare commits

..

5 Commits

Author SHA1 Message Date
Alex Kremer
c12601c0f5 Merge branch 'develop' into release/2.6.0 2025-10-09 16:58:32 +01:00
Alex Kremer
381009807f Merge remote-tracking branch 'origin/develop' into release/2.6.0 2025-10-08 13:52:49 +01:00
Alex Kremer
f530216688 Merge branch 'develop' into release/2.6.0 2025-10-03 16:30:41 +01:00
Alex Kremer
0cccb1a2c0 Merge branch 'develop' into release/2.6.0 2025-10-01 13:51:22 +01:00
Alex Kremer
1803990aa8 Use xrpl/2.6.1-rc1 2025-09-18 17:07:59 +01:00
61 changed files with 248 additions and 2752 deletions

View File

@@ -5,27 +5,25 @@ inputs:
targets:
description: Space-separated build target names
default: all
nproc_subtract:
description: The number of processors to subtract when calculating parallelism.
subtract_threads:
description: An option for the action get-threads-number.
required: true
default: "0"
runs:
using: composite
steps:
- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
- name: Get number of threads
uses: ./.github/actions/get-threads-number
id: number_of_threads
with:
subtract: ${{ inputs.nproc_subtract }}
subtract_threads: ${{ inputs.subtract_threads }}
- name: Build targets
shell: bash
env:
CMAKE_TARGETS: ${{ inputs.targets }}
run: |
cd build
cmake \
--build . \
--parallel "${{ steps.nproc.outputs.nproc }}" \
--target ${CMAKE_TARGETS}
--parallel "${{ steps.number_of_threads.outputs.threads_number }}" \
--target ${{ inputs.targets }}

View File

@@ -24,7 +24,7 @@ runs:
-j8 --exclude-throw-branches
- name: Archive coverage report
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: coverage-report.xml
path: build/coverage_report.xml

View File

@@ -28,17 +28,12 @@ runs:
- name: Create an issue
id: create_issue
shell: bash
env:
ISSUE_BODY: ${{ inputs.body }}
ISSUE_ASSIGNEES: ${{ inputs.assignees }}
ISSUE_LABELS: ${{ inputs.labels }}
ISSUE_TITLE: ${{ inputs.title }}
run: |
echo -e "${ISSUE_BODY}" > issue.md
echo -e '${{ inputs.body }}' > issue.md
gh issue create \
--assignee "${ISSUE_ASSIGNEES}" \
--label "${ISSUE_LABELS}" \
--title "${ISSUE_TITLE}" \
--assignee '${{ inputs.assignees }}' \
--label '${{ inputs.labels }}' \
--title '${{ inputs.title }}' \
--body-file ./issue.md \
> create_issue.log
created_issue="$(sed 's|.*/||' create_issue.log)"

View File

@@ -0,0 +1,36 @@
name: Get number of threads
description: Determines number of threads to use on macOS and Linux
inputs:
subtract_threads:
description: How many threads to subtract from the calculated number
required: true
default: "0"
outputs:
threads_number:
description: Number of threads to use
value: ${{ steps.number_of_threads_export.outputs.num }}
runs:
using: composite
steps:
- name: Get number of threads on mac
id: mac_threads
if: ${{ runner.os == 'macOS' }}
shell: bash
run: echo "num=$(($(sysctl -n hw.logicalcpu) - 2))" >> $GITHUB_OUTPUT
- name: Get number of threads on Linux
id: linux_threads
if: ${{ runner.os == 'Linux' }}
shell: bash
run: echo "num=$(($(nproc) - 2))" >> $GITHUB_OUTPUT
- name: Shift and export number of threads
id: number_of_threads_export
shell: bash
run: |
num_of_threads="${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}"
shift_by="${{ inputs.subtract_threads }}"
shifted="$((num_of_threads - shift_by))"
echo "num=$(( shifted > 1 ? shifted : 1 ))" >> $GITHUB_OUTPUT

View File

@@ -30,7 +30,7 @@ runs:
uses: ./.github/actions/git-common-ancestor
- name: Restore ccache cache
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@v4
id: ccache_cache
if: ${{ env.CCACHE_DISABLE != '1' }}
with:

View File

@@ -32,7 +32,7 @@ runs:
- name: Save ccache cache
if: ${{ inputs.ccache_cache_hit != 'true' || inputs.ccache_cache_miss_rate == '100.0' }}
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/save@v4
with:
path: ${{ inputs.ccache_dir }}
key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-${{ inputs.conan_profile }}-develop-${{ steps.git_common_ancestor.outputs.commit }}

View File

@@ -91,6 +91,19 @@ updates:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/get-threads-number/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/git-common-ancestor/
schedule:

View File

@@ -4,7 +4,7 @@ import json
LINUX_OS = ["heavy", "heavy-arm64"]
LINUX_CONTAINERS = [
'{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
'{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
]
LINUX_COMPILERS = ["gcc", "clang"]

View File

@@ -31,16 +31,15 @@ TESTS=$($TEST_BINARY --gtest_list_tests | awk '/^ / {print suite $1} !/^ / {su
OUTPUT_DIR="./.sanitizer-report"
mkdir -p "$OUTPUT_DIR"
export TSAN_OPTIONS="die_after_fork=0"
export MallocNanoZone='0' # for MacOSX
for TEST in $TESTS; do
OUTPUT_FILE="$OUTPUT_DIR/${TEST//\//_}.log"
$TEST_BINARY --gtest_filter="$TEST" > "$OUTPUT_FILE" 2>&1
OUTPUT_FILE="$OUTPUT_DIR/${TEST//\//_}"
export TSAN_OPTIONS="log_path=\"$OUTPUT_FILE\" die_after_fork=0"
export ASAN_OPTIONS="log_path=\"$OUTPUT_FILE\""
export UBSAN_OPTIONS="log_path=\"$OUTPUT_FILE\""
export MallocNanoZone='0' # for MacOSX
$TEST_BINARY --gtest_filter="$TEST" > /dev/null 2>&1
if [ $? -ne 0 ]; then
echo "'$TEST' failed a sanitizer check."
else
rm "$OUTPUT_FILE"
fi
done

View File

@@ -44,11 +44,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Download Clio binary from artifact
if: ${{ inputs.artifact_name != null }}
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@v5
with:
name: ${{ inputs.artifact_name }}
path: ./docker/clio/artifact/
@@ -56,12 +56,9 @@ jobs:
- name: Download Clio binary from url
if: ${{ inputs.clio_server_binary_url != null }}
shell: bash
env:
BINARY_URL: ${{ inputs.clio_server_binary_url }}
BINARY_SHA256: ${{ inputs.binary_sha256 }}
run: |
wget "${BINARY_URL}" -P ./docker/clio/artifact/
if [ "$(sha256sum ./docker/clio/clio_server | awk '{print $1}')" != "${BINARY_SHA256}" ]; then
wget "${{inputs.clio_server_binary_url}}" -P ./docker/clio/artifact/
if [ "$(sha256sum ./docker/clio/clio_server | awk '{print $1}')" != "${{inputs.binary_sha256}}" ]; then
echo "Binary sha256 sum doesn't match"
exit 1
fi

View File

@@ -45,7 +45,7 @@ jobs:
build_type: [Release, Debug]
container:
[
'{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }',
'{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }',
]
static: [true]
@@ -75,7 +75,7 @@ jobs:
uses: ./.github/workflows/reusable-build.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
conan_profile: gcc
build_type: Debug
download_ccache: true
@@ -94,7 +94,7 @@ jobs:
uses: ./.github/workflows/reusable-build.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
conan_profile: gcc
build_type: Release
download_ccache: true
@@ -111,12 +111,12 @@ jobs:
needs: build-and-test
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- uses: actions/download-artifact@v5
with:
name: clio_server_Linux_Release_gcc

View File

@@ -17,15 +17,15 @@ jobs:
name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true
@@ -57,7 +57,7 @@ jobs:
run: strip build/clio_tests
- name: Upload clio_tests
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: clio_tests_check_libxrpl
path: build/clio_tests
@@ -67,10 +67,10 @@ jobs:
needs: build
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
steps:
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- uses: actions/download-artifact@v5
with:
name: clio_tests_check_libxrpl
@@ -90,7 +90,7 @@ jobs:
issues: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Create an issue
uses: ./.github/actions/create-issue

View File

@@ -10,17 +10,15 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: ytanikin/pr-conventional-commits@b72758283dcbee706975950e96bc4bf323a8d8c0 # 1.4.2
- uses: ytanikin/pr-conventional-commits@b72758283dcbee706975950e96bc4bf323a8d8c0 # v1.4.2
with:
task_types: '["build","feat","fix","docs","test","ci","style","refactor","perf","chore"]'
add_label: false
custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'
- name: Check if message starts with upper-case letter
env:
PR_TITLE: ${{ github.event.pull_request.title }}
run: |
if [[ ! "${PR_TITLE}" =~ ^[a-z]+:\ [\[A-Z] ]]; then
if [[ ! "${{ github.event.pull_request.title }}" =~ ^[a-z]+:\ [\[A-Z] ]]; then
echo "Error: PR title must start with an upper-case letter."
exit 1
fi

View File

@@ -27,7 +27,7 @@ jobs:
if: github.event_name != 'push' || contains(github.event.head_commit.message, 'clang-tidy auto fixes')
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
permissions:
contents: write
@@ -35,12 +35,12 @@ jobs:
pull-requests: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true
@@ -61,16 +61,16 @@ jobs:
with:
conan_profile: ${{ env.CONAN_PROFILE }}
- name: Get number of processors
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
id: nproc
- name: Get number of threads
uses: ./.github/actions/get-threads-number
id: number_of_threads
- name: Run clang-tidy
continue-on-error: true
shell: bash
id: run_clang_tidy
run: |
run-clang-tidy-${{ env.LLVM_TOOLS_VERSION }} -p build -j "${{ steps.nproc.outputs.nproc }}" -fix -quiet 1>output.txt
run-clang-tidy-${{ env.LLVM_TOOLS_VERSION }} -p build -j "${{ steps.number_of_threads.outputs.threads_number }}" -fix -quiet 1>output.txt
- name: Fix local includes and clang-format style
if: ${{ steps.run_clang_tidy.outcome != 'success' }}

View File

@@ -14,16 +14,16 @@ jobs:
build:
runs-on: ubuntu-latest
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v4
with:
lfs: true
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true
@@ -39,10 +39,10 @@ jobs:
run: cmake --build . --target docs
- name: Setup Pages
uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5.0.0
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4.0.0
uses: actions/upload-pages-artifact@v4
with:
path: build_docs/html
name: docs-develop
@@ -62,6 +62,6 @@ jobs:
steps:
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5
uses: actions/deploy-pages@v4
with:
artifact_name: docs-develop

View File

@@ -39,17 +39,17 @@ jobs:
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
- os: heavy
conan_profile: gcc
build_type: Debug
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
- os: heavy
conan_profile: gcc.ubsan
build_type: Release
static: false
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
uses: ./.github/workflows/reusable-build-test.yml
with:
@@ -73,7 +73,7 @@ jobs:
include:
- os: heavy
conan_profile: clang
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
static: true
- os: macos15
conan_profile: apple-clang
@@ -93,29 +93,18 @@ jobs:
targets: all
analyze_build_time: true
get_date:
name: Get Date
runs-on: ubuntu-latest
outputs:
date: ${{ steps.get_date.outputs.date }}
steps:
- name: Get current date
id: get_date
run: |
echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
nightly_release:
needs: [build-and-test, get_date]
needs: build-and-test
uses: ./.github/workflows/reusable-release.yml
with:
delete_pattern: "nightly-*"
overwrite_release: true
prerelease: true
title: "Clio development (nightly) build"
version: nightly-${{ needs.get_date.outputs.date }}
version: nightly
header: >
> **Note:** Please remember that this is a development release and it is not recommended for production use.
Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly-${{ needs.get_date.outputs.date }}>
Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
generate_changelog: false
draft: false
@@ -141,7 +130,7 @@ jobs:
issues: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Create an issue
uses: ./.github/actions/create-issue

View File

@@ -8,7 +8,7 @@ on:
jobs:
run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
uses: XRPLF/actions/.github/workflows/pre-commit.yml@afbcbdafbe0ce5439492fb87eda6441371086386
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-pre-commit:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-pre-commit:213752862ca95ecadeb59a6176c3db91a7864b3e" }'

View File

@@ -29,7 +29,7 @@ jobs:
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
uses: ./.github/workflows/reusable-build-test.yml
with:
@@ -49,7 +49,7 @@ jobs:
needs: build-and-test
uses: ./.github/workflows/reusable-release.yml
with:
delete_pattern: ""
overwrite_release: false
prerelease: ${{ contains(github.ref_name, '-') }}
title: "${{ github.ref_name }}"
version: "${{ github.ref_name }}"

View File

@@ -86,7 +86,7 @@ jobs:
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
# We need to fetch tags to have correct version in the release
@@ -95,7 +95,7 @@ jobs:
ref: ${{ github.ref }}
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: ${{ !inputs.download_ccache }}
@@ -145,7 +145,7 @@ jobs:
- name: Upload build time analyze report
if: ${{ inputs.analyze_build_time }}
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: build_time_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build_time_report.txt
@@ -170,28 +170,28 @@ jobs:
- name: Upload clio_server
if: ${{ inputs.upload_clio_server && !inputs.code_coverage && !inputs.analyze_build_time }}
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build/clio_server
- name: Upload clio_tests
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build/clio_tests
- name: Upload clio_integration_tests
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time && !inputs.package }}
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build/clio_integration_tests
- name: Upload Clio Linux package
if: ${{ inputs.package }}
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: clio_deb_package_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build/*.deb
@@ -221,14 +221,12 @@ jobs:
- name: Verify expected version
if: ${{ inputs.expected_version != '' }}
shell: bash
env:
INPUT_EXPECTED_VERSION: ${{ inputs.expected_version }}
run: |
set -e
EXPECTED_VERSION="clio-${INPUT_EXPECTED_VERSION}"
EXPECTED_VERSION="clio-${{ inputs.expected_version }}"
actual_version=$(./build/clio_server --version)
if [[ "$actual_version" != "$EXPECTED_VERSION" ]]; then
echo "Expected version '${EXPECTED_VERSION}', but got '${actual_version}'"
echo "Expected version '$EXPECTED_VERSION', but got '$actual_version'"
exit 1
fi

View File

@@ -3,10 +3,10 @@ name: Make release
on:
workflow_call:
inputs:
delete_pattern:
description: "Pattern to delete previous releases"
overwrite_release:
description: "Overwrite the current release and tag"
required: true
type: string
type: boolean
prerelease:
description: "Create a prerelease"
@@ -42,7 +42,7 @@ jobs:
release:
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
@@ -51,28 +51,26 @@ jobs:
contents: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- uses: actions/download-artifact@v5
with:
path: release_artifacts
pattern: clio_server_*
- name: Create release notes
shell: bash
env:
RELEASE_HEADER: ${{ inputs.header }}
run: |
echo "# Release notes" > "${RUNNER_TEMP}/release_notes.md"
echo "" >> "${RUNNER_TEMP}/release_notes.md"
printf '%s\n' "${RELEASE_HEADER}" >> "${RUNNER_TEMP}/release_notes.md"
printf '%s\n' "${{ inputs.header }}" >> "${RUNNER_TEMP}/release_notes.md"
- name: Generate changelog
shell: bash
@@ -89,38 +87,26 @@ jobs:
run: .github/scripts/prepare-release-artifacts.sh release_artifacts
- name: Upload release notes
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: release_notes_${{ inputs.version }}
path: "${RUNNER_TEMP}/release_notes.md"
- name: Remove previous release with a pattern
if: ${{ github.event_name != 'pull_request' && inputs.delete_pattern != '' }}
- name: Remove current release and tag
if: ${{ github.event_name != 'pull_request' && inputs.overwrite_release }}
shell: bash
env:
DELETE_PATTERN: ${{ inputs.delete_pattern }}
run: |
RELEASES_TO_DELETE=$(gh release list --limit 50 --repo "${GH_REPO}" | grep -E "${DELETE_PATTERN}" | awk -F'\t' '{print $3}' || true)
if [ -n "$RELEASES_TO_DELETE" ]; then
for RELEASE in $RELEASES_TO_DELETE; do
echo "Deleting release: $RELEASE"
gh release delete "$RELEASE" --repo "${GH_REPO}" --yes --cleanup-tag
done
fi
gh release delete ${{ inputs.version }} --yes || true
git push origin :${{ inputs.version }} || true
- name: Publish release
if: ${{ github.event_name != 'pull_request' }}
shell: bash
env:
RELEASE_VERSION: ${{ inputs.version }}
PRERELEASE_OPTION: ${{ inputs.prerelease && '--prerelease' || '' }}
RELEASE_TITLE: ${{ inputs.title }}
DRAFT_OPTION: ${{ inputs.draft && '--draft' || '' }}
run: |
gh release create "${RELEASE_VERSION}" \
${PRERELEASE_OPTION} \
--title "${RELEASE_TITLE}" \
gh release create "${{ inputs.version }}" \
${{ inputs.prerelease && '--prerelease' || '' }} \
--title "${{ inputs.title }}" \
--target "${GITHUB_SHA}" \
${DRAFT_OPTION} \
${{ inputs.draft && '--draft' || '' }} \
--notes-file "${RUNNER_TEMP}/release_notes.md" \
./release_artifacts/clio_server*

View File

@@ -43,18 +43,18 @@ jobs:
env:
# TODO: remove completely when we have fixed all currently existing issues with sanitizers
SANITIZER_IGNORE_ERRORS: ${{ endsWith(inputs.conan_profile, '.tsan') || (inputs.conan_profile == 'gcc.asan' && inputs.build_type == 'Release') }}
SANITIZER_IGNORE_ERRORS: ${{ endsWith(inputs.conan_profile, '.tsan') || inputs.conan_profile == 'clang.asan' || (inputs.conan_profile == 'gcc.asan' && inputs.build_type == 'Release') }}
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- uses: actions/download-artifact@v5
with:
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
@@ -68,7 +68,7 @@ jobs:
- name: Run clio_tests (sanitizer errors ignored)
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' }}
run: ./.github/scripts/execute-tests-under-sanitizer.sh ./clio_tests
run: ./.github/scripts/execute-tests-under-sanitizer ./clio_tests
- name: Check for sanitizer report
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' }}
@@ -83,7 +83,7 @@ jobs:
- name: Upload sanitizer report
if: ${{ env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true' }}
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@v4
with:
name: sanitizer_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: .sanitizer-report/*
@@ -144,7 +144,7 @@ jobs:
sleep 5
done
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- uses: actions/download-artifact@v5
with:
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}

View File

@@ -12,12 +12,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Download report artifact
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@v5
with:
name: coverage-report.xml
path: build

View File

@@ -15,7 +15,7 @@ on:
- ".github/actions/**"
- "!.github/actions/build-docker-image/**"
- "!.github/actions/create-issue/**"
- .github/scripts/execute-tests-under-sanitizer.sh
- .github/scripts/execute-tests-under-sanitizer
- CMakeLists.txt
- conanfile.py
@@ -44,7 +44,7 @@ jobs:
uses: ./.github/workflows/reusable-build-test.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e" }'
download_ccache: false
upload_ccache: false
conan_profile: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}

View File

@@ -52,7 +52,7 @@ jobs:
needs: repo
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
@@ -90,7 +90,7 @@ jobs:
needs: repo
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
@@ -128,7 +128,7 @@ jobs:
needs: [repo, gcc-amd64, gcc-arm64]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
@@ -137,7 +137,7 @@ jobs:
files: "docker/compilers/gcc/**"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
if: ${{ github.event_name != 'pull_request' }}
@@ -179,7 +179,7 @@ jobs:
needs: repo
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
@@ -215,7 +215,7 @@ jobs:
needs: [repo, gcc-merge]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
@@ -246,7 +246,7 @@ jobs:
needs: [repo, gcc-merge]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
@@ -277,7 +277,7 @@ jobs:
needs: [repo, tools-amd64, tools-arm64]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
@@ -286,7 +286,7 @@ jobs:
files: "docker/tools/**"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
if: ${{ github.event_name != 'pull_request' }}
@@ -312,7 +312,7 @@ jobs:
needs: [repo, tools-merge]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- uses: ./.github/actions/build-docker-image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -334,7 +334,7 @@ jobs:
needs: [repo, gcc-merge, clang, tools-merge]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- uses: ./.github/actions/build-docker-image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -46,7 +46,7 @@ jobs:
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Calculate conan matrix
id: set-matrix
@@ -69,10 +69,10 @@ jobs:
CONAN_PROFILE: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true
@@ -99,6 +99,4 @@ jobs:
- name: Upload Conan packages
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule' }}
env:
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
run: conan upload "*" -r=xrplf --confirm ${FORCE_OPTION}
run: conan upload "*" -r=xrplf --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}

View File

@@ -11,10 +11,7 @@
#
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: |
(?x)^(
docs/doxygen-awesome-theme/.*
)$
exclude: ^(docs/doxygen-awesome-theme/|conan\.lock$)
repos:
# `pre-commit sample-config` default hooks

View File

@@ -34,6 +34,7 @@ Below are some useful docs to learn more about Clio.
- [How to configure Clio and rippled](./docs/configure-clio.md)
- [How to run Clio](./docs/run-clio.md)
- [Logging](./docs/logging.md)
- [Troubleshooting guide](./docs/trouble_shooting.md)
**General reference material:**

View File

@@ -55,11 +55,8 @@ RUN pip install -q --no-cache-dir \
# lxml 6.0.0 is not compatible with our image
'lxml<6.0.0' \
cmake \
conan==2.22.1 \
gcovr \
# We're adding pre-commit to this image as well,
# because clang-tidy workflow requires it
pre-commit
conan==2.20.1 \
gcovr
# Install LLVM tools
ARG LLVM_TOOLS_VERSION=20

View File

@@ -5,17 +5,17 @@ It is used in [Clio Github Actions](https://github.com/XRPLF/clio/actions) but c
The image is based on Ubuntu 20.04 and contains:
- ccache 4.12.1
- ccache 4.11.3
- Clang 19
- ClangBuildAnalyzer 1.6.0
- Conan 2.22.1
- Doxygen 1.15.0
- Conan 2.20.1
- Doxygen 1.14
- GCC 15.2.0
- GDB 16.3
- gh 2.82.1
- git-cliff 2.10.1
- mold 2.40.4
- Python 3.8
- gh 2.74
- git-cliff 2.9.1
- mold 2.40.1
- Python 3.13
- and some other useful tools
Conan is set up to build Clio without any additional steps.

View File

@@ -1,6 +1,6 @@
services:
clio_develop:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
volumes:
- clio_develop_conan_data:/root/.conan2/p
- clio_develop_ccache:/root/.ccache

View File

@@ -17,7 +17,6 @@ RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
curl \
git \
libatomic1 \
software-properties-common \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

View File

@@ -8,7 +8,7 @@ ARG TARGETARCH
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ARG BUILD_VERSION=0
ARG BUILD_VERSION=2
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
@@ -24,7 +24,7 @@ RUN apt-get update \
WORKDIR /tmp
ARG MOLD_VERSION=2.40.4
ARG MOLD_VERSION=2.40.1
RUN wget --progress=dot:giga "https://github.com/rui314/mold/archive/refs/tags/v${MOLD_VERSION}.tar.gz" \
&& tar xf "v${MOLD_VERSION}.tar.gz" \
&& cd "mold-${MOLD_VERSION}" \
@@ -34,7 +34,7 @@ RUN wget --progress=dot:giga "https://github.com/rui314/mold/archive/refs/tags/v
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
ARG CCACHE_VERSION=4.12.1
ARG CCACHE_VERSION=4.11.3
RUN wget --progress=dot:giga "https://github.com/ccache/ccache/releases/download/v${CCACHE_VERSION}/ccache-${CCACHE_VERSION}.tar.gz" \
&& tar xf "ccache-${CCACHE_VERSION}.tar.gz" \
&& cd "ccache-${CCACHE_VERSION}" \
@@ -51,7 +51,7 @@ RUN apt-get update \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ARG DOXYGEN_VERSION=1.15.0
ARG DOXYGEN_VERSION=1.14.0
RUN wget --progress=dot:giga "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& cd "doxygen-${DOXYGEN_VERSION}" \
@@ -71,13 +71,13 @@ RUN wget --progress=dot:giga "https://github.com/aras-p/ClangBuildAnalyzer/archi
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
ARG GIT_CLIFF_VERSION=2.10.1
ARG GIT_CLIFF_VERSION=2.9.1
RUN wget --progress=dot:giga "https://github.com/orhun/git-cliff/releases/download/v${GIT_CLIFF_VERSION}/git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz" \
&& tar xf git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz \
&& mv git-cliff-${GIT_CLIFF_VERSION}/git-cliff /usr/local/bin/git-cliff \
&& rm -rf /tmp/* /var/tmp/*
ARG GH_VERSION=2.82.1
ARG GH_VERSION=2.74.0
RUN wget --progress=dot:giga "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz" \
&& tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
&& mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/local/bin/gh \

View File

@@ -191,7 +191,7 @@ Open the `index.html` file in your browser to see the documentation pages.
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
```sh
docker run -it ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
docker run -it ghcr.io/xrplf/clio-ci:213752862ca95ecadeb59a6176c3db91a7864b3e
git clone https://github.com/XRPLF/clio
cd clio
```

View File

@@ -951,7 +951,7 @@ span.arrowhead {
border-color: var(--primary-color);
}
#nav-tree-contents > ul > li:first-child > div > a {
#nav-tree ul li:first-child > div > a {
opacity: 0;
pointer-events: none;
}

View File

@@ -209,9 +209,8 @@ TransactionFeed::pub(
rpc::insertDeliveredAmount(pubObj[JS(meta)].as_object(), tx, meta, txMeta.date);
auto& txnPubobj = pubObj[txKey].as_object();
auto& metaPubobj = pubObj[JS(meta)].as_object();
rpc::insertDeliverMaxAlias(txnPubobj, version);
rpc::insertMPTIssuanceID(txnPubobj, tx, metaPubobj, meta);
rpc::insertMPTIssuanceID(txnPubobj, meta);
Json::Value nftJson;
ripple::RPC::insertNFTSyntheticInJson(nftJson, tx, *meta);

View File

@@ -24,8 +24,6 @@ target_sources(
handlers/AccountCurrencies.cpp
handlers/AccountInfo.cpp
handlers/AccountLines.cpp
handlers/AccountMPTokenIssuances.cpp
handlers/AccountMPTokens.cpp
handlers/AccountNFTs.cpp
handlers/AccountObjects.cpp
handlers/AccountOffers.cpp

View File

@@ -35,8 +35,6 @@ handledRpcs()
"account_currencies",
"account_info",
"account_lines",
"account_mptoken_issuances",
"account_mptokens",
"account_nfts",
"account_objects",
"account_offers",

View File

@@ -34,6 +34,7 @@
#include "web/Context.hpp"
#include <boost/algorithm/string/case_conv.hpp>
#include <boost/algorithm/string/predicate.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/format/format_fwd.hpp>
#include <boost/format/free_funcs.hpp>
@@ -257,7 +258,7 @@ toExpandedJson(
auto metaJson = toJson(*meta);
insertDeliveredAmount(metaJson, txn, meta, blobs.date);
insertDeliverMaxAlias(txnJson, apiVersion);
insertMPTIssuanceID(txnJson, txn, metaJson, meta);
insertMPTIssuanceID(txnJson, meta);
if (nftEnabled == NFTokenjson::ENABLE) {
Json::Value nftJson;
@@ -342,41 +343,36 @@ getMPTIssuanceID(std::shared_ptr<ripple::TxMeta const> const& meta)
/**
* @brief Check if transaction has a new MPToken created
*
* @param txn The transaction object
* @param meta The metadata object
* @param txnJson The transaction Json
* @param meta The metadata
* @return true if the transaction can have a mpt_issuance_id
*/
static bool
canHaveMPTIssuanceID(std::shared_ptr<ripple::STTx const> const& txn, std::shared_ptr<ripple::TxMeta const> const& meta)
canHaveMPTIssuanceID(boost::json::object const& txnJson, std::shared_ptr<ripple::TxMeta const> const& meta)
{
if (txn->getTxnType() != ripple::ttMPTOKEN_ISSUANCE_CREATE)
if (txnJson.at(JS(TransactionType)).is_string() and
not boost::iequals(txnJson.at(JS(TransactionType)).as_string(), JS(MPTokenIssuanceCreate)))
return false;
return (meta->getResultTER() == ripple::tesSUCCESS);
if (meta->getResultTER() != ripple::tesSUCCESS)
return false;
return true;
}
bool
insertMPTIssuanceID(
boost::json::object& txnJson,
std::shared_ptr<ripple::STTx const> const& txn,
boost::json::object& metaJson,
std::shared_ptr<ripple::TxMeta const> const& meta
)
insertMPTIssuanceID(boost::json::object& txnJson, std::shared_ptr<ripple::TxMeta const> const& meta)
{
if (!canHaveMPTIssuanceID(txn, meta))
if (!canHaveMPTIssuanceID(txnJson, meta))
return false;
if (txnJson.contains(JS(TransactionType)) && txnJson.at(JS(TransactionType)).is_string() and
txnJson.at(JS(TransactionType)).as_string() == JS(MPTokenIssuanceCreate))
return false;
auto const id = getMPTIssuanceID(meta);
ASSERT(id.has_value(), "MPTIssuanceID must have value");
// For mpttokenissuance create, add mpt_issuance_id to metajson
// Otherwise, add it to txn json
if (txnJson.contains(JS(TransactionType)) && txnJson.at(JS(TransactionType)).is_string() and
txnJson.at(JS(TransactionType)).as_string() == JS(MPTokenIssuanceCreate)) {
metaJson[JS(mpt_issuance_id)] = ripple::to_string(*id);
} else {
txnJson[JS(mpt_issuance_id)] = ripple::to_string(*id);
}
txnJson[JS(mpt_issuance_id)] = ripple::to_string(*id);
return true;
}

View File

@@ -201,23 +201,15 @@ insertDeliveredAmount(
/**
* @brief Add "mpt_issuance_id" into various MPTToken transaction json.
* @note We add "mpt_issuance_id" into the meta part of MPTokenIssuanceCreate only. The reason is because the
* mpt_issuance_id is generated only after one submits MPTokenIssuanceCreate, so theres no way to know what the id is.
* (rippled)
* @note We exclude "mpt_issuance_id" for MPTokenIssuanceCreate only. The reason is because the mpt_issuance_id
* is generated only after one submits MPTokenIssuanceCreate, so theres no way to know what the id is. (rippled)
*
* @param txnJson The transaction Json object
* @param txn The txn object
* @param metaJson The metadata Json object
* @param meta The metadata object
* @return true if the "mpt_issuance_id" is added to either txnJson or metaJson object
* @return true if the "mpt_issuance_id" is added to the txnJson JSON object
*/
bool
insertMPTIssuanceID(
boost::json::object& txnJson,
std::shared_ptr<ripple::STTx const> const& txn,
boost::json::object& metaJson,
std::shared_ptr<ripple::TxMeta const> const& meta
);
insertMPTIssuanceID(boost::json::object& txnJson, std::shared_ptr<ripple::TxMeta const> const& meta);
/**
* @brief Convert STBase object to JSON

View File

@@ -31,8 +31,6 @@
#include "rpc/handlers/AccountCurrencies.hpp"
#include "rpc/handlers/AccountInfo.hpp"
#include "rpc/handlers/AccountLines.hpp"
#include "rpc/handlers/AccountMPTokenIssuances.hpp"
#include "rpc/handlers/AccountMPTokens.hpp"
#include "rpc/handlers/AccountNFTs.hpp"
#include "rpc/handlers/AccountObjects.hpp"
#include "rpc/handlers/AccountOffers.hpp"
@@ -87,9 +85,6 @@ ProductionHandlerProvider::ProductionHandlerProvider(
{"account_currencies", {.handler = AccountCurrenciesHandler{backend}}},
{"account_info", {.handler = AccountInfoHandler{backend, amendmentCenter}}},
{"account_lines", {.handler = AccountLinesHandler{backend}}},
{"account_mptoken_issuances",
{.handler = AccountMPTokenIssuancesHandler{backend}, .isClioOnly = true}}, // clio only
{"account_mptokens", {.handler = AccountMPTokensHandler{backend}, .isClioOnly = true}}, // clio only
{"account_nfts", {.handler = AccountNFTsHandler{backend}}},
{"account_objects", {.handler = AccountObjectsHandler{backend}}},
{"account_offers", {.handler = AccountOffersHandler{backend}}},

View File

@@ -1,235 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include "rpc/handlers/AccountMPTokenIssuances.hpp"
#include "rpc/Errors.hpp"
#include "rpc/JS.hpp"
#include "rpc/RPCHelpers.hpp"
#include "rpc/common/Types.hpp"
#include "util/Assert.hpp"
#include "util/JsonUtils.hpp"
#include <boost/json/conversion.hpp>
#include <boost/json/object.hpp>
#include <boost/json/value.hpp>
#include <boost/json/value_to.hpp>
#include <xrpl/basics/strHex.h>
#include <xrpl/protocol/AccountID.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/LedgerHeader.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STLedgerEntry.h>
#include <xrpl/protocol/jss.h>
#include <cstdint>
#include <optional>
#include <string>
#include <utility>
#include <vector>
namespace rpc {
void
AccountMPTokenIssuancesHandler::addMPTokenIssuance(
std::vector<MPTokenIssuanceResponse>& issuances,
ripple::SLE const& sle,
ripple::AccountID const& account
)
{
MPTokenIssuanceResponse issuance;
issuance.issuer = ripple::to_string(account);
issuance.sequence = sle.getFieldU32(ripple::sfSequence);
auto const flags = sle.getFieldU32(ripple::sfFlags);
auto const setFlag = [&](std::optional<bool>& field, std::uint32_t mask) {
if ((flags & mask) != 0u)
field = true;
};
setFlag(issuance.mptLocked, ripple::lsfMPTLocked);
setFlag(issuance.mptCanLock, ripple::lsfMPTCanLock);
setFlag(issuance.mptRequireAuth, ripple::lsfMPTRequireAuth);
setFlag(issuance.mptCanEscrow, ripple::lsfMPTCanEscrow);
setFlag(issuance.mptCanTrade, ripple::lsfMPTCanTrade);
setFlag(issuance.mptCanTransfer, ripple::lsfMPTCanTransfer);
setFlag(issuance.mptCanClawback, ripple::lsfMPTCanClawback);
if (sle.isFieldPresent(ripple::sfTransferFee))
issuance.transferFee = sle.getFieldU16(ripple::sfTransferFee);
if (sle.isFieldPresent(ripple::sfAssetScale))
issuance.assetScale = sle.getFieldU8(ripple::sfAssetScale);
if (sle.isFieldPresent(ripple::sfMaximumAmount))
issuance.maximumAmount = sle.getFieldU64(ripple::sfMaximumAmount);
if (sle.isFieldPresent(ripple::sfOutstandingAmount))
issuance.outstandingAmount = sle.getFieldU64(ripple::sfOutstandingAmount);
if (sle.isFieldPresent(ripple::sfLockedAmount))
issuance.lockedAmount = sle.getFieldU64(ripple::sfLockedAmount);
if (sle.isFieldPresent(ripple::sfMPTokenMetadata))
issuance.mptokenMetadata = ripple::strHex(sle.getFieldVL(ripple::sfMPTokenMetadata));
if (sle.isFieldPresent(ripple::sfDomainID))
issuance.domainID = ripple::strHex(sle.getFieldH256(ripple::sfDomainID));
issuances.push_back(issuance);
}
AccountMPTokenIssuancesHandler::Result
AccountMPTokenIssuancesHandler::process(AccountMPTokenIssuancesHandler::Input const& input, Context const& ctx) const
{
auto const range = sharedPtrBackend_->fetchLedgerRange();
ASSERT(range.has_value(), "AccountMPTokenIssuances' ledger range must be available");
auto const expectedLgrInfo = getLedgerHeaderFromHashOrSeq(
*sharedPtrBackend_, ctx.yield, input.ledgerHash, input.ledgerIndex, range->maxSequence
);
if (!expectedLgrInfo.has_value())
return Error{expectedLgrInfo.error()};
auto const& lgrInfo = expectedLgrInfo.value();
auto const accountID = accountFromStringStrict(input.account);
auto const accountLedgerObject =
sharedPtrBackend_->fetchLedgerObject(ripple::keylet::account(*accountID).key, lgrInfo.seq, ctx.yield);
if (not accountLedgerObject.has_value())
return Error{Status{RippledError::rpcACT_NOT_FOUND}};
Output response;
response.issuances.reserve(input.limit);
auto const addToResponse = [&](ripple::SLE const& sle) {
if (sle.getType() == ripple::ltMPTOKEN_ISSUANCE) {
addMPTokenIssuance(response.issuances, sle, *accountID);
}
};
auto const expectedNext = traverseOwnedNodes(
*sharedPtrBackend_, *accountID, lgrInfo.seq, input.limit, input.marker, ctx.yield, addToResponse
);
if (!expectedNext.has_value())
return Error{expectedNext.error()};
auto const nextMarker = expectedNext.value();
response.account = input.account;
response.limit = input.limit;
response.ledgerHash = ripple::strHex(lgrInfo.hash);
response.ledgerIndex = lgrInfo.seq;
if (nextMarker.isNonZero())
response.marker = nextMarker.toString();
return response;
}
AccountMPTokenIssuancesHandler::Input
tag_invoke(boost::json::value_to_tag<AccountMPTokenIssuancesHandler::Input>, boost::json::value const& jv)
{
auto input = AccountMPTokenIssuancesHandler::Input{};
auto const& jsonObject = jv.as_object();
input.account = boost::json::value_to<std::string>(jv.at(JS(account)));
if (jsonObject.contains(JS(limit)))
input.limit = util::integralValueAs<uint32_t>(jv.at(JS(limit)));
if (jsonObject.contains(JS(marker)))
input.marker = boost::json::value_to<std::string>(jv.at(JS(marker)));
if (jsonObject.contains(JS(ledger_hash)))
input.ledgerHash = boost::json::value_to<std::string>(jv.at(JS(ledger_hash)));
if (jsonObject.contains(JS(ledger_index))) {
if (!jsonObject.at(JS(ledger_index)).is_string()) {
input.ledgerIndex = util::integralValueAs<uint32_t>(jv.at(JS(ledger_index)));
} else if (jsonObject.at(JS(ledger_index)).as_string() != "validated") {
input.ledgerIndex = std::stoi(boost::json::value_to<std::string>(jv.at(JS(ledger_index))));
}
}
return input;
}
void
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, AccountMPTokenIssuancesHandler::Output const& output)
{
using boost::json::value_from;
auto obj = boost::json::object{
{JS(account), output.account},
{JS(ledger_hash), output.ledgerHash},
{JS(ledger_index), output.ledgerIndex},
{JS(validated), output.validated},
{JS(limit), output.limit},
{"mpt_issuances", value_from(output.issuances)},
};
if (output.marker.has_value())
obj[JS(marker)] = *output.marker;
jv = std::move(obj);
}
void
tag_invoke(
boost::json::value_from_tag,
boost::json::value& jv,
AccountMPTokenIssuancesHandler::MPTokenIssuanceResponse const& issuance
)
{
auto obj = boost::json::object{
{JS(issuer), issuance.issuer},
{JS(sequence), issuance.sequence},
};
auto const setIfPresent = [&](boost::json::string_view field, auto const& value) {
if (value.has_value()) {
obj[field] = *value;
}
};
setIfPresent("transfer_fee", issuance.transferFee);
setIfPresent("asset_scale", issuance.assetScale);
setIfPresent("maximum_amount", issuance.maximumAmount);
setIfPresent("outstanding_amount", issuance.outstandingAmount);
setIfPresent("locked_amount", issuance.lockedAmount);
setIfPresent("mptoken_metadata", issuance.mptokenMetadata);
setIfPresent("domain_id", issuance.domainID);
setIfPresent("mpt_locked", issuance.mptLocked);
setIfPresent("mpt_can_lock", issuance.mptCanLock);
setIfPresent("mpt_require_auth", issuance.mptRequireAuth);
setIfPresent("mpt_can_escrow", issuance.mptCanEscrow);
setIfPresent("mpt_can_trade", issuance.mptCanTrade);
setIfPresent("mpt_can_transfer", issuance.mptCanTransfer);
setIfPresent("mpt_can_clawback", issuance.mptCanClawback);
jv = std::move(obj);
}
} // namespace rpc

View File

@@ -1,196 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#pragma once
#include "data/BackendInterface.hpp"
#include "rpc/Errors.hpp"
#include "rpc/JS.hpp"
#include "rpc/common/Checkers.hpp"
#include "rpc/common/MetaProcessors.hpp"
#include "rpc/common/Modifiers.hpp"
#include "rpc/common/Specs.hpp"
#include "rpc/common/Types.hpp"
#include "rpc/common/Validators.hpp"
#include <boost/json/conversion.hpp>
#include <boost/json/value.hpp>
#include <xrpl/protocol/AccountID.h>
#include <xrpl/protocol/ErrorCodes.h>
#include <xrpl/protocol/STLedgerEntry.h>
#include <xrpl/protocol/jss.h>
#include <cstdint>
#include <memory>
#include <optional>
#include <string>
#include <vector>
namespace rpc {
/**
* @brief The account_mptoken_issuances method returns information about all MPTokenIssuance objects the account has
* created.
*/
class AccountMPTokenIssuancesHandler {
// dependencies
std::shared_ptr<BackendInterface> sharedPtrBackend_;
public:
static constexpr auto kLIMIT_MIN = 10;
static constexpr auto kLIMIT_MAX = 400;
static constexpr auto kLIMIT_DEFAULT = 200;
/**
* @brief A struct to hold data for one MPTokenIssuance response.
*/
struct MPTokenIssuanceResponse {
std::string issuer;
uint32_t sequence{};
std::optional<uint16_t> transferFee;
std::optional<uint8_t> assetScale;
std::optional<std::uint64_t> maximumAmount;
std::optional<std::uint64_t> outstandingAmount;
std::optional<std::uint64_t> lockedAmount;
std::optional<std::string> mptokenMetadata;
std::optional<std::string> domainID;
std::optional<bool> mptLocked;
std::optional<bool> mptCanLock;
std::optional<bool> mptRequireAuth;
std::optional<bool> mptCanEscrow;
std::optional<bool> mptCanTrade;
std::optional<bool> mptCanTransfer;
std::optional<bool> mptCanClawback;
};
/**
* @brief A struct to hold the output data of the command.
*/
struct Output {
std::string account;
std::vector<MPTokenIssuanceResponse> issuances;
std::string ledgerHash;
uint32_t ledgerIndex{};
bool validated = true;
std::optional<std::string> marker;
uint32_t limit{};
};
/**
* @brief A struct to hold the input data for the command.
*/
struct Input {
std::string account;
std::optional<std::string> ledgerHash;
std::optional<uint32_t> ledgerIndex;
uint32_t limit = kLIMIT_DEFAULT;
std::optional<std::string> marker;
};
using Result = HandlerReturnType<Output>;
/**
* @brief Construct a new AccountMPTokenIssuancesHandler object.
*
* @param sharedPtrBackend The backend to use.
*/
AccountMPTokenIssuancesHandler(std::shared_ptr<BackendInterface> sharedPtrBackend)
: sharedPtrBackend_(std::move(sharedPtrBackend))
{
}
/**
* @brief Returns the API specification for the command.
*
* @param apiVersion The API version to return the spec for.
* @return The spec for the given API version.
*/
static RpcSpecConstRef
spec([[maybe_unused]] uint32_t apiVersion)
{
static auto const kRPC_SPEC = RpcSpec{
{JS(account),
validation::Required{},
meta::WithCustomError{
validation::CustomValidators::accountValidator, Status(RippledError::rpcACT_MALFORMED)
}},
{JS(ledger_hash), validation::CustomValidators::uint256HexStringValidator},
{JS(limit),
validation::Type<uint32_t>{},
validation::Min(1u),
modifiers::Clamp<int32_t>{kLIMIT_MIN, kLIMIT_MAX}},
{JS(ledger_index), validation::CustomValidators::ledgerIndexValidator},
{JS(marker), validation::CustomValidators::accountMarkerValidator},
{JS(ledger), check::Deprecated{}},
};
return kRPC_SPEC;
}
/**
* @brief Process the AccountMPTokenIssuances command.
*
* @param input The input data for the command.
* @param ctx The context of the request.
* @return The result of the operation.
*/
Result
process(Input const& input, Context const& ctx) const;
private:
static void
addMPTokenIssuance(
std::vector<MPTokenIssuanceResponse>& issuances,
ripple::SLE const& sle,
ripple::AccountID const& account
);
private:
/**
* @brief Convert the Output to a JSON object
*
* @param [out] jv The JSON object to convert to
* @param output The output to convert
*/
friend void
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, Output const& output);
/**
* @brief Convert a JSON object to Input type
*
* @param jv The JSON object to convert
* @return Input parsed from the JSON object
*/
friend Input
tag_invoke(boost::json::value_to_tag<Input>, boost::json::value const& jv);
/**
* @brief Convert the MPTokenIssuanceResponse to a JSON object
*
* @param [out] jv The JSON object to convert to
* @param issuance The MPTokenIssuance response to convert
*/
friend void
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, MPTokenIssuanceResponse const& issuance);
};
} // namespace rpc

View File

@@ -1,190 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include "rpc/handlers/AccountMPTokens.hpp"
#include "rpc/Errors.hpp"
#include "rpc/JS.hpp"
#include "rpc/RPCHelpers.hpp"
#include "rpc/common/Types.hpp"
#include "util/Assert.hpp"
#include "util/JsonUtils.hpp"
#include <boost/json/conversion.hpp>
#include <boost/json/object.hpp>
#include <boost/json/value.hpp>
#include <boost/json/value_from.hpp>
#include <boost/json/value_to.hpp>
#include <xrpl/basics/strHex.h>
#include <xrpl/protocol/AccountID.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/LedgerHeader.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STLedgerEntry.h>
#include <xrpl/protocol/jss.h>
#include <cstdint>
#include <optional>
#include <string>
#include <utility>
#include <vector>
namespace rpc {
void
AccountMPTokensHandler::addMPToken(std::vector<MPTokenResponse>& mpts, ripple::SLE const& sle)
{
MPTokenResponse token{};
auto const flags = sle.getFieldU32(ripple::sfFlags);
token.account = ripple::to_string(sle.getAccountID(ripple::sfAccount));
token.MPTokenIssuanceID = ripple::strHex(sle.getFieldH192(ripple::sfMPTokenIssuanceID));
token.MPTAmount = sle.getFieldU64(ripple::sfMPTAmount);
if (sle.isFieldPresent(ripple::sfLockedAmount))
token.lockedAmount = sle.getFieldU64(ripple::sfLockedAmount);
auto const setFlag = [&](std::optional<bool>& field, std::uint32_t mask) {
if ((flags & mask) != 0u)
field = true;
};
setFlag(token.mptLocked, ripple::lsfMPTLocked);
setFlag(token.mptAuthorized, ripple::lsfMPTAuthorized);
mpts.push_back(token);
}
AccountMPTokensHandler::Result
AccountMPTokensHandler::process(AccountMPTokensHandler::Input const& input, Context const& ctx) const
{
auto const range = sharedPtrBackend_->fetchLedgerRange();
ASSERT(range.has_value(), "AccountMPTokens' ledger range must be available");
auto const expectedLgrInfo = getLedgerHeaderFromHashOrSeq(
*sharedPtrBackend_, ctx.yield, input.ledgerHash, input.ledgerIndex, range->maxSequence
);
if (!expectedLgrInfo.has_value())
return Error{expectedLgrInfo.error()};
auto const& lgrInfo = expectedLgrInfo.value();
auto const accountID = accountFromStringStrict(input.account);
auto const accountLedgerObject =
sharedPtrBackend_->fetchLedgerObject(ripple::keylet::account(*accountID).key, lgrInfo.seq, ctx.yield);
if (not accountLedgerObject.has_value())
return Error{Status{RippledError::rpcACT_NOT_FOUND}};
Output response;
response.mpts.reserve(input.limit);
auto const addToResponse = [&](ripple::SLE const& sle) {
if (sle.getType() == ripple::ltMPTOKEN) {
addMPToken(response.mpts, sle);
}
};
auto const expectedNext = traverseOwnedNodes(
*sharedPtrBackend_, *accountID, lgrInfo.seq, input.limit, input.marker, ctx.yield, addToResponse
);
if (!expectedNext.has_value())
return Error{expectedNext.error()};
auto const& nextMarker = expectedNext.value();
response.account = input.account;
response.limit = input.limit;
response.ledgerHash = ripple::strHex(lgrInfo.hash);
response.ledgerIndex = lgrInfo.seq;
if (nextMarker.isNonZero())
response.marker = nextMarker.toString();
return response;
}
AccountMPTokensHandler::Input
tag_invoke(boost::json::value_to_tag<AccountMPTokensHandler::Input>, boost::json::value const& jv)
{
AccountMPTokensHandler::Input input{};
auto const& jsonObject = jv.as_object();
input.account = boost::json::value_to<std::string>(jv.at(JS(account)));
if (jsonObject.contains(JS(limit)))
input.limit = util::integralValueAs<uint32_t>(jv.at(JS(limit)));
if (jsonObject.contains(JS(marker)))
input.marker = boost::json::value_to<std::string>(jv.at(JS(marker)));
if (jsonObject.contains(JS(ledger_hash)))
input.ledgerHash = boost::json::value_to<std::string>(jv.at(JS(ledger_hash)));
if (jsonObject.contains(JS(ledger_index))) {
if (!jv.at(JS(ledger_index)).is_string()) {
input.ledgerIndex = util::integralValueAs<uint32_t>(jv.at(JS(ledger_index)));
} else if (boost::json::value_to<std::string>(jv.at(JS(ledger_index))) != "validated") {
input.ledgerIndex = std::stoi(boost::json::value_to<std::string>(jv.at(JS(ledger_index))));
}
}
return input;
}
void
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, AccountMPTokensHandler::Output const& output)
{
auto obj = boost::json::object{
{JS(account), output.account},
{JS(ledger_hash), output.ledgerHash},
{JS(ledger_index), output.ledgerIndex},
{JS(validated), output.validated},
{JS(limit), output.limit},
{"mptokens", boost::json::value_from(output.mpts)},
};
if (output.marker.has_value())
obj[JS(marker)] = *output.marker;
jv = std::move(obj);
}
void
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, AccountMPTokensHandler::MPTokenResponse const& mptoken)
{
auto obj = boost::json::object{
{JS(account), mptoken.account},
{JS(mpt_issuance_id), mptoken.MPTokenIssuanceID},
{JS(mpt_amount), mptoken.MPTAmount},
};
auto const setIfPresent = [&](boost::json::string_view field, auto const& value) {
if (value.has_value()) {
obj[field] = *value;
}
};
setIfPresent("locked_amount", mptoken.lockedAmount);
setIfPresent("mpt_locked", mptoken.mptLocked);
setIfPresent("mpt_authorized", mptoken.mptAuthorized);
jv = std::move(obj);
}
} // namespace rpc

View File

@@ -1,178 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#pragma once
#include "data/BackendInterface.hpp"
#include "rpc/Errors.hpp"
#include "rpc/JS.hpp"
#include "rpc/common/Checkers.hpp"
#include "rpc/common/MetaProcessors.hpp"
#include "rpc/common/Modifiers.hpp"
#include "rpc/common/Specs.hpp"
#include "rpc/common/Types.hpp"
#include "rpc/common/Validators.hpp"
#include <boost/json/conversion.hpp>
#include <boost/json/value.hpp>
#include <xrpl/protocol/AccountID.h>
#include <xrpl/protocol/ErrorCodes.h>
#include <xrpl/protocol/STLedgerEntry.h>
#include <xrpl/protocol/jss.h>
#include <cstdint>
#include <optional>
#include <string>
#include <vector>
namespace rpc {
/**
* @brief The account_mptokens method returns information about the MPTokens the account currently holds.
*/
class AccountMPTokensHandler {
// dependencies
std::shared_ptr<BackendInterface> sharedPtrBackend_;
public:
static constexpr auto kLIMIT_MIN = 10;
static constexpr auto kLIMIT_MAX = 400;
static constexpr auto kLIMIT_DEFAULT = 200;
/**
* @brief A struct to hold data for one MPToken response.
*/
struct MPTokenResponse {
std::string account;
std::string MPTokenIssuanceID;
uint64_t MPTAmount{};
std::optional<uint64_t> lockedAmount;
std::optional<bool> mptLocked;
std::optional<bool> mptAuthorized;
};
/**
* @brief A struct to hold the output data of the command.
*/
struct Output {
std::string account;
std::vector<MPTokenResponse> mpts;
std::string ledgerHash;
uint32_t ledgerIndex{};
bool validated = true;
std::optional<std::string> marker;
uint32_t limit{};
};
/**
* @brief A struct to hold the input data for the command.
*/
struct Input {
std::string account;
std::optional<std::string> ledgerHash;
std::optional<uint32_t> ledgerIndex;
uint32_t limit = kLIMIT_DEFAULT;
std::optional<std::string> marker;
};
using Result = HandlerReturnType<Output>;
/**
* @brief Construct a new AccountMPTokensHandler object.
*
* @param sharedPtrBackend The backend to use.
*/
AccountMPTokensHandler(std::shared_ptr<BackendInterface> sharedPtrBackend)
: sharedPtrBackend_(std::move(sharedPtrBackend))
{
}
/**
* @brief Returns the API specification for the command.
*
* @param apiVersion The API version to return the spec for.
* @return The spec for the given API version.
*/
static RpcSpecConstRef
spec([[maybe_unused]] uint32_t apiVersion)
{
static auto const kRPC_SPEC = RpcSpec{
{JS(account),
validation::Required{},
meta::WithCustomError{
validation::CustomValidators::accountValidator, Status(RippledError::rpcACT_MALFORMED)
}},
{JS(ledger_hash), validation::CustomValidators::uint256HexStringValidator},
{JS(limit),
validation::Type<uint32_t>{},
validation::Min(1u),
modifiers::Clamp<int32_t>{kLIMIT_MIN, kLIMIT_MAX}},
{JS(ledger_index), validation::CustomValidators::ledgerIndexValidator},
{JS(marker), validation::CustomValidators::accountMarkerValidator},
{JS(ledger), check::Deprecated{}},
};
return kRPC_SPEC;
}
/**
* @brief Process the AccountMPTokens command.
*
* @param input The input data for the command.
* @param ctx The context of the request.
* @return The result of the operation.
*/
Result
process(Input const& input, Context const& ctx) const;
private:
// Converts the given ledger entry and appends it to `mpts` — presumably one
// MPTOKEN SLE per call; body not in view, confirm against the .cpp.
static void
addMPToken(std::vector<MPTokenResponse>& mpts, ripple::SLE const& sle);
private:
/**
* @brief Convert the Output to a JSON object
*
* @param [out] jv The JSON object to convert to
* @param output The output to convert
*/
friend void
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, Output const& output);
/**
* @brief Convert a JSON object to Input type
*
* @param jv The JSON object to convert
* @return Input parsed from the JSON object
*/
friend Input
tag_invoke(boost::json::value_to_tag<Input>, boost::json::value const& jv);
/**
* @brief Convert the MPTokenResponse to a JSON object
*
* @param [out] jv The JSON object to convert to
* @param mptoken The MPToken response to convert
*/
friend void
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, MPTokenResponse const& mptoken);
};
} // namespace rpc

View File

@@ -333,13 +333,7 @@ tag_invoke(boost::json::value_to_tag<LedgerEntryHandler::Input>, boost::json::va
{JS(mptoken), ripple::ltMPTOKEN},
{JS(permissioned_domain), ripple::ltPERMISSIONED_DOMAIN},
{JS(vault), ripple::ltVAULT},
{JS(delegate), ripple::ltDELEGATE},
{JS(amendments), ripple::ltAMENDMENTS},
{JS(fee), ripple::ltFEE_SETTINGS},
{JS(hashes), ripple::ltLEDGER_HASHES},
{JS(nft_offer), ripple::ltNFTOKEN_OFFER},
{JS(nunl), ripple::ltNEGATIVE_UNL},
{JS(signer_list), ripple::ltSIGNER_LIST}
{JS(delegate), ripple::ltDELEGATE}
};
auto const parseBridgeFromJson = [](boost::json::value const& bridgeJson) {

View File

@@ -428,12 +428,6 @@ public:
validation::CustomValidators::accountBase58Validator, Status(ClioError::RpcMalformedAddress)
}}
}}},
{JS(amendments), kMALFORMED_REQUEST_HEX_STRING_VALIDATOR},
{JS(fee), kMALFORMED_REQUEST_HEX_STRING_VALIDATOR},
{JS(hashes), kMALFORMED_REQUEST_HEX_STRING_VALIDATOR},
{JS(nft_offer), kMALFORMED_REQUEST_HEX_STRING_VALIDATOR},
{JS(nunl), kMALFORMED_REQUEST_HEX_STRING_VALIDATOR},
{JS(signer_list), kMALFORMED_REQUEST_HEX_STRING_VALIDATOR},
{JS(ledger), check::Deprecated{}},
{"include_deleted", validation::Type<bool>{}},
};

View File

@@ -29,7 +29,6 @@
#include <algorithm>
#include <memory>
#include <string_view>
void
LoggerFixture::init()

View File

@@ -1442,69 +1442,38 @@ createLptCurrency(std::string_view assetCurrency, std::string_view asset2Currenc
}
ripple::STObject
createMptIssuanceObject(
std::string_view accountId,
std::uint32_t seq,
std::optional<std::string_view> metadata,
std::uint32_t flags,
std::uint64_t outstandingAmount,
std::optional<std::uint16_t> transferFee,
std::optional<std::uint8_t> assetScale,
std::optional<std::uint64_t> maxAmount,
std::optional<std::uint64_t> lockedAmount,
std::optional<std::string_view> domainId
)
createMptIssuanceObject(std::string_view accountId, std::uint32_t seq, std::string_view metadata)
{
ripple::STObject mptIssuance(ripple::sfLedgerEntry);
mptIssuance.setAccountID(ripple::sfIssuer, getAccountIdWithString(accountId));
mptIssuance.setFieldU16(ripple::sfLedgerEntryType, ripple::ltMPTOKEN_ISSUANCE);
mptIssuance.setFieldU32(ripple::sfFlags, 0);
mptIssuance.setFieldU32(ripple::sfSequence, seq);
mptIssuance.setFieldU64(ripple::sfOwnerNode, 0);
mptIssuance.setFieldH256(ripple::sfPreviousTxnID, ripple::uint256{});
mptIssuance.setFieldU32(ripple::sfFlags, flags);
mptIssuance.setFieldU32(ripple::sfPreviousTxnLgrSeq, 0);
mptIssuance.setFieldU64(ripple::sfOutstandingAmount, outstandingAmount);
if (transferFee.has_value())
mptIssuance.setFieldU16(ripple::sfTransferFee, *transferFee);
if (assetScale.has_value())
mptIssuance.setFieldU8(ripple::sfAssetScale, *assetScale);
if (maxAmount.has_value())
mptIssuance.setFieldU64(ripple::sfMaximumAmount, *maxAmount);
if (lockedAmount.has_value())
mptIssuance.setFieldU64(ripple::sfLockedAmount, *lockedAmount);
if (metadata.has_value()) {
ripple::Slice const sliceMetadata(metadata->data(), metadata->size());
mptIssuance.setFieldVL(ripple::sfMPTokenMetadata, sliceMetadata);
}
if (domainId.has_value())
mptIssuance.setFieldH256(ripple::sfDomainID, ripple::uint256{*domainId});
mptIssuance.setFieldU64(ripple::sfMaximumAmount, 0);
mptIssuance.setFieldU64(ripple::sfOutstandingAmount, 0);
ripple::Slice const sliceMetadata(metadata.data(), metadata.size());
mptIssuance.setFieldVL(ripple::sfMPTokenMetadata, sliceMetadata);
return mptIssuance;
}
ripple::STObject
createMpTokenObject(
std::string_view accountId,
ripple::uint192 issuanceID,
std::uint64_t mptAmount,
std::uint32_t flags,
std::optional<uint64_t> lockedAmount
)
createMpTokenObject(std::string_view accountId, ripple::uint192 issuanceID, std::uint64_t mptAmount)
{
ripple::STObject mptoken(ripple::sfLedgerEntry);
mptoken.setAccountID(ripple::sfAccount, getAccountIdWithString(accountId));
mptoken[ripple::sfMPTokenIssuanceID] = issuanceID;
mptoken.setFieldU16(ripple::sfLedgerEntryType, ripple::ltMPTOKEN);
mptoken.setFieldU32(ripple::sfFlags, flags);
mptoken.setFieldU32(ripple::sfFlags, 0);
mptoken.setFieldU64(ripple::sfOwnerNode, 0);
mptoken.setFieldH256(ripple::sfPreviousTxnID, ripple::uint256{});
mptoken.setFieldU32(ripple::sfPreviousTxnLgrSeq, 0);
if (mptAmount != 0u)
mptoken.setFieldU64(ripple::sfMPTAmount, mptAmount);
if (lockedAmount.has_value())
mptoken.setFieldU64(ripple::sfLockedAmount, *lockedAmount);
return mptoken;
}

View File

@@ -451,27 +451,10 @@ createDidObject(std::string_view accountId, std::string_view didDoc, std::string
createLptCurrency(std::string_view assetCurrency, std::string_view asset2Currency);
[[nodiscard]] ripple::STObject
createMptIssuanceObject(
std::string_view accountId,
std::uint32_t seq,
std::optional<std::string_view> metadata = std::nullopt,
std::uint32_t flags = 0,
std::uint64_t outstandingAmount = 0,
std::optional<std::uint16_t> transferFee = std::nullopt,
std::optional<std::uint8_t> assetScale = std::nullopt,
std::optional<std::uint64_t> maxAmount = std::nullopt,
std::optional<std::uint64_t> lockedAmount = std::nullopt,
std::optional<std::string_view> domainId = std::nullopt
);
createMptIssuanceObject(std::string_view accountId, std::uint32_t seq, std::string_view metadata);
[[nodiscard]] ripple::STObject
createMpTokenObject(
std::string_view accountId,
ripple::uint192 issuanceID,
std::uint64_t mptAmount = 1,
std::uint32_t flags = 0,
std::optional<uint64_t> lockedAmount = std::nullopt
);
createMpTokenObject(std::string_view accountId, ripple::uint192 issuanceID, std::uint64_t mptAmount = 1);
[[nodiscard]] ripple::STObject
createMPTIssuanceCreateTx(std::string_view accountId, uint32_t fee, uint32_t seq);

View File

@@ -100,8 +100,6 @@ target_sources(
rpc/handlers/AccountCurrenciesTests.cpp
rpc/handlers/AccountInfoTests.cpp
rpc/handlers/AccountLinesTests.cpp
rpc/handlers/AccountMPTokenIssuancesTests.cpp
rpc/handlers/AccountMPTokensTests.cpp
rpc/handlers/AccountNFTsTests.cpp
rpc/handlers/AccountObjectsTests.cpp
rpc/handlers/AccountOffersTests.cpp

View File

@@ -36,9 +36,6 @@ struct AmendmentBlockHandlerTest : util::prometheus::WithPrometheus, SyncAsioCon
etl::SystemState state;
};
// Note: This test can be flaky due to the way it was written (depends on time)
// Since the old ETL is going to be replaced by ETLng all tests including this one will be deleted anyway so the fix for
// flakiness is to increase the context runtime to 50ms until then (to not waste time).
TEST_F(AmendmentBlockHandlerTest, CallToNotifyAmendmentBlockedSetsStateAndRepeatedlyCallsAction)
{
AmendmentBlockHandler handler{ctx_, state, std::chrono::nanoseconds{1}, actionMock.AsStdFunction()};
@@ -48,7 +45,12 @@ TEST_F(AmendmentBlockHandlerTest, CallToNotifyAmendmentBlockedSetsStateAndRepeat
handler.notifyAmendmentBlocked();
EXPECT_TRUE(state.isAmendmentBlocked);
runContextFor(std::chrono::milliseconds{50});
// Code runs significantly slower when assertions are enabled
#ifdef _GLIBCXX_ASSERTIONS
runContextFor(std::chrono::milliseconds{10});
#else
runContextFor(std::chrono::milliseconds{1});
#endif
}
struct DefaultAmendmentBlockActionTest : LoggerFixture {};

View File

@@ -374,12 +374,10 @@ TEST_F(GrpcSourceNgTests, DeadlineIsHandledCorrectly)
auto grpcSource =
std::make_unique<etlng::impl::GrpcSource>("localhost", std::to_string(getXRPLMockPort()), kDEADLINE);
// Note: this may not be called at all if gRPC cancels before it gets a chance to call the stub
EXPECT_CALL(mockXrpLedgerAPIService, GetLedger)
.Times(testing::AtMost(1))
.WillRepeatedly([&](grpc::ServerContext*,
org::xrpl::rpc::v1::GetLedgerRequest const*,
org::xrpl::rpc::v1::GetLedgerResponse*) {
.WillOnce([&](grpc::ServerContext*,
org::xrpl::rpc::v1::GetLedgerRequest const*,
org::xrpl::rpc::v1::GetLedgerResponse*) {
// wait for main thread to discard us and fail the test if unsuccessful within expected timeframe
[&] { ASSERT_TRUE(sem.try_acquire_for(std::chrono::milliseconds{50})); }();
return grpc::Status{};

View File

@@ -1282,8 +1282,7 @@ TEST_F(FeedTransactionTest, PublishesMPTokenIssuanceCreateTx)
}
],
"TransactionIndex": 0,
"TransactionResult": "tesSUCCESS",
"mpt_issuance_id": "000000014B4E9C06F24296074F7BC48F92A97916C6DC5EA9"
"TransactionResult": "tesSUCCESS"
},
"ctid": "C000002100000000",
"type": "transaction",

View File

@@ -1,840 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include "data/Types.hpp"
#include "rpc/Errors.hpp"
#include "rpc/common/AnyHandler.hpp"
#include "rpc/common/Types.hpp"
#include "rpc/handlers/AccountMPTokenIssuances.hpp"
#include "util/HandlerBaseTestFixture.hpp"
#include "util/NameGenerator.hpp"
#include "util/TestObject.hpp"
#include <boost/asio/spawn.hpp>
#include <boost/json/parse.hpp>
#include <boost/json/value.hpp>
#include <boost/json/value_to.hpp>
#include <fmt/format.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <xrpl/basics/base_uint.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/LedgerHeader.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STObject.h>
#include <cstdint>
#include <functional>
#include <optional>
#include <string>
#include <vector>
using namespace rpc;
using namespace data;
namespace json = boost::json;
using namespace testing;
namespace {
constexpr auto kLEDGER_HASH = "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652";
constexpr auto kACCOUNT = "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn";
constexpr auto kISSUANCE_INDEX1 = "A6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC321";
constexpr auto kISSUANCE_INDEX2 = "B6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC322";
// unique values for issuance1
constexpr uint64_t kISSUANCE1_MAX_AMOUNT = 10000;
constexpr uint64_t kISSUANCE1_OUTSTANDING_AMOUNT = 5000;
constexpr uint8_t kISSUANCE1_ASSET_SCALE = 2;
// unique values for issuance2
constexpr uint64_t kISSUANCE2_MAX_AMOUNT = 20000;
constexpr uint64_t kISSUANCE2_OUTSTANDING_AMOUNT = 800;
constexpr uint64_t kISSUANCE2_LOCKED_AMOUNT = 100;
constexpr uint16_t kISSUANCE2_TRANSFER_FEE = 5;
constexpr auto kISSUANCE2_METADATA = "test-meta";
constexpr auto kISSUANCE2_METADATA_HEX = "746573742D6D657461";
constexpr auto kISSUANCE2_DOMAIN_ID_HEX = "E6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC321";
// define expected JSON for mpt issuances
auto const kISSUANCE_OUT1 = fmt::format(
R"JSON({{
"issuer": "{}",
"sequence": 1,
"maximum_amount": {},
"outstanding_amount": {},
"asset_scale": {},
"mpt_can_escrow": true,
"mpt_can_trade": true,
"mpt_require_auth": true,
"mpt_can_transfer": true
}})JSON",
kACCOUNT,
kISSUANCE1_MAX_AMOUNT,
kISSUANCE1_OUTSTANDING_AMOUNT,
kISSUANCE1_ASSET_SCALE
);
auto const kISSUANCE_OUT2 = fmt::format(
R"JSON({{
"issuer": "{}",
"sequence": 2,
"maximum_amount": {},
"outstanding_amount": {},
"locked_amount": {},
"transfer_fee": {},
"mptoken_metadata": "{}",
"domain_id": "{}",
"mpt_can_lock": true,
"mpt_locked": true,
"mpt_can_clawback": true
}})JSON",
kACCOUNT,
kISSUANCE2_MAX_AMOUNT,
kISSUANCE2_OUTSTANDING_AMOUNT,
kISSUANCE2_LOCKED_AMOUNT,
kISSUANCE2_TRANSFER_FEE,
kISSUANCE2_METADATA_HEX,
kISSUANCE2_DOMAIN_ID_HEX
);
} // namespace
struct RPCAccountMPTokenIssuancesHandlerTest : HandlerBaseTest {
RPCAccountMPTokenIssuancesHandlerTest()
{
backend_->setRange(10, 30);
}
};
struct AccountMPTokenIssuancesParamTestCaseBundle {
std::string testName;
std::string testJson;
std::string expectedError;
std::string expectedErrorMessage;
};
struct AccountMPTokenIssuancesParameterTest : RPCAccountMPTokenIssuancesHandlerTest,
WithParamInterface<AccountMPTokenIssuancesParamTestCaseBundle> {};
// generate values for invalid params test
static auto
generateTestValuesForInvalidParamsTest()
{
return std::vector<AccountMPTokenIssuancesParamTestCaseBundle>{
{.testName = "NonHexLedgerHash",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_hash": "xxx" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledger_hashMalformed"},
{.testName = "NonStringLedgerHash",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_hash": 123 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledger_hashNotString"},
{.testName = "InvalidLedgerIndexString",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_index": "notvalidated" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledgerIndexMalformed"},
{.testName = "MarkerNotString",
.testJson = fmt::format(R"JSON({{ "account": "{}", "marker": 9 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "markerNotString"},
{.testName = "InvalidMarkerContent",
.testJson = fmt::format(R"JSON({{ "account": "{}", "marker": "123invalid" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Malformed cursor."},
{.testName = "AccountMissing",
.testJson = R"JSON({ "limit": 10 })JSON",
.expectedError = "invalidParams",
.expectedErrorMessage = "Required field 'account' missing"},
{.testName = "AccountNotString",
.testJson = R"JSON({ "account": 123 })JSON",
.expectedError = "actMalformed",
.expectedErrorMessage = "Account malformed."},
{.testName = "AccountMalformed",
.testJson = R"JSON({ "account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp" })JSON",
.expectedError = "actMalformed",
.expectedErrorMessage = "Account malformed."},
{.testName = "LimitNotInteger",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": "t" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitNegative",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": -1 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitZero",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": 0 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitTypeInvalid",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": true }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."}
};
}
INSTANTIATE_TEST_SUITE_P(
RPCAccountMPTokenIssuancesInvalidParamsGroup,
AccountMPTokenIssuancesParameterTest,
ValuesIn(generateTestValuesForInvalidParamsTest()),
tests::util::kNAME_GENERATOR
);
// test invalid params bundle
TEST_P(AccountMPTokenIssuancesParameterTest, InvalidParams)
{
auto const testBundle = GetParam();
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const req = json::parse(testBundle.testJson);
auto const output = handler.process(req, Context{yield});
ASSERT_FALSE(output);
auto const err = rpc::makeError(output.result.error());
EXPECT_EQ(err.at("error").as_string(), testBundle.expectedError);
EXPECT_EQ(err.at("error_message").as_string(), testBundle.expectedErrorMessage);
});
}
// ledger not found via hash
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, NonExistLedgerViaLedgerHash)
{
// mock fetchLedgerByHash return empty
EXPECT_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _))
.WillOnce(Return(std::optional<ripple::LedgerHeader>{}));
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"ledger_hash": "{}"
}})JSON",
kACCOUNT,
kLEDGER_HASH
)
);
runSpawn([&, this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const output = handler.process(input, Context{.yield = std::ref(yield)});
ASSERT_FALSE(output);
auto const err = rpc::makeError(output.result.error());
EXPECT_EQ(err.at("error").as_string(), "lgrNotFound");
EXPECT_EQ(err.at("error_message").as_string(), "ledgerNotFound");
});
}
// ledger not found via string index
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, NonExistLedgerViaLedgerStringIndex)
{
// mock fetchLedgerBySequence return empty
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional<ripple::LedgerHeader>{}));
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"ledger_index": "4"
}})JSON",
kACCOUNT
)
);
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_FALSE(output);
auto const err = rpc::makeError(output.result.error());
EXPECT_EQ(err.at("error").as_string(), "lgrNotFound");
EXPECT_EQ(err.at("error_message").as_string(), "ledgerNotFound");
});
}
// ledger not found via int index
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, NonExistLedgerViaLedgerIntIndex)
{
// mock fetchLedgerBySequence return empty
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional<ripple::LedgerHeader>{}));
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"ledger_index": 4
}})JSON",
kACCOUNT
)
);
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_FALSE(output);
auto const err = rpc::makeError(output.result.error());
EXPECT_EQ(err.at("error").as_string(), "lgrNotFound");
EXPECT_EQ(err.at("error_message").as_string(), "ledgerNotFound");
});
}
// ledger not found via hash (seq > max)
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, LedgerSeqOutOfRangeByHash)
{
auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 31);
EXPECT_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillOnce(Return(ledgerHeader));
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"ledger_hash": "{}"
}})JSON",
kACCOUNT,
kLEDGER_HASH
)
);
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_FALSE(output);
auto const err = rpc::makeError(output.result.error());
EXPECT_EQ(err.at("error").as_string(), "lgrNotFound");
EXPECT_EQ(err.at("error_message").as_string(), "ledgerNotFound");
});
}
// ledger not found via index (seq > max)
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, LedgerSeqOutOfRangeByIndex)
{
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"ledger_index": "31"
}})JSON",
kACCOUNT
)
);
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_FALSE(output);
auto const err = rpc::makeError(output.result.error());
EXPECT_EQ(err.at("error").as_string(), "lgrNotFound");
EXPECT_EQ(err.at("error_message").as_string(), "ledgerNotFound");
});
}
// account not exist
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, NonExistAccount)
{
auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
EXPECT_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillOnce(Return(ledgerHeader));
// fetch account object return empty
EXPECT_CALL(*backend_, doFetchLedgerObject).WillOnce(Return(std::optional<Blob>{}));
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"ledger_hash": "{}"
}})JSON",
kACCOUNT,
kLEDGER_HASH
)
);
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_FALSE(output);
auto const err = rpc::makeError(output.result.error());
EXPECT_EQ(err.at("error").as_string(), "actNotFound");
EXPECT_EQ(err.at("error_message").as_string(), "Account not found.");
});
}
// fetch mptoken issuances via account successfully
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, DefaultParameters)
{
auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
// return non-empty account
auto account = getAccountIdWithString(kACCOUNT);
auto accountKk = ripple::keylet::account(account).key;
auto owneDirKk = ripple::keylet::ownerDir(account).key;
ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
// return two mptoken issuance objects
ripple::STObject const ownerDir = createOwnerDirLedgerObject(
{ripple::uint256{kISSUANCE_INDEX1}, ripple::uint256{kISSUANCE_INDEX2}}, kISSUANCE_INDEX1
);
ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, _, _)).WillByDefault(Return(ownerDir.getSerializer().peekData()));
// mocking mptoken issuance ledger objects
std::vector<Blob> bbs;
auto const issuance1 = createMptIssuanceObject(
kACCOUNT,
1,
std::nullopt,
ripple::lsfMPTCanTrade | ripple::lsfMPTRequireAuth | ripple::lsfMPTCanTransfer | ripple::lsfMPTCanEscrow,
kISSUANCE1_OUTSTANDING_AMOUNT,
std::nullopt,
kISSUANCE1_ASSET_SCALE,
kISSUANCE1_MAX_AMOUNT
);
auto const issuance2 = createMptIssuanceObject(
kACCOUNT,
2,
kISSUANCE2_METADATA,
ripple::lsfMPTLocked | ripple::lsfMPTCanLock | ripple::lsfMPTCanClawback,
kISSUANCE2_OUTSTANDING_AMOUNT,
kISSUANCE2_TRANSFER_FEE,
std::nullopt,
kISSUANCE2_MAX_AMOUNT,
kISSUANCE2_LOCKED_AMOUNT,
kISSUANCE2_DOMAIN_ID_HEX
);
bbs.push_back(issuance1.getSerializer().peekData());
bbs.push_back(issuance2.getSerializer().peekData());
EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
runSpawn([this](auto yield) {
auto const expected = fmt::format(
R"JSON({{
"account": "{}",
"ledger_hash": "{}",
"ledger_index": 30,
"validated": true,
"limit": {},
"mpt_issuances": [
{},
{}
]
}})JSON",
kACCOUNT,
kLEDGER_HASH,
AccountMPTokenIssuancesHandler::kLIMIT_DEFAULT,
kISSUANCE_OUT1,
kISSUANCE_OUT2
);
auto const input = json::parse(fmt::format(R"JSON({{"account": "{}"}})JSON", kACCOUNT));
auto handler = AnyHandler{AccountMPTokenIssuancesHandler{this->backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_TRUE(output);
EXPECT_EQ(json::parse(expected), *output.result);
});
}
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, UseLimit)
{
constexpr int kLIMIT = 20;
auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(ledgerHeader));
auto account = getAccountIdWithString(kACCOUNT);
auto accountKk = ripple::keylet::account(account).key;
auto owneDirKk = ripple::keylet::ownerDir(account).key;
ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
std::vector<ripple::uint256> indexes;
std::vector<Blob> bbs;
for (int i = 0; i < 50; ++i) {
indexes.emplace_back(kISSUANCE_INDEX1);
auto const issuance = createMptIssuanceObject(kACCOUNT, i);
bbs.push_back(issuance.getSerializer().peekData());
}
ripple::STObject ownerDir = createOwnerDirLedgerObject(indexes, kISSUANCE_INDEX1);
ownerDir.setFieldU64(ripple::sfIndexNext, 99);
ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, _, _)).WillByDefault(Return(ownerDir.getSerializer().peekData()));
EXPECT_CALL(*backend_, doFetchLedgerObject).Times(7);
ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs));
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(3);
runSpawn([this, kLIMIT](auto yield) {
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"limit": {}
}})JSON",
kACCOUNT,
kLIMIT
)
);
auto handler = AnyHandler{AccountMPTokenIssuancesHandler{this->backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_TRUE(output);
auto const resultJson = (*output.result).as_object();
EXPECT_EQ(resultJson.at("mpt_issuances").as_array().size(), kLIMIT);
ASSERT_TRUE(resultJson.contains("marker"));
EXPECT_THAT(boost::json::value_to<std::string>(resultJson.at("marker")), EndsWith(",0"));
});
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"limit": {}
}})JSON",
kACCOUNT,
AccountMPTokenIssuancesHandler::kLIMIT_MIN - 1
)
);
auto const output = handler.process(input, Context{yield});
ASSERT_TRUE(output);
EXPECT_EQ((*output.result).as_object().at("limit").as_uint64(), AccountMPTokenIssuancesHandler::kLIMIT_MIN);
});
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountMPTokenIssuancesHandler{backend_}};
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"limit": {}
}})JSON",
kACCOUNT,
AccountMPTokenIssuancesHandler::kLIMIT_MAX + 1
)
);
auto const output = handler.process(input, Context{yield});
ASSERT_TRUE(output);
EXPECT_EQ((*output.result).as_object().at("limit").as_uint64(), AccountMPTokenIssuancesHandler::kLIMIT_MAX);
});
}
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, MarkerOutput)
{
constexpr auto kNEXT_PAGE = 99;
constexpr auto kLIMIT = 15;
auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
auto account = getAccountIdWithString(kACCOUNT);
auto accountKk = ripple::keylet::account(account).key;
auto ownerDirKk = ripple::keylet::ownerDir(account).key;
auto ownerDir2Kk = ripple::keylet::page(ripple::keylet::ownerDir(account), kNEXT_PAGE).key;
ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3);
std::vector<ripple::uint256> indexes;
indexes.reserve(10);
for (int i = 0; i < 10; ++i) {
indexes.emplace_back(kISSUANCE_INDEX1);
}
std::vector<Blob> bbs;
bbs.reserve(kLIMIT);
for (int i = 0; i < kLIMIT; ++i) {
bbs.push_back(createMptIssuanceObject(kACCOUNT, i).getSerializer().peekData());
}
EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
// mock the first directory page
ripple::STObject ownerDir1 = createOwnerDirLedgerObject(indexes, kISSUANCE_INDEX1);
ownerDir1.setFieldU64(ripple::sfIndexNext, kNEXT_PAGE);
ON_CALL(*backend_, doFetchLedgerObject(ownerDirKk, _, _))
.WillByDefault(Return(ownerDir1.getSerializer().peekData()));
// mock the second directory page
ripple::STObject ownerDir2 = createOwnerDirLedgerObject(indexes, kISSUANCE_INDEX2);
ownerDir2.setFieldU64(ripple::sfIndexNext, 0);
ON_CALL(*backend_, doFetchLedgerObject(ownerDir2Kk, _, _))
.WillByDefault(Return(ownerDir2.getSerializer().peekData()));
runSpawn([this, kLIMIT, kNEXT_PAGE](auto yield) {
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"limit": {}
}})JSON",
kACCOUNT,
kLIMIT
)
);
auto handler = AnyHandler{AccountMPTokenIssuancesHandler{this->backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_TRUE(output);
auto const& resultJson = (*output.result).as_object();
EXPECT_EQ(
boost::json::value_to<std::string>(resultJson.at("marker")),
fmt::format("{},{}", kISSUANCE_INDEX1, kNEXT_PAGE)
);
EXPECT_EQ(resultJson.at("mpt_issuances").as_array().size(), kLIMIT);
});
}
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, MarkerInput)
{
constexpr auto kNEXT_PAGE = 99;
constexpr auto kLIMIT = 15;
auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
auto account = getAccountIdWithString(kACCOUNT);
auto accountKk = ripple::keylet::account(account).key;
auto ownerDirKk = ripple::keylet::page(ripple::keylet::ownerDir(account), kNEXT_PAGE).key;
ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3);
std::vector<Blob> bbs;
std::vector<ripple::uint256> indexes;
for (int i = 0; i < kLIMIT; ++i) {
indexes.emplace_back(kISSUANCE_INDEX1);
bbs.push_back(createMptIssuanceObject(kACCOUNT, i).getSerializer().peekData());
}
ripple::STObject ownerDir = createOwnerDirLedgerObject(indexes, kISSUANCE_INDEX1);
ownerDir.setFieldU64(ripple::sfIndexNext, 0);
ON_CALL(*backend_, doFetchLedgerObject(ownerDirKk, _, _))
.WillByDefault(Return(ownerDir.getSerializer().peekData()));
EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
runSpawn([this, kLIMIT, kNEXT_PAGE](auto yield) {
auto const input = json::parse(
fmt::format(
R"JSON({{
"account": "{}",
"limit": {},
"marker": "{},{}"
}})JSON",
kACCOUNT,
kLIMIT,
kISSUANCE_INDEX1,
kNEXT_PAGE
)
);
auto handler = AnyHandler{AccountMPTokenIssuancesHandler{this->backend_}};
auto const output = handler.process(input, Context{yield});
ASSERT_TRUE(output);
auto const& resultJson = (*output.result).as_object();
EXPECT_TRUE(resultJson.if_contains("marker") == nullptr);
EXPECT_EQ(resultJson.at("mpt_issuances").as_array().size(), kLIMIT - 1);
});
}
// A "limit" below kLIMIT_MIN is clamped up to kLIMIT_MIN; the clamped value is
// echoed in the response's "limit" field and both issuances are still returned.
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, LimitLessThanMin)
{
    auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
    auto account = getAccountIdWithString(kACCOUNT);
    auto accountKk = ripple::keylet::account(account).key;
    auto owneDirKk = ripple::keylet::ownerDir(account).key;
    // Account object exists (payload irrelevant to this handler); its owner
    // directory lists two issuance entries.
    ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
    ripple::STObject const ownerDir = createOwnerDirLedgerObject(
        {ripple::uint256{kISSUANCE_INDEX1}, ripple::uint256{kISSUANCE_INDEX2}}, kISSUANCE_INDEX1
    );
    ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, _, _)).WillByDefault(Return(ownerDir.getSerializer().peekData()));
    // Two single-object fetches expected: the account object and the directory page.
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2);
    std::vector<Blob> bbs;
    // Issuance 1: no metadata (std::nullopt), trade/auth/transfer/escrow flags set.
    auto const issuance1 = createMptIssuanceObject(
        kACCOUNT,
        1,
        std::nullopt,
        ripple::lsfMPTCanTrade | ripple::lsfMPTRequireAuth | ripple::lsfMPTCanTransfer | ripple::lsfMPTCanEscrow,
        kISSUANCE1_OUTSTANDING_AMOUNT,
        std::nullopt,
        kISSUANCE1_ASSET_SCALE,
        kISSUANCE1_MAX_AMOUNT
    );
    // Issuance 2: metadata, lock/clawback flags, a transfer fee, a locked
    // amount and a domain ID (per the constant names — shape defined by
    // createMptIssuanceObject in TestObject).
    auto const issuance2 = createMptIssuanceObject(
        kACCOUNT,
        2,
        kISSUANCE2_METADATA,
        ripple::lsfMPTLocked | ripple::lsfMPTCanLock | ripple::lsfMPTCanClawback,
        kISSUANCE2_OUTSTANDING_AMOUNT,
        kISSUANCE2_TRANSFER_FEE,
        std::nullopt,
        kISSUANCE2_MAX_AMOUNT,
        kISSUANCE2_LOCKED_AMOUNT,
        kISSUANCE2_DOMAIN_ID_HEX
    );
    bbs.push_back(issuance1.getSerializer().peekData());
    bbs.push_back(issuance2.getSerializer().peekData());
    EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
    runSpawn([this](auto yield) {
        // Request a limit one below the minimum.
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {}
                }})JSON",
                kACCOUNT,
                AccountMPTokenIssuancesHandler::kLIMIT_MIN - 1
            )
        );
        // Expected output reports the clamped (minimum) limit.
        auto const correctOutput = fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_hash": "{}",
                "ledger_index": 30,
                "validated": true,
                "limit": {},
                "mpt_issuances": [
                    {},
                    {}
                ]
            }})JSON",
            kACCOUNT,
            kLEDGER_HASH,
            AccountMPTokenIssuancesHandler::kLIMIT_MIN,
            kISSUANCE_OUT1,
            kISSUANCE_OUT2
        );
        auto handler = AnyHandler{AccountMPTokenIssuancesHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ(json::parse(correctOutput), *output.result);
    });
}
// A "limit" above kLIMIT_MAX is clamped down to kLIMIT_MAX; the clamped value
// is echoed in the response's "limit" field and both issuances are returned.
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, LimitMoreThanMax)
{
    auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
    auto account = getAccountIdWithString(kACCOUNT);
    auto accountKk = ripple::keylet::account(account).key;
    auto owneDirKk = ripple::keylet::ownerDir(account).key;
    // Account object exists; its owner directory lists two issuance entries.
    ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
    ripple::STObject const ownerDir = createOwnerDirLedgerObject(
        {ripple::uint256{kISSUANCE_INDEX1}, ripple::uint256{kISSUANCE_INDEX2}}, kISSUANCE_INDEX1
    );
    ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, _, _)).WillByDefault(Return(ownerDir.getSerializer().peekData()));
    // Two single-object fetches expected: the account object and the directory page.
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2);
    std::vector<Blob> bbs;
    // Same two issuance fixtures as in LimitLessThanMin.
    auto const issuance1 = createMptIssuanceObject(
        kACCOUNT,
        1,
        std::nullopt,
        ripple::lsfMPTCanTrade | ripple::lsfMPTRequireAuth | ripple::lsfMPTCanTransfer | ripple::lsfMPTCanEscrow,
        kISSUANCE1_OUTSTANDING_AMOUNT,
        std::nullopt,
        kISSUANCE1_ASSET_SCALE,
        kISSUANCE1_MAX_AMOUNT
    );
    auto const issuance2 = createMptIssuanceObject(
        kACCOUNT,
        2,
        kISSUANCE2_METADATA,
        ripple::lsfMPTLocked | ripple::lsfMPTCanLock | ripple::lsfMPTCanClawback,
        kISSUANCE2_OUTSTANDING_AMOUNT,
        kISSUANCE2_TRANSFER_FEE,
        std::nullopt,
        kISSUANCE2_MAX_AMOUNT,
        kISSUANCE2_LOCKED_AMOUNT,
        kISSUANCE2_DOMAIN_ID_HEX
    );
    bbs.push_back(issuance1.getSerializer().peekData());
    bbs.push_back(issuance2.getSerializer().peekData());
    EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
    runSpawn([this](auto yield) {
        // Request a limit one above the maximum.
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {}
                }})JSON",
                kACCOUNT,
                AccountMPTokenIssuancesHandler::kLIMIT_MAX + 1
            )
        );
        // Expected output reports the clamped (maximum) limit.
        auto const correctOutput = fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_hash": "{}",
                "ledger_index": 30,
                "validated": true,
                "limit": {},
                "mpt_issuances": [
                    {},
                    {}
                ]
            }})JSON",
            kACCOUNT,
            kLEDGER_HASH,
            AccountMPTokenIssuancesHandler::kLIMIT_MAX,
            kISSUANCE_OUT1,
            kISSUANCE_OUT2
        );
        auto handler = AnyHandler{AccountMPTokenIssuancesHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ(json::parse(correctOutput), *output.result);
    });
}
// An account whose owner directory holds no issuance entries yields an empty
// "mpt_issuances" array rather than an error.
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, EmptyResult)
{
    auto const ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));

    auto const accountId = getAccountIdWithString(kACCOUNT);
    auto const accountKey = ripple::keylet::account(accountId).key;
    auto const ownerDirKey = ripple::keylet::ownerDir(accountId).key;

    // The account object exists (contents irrelevant)...
    ON_CALL(*backend_, doFetchLedgerObject(accountKey, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));

    // ...but the owner directory contains no entries.
    ripple::STObject const emptyOwnerDir = createOwnerDirLedgerObject({}, kISSUANCE_INDEX1);
    ON_CALL(*backend_, doFetchLedgerObject(ownerDirKey, _, _))
        .WillByDefault(Return(emptyOwnerDir.getSerializer().peekData()));
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2);

    runSpawn([this](auto yield) {
        auto const request = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}"
                }})JSON",
                kACCOUNT
            )
        );
        auto handler = AnyHandler{AccountMPTokenIssuancesHandler{this->backend_}};
        auto const output = handler.process(request, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ((*output.result).as_object().at("mpt_issuances").as_array().size(), 0);
    });
}

View File

@@ -1,755 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include "data/Types.hpp"
#include "rpc/Errors.hpp"
#include "rpc/common/AnyHandler.hpp"
#include "rpc/common/Types.hpp"
#include "rpc/handlers/AccountMPTokens.hpp"
#include "util/HandlerBaseTestFixture.hpp"
#include "util/NameGenerator.hpp"
#include "util/TestObject.hpp"
#include <boost/json/parse.hpp>
#include <boost/json/value.hpp>
#include <boost/json/value_to.hpp>
#include <fmt/format.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <xrpl/basics/base_uint.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/LedgerHeader.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STObject.h>
#include <cstdint>
#include <optional>
#include <string>
#include <vector>
using namespace rpc;
using namespace data;
namespace json = boost::json;
using namespace testing;
namespace {

// Ledger hash used by every test in this file.
constexpr auto kLEDGER_HASH = "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652";
// Account whose MPTokens are queried.
constexpr auto kACCOUNT = "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn";
// Issuance ID shared by both fixture tokens.
constexpr auto kISSUANCE_ID_HEX = "00080000B43A1A953EADDB3314A73523789947C752044C49";
// Ledger indexes of the two MPToken objects referenced by the owner directory.
constexpr auto kTOKEN_INDEX1 = "A6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC321";
constexpr auto kTOKEN_INDEX2 = "B6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC322";
// Amounts carried by the fixture tokens (token 1 also has a locked portion).
constexpr uint64_t kTOKEN1_AMOUNT = 500;
constexpr uint64_t kTOKEN1_LOCKED_AMOUNT = 50;
constexpr uint64_t kTOKEN2_AMOUNT = 250;

// define expected JSON for mptokens
// Token 1: locked flag set, with a locked amount.
auto const kTOKEN_OUT1 = fmt::format(
    R"JSON({{
        "account": "{}",
        "mpt_issuance_id": "{}",
        "mpt_amount": {},
        "locked_amount": {},
        "mpt_locked": true
    }})JSON",
    kACCOUNT,
    kISSUANCE_ID_HEX,
    kTOKEN1_AMOUNT,
    kTOKEN1_LOCKED_AMOUNT
);
// Token 2: authorized flag set, no locked amount.
auto const kTOKEN_OUT2 = fmt::format(
    R"JSON({{
        "account": "{}",
        "mpt_issuance_id": "{}",
        "mpt_amount": {},
        "mpt_authorized": true
    }})JSON",
    kACCOUNT,
    kISSUANCE_ID_HEX,
    kTOKEN2_AMOUNT
);
}  // namespace
// Fixture for account_mptokens handler tests; pins the backend's available
// ledger range to [10, 30] so out-of-range sequences can be exercised.
struct RPCAccountMPTokensHandlerTest : HandlerBaseTest {
    RPCAccountMPTokensHandlerTest()
    {
        backend_->setRange(10, 30);
    }
};
// One invalid-request scenario: the raw request JSON plus the error code and
// message the handler is expected to produce for it.
struct AccountMPTokensParamTestCaseBundle {
    std::string testName;              // suffix used to name the generated test
    std::string testJson;              // request payload sent to the handler
    std::string expectedError;         // expected "error" field
    std::string expectedErrorMessage;  // expected "error_message" field
};
// Parameterised fixture: each instantiation carries one invalid-request bundle.
struct AccountMPTokensParameterTest : RPCAccountMPTokensHandlerTest,
                                      WithParamInterface<AccountMPTokensParamTestCaseBundle> {};
// generate values for invalid params test
// Each bundle pairs a malformed request with the exact error code and message
// the handler must surface; covers ledger_hash/ledger_index/marker/account/limit
// validation paths.
static auto
generateTestValuesForInvalidParamsTest()
{
    return std::vector<AccountMPTokensParamTestCaseBundle>{
        {.testName = "NonHexLedgerHash",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_hash": "xxx" }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "ledger_hashMalformed"},
        {.testName = "NonStringLedgerHash",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_hash": 123 }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "ledger_hashNotString"},
        {.testName = "InvalidLedgerIndexString",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_index": "notvalidated" }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "ledgerIndexMalformed"},
        {.testName = "MarkerNotString",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "marker": 9 }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "markerNotString"},
        {.testName = "InvalidMarkerContent",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "marker": "123invalid" }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "Malformed cursor."},
        {.testName = "AccountMissing",
         .testJson = R"JSON({ "limit": 10 })JSON",
         .expectedError = "invalidParams",
         .expectedErrorMessage = "Required field 'account' missing"},
        {.testName = "AccountNotString",
         .testJson = R"JSON({ "account": 123 })JSON",
         .expectedError = "actMalformed",
         .expectedErrorMessage = "Account malformed."},
        {.testName = "AccountMalformed",
         .testJson = fmt::format(R"JSON({{ "account": "{}" }})JSON", "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp"),
         .expectedError = "actMalformed",
         .expectedErrorMessage = "Account malformed."},
        {.testName = "LimitNotInteger",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "limit": "t" }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "Invalid parameters."},
        {.testName = "LimitNegative",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "limit": -1 }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "Invalid parameters."},
        {.testName = "LimitZero",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "limit": 0 }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "Invalid parameters."},
        {.testName = "LimitTypeInvalid",
         .testJson = fmt::format(R"JSON({{ "account": "{}", "limit": true }})JSON", kACCOUNT),
         .expectedError = "invalidParams",
         .expectedErrorMessage = "Invalid parameters."}
    };
}
// Register the invalid-parameter cases; per-case test names are derived by
// kNAME_GENERATOR from each bundle's testName.
INSTANTIATE_TEST_SUITE_P(
    RPCAccountMPTokensInvalidParamsGroup,
    AccountMPTokensParameterTest,
    ValuesIn(generateTestValuesForInvalidParamsTest()),
    tests::util::kNAME_GENERATOR
);
// test invalid params bundle
// Drive the handler with each malformed request and check the resulting
// error code and message against the bundle's expectations.
TEST_P(AccountMPTokensParameterTest, InvalidParams)
{
    auto const bundle = GetParam();
    runSpawn([&, this](auto yield) {
        auto const handler = AnyHandler{AccountMPTokensHandler{backend_}};
        auto const request = json::parse(bundle.testJson);
        auto const result = handler.process(request, Context{yield});
        ASSERT_FALSE(result);
        auto const errorJson = rpc::makeError(result.result.error());
        EXPECT_EQ(errorJson.at("error").as_string(), bundle.expectedError);
        EXPECT_EQ(errorJson.at("error_message").as_string(), bundle.expectedErrorMessage);
    });
}
// A ledger_hash that matches no ledger must produce "lgrNotFound".
TEST_F(RPCAccountMPTokensHandlerTest, NonExistLedgerViaLedgerHash)
{
    // mock fetchLedgerByHash to return empty
    EXPECT_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _))
        .WillOnce(Return(std::optional<ripple::LedgerHeader>{}));

    auto const request = json::parse(
        fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_hash": "{}"
            }})JSON",
            kACCOUNT,
            kLEDGER_HASH
        )
    );

    runSpawn([&, this](auto yield) {
        auto const handler = AnyHandler{AccountMPTokensHandler{backend_}};
        auto const result = handler.process(request, Context{yield});
        ASSERT_FALSE(result);
        auto const errorJson = rpc::makeError(result.result.error());
        EXPECT_EQ(errorJson.at("error").as_string(), "lgrNotFound");
        EXPECT_EQ(errorJson.at("error_message").as_string(), "ledgerNotFound");
    });
}
// A string ledger_index that resolves to no ledger must produce "lgrNotFound".
TEST_F(RPCAccountMPTokensHandlerTest, NonExistLedgerViaLedgerStringIndex)
{
    // mock fetchLedgerBySequence to return empty
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional<ripple::LedgerHeader>{}));

    auto const request = json::parse(
        fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_index": "4"
            }})JSON",
            kACCOUNT
        )
    );

    runSpawn([&, this](auto yield) {
        auto const handler = AnyHandler{AccountMPTokensHandler{backend_}};
        auto const result = handler.process(request, Context{yield});
        ASSERT_FALSE(result);
        auto const errorJson = rpc::makeError(result.result.error());
        EXPECT_EQ(errorJson.at("error").as_string(), "lgrNotFound");
        EXPECT_EQ(errorJson.at("error_message").as_string(), "ledgerNotFound");
    });
}
// An integer ledger_index that resolves to no ledger must produce "lgrNotFound".
TEST_F(RPCAccountMPTokensHandlerTest, NonExistLedgerViaLedgerIntIndex)
{
    // mock fetchLedgerBySequence to return empty
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional<ripple::LedgerHeader>{}));

    auto const request = json::parse(
        fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_index": 4
            }})JSON",
            kACCOUNT
        )
    );

    runSpawn([&, this](auto yield) {
        auto const handler = AnyHandler{AccountMPTokensHandler{backend_}};
        auto const result = handler.process(request, Context{yield});
        ASSERT_FALSE(result);
        auto const errorJson = rpc::makeError(result.result.error());
        EXPECT_EQ(errorJson.at("error").as_string(), "lgrNotFound");
        EXPECT_EQ(errorJson.at("error_message").as_string(), "ledgerNotFound");
    });
}
// The ledger looked up by hash has sequence 31, outside the fixture's
// backend range of [10, 30]: the handler must report "lgrNotFound".
TEST_F(RPCAccountMPTokensHandlerTest, LedgerSeqOutOfRangeByHash)
{
    auto const header = createLedgerHeader(kLEDGER_HASH, 31);
    EXPECT_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillOnce(Return(header));

    auto const request = json::parse(
        fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_hash": "{}"
            }})JSON",
            kACCOUNT,
            kLEDGER_HASH
        )
    );

    runSpawn([&, this](auto yield) {
        auto const handler = AnyHandler{AccountMPTokensHandler{backend_}};
        auto const result = handler.process(request, Context{yield});
        ASSERT_FALSE(result);
        auto const errorJson = rpc::makeError(result.result.error());
        EXPECT_EQ(errorJson.at("error").as_string(), "lgrNotFound");
        EXPECT_EQ(errorJson.at("error_message").as_string(), "ledgerNotFound");
    });
}
// ledger_index 31 is already outside the known range, so the backend is
// never asked for the ledger at all before "lgrNotFound" is returned.
TEST_F(RPCAccountMPTokensHandlerTest, LedgerSeqOutOfRangeByIndex)
{
    // No need to check from db, call fetchLedgerBySequence 0 times
    EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);

    auto const request = json::parse(
        fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_index": "31"
            }})JSON",
            kACCOUNT
        )
    );

    runSpawn([&, this](auto yield) {
        auto const handler = AnyHandler{AccountMPTokensHandler{backend_}};
        auto const result = handler.process(request, Context{yield});
        ASSERT_FALSE(result);
        auto const errorJson = rpc::makeError(result.result.error());
        EXPECT_EQ(errorJson.at("error").as_string(), "lgrNotFound");
        EXPECT_EQ(errorJson.at("error_message").as_string(), "ledgerNotFound");
    });
}
// The ledger exists but the account object does not: expect "actNotFound".
TEST_F(RPCAccountMPTokensHandlerTest, NonExistAccount)
{
    auto const header = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillOnce(Return(header));
    // fetch account object return empty
    EXPECT_CALL(*backend_, doFetchLedgerObject).WillOnce(Return(std::optional<Blob>{}));

    auto const request = json::parse(
        fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_hash": "{}"
            }})JSON",
            kACCOUNT,
            kLEDGER_HASH
        )
    );

    runSpawn([&, this](auto yield) {
        auto const handler = AnyHandler{AccountMPTokensHandler{backend_}};
        auto const result = handler.process(request, Context{yield});
        ASSERT_FALSE(result);
        auto const errorJson = rpc::makeError(result.result.error());
        EXPECT_EQ(errorJson.at("error").as_string(), "actNotFound");
        EXPECT_EQ(errorJson.at("error_message").as_string(), "Account not found.");
    });
}
// With only "account" supplied, the handler applies kLIMIT_DEFAULT and
// returns both tokens referenced by the owner directory.
TEST_F(RPCAccountMPTokensHandlerTest, DefaultParameters)
{
    auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(ledgerHeader));
    auto account = getAccountIdWithString(kACCOUNT);
    auto accountKk = ripple::keylet::account(account).key;
    auto owneDirKk = ripple::keylet::ownerDir(account).key;
    // Account object exists; owner directory references both token indexes.
    ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
    ripple::STObject const ownerDir =
        createOwnerDirLedgerObject({ripple::uint256{kTOKEN_INDEX1}, ripple::uint256{kTOKEN_INDEX2}}, kTOKEN_INDEX1);
    ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, _, _)).WillByDefault(Return(ownerDir.getSerializer().peekData()));
    std::vector<Blob> bbs;
    // Token 1: locked flag + locked amount; token 2: authorized flag only.
    auto const token1 = createMpTokenObject(
        kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), kTOKEN1_AMOUNT, ripple::lsfMPTLocked, kTOKEN1_LOCKED_AMOUNT
    );
    auto const token2 = createMpTokenObject(
        kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), kTOKEN2_AMOUNT, ripple::lsfMPTAuthorized, std::nullopt
    );
    bbs.push_back(token1.getSerializer().peekData());
    bbs.push_back(token2.getSerializer().peekData());
    EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
    runSpawn([this](auto yield) {
        // Expected response uses the default limit and the canned token JSON.
        auto const expected = fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_hash": "{}",
                "ledger_index": 30,
                "validated": true,
                "limit": {},
                "mptokens": [
                    {},
                    {}
                ]
            }})JSON",
            kACCOUNT,
            kLEDGER_HASH,
            AccountMPTokensHandler::kLIMIT_DEFAULT,
            kTOKEN_OUT1,
            kTOKEN_OUT2
        );
        auto const input = json::parse(fmt::format(R"JSON({{"account": "{}"}})JSON", kACCOUNT));
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ(json::parse(expected), *output.result);
    });
}
// A limit inside [kLIMIT_MIN, kLIMIT_MAX] is honoured as-is (with a marker
// when entries remain), while out-of-range limits are clamped to the bound.
TEST_F(RPCAccountMPTokensHandlerTest, UseLimit)
{
    constexpr int kLIMIT = 20;
    auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(ledgerHeader));
    auto account = getAccountIdWithString(kACCOUNT);
    auto accountKk = ripple::keylet::account(account).key;
    auto owneDirKk = ripple::keylet::ownerDir(account).key;
    ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
    // Directory holds 50 token references and advertises a next page (99),
    // so a limit of 20 leaves entries behind and forces a marker.
    std::vector<ripple::uint256> indexes;
    std::vector<Blob> bbs;
    for (int i = 0; i < 50; ++i) {
        indexes.emplace_back(kTOKEN_INDEX1);
        auto const token = createMpTokenObject(kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), i, 0, std::nullopt);
        bbs.push_back(token.getSerializer().peekData());
    }
    ripple::STObject ownerDir = createOwnerDirLedgerObject(indexes, kTOKEN_INDEX1);
    ownerDir.setFieldU64(ripple::sfIndexNext, 99);
    ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, _, _)).WillByDefault(Return(ownerDir.getSerializer().peekData()));
    // Call counts cover all three runSpawn scenarios below combined.
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(7);
    ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs));
    EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(3);
    // Scenario 1: in-range limit is used verbatim; a marker of the form
    // "<index>,<page>" is emitted.
    runSpawn([this, kLIMIT](auto yield) {
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {}
                }})JSON",
                kACCOUNT,
                kLIMIT
            )
        );
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        auto const resultJson = (*output.result).as_object();
        EXPECT_EQ(resultJson.at("mptokens").as_array().size(), kLIMIT);
        ASSERT_TRUE(resultJson.contains("marker"));
        EXPECT_THAT(boost::json::value_to<std::string>(resultJson.at("marker")), EndsWith(",0"));
    });
    // Scenario 2: limit below minimum is clamped up to kLIMIT_MIN.
    runSpawn([this](auto yield) {
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {}
                }})JSON",
                kACCOUNT,
                AccountMPTokensHandler::kLIMIT_MIN - 1
            )
        );
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ((*output.result).as_object().at("limit").as_uint64(), AccountMPTokensHandler::kLIMIT_MIN);
    });
    // Scenario 3: limit above maximum is clamped down to kLIMIT_MAX.
    runSpawn([this](auto yield) {
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {}
                }})JSON",
                kACCOUNT,
                AccountMPTokensHandler::kLIMIT_MAX + 1
            )
        );
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ((*output.result).as_object().at("limit").as_uint64(), AccountMPTokensHandler::kLIMIT_MAX);
    });
}
// When the first directory page fills the limit exactly and a next page
// exists, the response carries a marker of the form "<index>,<next page>".
TEST_F(RPCAccountMPTokensHandlerTest, MarkerOutput)
{
    constexpr auto kNEXT_PAGE = 99;
    constexpr auto kLIMIT = 15;
    auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
    auto account = getAccountIdWithString(kACCOUNT);
    auto accountKk = ripple::keylet::account(account).key;
    auto ownerDirKk = ripple::keylet::ownerDir(account).key;
    auto ownerDir2Kk = ripple::keylet::page(ripple::keylet::ownerDir(account), kNEXT_PAGE).key;
    ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
    // Exactly kLIMIT token blobs will be returned by the bulk fetch.
    std::vector<Blob> bbs;
    bbs.reserve(kLIMIT);
    for (int i = 0; i < kLIMIT; ++i) {
        bbs.push_back(createMpTokenObject(kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), i, 0, std::nullopt)
                          .getSerializer()
                          .peekData());
    }
    EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
    // First page: 10 entries, chained to page kNEXT_PAGE.
    std::vector<ripple::uint256> indexes1;
    indexes1.reserve(10);
    for (int i = 0; i < 10; ++i) {
        indexes1.emplace_back(kTOKEN_INDEX1);
    }
    ripple::STObject ownerDir1 = createOwnerDirLedgerObject(indexes1, kTOKEN_INDEX1);
    ownerDir1.setFieldU64(ripple::sfIndexNext, kNEXT_PAGE);
    ON_CALL(*backend_, doFetchLedgerObject(ownerDirKk, _, _))
        .WillByDefault(Return(ownerDir1.getSerializer().peekData()));
    // Second page: 10 more entries, no further page.
    ripple::STObject ownerDir2 = createOwnerDirLedgerObject(indexes1, kTOKEN_INDEX2);
    ownerDir2.setFieldU64(ripple::sfIndexNext, 0);
    ON_CALL(*backend_, doFetchLedgerObject(ownerDir2Kk, _, _))
        .WillByDefault(Return(ownerDir2.getSerializer().peekData()));
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3);
    runSpawn([this, kLIMIT, kNEXT_PAGE](auto yield) {
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {}
                }})JSON",
                kACCOUNT,
                kLIMIT
            )
        );
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        auto const& resultJson = (*output.result).as_object();
        EXPECT_EQ(resultJson.at("mptokens").as_array().size(), kLIMIT);
        // Marker points at the entry/page where the next request resumes.
        EXPECT_EQ(
            boost::json::value_to<std::string>(resultJson.at("marker")), fmt::format("{},{}", kTOKEN_INDEX1, kNEXT_PAGE)
        );
    });
}
// Supplying a marker resumes iteration on the referenced directory page; the
// final response then contains no marker. (The test expects kLIMIT - 1
// entries — presumably the marker's own entry is skipped; confirm against
// the handler implementation.)
TEST_F(RPCAccountMPTokensHandlerTest, MarkerInput)
{
    constexpr auto kNEXT_PAGE = 99;
    constexpr auto kLIMIT = 15;
    auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
    auto account = getAccountIdWithString(kACCOUNT);
    auto accountKk = ripple::keylet::account(account).key;
    ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
    // The page named in the marker, with no further pages after it.
    auto ownerDirKk = ripple::keylet::page(ripple::keylet::ownerDir(account), kNEXT_PAGE).key;
    std::vector<Blob> bbs;
    std::vector<ripple::uint256> indexes;
    for (int i = 0; i < kLIMIT; ++i) {
        indexes.emplace_back(kTOKEN_INDEX1);
        bbs.push_back(createMpTokenObject(kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), i, 0, std::nullopt)
                          .getSerializer()
                          .peekData());
    }
    ripple::STObject ownerDir = createOwnerDirLedgerObject(indexes, kTOKEN_INDEX1);
    ownerDir.setFieldU64(ripple::sfIndexNext, 0);
    ON_CALL(*backend_, doFetchLedgerObject(ownerDirKk, _, _))
        .WillByDefault(Return(ownerDir.getSerializer().peekData()));
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3);
    EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
    runSpawn([this, kLIMIT, kNEXT_PAGE](auto yield) {
        // Marker format is "<index>,<page>", matching MarkerOutput's output.
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {},
                    "marker": "{},{}"
                }})JSON",
                kACCOUNT,
                kLIMIT,
                kTOKEN_INDEX1,
                kNEXT_PAGE
            )
        );
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        auto const& resultJson = (*output.result).as_object();
        EXPECT_TRUE(resultJson.if_contains("marker") == nullptr);
        EXPECT_EQ(resultJson.at("mptokens").as_array().size(), kLIMIT - 1);
    });
}
// A "limit" below kLIMIT_MIN is clamped up to kLIMIT_MIN; the clamped value
// is echoed in the "limit" field and both tokens are still returned.
TEST_F(RPCAccountMPTokensHandlerTest, LimitLessThanMin)
{
    auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
    auto account = getAccountIdWithString(kACCOUNT);
    auto accountKk = ripple::keylet::account(account).key;
    auto owneDirKk = ripple::keylet::ownerDir(account).key;
    // Account object exists; owner directory references both token indexes.
    ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
    ripple::STObject const ownerDir =
        createOwnerDirLedgerObject({ripple::uint256{kTOKEN_INDEX1}, ripple::uint256{kTOKEN_INDEX2}}, kTOKEN_INDEX1);
    ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, _, _)).WillByDefault(Return(ownerDir.getSerializer().peekData()))
;
    // Two single-object fetches: the account object and the directory page.
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2);
    std::vector<Blob> bbs;
    auto const token1 = createMpTokenObject(
        kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), kTOKEN1_AMOUNT, ripple::lsfMPTLocked, kTOKEN1_LOCKED_AMOUNT
    );
    bbs.push_back(token1.getSerializer().peekData());
    auto const token2 = createMpTokenObject(
        kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), kTOKEN2_AMOUNT, ripple::lsfMPTAuthorized, std::nullopt
    );
    bbs.push_back(token2.getSerializer().peekData());
    EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
    runSpawn([this](auto yield) {
        // Request a limit one below the minimum.
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {}
                }})JSON",
                kACCOUNT,
                AccountMPTokensHandler::kLIMIT_MIN - 1
            )
        );
        // Expected output reports the clamped (minimum) limit.
        auto const correctOutput = fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_hash": "{}",
                "ledger_index": 30,
                "validated": true,
                "limit": {},
                "mptokens": [
                    {},
                    {}
                ]
            }})JSON",
            kACCOUNT,
            kLEDGER_HASH,
            AccountMPTokensHandler::kLIMIT_MIN,
            kTOKEN_OUT1,
            kTOKEN_OUT2
        );
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ(json::parse(correctOutput), *output.result);
    });
}
// A "limit" above kLIMIT_MAX is clamped down to kLIMIT_MAX; the clamped value
// is echoed in the "limit" field and both tokens are returned.
TEST_F(RPCAccountMPTokensHandlerTest, LimitMoreThanMax)
{
    auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));
    auto account = getAccountIdWithString(kACCOUNT);
    auto accountKk = ripple::keylet::account(account).key;
    auto owneDirKk = ripple::keylet::ownerDir(account).key;
    // Account object exists; owner directory references both token indexes.
    ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
    ripple::STObject const ownerDir =
        createOwnerDirLedgerObject({ripple::uint256{kTOKEN_INDEX1}, ripple::uint256{kTOKEN_INDEX2}}, kTOKEN_INDEX1);
    ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, _, _)).WillByDefault(Return(ownerDir.getSerializer().peekData()));
    // Two single-object fetches: the account object and the directory page.
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2);
    std::vector<Blob> bbs;
    auto const token1 = createMpTokenObject(
        kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), kTOKEN1_AMOUNT, ripple::lsfMPTLocked, kTOKEN1_LOCKED_AMOUNT
    );
    bbs.push_back(token1.getSerializer().peekData());
    auto const token2 = createMpTokenObject(
        kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), kTOKEN2_AMOUNT, ripple::lsfMPTAuthorized, std::nullopt
    );
    bbs.push_back(token2.getSerializer().peekData());
    EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
    runSpawn([this](auto yield) {
        // Request a limit one above the maximum.
        auto const input = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}",
                    "limit": {}
                }})JSON",
                kACCOUNT,
                AccountMPTokensHandler::kLIMIT_MAX + 1
            )
        );
        // Expected output reports the clamped (maximum) limit.
        auto const correctOutput = fmt::format(
            R"JSON({{
                "account": "{}",
                "ledger_hash": "{}",
                "ledger_index": 30,
                "validated": true,
                "limit": {},
                "mptokens": [
                    {},
                    {}
                ]
            }})JSON",
            kACCOUNT,
            kLEDGER_HASH,
            AccountMPTokensHandler::kLIMIT_MAX,
            kTOKEN_OUT1,
            kTOKEN_OUT2
        );
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(input, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ(json::parse(correctOutput), *output.result);
    });
}
// An account whose owner directory holds no token entries yields an empty
// "mptokens" array rather than an error.
TEST_F(RPCAccountMPTokensHandlerTest, EmptyResult)
{
    auto const ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
    EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader));

    auto const accountId = getAccountIdWithString(kACCOUNT);
    auto const accountKey = ripple::keylet::account(accountId).key;
    auto const ownerDirKey = ripple::keylet::ownerDir(accountId).key;

    // The account object exists (contents irrelevant)...
    ON_CALL(*backend_, doFetchLedgerObject(accountKey, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));

    // ...but the owner directory contains no entries.
    ripple::STObject const emptyOwnerDir = createOwnerDirLedgerObject({}, kTOKEN_INDEX1);
    ON_CALL(*backend_, doFetchLedgerObject(ownerDirKey, _, _))
        .WillByDefault(Return(emptyOwnerDir.getSerializer().peekData()));
    EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2);

    runSpawn([this](auto yield) {
        auto const request = json::parse(
            fmt::format(
                R"JSON({{
                    "account": "{}"
                }})JSON",
                kACCOUNT
            )
        );
        auto handler = AnyHandler{AccountMPTokensHandler{this->backend_}};
        auto const output = handler.process(request, Context{yield});
        ASSERT_TRUE(output);
        EXPECT_EQ((*output.result).as_object().at("mptokens").as_array().size(), 0);
    });
}

View File

@@ -1625,8 +1625,7 @@ TEST_F(RPCAccountTxHandlerTest, MPTTxs_API_v2)
}}
],
"TransactionIndex": 0,
"TransactionResult": "tesSUCCESS",
"mpt_issuance_id": "000000014B4E9C06F24296074F7BC48F92A97916C6DC5EA9"
"TransactionResult": "tesSUCCESS"
}},
"hash": "A52221F4003C281D3C83F501F418B55A1F9DC1C6A129EF13E1A8F0E5C008DAE3",
"ledger_index": 11,

View File

@@ -23,8 +23,6 @@
#include "rpc/handlers/AccountCurrencies.hpp"
#include "rpc/handlers/AccountInfo.hpp"
#include "rpc/handlers/AccountLines.hpp"
#include "rpc/handlers/AccountMPTokenIssuances.hpp"
#include "rpc/handlers/AccountMPTokens.hpp"
#include "rpc/handlers/AccountNFTs.hpp"
#include "rpc/handlers/AccountObjects.hpp"
#include "rpc/handlers/AccountOffers.hpp"
@@ -88,8 +86,6 @@ using AnyHandlerType = Types<
AccountCurrenciesHandler,
AccountInfoHandler,
AccountLinesHandler,
AccountMPTokenIssuancesHandler,
AccountMPTokensHandler,
AccountNFTsHandler,
AccountObjectsHandler,
AccountOffersHandler,

View File

@@ -2311,23 +2311,11 @@ struct IndexTest : public HandlerBaseTest, public WithParamInterface<std::string
};
};
// content of index, amendments, check, fee, hashes, nft_offer, nunl, nft_page, payment_channel, signer_list fields is
// ledger index.
// content of index, payment_channel, nft_page and check fields is ledger index.
INSTANTIATE_TEST_CASE_P(
RPCLedgerEntryGroup3,
IndexTest,
Values(
"index",
"amendments",
"check",
"fee",
"hashes",
"nft_offer",
"nunl",
"nft_page",
"payment_channel",
"signer_list"
),
Values("index", "nft_page", "payment_channel", "check"),
IndexTest::NameGenerator{}
);
@@ -3756,6 +3744,7 @@ TEST_F(RPCLedgerEntryTest, SyntheticMPTIssuanceID)
"Issuer": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"LedgerEntryType": "MPTokenIssuance",
"MPTokenMetadata": "6D65746164617461",
"MaximumAmount": "0",
"OutstandingAmount": "0",
"OwnerNode": "0",
"PreviousTxnID": "0000000000000000000000000000000000000000000000000000000000000000",

View File

@@ -309,6 +309,7 @@ TEST_F(RPCVaultInfoHandlerTest, ValidVaultObjectQueryByVaultID)
"Issuer": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"LedgerEntryType": "MPTokenIssuance",
"MPTokenMetadata": "6D65746164617461",
"MaximumAmount": "0",
"OutstandingAmount": "0",
"OwnerNode": "0",
"PreviousTxnID": "0000000000000000000000000000000000000000000000000000000000000000",
@@ -392,6 +393,7 @@ TEST_F(RPCVaultInfoHandlerTest, ValidVaultObjectQueryByOwnerAndSeq)
"Issuer": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"LedgerEntryType": "MPTokenIssuance",
"MPTokenMetadata": "6D65746164617461",
"MaximumAmount": "0",
"OutstandingAmount": "0",
"OwnerNode": "0",
"PreviousTxnID": "0000000000000000000000000000000000000000000000000000000000000000",

View File

@@ -25,7 +25,6 @@
#include <spdlog/spdlog.h>
#include <cstddef>
#include <memory>
#include <string>
using namespace util;
@@ -94,11 +93,11 @@ TEST_F(LoggerTest, ManyDynamicLoggers)
for (size_t i = 0; i < kNUM_LOGGERS; ++i) {
std::string const loggerName = "DynamicLogger" + std::to_string(i);
Logger const log{loggerName};
Logger log{loggerName};
log.info() << "Logger number " << i;
ASSERT_EQ(getLoggerString(), "inf:" + loggerName + " - Logger number " + std::to_string(i) + "\n");
Logger const copy = log;
Logger copy = log;
copy.info() << "Copy of logger number " << i;
ASSERT_EQ(getLoggerString(), "inf:" + loggerName + " - Copy of logger number " + std::to_string(i) + "\n");
}

View File

@@ -37,16 +37,15 @@
#include <boost/asio/io_context.hpp>
#include <boost/asio/post.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/asio/steady_timer.hpp>
#include <boost/beast/core/flat_buffer.hpp>
#include <boost/beast/http/status.hpp>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <memory>
#include <mutex>
#include <ranges>
#include <string>
#include <thread>
@@ -115,33 +114,24 @@ TEST_F(WebWsConnectionTests, DisconnectClientOnInactivity)
auto work = boost::asio::make_work_guard(clientCtx);
std::thread clientThread{[&clientCtx]() { clientCtx.run(); }};
std::mutex mutex;
std::condition_variable cv;
bool finished{false};
util::spawn(clientCtx, [&](boost::asio::yield_context yield) {
util::spawn(clientCtx, [&work, this](boost::asio::yield_context yield) {
auto expectedSuccess =
wsClient_.connect("localhost", httpServer_.port(), yield, std::chrono::milliseconds{100});
[&]() { ASSERT_TRUE(expectedSuccess.has_value()) << expectedSuccess.error().message(); }();
std::unique_lock lock{mutex};
// Wait for 2 seconds to not block the test infinitely in case of failure
auto const gotNotified = cv.wait_for(lock, std::chrono::seconds{2}, [&finished]() { return finished; });
[&]() { EXPECT_TRUE(gotNotified); }();
boost::asio::steady_timer timer{yield.get_executor(), std::chrono::milliseconds{5}};
timer.async_wait(yield);
work.reset();
});
runSpawn([&, this](boost::asio::yield_context yield) {
runSpawn([this](boost::asio::yield_context yield) {
auto wsConnection = acceptConnection(yield);
wsConnection->setTimeout(std::chrono::milliseconds{1});
// Client will not respond to pings because there is no reading operation scheduled for it.
auto const receivedMessage = wsConnection->receive(yield);
{
std::unique_lock const lock{mutex};
finished = true;
cv.notify_one();
}
auto const start = std::chrono::steady_clock::now();
auto const receivedMessage = wsConnection->receive(yield);
auto const end = std::chrono::steady_clock::now();
EXPECT_LT(end - start, std::chrono::milliseconds{4}); // Should be 2 ms, double it in case of slow CI.
EXPECT_FALSE(receivedMessage.has_value());
EXPECT_EQ(receivedMessage.error().value(), boost::asio::error::no_permission);