Compare commits

..

3 Commits

Author SHA1 Message Date
Bronek Kozicki e172510c4f Fixes 2025-09-02 21:28:20 +01:00
Bronek Kozicki 3a2e26c5c0 Remove unnecessary inputs 2025-09-02 20:14:47 +01:00
Bronek Kozicki 6bd3cc054a Add build-selected-commit workflow 2025-09-02 20:10:36 +01:00
58 changed files with 836 additions and 3844 deletions

View File

@@ -1,5 +1,7 @@
# This action installs and optionally uploads Conan dependencies to a remote
# repository. The dependencies will only be uploaded if the credentials are
# provided.
name: Build Conan dependencies
description: "Install Conan dependencies, optionally forcing a rebuild of all dependencies."
# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
@@ -10,10 +12,28 @@ inputs:
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
conan_remote_name:
description: "The name of the Conan remote to use."
required: true
conan_remote_url:
description: "The URL of the Conan endpoint to use."
required: true
conan_remote_username:
description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
required: false
default: ""
conan_remote_password:
description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
required: false
default: ""
force_build:
description: 'Force building of all dependencies ("true", "false").'
required: false
default: "false"
force_upload:
description: 'Force uploading of all dependencies ("true", "false").'
required: false
default: "false"
runs:
using: composite
@@ -30,4 +50,13 @@ runs:
--options:host '&:tests=True' \
--options:host '&:xrpld=True' \
--settings:all build_type=${{ inputs.build_type }} \
..
--format=json ..
- name: Upload Conan dependencies
if: ${{ inputs.conan_remote_username != '' && inputs.conan_remote_password != '' }}
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo "Logging into Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
conan remote login ${{ inputs.conan_remote_name }} "${{ inputs.conan_remote_username }}" --password "${{ inputs.conan_remote_password }}"
echo 'Uploading dependencies.'
conan upload '*' --confirm --check ${{ inputs.force_upload == 'true' && '--force' || '' }} --remote=${{ inputs.conan_remote_name }}
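The upload step above only runs when both credentials are provided. As a minimal sketch of that gating, assuming shell variables CONAN_REMOTE_NAME, CONAN_REMOTE_USERNAME and CONAN_REMOTE_PASSWORD stand in for the action inputs:

```bash
# Sketch of the conditional upload the build-deps action performs; the
# CONAN_REMOTE_* variables here mirror the action inputs.
if [[ -n "${CONAN_REMOTE_USERNAME}" && -n "${CONAN_REMOTE_PASSWORD}" ]]; then
  conan remote login "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_USERNAME}" \
    --password "${CONAN_REMOTE_PASSWORD}"
  conan upload '*' --confirm --check --remote="${CONAN_REMOTE_NAME}"
else
  echo 'Conan credentials not provided; skipping upload.'
fi
```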

View File

@@ -1,7 +1,6 @@
# This action builds and tests the binary. The Conan dependencies must have
# already been installed (see the build-deps action).
name: Build and Test
description: "Build and test the binary."
# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.

View File

@@ -1,43 +0,0 @@
name: Setup Conan
description: "Set up Conan configuration, profile, and remote."
inputs:
conan_remote_name:
description: "The name of the Conan remote to use."
required: false
default: xrplf
conan_remote_url:
description: "The URL of the Conan endpoint to use."
required: false
default: https://conan.ripplex.io
runs:
using: composite
steps:
- name: Set up Conan configuration
shell: bash
run: |
echo 'Installing configuration.'
cat conan/global.conf ${{ runner.os == 'Linux' && '>>' || '>' }} $(conan config home)/global.conf
echo 'Conan configuration:'
conan config show '*'
- name: Set up Conan profile
shell: bash
run: |
echo 'Installing profile.'
conan config install conan/profiles/default -tf $(conan config home)/profiles/
echo 'Conan profile:'
conan profile show
- name: Set up Conan remote
shell: bash
run: |
echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
echo 'Listing Conan remotes.'
conan remote list

.github/scripts/strategy-matrix/generate.py vendored Executable file → Normal file (47 changed lines)
View File

@@ -2,17 +2,7 @@
import argparse
import itertools
import json
from pathlib import Path
from dataclasses import dataclass
THIS_DIR = Path(__file__).parent.resolve()
@dataclass
class Config:
architecture: list[dict]
os: list[dict]
build_type: list[str]
cmake_args: list[str]
import re
'''
Generate a strategy matrix for GitHub Actions CI.
@@ -28,9 +18,9 @@ We will further set additional CMake arguments as follows:
- Certain Debian Bookworm configurations will change the reference fee, enable
codecov, and enable voidstar in PRs.
'''
def generate_strategy_matrix(all: bool, config: Config) -> list:
def generate_strategy_matrix(all: bool, architecture: list[dict], os: list[dict], build_type: list[str], cmake_args: list[str]) -> dict:
configurations = []
for architecture, os, build_type, cmake_args in itertools.product(config.architecture, config.os, config.build_type, config.cmake_args):
for architecture, os, build_type, cmake_args in itertools.product(architecture, os, build_type, cmake_args):
# The default CMake target is 'all' for Linux and MacOS and 'install'
# for Windows, but it can get overridden for certain configurations.
cmake_target = 'install' if os["distro_name"] == 'windows' else 'all'
@@ -45,7 +35,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
# Only generate a subset of configurations in PRs.
if not all:
# Debian:
# - Bookworm using GCC 13: Release and Unity on linux/amd64, set
# - Bookworm using GCC 13: Release and Unity on linux/arm64, set
# the reference fee to 500.
# - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
# code coverage (which will be done below).
@@ -57,7 +47,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
if os['distro_name'] == 'debian':
skip = True
if os['distro_version'] == 'bookworm':
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-13' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-13' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/arm64':
cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}'
skip = False
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
@@ -168,30 +158,21 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
'architecture': architecture,
})
return configurations
def read_config(file: Path) -> Config:
config = json.loads(file.read_text())
if config['architecture'] is None or config['os'] is None or config['build_type'] is None or config['cmake_args'] is None:
raise Exception('Invalid configuration file.')
return Config(**config)
return {'include': configurations}
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--all', help='Set to generate all configurations (generally used when merging a PR) or leave unset to generate a subset of configurations (generally used when committing to a PR).', action="store_true")
parser.add_argument('-c', '--config', help='Path to the JSON file containing the strategy matrix configurations.', required=False, type=Path)
parser.add_argument('-c', '--config', help='Path to the JSON file containing the strategy matrix configurations.', required=True, type=str)
args = parser.parse_args()
matrix = []
if args.config is None or args.config == '':
matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "linux.json"))
matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "macos.json"))
matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "windows.json"))
else:
matrix += generate_strategy_matrix(args.all, read_config(args.config))
# Load the JSON configuration file.
config = None
with open(args.config, 'r') as f:
config = json.load(f)
if config['architecture'] is None or config['os'] is None or config['build_type'] is None or config['cmake_args'] is None:
raise Exception('Invalid configuration file.')
# Generate the strategy matrix.
print(f'matrix={json.dumps({"include": matrix})}')
print(f'matrix={json.dumps(generate_strategy_matrix(args.all, config['architecture'], config['os'], config['build_type'], config['cmake_args']))}')
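With the changes above, `--config` is now required and the script prints a single `matrix={"include": [...]}` line intended to be appended to `$GITHUB_OUTPUT`. A minimal local invocation, assuming the JSON configuration files sit next to the script as in the calling workflow:

```bash
cd .github/scripts/strategy-matrix
# Subset of configurations, as used for commits to a PR.
python generate.py --config=linux.json
# Full set of configurations, as used when merging a PR.
python generate.py --all --config=windows.json
```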

View File

@@ -0,0 +1,163 @@
# This workflow builds the binary from the selected commit (no earlier than the 2.5.0 release).
name: Build selected commit
# This workflow can only be triggered manually by a project maintainer.
on:
workflow_dispatch:
inputs:
commit:
description: "Commit to build from."
required: false
type: string
build_container:
description: "Build container image to use"
required: true
type: string
default: ghcr.io/xrplf/ci/debian-bullseye:gcc-12
strip_symbols:
description: "Strip debug symbols"
required: true
type: boolean
default: true
archive_archive:
description: "Archive rippled binary"
required: true
type: boolean
default: false
build_only:
description: "Only build, do not run unit tests"
required: true
type: boolean
default: false
build_type:
description: "Build type (Debug or Release)"
required: true
type: choice
default: Release
options:
- Debug
- Release
cmake_args:
description: "CMake args for build"
required: true
type: string
default: "-Dxrpld=ON -Dtests=ON -Dassert=OFF -Dunity=OFF"
dependencies_force_build:
description: "Force building of all dependencies."
required: false
type: boolean
default: false
env:
CONAN_REMOTE_NAME: xrplf
CONAN_REMOTE_URL: https://conan.ripplex.io
BUILD_DIR: .build
jobs:
build:
runs-on: ["self-hosted", "Linux", "X64", "heavy"]
container: ${{ inputs.build_container }}
steps:
- name: Checkout this workflow
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
sparse-checkout: |
.github
conan
- name: Move workflow files aside
run: |
mkdir -p ${{ runner.temp }}
mv .github conan ${{ runner.temp }}
rm -rf .git
- name: Checkout the commit to build
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
ref: ${{ inputs.commit }}
- name: Restore workflow files
run: |
rm -rf .github conan
mv ${{ runner.temp }}/.github .
mv ${{ runner.temp }}/conan .
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
with:
disable_ccache: true
- name: Check configuration
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'
echo 'Checking environment variables.'
env | sort
echo 'Checking CMake version.'
cmake --version
echo 'Checking compiler version.'
${CC} --version
echo 'Checking Conan version.'
conan --version
echo 'Checking Ninja version.'
ninja --version
echo 'Checking nproc version.'
nproc --version
- name: Set up Conan configuration
run: |
echo 'Installing configuration.'
cat conan/global.conf >> $(conan config home)/global.conf
echo 'Conan configuration:'
conan config show '*'
- name: Set up Conan profile
run: |
echo 'Installing profile.'
conan config install conan/profiles/default -tf $(conan config home)/profiles/
echo 'Conan profile:'
conan profile show
- name: Set up Conan remote
shell: bash
run: |
echo "Adding Conan remote '${{ env.CONAN_REMOTE_NAME }}' at ${{ env.CONAN_REMOTE_URL }}."
conan remote add --index 0 --force ${{ env.CONAN_REMOTE_NAME }} ${{ env.CONAN_REMOTE_URL }}
echo 'Listing Conan remotes.'
conan remote list
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ env.BUILD_DIR }}
build_type: ${{ inputs.build_type }}
conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
force_build: ${{ inputs.dependencies_force_build }}
force_upload: false
- name: Build and test binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ env.BUILD_DIR }}
build_only: ${{ inputs.build_only }}
build_type: ${{ inputs.build_type }}
cmake_args: ${{ inputs.cmake_args }}
cmake_target: "all"
os: "linux"
- name: Strip symbols
if: ${{ inputs.strip_symbols }}
run: |
strip -D --strip-unneeded ${{ env.BUILD_DIR }}/rippled
${{ env.BUILD_DIR }}/rippled --version
- name: Move the binary
run: |
mv ${{ env.BUILD_DIR }}/rippled .
- name: Archive rippled binary
if: ${{ inputs.archive_archive }}
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: rippled
path: ./rippled
retention-days: 90
compression-level: 8
overwrite: true
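Because the workflow is dispatch-only, it can be started from the GitHub CLI. A hedged sketch with placeholder values, assuming the file is saved as `.github/workflows/build-selected-commit.yml` (the path is not shown in this diff):

```bash
gh workflow run build-selected-commit.yml \
  --ref develop \
  -f commit=<commit-sha-or-tag> \
  -f build_container=ghcr.io/xrplf/ci/debian-bullseye:gcc-12 \
  -f build_type=Release \
  -f strip_symbols=true \
  -f build_only=false
```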

View File

@@ -13,6 +13,14 @@ on:
required: false
type: string
default: ".build"
conan_remote_name:
description: "The name of the Conan remote to use."
required: true
type: string
conan_remote_url:
description: "The URL of the Conan endpoint to use."
required: true
type: string
dependencies_force_build:
description: "Force building of all dependencies."
required: false
@@ -37,6 +45,12 @@ on:
codecov_token:
description: "The Codecov token to use for uploading coverage reports."
required: false
conan_remote_username:
description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
required: false
conan_remote_password:
description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
required: false
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.os }}
@@ -49,10 +63,20 @@ defaults:
jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
os: ${{ inputs.os }}
strategy_matrix: ${{ inputs.strategy_matrix }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: 3.13
- name: Generate strategy matrix
working-directory: .github/scripts/strategy-matrix
id: generate
run: python generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} --config=${{ inputs.os }}.json >> "${GITHUB_OUTPUT}"
outputs:
matrix: ${{ steps.generate.outputs.matrix }}
# Build and test the binary.
build-test:
@@ -78,16 +102,21 @@ jobs:
echo 'CMake target: ${{ matrix.cmake_target }}'
echo 'Config name: ${{ matrix.config_name }}'
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
- name: Clean workspace (MacOS)
if: ${{ inputs.os == 'macos' }}
run: |
WORKSPACE=${{ github.workspace }}
echo "Cleaning workspace '${WORKSPACE}'."
if [ -z "${WORKSPACE}" ] || [ "${WORKSPACE}" = "/" ]; then
echo "Invalid working directory '${WORKSPACE}'."
exit 1
fi
find "${WORKSPACE}" -depth 1 | xargs rm -rfv
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
with:
disable_ccache: false
- name: Check configuration (Windows)
if: ${{ inputs.os == 'windows' }}
@@ -124,16 +153,40 @@ jobs:
echo 'Checking nproc version.'
nproc --version
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Set up Conan configuration
run: |
echo 'Installing configuration.'
cat conan/global.conf ${{ inputs.os == 'linux' && '>>' || '>' }} $(conan config home)/global.conf
echo 'Conan configuration:'
conan config show '*'
- name: Set up Conan profile
run: |
echo 'Installing profile.'
conan config install conan/profiles/default -tf $(conan config home)/profiles/
echo 'Conan profile:'
conan profile show
- name: Set up Conan remote
shell: bash
run: |
echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
echo 'Listing Conan remotes.'
conan remote list
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
conan_remote_username: ${{ secrets.conan_remote_username }}
conan_remote_password: ${{ secrets.conan_remote_password }}
force_build: ${{ inputs.dependencies_force_build }}
force_upload: ${{ inputs.dependencies_force_upload }}
- name: Build and test binary
uses: ./.github/actions/build-test
with:

.github/workflows/check-format.yml vendored Normal file (75 changed lines)
View File

@@ -0,0 +1,75 @@
# This workflow checks if the code is properly formatted.
name: Check format
# This workflow can only be triggered by other workflows.
on: workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-format
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
pre-commit:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-pre-commit
steps:
# The $GITHUB_WORKSPACE and ${{ github.workspace }} might not point to the
# same directory for jobs running in containers. The actions/checkout step
# is *supposed* to check out into $GITHUB_WORKSPACE and then add it to
# safe.directory (see instructions at https://github.com/actions/checkout)
# but that is apparently not happening for some container images. We
# therefore preemptively add both directories to safe.directory. See also
# https://github.com/actions/runner/issues/2058 for more details.
- name: Configure git safe.directory
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory ${{ github.workspace }}
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Check configuration
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'
echo 'Checking environment variables.'
env | sort
echo 'Checking pre-commit version.'
pre-commit --version
echo 'Checking clang-format version.'
clang-format --version
echo 'Checking NPM version.'
npm --version
echo 'Checking Node.js version.'
node --version
echo 'Checking prettier version.'
prettier --version
- name: Format code
run: pre-commit run --show-diff-on-failure --color=always --all-files
- name: Check for differences
env:
MESSAGE: |
One or more files did not conform to the formatting. Maybe you did
not run 'pre-commit' before committing, or your version of
'clang-format' or 'prettier' has an incompatibility with the ones
used here (see the "Check configuration" step above).
Run 'pre-commit run --all-files' in your repo, and then commit and
push the changes.
run: |
DIFF=$(git status --porcelain)
if [ -n "${DIFF}" ]; then
# Print the files that changed to give the contributor a hint about
# what to expect when running pre-commit on their own machine.
git status
echo "${MESSAGE}"
exit 1
fi

View File

@@ -9,14 +9,12 @@ on:
inputs:
conan_remote_name:
description: "The name of the Conan remote to use."
required: false
required: true
type: string
default: xrplf
conan_remote_url:
description: "The URL of the Conan endpoint to use."
required: false
required: true
type: string
default: https://conan.ripplex.io
secrets:
clio_notify_token:
description: "The GitHub token to notify Clio about new versions."
@@ -52,25 +50,21 @@ jobs:
echo "channel=pr_${{ github.event.pull_request.number }}" >> "${GITHUB_OUTPUT}"
echo 'Extracting version.'
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
- name: Calculate conan reference
id: conan_ref
- name: Add Conan remote
run: |
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
- name: Set up Conan
uses: ./.github/actions/setup-conan
with:
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
echo 'Listing Conan remotes.'
conan remote list
- name: Log into Conan remote
run: conan remote login ${{ inputs.conan_remote_name }} "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
- name: Upload package
run: |
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.conan_ref.outputs.conan_ref }}
conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}
outputs:
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}
channel: ${{ steps.generate.outputs.channel }}
version: ${{ steps.generate.outputs.version }}
notify:
needs: upload
@@ -82,5 +76,5 @@ jobs:
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
-F "client_payload[pr_url]=${{ github.event.pull_request.html_url }}"
-F "client_payload[version]=${{ needs.upload.outputs.version }}@${{ needs.upload.outputs.user }}/${{ needs.upload.outputs.channel }}" \
-F "client_payload[pr]=${{ github.event.pull_request.number }}"

View File

@@ -23,6 +23,10 @@ defaults:
run:
shell: bash
env:
CONAN_REMOTE_NAME: xrplf
CONAN_REMOTE_URL: https://conan.ripplex.io
jobs:
# This job determines whether the rest of the workflow should run. It runs
# when the PR is not a draft (which should also cover merge-group) or
@@ -50,17 +54,18 @@ jobs:
files: |
# These paths are unique to `on-pr.yml`.
.github/scripts/levelization/**
.github/workflows/check-format.yml
.github/workflows/check-levelization.yml
.github/workflows/notify-clio.yml
.github/workflows/on-pr.yml
.clang-format
.pre-commit-config.yaml
# Keep the paths below in sync with those in `on-trigger.yml`.
.github/actions/build-deps/**
.github/actions/build-test/**
.github/actions/setup-conan/**
.github/scripts/strategy-matrix/**
.github/workflows/build-test.yml
.github/workflows/reusable-strategy-matrix.yml
.codecov.yml
cmake/**
conan/**
@@ -70,7 +75,6 @@ jobs:
tests/**
CMakeLists.txt
conanfile.py
conan.lock
- name: Check whether to run
# This step determines whether the rest of the workflow should
# run. The rest of the workflow will run if this job runs AND at
@@ -90,40 +94,61 @@ jobs:
outputs:
go: ${{ steps.go.outputs.go == 'true' }}
check-format:
needs: should-run
if: needs.should-run.outputs.go == 'true'
uses: ./.github/workflows/check-format.yml
check-levelization:
needs: should-run
if: needs.should-run.outputs.go == 'true'
uses: ./.github/workflows/check-levelization.yml
build-test:
# This job works around the limitation that GitHub Actions does not support
# using environment variables as inputs for reusable workflows.
generate-outputs:
needs: should-run
if: needs.should-run.outputs.go == 'true'
runs-on: ubuntu-latest
steps:
- name: No-op
run: true
outputs:
conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
build-test:
needs: generate-outputs
uses: ./.github/workflows/build-test.yml
strategy:
matrix:
os: [linux, macos, windows]
with:
conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
os: ${{ matrix.os }}
secrets:
codecov_token: ${{ secrets.CODECOV_TOKEN }}
notify-clio:
needs:
- should-run
- generate-outputs
- build-test
if: needs.should-run.outputs.go == 'true'
uses: ./.github/workflows/notify-clio.yml
with:
conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
secrets:
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
passed:
if: failure() || cancelled()
needs:
- build-test
- check-format
- check-levelization
runs-on: ubuntu-latest
steps:
- name: Fail
run: false
- name: No-op
run: true

View File

@@ -21,10 +21,8 @@ on:
# Keep the paths below in sync with those in `on-pr.yml`.
- ".github/actions/build-deps/**"
- ".github/actions/build-test/**"
- ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**"
- ".github/workflows/build-test.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".codecov.yml"
- "cmake/**"
- "conan/**"
@@ -34,7 +32,6 @@ on:
- "tests/**"
- "CMakeLists.txt"
- "conanfile.py"
- "conan.lock"
# Run at 06:32 UTC on every day of the week from Monday through Friday. This
# will force all dependencies to be rebuilt, which is useful to verify that
@@ -68,18 +65,54 @@ defaults:
run:
shell: bash
env:
CONAN_REMOTE_NAME: xrplf
CONAN_REMOTE_URL: https://conan.ripplex.io
jobs:
check-missing-commits:
if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
uses: ./.github/workflows/check-missing-commits.yml
# This job works around the limitation that GitHub Actions does not support
# using environment variables as inputs for reusable workflows. It also sets
# outputs that depend on the event that triggered the workflow.
generate-outputs:
runs-on: ubuntu-latest
steps:
- name: Check inputs and set outputs
id: generate
run: |
if [[ '${{ github.event_name }}' == 'push' ]]; then
echo 'dependencies_force_build=false' >> "${GITHUB_OUTPUT}"
echo 'dependencies_force_upload=false' >> "${GITHUB_OUTPUT}"
elif [[ '${{ github.event_name }}' == 'schedule' ]]; then
echo 'dependencies_force_build=true' >> "${GITHUB_OUTPUT}"
echo 'dependencies_force_upload=false' >> "${GITHUB_OUTPUT}"
else
echo 'dependencies_force_build=${{ inputs.dependencies_force_build }}' >> "${GITHUB_OUTPUT}"
echo 'dependencies_force_upload=${{ inputs.dependencies_force_upload }}' >> "${GITHUB_OUTPUT}"
fi
outputs:
conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
dependencies_force_build: ${{ steps.generate.outputs.dependencies_force_build }}
dependencies_force_upload: ${{ steps.generate.outputs.dependencies_force_upload }}
build-test:
needs: generate-outputs
uses: ./.github/workflows/build-test.yml
strategy:
matrix:
os: [linux, macos, windows]
with:
conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
dependencies_force_build: ${{ needs.generate-outputs.outputs.dependencies_force_build == 'true' }}
dependencies_force_upload: ${{ needs.generate-outputs.outputs.dependencies_force_upload == 'true' }}
os: ${{ matrix.os }}
strategy_matrix: "minimal"
strategy_matrix: "all"
secrets:
codecov_token: ${{ secrets.CODECOV_TOKEN }}
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

View File

@@ -1,14 +0,0 @@
name: Run pre-commit hooks
on:
pull_request:
push:
branches: [develop, release, master]
workflow_dispatch:
jobs:
run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@af1b0f0d764cda2e5435f5ac97b240d4bd4d95d3
with:
runs_on: ubuntu-latest
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit" }'

View File

@@ -1,38 +0,0 @@
name: Generate strategy matrix
on:
workflow_call:
inputs:
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: false
type: string
strategy_matrix:
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
required: false
type: string
default: "minimal"
outputs:
matrix:
description: "The generated strategy matrix."
value: ${{ jobs.generate-matrix.outputs.matrix }}
jobs:
generate-matrix:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.generate.outputs.matrix }}
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: 3.13
- name: Generate strategy matrix
working-directory: .github/scripts/strategy-matrix
id: generate
run: ./generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }} >> "${GITHUB_OUTPUT}"

View File

@@ -1,84 +0,0 @@
name: Upload Conan Dependencies
on:
schedule:
- cron: "0 3 * * 2-6"
workflow_dispatch:
inputs:
force_source_build:
description: "Force source build of all dependencies"
required: false
default: false
type: boolean
force_upload:
description: "Force upload of all dependencies"
required: false
default: false
type: boolean
pull_request:
branches: [develop]
paths:
# This allows testing changes to the upload workflow in a PR
- .github/workflows/upload-conan-deps.yml
push:
branches: [develop]
paths:
- .github/workflows/upload-conan-deps.yml
- .github/workflows/reusable-strategy-matrix.yml
- .github/actions/build-deps/action.yml
- .github/actions/setup-conan/action.yml
- ".github/scripts/strategy-matrix/**"
- conanfile.py
- conan.lock
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}
run-upload-conan-deps:
needs:
- generate-matrix
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
runs-on: ${{ matrix.architecture.runner }}
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
with:
disable_ccache: false
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: .build
build_type: ${{ matrix.build_type }}
force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
- name: Login to Conan
if: github.repository_owner == 'XRPLF' && github.event_name != 'pull_request'
run: conan remote login -p ${{ secrets.CONAN_PASSWORD }} ${{ inputs.conan_remote_name }} ${{ secrets.CONAN_USERNAME }}
- name: Upload Conan packages
if: github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule'
run: conan upload "*" -r=${{ inputs.conan_remote_name }} --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}

.gitignore vendored (3 changed lines)
View File

@@ -111,6 +111,3 @@ bld.rippled/
# Suggested in-tree build directory
/.build*/
# Rust
external/*/target

View File

@@ -1,5 +1,18 @@
# To run pre-commit hooks, first install pre-commit:
# - `pip install pre-commit==${PRE_COMMIT_VERSION}`
# - `pip install pre-commit-hooks==${PRE_COMMIT_HOOKS_VERSION}`
#
# Depending on your system, you can use `brew install` or `apt install` as well
# for installing the pre-commit package, but `pip` is needed to install the
# hooks; you can also use `pipx` if you prefer.
# Next, install the required formatters:
# - `pip install clang-format==${CLANG_VERSION}`
# - `npm install prettier@${PRETTIER_VERSION}`
#
# See https://github.com/XRPLF/ci/blob/main/.github/workflows/tools-rippled.yml
# for the versions used in the CI pipeline. You will need to have the exact same
# versions of the tools installed on your system to produce the same results as
# the pipeline.
#
# Then, run the following command to install the git hook scripts:
# - `pre-commit install`
@@ -7,33 +20,45 @@
# - `pre-commit run --all-files`
# To manually run a specific hook, use:
# - `pre-commit run <hook_id> --all-files`
# To run the hooks against only the staged files, use:
# To run the hooks against only the files changed in the current commit, use:
# - `pre-commit run`
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: mixed-line-ending
- id: check-merge-conflict
args: [--assume-in-merge]
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
- repo: local
hooks:
- id: clang-format
args: [--style=file]
"types_or": [c++, c, proto]
- repo: https://github.com/rbubley/mirrors-prettier
rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
name: clang-format
language: system
entry: clang-format -i
files: '\.(cpp|hpp|h|ipp|proto)$'
- id: trailing-whitespace
name: trailing-whitespace
entry: trailing-whitespace-fixer
language: system
types: [text]
- id: end-of-file
name: end-of-file
entry: end-of-file-fixer
language: system
types: [text]
- id: mixed-line-ending
name: mixed-line-ending
entry: mixed-line-ending
language: system
types: [text]
- id: check-merge-conflict
name: check-merge-conflict
entry: check-merge-conflict --assume-in-merge
language: system
types: [text]
- repo: local
hooks:
- id: prettier
name: prettier
language: system
entry: prettier --ignore-unknown --write
exclude: |
(?x)^(
external/.*|
.github/scripts/levelization/results/.*\.txt|
conan\.lock
.github/scripts/levelization/results/.*\.txt
)$
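Condensing the installation comments at the top of this file into one runnable sequence; the version variables are placeholders, and the exact versions used in CI are listed in the XRPLF/ci `tools-rippled` workflow referenced above:

```bash
pip install pre-commit==${PRE_COMMIT_VERSION} pre-commit-hooks==${PRE_COMMIT_HOOKS_VERSION}
pip install clang-format==${CLANG_VERSION}   # must match the CI version
npm install prettier@${PRETTIER_VERSION}     # must match the CI version
pre-commit install                           # install the git hook scripts
pre-commit run --all-files                   # run every hook against the whole repository
```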

View File

@@ -132,7 +132,7 @@ higher index than the default Conan Center remote, so it is consulted first. You
can do this by running:
```bash
conan remote add --index 0 xrplf https://conan.ripplex.io
conan remote add --index 0 xrplf "https://conan.ripplex.io"
```
Alternatively, you can pull the patched recipes into the repository and use them
@@ -158,10 +158,6 @@ updated dependencies with the newer version. However, if we switch to a newer
version that no longer requires a patch, no action is required on your part, as
the new recipe will be automatically pulled from the official Conan Center.
> [!NOTE]
> You might need to add `--lockfile=""` to your `conan install` command
> to avoid automatic use of the existing `conan.lock` file when you run `conan export` manually on your machine
### Conan profile tweaks
#### Missing compiler version
@@ -470,33 +466,6 @@ tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
The location of `rippled` binary in your build directory depends on your
CMake generator. Pass `--help` to see the rest of the command line options.
#### Conan lockfile
To achieve reproducible dependencies, we use [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html).
The `conan.lock` file in the repository contains a "snapshot" of the current dependencies.
It is implicitly used when running `conan` commands, you don't need to specify it.
You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.
> [!NOTE]
> Conan uses local cache by default when creating a lockfile.
>
> To ensure, that lockfile creation works the same way on all developer machines, you should clear the local cache before creating a new lockfile.
To create a new lockfile, run the following commands in the repository root:
```bash
conan remove '*' --confirm
rm conan.lock
# This ensure that xrplf remote is the first to be consulted
conan remote add --force --index 0 xrplf https://conan.ripplex.io
conan lock create . -o '&:jemalloc=True' -o '&:rocksdb=True'
```
> [!NOTE]
> If some dependencies are exclusive for some OS, you may need to run the last command for them adding `--profile:all <PROFILE>`.
## Coverage report
The coverage report is intended for developers using compilers GCC
@@ -595,13 +564,7 @@ After any updates or changes to dependencies, you may need to do the following:
```
3. Re-run [conan export](#patched-recipes) if needed.
4. [Regenerate lockfile](#conan-lockfile).
5. Re-run [conan install](#build-and-test).
#### ERROR: Package not resolved
If you're seeing an error like `ERROR: Package 'snappy/1.1.10' not resolved: Unable to find 'snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246' in remotes.`,
please add `xrplf` remote or re-run `conan export` for [patched recipes](#patched-recipes).
4. Re-run [conan install](#build-and-test).
### `protobuf/port_def.inc` file not found

View File

@@ -96,7 +96,6 @@ set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
set(SECP256K1_BUILD_EXAMPLES FALSE)
add_subdirectory(external/secp256k1)
add_subdirectory(external/rust-test)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(external/ed25519-donna)
add_subdirectory(external/antithesis-sdk)
@@ -130,7 +129,6 @@ target_link_libraries(ripple_libs INTERFACE
secp256k1::secp256k1
soci::soci
SQLite::SQLite3
RustTest
)
# Work around changes to Conan recipe for now.

View File

@@ -6,7 +6,7 @@ The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powe
## XRP
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free crypto-asset native to the XRP Ledger, and is designed as a gas token for network services and to bridge different currencies. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
## rippled
@@ -23,19 +23,19 @@ If you are interested in running an **API Server** (including a **Full History S
- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction fees.
- **[Responsible Software Governance][]:** A team of full-time developers at Ripple & other organizations maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
- **[Modern Features][]:** Features like Escrow, Checks, and Payment Channels support financial applications atop of the XRP Ledger. This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.
[Censorship-Resistant Transaction Processing]: https://xrpl.org/transaction-censorship-detection.html#transaction-censorship-detection
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/consensus-research.html#consensus-research
[Finite XRP Supply]: https://xrpl.org/what-is-xrp.html
[Responsible Software Governance]: https://xrpl.org/contribute-code.html#contribute-code-to-the-xrp-ledger
[Secure, Adaptable Cryptography]: https://xrpl.org/cryptographic-keys.html#cryptographic-keys
[Modern Features]: https://xrpl.org/use-specialized-payment-types.html
[On-Ledger Decentralized Exchange]: https://xrpl.org/decentralized-exchange.html#decentralized-exchange
[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange
## Source Code

View File

@@ -218,12 +218,12 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
check_cxx_compiler_flag(-fprofile-update HAVE_cxx_fprofile_update)
if(HAVE_cxx_fprofile_update)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
endif()
check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
check_c_compiler_flag(-fprofile-update HAVE_c_fprofile_update)
if(HAVE_c_fprofile_update)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
endif()

View File

@@ -1,56 +0,0 @@
{
"version": "0.5",
"requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
"rocksdb/10.0.1#85537f46e538974d67da0c3977de48ac%1756234304.347",
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
"openssl/3.5.2#0c5a5e15ae569f45dff57adcf1770cf7%1756234259.61",
"nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
"libarchive/3.8.1#5cf685686322e906cb42706ab7e099a8%1756234256.696",
"jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244",
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1756234248.958",
"doctest/2.4.11#a4211dfc329a16ba9f280f9574025659%1756234220.819",
"date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
"bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1756234261.716",
"boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1756223752.326",
"abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1756234220.907"
],
"build_requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1756234281.733",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1756234232.901",
"msys2/cci.latest#5b73b10144f73cc5bfe0572ed9be39e1%1751977009.857",
"m4/1.4.19#b38ced39a01e31fef5435bc634461fd2%1700758725.451",
"cmake/3.31.8#dde3bde00bb843687e55aea5afa0e220%1756234232.89",
"b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28",
"automake/1.16.5#b91b7c384c3deaa9d535be02da14d04f%1755524470.56",
"autoconf/2.71#51077f068e61700d65bb05541ea1e4b0%1731054366.86"
],
"python_requires": [],
"overrides": {
"protobuf/3.21.12": [
null,
"protobuf/3.21.12"
],
"lz4/1.9.4": [
"lz4/1.10.0"
],
"boost/1.83.0": [
"boost/1.88.0"
],
"sqlite3/3.44.2": [
"sqlite3/3.49.1"
]
},
"config_requires": []
}

View File

@@ -1,15 +0,0 @@
cmake_minimum_required(VERSION 3.15)
project(RustTest)
set(CMAKE_INSTALL_CMAKEDIR "lib/cmake/RustTest")
execute_process(COMMAND cargo run --bin install -- --release WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})
set(INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/target/install/release/include/")
add_library(RustTest STATIC IMPORTED GLOBAL)
set_target_properties(RustTest PROPERTIES
IMPORTED_LOCATION "${CMAKE_CURRENT_SOURCE_DIR}/target/install/release/lib/RustTest.a"
INTERFACE_INCLUDE_DIRECTORIES "${CMAKE_CURRENT_SOURCE_DIR}/target/install/release/include/"
)

external/rust-test/Cargo.lock generated vendored (568 changed lines)
View File

@@ -1,568 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "RustTest"
version = "0.1.0"
dependencies = [
"cbindgen",
]
[[package]]
name = "anstream"
version = "0.6.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
[[package]]
name = "anstyle-parse"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9"
dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882"
dependencies = [
"anstyle",
"once_cell_polyfill",
"windows-sys 0.59.0",
]
[[package]]
name = "bitflags"
version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
[[package]]
name = "cbindgen"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "975982cdb7ad6a142be15bdf84aea7ec6a9e5d4d797c004d43185b24cfe4e684"
dependencies = [
"clap",
"heck",
"indexmap",
"log",
"proc-macro2",
"quote",
"serde",
"serde_json",
"syn",
"tempfile",
"toml",
]
[[package]]
name = "cfg-if"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
[[package]]
name = "clap"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_lex"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
[[package]]
name = "colorchoice"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
[[package]]
name = "equivalent"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "errno"
version = "0.3.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
dependencies = [
"libc",
"windows-sys 0.60.2",
]
[[package]]
name = "fastrand"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "getrandom"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
dependencies = [
"cfg-if",
"libc",
"r-efi",
"wasi",
]
[[package]]
name = "hashbrown"
version = "0.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "indexmap"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
dependencies = [
"equivalent",
"hashbrown",
]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "itoa"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "libc"
version = "0.2.174"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
[[package]]
name = "linux-raw-sys"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
[[package]]
name = "log"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "memchr"
version = "2.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
[[package]]
name = "once_cell"
version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "once_cell_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
[[package]]
name = "proc-macro2"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
[[package]]
name = "r-efi"
version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "rustix"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
dependencies = [
"bitflags",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.60.2",
]
[[package]]
name = "ryu"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "serde"
version = "1.0.219"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.219"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.141"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
[[package]]
name = "serde_spanned"
version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
dependencies = [
"serde",
]
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "tempfile"
version = "3.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
dependencies = [
"fastrand",
"getrandom",
"once_cell",
"rustix",
"windows-sys 0.59.0",
]
[[package]]
name = "toml"
version = "0.8.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.22.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"toml_write",
"winnow",
]
[[package]]
name = "toml_write"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
[[package]]
name = "unicode-ident"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "utf8parse"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "wasi"
version = "0.14.2+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3"
dependencies = [
"wit-bindgen-rt",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-sys"
version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
dependencies = [
"windows-targets 0.53.2",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm 0.52.6",
"windows_aarch64_msvc 0.52.6",
"windows_i686_gnu 0.52.6",
"windows_i686_gnullvm 0.52.6",
"windows_i686_msvc 0.52.6",
"windows_x86_64_gnu 0.52.6",
"windows_x86_64_gnullvm 0.52.6",
"windows_x86_64_msvc 0.52.6",
]
[[package]]
name = "windows-targets"
version = "0.53.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef"
dependencies = [
"windows_aarch64_gnullvm 0.53.0",
"windows_aarch64_msvc 0.53.0",
"windows_i686_gnu 0.53.0",
"windows_i686_gnullvm 0.53.0",
"windows_i686_msvc 0.53.0",
"windows_x86_64_gnu 0.53.0",
"windows_x86_64_gnullvm 0.53.0",
"windows_x86_64_msvc 0.53.0",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_aarch64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_i686_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "windows_x86_64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]]
name = "winnow"
version = "0.7.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95"
dependencies = [
"memchr",
]
[[package]]
name = "wit-bindgen-rt"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
dependencies = [
"bitflags",
]

View File

@@ -1,13 +0,0 @@
[package]
name = "RustTest"
version = "0.1.0"
edition = "2024"
[lib]
name = "test"
crate-type = ["staticlib"]
[build-dependencies]
cbindgen = "0.29"
[dependencies]

View File

@@ -1,15 +0,0 @@
use std::{env};
use std::{path::PathBuf};
fn main() {
let crate_name = env::var("CARGO_PKG_NAME").unwrap();
let header_name = format!("{}.h", crate_name);
let crate_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
let header_path = out_dir.join(header_name);
cbindgen::generate(&crate_dir)
.expect("Unable to generate bindings")
.write_to_file(header_path);
}

View File

@@ -1,54 +0,0 @@
use std::path::PathBuf;
use std::process::{Command};
use std::{env, fs};
fn main() {
let args: Vec<String> = env::args().collect();
// Pass 'build' for the first argument and pass all other
// parameters to the cargo command
Command::new("cargo")
.arg("build")
.args(args.iter().skip(1))
.status()
.expect("failed to run cargo");
// Determine the build config
let build_config = args.iter()
.map(|arg| arg.to_lowercase())
.filter(|arg| arg == "--release" || arg == "--debug")
.next()
.unwrap_or("--debug".to_string());
let config = if build_config == "--debug" { "debug" } else { "release" };
let crate_name = env::var("CARGO_PKG_NAME").unwrap();
let lib_name = format!("{}.a", crate_name);
let header_name = format!("{}.h", crate_name);
let crate_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
let header_out_path = out_dir.join(&header_name);
let binary_dir = crate_dir.join(format!("target/{config}"));
let lib_path = fs::read_dir(binary_dir)
.expect("Failed to find lib files")
.filter_map(Result::ok)
.map(|entry| entry.path())
.find(|path| path.extension().and_then(|s| s.to_str()) == Some("a"))
.expect("Can't find a lib file");
let install_base_dir = crate_dir.join("target").join("install").join(config);
let lib_install_dir = install_base_dir.join("lib");
let include_install_dir = install_base_dir.join("include");
let include_install_path = include_install_dir.join(&header_name);
let lib_install_path = lib_install_dir.join(&lib_name);
let _ = std::fs::create_dir_all(&lib_install_dir);
let _ = std::fs::create_dir_all(&include_install_dir);
fs::copy(header_out_path, &include_install_path).expect("Failed to install the header file");
fs::copy(lib_path, &lib_install_path)
.expect("Failed to install the lib file");
println!("Installed to {}", install_base_dir.display());
}

View File

@@ -1,4 +0,0 @@
#[unsafe(no_mangle)]
pub extern "C" fn add(a: i32, b: i32) -> i32 {
a + b
}
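
For illustration only, a minimal C++ caller of the crate deleted above; it assumes the cbindgen-generated header is named test.h (after the [lib] name) and that the produced staticlib is linked. This is an editorial sketch, not code from the repository.

// Sketch: consuming the removed Rust `add` through the cbindgen-generated header.
#include "test.h"  // assumed name; declares extern "C" int32_t add(int32_t, int32_t)

#include <cstdio>

int main()
{
    std::printf("add(2, 3) = %d\n", static_cast<int>(add(2, 3)));
    return 0;
}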

View File

@@ -150,24 +150,6 @@ public:
return (mantissa_ < 0) ? -1 : (mantissa_ ? 1 : 0);
}
Number
truncate() const noexcept
{
if (exponent_ >= 0 || mantissa_ == 0)
return *this;
Number ret = *this;
while (ret.exponent_ < 0 && ret.mantissa_ != 0)
{
ret.exponent_ += 1;
ret.mantissa_ /= rep(10);
}
// We are guaranteed that normalize() will never throw an exception
// because exponent is either negative or zero at this point.
ret.normalize();
return ret;
}
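// Editorial note (not part of the diff): the removed truncate() rounds toward
// zero by dropping fractional digits, as the removed test_truncate() cases
// later in this change spell out, e.g.:
//   Number(25, -1)  (= 2.5)   -> truncate() == Number(2, 0)
//   Number(-25, -1) (= -2.5)  -> truncate() == Number(-2, 0)
//   Number(99, -2)  (= 0.99)  -> truncate() == Number(0, 0)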
friend constexpr bool
operator>(Number const& x, Number const& y) noexcept
{

View File

@@ -20,8 +20,6 @@
#ifndef RIPPLE_PROTOCOL_PERMISSION_H_INCLUDED
#define RIPPLE_PROTOCOL_PERMISSION_H_INCLUDED
#include <xrpl/protocol/Rules.h>
#include <xrpl/protocol/TER.h>
#include <xrpl/protocol/TxFormats.h>
#include <optional>
@@ -55,8 +53,6 @@ class Permission
private:
Permission();
std::unordered_map<std::uint16_t, uint256> txFeatureMap_;
std::unordered_map<std::uint16_t, Delegation> delegatableTx_;
std::unordered_map<std::string, GranularPermissionType>
@@ -84,8 +80,7 @@ public:
getGranularTxType(GranularPermissionType const& gpType) const;
bool
isDelegatable(std::uint32_t const& permissionValue, Rules const& rules)
const;
isDelegatable(std::uint32_t const& permissionValue) const;
// for tx level permission, permission value is equal to tx type plus one
uint32_t

View File

@@ -122,13 +122,6 @@ std::size_t constexpr maxDataPayloadLength = 256;
/** Vault withdrawal policies */
std::uint8_t constexpr vaultStrategyFirstComeFirstServe = 1;
/** Default IOU scale factor for a Vault */
std::uint8_t constexpr vaultDefaultIOUScale = 6;
/** Maximum scale factor for a Vault. The number is chosen to ensure that
1 IOU can be always converted to shares.
10^19 > maxMPTokenAmount (2^64-1) > 10^18 */
std::uint8_t constexpr vaultMaximumIOUScale = 18;
/** Maximum recursion depth for vault shares being put as an asset inside
* another vault; counted from 0 */
std::uint8_t constexpr maxAssetCheckDepth = 5;

View File

@@ -59,7 +59,7 @@ enum TxType : std::uint16_t
#pragma push_macro("TRANSACTION")
#undef TRANSACTION
#define TRANSACTION(tag, value, ...) tag = value,
#define TRANSACTION(tag, value, name, delegatable, fields) tag = value,
#include <xrpl/protocol/detail/transactions.macro>

View File

@@ -32,10 +32,9 @@
// If you add an amendment here, then do not forget to increment `numFeatures`
// in include/xrpl/protocol/Feature.h.
XRPL_FIX (DelegateV1_1, Supported::no, VoteBehavior::DefaultNo)
XRPL_FIX (PriceOracleOrder, Supported::no, VoteBehavior::DefaultNo)
XRPL_FIX (MPTDeliveredAmount, Supported::no, VoteBehavior::DefaultNo)
XRPL_FIX (AMMClawbackRounding, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (AMMClawbackRounding, Supported::no, VoteBehavior::DefaultNo)
XRPL_FEATURE(TokenEscrow, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (EnforceNFTokenTrustlineV2, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (AMMv1_3, Supported::yes, VoteBehavior::DefaultNo)

View File

@@ -499,7 +499,6 @@ LEDGER_ENTRY(ltVAULT, 0x0084, Vault, vault, ({
{sfLossUnrealized, soeREQUIRED},
{sfShareMPTID, soeREQUIRED},
{sfWithdrawalPolicy, soeREQUIRED},
{sfScale, soeDEFAULT},
// no SharesTotal ever (use MPTIssuance.sfOutstandingAmount)
// no PermissionedDomainID ever (use MPTIssuance.sfDomainID)
}))

View File

@@ -22,17 +22,14 @@
#endif
/**
* TRANSACTION(tag, value, name, delegatable, amendments, fields)
* TRANSACTION(tag, value, name, delegatable, fields)
*
* You must define a transactor class in the `ripple` namespace named `name`,
* and include its header in `src/xrpld/app/tx/detail/applySteps.cpp`.
*/
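// Editorial sketch (not from this repository): one way a consumer expands the
// five-argument TRANSACTION form, mirroring the push/undef/define/include/
// undef/pop pattern that jss.h and TxFormats.cpp use elsewhere in this diff.
// The helper name txTypeName is hypothetical; it assumes <cstdint> and the
// xrpl headers are on the include path.
inline char const*
txTypeName(std::uint16_t type)
{
    switch (type)
    {
#pragma push_macro("TRANSACTION")
#undef TRANSACTION
#define TRANSACTION(tag, value, name, delegatable, fields) \
    case value:                                            \
        return #name;
#include <xrpl/protocol/detail/transactions.macro>
#undef TRANSACTION
#pragma pop_macro("TRANSACTION")
        default:
            return "Unknown";
    }
}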
/** This transaction type executes a payment. */
TRANSACTION(ttPAYMENT, 0, Payment,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttPAYMENT, 0, Payment, Delegation::delegatable, ({
{sfDestination, soeREQUIRED},
{sfAmount, soeREQUIRED, soeMPTSupported},
{sfSendMax, soeOPTIONAL, soeMPTSupported},
@@ -45,10 +42,7 @@ TRANSACTION(ttPAYMENT, 0, Payment,
}))
/** This transaction type creates an escrow object. */
TRANSACTION(ttESCROW_CREATE, 1, EscrowCreate,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttESCROW_CREATE, 1, EscrowCreate, Delegation::delegatable, ({
{sfDestination, soeREQUIRED},
{sfAmount, soeREQUIRED, soeMPTSupported},
{sfCondition, soeOPTIONAL},
@@ -58,10 +52,7 @@ TRANSACTION(ttESCROW_CREATE, 1, EscrowCreate,
}))
/** This transaction type completes an existing escrow. */
TRANSACTION(ttESCROW_FINISH, 2, EscrowFinish,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttESCROW_FINISH, 2, EscrowFinish, Delegation::delegatable, ({
{sfOwner, soeREQUIRED},
{sfOfferSequence, soeREQUIRED},
{sfFulfillment, soeOPTIONAL},
@@ -71,10 +62,7 @@ TRANSACTION(ttESCROW_FINISH, 2, EscrowFinish,
/** This transaction type adjusts various account settings. */
TRANSACTION(ttACCOUNT_SET, 3, AccountSet,
Delegation::notDelegatable,
uint256{},
({
TRANSACTION(ttACCOUNT_SET, 3, AccountSet, Delegation::notDelegatable, ({
{sfEmailHash, soeOPTIONAL},
{sfWalletLocator, soeOPTIONAL},
{sfWalletSize, soeOPTIONAL},
@@ -88,29 +76,20 @@ TRANSACTION(ttACCOUNT_SET, 3, AccountSet,
}))
/** This transaction type cancels an existing escrow. */
TRANSACTION(ttESCROW_CANCEL, 4, EscrowCancel,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttESCROW_CANCEL, 4, EscrowCancel, Delegation::delegatable, ({
{sfOwner, soeREQUIRED},
{sfOfferSequence, soeREQUIRED},
}))
/** This transaction type sets or clears an account's "regular key". */
TRANSACTION(ttREGULAR_KEY_SET, 5, SetRegularKey,
Delegation::notDelegatable,
uint256{},
({
TRANSACTION(ttREGULAR_KEY_SET, 5, SetRegularKey, Delegation::notDelegatable, ({
{sfRegularKey, soeOPTIONAL},
}))
// 6 deprecated
/** This transaction type creates an offer to trade one asset for another. */
TRANSACTION(ttOFFER_CREATE, 7, OfferCreate,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttOFFER_CREATE, 7, OfferCreate, Delegation::delegatable, ({
{sfTakerPays, soeREQUIRED},
{sfTakerGets, soeREQUIRED},
{sfExpiration, soeOPTIONAL},
@@ -119,20 +98,14 @@ TRANSACTION(ttOFFER_CREATE, 7, OfferCreate,
}))
/** This transaction type cancels existing offers to trade one asset for another. */
TRANSACTION(ttOFFER_CANCEL, 8, OfferCancel,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttOFFER_CANCEL, 8, OfferCancel, Delegation::delegatable, ({
{sfOfferSequence, soeREQUIRED},
}))
// 9 deprecated
/** This transaction type creates a new set of tickets. */
TRANSACTION(ttTICKET_CREATE, 10, TicketCreate,
Delegation::delegatable,
featureTicketBatch,
({
TRANSACTION(ttTICKET_CREATE, 10, TicketCreate, Delegation::delegatable, ({
{sfTicketCount, soeREQUIRED},
}))
@@ -141,19 +114,13 @@ TRANSACTION(ttTICKET_CREATE, 10, TicketCreate,
/** This transaction type modifies the signer list associated with an account. */
// The SignerEntries are optional because a SignerList is deleted by
// setting the SignerQuorum to zero and omitting SignerEntries.
TRANSACTION(ttSIGNER_LIST_SET, 12, SignerListSet,
Delegation::notDelegatable,
uint256{},
({
TRANSACTION(ttSIGNER_LIST_SET, 12, SignerListSet, Delegation::notDelegatable, ({
{sfSignerQuorum, soeREQUIRED},
{sfSignerEntries, soeOPTIONAL},
}))
/** This transaction type creates a new unidirectional XRP payment channel. */
TRANSACTION(ttPAYCHAN_CREATE, 13, PaymentChannelCreate,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttPAYCHAN_CREATE, 13, PaymentChannelCreate, Delegation::delegatable, ({
{sfDestination, soeREQUIRED},
{sfAmount, soeREQUIRED},
{sfSettleDelay, soeREQUIRED},
@@ -163,20 +130,14 @@ TRANSACTION(ttPAYCHAN_CREATE, 13, PaymentChannelCreate,
}))
/** This transaction type funds an existing unidirectional XRP payment channel. */
TRANSACTION(ttPAYCHAN_FUND, 14, PaymentChannelFund,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttPAYCHAN_FUND, 14, PaymentChannelFund, Delegation::delegatable, ({
{sfChannel, soeREQUIRED},
{sfAmount, soeREQUIRED},
{sfExpiration, soeOPTIONAL},
}))
/** This transaction type submits a claim against an existing unidirectional payment channel. */
TRANSACTION(ttPAYCHAN_CLAIM, 15, PaymentChannelClaim,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttPAYCHAN_CLAIM, 15, PaymentChannelClaim, Delegation::delegatable, ({
{sfChannel, soeREQUIRED},
{sfAmount, soeOPTIONAL},
{sfBalance, soeOPTIONAL},
@@ -186,10 +147,7 @@ TRANSACTION(ttPAYCHAN_CLAIM, 15, PaymentChannelClaim,
}))
/** This transaction type creates a new check. */
TRANSACTION(ttCHECK_CREATE, 16, CheckCreate,
Delegation::delegatable,
featureChecks,
({
TRANSACTION(ttCHECK_CREATE, 16, CheckCreate, Delegation::delegatable, ({
{sfDestination, soeREQUIRED},
{sfSendMax, soeREQUIRED},
{sfExpiration, soeOPTIONAL},
@@ -198,28 +156,19 @@ TRANSACTION(ttCHECK_CREATE, 16, CheckCreate,
}))
/** This transaction type cashes an existing check. */
TRANSACTION(ttCHECK_CASH, 17, CheckCash,
Delegation::delegatable,
featureChecks,
({
TRANSACTION(ttCHECK_CASH, 17, CheckCash, Delegation::delegatable, ({
{sfCheckID, soeREQUIRED},
{sfAmount, soeOPTIONAL},
{sfDeliverMin, soeOPTIONAL},
}))
/** This transaction type cancels an existing check. */
TRANSACTION(ttCHECK_CANCEL, 18, CheckCancel,
Delegation::delegatable,
featureChecks,
({
TRANSACTION(ttCHECK_CANCEL, 18, CheckCancel, Delegation::delegatable, ({
{sfCheckID, soeREQUIRED},
}))
/** This transaction type grants or revokes authorization to transfer funds. */
TRANSACTION(ttDEPOSIT_PREAUTH, 19, DepositPreauth,
Delegation::delegatable,
featureDepositPreauth,
({
TRANSACTION(ttDEPOSIT_PREAUTH, 19, DepositPreauth, Delegation::delegatable, ({
{sfAuthorize, soeOPTIONAL},
{sfUnauthorize, soeOPTIONAL},
{sfAuthorizeCredentials, soeOPTIONAL},
@@ -227,20 +176,14 @@ TRANSACTION(ttDEPOSIT_PREAUTH, 19, DepositPreauth,
}))
/** This transaction type modifies a trustline between two accounts. */
TRANSACTION(ttTRUST_SET, 20, TrustSet,
Delegation::delegatable,
uint256{},
({
TRANSACTION(ttTRUST_SET, 20, TrustSet, Delegation::delegatable, ({
{sfLimitAmount, soeOPTIONAL},
{sfQualityIn, soeOPTIONAL},
{sfQualityOut, soeOPTIONAL},
}))
/** This transaction type deletes an existing account. */
TRANSACTION(ttACCOUNT_DELETE, 21, AccountDelete,
Delegation::notDelegatable,
uint256{},
({
TRANSACTION(ttACCOUNT_DELETE, 21, AccountDelete, Delegation::notDelegatable, ({
{sfDestination, soeREQUIRED},
{sfDestinationTag, soeOPTIONAL},
{sfCredentialIDs, soeOPTIONAL},
@@ -249,10 +192,7 @@ TRANSACTION(ttACCOUNT_DELETE, 21, AccountDelete,
// 22 reserved
/** This transaction mints a new NFT. */
TRANSACTION(ttNFTOKEN_MINT, 25, NFTokenMint,
Delegation::delegatable,
featureNonFungibleTokensV1,
({
TRANSACTION(ttNFTOKEN_MINT, 25, NFTokenMint, Delegation::delegatable, ({
{sfNFTokenTaxon, soeREQUIRED},
{sfTransferFee, soeOPTIONAL},
{sfIssuer, soeOPTIONAL},
@@ -263,19 +203,13 @@ TRANSACTION(ttNFTOKEN_MINT, 25, NFTokenMint,
}))
/** This transaction burns (i.e. destroys) an existing NFT. */
TRANSACTION(ttNFTOKEN_BURN, 26, NFTokenBurn,
Delegation::delegatable,
featureNonFungibleTokensV1,
({
TRANSACTION(ttNFTOKEN_BURN, 26, NFTokenBurn, Delegation::delegatable, ({
{sfNFTokenID, soeREQUIRED},
{sfOwner, soeOPTIONAL},
}))
/** This transaction creates a new offer to buy or sell an NFT. */
TRANSACTION(ttNFTOKEN_CREATE_OFFER, 27, NFTokenCreateOffer,
Delegation::delegatable,
featureNonFungibleTokensV1,
({
TRANSACTION(ttNFTOKEN_CREATE_OFFER, 27, NFTokenCreateOffer, Delegation::delegatable, ({
{sfNFTokenID, soeREQUIRED},
{sfAmount, soeREQUIRED},
{sfDestination, soeOPTIONAL},
@@ -284,37 +218,25 @@ TRANSACTION(ttNFTOKEN_CREATE_OFFER, 27, NFTokenCreateOffer,
}))
/** This transaction cancels an existing offer to buy or sell an existing NFT. */
TRANSACTION(ttNFTOKEN_CANCEL_OFFER, 28, NFTokenCancelOffer,
Delegation::delegatable,
featureNonFungibleTokensV1,
({
TRANSACTION(ttNFTOKEN_CANCEL_OFFER, 28, NFTokenCancelOffer, Delegation::delegatable, ({
{sfNFTokenOffers, soeREQUIRED},
}))
/** This transaction accepts an existing offer to buy or sell an existing NFT. */
TRANSACTION(ttNFTOKEN_ACCEPT_OFFER, 29, NFTokenAcceptOffer,
Delegation::delegatable,
featureNonFungibleTokensV1,
({
TRANSACTION(ttNFTOKEN_ACCEPT_OFFER, 29, NFTokenAcceptOffer, Delegation::delegatable, ({
{sfNFTokenBuyOffer, soeOPTIONAL},
{sfNFTokenSellOffer, soeOPTIONAL},
{sfNFTokenBrokerFee, soeOPTIONAL},
}))
/** This transaction claws back issued tokens. */
TRANSACTION(ttCLAWBACK, 30, Clawback,
Delegation::delegatable,
featureClawback,
({
TRANSACTION(ttCLAWBACK, 30, Clawback, Delegation::delegatable, ({
{sfAmount, soeREQUIRED, soeMPTSupported},
{sfHolder, soeOPTIONAL},
}))
/** This transaction claws back tokens from an AMM pool. */
TRANSACTION(ttAMM_CLAWBACK, 31, AMMClawback,
Delegation::delegatable,
featureAMMClawback,
({
TRANSACTION(ttAMM_CLAWBACK, 31, AMMClawback, Delegation::delegatable, ({
{sfHolder, soeREQUIRED},
{sfAsset, soeREQUIRED},
{sfAsset2, soeREQUIRED},
@@ -322,20 +244,14 @@ TRANSACTION(ttAMM_CLAWBACK, 31, AMMClawback,
}))
/** This transaction type creates an AMM instance */
TRANSACTION(ttAMM_CREATE, 35, AMMCreate,
Delegation::delegatable,
featureAMM,
({
TRANSACTION(ttAMM_CREATE, 35, AMMCreate, Delegation::delegatable, ({
{sfAmount, soeREQUIRED},
{sfAmount2, soeREQUIRED},
{sfTradingFee, soeREQUIRED},
}))
/** This transaction type deposits into an AMM instance */
TRANSACTION(ttAMM_DEPOSIT, 36, AMMDeposit,
Delegation::delegatable,
featureAMM,
({
TRANSACTION(ttAMM_DEPOSIT, 36, AMMDeposit, Delegation::delegatable, ({
{sfAsset, soeREQUIRED},
{sfAsset2, soeREQUIRED},
{sfAmount, soeOPTIONAL},
@@ -346,10 +262,7 @@ TRANSACTION(ttAMM_DEPOSIT, 36, AMMDeposit,
}))
/** This transaction type withdraws from an AMM instance */
TRANSACTION(ttAMM_WITHDRAW, 37, AMMWithdraw,
Delegation::delegatable,
featureAMM,
({
TRANSACTION(ttAMM_WITHDRAW, 37, AMMWithdraw, Delegation::delegatable, ({
{sfAsset, soeREQUIRED},
{sfAsset2, soeREQUIRED},
{sfAmount, soeOPTIONAL},
@@ -359,20 +272,14 @@ TRANSACTION(ttAMM_WITHDRAW, 37, AMMWithdraw,
}))
/** This transaction type votes for the trading fee */
TRANSACTION(ttAMM_VOTE, 38, AMMVote,
Delegation::delegatable,
featureAMM,
({
TRANSACTION(ttAMM_VOTE, 38, AMMVote, Delegation::delegatable, ({
{sfAsset, soeREQUIRED},
{sfAsset2, soeREQUIRED},
{sfTradingFee, soeREQUIRED},
}))
/** This transaction type bids for the auction slot */
TRANSACTION(ttAMM_BID, 39, AMMBid,
Delegation::delegatable,
featureAMM,
({
TRANSACTION(ttAMM_BID, 39, AMMBid, Delegation::delegatable, ({
{sfAsset, soeREQUIRED},
{sfAsset2, soeREQUIRED},
{sfBidMin, soeOPTIONAL},
@@ -381,29 +288,20 @@ TRANSACTION(ttAMM_BID, 39, AMMBid,
}))
/** This transaction type deletes AMM in the empty state */
TRANSACTION(ttAMM_DELETE, 40, AMMDelete,
Delegation::delegatable,
featureAMM,
({
TRANSACTION(ttAMM_DELETE, 40, AMMDelete, Delegation::delegatable, ({
{sfAsset, soeREQUIRED},
{sfAsset2, soeREQUIRED},
}))
/** This transactions creates a crosschain sequence number */
TRANSACTION(ttXCHAIN_CREATE_CLAIM_ID, 41, XChainCreateClaimID,
Delegation::delegatable,
featureXChainBridge,
({
TRANSACTION(ttXCHAIN_CREATE_CLAIM_ID, 41, XChainCreateClaimID, Delegation::delegatable, ({
{sfXChainBridge, soeREQUIRED},
{sfSignatureReward, soeREQUIRED},
{sfOtherChainSource, soeREQUIRED},
}))
/** This transactions initiates a crosschain transaction */
TRANSACTION(ttXCHAIN_COMMIT, 42, XChainCommit,
Delegation::delegatable,
featureXChainBridge,
({
TRANSACTION(ttXCHAIN_COMMIT, 42, XChainCommit, Delegation::delegatable, ({
{sfXChainBridge, soeREQUIRED},
{sfXChainClaimID, soeREQUIRED},
{sfAmount, soeREQUIRED},
@@ -411,10 +309,7 @@ TRANSACTION(ttXCHAIN_COMMIT, 42, XChainCommit,
}))
/** This transaction completes a crosschain transaction */
TRANSACTION(ttXCHAIN_CLAIM, 43, XChainClaim,
Delegation::delegatable,
featureXChainBridge,
({
TRANSACTION(ttXCHAIN_CLAIM, 43, XChainClaim, Delegation::delegatable, ({
{sfXChainBridge, soeREQUIRED},
{sfXChainClaimID, soeREQUIRED},
{sfDestination, soeREQUIRED},
@@ -423,10 +318,7 @@ TRANSACTION(ttXCHAIN_CLAIM, 43, XChainClaim,
}))
/** This transaction initiates a crosschain account create transaction */
TRANSACTION(ttXCHAIN_ACCOUNT_CREATE_COMMIT, 44, XChainAccountCreateCommit,
Delegation::delegatable,
featureXChainBridge,
({
TRANSACTION(ttXCHAIN_ACCOUNT_CREATE_COMMIT, 44, XChainAccountCreateCommit, Delegation::delegatable, ({
{sfXChainBridge, soeREQUIRED},
{sfDestination, soeREQUIRED},
{sfAmount, soeREQUIRED},
@@ -434,10 +326,7 @@ TRANSACTION(ttXCHAIN_ACCOUNT_CREATE_COMMIT, 44, XChainAccountCreateCommit,
}))
/** This transaction adds an attestation to a claim */
TRANSACTION(ttXCHAIN_ADD_CLAIM_ATTESTATION, 45, XChainAddClaimAttestation,
Delegation::delegatable,
featureXChainBridge,
({
TRANSACTION(ttXCHAIN_ADD_CLAIM_ATTESTATION, 45, XChainAddClaimAttestation, Delegation::delegatable, ({
{sfXChainBridge, soeREQUIRED},
{sfAttestationSignerAccount, soeREQUIRED},
@@ -453,10 +342,7 @@ TRANSACTION(ttXCHAIN_ADD_CLAIM_ATTESTATION, 45, XChainAddClaimAttestation,
}))
/** This transaction adds an attestation to an account */
TRANSACTION(ttXCHAIN_ADD_ACCOUNT_CREATE_ATTESTATION, 46, XChainAddAccountCreateAttestation,
Delegation::delegatable,
featureXChainBridge,
({
TRANSACTION(ttXCHAIN_ADD_ACCOUNT_CREATE_ATTESTATION, 46, XChainAddAccountCreateAttestation, Delegation::delegatable, ({
{sfXChainBridge, soeREQUIRED},
{sfAttestationSignerAccount, soeREQUIRED},
@@ -473,46 +359,31 @@ TRANSACTION(ttXCHAIN_ADD_ACCOUNT_CREATE_ATTESTATION, 46, XChainAddAccountCreateA
}))
/** This transaction modifies a sidechain */
TRANSACTION(ttXCHAIN_MODIFY_BRIDGE, 47, XChainModifyBridge,
Delegation::delegatable,
featureXChainBridge,
({
TRANSACTION(ttXCHAIN_MODIFY_BRIDGE, 47, XChainModifyBridge, Delegation::delegatable, ({
{sfXChainBridge, soeREQUIRED},
{sfSignatureReward, soeOPTIONAL},
{sfMinAccountCreateAmount, soeOPTIONAL},
}))
/** This transactions creates a sidechain */
TRANSACTION(ttXCHAIN_CREATE_BRIDGE, 48, XChainCreateBridge,
Delegation::delegatable,
featureXChainBridge,
({
TRANSACTION(ttXCHAIN_CREATE_BRIDGE, 48, XChainCreateBridge, Delegation::delegatable, ({
{sfXChainBridge, soeREQUIRED},
{sfSignatureReward, soeREQUIRED},
{sfMinAccountCreateAmount, soeOPTIONAL},
}))
/** This transaction type creates or updates a DID */
TRANSACTION(ttDID_SET, 49, DIDSet,
Delegation::delegatable,
featureDID,
({
TRANSACTION(ttDID_SET, 49, DIDSet, Delegation::delegatable, ({
{sfDIDDocument, soeOPTIONAL},
{sfURI, soeOPTIONAL},
{sfData, soeOPTIONAL},
}))
/** This transaction type deletes a DID */
TRANSACTION(ttDID_DELETE, 50, DIDDelete,
Delegation::delegatable,
featureDID,
({}))
TRANSACTION(ttDID_DELETE, 50, DIDDelete, Delegation::delegatable, ({}))
/** This transaction type creates an Oracle instance */
TRANSACTION(ttORACLE_SET, 51, OracleSet,
Delegation::delegatable,
featurePriceOracle,
({
TRANSACTION(ttORACLE_SET, 51, OracleSet, Delegation::delegatable, ({
{sfOracleDocumentID, soeREQUIRED},
{sfProvider, soeOPTIONAL},
{sfURI, soeOPTIONAL},
@@ -522,27 +393,18 @@ TRANSACTION(ttORACLE_SET, 51, OracleSet,
}))
/** This transaction type deletes an Oracle instance */
TRANSACTION(ttORACLE_DELETE, 52, OracleDelete,
Delegation::delegatable,
featurePriceOracle,
({
TRANSACTION(ttORACLE_DELETE, 52, OracleDelete, Delegation::delegatable, ({
{sfOracleDocumentID, soeREQUIRED},
}))
/** This transaction type fixes a problem in the ledger state */
TRANSACTION(ttLEDGER_STATE_FIX, 53, LedgerStateFix,
Delegation::delegatable,
fixNFTokenPageLinks,
({
TRANSACTION(ttLEDGER_STATE_FIX, 53, LedgerStateFix, Delegation::delegatable, ({
{sfLedgerFixType, soeREQUIRED},
{sfOwner, soeOPTIONAL},
}))
/** This transaction type creates a MPTokensIssuance instance */
TRANSACTION(ttMPTOKEN_ISSUANCE_CREATE, 54, MPTokenIssuanceCreate,
Delegation::delegatable,
featureMPTokensV1,
({
TRANSACTION(ttMPTOKEN_ISSUANCE_CREATE, 54, MPTokenIssuanceCreate, Delegation::delegatable, ({
{sfAssetScale, soeOPTIONAL},
{sfTransferFee, soeOPTIONAL},
{sfMaximumAmount, soeOPTIONAL},
@@ -551,37 +413,25 @@ TRANSACTION(ttMPTOKEN_ISSUANCE_CREATE, 54, MPTokenIssuanceCreate,
}))
/** This transaction type destroys a MPTokensIssuance instance */
TRANSACTION(ttMPTOKEN_ISSUANCE_DESTROY, 55, MPTokenIssuanceDestroy,
Delegation::delegatable,
featureMPTokensV1,
({
TRANSACTION(ttMPTOKEN_ISSUANCE_DESTROY, 55, MPTokenIssuanceDestroy, Delegation::delegatable, ({
{sfMPTokenIssuanceID, soeREQUIRED},
}))
/** This transaction type sets flags on a MPTokensIssuance or MPToken instance */
TRANSACTION(ttMPTOKEN_ISSUANCE_SET, 56, MPTokenIssuanceSet,
Delegation::delegatable,
featureMPTokensV1,
({
TRANSACTION(ttMPTOKEN_ISSUANCE_SET, 56, MPTokenIssuanceSet, Delegation::delegatable, ({
{sfMPTokenIssuanceID, soeREQUIRED},
{sfHolder, soeOPTIONAL},
{sfDomainID, soeOPTIONAL},
}))
/** This transaction type authorizes a MPToken instance */
TRANSACTION(ttMPTOKEN_AUTHORIZE, 57, MPTokenAuthorize,
Delegation::delegatable,
featureMPTokensV1,
({
TRANSACTION(ttMPTOKEN_AUTHORIZE, 57, MPTokenAuthorize, Delegation::delegatable, ({
{sfMPTokenIssuanceID, soeREQUIRED},
{sfHolder, soeOPTIONAL},
}))
/** This transaction type create an Credential instance */
TRANSACTION(ttCREDENTIAL_CREATE, 58, CredentialCreate,
Delegation::delegatable,
featureCredentials,
({
TRANSACTION(ttCREDENTIAL_CREATE, 58, CredentialCreate, Delegation::delegatable, ({
{sfSubject, soeREQUIRED},
{sfCredentialType, soeREQUIRED},
{sfExpiration, soeOPTIONAL},
@@ -589,79 +439,54 @@ TRANSACTION(ttCREDENTIAL_CREATE, 58, CredentialCreate,
}))
/** This transaction type accept an Credential object */
TRANSACTION(ttCREDENTIAL_ACCEPT, 59, CredentialAccept,
Delegation::delegatable,
featureCredentials,
({
TRANSACTION(ttCREDENTIAL_ACCEPT, 59, CredentialAccept, Delegation::delegatable, ({
{sfIssuer, soeREQUIRED},
{sfCredentialType, soeREQUIRED},
}))
/** This transaction type delete an Credential object */
TRANSACTION(ttCREDENTIAL_DELETE, 60, CredentialDelete,
Delegation::delegatable,
featureCredentials,
({
TRANSACTION(ttCREDENTIAL_DELETE, 60, CredentialDelete, Delegation::delegatable, ({
{sfSubject, soeOPTIONAL},
{sfIssuer, soeOPTIONAL},
{sfCredentialType, soeREQUIRED},
}))
/** This transaction type modify a NFToken */
TRANSACTION(ttNFTOKEN_MODIFY, 61, NFTokenModify,
Delegation::delegatable,
featureDynamicNFT,
({
TRANSACTION(ttNFTOKEN_MODIFY, 61, NFTokenModify, Delegation::delegatable, ({
{sfNFTokenID, soeREQUIRED},
{sfOwner, soeOPTIONAL},
{sfURI, soeOPTIONAL},
}))
/** This transaction type creates or modifies a Permissioned Domain */
TRANSACTION(ttPERMISSIONED_DOMAIN_SET, 62, PermissionedDomainSet,
Delegation::delegatable,
featurePermissionedDomains,
({
TRANSACTION(ttPERMISSIONED_DOMAIN_SET, 62, PermissionedDomainSet, Delegation::delegatable, ({
{sfDomainID, soeOPTIONAL},
{sfAcceptedCredentials, soeREQUIRED},
}))
/** This transaction type deletes a Permissioned Domain */
TRANSACTION(ttPERMISSIONED_DOMAIN_DELETE, 63, PermissionedDomainDelete,
Delegation::delegatable,
featurePermissionedDomains,
({
TRANSACTION(ttPERMISSIONED_DOMAIN_DELETE, 63, PermissionedDomainDelete, Delegation::delegatable, ({
{sfDomainID, soeREQUIRED},
}))
/** This transaction type delegates authorized account specified permissions */
TRANSACTION(ttDELEGATE_SET, 64, DelegateSet,
Delegation::notDelegatable,
featurePermissionDelegation,
({
TRANSACTION(ttDELEGATE_SET, 64, DelegateSet, Delegation::notDelegatable, ({
{sfAuthorize, soeREQUIRED},
{sfPermissions, soeREQUIRED},
}))
/** This transaction creates a single asset vault. */
TRANSACTION(ttVAULT_CREATE, 65, VaultCreate,
Delegation::delegatable,
featureSingleAssetVault,
({
TRANSACTION(ttVAULT_CREATE, 65, VaultCreate, Delegation::delegatable, ({
{sfAsset, soeREQUIRED, soeMPTSupported},
{sfAssetsMaximum, soeOPTIONAL},
{sfMPTokenMetadata, soeOPTIONAL},
{sfDomainID, soeOPTIONAL},
{sfWithdrawalPolicy, soeOPTIONAL},
{sfData, soeOPTIONAL},
{sfScale, soeOPTIONAL},
}))
/** This transaction updates a single asset vault. */
TRANSACTION(ttVAULT_SET, 66, VaultSet,
Delegation::delegatable,
featureSingleAssetVault,
({
TRANSACTION(ttVAULT_SET, 66, VaultSet, Delegation::delegatable, ({
{sfVaultID, soeREQUIRED},
{sfAssetsMaximum, soeOPTIONAL},
{sfDomainID, soeOPTIONAL},
@@ -669,27 +494,18 @@ TRANSACTION(ttVAULT_SET, 66, VaultSet,
}))
/** This transaction deletes a single asset vault. */
TRANSACTION(ttVAULT_DELETE, 67, VaultDelete,
Delegation::delegatable,
featureSingleAssetVault,
({
TRANSACTION(ttVAULT_DELETE, 67, VaultDelete, Delegation::delegatable, ({
{sfVaultID, soeREQUIRED},
}))
/** This transaction trades assets for shares with a vault. */
TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit,
Delegation::delegatable,
featureSingleAssetVault,
({
TRANSACTION(ttVAULT_DEPOSIT, 68, VaultDeposit, Delegation::delegatable, ({
{sfVaultID, soeREQUIRED},
{sfAmount, soeREQUIRED, soeMPTSupported},
}))
/** This transaction trades shares for assets with a vault. */
TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
Delegation::delegatable,
featureSingleAssetVault,
({
TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw, Delegation::delegatable, ({
{sfVaultID, soeREQUIRED},
{sfAmount, soeREQUIRED, soeMPTSupported},
{sfDestination, soeOPTIONAL},
@@ -697,20 +513,14 @@ TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw,
}))
/** This transaction claws back tokens from a vault. */
TRANSACTION(ttVAULT_CLAWBACK, 70, VaultClawback,
Delegation::delegatable,
featureSingleAssetVault,
({
TRANSACTION(ttVAULT_CLAWBACK, 70, VaultClawback, Delegation::delegatable, ({
{sfVaultID, soeREQUIRED},
{sfHolder, soeREQUIRED},
{sfAmount, soeOPTIONAL, soeMPTSupported},
}))
/** This transaction type batches together transactions. */
TRANSACTION(ttBATCH, 71, Batch,
Delegation::notDelegatable,
featureBatch,
({
TRANSACTION(ttBATCH, 71, Batch, Delegation::notDelegatable, ({
{sfRawTransactions, soeREQUIRED},
{sfBatchSigners, soeOPTIONAL},
}))
@@ -719,10 +529,7 @@ TRANSACTION(ttBATCH, 71, Batch,
For details, see: https://xrpl.org/amendments.html
*/
TRANSACTION(ttAMENDMENT, 100, EnableAmendment,
Delegation::notDelegatable,
uint256{},
({
TRANSACTION(ttAMENDMENT, 100, EnableAmendment, Delegation::notDelegatable, ({
{sfLedgerSequence, soeREQUIRED},
{sfAmendment, soeREQUIRED},
}))
@@ -730,10 +537,7 @@ TRANSACTION(ttAMENDMENT, 100, EnableAmendment,
/** This system-generated transaction type is used to update the network's fee settings.
For details, see: https://xrpl.org/fee-voting.html
*/
TRANSACTION(ttFEE, 101, SetFee,
Delegation::notDelegatable,
uint256{},
({
TRANSACTION(ttFEE, 101, SetFee, Delegation::notDelegatable, ({
{sfLedgerSequence, soeOPTIONAL},
// Old version uses raw numbers
{sfBaseFee, soeOPTIONAL},
@@ -750,10 +554,7 @@ TRANSACTION(ttFEE, 101, SetFee,
For details, see: https://xrpl.org/negative-unl.html
*/
TRANSACTION(ttUNL_MODIFY, 102, UNLModify,
Delegation::notDelegatable,
uint256{},
({
TRANSACTION(ttUNL_MODIFY, 102, UNLModify, Delegation::notDelegatable, ({
{sfUNLModifyDisabling, soeREQUIRED},
{sfLedgerSequence, soeREQUIRED},
{sfUNLModifyValidator, soeREQUIRED},

View File

@@ -710,7 +710,7 @@ JSS(write_load); // out: GetCounts
#pragma push_macro("TRANSACTION")
#undef TRANSACTION
#define TRANSACTION(tag, value, name, ...) JSS(name);
#define TRANSACTION(tag, value, name, delegatable, fields) JSS(name);
#include <xrpl/protocol/detail/transactions.macro>

View File

@@ -18,7 +18,6 @@
//==============================================================================
#include <xrpl/beast/utility/instrumentation.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/Permissions.h>
#include <xrpl/protocol/jss.h>
@@ -26,24 +25,11 @@ namespace ripple {
Permission::Permission()
{
txFeatureMap_ = {
#pragma push_macro("TRANSACTION")
#undef TRANSACTION
#define TRANSACTION(tag, value, name, delegatable, amendment, ...) \
{value, amendment},
#include <xrpl/protocol/detail/transactions.macro>
#undef TRANSACTION
#pragma pop_macro("TRANSACTION")
};
delegatableTx_ = {
#pragma push_macro("TRANSACTION")
#undef TRANSACTION
#define TRANSACTION(tag, value, name, delegatable, ...) {value, delegatable},
#define TRANSACTION(tag, value, name, delegatable, fields) {value, delegatable},
#include <xrpl/protocol/detail/transactions.macro>
@@ -132,9 +118,7 @@ Permission::getGranularTxType(GranularPermissionType const& gpType) const
}
bool
Permission::isDelegatable(
std::uint32_t const& permissionValue,
Rules const& rules) const
Permission::isDelegatable(std::uint32_t const& permissionValue) const
{
auto const granularPermission =
getGranularName(static_cast<GranularPermissionType>(permissionValue));
@@ -142,27 +126,7 @@ Permission::isDelegatable(
// granular permissions are always allowed to be delegated
return true;
auto const txType = permissionToTxType(permissionValue);
auto const it = delegatableTx_.find(txType);
if (rules.enabled(fixDelegateV1_1))
{
if (it == delegatableTx_.end())
return false;
auto const txFeaturesIt = txFeatureMap_.find(txType);
XRPL_ASSERT(
txFeaturesIt != txFeatureMap_.end(),
"ripple::Permissions::isDelegatable : tx exists in txFeatureMap_");
// fixDelegateV1_1: Delegation is only allowed if the required amendment
// for the transaction is enabled. For transactions that do not require
// an amendment, delegation is always allowed.
if (txFeaturesIt->second != uint256{} &&
!rules.enabled(txFeaturesIt->second))
return false;
}
auto const it = delegatableTx_.find(permissionValue - 1);
if (it != delegatableTx_.end() && it->second == Delegation::notDelegatable)
return false;
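// Editorial walk-through (not part of the diff) of the rules-aware overload
// removed above; recall that a transaction-level permission value is the tx
// type plus one, and that txFeatureMap_ maps each tx type to its required
// amendment (uint256{} when none).
//   isDelegatable(ttORACLE_SET + 1, rules), featurePriceOracle disabled:
//     txFeatureMap_[ttORACLE_SET] == featurePriceOracle, not enabled
//       => returns false (not delegatable)
//   isDelegatable(ttPAYMENT + 1, rules):
//     txFeatureMap_[ttPAYMENT] == uint256{} (no required amendment)
//       => falls through to the delegatableTx_ check, and Payment is
//          Delegation::delegatable, so delegation is allowed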

View File

@@ -55,7 +55,7 @@ TxFormats::TxFormats()
#undef TRANSACTION
#define UNWRAP(...) __VA_ARGS__
#define TRANSACTION(tag, value, name, delegatable, amendment, fields) \
#define TRANSACTION(tag, value, name, delegatable, fields) \
add(jss::name, tag, UNWRAP fields, commonFields);
#include <xrpl/protocol/detail/transactions.macro>

View File

@@ -2442,7 +2442,8 @@ class AMMClawback_test : public beast::unit_test::suite
void
run() override
{
FeatureBitset const all = jtx::testable_amendments();
FeatureBitset const all{
jtx::testable_amendments() | fixAMMClawbackRounding};
testInvalidRequest();
testFeatureDisabled(all - featureAMMClawback);

View File

@@ -16,7 +16,6 @@
//==============================================================================
#include <test/jtx.h>
#include <test/jtx/CaptureLogs.h>
#include <test/jtx/delegate.h>
#include <xrpl/protocol/Feature.h>
@@ -140,12 +139,12 @@ class Delegate_test : public beast::unit_test::suite
}
void
testInvalidRequest(FeatureBitset features)
testInvalidRequest()
{
testcase("test invalid DelegateSet");
using namespace jtx;
Env env(*this, features);
Env env(*this);
Account gw{"gateway"};
Account alice{"alice"};
Account bob{"bob"};
@@ -217,17 +216,22 @@ class Delegate_test : public beast::unit_test::suite
}
// non-delegatable transaction
auto const res = features[fixDelegateV1_1] ? ter(temMALFORMED)
: ter(tecNO_PERMISSION);
{
env(delegate::set(gw, alice, {"SetRegularKey"}), res);
env(delegate::set(gw, alice, {"AccountSet"}), res);
env(delegate::set(gw, alice, {"SignerListSet"}), res);
env(delegate::set(gw, alice, {"DelegateSet"}), res);
env(delegate::set(gw, alice, {"EnableAmendment"}), res);
env(delegate::set(gw, alice, {"UNLModify"}), res);
env(delegate::set(gw, alice, {"SetFee"}), res);
env(delegate::set(gw, alice, {"Batch"}), res);
env(delegate::set(gw, alice, {"SetRegularKey"}),
ter(tecNO_PERMISSION));
env(delegate::set(gw, alice, {"AccountSet"}),
ter(tecNO_PERMISSION));
env(delegate::set(gw, alice, {"SignerListSet"}),
ter(tecNO_PERMISSION));
env(delegate::set(gw, alice, {"DelegateSet"}),
ter(tecNO_PERMISSION));
env(delegate::set(gw, alice, {"SetRegularKey"}),
ter(tecNO_PERMISSION));
env(delegate::set(gw, alice, {"EnableAmendment"}),
ter(tecNO_PERMISSION));
env(delegate::set(gw, alice, {"UNLModify"}), ter(tecNO_PERMISSION));
env(delegate::set(gw, alice, {"SetFee"}), ter(tecNO_PERMISSION));
env(delegate::set(gw, alice, {"Batch"}), ter(tecNO_PERMISSION));
}
}
@@ -532,7 +536,7 @@ class Delegate_test : public beast::unit_test::suite
}
void
testPaymentGranular(FeatureBitset features)
testPaymentGranular()
{
testcase("test payment granular");
using namespace jtx;
@@ -702,158 +706,6 @@ class Delegate_test : public beast::unit_test::suite
env.require(balance(alice, USD(50)));
BEAST_EXPECT(env.balance(bob, USD) == USD(0));
}
// disallow cross currency payment with only PaymentBurn/PaymentMint
// permission
{
Env env(*this, features);
Account const alice{"alice"};
Account const bob{"bob"};
Account const gw{"gateway"};
Account const carol{"carol"};
auto const USD = gw["USD"];
env.fund(XRP(10000), alice, bob, carol, gw);
env.close();
env.trust(USD(50000), alice);
env.trust(USD(50000), bob);
env.trust(USD(50000), carol);
env(pay(gw, alice, USD(10000)));
env(pay(gw, bob, USD(10000)));
env(pay(gw, carol, USD(10000)));
env.close();
auto const result = features[fixDelegateV1_1]
? static_cast<TER>(tecNO_DELEGATE_PERMISSION)
: static_cast<TER>(tesSUCCESS);
auto const offerCount = features[fixDelegateV1_1] ? 1 : 0;
// PaymentMint
{
env(offer(carol, XRP(100), USD(501)));
BEAST_EXPECT(expectOffers(env, carol, 1));
env(delegate::set(gw, bob, {"PaymentMint"}));
env.close();
// post-amendment: fixDelegateV1_1
// bob can not send cross currency payment on behalf of the gw,
// even with PaymentMint permission and gw being the issuer.
env(pay(gw, alice, USD(5000)),
path(~USD),
sendmax(XRP(1001)),
txflags(tfPartialPayment),
delegate::as(bob),
ter(result));
BEAST_EXPECT(expectOffers(env, carol, offerCount));
// succeed with direct payment
env(pay(gw, alice, USD(100)), delegate::as(bob));
env.close();
}
// PaymentBurn
{
env(offer(bob, XRP(100), USD(501)));
BEAST_EXPECT(expectOffers(env, bob, 1));
env(delegate::set(alice, bob, {"PaymentBurn"}));
env.close();
// post-amendment: fixDelegateV1_1
// bob can not send cross currency payment on behalf of alice,
// even with PaymentBurn permission and gw being the issuer.
env(pay(alice, gw, USD(5000)),
path(~USD),
sendmax(XRP(1001)),
txflags(tfPartialPayment),
delegate::as(bob),
ter(result));
BEAST_EXPECT(expectOffers(env, bob, offerCount));
// succeed with direct payment
env(pay(alice, gw, USD(100)), delegate::as(bob));
env.close();
}
}
// PaymentMint and PaymentBurn for MPT
{
std::string logs;
Env env(*this, features, std::make_unique<CaptureLogs>(&logs));
Account const alice{"alice"};
Account const bob{"bob"};
Account const gw{"gateway"};
MPTTester mpt(env, gw, {.holders = {alice, bob}});
mpt.create(
{.ownerCount = 1, .holderCount = 0, .flags = tfMPTCanTransfer});
mpt.authorize({.account = alice});
mpt.authorize({.account = bob});
auto const MPT = mpt["MPT"];
env(pay(gw, alice, MPT(500)));
env(pay(gw, bob, MPT(500)));
env.close();
auto aliceMPT = env.balance(alice, MPT);
auto bobMPT = env.balance(bob, MPT);
// PaymentMint
{
env(delegate::set(gw, bob, {"PaymentMint"}));
env.close();
if (!features[fixDelegateV1_1])
{
// pre-amendment: PaymentMint is not supported for MPT
env(pay(gw, alice, MPT(50)),
delegate::as(bob),
ter(tefEXCEPTION));
}
else
{
env(pay(gw, alice, MPT(50)), delegate::as(bob));
BEAST_EXPECT(env.balance(alice, MPT) == aliceMPT + MPT(50));
BEAST_EXPECT(env.balance(bob, MPT) == bobMPT);
aliceMPT = env.balance(alice, MPT);
}
}
// PaymentBurn
{
env(delegate::set(alice, bob, {"PaymentBurn"}));
env.close();
if (!features[fixDelegateV1_1])
{
// pre-amendment: PaymentBurn is not supported for MPT
env(pay(alice, gw, MPT(50)),
delegate::as(bob),
ter(tefEXCEPTION));
}
else
{
env(pay(alice, gw, MPT(50)), delegate::as(bob));
BEAST_EXPECT(env.balance(alice, MPT) == aliceMPT - MPT(50));
BEAST_EXPECT(env.balance(bob, MPT) == bobMPT);
aliceMPT = env.balance(alice, MPT);
}
}
// Payment transaction for MPT is allowed for both pre and post
// amendment
{
env(delegate::set(
alice, bob, {"PaymentBurn", "PaymentMint", "Payment"}));
env.close();
env(pay(alice, gw, MPT(50)), delegate::as(bob));
BEAST_EXPECT(env.balance(alice, MPT) == aliceMPT - MPT(50));
BEAST_EXPECT(env.balance(bob, MPT) == bobMPT);
aliceMPT = env.balance(alice, MPT);
env(pay(alice, bob, MPT(100)), delegate::as(bob));
BEAST_EXPECT(env.balance(alice, MPT) == aliceMPT - MPT(100));
BEAST_EXPECT(env.balance(bob, MPT) == bobMPT + MPT(100));
}
}
}
void
@@ -1624,216 +1476,18 @@ class Delegate_test : public beast::unit_test::suite
BEAST_EXPECT(env.balance(edward) == edwardBalance);
}
void
testPermissionValue(FeatureBitset features)
{
testcase("test permission value");
using namespace jtx;
Env env(*this, features);
Account alice{"alice"};
Account bob{"bob"};
env.fund(XRP(100000), alice, bob);
env.close();
auto buildRequest = [&](auto value) -> Json::Value {
Json::Value jv;
jv[jss::TransactionType] = jss::DelegateSet;
jv[jss::Account] = alice.human();
jv[sfAuthorize.jsonName] = bob.human();
Json::Value permissionsJson(Json::arrayValue);
Json::Value permissionValue;
permissionValue[sfPermissionValue.jsonName] = value;
Json::Value permissionObj;
permissionObj[sfPermission.jsonName] = permissionValue;
permissionsJson.append(permissionObj);
jv[sfPermissions.jsonName] = permissionsJson;
return jv;
};
// invalid permission value.
// neither granular permission nor transaction level permission
for (auto value : {0, 100000, 54321})
{
auto jv = buildRequest(value);
if (!features[fixDelegateV1_1])
env(jv);
else
env(jv, ter(temMALFORMED));
}
}
void
testTxReqireFeatures(FeatureBitset features)
{
testcase("test delegate disabled tx");
using namespace jtx;
// map of tx and required feature.
// non-delegatable tx are not included.
// NFTokenMint, NFTokenBurn, NFTokenCreateOffer, NFTokenCancelOffer,
// NFTokenAcceptOffer are not included, they are tested separately.
std::unordered_map<std::string, uint256> txRequiredFeatures{
{"TicketCreate", featureTicketBatch},
{"CheckCreate", featureChecks},
{"CheckCash", featureChecks},
{"CheckCancel", featureChecks},
{"DepositPreauth", featureDepositPreauth},
{"Clawback", featureClawback},
{"AMMClawback", featureAMMClawback},
{"AMMCreate", featureAMM},
{"AMMDeposit", featureAMM},
{"AMMWithdraw", featureAMM},
{"AMMVote", featureAMM},
{"AMMBid", featureAMM},
{"AMMDelete", featureAMM},
{"XChainCreateClaimID", featureXChainBridge},
{"XChainCommit", featureXChainBridge},
{"XChainClaim", featureXChainBridge},
{"XChainAccountCreateCommit", featureXChainBridge},
{"XChainAddClaimAttestation", featureXChainBridge},
{"XChainAddAccountCreateAttestation", featureXChainBridge},
{"XChainModifyBridge", featureXChainBridge},
{"XChainCreateBridge", featureXChainBridge},
{"DIDSet", featureDID},
{"DIDDelete", featureDID},
{"OracleSet", featurePriceOracle},
{"OracleDelete", featurePriceOracle},
{"LedgerStateFix", fixNFTokenPageLinks},
{"MPTokenIssuanceCreate", featureMPTokensV1},
{"MPTokenIssuanceDestroy", featureMPTokensV1},
{"MPTokenIssuanceSet", featureMPTokensV1},
{"MPTokenAuthorize", featureMPTokensV1},
{"CredentialCreate", featureCredentials},
{"CredentialAccept", featureCredentials},
{"CredentialDelete", featureCredentials},
{"NFTokenModify", featureDynamicNFT},
{"PermissionedDomainSet", featurePermissionedDomains},
{"PermissionedDomainDelete", featurePermissionedDomains},
{"VaultCreate", featureSingleAssetVault},
{"VaultSet", featureSingleAssetVault},
{"VaultDelete", featureSingleAssetVault},
{"VaultDeposit", featureSingleAssetVault},
{"VaultWithdraw", featureSingleAssetVault},
{"VaultClawback", featureSingleAssetVault}};
// fixDelegateV1_1 post-amendment: can not delegate tx if any
// required feature disabled.
{
auto txAmendmentDisabled = [&](FeatureBitset features,
std::string const& tx) {
BEAST_EXPECT(txRequiredFeatures.contains(tx));
Env env(*this, features - txRequiredFeatures[tx]);
Account const alice{"alice"};
Account const bob{"bob"};
env.fund(XRP(100000), alice, bob);
env.close();
if (!features[fixDelegateV1_1])
env(delegate::set(alice, bob, {tx}));
else
env(delegate::set(alice, bob, {tx}), ter(temMALFORMED));
};
for (auto const& tx : txRequiredFeatures)
txAmendmentDisabled(features, tx.first);
}
// if all the required features in txRequiredFeatures are enabled, will
// succeed
{
auto txAmendmentEnabled = [&](std::string const& tx) {
Env env(*this, features);
Account const alice{"alice"};
Account const bob{"bob"};
env.fund(XRP(100000), alice, bob);
env.close();
env(delegate::set(alice, bob, {tx}));
};
for (auto const& tx : txRequiredFeatures)
txAmendmentEnabled(tx.first);
}
// NFTokenMint, NFTokenBurn, NFTokenCreateOffer, NFTokenCancelOffer, and
// NFTokenAcceptOffer are tested separately. Since
// featureNonFungibleTokensV1_1 includes the functionality of
// featureNonFungibleTokensV1, fixNFTokenNegOffer, and fixNFTokenDirV1,
// both featureNonFungibleTokensV1_1 and featureNonFungibleTokensV1 need
// to be disabled to block these transactions from being delegated.
{
Env env(
*this,
features - featureNonFungibleTokensV1 -
featureNonFungibleTokensV1_1);
Account const alice{"alice"};
Account const bob{"bob"};
env.fund(XRP(100000), alice, bob);
env.close();
for (auto const tx :
{"NFTokenMint",
"NFTokenBurn",
"NFTokenCreateOffer",
"NFTokenCancelOffer",
"NFTokenAcceptOffer"})
{
if (!features[fixDelegateV1_1])
env(delegate::set(alice, bob, {tx}));
else
env(delegate::set(alice, bob, {tx}), ter(temMALFORMED));
}
}
// NFTokenMint, NFTokenBurn, NFTokenCreateOffer, NFTokenCancelOffer, and
// NFTokenAcceptOffer are allowed to be delegated if either
// featureNonFungibleTokensV1 or featureNonFungibleTokensV1_1 is
// enabled.
{
for (auto const feature :
{featureNonFungibleTokensV1, featureNonFungibleTokensV1_1})
{
Env env(*this, features - feature);
Account const alice{"alice"};
Account const bob{"bob"};
env.fund(XRP(100000), alice, bob);
env.close();
for (auto const tx :
{"NFTokenMint",
"NFTokenBurn",
"NFTokenCreateOffer",
"NFTokenCancelOffer",
"NFTokenAcceptOffer"})
env(delegate::set(alice, bob, {tx}));
}
}
}
void
run() override
{
FeatureBitset const all = jtx::testable_amendments();
testFeatureDisabled();
testDelegateSet();
testInvalidRequest(all);
testInvalidRequest(all - fixDelegateV1_1);
testInvalidRequest();
testReserve();
testFee();
testSequence();
testAccountDelete();
testDelegateTransaction();
testPaymentGranular(all);
testPaymentGranular(all - fixDelegateV1_1);
testPaymentGranular();
testTrustSetGranular();
testAccountSetGranular();
testMPTokenIssuanceSetGranular();
@@ -1841,10 +1495,6 @@ class Delegate_test : public beast::unit_test::suite
testSingleSignBadSecret();
testMultiSign();
testMultiSignQuorumNotMet();
testPermissionValue(all);
testPermissionValue(all - fixDelegateV1_1);
testTxReqireFeatures(all);
testTxReqireFeatures(all - fixDelegateV1_1);
}
};
BEAST_DEFINE_TESTSUITE(Delegate, app, ripple);

File diff suppressed because it is too large

View File

@@ -720,30 +720,6 @@ public:
BEAST_EXPECT(res2 == STAmount{7518784});
}
void
test_truncate()
{
BEAST_EXPECT(Number(25, +1).truncate() == Number(250, 0));
BEAST_EXPECT(Number(25, 0).truncate() == Number(25, 0));
BEAST_EXPECT(Number(25, -1).truncate() == Number(2, 0));
BEAST_EXPECT(Number(25, -2).truncate() == Number(0, 0));
BEAST_EXPECT(Number(99, -2).truncate() == Number(0, 0));
BEAST_EXPECT(Number(-25, +1).truncate() == Number(-250, 0));
BEAST_EXPECT(Number(-25, 0).truncate() == Number(-25, 0));
BEAST_EXPECT(Number(-25, -1).truncate() == Number(-2, 0));
BEAST_EXPECT(Number(-25, -2).truncate() == Number(0, 0));
BEAST_EXPECT(Number(-99, -2).truncate() == Number(0, 0));
BEAST_EXPECT(Number(0, 0).truncate() == Number(0, 0));
BEAST_EXPECT(Number(0, 30000).truncate() == Number(0, 0));
BEAST_EXPECT(Number(0, -30000).truncate() == Number(0, 0));
BEAST_EXPECT(Number(100, -30000).truncate() == Number(0, 0));
BEAST_EXPECT(Number(100, -30000).truncate() == Number(0, 0));
BEAST_EXPECT(Number(-100, -30000).truncate() == Number(0, 0));
BEAST_EXPECT(Number(-100, -30000).truncate() == Number(0, 0));
}
void
run() override
{
@@ -764,7 +740,6 @@ public:
test_stream();
test_inc_dec();
test_toSTAmount();
test_truncate();
}
};

View File

@@ -74,10 +74,6 @@ public:
/** @} */
/** Create an Account from an account ID. Should only be used when the
* secret key is unavailable, such as for pseudo-accounts. */
explicit Account(std::string name, AccountID const& id);
enum AcctStringType { base58Seed, other };
/** Create an account from a base58 seed string. Throws on invalid seed. */
Account(AcctStringType stringType, std::string base58SeedStr);

View File

@@ -86,14 +86,6 @@ Account::Account(AcctStringType stringType, std::string base58SeedStr)
{
}
Account::Account(std::string name, AccountID const& id)
: Account(name, randomKeyPair(KeyType::secp256k1), privateCtorTag{})
{
// override the randomly generated values
id_ = id;
human_ = toBase58(id_);
}
IOU
Account::operator[](std::string const& s) const
{

View File

@@ -131,32 +131,6 @@ class Simulate_test : public beast::unit_test::suite
std::to_string(env.current()->txCount()));
}
void
testTxJsonMetadataField(
jtx::Env& env,
Json::Value const& tx,
std::function<void(
Json::Value const&,
Json::Value const&,
Json::Value const&)> const& validate,
Json::Value const& expectedMetadataKey,
bool testSerialized = true)
{
env.close();
Json::Value params;
params[jss::tx_json] = tx;
validate(
env.rpc("json", "simulate", to_string(params)),
tx,
expectedMetadataKey);
validate(env.rpc("simulate", to_string(tx)), tx, expectedMetadataKey);
BEAST_EXPECTS(
env.current()->txCount() == 0,
std::to_string(env.current()->txCount()));
}
Json::Value
getJsonMetadata(Json::Value txResult) const
{
@@ -1212,83 +1186,6 @@ class Simulate_test : public beast::unit_test::suite
}
}
void
testSuccessfulTransactionAdditionalMetadata()
{
testcase("Successful transaction with additional metadata");
using namespace jtx;
Env env{*this, envconfig([&](std::unique_ptr<Config> cfg) {
cfg->NETWORK_ID = 1025;
return cfg;
})};
Account const alice("alice");
env.fund(XRP(10000), alice);
env.close();
{
auto validateOutput = [&](Json::Value const& resp,
Json::Value const& tx,
Json::Value const& expectedMetadataKey) {
auto result = resp[jss::result];
BEAST_EXPECT(result[jss::engine_result] == "tesSUCCESS");
BEAST_EXPECT(result[jss::engine_result_code] == 0);
BEAST_EXPECT(
result[jss::engine_result_message] ==
"The simulated transaction would have been applied.");
if (BEAST_EXPECT(
result.isMember(jss::meta) ||
result.isMember(jss::meta_blob)))
{
Json::Value const metadata = getJsonMetadata(result);
BEAST_EXPECT(metadata[sfTransactionIndex.jsonName] == 0);
BEAST_EXPECT(
metadata[sfTransactionResult.jsonName] == "tesSUCCESS");
BEAST_EXPECT(
metadata.isMember(expectedMetadataKey.asString()));
}
};
{
Json::Value tx;
tx[jss::Account] = env.master.human();
tx[jss::TransactionType] = jss::Payment;
tx[sfDestination] = alice.human();
tx[sfAmount] = "100";
// test delivered amount
testTxJsonMetadataField(
env, tx, validateOutput, jss::delivered_amount);
}
{
Json::Value tx;
tx[jss::Account] = env.master.human();
tx[jss::TransactionType] = jss::NFTokenMint;
tx[sfNFTokenTaxon] = 1;
// test nft synthetic
testTxJsonMetadataField(
env, tx, validateOutput, jss::nftoken_id);
}
{
Json::Value tx;
tx[jss::Account] = env.master.human();
tx[jss::TransactionType] = jss::MPTokenIssuanceCreate;
// test mpt issuance id
testTxJsonMetadataField(
env, tx, validateOutput, jss::mpt_issuance_id);
}
}
}
public:
void
run() override
@@ -1305,7 +1202,6 @@ public:
testMultisignedBadPubKey();
testDeleteExpiredCredentials();
testSuccessfulTransactionNetworkID();
testSuccessfulTransactionAdditionalMetadata();
}
};

View File

@@ -166,7 +166,7 @@ private:
int addFlags,
std::function<bool(void)> const& continueCallback);
// Compute the liquidity for a path. Return tesSUCCESS if it has enough
// Compute the liquidity for a path. Return tesSUCCESS if it has has enough
// liquidity to be worth keeping, otherwise an error.
TER
getPathLiquidity(

View File

@@ -23,6 +23,7 @@
#include <xrpl/basics/Log.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/TxFlags.h>
#include <xrpl/protocol/st.h>
namespace ripple {
@@ -50,11 +51,6 @@ DelegateSet::preflight(PreflightContext const& ctx)
{
if (!permissionSet.insert(permission[sfPermissionValue]).second)
return temMALFORMED;
if (ctx.rules.enabled(fixDelegateV1_1) &&
!Permission::getInstance().isDelegatable(
permission[sfPermissionValue], ctx.rules))
return temMALFORMED;
}
return preflight2(ctx);
@@ -72,21 +68,9 @@ DelegateSet::preclaim(PreclaimContext const& ctx)
auto const& permissions = ctx.tx.getFieldArray(sfPermissions);
for (auto const& permission : permissions)
{
if (!ctx.view.rules().enabled(fixDelegateV1_1) &&
!Permission::getInstance().isDelegatable(
permission[sfPermissionValue], ctx.view.rules()))
{
// Before fixDelegateV1_1:
// - The check was performed during preclaim.
// - Transactions from amendments not yet enabled could still be
// delegated.
//
// After fixDelegateV1_1:
// - The check is performed during preflight.
// - Transactions from amendments not yet enabled can no longer be
// delegated.
auto const permissionValue = permission[sfPermissionValue];
if (!Permission::getInstance().isDelegatable(permissionValue))
return tecNO_PERMISSION;
}
}
return tesSUCCESS;

View File

@@ -1510,12 +1510,6 @@ ValidMPTIssuance::finalize(
if (tx.getTxnType() == ttESCROW_FINISH)
return true;
if ((tx.getTxnType() == ttVAULT_CLAWBACK ||
tx.getTxnType() == ttVAULT_WITHDRAW) &&
mptokensDeleted_ == 1 && mptokensCreated_ == 0 &&
mptIssuancesCreated_ == 0 && mptIssuancesDeleted_ == 0)
return true;
}
if (mptIssuancesCreated_ != 0)

View File

@@ -265,33 +265,8 @@ Payment::checkPermission(ReadView const& view, STTx const& tx)
loadGranularPermission(sle, ttPAYMENT, granularPermissions);
auto const& dstAmount = tx.getFieldAmount(sfAmount);
// post-amendment: disallow cross currency payments for PaymentMint and
// PaymentBurn
if (view.rules().enabled(fixDelegateV1_1))
{
auto const& amountAsset = dstAmount.asset();
if (tx.isFieldPresent(sfSendMax) &&
tx[sfSendMax].asset() != amountAsset)
return tecNO_DELEGATE_PERMISSION;
if (granularPermissions.contains(PaymentMint) && !isXRP(amountAsset) &&
amountAsset.getIssuer() == tx[sfAccount])
return tesSUCCESS;
if (granularPermissions.contains(PaymentBurn) && !isXRP(amountAsset) &&
amountAsset.getIssuer() == tx[sfDestination])
return tesSUCCESS;
return tecNO_DELEGATE_PERMISSION;
}
// Calling dstAmount.issue() in the next line would throw if it holds MPT.
// That exception would be caught in preclaim and returned as tefEXCEPTION.
// This check is just a cleaner, more explicit way to get the same result.
if (dstAmount.holds<MPTIssue>())
return tefEXCEPTION;
auto const& amountIssue = dstAmount.issue();
if (granularPermissions.contains(PaymentMint) && !isXRP(amountIssue) &&
amountIssue.account == tx[sfAccount])
return tesSUCCESS;
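(A hedged illustration, with hypothetical accounts: if Alice delegates PaymentMint to Bob, Bob may submit a Payment on Alice's behalf whose Amount is an IOU issued by Alice, e.g. USD/Alice; with PaymentBurn the Amount must be an IOU issued by the Destination. Under fixDelegateV1_1 the same delegated Payment is additionally rejected with tecNO_DELEGATE_PERMISSION when it is cross-currency, i.e. when SendMax names a different asset than Amount.)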

View File

@@ -21,10 +21,8 @@
#include <xrpld/ledger/View.h>
#include <xrpl/beast/utility/instrumentation.h>
#include <xrpl/protocol/AccountID.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/MPTIssue.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STAmount.h>
#include <xrpl/protocol/STNumber.h>
#include <xrpl/protocol/TER.h>
@@ -153,7 +151,7 @@ VaultClawback::doApply()
if (!vault)
return tefINTERNAL; // LCOV_EXCL_LINE
auto const mptIssuanceID = *((*vault)[sfShareMPTID]);
auto const mptIssuanceID = (*vault)[sfShareMPTID];
auto const sleIssuance = view().read(keylet::mptIssuance(mptIssuanceID));
if (!sleIssuance)
{
@@ -163,169 +161,68 @@ VaultClawback::doApply()
// LCOV_EXCL_STOP
}
Asset const vaultAsset = vault->at(sfAsset);
Asset const asset = vault->at(sfAsset);
STAmount const amount = [&]() -> STAmount {
auto const maybeAmount = tx[~sfAmount];
if (maybeAmount)
return *maybeAmount;
return {sfAmount, vaultAsset, 0};
return {sfAmount, asset, 0};
}();
XRPL_ASSERT(
amount.asset() == vaultAsset,
amount.asset() == asset,
"ripple::VaultClawback::doApply : matching asset");
auto assetsAvailable = vault->at(sfAssetsAvailable);
auto assetsTotal = vault->at(sfAssetsTotal);
[[maybe_unused]] auto const lossUnrealized = vault->at(sfLossUnrealized);
XRPL_ASSERT(
lossUnrealized <= (assetsTotal - assetsAvailable),
"ripple::VaultClawback::doApply : loss and assets do balance");
AccountID holder = tx[sfHolder];
MPTIssue const share{mptIssuanceID};
STAmount sharesDestroyed = {share};
STAmount assetsRecovered;
try
STAmount assets, shares;
if (amount == beast::zero)
{
if (amount == beast::zero)
{
sharesDestroyed = accountHolds(
view(),
holder,
share,
FreezeHandling::fhIGNORE_FREEZE,
AuthHandling::ahIGNORE_AUTH,
j_);
auto const maybeAssets =
sharesToAssetsWithdraw(vault, sleIssuance, sharesDestroyed);
if (!maybeAssets)
return tecINTERNAL; // LCOV_EXCL_LINE
assetsRecovered = *maybeAssets;
}
else
{
assetsRecovered = amount;
{
auto const maybeShares =
assetsToSharesWithdraw(vault, sleIssuance, assetsRecovered);
if (!maybeShares)
return tecINTERNAL; // LCOV_EXCL_LINE
sharesDestroyed = *maybeShares;
}
auto const maybeAssets =
sharesToAssetsWithdraw(vault, sleIssuance, sharesDestroyed);
if (!maybeAssets)
return tecINTERNAL; // LCOV_EXCL_LINE
assetsRecovered = *maybeAssets;
}
// Clamp to maximum.
if (assetsRecovered > *assetsAvailable)
{
assetsRecovered = *assetsAvailable;
// Note, it is important to truncate the number of shares, otherwise
// the corresponding assets might breach the AssetsAvailable
{
auto const maybeShares = assetsToSharesWithdraw(
vault, sleIssuance, assetsRecovered, TruncateShares::yes);
if (!maybeShares)
return tecINTERNAL; // LCOV_EXCL_LINE
sharesDestroyed = *maybeShares;
}
auto const maybeAssets =
sharesToAssetsWithdraw(vault, sleIssuance, sharesDestroyed);
if (!maybeAssets)
return tecINTERNAL; // LCOV_EXCL_LINE
assetsRecovered = *maybeAssets;
if (assetsRecovered > *assetsAvailable)
{
// LCOV_EXCL_START
JLOG(j_.error())
<< "VaultClawback: invalid rounding of shares.";
return tecINTERNAL;
// LCOV_EXCL_STOP
}
}
Asset share = *(*vault)[sfShareMPTID];
shares = accountHolds(
view(),
holder,
share,
FreezeHandling::fhIGNORE_FREEZE,
AuthHandling::ahIGNORE_AUTH,
j_);
assets = sharesToAssetsWithdraw(vault, sleIssuance, shares);
}
catch (std::overflow_error const&)
else
{
// It's easy to hit this exception from Number with large enough Scale
// so we avoid spamming the log and only use debug here.
JLOG(j_.debug()) //
<< "VaultClawback: overflow error with"
<< " scale=" << (int)vault->at(sfScale).value() //
<< ", assetsTotal=" << vault->at(sfAssetsTotal).value()
<< ", sharesTotal=" << sleIssuance->at(sfOutstandingAmount)
<< ", amount=" << amount.value();
return tecPATH_DRY;
assets = amount;
shares = assetsToSharesWithdraw(vault, sleIssuance, assets);
}
if (sharesDestroyed == beast::zero)
return tecPRECISION_LOSS;
// Clamp to maximum.
Number maxAssets = *vault->at(sfAssetsAvailable);
if (assets > maxAssets)
{
assets = maxAssets;
shares = assetsToSharesWithdraw(vault, sleIssuance, assets);
}
assetsTotal -= assetsRecovered;
assetsAvailable -= assetsRecovered;
if (shares == beast::zero)
return tecINSUFFICIENT_FUNDS;
vault->at(sfAssetsTotal) -= assets;
vault->at(sfAssetsAvailable) -= assets;
view().update(vault);
auto const& vaultAccount = vault->at(sfAccount);
// Transfer shares from holder to vault.
if (auto const ter = accountSend(
view(),
holder,
vaultAccount,
sharesDestroyed,
j_,
WaiveTransferFee::Yes);
!isTesSuccess(ter))
if (auto ter = accountSend(
view(), holder, vaultAccount, shares, j_, WaiveTransferFee::Yes))
return ter;
// Try to remove MPToken for shares, if the holder balance is zero. Vault
// pseudo-account will never set lsfMPTAuthorized, so we ignore flags.
// Keep MPToken if holder is the vault owner.
if (holder != vault->at(sfOwner))
{
if (auto const ter =
removeEmptyHolding(view(), holder, sharesDestroyed.asset(), j_);
isTesSuccess(ter))
{
JLOG(j_.debug()) //
<< "VaultClawback: removed empty MPToken for vault shares"
<< " MPTID=" << to_string(mptIssuanceID) //
<< " account=" << toBase58(holder);
}
else if (ter != tecHAS_OBLIGATIONS)
{
// LCOV_EXCL_START
JLOG(j_.error()) //
<< "VaultClawback: failed to remove MPToken for vault shares"
<< " MPTID=" << to_string(mptIssuanceID) //
<< " account=" << toBase58(holder) //
<< " with result: " << transToken(ter);
return ter;
// LCOV_EXCL_STOP
}
// else quietly ignore, holder balance is not zero
}
// Transfer assets from vault to issuer.
if (auto const ter = accountSend(
view(),
vaultAccount,
account_,
assetsRecovered,
j_,
WaiveTransferFee::Yes);
!isTesSuccess(ter))
if (auto ter = accountSend(
view(), vaultAccount, account_, assets, j_, WaiveTransferFee::Yes))
return ter;
// Sanity check
if (accountHolds(
view(),
vaultAccount,
assetsRecovered.asset(),
assets.asset(),
FreezeHandling::fhIGNORE_FREEZE,
AuthHandling::ahIGNORE_AUTH,
j_) < beast::zero)
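A rough numeric sketch of why the clamp truncates shares (values are illustrative and assume LossUnrealized is zero): with AssetsAvailable = 37, AssetsTotal = 100 and 7 outstanding shares, clamping assetsRecovered to 37 gives 7 × 37 / 100 ≈ 2.59 shares. Rounding to nearest would yield 3 shares, which convert back to 100 × 3 / 7 ≈ 42.9 assets and would breach AssetsAvailable (tripping the "invalid rounding of shares" check); truncating to 2 shares converts back to ≈ 28.6 assets, which stays within the limit and becomes the amount actually moved.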

View File

@@ -25,10 +25,8 @@
#include <xrpl/protocol/Asset.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/Issue.h>
#include <xrpl/protocol/MPTIssue.h>
#include <xrpl/protocol/Protocol.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STNumber.h>
#include <xrpl/protocol/TER.h>
#include <xrpl/protocol/TxFlags.h>
@@ -86,16 +84,6 @@ VaultCreate::preflight(PreflightContext const& ctx)
return temMALFORMED;
}
if (auto const scale = ctx.tx[~sfScale])
{
auto const vaultAsset = ctx.tx[sfAsset];
if (vaultAsset.holds<MPTIssue>() || vaultAsset.native())
return temMALFORMED;
if (scale > vaultMaximumIOUScale)
return temMALFORMED;
}
return preflight2(ctx);
}
@@ -109,8 +97,8 @@ VaultCreate::calculateBaseFee(ReadView const& view, STTx const& tx)
TER
VaultCreate::preclaim(PreclaimContext const& ctx)
{
auto const vaultAsset = ctx.tx[sfAsset];
auto const account = ctx.tx[sfAccount];
auto vaultAsset = ctx.tx[sfAsset];
auto account = ctx.tx[sfAccount];
if (vaultAsset.native())
; // No special checks for XRP
@@ -160,7 +148,7 @@ VaultCreate::preclaim(PreclaimContext const& ctx)
return tecOBJECT_NOT_FOUND;
}
auto const sequence = ctx.tx.getSeqValue();
auto sequence = ctx.tx.getSeqValue();
if (auto const accountId = pseudoAccountAddress(
ctx.view, keylet::vault(account, sequence).key);
accountId == beast::zero)
@@ -177,8 +165,8 @@ VaultCreate::doApply()
// we can consider downgrading them to `tef` or `tem`.
auto const& tx = ctx_.tx;
auto const sequence = tx.getSeqValue();
auto const owner = view().peek(keylet::account(account_));
auto sequence = tx.getSeqValue();
auto owner = view().peek(keylet::account(account_));
if (owner == nullptr)
return tefINTERNAL; // LCOV_EXCL_LINE
@@ -202,10 +190,6 @@ VaultCreate::doApply()
!isTesSuccess(ter))
return ter;
std::uint8_t const scale = (asset.holds<MPTIssue>() || asset.native())
? 0
: ctx_.tx[~sfScale].value_or(vaultDefaultIOUScale);
auto txFlags = tx.getFlags();
std::uint32_t mptFlags = 0;
if ((txFlags & tfVaultShareNonTransferable) == 0)
@@ -225,13 +209,12 @@ VaultCreate::doApply()
.account = pseudoId->value(),
.sequence = 1,
.flags = mptFlags,
.assetScale = scale,
.metadata = tx[~sfMPTokenMetadata],
.domainId = tx[~sfDomainID],
});
if (!maybeShare)
return maybeShare.error(); // LCOV_EXCL_LINE
auto const& mptIssuanceID = *maybeShare;
auto& share = *maybeShare;
vault->setFieldIssue(sfAsset, STIssue{sfAsset, asset});
vault->at(sfFlags) = txFlags & tfVaultPrivate;
@@ -244,7 +227,7 @@ VaultCreate::doApply()
// Leave default values for AssetTotal and AssetAvailable, both zero.
if (auto value = tx[~sfAssetsMaximum])
vault->at(sfAssetsMaximum) = *value;
vault->at(sfShareMPTID) = mptIssuanceID;
vault->at(sfShareMPTID) = share;
if (auto value = tx[~sfData])
vault->at(sfData) = *value;
// Required field, default to vaultStrategyFirstComeFirstServe
@@ -252,31 +235,9 @@ VaultCreate::doApply()
vault->at(sfWithdrawalPolicy) = *value;
else
vault->at(sfWithdrawalPolicy) = vaultStrategyFirstComeFirstServe;
if (scale)
vault->at(sfScale) = scale;
// No `LossUnrealized`.
view().insert(vault);
// Explicitly create MPToken for the vault owner
if (auto const err = authorizeMPToken(
view(), mPriorBalance, mptIssuanceID, account_, ctx_.journal);
!isTesSuccess(err))
return err;
// If the vault is private, set the authorized flag for the vault owner
if (txFlags & tfVaultPrivate)
{
if (auto const err = authorizeMPToken(
view(),
mPriorBalance,
mptIssuanceID,
pseudoId,
ctx_.journal,
{},
account_);
!isTesSuccess(err))
return err;
}
return tesSUCCESS;
}

View File

@@ -21,7 +21,6 @@
#include <xrpld/ledger/View.h>
#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/MPTIssue.h>
#include <xrpl/protocol/STNumber.h>
#include <xrpl/protocol/TER.h>
#include <xrpl/protocol/TxFlags.h>
@@ -129,8 +128,7 @@ VaultDelete::doApply()
// Destroy the share issuance. Do not use MPTokenIssuanceDestroy for this,
// no special logic needed. First run a few checks, duplicated from preclaim.
auto const shareMPTID = *vault->at(sfShareMPTID);
auto const mpt = view().peek(keylet::mptIssuance(shareMPTID));
auto const mpt = view().peek(keylet::mptIssuance(vault->at(sfShareMPTID)));
if (!mpt)
{
// LCOV_EXCL_START
@@ -139,24 +137,6 @@ VaultDelete::doApply()
// LCOV_EXCL_STOP
}
// Try to remove MPToken for vault shares for the vault owner if it exists.
if (auto const mptoken = view().peek(keylet::mptoken(shareMPTID, account_)))
{
if (auto const ter =
removeEmptyHolding(view(), account_, MPTIssue(shareMPTID), j_);
!isTesSuccess(ter))
{
// LCOV_EXCL_START
JLOG(j_.error()) //
<< "VaultDelete: failed to remove vault owner's MPToken"
<< " MPTID=" << to_string(shareMPTID) //
<< " account=" << toBase58(account_) //
<< " with result: " << transToken(ter);
return ter;
// LCOV_EXCL_STOP
}
}
if (!view().dirRemove(
keylet::ownerDir(pseudoID), (*mpt)[sfOwnerNode], mpt->key(), false))
{

View File

@@ -26,7 +26,6 @@
#include <xrpl/protocol/Indexes.h>
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/MPTIssue.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STNumber.h>
#include <xrpl/protocol/TER.h>
#include <xrpl/protocol/TxFlags.h>
@@ -139,7 +138,7 @@ VaultDeposit::preclaim(PreclaimContext const& ctx)
if (isFrozen(ctx.view, account, vaultShare))
return tecLOCKED;
if (vault->isFlag(lsfVaultPrivate) && account != vault->at(sfOwner))
if (vault->isFlag(tfVaultPrivate) && account != vault->at(sfOwner))
{
auto const maybeDomainID = sleIssuance->at(~sfDomainID);
// Since this is a private vault and the account is not its owner, we
@@ -184,7 +183,7 @@ VaultDeposit::doApply()
if (!vault)
return tefINTERNAL; // LCOV_EXCL_LINE
auto const amount = ctx_.tx[sfAmount];
auto const assets = ctx_.tx[sfAmount];
// Make sure the depositor can hold shares.
auto const mptIssuanceID = (*vault)[sfShareMPTID];
auto const sleIssuance = view().read(keylet::mptIssuance(mptIssuanceID));
@@ -198,14 +197,14 @@ VaultDeposit::doApply()
auto const& vaultAccount = vault->at(sfAccount);
// Note, vault owner is always authorized
if (vault->isFlag(lsfVaultPrivate) && account_ != vault->at(sfOwner))
if ((vault->getFlags() & tfVaultPrivate) && account_ != vault->at(sfOwner))
{
if (auto const err = enforceMPTokenAuthorization(
ctx_.view(), mptIssuanceID, account_, mPriorBalance, j_);
!isTesSuccess(err))
return err;
}
else // !vault->isFlag(lsfVaultPrivate) || account_ == vault->at(sfOwner)
else
{
// No authorization needed, but must ensure there is MPToken
auto sleMpt = view().read(keylet::mptoken(mptIssuanceID, account_));
@@ -222,12 +221,8 @@ VaultDeposit::doApply()
}
// If the vault is private, set the authorized flag for the vault owner
if (vault->isFlag(lsfVaultPrivate))
if (vault->isFlag(tfVaultPrivate))
{
// This follows from the reverse of the outer enclosing if condition
XRPL_ASSERT(
account_ == vault->at(sfOwner),
"ripple::VaultDeposit::doApply : account is owner");
if (auto const err = authorizeMPToken(
view(),
mPriorBalance, // priorBalance
@@ -242,52 +237,14 @@ VaultDeposit::doApply()
}
}
STAmount sharesCreated = {vault->at(sfShareMPTID)}, assetsDeposited;
try
{
// Compute exchange before transferring any amounts.
{
auto const maybeShares =
assetsToSharesDeposit(vault, sleIssuance, amount);
if (!maybeShares)
return tecINTERNAL; // LCOV_EXCL_LINE
sharesCreated = *maybeShares;
}
if (sharesCreated == beast::zero)
return tecPRECISION_LOSS;
auto const maybeAssets =
sharesToAssetsDeposit(vault, sleIssuance, sharesCreated);
if (!maybeAssets)
return tecINTERNAL; // LCOV_EXCL_LINE
else if (*maybeAssets > amount)
{
// LCOV_EXCL_START
JLOG(j_.error()) << "VaultDeposit: would take more than offered.";
return tecINTERNAL;
// LCOV_EXCL_STOP
}
assetsDeposited = *maybeAssets;
}
catch (std::overflow_error const&)
{
// It's easy to hit this exception from Number with large enough Scale
// so we avoid spamming the log and only use debug here.
JLOG(j_.debug()) //
<< "VaultDeposit: overflow error with"
<< " scale=" << (int)vault->at(sfScale).value() //
<< ", assetsTotal=" << vault->at(sfAssetsTotal).value()
<< ", sharesTotal=" << sleIssuance->at(sfOutstandingAmount)
<< ", amount=" << amount;
return tecPATH_DRY;
}
// Compute exchange before transferring any amounts.
auto const shares = assetsToSharesDeposit(vault, sleIssuance, assets);
XRPL_ASSERT(
sharesCreated.asset() != assetsDeposited.asset(),
shares.asset() != assets.asset(),
"ripple::VaultDeposit::doApply : assets are not shares");
vault->at(sfAssetsTotal) += assetsDeposited;
vault->at(sfAssetsAvailable) += assetsDeposited;
vault->at(sfAssetsTotal) += assets;
vault->at(sfAssetsAvailable) += assets;
view().update(vault);
// A deposit must not push the vault over its limit.
@@ -296,21 +253,15 @@ VaultDeposit::doApply()
return tecLIMIT_EXCEEDED;
// Transfer assets from depositor to vault.
if (auto const ter = accountSend(
view(),
account_,
vaultAccount,
assetsDeposited,
j_,
WaiveTransferFee::Yes);
!isTesSuccess(ter))
if (auto ter = accountSend(
view(), account_, vaultAccount, assets, j_, WaiveTransferFee::Yes))
return ter;
// Sanity check
if (accountHolds(
view(),
account_,
assetsDeposited.asset(),
assets.asset(),
FreezeHandling::fhIGNORE_FREEZE,
AuthHandling::ahIGNORE_AUTH,
j_) < beast::zero)
@@ -322,14 +273,8 @@ VaultDeposit::doApply()
}
// Transfer shares from vault to depositor.
if (auto const ter = accountSend(
view(),
vaultAccount,
account_,
sharesCreated,
j_,
WaiveTransferFee::Yes);
!isTesSuccess(ter))
if (auto ter = accountSend(
view(), vaultAccount, account_, shares, j_, WaiveTransferFee::Yes))
return ter;
return tesSUCCESS;

View File

@@ -108,7 +108,7 @@ VaultSet::preclaim(PreclaimContext const& ctx)
if (auto const domain = ctx.tx[~sfDomainID])
{
// We can only set domain if private flag was originally set
if (!vault->isFlag(lsfVaultPrivate))
if ((vault->getFlags() & tfVaultPrivate) == 0)
{
JLOG(ctx.j.debug()) << "VaultSet: vault is not private";
return tecNO_PERMISSION;
@@ -175,9 +175,9 @@ VaultSet::doApply()
{
if (*domainId != beast::zero)
{
// In VaultSet::preclaim we enforce that lsfVaultPrivate must have
// In VaultSet::preclaim we enforce that tfVaultPrivate must have
// been set in the vault. We currently do not support making such a
// vault public (i.e. removal of lsfVaultPrivate flag). The
// vault public (i.e. removal of tfVaultPrivate flag). The
// sfDomainID flag must be set in the MPTokenIssuance object and can
// be freely updated.
sleIssuance->setFieldH256(sfDomainID, *domainId);

View File

@@ -177,7 +177,7 @@ VaultWithdraw::doApply()
if (!vault)
return tefINTERNAL; // LCOV_EXCL_LINE
auto const mptIssuanceID = *((*vault)[sfShareMPTID]);
auto const mptIssuanceID = (*vault)[sfShareMPTID];
auto const sleIssuance = view().read(keylet::mptIssuance(mptIssuanceID));
if (!sleIssuance)
{
@@ -192,57 +192,24 @@ VaultWithdraw::doApply()
// to deposit into it, and this means you are also indefinitely authorized
// to withdraw from it.
auto const amount = ctx_.tx[sfAmount];
Asset const vaultAsset = vault->at(sfAsset);
MPTIssue const share{mptIssuanceID};
STAmount sharesRedeemed = {share};
STAmount assetsWithdrawn;
try
auto amount = ctx_.tx[sfAmount];
auto const asset = vault->at(sfAsset);
auto const share = MPTIssue(mptIssuanceID);
STAmount shares, assets;
if (amount.asset() == asset)
{
if (amount.asset() == vaultAsset)
{
// Fixed assets, variable shares.
{
auto const maybeShares =
assetsToSharesWithdraw(vault, sleIssuance, amount);
if (!maybeShares)
return tecINTERNAL; // LCOV_EXCL_LINE
sharesRedeemed = *maybeShares;
}
if (sharesRedeemed == beast::zero)
return tecPRECISION_LOSS;
auto const maybeAssets =
sharesToAssetsWithdraw(vault, sleIssuance, sharesRedeemed);
if (!maybeAssets)
return tecINTERNAL; // LCOV_EXCL_LINE
assetsWithdrawn = *maybeAssets;
}
else if (amount.asset() == share)
{
// Fixed shares, variable assets.
sharesRedeemed = amount;
auto const maybeAssets =
sharesToAssetsWithdraw(vault, sleIssuance, sharesRedeemed);
if (!maybeAssets)
return tecINTERNAL; // LCOV_EXCL_LINE
assetsWithdrawn = *maybeAssets;
}
else
return tefINTERNAL; // LCOV_EXCL_LINE
// Fixed assets, variable shares.
assets = amount;
shares = assetsToSharesWithdraw(vault, sleIssuance, assets);
}
catch (std::overflow_error const&)
else if (amount.asset() == share)
{
// It's easy to hit this exception from Number with large enough Scale
// so we avoid spamming the log and only use debug here.
JLOG(j_.debug()) //
<< "VaultWithdraw: overflow error with"
<< " scale=" << (int)vault->at(sfScale).value() //
<< ", assetsTotal=" << vault->at(sfAssetsTotal).value()
<< ", sharesTotal=" << sleIssuance->at(sfOutstandingAmount)
<< ", amount=" << amount.value();
return tecPATH_DRY;
// Fixed shares, variable assets.
shares = amount;
assets = sharesToAssetsWithdraw(vault, sleIssuance, shares);
}
else
return tefINTERNAL; // LCOV_EXCL_LINE
if (accountHolds(
view(),
@@ -250,72 +217,31 @@ VaultWithdraw::doApply()
share,
FreezeHandling::fhZERO_IF_FROZEN,
AuthHandling::ahIGNORE_AUTH,
j_) < sharesRedeemed)
j_) < shares)
{
JLOG(j_.debug()) << "VaultWithdraw: account doesn't hold enough shares";
return tecINSUFFICIENT_FUNDS;
}
auto assetsAvailable = vault->at(sfAssetsAvailable);
auto assetsTotal = vault->at(sfAssetsTotal);
[[maybe_unused]] auto const lossUnrealized = vault->at(sfLossUnrealized);
XRPL_ASSERT(
lossUnrealized <= (assetsTotal - assetsAvailable),
"ripple::VaultWithdraw::doApply : loss and assets do balance");
// The vault must have enough assets on hand. The vault may hold assets
// that it has already pledged. That is why we look at AssetAvailable
// instead of the pseudo-account balance.
if (*assetsAvailable < assetsWithdrawn)
// The vault must have enough assets on hand. The vault may hold assets that
// it has already pledged. That is why we look at AssetAvailable instead of
// the pseudo-account balance.
if (*vault->at(sfAssetsAvailable) < assets)
{
JLOG(j_.debug()) << "VaultWithdraw: vault doesn't hold enough assets";
return tecINSUFFICIENT_FUNDS;
}
assetsTotal -= assetsWithdrawn;
assetsAvailable -= assetsWithdrawn;
vault->at(sfAssetsTotal) -= assets;
vault->at(sfAssetsAvailable) -= assets;
view().update(vault);
auto const& vaultAccount = vault->at(sfAccount);
// Transfer shares from depositor to vault.
if (auto const ter = accountSend(
view(),
account_,
vaultAccount,
sharesRedeemed,
j_,
WaiveTransferFee::Yes);
!isTesSuccess(ter))
if (auto ter = accountSend(
view(), account_, vaultAccount, shares, j_, WaiveTransferFee::Yes))
return ter;
// Try to remove MPToken for shares, if the account balance is zero. Vault
// pseudo-account will never set lsfMPTAuthorized, so we ignore flags.
// Keep MPToken if holder is the vault owner.
if (account_ != vault->at(sfOwner))
{
if (auto const ter = removeEmptyHolding(
view(), account_, sharesRedeemed.asset(), j_);
isTesSuccess(ter))
{
JLOG(j_.debug()) //
<< "VaultWithdraw: removed empty MPToken for vault shares"
<< " MPTID=" << to_string(mptIssuanceID) //
<< " account=" << toBase58(account_);
}
else if (ter != tecHAS_OBLIGATIONS)
{
// LCOV_EXCL_START
JLOG(j_.error()) //
<< "VaultWithdraw: failed to remove MPToken for vault shares"
<< " MPTID=" << to_string(mptIssuanceID) //
<< " account=" << toBase58(account_) //
<< " with result: " << transToken(ter);
return ter;
// LCOV_EXCL_STOP
}
// else quietly ignore, account balance is not zero
}
auto const dstAcct = [&]() -> AccountID {
if (ctx_.tx.isFieldPresent(sfDestination))
return ctx_.tx.getAccountID(sfDestination);
@@ -323,21 +249,15 @@ VaultWithdraw::doApply()
}();
// Transfer assets from vault to depositor or destination account.
if (auto const ter = accountSend(
view(),
vaultAccount,
dstAcct,
assetsWithdrawn,
j_,
WaiveTransferFee::Yes);
!isTesSuccess(ter))
if (auto ter = accountSend(
view(), vaultAccount, dstAcct, assets, j_, WaiveTransferFee::Yes))
return ter;
// Sanity check
if (accountHolds(
view(),
vaultAccount,
assetsWithdrawn.asset(),
assets.asset(),
FreezeHandling::fhIGNORE_FREEZE,
AuthHandling::ahIGNORE_AUTH,
j_) < beast::zero)
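In short, VaultWithdraw accepts sfAmount in either denomination: stated in the vault's asset it fixes the assets to withdraw and derives the shares to redeem; stated in the share MPT it fixes the shares to redeem and derives the assets; anything else falls through to tefINTERNAL. As a hypothetical example for a USD vault, an Amount of 100 USD burns however many shares currently correspond to 100 USD, while an Amount of 50 share units withdraws whatever those 50 shares are currently worth.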

View File

@@ -97,8 +97,8 @@ with_txn_type(TxType txnType, F&& f)
#pragma push_macro("TRANSACTION")
#undef TRANSACTION
#define TRANSACTION(tag, value, name, ...) \
case tag: \
#define TRANSACTION(tag, value, name, delegatable, fields) \
case tag: \
return f.template operator()<name>();
#include <xrpl/protocol/detail/transactions.macro>
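As a sketch of how this X-macro dispatch works (the entry below is hypothetical; the real entries live in transactions.macro): an entry along the lines of

    TRANSACTION(ttPAYMENT, 0, Payment, Delegation::delegatable, ({ ... }))

expands inside the switch of with_txn_type to

    case ttPAYMENT:
        return f.template operator()<Payment>();

so the callable f is invoked with the concrete transactor type as its template argument; both macro forms shown in the hunk produce the same case label and differ only in how the trailing parameters are declared.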

View File

@@ -912,41 +912,28 @@ deleteAMMTrustLine(
std::optional<AccountID> const& ammAccountID,
beast::Journal j);
// From the perspective of a vault, return the number of shares to give the
// depositor when they deposit a fixed amount of assets. Since shares are MPT
// this number is integral and always truncated in this calculation.
[[nodiscard]] std::optional<STAmount>
// From the perspective of a vault,
// return the number of shares to give the depositor
// when they deposit a fixed amount of assets.
[[nodiscard]] STAmount
assetsToSharesDeposit(
std::shared_ptr<SLE const> const& vault,
std::shared_ptr<SLE const> const& issuance,
STAmount const& assets);
// From the perspective of a vault, return the number of assets to take from
// depositor when they receive a fixed amount of shares. Note, since shares are
// MPT, they are always an integral number.
[[nodiscard]] std::optional<STAmount>
sharesToAssetsDeposit(
std::shared_ptr<SLE const> const& vault,
std::shared_ptr<SLE const> const& issuance,
STAmount const& shares);
enum class TruncateShares : bool { no = false, yes = true };
// From the perspective of a vault, return the number of shares to demand from
// the depositor when they ask to withdraw a fixed amount of assets. Since
// shares are MPT this number is integral, and it will be rounded to nearest
// unless explicitly requested to be truncated instead.
[[nodiscard]] std::optional<STAmount>
// From the perspective of a vault,
// return the number of shares to demand from the depositor
// when they ask to withdraw a fixed amount of assets.
[[nodiscard]] STAmount
assetsToSharesWithdraw(
std::shared_ptr<SLE const> const& vault,
std::shared_ptr<SLE const> const& issuance,
STAmount const& assets,
TruncateShares truncate = TruncateShares::no);
STAmount const& assets);
// From the perspective of a vault, return the number of assets to give the
// depositor when they redeem a fixed amount of shares. Note, since shares are
// MPT, they are always an integral number.
[[nodiscard]] std::optional<STAmount>
// From the perspective of a vault,
// return the number of assets to give the depositor
// when they redeem a fixed amount of shares.
[[nodiscard]] STAmount
sharesToAssetsWithdraw(
std::shared_ptr<SLE const> const& vault,
std::shared_ptr<SLE const> const& issuance,

View File

@@ -2793,113 +2793,58 @@ rippleCredit(
saAmount.asset().value());
}
[[nodiscard]] std::optional<STAmount>
[[nodiscard]] STAmount
assetsToSharesDeposit(
std::shared_ptr<SLE const> const& vault,
std::shared_ptr<SLE const> const& issuance,
STAmount const& assets)
{
XRPL_ASSERT(
!assets.negative(),
"ripple::assetsToSharesDeposit : non-negative assets");
XRPL_ASSERT(
assets.asset() == vault->at(sfAsset),
"ripple::assetsToSharesDeposit : assets and vault match");
if (assets.negative() || assets.asset() != vault->at(sfAsset))
return std::nullopt; // LCOV_EXCL_LINE
Number const assetTotal = vault->at(sfAssetsTotal);
STAmount shares{vault->at(sfShareMPTID)};
Number assetTotal = vault->at(sfAssetsTotal);
STAmount shares{vault->at(sfShareMPTID), static_cast<Number>(assets)};
if (assetTotal == 0)
return STAmount{
shares.asset(),
Number(assets.mantissa(), assets.exponent() + vault->at(sfScale))
.truncate()};
Number const shareTotal = issuance->at(sfOutstandingAmount);
shares = (shareTotal * (assets / assetTotal)).truncate();
return shares;
Number shareTotal = issuance->at(sfOutstandingAmount);
shares = shareTotal * (assets / assetTotal);
return shares;
}
[[nodiscard]] std::optional<STAmount>
sharesToAssetsDeposit(
std::shared_ptr<SLE const> const& vault,
std::shared_ptr<SLE const> const& issuance,
STAmount const& shares)
{
XRPL_ASSERT(
!shares.negative(),
"ripple::sharesToAssetsDeposit : non-negative shares");
XRPL_ASSERT(
shares.asset() == vault->at(sfShareMPTID),
"ripple::sharesToAssetsDeposit : shares and vault match");
if (shares.negative() || shares.asset() != vault->at(sfShareMPTID))
return std::nullopt; // LCOV_EXCL_LINE
Number const assetTotal = vault->at(sfAssetsTotal);
STAmount assets{vault->at(sfAsset)};
if (assetTotal == 0)
return STAmount{
assets.asset(),
shares.mantissa(),
shares.exponent() - vault->at(sfScale),
false};
Number const shareTotal = issuance->at(sfOutstandingAmount);
assets = assetTotal * (shares / shareTotal);
return assets;
}
[[nodiscard]] std::optional<STAmount>
[[nodiscard]] STAmount
assetsToSharesWithdraw(
std::shared_ptr<SLE const> const& vault,
std::shared_ptr<SLE const> const& issuance,
STAmount const& assets,
TruncateShares truncate)
STAmount const& assets)
{
XRPL_ASSERT(
!assets.negative(),
"ripple::assetsToSharesDeposit : non-negative assets");
XRPL_ASSERT(
assets.asset() == vault->at(sfAsset),
"ripple::assetsToSharesWithdraw : assets and vault match");
if (assets.negative() || assets.asset() != vault->at(sfAsset))
return std::nullopt; // LCOV_EXCL_LINE
Number assetTotal = vault->at(sfAssetsTotal);
assetTotal -= vault->at(sfLossUnrealized);
STAmount shares{vault->at(sfShareMPTID)};
if (assetTotal == 0)
return shares;
Number const shareTotal = issuance->at(sfOutstandingAmount);
Number result = shareTotal * (assets / assetTotal);
if (truncate == TruncateShares::yes)
result = result.truncate();
shares = result;
Number shareTotal = issuance->at(sfOutstandingAmount);
shares = shareTotal * (assets / assetTotal);
return shares;
}
[[nodiscard]] std::optional<STAmount>
[[nodiscard]] STAmount
sharesToAssetsWithdraw(
std::shared_ptr<SLE const> const& vault,
std::shared_ptr<SLE const> const& issuance,
STAmount const& shares)
{
XRPL_ASSERT(
!shares.negative(),
"ripple::sharesToAssetsDeposit : non-negative shares");
XRPL_ASSERT(
shares.asset() == vault->at(sfShareMPTID),
"ripple::sharesToAssetsWithdraw : shares and vault match");
if (shares.negative() || shares.asset() != vault->at(sfShareMPTID))
return std::nullopt; // LCOV_EXCL_LINE
Number assetTotal = vault->at(sfAssetsTotal);
assetTotal -= vault->at(sfLossUnrealized);
STAmount assets{vault->at(sfAsset)};
if (assetTotal == 0)
return assets;
Number const shareTotal = issuance->at(sfOutstandingAmount);
Number shareTotal = issuance->at(sfOutstandingAmount);
assets = assetTotal * (shares / shareTotal);
return assets;
}
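A small worked example of the proportional exchange, using illustrative values and following the variant that carries sfScale and truncation: with AssetsTotal = 1000, LossUnrealized = 0, OutstandingAmount = 250 shares and Scale = 2, assetsToSharesDeposit of 100 assets returns truncate(250 × 100 / 1000) = 25 shares, and sharesToAssetsWithdraw of those 25 shares returns 1000 × 25 / 250 = 100 assets. For the first deposit into an empty vault (AssetsTotal == 0) the deposit instead scales the asset amount by 10^Scale, so 1.5 assets become truncate(1.5 × 10^2) = 150 shares.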

View File

@@ -39,96 +39,53 @@ namespace RPC {
// The Concise Transaction ID provides a way to identify a transaction
// that includes which network the transaction was submitted to.
/**
* @brief Encodes ledger sequence, transaction index, and network ID into a CTID
* string.
*
* @param ledgerSeq Ledger sequence number (max 0x0FFF'FFFF).
* @param txnIndex Transaction index within the ledger (max 0xFFFF).
* @param networkID Network identifier (max 0xFFFF).
* @return Optional CTID string in uppercase hexadecimal, or std::nullopt if
* inputs are out of range.
*/
inline std::optional<std::string>
encodeCTID(uint32_t ledgerSeq, uint32_t txnIndex, uint32_t networkID) noexcept
{
constexpr uint32_t maxLedgerSeq = 0x0FFF'FFFF;
constexpr uint32_t maxTxnIndex = 0xFFFF;
constexpr uint32_t maxNetworkID = 0xFFFF;
if (ledgerSeq > maxLedgerSeq || txnIndex > maxTxnIndex ||
networkID > maxNetworkID)
return std::nullopt;
if (ledgerSeq > 0x0FFF'FFFF || txnIndex > 0xFFFF || networkID > 0xFFFF)
return {};
uint64_t ctidValue =
((0xC000'0000ULL + static_cast<uint64_t>(ledgerSeq)) << 32) |
((static_cast<uint64_t>(txnIndex) << 16) | networkID);
((0xC000'0000ULL + static_cast<uint64_t>(ledgerSeq)) << 32) +
(static_cast<uint64_t>(txnIndex) << 16) + networkID;
std::stringstream buffer;
buffer << std::hex << std::uppercase << std::setfill('0') << std::setw(16)
<< ctidValue;
return buffer.str();
return {buffer.str()};
}
/**
* @brief Decodes a CTID string or integer into its component parts.
*
* @tparam T Type of the CTID input (string, string_view, char*, integral).
* @param ctid CTID value to decode.
* @return Optional tuple of (ledgerSeq, txnIndex, networkID), or std::nullopt
* if invalid.
*/
template <typename T>
inline std::optional<std::tuple<uint32_t, uint16_t, uint16_t>>
decodeCTID(T const ctid) noexcept
{
uint64_t ctidValue = 0;
uint64_t ctidValue{0};
if constexpr (
std::is_same_v<T, std::string> || std::is_same_v<T, std::string_view> ||
std::is_same_v<T, char*> || std::is_same_v<T, char const*>)
std::is_same_v<T, std::string> || std::is_same_v<T, char*> ||
std::is_same_v<T, char const*> || std::is_same_v<T, std::string_view>)
{
std::string const ctidString(ctid);
if (ctidString.size() != 16)
return std::nullopt;
if (ctidString.length() != 16)
return {};
static boost::regex const hexRegex("^[0-9A-Fa-f]{16}$");
if (!boost::regex_match(ctidString, hexRegex))
return std::nullopt;
if (!boost::regex_match(ctidString, boost::regex("^[0-9A-Fa-f]+$")))
return {};
try
{
ctidValue = std::stoull(ctidString, nullptr, 16);
}
// LCOV_EXCL_START
catch (...)
{
// should be impossible to hit given the length/regex check
return std::nullopt;
}
// LCOV_EXCL_STOP
ctidValue = std::stoull(ctidString, nullptr, 16);
}
else if constexpr (std::is_integral_v<T>)
{
ctidValue = static_cast<uint64_t>(ctid);
}
ctidValue = ctid;
else
{
return std::nullopt;
}
return {};
// Validate CTID prefix.
constexpr uint64_t ctidPrefixMask = 0xF000'0000'0000'0000ULL;
constexpr uint64_t ctidPrefix = 0xC000'0000'0000'0000ULL;
if ((ctidValue & ctidPrefixMask) != ctidPrefix)
return std::nullopt;
if ((ctidValue & 0xF000'0000'0000'0000ULL) != 0xC000'0000'0000'0000ULL)
return {};
uint32_t ledgerSeq = static_cast<uint32_t>((ctidValue >> 32) & 0x0FFF'FFFF);
uint16_t txnIndex = static_cast<uint16_t>((ctidValue >> 16) & 0xFFFF);
uint16_t networkID = static_cast<uint16_t>(ctidValue & 0xFFFF);
return std::make_tuple(ledgerSeq, txnIndex, networkID);
uint32_t ledger_seq = (ctidValue >> 32) & 0xFFFF'FFFUL;
uint16_t txn_index = (ctidValue >> 16) & 0xFFFFU;
uint16_t network_id = ctidValue & 0xFFFFU;
return {{ledger_seq, txn_index, network_id}};
}
} // namespace RPC
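A brief usage sketch (the values are made up for illustration; both variants above return optionals):

    // ledgerSeq 0x01234567, txnIndex 2, networkID 1
    auto const ctid = RPC::encodeCTID(0x01234567, 2, 1);
    // *ctid == "C123456700020001": a leading 0xC nibble, then 28 bits of
    // ledger sequence, 16 bits of transaction index, 16 bits of network ID.
    auto const parts = RPC::decodeCTID("C123456700020001");
    // *parts == {0x01234567, 2, 1}; input that fails the length, hex or
    // 0xC-prefix checks yields an empty optional instead.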

View File

@@ -24,13 +24,10 @@
#include <xrpld/app/misc/TxQ.h>
#include <xrpld/app/tx/apply.h>
#include <xrpld/rpc/Context.h>
#include <xrpld/rpc/DeliveredAmount.h>
#include <xrpld/rpc/GRPCHandlers.h>
#include <xrpld/rpc/MPTokenIssuanceID.h>
#include <xrpld/rpc/detail/TransactionSign.h>
#include <xrpl/protocol/ErrorCodes.h>
#include <xrpl/protocol/NFTSyntheticSerializer.h>
#include <xrpl/protocol/RPCErr.h>
#include <xrpl/protocol/STParsedJSON.h>
#include <xrpl/resource/Fees.h>
@@ -275,17 +272,6 @@ simulateTxn(RPC::JsonContext& context, std::shared_ptr<Transaction> transaction)
else
{
jvResult[jss::meta] = result.metadata->getJson(JsonOptions::none);
RPC::insertDeliveredAmount(
jvResult[jss::meta],
view,
transaction->getSTransaction(),
*result.metadata);
RPC::insertNFTSyntheticInJson(
jvResult, transaction->getSTransaction(), *result.metadata);
RPC::insertMPTokenIssuanceID(
jvResult[jss::meta],
transaction->getSTransaction(),
*result.metadata);
}
}