style: Use pre-commit tool and add simple config (#2029)

I started with really simple pre-commit hooks and will add more on top.

Important files:
- `.pre-commit-config.yaml` - the config for pre-commit
- `.github/workflows/pre-commit.yml` - runs pre-commit hooks in branches
and `develop`
- `.github/workflows/pre-commit-autoupdate.yml` - auto-updates pre-commit
hooks once a month
This commit is contained in:
Ayaz Salikhov
2025-04-24 17:59:43 +01:00
committed by GitHub
parent 99580a2602
commit 9c92a2b51b
66 changed files with 805 additions and 733 deletions

View File

@@ -20,7 +20,7 @@ if [ -z "$DOXYGEN" ]; then
WARNING
-----------------------------------------------------------------------------
'doxygen' is required to check documentation.
'doxygen' is required to check documentation.
Please install it for next time.
Your changes may fail to pass CI once pushed.

View File

@@ -4,4 +4,3 @@
source .githooks/check-format
source .githooks/check-docs

View File

@@ -5,7 +5,7 @@ inputs:
description: Build target name
default: all
substract_threads:
description: An option for the action get_number_of_threads. See get_number_of_threads
description: An option for the action get_number_of_threads. See get_number_of_threads
required: true
default: '0'
runs:

View File

@@ -38,6 +38,8 @@ runs:
password: ${{ env.GITHUB_TOKEN }}
- uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #v3.6.0
with:
cache-image: false
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 #v3.10.0
- uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 #v5.7.0
@@ -63,4 +65,3 @@ runs:
repository: ${{ inputs.image_name }}
short-description: ${{ inputs.description }}
readme-filepath: ${{ inputs.directory }}/README.md

View File

@@ -45,7 +45,7 @@ runs:
run: |
cd build
conan install .. -of . -b $BUILD_OPTION -s build_type=${{ inputs.build_type }} -o clio:static="${STATIC_OPTION}" -o clio:tests=True -o clio:integration_tests="${INTEGRATION_TESTS_OPTION}" -o clio:lint=False -o clio:coverage="${CODE_COVERAGE}" --profile ${{ inputs.conan_profile }}
- name: Run cmake
shell: bash
env:

View File

@@ -2,7 +2,7 @@ name: Get number of threads
description: Determines number of threads to use on macOS and Linux
inputs:
substract_threads:
description: How many threads to substract from the calculated number
description: How many threads to substract from the calculated number
required: true
default: '0'
outputs:

View File

@@ -20,14 +20,14 @@ runs:
run: |
# Uninstall any existing cmake
brew uninstall cmake --ignore-dependencies || true
# Download specific cmake formula
FORMULA_URL="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e46db74e74a8c1650b38b1da222284ce1ec5ce/Formula/c/cmake.rb"
FORMULA_EXPECTED_SHA256="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"
mkdir -p /tmp/homebrew-formula
curl -s -L $FORMULA_URL -o /tmp/homebrew-formula/cmake.rb
# Verify the downloaded formula
ACTUAL_SHA256=$(shasum -a 256 /tmp/homebrew-formula/cmake.rb | cut -d ' ' -f 1)
if [ "$ACTUAL_SHA256" != "$FORMULA_EXPECTED_SHA256" ]; then
@@ -36,7 +36,7 @@ runs:
echo "Actual: $ACTUAL_SHA256"
exit 1
fi
# Install cmake from the specific formula with force flag
brew install --force /tmp/homebrew-formula/cmake.rb
@@ -70,5 +70,3 @@ runs:
run: |
mkdir -p $CCACHE_DIR
mkdir -p $CONAN_USER_HOME/.conan

View File

@@ -55,5 +55,3 @@ runs:
with:
path: ${{ inputs.ccache_dir }}
key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-${{ inputs.conan_profile }}-develop-${{ steps.git_common_ancestor.outputs.commit }}

View File

@@ -48,5 +48,3 @@ runs:
else
echo "Conan-non-prod is available"
fi

View File

@@ -23,7 +23,7 @@ jobs:
run: |
./.githooks/check-format --diff
shell: bash
check_docs:
name: Check documentation
runs-on: ubuntu-latest
@@ -132,10 +132,10 @@ jobs:
name: Check Config Description
needs: build
runs-on: heavy
container:
container:
image: rippleci/clio_ci:latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
name: clio_server_Linux_Release_gcc
@@ -163,8 +163,3 @@ jobs:
fi
rm -f ${configDescriptionFile}
exit 0

View File

@@ -75,7 +75,7 @@ on:
jobs:
build:
name: Build ${{ inputs.container != '' && 'in container' || 'natively' }}
runs-on: ${{ inputs.runs_on }}
runs-on: ${{ inputs.runs_on }}
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
steps:
@@ -137,7 +137,7 @@ jobs:
- name: Strip unit_tests
if: ${{ inputs.unit_tests && !inputs.code_coverage && inputs.sanitizer == 'false' }}
run: strip build/clio_tests
- name: Strip integration_tests
if: ${{ inputs.integration_tests && !inputs.code_coverage }}
run: strip build/clio_integration_tests
@@ -148,14 +148,14 @@ jobs:
with:
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
path: build/clio_server
- name: Upload clio_tests
if: ${{ inputs.unit_tests && !inputs.code_coverage }}
uses: actions/upload-artifact@v4
with:
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
path: build/clio_tests
- name: Upload clio_integration_tests
if: ${{ inputs.integration_tests && !inputs.code_coverage }}
uses: actions/upload-artifact@v4
@@ -184,7 +184,7 @@ jobs:
uses: ./.github/actions/code_coverage
upload_coverage_report:
if: ${{ inputs.code_coverage }}
if: ${{ inputs.code_coverage }}
name: Codecov
needs: build
uses: ./.github/workflows/upload_coverage_report.yml

View File

@@ -25,14 +25,14 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v4
with:
with:
lfs: true
- name: Build docs
run: |
mkdir -p build_docs && cd build_docs
cmake ../docs && cmake --build . --target docs
- name: Setup Pages
uses: actions/configure-pages@v5
@@ -41,7 +41,7 @@ jobs:
with:
path: build_docs/html
name: docs-develop
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

View File

@@ -54,13 +54,13 @@ jobs:
build_type: Release
container:
image: rippleci/clio_ci:latest
integration_tests: true
integration_tests: true
- os: heavy
conan_profile: gcc
build_type: Debug
container:
image: rippleci/clio_ci:latest
integration_tests: true
integration_tests: true
runs-on: [self-hosted, "${{ matrix.os }}"]
container: ${{ matrix.container }}
@@ -68,9 +68,9 @@ jobs:
scylladb:
image: ${{ (matrix.integration_tests) && 'scylladb/scylla' || '' }}
options: >-
--health-cmd "cqlsh -e 'describe cluster'"
--health-interval 10s
--health-timeout 5s
--health-cmd "cqlsh -e 'describe cluster'"
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
@@ -86,7 +86,7 @@ jobs:
run: |
chmod +x ./clio_tests
./clio_tests
- uses: actions/download-artifact@v4
with:
name: clio_integration_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
@@ -94,7 +94,7 @@ jobs:
# To be enabled back once docker in mac runner arrives
# https://github.com/XRPLF/clio/issues/1400
- name: Run clio_integration_tests
if: matrix.integration_tests
if: matrix.integration_tests
run: |
chmod +x ./clio_integration_tests
./clio_integration_tests --backend_host=scylladb

View File

@@ -0,0 +1,39 @@
name: Pre-commit auto-update

on:
  # every first day of the month
  schedule:
    - cron: "0 0 1 * *"
  # on demand
  workflow_dispatch:

jobs:
  auto-update:
    runs-on: ubuntu-latest

    # Needed so the workflow can push the update branch and open a PR.
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          # Quoted so the value is always parsed as a string, not a number.
          python-version: "3.x"

      - run: pip install pre-commit
      - run: pre-commit autoupdate
      - run: pre-commit run --all-files

      # Open the PR even when the `pre-commit run` step above failed:
      # hooks that auto-fix files exit non-zero after modifying them.
      - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        if: always()
        env:
          GH_REPO: ${{ github.repository }}
          GH_TOKEN: ${{ github.token }}
        with:
          branch: update/pre-commit-hooks
          title: Update pre-commit hooks
          commit-message: "style: update pre-commit hooks"
          body: Update versions of pre-commit hooks to latest version.
          reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"

25
.github/workflows/pre-commit.yml vendored Normal file
View File

@@ -0,0 +1,25 @@
name: Run pre-commit hooks

on:
  pull_request:
  push:
    branches:
      - develop
  workflow_dispatch:

jobs:
  run-hooks:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Repo ⚡️
        uses: actions/checkout@v4

      - name: Set Up Python 🐍
        uses: actions/setup-python@v5
        with:
          # Quoted so the value is always parsed as a string, not a number.
          python-version: "3.x"

      - name: Install pre-commit 📦
        run: |
          pip install --upgrade pip
          pip install --upgrade pre-commit

      - name: Run pre-commit ✅
        run: pre-commit run --all-files

View File

@@ -27,7 +27,7 @@ jobs:
with:
action: codecov/codecov-action@v4
with: |
files: build/coverage_report.xml
files: build/coverage_report.xml
fail_ci_if_error: false
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}

25
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,25 @@
---
# pre-commit is a tool to perform a predefined set of tasks manually and/or
# automatically before git commits are made.
#
# Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level
#
# Common tasks
#
# - Run on all files:   pre-commit run --all-files
# - Register git hooks: pre-commit install --install-hooks
#
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  # `pre-commit sample-config` default hooks
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-added-large-files
      - id: check-executables-have-shebangs
      - id: check-shebang-scripts-are-executable
      - id: end-of-file-fixer
        # The vendored doxygen theme is third-party code; leave it untouched.
        exclude: ^docs/doxygen-awesome-theme/
      - id: trailing-whitespace
        exclude: ^docs/doxygen-awesome-theme/

View File

@@ -1,8 +1,7 @@
ISC License
Copyright (c) 2022, the clio developers
Copyright (c) 2022, the clio developers
Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@@ -18,7 +18,7 @@ Clio offers the full `rippled` API, with the caveat that Clio by default only re
Clio retrieves data from a designated group of `rippled` nodes instead of connecting to the peer-to-peer network.
For requests that require access to the peer-to-peer network, such as `fee` or `submit`, Clio automatically forwards the request to a `rippled` node and propagates the response back to the client. To access non-validated data for *any* request, simply add `ledger_index: "current"` to the request, and Clio will forward the request to `rippled`.
> [!NOTE]
> [!NOTE]
> Clio requires access to at least one `rippled` node, which can run on the same machine as Clio or separately.
## 📚 Learn more about Clio

View File

@@ -46,7 +46,7 @@ class Clio(ConanFile):
'lint': False,
'docs': False,
'snapshot': False,
'xrpl/*:tests': False,
'xrpl/*:rocksdb': False,
'cassandra-cpp-driver/*:shared': False,
@@ -78,7 +78,7 @@ class Clio(ConanFile):
def layout(self):
cmake_layout(self)
# Fix this setting to follow the default introduced in Conan 1.48
# Fix this setting to follow the default introduced in Conan 1.48
# to align with our build instructions.
self.folders.generators = 'build/generators'

View File

@@ -13,4 +13,4 @@ The image is based on Ubuntu 20.04 and contains:
- and some other useful tools
Conan is set up to build Clio without any additional steps. There are two preset conan profiles: `clang` and `gcc` to use corresponding compiler. By default conan is setup to use `gcc`.
Sanitizer builds for `ASAN`, `TSAN` and `UBSAN` are enabled via conan profiles for each of the supported compilers. These can be selected using the following pattern (all lowercase): `[compiler].[sanitizer]` (e.g. `--profile gcc.tsan`).
Sanitizer builds for `ASAN`, `TSAN` and `UBSAN` are enabled via conan profiles for each of the supported compilers. These can be selected using the following pattern (all lowercase): `[compiler].[sanitizer]` (e.g. `--profile gcc.tsan`).

View File

@@ -10,7 +10,7 @@ ENV CCACHE_VERSION=4.10.2 \
LLVM_TOOLS_VERSION=19 \
GH_VERSION=2.40.0 \
DOXYGEN_VERSION=1.12.0
# Add repositories
RUN apt-get -qq update \
&& apt-get -qq install -y --no-install-recommends --no-install-suggests gnupg wget curl software-properties-common \
@@ -95,7 +95,7 @@ RUN conan profile new clang --detect \
&& conan profile update env.CC=/usr/bin/clang-16 clang \
&& conan profile update env.CXX=/usr/bin/clang++-16 clang \
&& conan profile update env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS" clang \
&& conan profile update "conf.tools.build:compiler_executables={\"c\": \"/usr/bin/clang-16\", \"cpp\": \"/usr/bin/clang++-16\"}" clang
&& conan profile update "conf.tools.build:compiler_executables={\"c\": \"/usr/bin/clang-16\", \"cpp\": \"/usr/bin/clang++-16\"}" clang
RUN echo "include(gcc)" >> .conan/profiles/default

View File

@@ -1,4 +1,4 @@
FROM ubuntu:focal
FROM ubuntu:focal
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH

View File

@@ -63,7 +63,7 @@ COPY --from=build /gcc12.deb /
# Make gcc-12 available but also leave gcc12.deb for others to copy if needed
RUN apt update && apt-get install -y binutils libc6-dev \
&& dpkg -i /gcc12.deb
&& dpkg -i /gcc12.deb
RUN update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 100 \
&& update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-12 100 \

View File

@@ -59,4 +59,3 @@ case $1 in
esac
popd > /dev/null

View File

@@ -34,7 +34,7 @@ FULL_SIDEBAR = NO
HTML_HEADER = ${SOURCE}/docs/doxygen-awesome-theme/header.html
HTML_EXTRA_STYLESHEET = ${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome.css \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only.css \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only-darkmode-toggle.css
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only-darkmode-toggle.css
HTML_EXTRA_FILES = ${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-darkmode-toggle.js \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-interactive-toc.js

View File

@@ -53,7 +53,7 @@ Here is an example snippet from the config file:
## SSL
The parameters `ssl_cert_file` and `ssl_key_file` can also be added to the top level of precedence of our Clio config. The `ssl_cert_file` field specifies the filepath for your SSL cert, while `ssl_key_file` specifies the filepath for your SSL key. It is up to you how to change ownership of these folders for your designated Clio user.
The parameters `ssl_cert_file` and `ssl_key_file` can also be added to the top level of precedence of our Clio config. The `ssl_cert_file` field specifies the filepath for your SSL cert, while `ssl_key_file` specifies the filepath for your SSL key. It is up to you how to change ownership of these folders for your designated Clio user.
Your options include:

View File

@@ -21,7 +21,7 @@
//
// Below options will use defaults from cassandra driver if left unspecified.
// See https://docs.datastax.com/en/developer/cpp-driver/2.17/api/struct.CassCluster/ for details.
//
//
// "queue_size_io": 2
//
// ---
@@ -113,7 +113,7 @@
"cache": {
// Configure this to use either "num_diffs", "num_cursors_from_diff", or "num_cursors_from_account". By default, Clio uses "num_diffs".
"num_diffs": 32, // Generate the cursors from the latest ledger diff, then use the cursors to partition the ledger to load concurrently. The cursors number is affected by the busyness of the network.
// "num_cursors_from_diff": 3200, // Read the cursors from the diff table until we have enough cursors to partition the ledger to load concurrently.
// "num_cursors_from_diff": 3200, // Read the cursors from the diff table until we have enough cursors to partition the ledger to load concurrently.
// "num_cursors_from_account": 3200, // Read the cursors from the account table until we have enough cursors to partition the ledger to load concurrently.
"num_markers": 48, // The number of markers is the number of coroutines to load the cache concurrently.
"page_fetch_size": 512, // The number of rows to load for each page.

View File

@@ -13,7 +13,7 @@ The minimum level of severity at which the log message will be outputted by defa
Each of the variables expands like so:
- `TimeStamp`: The full date and time of the log entry
- `SourceLocation`: A partial path to the c++ file and the line number in said file (`source/file/path:linenumber`)
- `SourceLocation`: A partial path to the c++ file and the line number in said file (`source/file/path:linenumber`)
- `ThreadID`: The ID of the thread the log entry is written from
- `Channel`: The channel that this log entry was sent to
- `Severity`: The severity (aka log level) the entry was sent at

View File

@@ -8,7 +8,7 @@ If you see the error log message `Could not connect to Cassandra: No hosts avail
You can use [cqlsh](https://pypi.org/project/cqlsh/) to check the connection to the database.
If you would like to run a local ScyllaDB, you can call:
```sh
docker run --rm -p 9042:9042 --name clio-scylla -d scylladb/scylla
docker run --rm -p 9042:9042 --name clio-scylla -d scylladb/scylla
```
## Check the server status of Clio
@@ -38,10 +38,7 @@ You can check the cache status by calling:
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep is_full
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep is_enabled
```
If `is_full` is false, it means the cache is still loading. Normally, the Clio can respond quicker if cache finishs loading. If `is_enabled` is false, it means the cache is disabled in the configuration file or there is data corruption in the database.
If `is_full` is false, it means the cache is still loading. Normally, the Clio can respond quicker if cache finishs loading. If `is_enabled` is false, it means the cache is disabled in the configuration file or there is data corruption in the database.
## Receive error message `Too many requests`
If client sees the error message `Too many requests`, this means that the client is blocked by Clio's DosGuard protection. You may want to add the client's IP to the whitelist in the configuration file, Or update other your DosGuard settings.

View File

@@ -22,19 +22,19 @@ There are three main types of data in each XRP Ledger version:
Due to the structural differences of the different types of databases, Clio may choose to represent these data types using a different schema for each unique database type.
### Keywords
### Keywords
**Sequence**: A unique incrementing identification number used to label the different ledger versions.
**Hash**: The SHA512-half (calculate SHA512 and take the first 256 bits) hash of various ledger data like the entire ledger or specific ledger objects.
**Ledger Object**: The [binary-encoded](https://xrpl.org/serialization.html) STObject containing specific data (i.e. metadata, transaction data).
**Ledger Object**: The [binary-encoded](https://xrpl.org/serialization.html) STObject containing specific data (i.e. metadata, transaction data).
**Metadata**: The data containing [detailed information](https://xrpl.org/transaction-metadata.html#transaction-metadata) of the outcome of a specific transaction, regardless of whether the transaction was successful.
**Metadata**: The data containing [detailed information](https://xrpl.org/transaction-metadata.html#transaction-metadata) of the outcome of a specific transaction, regardless of whether the transaction was successful.
**Transaction data**: The data containing the [full details](https://xrpl.org/transaction-common-fields.html) of a specific transaction.
**Transaction data**: The data containing the [full details](https://xrpl.org/transaction-common-fields.html) of a specific transaction.
**Object Index**: The pseudo-random unique identifier of a ledger object, created by hashing the data of the object.
**Object Index**: The pseudo-random unique identifier of a ledger object, created by hashing the data of the object.
## Cassandra Implementation
@@ -59,10 +59,10 @@ Their schemas and how they work are detailed in the following sections.
### ledger_transactions
```
CREATE TABLE clio.ledger_transactions (
CREATE TABLE clio.ledger_transactions (
ledger_sequence bigint, # The sequence number of the ledger version
hash blob, # Hash of all the transactions on this ledger version
PRIMARY KEY (ledger_sequence, hash)
PRIMARY KEY (ledger_sequence, hash)
) WITH CLUSTERING ORDER BY (hash ASC) ...
```
@@ -71,7 +71,7 @@ This table stores the hashes of all transactions in a given ledger sequence and
### transactions
```
CREATE TABLE clio.transactions (
CREATE TABLE clio.transactions (
hash blob PRIMARY KEY, # The transaction hash
date bigint, # Date of the transaction
ledger_sequence bigint, # The sequence that the transaction was validated
@@ -82,7 +82,7 @@ CREATE TABLE clio.transactions (
This table stores the full transaction and metadata of each ledger version with the transaction hash as the primary key.
To lookup all the transactions that were validated in a ledger version with sequence `n`, first get the all the transaction hashes in that ledger version by querying `SELECT * FROM ledger_transactions WHERE ledger_sequence = n;`. Then, iterate through the list of hashes and query `SELECT * FROM transactions WHERE hash = one_of_the_hash_from_the_list;` to get the detailed transaction data.
To lookup all the transactions that were validated in a ledger version with sequence `n`, first get the all the transaction hashes in that ledger version by querying `SELECT * FROM ledger_transactions WHERE ledger_sequence = n;`. Then, iterate through the list of hashes and query `SELECT * FROM transactions WHERE hash = one_of_the_hash_from_the_list;` to get the detailed transaction data.
### ledger_hashes
@@ -93,7 +93,7 @@ CREATE TABLE clio.ledger_hashes (
) ...
```
This table stores the hash of all ledger versions by their sequences.
This table stores the hash of all ledger versions by their sequences.
### ledger_range
@@ -270,7 +270,7 @@ CREATE TABLE clio.migrator_status (
migrator_name TEXT, # The name of the migrator
status TEXT, # The status of the migrator
PRIMARY KEY (migrator_name)
)
)
```
The `migrator_status` table stores the status of the migratior in this database. If a migrator's status is `migrated`, it means this database has finished data migration for this migrator.

View File

@@ -69,11 +69,11 @@ public:
std::string createKeyspace = [this]() {
return fmt::format(
R"(
CREATE KEYSPACE IF NOT EXISTS {}
CREATE KEYSPACE IF NOT EXISTS {}
WITH replication = {{
'class': 'SimpleStrategy',
'replication_factor': '{}'
}}
}}
AND durable_writes = True
)",
settingsProvider_.get().getKeyspace(),
@@ -91,13 +91,13 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
key blob,
sequence bigint,
object blob,
PRIMARY KEY (key, sequence)
)
WITH CLUSTERING ORDER BY (sequence DESC)
(
key blob,
sequence bigint,
object blob,
PRIMARY KEY (key, sequence)
)
WITH CLUSTERING ORDER BY (sequence DESC)
)",
qualifiedTableName(settingsProvider_.get(), "objects")
));
@@ -105,13 +105,13 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
hash blob PRIMARY KEY,
ledger_sequence bigint,
(
hash blob PRIMARY KEY,
ledger_sequence bigint,
date bigint,
transaction blob,
metadata blob
)
transaction blob,
metadata blob
)
)",
qualifiedTableName(settingsProvider_.get(), "transactions")
));
@@ -119,11 +119,11 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
ledger_sequence bigint,
hash blob,
PRIMARY KEY (ledger_sequence, hash)
)
(
ledger_sequence bigint,
hash blob,
PRIMARY KEY (ledger_sequence, hash)
)
)",
qualifiedTableName(settingsProvider_.get(), "ledger_transactions")
));
@@ -131,12 +131,12 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
key blob,
seq bigint,
next blob,
PRIMARY KEY (key, seq)
)
seq bigint,
next blob,
PRIMARY KEY (key, seq)
)
)",
qualifiedTableName(settingsProvider_.get(), "successor")
));
@@ -144,11 +144,11 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
seq bigint,
(
seq bigint,
key blob,
PRIMARY KEY (seq, key)
)
PRIMARY KEY (seq, key)
)
)",
qualifiedTableName(settingsProvider_.get(), "diff")
));
@@ -156,12 +156,12 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
account blob,
seq_idx tuple<bigint, bigint>,
(
account blob,
seq_idx tuple<bigint, bigint>,
hash blob,
PRIMARY KEY (account, seq_idx)
)
PRIMARY KEY (account, seq_idx)
)
WITH CLUSTERING ORDER BY (seq_idx DESC)
)",
qualifiedTableName(settingsProvider_.get(), "account_tx")
@@ -170,10 +170,10 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
sequence bigint PRIMARY KEY,
header blob
)
)
)",
qualifiedTableName(settingsProvider_.get(), "ledgers")
));
@@ -181,10 +181,10 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
hash blob PRIMARY KEY,
sequence bigint
)
)
)",
qualifiedTableName(settingsProvider_.get(), "ledger_hashes")
));
@@ -192,7 +192,7 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
is_latest boolean PRIMARY KEY,
sequence bigint
)
@@ -203,13 +203,13 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
token_id blob,
(
token_id blob,
sequence bigint,
owner blob,
is_burned boolean,
PRIMARY KEY (token_id, sequence)
)
PRIMARY KEY (token_id, sequence)
)
WITH CLUSTERING ORDER BY (sequence DESC)
)",
qualifiedTableName(settingsProvider_.get(), "nf_tokens")
@@ -218,12 +218,12 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
issuer blob,
taxon bigint,
token_id blob,
PRIMARY KEY (issuer, taxon, token_id)
)
)
WITH CLUSTERING ORDER BY (taxon ASC, token_id ASC)
)",
qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2")
@@ -232,12 +232,12 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
token_id blob,
sequence bigint,
uri blob,
PRIMARY KEY (token_id, sequence)
)
)
WITH CLUSTERING ORDER BY (sequence DESC)
)",
qualifiedTableName(settingsProvider_.get(), "nf_token_uris")
@@ -246,12 +246,12 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
token_id blob,
(
token_id blob,
seq_idx tuple<bigint, bigint>,
hash blob,
PRIMARY KEY (token_id, seq_idx)
)
PRIMARY KEY (token_id, seq_idx)
)
WITH CLUSTERING ORDER BY (seq_idx DESC)
)",
qualifiedTableName(settingsProvider_.get(), "nf_token_transactions")
@@ -260,11 +260,11 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
mpt_id blob,
holder blob,
PRIMARY KEY (mpt_id, holder)
)
)
WITH CLUSTERING ORDER BY (holder ASC)
)",
qualifiedTableName(settingsProvider_.get(), "mp_token_holders")
@@ -273,11 +273,11 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
migrator_name TEXT,
status TEXT,
PRIMARY KEY (migrator_name)
)
)
)",
qualifiedTableName(settingsProvider_.get(), "migrator_status")
));
@@ -285,11 +285,11 @@ public:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
node_id UUID,
message TEXT,
PRIMARY KEY (node_id)
)
)
WITH default_time_to_live = 2
)",
qualifiedTableName(settingsProvider_.get(), "nodes_chat")
@@ -324,7 +324,7 @@ public:
PreparedStatement insertObject = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(key, sequence, object)
VALUES (?, ?, ?)
)",
@@ -335,7 +335,7 @@ public:
PreparedStatement insertTransaction = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(hash, ledger_sequence, date, transaction, metadata)
VALUES (?, ?, ?, ?, ?)
)",
@@ -346,7 +346,7 @@ public:
PreparedStatement insertLedgerTransaction = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(ledger_sequence, hash)
VALUES (?, ?)
)",
@@ -357,7 +357,7 @@ public:
PreparedStatement insertSuccessor = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(key, seq, next)
VALUES (?, ?, ?)
)",
@@ -368,7 +368,7 @@ public:
PreparedStatement insertDiff = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(seq, key)
VALUES (?, ?)
)",
@@ -379,7 +379,7 @@ public:
PreparedStatement insertAccountTx = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(account, seq_idx, hash)
VALUES (?, ?, ?)
)",
@@ -390,7 +390,7 @@ public:
PreparedStatement insertNFT = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(token_id, sequence, owner, is_burned)
VALUES (?, ?, ?, ?)
)",
@@ -401,7 +401,7 @@ public:
PreparedStatement insertIssuerNFT = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(issuer, taxon, token_id)
VALUES (?, ?, ?)
)",
@@ -412,7 +412,7 @@ public:
PreparedStatement insertNFTURI = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(token_id, sequence, uri)
VALUES (?, ?, ?)
)",
@@ -423,7 +423,7 @@ public:
PreparedStatement insertNFTTx = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(token_id, seq_idx, hash)
VALUES (?, ?, ?)
)",
@@ -434,7 +434,7 @@ public:
PreparedStatement insertMPTHolder = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(mpt_id, holder)
VALUES (?, ?)
)",
@@ -445,7 +445,7 @@ public:
PreparedStatement insertLedgerHeader = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(sequence, header)
VALUES (?, ?)
)",
@@ -456,7 +456,7 @@ public:
PreparedStatement insertLedgerHash = [this]() {
return handle_.get().prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(hash, sequence)
VALUES (?, ?)
)",
@@ -471,9 +471,9 @@ public:
PreparedStatement updateLedgerRange = [this]() {
return handle_.get().prepare(fmt::format(
R"(
UPDATE {}
UPDATE {}
SET sequence = ?
WHERE is_latest = ?
WHERE is_latest = ?
IF sequence IN (?, null)
)",
qualifiedTableName(settingsProvider_.get(), "ledger_range")
@@ -483,7 +483,7 @@ public:
PreparedStatement deleteLedgerRange = [this]() {
return handle_.get().prepare(fmt::format(
R"(
UPDATE {}
UPDATE {}
SET sequence = ?
WHERE is_latest = False
)",
@@ -520,11 +520,11 @@ public:
PreparedStatement selectSuccessor = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT next
FROM {}
SELECT next
FROM {}
WHERE key = ?
AND seq <= ?
ORDER BY seq DESC
ORDER BY seq DESC
LIMIT 1
)",
qualifiedTableName(settingsProvider_.get(), "successor")
@@ -534,7 +534,7 @@ public:
PreparedStatement selectDiff = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT key
SELECT key
FROM {}
WHERE seq = ?
)",
@@ -545,11 +545,11 @@ public:
PreparedStatement selectObject = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT object, sequence
FROM {}
SELECT object, sequence
FROM {}
WHERE key = ?
AND sequence <= ?
ORDER BY sequence DESC
ORDER BY sequence DESC
LIMIT 1
)",
qualifiedTableName(settingsProvider_.get(), "objects")
@@ -559,7 +559,7 @@ public:
PreparedStatement selectTransaction = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT transaction, metadata, ledger_sequence, date
SELECT transaction, metadata, ledger_sequence, date
FROM {}
WHERE hash = ?
)",
@@ -570,9 +570,9 @@ public:
PreparedStatement selectAllTransactionHashesInLedger = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT hash
FROM {}
WHERE ledger_sequence = ?
SELECT hash
FROM {}
WHERE ledger_sequence = ?
)",
qualifiedTableName(settingsProvider_.get(), "ledger_transactions")
));
@@ -581,11 +581,11 @@ public:
PreparedStatement selectLedgerPageKeys = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT key
FROM {}
SELECT key
FROM {}
WHERE TOKEN(key) >= ?
AND sequence <= ?
PER PARTITION LIMIT 1
PER PARTITION LIMIT 1
LIMIT ?
ALLOW FILTERING
)",
@@ -611,9 +611,9 @@ public:
PreparedStatement getToken = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT TOKEN(key)
FROM {}
WHERE key = ?
SELECT TOKEN(key)
FROM {}
WHERE key = ?
LIMIT 1
)",
qualifiedTableName(settingsProvider_.get(), "objects")
@@ -623,8 +623,8 @@ public:
PreparedStatement selectAccountTx = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT hash, seq_idx
FROM {}
SELECT hash, seq_idx
FROM {}
WHERE account = ?
AND seq_idx < ?
LIMIT ?
@@ -636,10 +636,10 @@ public:
PreparedStatement selectAccountFromBegining = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT account
FROM {}
SELECT account
FROM {}
WHERE token(account) > 0
PER PARTITION LIMIT 1
PER PARTITION LIMIT 1
LIMIT ?
)",
qualifiedTableName(settingsProvider_.get(), "account_tx")
@@ -649,10 +649,10 @@ public:
PreparedStatement selectAccountFromToken = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT account
FROM {}
SELECT account
FROM {}
WHERE token(account) > token(?)
PER PARTITION LIMIT 1
PER PARTITION LIMIT 1
LIMIT ?
)",
qualifiedTableName(settingsProvider_.get(), "account_tx")
@@ -662,11 +662,11 @@ public:
PreparedStatement selectAccountTxForward = [this]() {
return handle_.get().prepare(fmt::format(
R"(
SELECT hash, seq_idx
FROM {}
SELECT hash, seq_idx
FROM {}
WHERE account = ?
AND seq_idx > ?
ORDER BY seq_idx ASC
ORDER BY seq_idx ASC
LIMIT ?
)",
qualifiedTableName(settingsProvider_.get(), "account_tx")
@@ -677,7 +677,7 @@ public:
return handle_.get().prepare(fmt::format(
R"(
SELECT sequence, owner, is_burned
FROM {}
FROM {}
WHERE token_id = ?
AND sequence <= ?
ORDER BY sequence DESC
@@ -691,7 +691,7 @@ public:
return handle_.get().prepare(fmt::format(
R"(
SELECT uri
FROM {}
FROM {}
WHERE token_id = ?
AND sequence <= ?
ORDER BY sequence DESC
@@ -705,7 +705,7 @@ public:
return handle_.get().prepare(fmt::format(
R"(
SELECT hash, seq_idx
FROM {}
FROM {}
WHERE token_id = ?
AND seq_idx < ?
ORDER BY seq_idx DESC
@@ -719,7 +719,7 @@ public:
return handle_.get().prepare(fmt::format(
R"(
SELECT hash, seq_idx
FROM {}
FROM {}
WHERE token_id = ?
AND seq_idx >= ?
ORDER BY seq_idx ASC
@@ -733,7 +733,7 @@ public:
return handle_.get().prepare(fmt::format(
R"(
SELECT token_id
FROM {}
FROM {}
WHERE issuer = ?
AND (taxon, token_id) > ?
ORDER BY taxon ASC, token_id ASC
@@ -747,7 +747,7 @@ public:
return handle_.get().prepare(fmt::format(
R"(
SELECT token_id
FROM {}
FROM {}
WHERE issuer = ?
AND taxon = ?
AND token_id > ?
@@ -762,7 +762,7 @@ public:
return handle_.get().prepare(fmt::format(
R"(
SELECT holder
FROM {}
FROM {}
WHERE mpt_id = ?
AND holder > ?
ORDER BY holder ASC
@@ -777,7 +777,7 @@ public:
R"(
SELECT sequence
FROM {}
WHERE hash = ?
WHERE hash = ?
LIMIT 1
)",
qualifiedTableName(settingsProvider_.get(), "ledger_hashes")
@@ -799,7 +799,7 @@ public:
return handle_.get().prepare(fmt::format(
R"(
SELECT sequence
FROM {}
FROM {}
WHERE is_latest = True
)",
qualifiedTableName(settingsProvider_.get(), "ledger_range")

View File

@@ -1,35 +1,35 @@
# Clio Migration
# Clio Migration
Clio maintains the off-chain data of XRPL and multiple indexes tables to powering complex queries. To simplify the creation of index tables, this migration framework handles the process of database change and facilitates the migration of historical data seamlessly.
## Command Line Usage
Clio provides a migration command-line tool to migrate data in database.
Clio provides a migration command-line tool to migrate data in database.
> Note: We need a **configuration file** to run the migration tool. This configuration file has the same format as the configuration file of the Clio server, ensuring consistency and ease of use. It reads the database configuration from the same session as the server's configuration, eliminating the need for separate setup or additional configuration files. Be aware that migration-specific configuration is under `.migration` session.
### To query migration status:
./clio_server --migrate status ~/config/migrator.json
This command returns the current migration status of each migrator. The example output:
./clio_server --migrate status ~/config/migrator.json
This command returns the current migration status of each migrator. The example output:
Current Migration Status:
Migrator: ExampleMigrator - Feature v1, Clio v3 - not migrated
### To start a migration:
./clio_server --migrate ExampleMigrator ~/config/migrator.json
Migration will run if the migrator has not been migrated. The migrator will be marked as migrated after the migration is completed.
## How to write a migrator
@@ -56,7 +56,7 @@ It contains:
## How to use full table scanner (Only for Cassandra/ScyllaDB)
Sometimes migrator isn't able to query the historical data by table's partition key. For example, migrator of transactions needs the historical transaction data without knowing each transaction hash. Full table scanner can help to get all the rows in parallel.
Sometimes migrator isn't able to query the historical data by table's partition key. For example, migrator of transactions needs the historical transaction data without knowing each transaction hash. Full table scanner can help to get all the rows in parallel.
Most indexes are based on either ledger states or transactions. We provide the `objects` and `transactions` scanner. Developers only need to implement the callback function to receive the historical data. Please find the examples in `tests/integration/migration/cassandra/ExampleTransactionsMigrator.cpp` and `tests/integration/migration/cassandra/ExampleObjectsMigrator.cpp`.
@@ -82,14 +82,13 @@ We have some example migrators under `tests/integration/migration/cassandra` fol
- ExampleDropTableMigrator
This migrator drops `diff` table.
This migrator drops `diff` table.
- ExampleLedgerMigrator
This migrator shows how to migrate data when we don't need to do full table scan. This migrator creates an index table `ledger_example` which maintains the map of ledger sequence and its account hash.
This migrator shows how to migrate data when we don't need to do full table scan. This migrator creates an index table `ledger_example` which maintains the map of ledger sequence and its account hash.
- ExampleObjectsMigrator
This migrator shows how to migrate ledger states related data. It uses `ObjectsScanner` to proceed the full scan in parallel. It counts the number of ACCOUNT_ROOT.
- ExampleTransactionsMigrator
This migrator shows how to migrate transactions related data. It uses `TransactionsScanner` to proceed the `transactions` table full scan in parallel. It creates an index table `tx_index_example` which tracks the transaction hash and its according transaction type.

View File

@@ -65,8 +65,8 @@ public:
{
return handler.prepare(fmt::format(
R"(
SELECT *
FROM {}
SELECT *
FROM {}
WHERE TOKEN({}) >= ? AND TOKEN({}) <= ?
)",
data::cassandra::qualifiedTableName<SettingsProviderType>(settingsProvider_.get(), tableName),
@@ -86,7 +86,7 @@ public:
{
static auto kPREPARED = handler.prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(migrator_name, status)
VALUES (?, ?)
)",

View File

@@ -17,7 +17,7 @@ See [tests/unit/rpc](https://github.com/XRPLF/clio/tree/develop/tests/unit/rpc)
Handlers need to fulfil the requirements specified by the `SomeHandler` concept (see `rpc/common/Concepts.hpp`):
- Expose types:
- `Input` - The POD struct which acts as input for the handler
- `Output` - The POD struct which acts as output of a valid handler invocation

View File

@@ -170,7 +170,7 @@ template <typename T>
concept SomeStdDuration = requires {
// Thank you Ed Catmur for this trick.
// See https://stackoverflow.com/questions/74383254/concept-that-models-only-the-stdchrono-duration-types
[]<typename Rep, typename Period>( //
[]<typename Rep, typename Period>( //
std::type_identity<std::chrono::duration<Rep, Period>>
) {}(std::type_identity<std::decay_t<T>>());
};
@@ -180,7 +180,7 @@ concept SomeStdDuration = requires {
*/
template <typename T>
concept SomeStdOptional = requires {
[]<typename Type>( //
[]<typename Type>( //
std::type_identity<std::optional<Type>>
) {}(std::type_identity<std::decay_t<T>>());
};

View File

@@ -70,7 +70,7 @@ Regular, non-stoppable operations, can not be stopped. A non-stoppable operation
#### Scheduled operations
Scheduled operations are wrappers on top of Stoppable and regular Operations and provide the functionality of a timer that needs to run out before the given block of code will finally be executed on the Execution Context.
Scheduled operations can be aborted by calling
Scheduled operations can be aborted by calling
- `cancel` - will only cancel the timer. If the timer already fired this will have no effect
- `requestStop` - will stop the operation if it's already running or as soon as the timer runs out
- `abort` - will call `cancel` immediatelly followed by `requestStop`
@@ -111,12 +111,12 @@ auto res = ctx.execute([&value]() { value = 42; });
res.wait();
ASSERT_EQ(value, 42);
```
```
### Stoppable operation
#### Requesting stoppage
The stop token can be used via the `isStopRequested()` member function:
```cpp
```cpp
auto res = ctx.execute([](auto stopToken) {
while (not stopToken.isStopRequested())
;
@@ -126,9 +126,9 @@ auto res = ctx.execute([](auto stopToken) {
res.requestStop();
```
Alternatively, the stop token is implicity convertible to `bool` so you can also use it like so:
```cpp
```cpp
auto res = ctx.execute([](auto stopRequested) {
while (not stopRequested)
;
@@ -141,7 +141,7 @@ res.requestStop();
#### Automatic stoppage on timeout
By adding an optional timeout as the last arg to `execute` you can have the framework automatically call `requestStop()`:
```cpp
```cpp
auto res = ctx.execute([](auto stopRequested) {
while (not stopRequested)
;
@@ -162,11 +162,11 @@ auto res = ctx.scheduleAfter(
}
);
res.cancel(); // or .abort()
res.cancel(); // or .abort()
```
#### Get value after stopping
```cpp
```cpp
auto res = ctx.scheduleAfter(1ms, [](auto stopRequested) {
while (not stopRequested)
;
@@ -189,7 +189,7 @@ auto res =
auto const err = res.get().error();
EXPECT_TRUE(err.message.ends_with("test"));
EXPECT_TRUE(std::string{err}.ends_with("test"));
```
```
### Strand
The APIs are basically the same as with the parent `ExecutionContext`.
@@ -210,7 +210,7 @@ auto anyCtx = AnyExecutionContext{ctx};
auto op = anyCtx.execute([](auto stopToken) {
while(not stopToken.isStopRequested())
std::this_thread::sleep_for(1s);
std::this_thread::sleep_for(1s);
}, 3s);
```
@@ -221,11 +221,11 @@ Erased operations only expose the `abort` member function that can be used to bo
auto op = anyCtx.scheduleAfter(3s, [](auto stopToken, auto cancelled) {
if (cancelled)
return;
while(not stopToken.isStopRequested())
std::this_thread::sleep_for(1s);
std::this_thread::sleep_for(1s);
}, 3s);
std::this_thread::sleep_for(2s);
std::this_thread::sleep_for(2s);
op.abort(); // cancels the scheduled operation with 1s to spare
```

View File

@@ -40,9 +40,9 @@ TestGlobals::parse(int argc, char* argv[])
// clang-format off
po::options_description description("Clio UT options");
description.add_options()
("backend_host", po::value<std::string>()->default_value(TestGlobals::backendHost),
("backend_host", po::value<std::string>()->default_value(TestGlobals::backendHost),
"sets the cassandra/scylladb host for backend tests")
("backend_keyspace", po::value<std::string>()->default_value(TestGlobals::backendKeyspace),
("backend_keyspace", po::value<std::string>()->default_value(TestGlobals::backendKeyspace),
"sets the cassandra/scylladb keyspace for backend tests")
;
// clang-format on

View File

@@ -50,8 +50,8 @@ protected:
EXPECT_TRUE(handle.connect());
auto const query = fmt::format(
R"(
CREATE KEYSPACE IF NOT EXISTS {}
WITH replication = {{'class': 'SimpleStrategy', 'replication_factor': '1'}}
CREATE KEYSPACE IF NOT EXISTS {}
WITH replication = {{'class': 'SimpleStrategy', 'replication_factor': '1'}}
AND durable_writes = True
)",
keyspace
@@ -209,8 +209,8 @@ TEST_F(BackendCassandraBaseTest, KeyspaceManipulation)
{
auto const query = fmt::format(
R"(
CREATE KEYSPACE {}
WITH replication = {{'class': 'SimpleStrategy', 'replication_factor': '1'}}
CREATE KEYSPACE {}
WITH replication = {{'class': 'SimpleStrategy', 'replication_factor': '1'}}
AND durable_writes = True
)",
keyspace
@@ -248,7 +248,7 @@ TEST_F(BackendCassandraBaseTest, CreateTableWithStrings)
auto handle = createHandle(TestGlobals::instance().backendHost, "test");
auto q1 = fmt::format(
R"(
CREATE TABLE IF NOT EXISTS strings (hash blob PRIMARY KEY, sequence bigint)
CREATE TABLE IF NOT EXISTS strings (hash blob PRIMARY KEY, sequence bigint)
WITH default_time_to_live = {}
)",
5000
@@ -313,7 +313,7 @@ TEST_F(BackendCassandraBaseTest, BatchInsert)
auto handle = createHandle(TestGlobals::instance().backendHost, "test");
auto const q1 = fmt::format(
R"(
CREATE TABLE IF NOT EXISTS strings (hash blob PRIMARY KEY, sequence bigint)
CREATE TABLE IF NOT EXISTS strings (hash blob PRIMARY KEY, sequence bigint)
WITH default_time_to_live = {}
)",
5000
@@ -372,7 +372,7 @@ TEST_F(BackendCassandraBaseTest, BatchInsertAsync)
auto handle = createHandle(TestGlobals::instance().backendHost, "test");
auto const q1 = fmt::format(
R"(
CREATE TABLE IF NOT EXISTS strings (hash blob PRIMARY KEY, sequence bigint)
CREATE TABLE IF NOT EXISTS strings (hash blob PRIMARY KEY, sequence bigint)
WITH default_time_to_live = {}
)",
5000
@@ -418,7 +418,7 @@ TEST_F(BackendCassandraBaseTest, AlterTableAddColumn)
auto handle = createHandle(TestGlobals::instance().backendHost, "test");
auto const q1 = fmt::format(
R"(
CREATE TABLE IF NOT EXISTS strings (hash blob PRIMARY KEY, sequence bigint)
CREATE TABLE IF NOT EXISTS strings (hash blob PRIMARY KEY, sequence bigint)
WITH default_time_to_live = {}
)",
5000
@@ -438,7 +438,7 @@ TEST_F(BackendCassandraBaseTest, AlterTableMoveToNewTable)
auto const newTable = fmt::format(
R"(
CREATE TABLE IF NOT EXISTS strings_v2 (hash blob PRIMARY KEY, sequence bigint, tmp bigint)
CREATE TABLE IF NOT EXISTS strings_v2 (hash blob PRIMARY KEY, sequence bigint, tmp bigint)
WITH default_time_to_live = {}
)",
5000

View File

@@ -75,7 +75,7 @@ public:
auto static kINSERT_TX_INDEX_EXAMPLE = [this]() {
return handle_.prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(hash, tx_type)
VALUES (?, ?)
)",
@@ -171,7 +171,7 @@ public:
auto static kINSERT_LEDGER_EXAMPLE = [this]() {
return handle_.prepare(fmt::format(
R"(
INSERT INTO {}
INSERT INTO {}
(sequence, account_hash)
VALUES (?, ?)
)",
@@ -319,11 +319,11 @@ private:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
hash blob,
tx_type text,
PRIMARY KEY (hash)
)
PRIMARY KEY (hash)
)
)",
data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example")
));
@@ -331,11 +331,11 @@ private:
statements.emplace_back(fmt::format(
R"(
CREATE TABLE IF NOT EXISTS {}
(
(
sequence bigint,
account_hash blob,
PRIMARY KEY (sequence)
)
PRIMARY KEY (sequence)
)
)",
data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example")
));

View File

@@ -43,7 +43,7 @@ namespace {
constexpr auto kJSON_DATA = R"JSON(
{
"arr": [
"arr": [
{ "first": 1234 },
{ "second": true },
{ "inner_section": [{ "inner": "works" }] },

View File

@@ -60,7 +60,7 @@ TEST(VerifyConfigTest, InvalidJsonFile)
static constexpr auto kINVALID_JSON = R"({
"server": {
"ip": "0.0.0.0",
"port": 51233,
"port": 51233,
}
})";
auto const tmpConfigFile = TmpFile(kINVALID_JSON);

View File

@@ -97,7 +97,7 @@ TEST_F(RPCBaseTest, TypeValidator)
{"arr", Type<json::array>{}},
};
auto passingInput = json::parse(R"({
auto passingInput = json::parse(R"({
"uint": 123,
"int": 321,
"str": "a string",
@@ -615,10 +615,10 @@ TEST_F(RPCBaseTest, SubscribeStreamValidator)
{
auto const spec = RpcSpec{{"streams", CustomValidators::subscribeStreamValidator}};
auto passingInput = json::parse(
R"({
"streams":
R"({
"streams":
[
"ledger",
"ledger",
"transactions_proposed",
"validations",
"transactions",

View File

@@ -1119,7 +1119,7 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAssetsMatchingInputOrder)
auto static const kINPUT = json::parse(fmt::format(
R"({{
"asset": {{
"currency": "JPY",
"currency": "JPY",
"issuer": "{}"
}},
"asset2": {{
@@ -1233,7 +1233,7 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAssetsPreservesInputOrder)
"issuer": "{}"
}},
"asset2": {{
"currency": "JPY",
"currency": "JPY",
"issuer": "{}"
}}
}})",

View File

@@ -69,8 +69,8 @@ TEST_F(RPCAccountChannelsHandlerTest, LimitNotInt)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": "t"
}})",
kACCOUNT
@@ -88,8 +88,8 @@ TEST_F(RPCAccountChannelsHandlerTest, LimitNagetive)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": -1
}})",
kACCOUNT
@@ -107,8 +107,8 @@ TEST_F(RPCAccountChannelsHandlerTest, LimitZero)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 0
}})",
kACCOUNT
@@ -126,8 +126,8 @@ TEST_F(RPCAccountChannelsHandlerTest, NonHexLedgerHash)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 10,
"ledger_hash": "xxx"
}})",
@@ -148,7 +148,7 @@ TEST_F(RPCAccountChannelsHandlerTest, NonStringLedgerHash)
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
"account": "{}",
"limit": 10,
"ledger_hash": 123
}})",
@@ -168,8 +168,8 @@ TEST_F(RPCAccountChannelsHandlerTest, InvalidLedgerIndexString)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 10,
"ledger_index": "notvalidated"
}})",
@@ -189,8 +189,8 @@ TEST_F(RPCAccountChannelsHandlerTest, MarkerNotString)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"marker": 9
}})",
kACCOUNT
@@ -212,7 +212,7 @@ TEST_F(RPCAccountChannelsHandlerTest, InvalidMarker)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"marker": "123invalid"
}})",
@@ -228,8 +228,8 @@ TEST_F(RPCAccountChannelsHandlerTest, InvalidMarker)
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"marker": 401
}})",
kACCOUNT
@@ -247,7 +247,7 @@ TEST_F(RPCAccountChannelsHandlerTest, AccountInvalidFormat)
{
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp"
})");
auto const output = handler.process(input, Context{yield});
@@ -263,7 +263,7 @@ TEST_F(RPCAccountChannelsHandlerTest, AccountNotString)
{
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"account": 12
})");
auto const output = handler.process(input, Context{yield});
@@ -309,7 +309,7 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerStringIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"ledger_index": "4"
}})",
@@ -331,7 +331,7 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerIntIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"ledger_index": 4
}})",
@@ -356,7 +356,7 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerHash2)
ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader));
EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"ledger_hash": "{}"
}})",
@@ -380,7 +380,7 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerIndex2)
// differ from previous logic
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"ledger_index": "31"
}})",
@@ -535,7 +535,7 @@ TEST_F(RPCAccountChannelsHandlerTest, UseLimit)
runSpawn([this](auto yield) {
auto handler = AnyHandler{AccountChannelsHandler{this->backend_}};
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"limit": 20
}})",
@@ -551,8 +551,8 @@ TEST_F(RPCAccountChannelsHandlerTest, UseLimit)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 9
}})",
kACCOUNT
@@ -564,8 +564,8 @@ TEST_F(RPCAccountChannelsHandlerTest, UseLimit)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountChannelsHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 401
}})",
kACCOUNT
@@ -622,7 +622,7 @@ TEST_F(RPCAccountChannelsHandlerTest, UseDestination)
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
"limit": 30,
"limit": 30,
"destination_account":"{}"
}})",
kACCOUNT,
@@ -657,7 +657,7 @@ TEST_F(RPCAccountChannelsHandlerTest, EmptyChannel)
.WillByDefault(Return(ownerDir.getSerializer().peekData()));
EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}"
}})",
kACCOUNT
@@ -742,7 +742,7 @@ TEST_F(RPCAccountChannelsHandlerTest, OptionalResponseField)
ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs));
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}"
}})",
kACCOUNT
@@ -804,7 +804,7 @@ TEST_F(RPCAccountChannelsHandlerTest, MarkerOutput)
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"limit": {}
}})",
@@ -859,7 +859,7 @@ TEST_F(RPCAccountChannelsHandlerTest, MarkerInput)
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"limit": {},
"marker": "{},{}"

View File

@@ -342,7 +342,7 @@ TEST_F(RPCAccountInfoHandlerTest, SignerListsTrueV2)
{
auto const expectedOutput = fmt::format(
R"({{
"account_data":
"account_data":
{{
"Account": "{}",
"Balance": "200",
@@ -385,7 +385,7 @@ TEST_F(RPCAccountInfoHandlerTest, SignerListsTrueV2)
"index": "A9C28A28B85CD533217F5C0A0C7767666B093FA58A0F2D80026FCC4CD932DDC7"
}}
],
"account_flags":
"account_flags":
{{
"defaultRipple": false,
"depositAuth": false,
@@ -443,7 +443,7 @@ TEST_F(RPCAccountInfoHandlerTest, SignerListsTrueV1)
{
auto const expectedOutput = fmt::format(
R"({{
"account_data":
"account_data":
{{
"Account": "{}",
"Balance": "200",
@@ -486,7 +486,7 @@ TEST_F(RPCAccountInfoHandlerTest, SignerListsTrueV1)
}}
]
}},
"account_flags":
"account_flags":
{{
"defaultRipple": false,
"depositAuth": false,

View File

@@ -77,8 +77,8 @@ TEST_F(RPCAccountLinesHandlerTest, NonHexLedgerHash)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 10,
"ledger_hash": "xxx"
}})",
@@ -99,7 +99,7 @@ TEST_F(RPCAccountLinesHandlerTest, NonStringLedgerHash)
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
"account": "{}",
"limit": 10,
"ledger_hash": 123
}})",
@@ -119,8 +119,8 @@ TEST_F(RPCAccountLinesHandlerTest, InvalidLedgerIndexString)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 10,
"ledger_index": "notvalidated"
}})",
@@ -140,8 +140,8 @@ TEST_F(RPCAccountLinesHandlerTest, MarkerNotString)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"marker": 9
}})",
kACCOUNT
@@ -163,7 +163,7 @@ TEST_F(RPCAccountLinesHandlerTest, InvalidMarker)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"marker": "123invalid"
}})",
@@ -179,8 +179,8 @@ TEST_F(RPCAccountLinesHandlerTest, InvalidMarker)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"marker": 401
}})",
kACCOUNT
@@ -199,7 +199,7 @@ TEST_F(RPCAccountLinesHandlerTest, AccountInvalidFormat)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(
R"({
R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp"
})"
);
@@ -217,7 +217,7 @@ TEST_F(RPCAccountLinesHandlerTest, AccountNotString)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(
R"({
R"({
"account": 12
})"
);
@@ -235,7 +235,7 @@ TEST_F(RPCAccountLinesHandlerTest, PeerInvalidFormat)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(
R"({
R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"peer": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp"
})"
@@ -253,7 +253,7 @@ TEST_F(RPCAccountLinesHandlerTest, PeerNotString)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(
R"({
R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"peer": 12
})"
@@ -272,7 +272,7 @@ TEST_F(RPCAccountLinesHandlerTest, LimitNotInt)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(
R"({
R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"limit": "t"
})"
@@ -290,7 +290,7 @@ TEST_F(RPCAccountLinesHandlerTest, LimitNagetive)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(
R"({
R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"limit": -1
})"
@@ -308,7 +308,7 @@ TEST_F(RPCAccountLinesHandlerTest, LimitZero)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(
R"({
R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"limit": 0
})"
@@ -355,7 +355,7 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerStringIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"ledger_index": "4"
}})",
@@ -377,7 +377,7 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerIntIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"ledger_index": 4
}})",
@@ -402,7 +402,7 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerHash2)
ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader));
EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"ledger_hash": "{}"
}})",
@@ -426,7 +426,7 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerIndex2)
// differ from previous logic
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"ledger_index": "31"
}})",
@@ -581,7 +581,7 @@ TEST_F(RPCAccountLinesHandlerTest, UseLimit)
runSpawn([this](auto yield) {
auto handler = AnyHandler{AccountLinesHandler{this->backend_}};
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"limit": 20
}})",
@@ -597,8 +597,8 @@ TEST_F(RPCAccountLinesHandlerTest, UseLimit)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 9
}})",
kACCOUNT
@@ -610,8 +610,8 @@ TEST_F(RPCAccountLinesHandlerTest, UseLimit)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{AccountLinesHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
R"({{
"account": "{}",
"limit": 401
}})",
kACCOUNT
@@ -668,7 +668,7 @@ TEST_F(RPCAccountLinesHandlerTest, UseDestination)
auto const input = json::parse(fmt::format(
R"({{
"account": "{}",
"limit": 30,
"limit": 30,
"peer": "{}"
}})",
kACCOUNT,
@@ -703,7 +703,7 @@ TEST_F(RPCAccountLinesHandlerTest, EmptyChannel)
.WillByDefault(Return(ownerDir.getSerializer().peekData()));
EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}"
}})",
kACCOUNT
@@ -793,7 +793,7 @@ TEST_F(RPCAccountLinesHandlerTest, OptionalResponseFieldWithDeepFreeze)
ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs));
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}"
}})",
kACCOUNT
@@ -873,7 +873,7 @@ TEST_F(RPCAccountLinesHandlerTest, FrozenTrustLineResponse)
EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}"
}})",
kACCOUNT
@@ -936,7 +936,7 @@ TEST_F(RPCAccountLinesHandlerTest, MarkerOutput)
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"limit": {}
}})",
@@ -991,7 +991,7 @@ TEST_F(RPCAccountLinesHandlerTest, MarkerInput)
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"account": "{}",
"limit": {},
"marker": "{},{}"

View File

@@ -1352,7 +1352,7 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTLimitAdjust)
R"({{
"account":"{}",
"marker":"{},{}",
"limit": 12
"limit": 12
}})",
kACCOUNT,
ripple::strHex(marker),

View File

@@ -187,7 +187,7 @@ struct AccountTxParameterTest : public RPCAccountTxHandlerTest,
.testName = "MarkerLedgerNotInt",
.testJson = R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"marker":
"marker":
{
"seq": "string",
"ledger": 1
@@ -200,7 +200,7 @@ struct AccountTxParameterTest : public RPCAccountTxHandlerTest,
.testName = "MarkerSeqNotInt",
.testJson = R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"marker":
"marker":
{
"ledger": "string",
"seq": 1
@@ -318,7 +318,7 @@ struct AccountTxParameterTest : public RPCAccountTxHandlerTest,
AccountTxParamTestCaseBundle{
.testName = "LedgerIndexMaxMinAndLedgerIndex",
.testJson = R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_index_max": 20,
"ledger_index_min": 11,
"ledger_index": 10
@@ -353,7 +353,7 @@ struct AccountTxParameterTest : public RPCAccountTxHandlerTest,
.testName = "LedgerIndexMaxMinAndLedgerHash",
.testJson = fmt::format(
R"({{
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_index_max": 20,
"ledger_index_min": 11,
"ledger_hash": "{}"
@@ -367,7 +367,7 @@ struct AccountTxParameterTest : public RPCAccountTxHandlerTest,
.testName = "LedgerIndexMaxMinAndLedgerHash_API_v1",
.testJson = fmt::format(
R"({{
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_index_max": 20,
"ledger_index_min": 11,
"ledger_hash": "{}"
@@ -393,7 +393,7 @@ struct AccountTxParameterTest : public RPCAccountTxHandlerTest,
AccountTxParamTestCaseBundle{
.testName = "InvalidTxType",
.testJson = R"({
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"tx_type": "unknow"
})",
.expectedError = "invalidParams",
@@ -1130,24 +1130,24 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
"transactions": [
{
"meta": {
"AffectedNodes":
"AffectedNodes":
[
{
"ModifiedNode":
"ModifiedNode":
{
"FinalFields":
"FinalFields":
{
"NFTokens":
"NFTokens":
[
{
"NFToken":
"NFToken":
{
"NFTokenID": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF",
"URI": "7465737475726C"
}
},
{
"NFToken":
"NFToken":
{
"NFTokenID": "1B8590C01B0006EDFA9ED60296DD052DC5E90F99659B25014D08E1BC983515BC",
"URI": "7465737475726C"
@@ -1156,12 +1156,12 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
]
},
"LedgerEntryType": "NFTokenPage",
"PreviousFields":
"PreviousFields":
{
"NFTokens":
"NFTokens":
[
{
"NFToken":
"NFToken":
{
"NFTokenID": "1B8590C01B0006EDFA9ED60296DD052DC5E90F99659B25014D08E1BC983515BC",
"URI": "7465737475726C"
@@ -1176,7 +1176,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
"TransactionResult": "tesSUCCESS",
"nftoken_id": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF"
},
"tx":
"tx":
{
"Account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"Fee": "50",
@@ -1192,14 +1192,14 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
"validated": true
},
{
"meta":
"meta":
{
"AffectedNodes":
"AffectedNodes":
[
{
"DeletedNode":
"DeletedNode":
{
"FinalFields":
"FinalFields":
{
"NFTokenID": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA",
"Owner": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn"
@@ -1214,7 +1214,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
"TransactionResult": "tesSUCCESS",
"nftoken_id": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA"
},
"tx":
"tx":
{
"Account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"Fee": "50",
@@ -1230,13 +1230,13 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
"validated": true
},
{
"meta":
"meta":
{
"AffectedNodes":
"AffectedNodes":
[
{
"DeletedNode": {
"FinalFields":
"FinalFields":
{
"NFTokenID": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA"
},
@@ -1244,9 +1244,9 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
}
},
{
"DeletedNode":
"DeletedNode":
{
"FinalFields":
"FinalFields":
{
"NFTokenID": "15FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF"
},
@@ -1256,17 +1256,17 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
],
"TransactionIndex": 0,
"TransactionResult": "tesSUCCESS",
"nftoken_ids":
"nftoken_ids":
[
"05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA",
"15FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF"
]
},
"tx":
"tx":
{
"Account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"Fee": "50",
"NFTokenOffers":
"NFTokenOffers":
[
"05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA",
"15FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF"
@@ -1282,12 +1282,12 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
"validated": true
},
{
"meta":
"meta":
{
"AffectedNodes":
"AffectedNodes":
[
{
"CreatedNode":
"CreatedNode":
{
"LedgerEntryType": "NFTokenOffer",
"LedgerIndex": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA"
@@ -1298,7 +1298,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
"TransactionResult": "tesSUCCESS",
"offer_id": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA"
},
"tx":
"tx":
{
"Account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"Amount": "123",
@@ -1316,7 +1316,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1)
}
],
"validated": true,
"marker":
"marker":
{
"ledger": 12,
"seq": 34
@@ -1363,24 +1363,24 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
"transactions": [
{
"meta": {
"AffectedNodes":
"AffectedNodes":
[
{
"ModifiedNode":
"ModifiedNode":
{
"FinalFields":
"FinalFields":
{
"NFTokens":
"NFTokens":
[
{
"NFToken":
"NFToken":
{
"NFTokenID": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF",
"URI": "7465737475726C"
}
},
{
"NFToken":
"NFToken":
{
"NFTokenID": "1B8590C01B0006EDFA9ED60296DD052DC5E90F99659B25014D08E1BC983515BC",
"URI": "7465737475726C"
@@ -1389,12 +1389,12 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
]
},
"LedgerEntryType": "NFTokenPage",
"PreviousFields":
"PreviousFields":
{
"NFTokens":
"NFTokens":
[
{
"NFToken":
"NFToken":
{
"NFTokenID": "1B8590C01B0006EDFA9ED60296DD052DC5E90F99659B25014D08E1BC983515BC",
"URI": "7465737475726C"
@@ -1413,7 +1413,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
"ledger_index": 11,
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652",
"close_time_iso": "2000-01-01T00:00:00Z",
"tx_json":
"tx_json":
{
"Account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"Fee": "50",
@@ -1427,14 +1427,14 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
"validated": true
},
{
"meta":
"meta":
{
"AffectedNodes":
"AffectedNodes":
[
{
"DeletedNode":
"DeletedNode":
{
"FinalFields":
"FinalFields":
{
"NFTokenID": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA",
"Owner": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn"
@@ -1452,7 +1452,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
"ledger_index": 11,
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652",
"close_time_iso": "2000-01-01T00:00:00Z",
"tx_json":
"tx_json":
{
"Account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"Fee": "50",
@@ -1466,13 +1466,13 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
"validated": true
},
{
"meta":
"meta":
{
"AffectedNodes":
"AffectedNodes":
[
{
"DeletedNode": {
"FinalFields":
"FinalFields":
{
"NFTokenID": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA"
},
@@ -1480,9 +1480,9 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
}
},
{
"DeletedNode":
"DeletedNode":
{
"FinalFields":
"FinalFields":
{
"NFTokenID": "15FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF"
},
@@ -1492,7 +1492,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
],
"TransactionIndex": 0,
"TransactionResult": "tesSUCCESS",
"nftoken_ids":
"nftoken_ids":
[
"05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA",
"15FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF"
@@ -1502,11 +1502,11 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
"ledger_index": 11,
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652",
"close_time_iso": "2000-01-01T00:00:00Z",
"tx_json":
"tx_json":
{
"Account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"Fee": "50",
"NFTokenOffers":
"NFTokenOffers":
[
"05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA",
"15FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DF"
@@ -1520,12 +1520,12 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
"validated": true
},
{
"meta":
"meta":
{
"AffectedNodes":
"AffectedNodes":
[
{
"CreatedNode":
"CreatedNode":
{
"LedgerEntryType": "NFTokenOffer",
"LedgerIndex": "05FB0EB4B899F056FA095537C5817163801F544BAFCEA39C995D76DB4D16F9DA"
@@ -1540,7 +1540,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
"ledger_index": 11,
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652",
"close_time_iso": "2000-01-01T00:00:00Z",
"tx_json":
"tx_json":
{
"Account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"Amount": "123",
@@ -1556,7 +1556,7 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2)
}
],
"validated": true,
"marker":
"marker":
{
"ledger": 12,
"seq": 34

View File

@@ -116,7 +116,7 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "MissingTakerGets",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "USD",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
@@ -128,7 +128,7 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "MissingTakerPays",
.testJson = R"({
"taker_gets" :
"taker_gets" :
{
"currency" : "USD",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
@@ -141,7 +141,7 @@ generateParameterBookOffersTestBundles()
.testName = "WrongTypeTakerPays",
.testJson = R"({
"taker_pays" : "wrong",
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
}
@@ -153,7 +153,7 @@ generateParameterBookOffersTestBundles()
.testName = "WrongTypeTakerGets",
.testJson = R"({
"taker_gets" : "wrong",
"taker_pays" :
"taker_pays" :
{
"currency" : "XRP"
}
@@ -165,7 +165,7 @@ generateParameterBookOffersTestBundles()
.testName = "TakerPaysMissingCurrency",
.testJson = R"({
"taker_pays" : {},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
}
@@ -177,7 +177,7 @@ generateParameterBookOffersTestBundles()
.testName = "TakerGetsMissingCurrency",
.testJson = R"({
"taker_gets" : {},
"taker_pays" :
"taker_pays" :
{
"currency" : "XRP"
}
@@ -188,12 +188,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "TakerGetsWrongCurrency",
.testJson = R"({
"taker_gets" :
"taker_gets" :
{
"currency" : "CNYY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_pays" :
"taker_pays" :
{
"currency" : "XRP"
}
@@ -204,12 +204,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "TakerPaysWrongCurrency",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNYY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
}
@@ -220,12 +220,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "TakerGetsCurrencyNotString",
.testJson = R"({
"taker_gets" :
"taker_gets" :
{
"currency" : 123,
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_pays" :
"taker_pays" :
{
"currency" : "XRP"
}
@@ -236,12 +236,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "TakerPaysCurrencyNotString",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : 123,
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
}
@@ -252,12 +252,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "TakerGetsWrongIssuer",
.testJson = R"({
"taker_gets" :
"taker_gets" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs5"
},
"taker_pays" :
"taker_pays" :
{
"currency" : "XRP"
}
@@ -268,12 +268,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "TakerPaysWrongIssuer",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs5"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
}
@@ -284,12 +284,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "InvalidTaker",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
},
@@ -301,12 +301,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "TakerNotString",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
},
@@ -318,12 +318,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "LimitNotInt",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
},
@@ -335,12 +335,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "LimitNagetive",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
},
@@ -352,12 +352,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "LimitZero",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
},
@@ -369,12 +369,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "LedgerIndexInvalid",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
},
@@ -386,12 +386,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "LedgerHashInvalid",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
},
@@ -403,12 +403,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "LedgerHashNotString",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP"
},
@@ -420,12 +420,12 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "GetsPaysXRPWithIssuer",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "XRP",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "CNY",
"issuer" : "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn"
@@ -437,11 +437,11 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "PaysCurrencyWithXRPIssuer",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "JPY"
"currency" : "JPY"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "CNY",
"issuer" : "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn"
@@ -453,14 +453,14 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "GetsCurrencyWithXRPIssuer",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "XRP"
"currency" : "XRP"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "CNY"
}
"currency" : "CNY"
}
})",
.expectedError = "dstIsrMalformed",
.expectedErrorMessage = "Invalid field 'taker_gets.issuer', expected non-XRP issuer."
@@ -468,16 +468,16 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "GetsXRPWithIssuer",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "XRP",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
}
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
}
})",
.expectedError = "dstIsrMalformed",
.expectedErrorMessage = "Unneeded field 'taker_gets.issuer' for XRP currency specification."
@@ -485,16 +485,16 @@ generateParameterBookOffersTestBundles()
ParameterTestBundle{
.testName = "BadMarket",
.testJson = R"({
"taker_pays" :
"taker_pays" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
},
"taker_gets" :
"taker_gets" :
{
"currency" : "CNY",
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
}
"issuer" : "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B"
}
})",
.expectedError = "badMarket",
.expectedErrorMessage = "badMarket"
@@ -620,11 +620,11 @@ generateNormalPathBookOffersTestBundles()
auto const getsXRPPaysUSDInputJson = fmt::format(
R"({{
"taker_gets":
"taker_gets":
{{
"currency": "XRP"
}},
"taker_pays":
"taker_pays":
{{
"currency": "USD",
"issuer": "{}"
@@ -635,11 +635,11 @@ generateNormalPathBookOffersTestBundles()
auto const paysXRPGetsUSDInputJson = fmt::format(
R"({{
"taker_pays":
"taker_pays":
{{
"currency": "XRP"
}},
"taker_gets":
"taker_gets":
{{
"currency": "USD",
"issuer": "{}"
@@ -1351,11 +1351,11 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaIntSequence)
auto static const kINPUT = json::parse(fmt::format(
R"({{
"ledger_index": 30,
"taker_gets":
"taker_gets":
{{
"currency": "XRP"
}},
"taker_pays":
"taker_pays":
{{
"currency": "USD",
"issuer": "{}"
@@ -1382,11 +1382,11 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaSequence)
auto static const kINPUT = json::parse(fmt::format(
R"({{
"ledger_index": "30",
"taker_gets":
"taker_gets":
{{
"currency": "XRP"
}},
"taker_pays":
"taker_pays":
{{
"currency": "USD",
"issuer": "{}"
@@ -1414,11 +1414,11 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaHash)
auto static const kINPUT = json::parse(fmt::format(
R"({{
"ledger_hash": "{}",
"taker_gets":
"taker_gets":
{{
"currency": "XRP"
}},
"taker_pays":
"taker_pays":
{{
"currency": "USD",
"issuer": "{}"
@@ -1489,11 +1489,11 @@ TEST_F(RPCBookOffersHandlerTest, Limit)
auto static const kINPUT = json::parse(fmt::format(
R"({{
"taker_gets":
"taker_gets":
{{
"currency": "XRP"
}},
"taker_pays":
"taker_pays":
{{
"currency": "USD",
"issuer": "{}"
@@ -1562,11 +1562,11 @@ TEST_F(RPCBookOffersHandlerTest, LimitMoreThanMax)
auto static const kINPUT = json::parse(fmt::format(
R"({{
"taker_gets":
"taker_gets":
{{
"currency": "XRP"
}},
"taker_pays":
"taker_pays":
{{
"currency": "USD",
"issuer": "{}"

View File

@@ -90,7 +90,7 @@ generateTestValuesForParametersTest()
{
.testName = "SourceAccountMissing",
.testJson = R"({
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652"
})",
.expectedError = "invalidParams",
@@ -99,8 +99,8 @@ generateTestValuesForParametersTest()
{
.testName = "SourceAccountMalformed",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652"
})",
.expectedError = "actMalformed",
@@ -109,8 +109,8 @@ generateTestValuesForParametersTest()
{
.testName = "SourceAccountNotString",
.testJson = R"({
"source_account": 1234,
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": 1234,
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652"
})",
.expectedError = "invalidParams",
@@ -119,7 +119,7 @@ generateTestValuesForParametersTest()
{
.testName = "DestinationAccountMissing",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652"
})",
.expectedError = "invalidParams",
@@ -128,8 +128,8 @@ generateTestValuesForParametersTest()
{
.testName = "DestinationAccountMalformed",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp",
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652"
})",
.expectedError = "actMalformed",
@@ -138,7 +138,7 @@ generateTestValuesForParametersTest()
{
.testName = "DestinationAccountNotString",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": 1234,
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652"
})",
@@ -148,8 +148,8 @@ generateTestValuesForParametersTest()
{
.testName = "LedgerHashInvalid",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_hash": "x"
})",
.expectedError = "invalidParams",
@@ -158,8 +158,8 @@ generateTestValuesForParametersTest()
{
.testName = "LedgerHashNotString",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_hash": 123
})",
.expectedError = "invalidParams",
@@ -168,8 +168,8 @@ generateTestValuesForParametersTest()
{
.testName = "LedgerIndexNotInt",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_index": "x"
})",
.expectedError = "invalidParams",
@@ -178,8 +178,8 @@ generateTestValuesForParametersTest()
{
.testName = "CredentialsNotArray",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"credentials": "x"
})",
.expectedError = "invalidParams",
@@ -188,8 +188,8 @@ generateTestValuesForParametersTest()
{
.testName = "CredentialsNotStringsInArray",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652",
"credentials": [123]
})",
@@ -199,8 +199,8 @@ generateTestValuesForParametersTest()
{
.testName = "CredentialsNotHexedStringInArray",
.testJson = R"({
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"source_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"destination_account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn",
"ledger_hash": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652",
"credentials": ["234", "432"]
})",
@@ -242,8 +242,8 @@ TEST_F(RPCDepositAuthorizedTest, LedgerNotExistViaIntSequence)
auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}};
auto const req = json::parse(fmt::format(
R"({{
"source_account": "{}",
"destination_account": "{}",
"source_account": "{}",
"destination_account": "{}",
"ledger_index": {}
}})",
kACCOUNT,
@@ -269,8 +269,8 @@ TEST_F(RPCDepositAuthorizedTest, LedgerNotExistViaStringSequence)
auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}};
auto const req = json::parse(fmt::format(
R"({{
"source_account": "{}",
"destination_account": "{}",
"source_account": "{}",
"destination_account": "{}",
"ledger_index": "{}"
}})",
kACCOUNT,
@@ -296,8 +296,8 @@ TEST_F(RPCDepositAuthorizedTest, LedgerNotExistViaHash)
auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}};
auto const req = json::parse(fmt::format(
R"({{
"source_account": "{}",
"destination_account": "{}",
"source_account": "{}",
"destination_account": "{}",
"ledger_hash": "{}"
}})",
kACCOUNT,

View File

@@ -280,14 +280,14 @@ TEST_F(RPCFeatureHandlerTest, SuccessPathViaNameWithSingleSupportedAndEnabledRes
auto const expectedOutput = fmt::format(
R"({{
"2E2FB9CF8A44EB80F4694D38AADAE9B8B7ADAFD2F092E10068E61C98C4F092B0":
"2E2FB9CF8A44EB80F4694D38AADAE9B8B7ADAFD2F092E10068E61C98C4F092B0":
{{
"name": "fixUniversalNumber",
"enabled": true,
"name": "fixUniversalNumber",
"enabled": true,
"supported": true
}},
"ledger_hash": "{}",
"ledger_index": {},
"ledger_hash": "{}",
"ledger_index": {},
"validated": true
}})",
kLEDGER_HASH,
@@ -330,14 +330,14 @@ TEST_F(RPCFeatureHandlerTest, SuccessPathViaHashWithSingleResult)
auto const expectedOutput = fmt::format(
R"({{
"2E2FB9CF8A44EB80F4694D38AADAE9B8B7ADAFD2F092E10068E61C98C4F092B0":
"2E2FB9CF8A44EB80F4694D38AADAE9B8B7ADAFD2F092E10068E61C98C4F092B0":
{{
"name": "fixUniversalNumber",
"enabled": true,
"name": "fixUniversalNumber",
"enabled": true,
"supported": true
}},
"ledger_hash": "{}",
"ledger_index": {},
"ledger_hash": "{}",
"ledger_index": {},
"validated": true
}})",
kLEDGER_HASH,
@@ -414,21 +414,21 @@ TEST_F(RPCFeatureHandlerTest, SuccessPathWithMultipleResults)
auto const expectedOutput = fmt::format(
R"({{
"features": {{
"2E2FB9CF8A44EB80F4694D38AADAE9B8B7ADAFD2F092E10068E61C98C4F092B0":
"2E2FB9CF8A44EB80F4694D38AADAE9B8B7ADAFD2F092E10068E61C98C4F092B0":
{{
"name": "fixUniversalNumber",
"enabled": true,
"name": "fixUniversalNumber",
"enabled": true,
"supported": true
}},
"DF8B4536989BDACE3F934F29423848B9F1D76D09BE6A1FCFE7E7F06AA26ABEAD":
{{
"name": "fixRemoveNFTokenAutoTrustLine",
"enabled": false,
"name": "fixRemoveNFTokenAutoTrustLine",
"enabled": false,
"supported": false
}}
}},
"ledger_hash": "{}",
"ledger_index": {},
"ledger_hash": "{}",
"ledger_index": {},
"validated": true
}})",
kLEDGER_HASH,

View File

@@ -143,7 +143,7 @@ generateTestValuesForParametersTest()
.testName = "no_base_asset",
.testJson = R"({
"quote_asset": "USD",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -159,7 +159,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"quote_asset" : "USD",
"base_asset": "asdf",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -175,7 +175,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"quote_asset" : "USD",
"base_asset": "",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -191,7 +191,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"quote_asset" : "USD",
"base_asset": "+aa",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -206,7 +206,7 @@ generateTestValuesForParametersTest()
.testName = "no_quote_asset",
.testJson = R"({
"base_asset": "USD",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -222,7 +222,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"quote_asset" : "asdf",
"base_asset": "USD",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -238,7 +238,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"quote_asset" : "",
"base_asset": "USD",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -254,7 +254,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"quote_asset" : "+aa",
"base_asset": "USD",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -290,7 +290,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -307,7 +307,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -341,7 +341,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{
"account": "rGh1VZCRBJY6rJiaFpD4LZtyHiuCkC8aeD",
@@ -358,7 +358,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{
"account": "invalid",
@@ -463,7 +463,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, LedgerNotFound)
R"({{
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -497,7 +497,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntrySinglePriceData)
R"({{
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -511,7 +511,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntrySinglePriceData)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "10",
"size": 1,
@@ -546,7 +546,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryStrOracleDocumentId)
R"({{
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -560,7 +560,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryStrOracleDocumentId)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "10",
"size": 1,
@@ -595,7 +595,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, PreviousTxNotFound)
R"({{
"base_asset": "JPY",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -609,7 +609,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, PreviousTxNotFound)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "10",
"size": 1,
@@ -674,7 +674,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NewLedgerObjectHasNoPricePair)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "10",
"size": 1,
@@ -716,7 +716,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesOdd)
R"({{
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -742,7 +742,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesOdd)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "110",
"size": 3,
@@ -784,7 +784,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesEven)
R"({{
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -816,7 +816,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesEven)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "92.5",
"size": 4,
@@ -859,7 +859,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryTrim)
"base_asset": "USD",
"quote_asset": "XRP",
"trim": {},
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -892,13 +892,13 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryTrim)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "92.5",
"size": 4,
"standard_deviation": "138.8944443333378"
}},
"trimmed_set":
"trimmed_set":
{{
"mean": "30",
"size": 2,
@@ -934,7 +934,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NoOracleEntryFound)
R"({{
"base_asset": "USD",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -968,7 +968,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NoMatchAssetPair)
R"({{
"base_asset": "JPY",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -1013,7 +1013,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIsZero)
"base_asset": "USD",
"quote_asset": "XRP",
"time_threshold": {},
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -1046,7 +1046,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIsZero)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "10",
"size": 1,
@@ -1093,7 +1093,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, ValidTimeThreshold)
"base_asset": "USD",
"quote_asset": "XRP",
"time_threshold": {},
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -1126,7 +1126,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, ValidTimeThreshold)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "15",
"size": 2,
@@ -1173,7 +1173,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdTooLong)
"base_asset": "USD",
"quote_asset": "XRP",
"time_threshold": {},
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -1206,7 +1206,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdTooLong)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "92.5",
"size": 4,
@@ -1252,7 +1252,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIncludeOldest)
"base_asset": "USD",
"quote_asset": "XRP",
"time_threshold": {},
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -1285,7 +1285,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIncludeOldest)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "92.5",
"size": 4,
@@ -1336,7 +1336,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, FromTx)
R"({{
"base_asset": "JPY",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",
@@ -1350,7 +1350,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, FromTx)
auto const expected = json::parse(fmt::format(
R"({{
"entire_set":
"entire_set":
{{
"mean": "10",
"size": 1,
@@ -1413,7 +1413,7 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NotFoundInTxHistory)
R"({{
"base_asset": "JPY",
"quote_asset": "XRP",
"oracles":
"oracles":
[
{{
"account": "{}",

View File

@@ -910,7 +910,7 @@ generateTestValuesForParametersTest()
.testName = "EmptyAMMAssetJson",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset":{{}},
"asset2":
@@ -930,7 +930,7 @@ generateTestValuesForParametersTest()
.testName = "EmptyAMMAsset2Json",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset2":{{}},
"asset":
@@ -950,7 +950,7 @@ generateTestValuesForParametersTest()
.testName = "MissingAMMAsset2Json",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset":
{{
@@ -969,7 +969,7 @@ generateTestValuesForParametersTest()
.testName = "MissingAMMAssetJson",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset2":
{{
@@ -988,7 +988,7 @@ generateTestValuesForParametersTest()
.testName = "AMMAssetNotJson",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset": "invalid",
"asset2":
@@ -1008,7 +1008,7 @@ generateTestValuesForParametersTest()
.testName = "AMMAsset2NotJson",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset2": "invalid",
"asset":
@@ -1028,7 +1028,7 @@ generateTestValuesForParametersTest()
.testName = "WrongAMMAssetCurrency",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset2":
{{
@@ -1051,7 +1051,7 @@ generateTestValuesForParametersTest()
.testName = "WrongAMMAssetIssuer",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset2":
{{
@@ -1074,7 +1074,7 @@ generateTestValuesForParametersTest()
.testName = "MissingAMMAssetIssuerForNonXRP",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset2":
{{
@@ -1097,7 +1097,7 @@ generateTestValuesForParametersTest()
.testName = "AMMAssetHasIssuerForXRP",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset2":
{{
@@ -1122,7 +1122,7 @@ generateTestValuesForParametersTest()
.testName = "MissingAMMAssetCurrency",
.testJson = fmt::format(
R"({{
"amm":
"amm":
{{
"asset2":
{{
@@ -1143,7 +1143,7 @@ generateTestValuesForParametersTest()
.testName = "BridgeMissingBridgeAccount",
.testJson = fmt::format(
R"({{
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1171,7 +1171,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1200,7 +1200,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1229,7 +1229,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1252,7 +1252,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "abcd",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1280,7 +1280,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "abcd",
@@ -1308,7 +1308,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1336,7 +1336,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1364,7 +1364,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1394,7 +1394,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1420,7 +1420,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"IssuingChainDoor": "{}",
"LockingChainIssue":
@@ -1448,7 +1448,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"LockingChainDoor": "{}",
"LockingChainIssue":
@@ -1474,7 +1474,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"IssuingChainDoor": "{}",
"LockingChainDoor": "{}",
@@ -1498,7 +1498,7 @@ generateTestValuesForParametersTest()
.testJson = fmt::format(
R"({{
"bridge_account": "{}",
"bridge":
"bridge":
{{
"IssuingChainDoor": "{}",
"LockingChainDoor": "{}",
@@ -1541,7 +1541,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedClaimIdJsonMissingClaimId",
.testJson = fmt::format(
R"({{
"xchain_owned_claim_id":
"xchain_owned_claim_id":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1568,7 +1568,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedClaimIdJsonMissingDoor",
.testJson = fmt::format(
R"({{
"xchain_owned_claim_id":
"xchain_owned_claim_id":
{{
"xchain_owned_claim_id": 10,
"LockingChainDoor": "{}",
@@ -1594,7 +1594,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedClaimIdJsonMissingIssue",
.testJson = fmt::format(
R"({{
"xchain_owned_claim_id":
"xchain_owned_claim_id":
{{
"xchain_owned_claim_id": 10,
"LockingChainDoor": "{}",
@@ -1616,7 +1616,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedClaimIdJsonInvalidDoor",
.testJson = fmt::format(
R"({{
"xchain_owned_claim_id":
"xchain_owned_claim_id":
{{
"xchain_owned_claim_id": 10,
"LockingChainDoor": "abcd",
@@ -1643,7 +1643,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedClaimIdJsonInvalidIssue",
.testJson = fmt::format(
R"({{
"xchain_owned_claim_id":
"xchain_owned_claim_id":
{{
"xchain_owned_claim_id": 10,
"LockingChainDoor": "{}",
@@ -1677,7 +1677,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedCreateAccountClaimIdJsonMissingClaimId",
.testJson = fmt::format(
R"({{
"xchain_owned_create_account_claim_id":
"xchain_owned_create_account_claim_id":
{{
"LockingChainDoor": "{}",
"IssuingChainDoor": "{}",
@@ -1704,7 +1704,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedCreateAccountClaimIdJsonMissingDoor",
.testJson = fmt::format(
R"({{
"xchain_owned_create_account_claim_id":
"xchain_owned_create_account_claim_id":
{{
"xchain_owned_create_account_claim_id": 10,
"LockingChainDoor": "{}",
@@ -1730,7 +1730,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedCreateAccountClaimIdJsonMissingIssue",
.testJson = fmt::format(
R"({{
"xchain_owned_create_account_claim_id":
"xchain_owned_create_account_claim_id":
{{
"xchain_owned_create_account_claim_id": 10,
"LockingChainDoor": "{}",
@@ -1752,7 +1752,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedCreateAccountClaimIdJsonInvalidDoor",
.testJson = fmt::format(
R"({{
"xchain_owned_create_account_claim_id":
"xchain_owned_create_account_claim_id":
{{
"xchain_owned_create_account_claim_id": 10,
"LockingChainDoor": "abcd",
@@ -1779,7 +1779,7 @@ generateTestValuesForParametersTest()
.testName = "OwnedCreateAccountClaimIdJsonInvalidIssue",
.testJson = fmt::format(
R"({{
"xchain_owned_create_account_claim_id":
"xchain_owned_create_account_claim_id":
{{
"xchain_owned_create_account_claim_id": 10,
"LockingChainDoor": "{}",

View File

@@ -82,8 +82,8 @@ TEST_F(RPCMPTHoldersHandlerTest, NonHexLedgerHash)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{MPTHoldersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"mpt_issuance_id": "{}",
R"({{
"mpt_issuance_id": "{}",
"ledger_hash": "xxx"
}})",
kMPT_ID
@@ -103,7 +103,7 @@ TEST_F(RPCMPTHoldersHandlerTest, NonStringLedgerHash)
auto const handler = AnyHandler{MPTHoldersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"mpt_issuance_id": "{}",
"mpt_issuance_id": "{}",
"ledger_hash": 123
}})",
kMPT_ID
@@ -122,8 +122,8 @@ TEST_F(RPCMPTHoldersHandlerTest, InvalidLedgerIndexString)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{MPTHoldersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"mpt_issuance_id": "{}",
R"({{
"mpt_issuance_id": "{}",
"ledger_index": "notvalidated"
}})",
kMPT_ID
@@ -142,7 +142,7 @@ TEST_F(RPCMPTHoldersHandlerTest, MPTIDInvalidFormat)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{MPTHoldersHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"mpt_issuance_id": "xxx"
})");
auto const output = handler.process(input, Context{.yield = std::ref(yield)});
@@ -172,7 +172,7 @@ TEST_F(RPCMPTHoldersHandlerTest, MPTIDNotString)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{MPTHoldersHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"mpt_issuance_id": 12
})");
auto const output = handler.process(input, Context{.yield = std::ref(yield)});
@@ -190,7 +190,7 @@ TEST_F(RPCMPTHoldersHandlerTest, MarkerInvalidFormat)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{MPTHoldersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
R"({{
"mpt_issuance_id": "{}",
"marker": "xxx"
}})",
@@ -210,7 +210,7 @@ TEST_F(RPCMPTHoldersHandlerTest, MarkerNotString)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{MPTHoldersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
R"({{
"mpt_issuance_id": "{}",
"marker": 1
}})",
@@ -257,7 +257,7 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerStringIndex)
// mock fetchLedgerBySequence return empty
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional<ripple::LedgerInfo>{}));
auto const input = json::parse(fmt::format(
R"({{
R"({{
"mpt_issuance_id": "{}",
"ledger_index": "4"
}})",
@@ -278,7 +278,7 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerIntIndex)
// mock fetchLedgerBySequence return empty
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional<ripple::LedgerInfo>{}));
auto const input = json::parse(fmt::format(
R"({{
R"({{
"mpt_issuance_id": "{}",
"ledger_index": 4
}})",
@@ -303,7 +303,7 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerHash2)
ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerinfo));
EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"mpt_issuance_id": "{}",
"ledger_hash": "{}"
}})",
@@ -327,7 +327,7 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerIndex2)
// differ from previous logic
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"mpt_issuance_id": "{}",
"ledger_index": "31"
}})",

View File

@@ -67,8 +67,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonHexLedgerHash)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"ledger_hash": "xxx"
}})",
kNFT_ID
@@ -87,8 +87,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, LimitNotInt)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": "xxx"
}})",
kNFT_ID
@@ -106,8 +106,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, LimitNegative)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": -1
}})",
kNFT_ID
@@ -125,8 +125,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, LimitZero)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": 0
}})",
kNFT_ID
@@ -145,7 +145,7 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonStringLedgerHash)
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
"nft_id": "{}",
"ledger_hash": 123
}})",
kNFT_ID
@@ -164,8 +164,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, InvalidLedgerIndexString)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"ledger_index": "notvalidated"
}})",
kNFT_ID
@@ -184,7 +184,7 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NFTIDInvalidFormat)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"nft_id": "00080000B4F4AFC5FBCBD76873F18006173D2193467D3EE7"
})");
auto const output = handler.process(input, Context{.yield = yield});
@@ -200,7 +200,7 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NFTIDNotString)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"nft_id": 12
})");
auto const output = handler.process(input, Context{.yield = yield});
@@ -246,7 +246,7 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonExistLedgerViaLedgerIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_index": "4"
}})",
@@ -271,7 +271,7 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonExistLedgerViaLedgerHash2)
ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader));
EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_hash": "{}"
}})",
@@ -295,7 +295,7 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonExistLedgerViaLedgerIndex2)
// differ from previous logic
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_index": "31"
}})",
@@ -342,8 +342,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, MarkerNotString)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"marker": 9
}})",
kNFT_ID
@@ -364,7 +364,7 @@ TEST_F(RPCNFTBuyOffersHandlerTest, InvalidMarker)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"marker": "123invalid"
}})",
@@ -380,8 +380,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, InvalidMarker)
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"marker": 250
}})",
kNFT_ID
@@ -617,8 +617,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, ResultsWithoutMarkerForInputWithMarkerAndLimi
runSpawn([this](auto yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": 49
}})",
kNFT_ID
@@ -630,8 +630,8 @@ TEST_F(RPCNFTBuyOffersHandlerTest, ResultsWithoutMarkerForInputWithMarkerAndLimi
runSpawn([this](auto yield) {
auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": 501
}})",
kNFT_ID

View File

@@ -178,7 +178,7 @@ generateTestValuesForParametersTest()
.testName = "MarkerLedgerNotInt",
.testJson = R"({
"nft_id":"00010000A7CAD27B688D14BA1A9FA5366554D6ADCF9CE0875B974D9F00000004",
"marker":
"marker":
{
"seq": "string",
"ledger": 1
@@ -191,7 +191,7 @@ generateTestValuesForParametersTest()
.testName = "MarkerSeqNotInt",
.testJson = R"({
"nft_id":"00010000A7CAD27B688D14BA1A9FA5366554D6ADCF9CE0875B974D9F00000004",
"marker":
"marker":
{
"ledger": "string",
"seq": 1
@@ -231,7 +231,7 @@ generateTestValuesForParametersTest()
NFTHistoryParamTestCaseBundle{
.testName = "LedgerIndexMaxMinAndLedgerIndex",
.testJson = R"({
"nft_id":"00010000A7CAD27B688D14BA1A9FA5366554D6ADCF9CE0875B974D9F00000004",
"nft_id":"00010000A7CAD27B688D14BA1A9FA5366554D6ADCF9CE0875B974D9F00000004",
"ledger_index_max": 20,
"ledger_index_min": 11,
"ledger_index": 10

View File

@@ -62,8 +62,8 @@ TEST_F(RPCNFTInfoHandlerTest, NonHexLedgerHash)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTInfoHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"ledger_hash": "xxx"
}})",
kNFT_ID
@@ -83,7 +83,7 @@ TEST_F(RPCNFTInfoHandlerTest, NonStringLedgerHash)
auto const handler = AnyHandler{NFTInfoHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
"nft_id": "{}",
"ledger_hash": 123
}})",
kNFT_ID
@@ -102,8 +102,8 @@ TEST_F(RPCNFTInfoHandlerTest, InvalidLedgerIndexString)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTInfoHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"ledger_index": "notvalidated"
}})",
kNFT_ID
@@ -122,7 +122,7 @@ TEST_F(RPCNFTInfoHandlerTest, NFTIDInvalidFormat)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTInfoHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"nft_id": "00080000B4F4AFC5FBCBD76873F18006173D2193467D3EE7"
})");
auto const output = handler.process(input, Context{.yield = yield});
@@ -138,7 +138,7 @@ TEST_F(RPCNFTInfoHandlerTest, NFTIDNotString)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTInfoHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"nft_id": 12
})");
auto const output = handler.process(input, Context{.yield = yield});
@@ -184,7 +184,7 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerStringIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_index": "4"
}})",
@@ -206,7 +206,7 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerIntIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_index": 4
}})",
@@ -231,7 +231,7 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerHash2)
ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader));
EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_hash": "{}"
}})",
@@ -255,7 +255,7 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerIndex2)
// differ from previous logic
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_index": "31"
}})",

View File

@@ -67,8 +67,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, LimitNotInt)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": "xxx"
}})",
kNFT_ID
@@ -86,8 +86,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, LimitNegative)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": -1
}})",
kNFT_ID
@@ -105,8 +105,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, LimitZero)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": 0
}})",
kNFT_ID
@@ -124,8 +124,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonHexLedgerHash)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"ledger_hash": "xxx"
}})",
kNFT_ID
@@ -145,7 +145,7 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonStringLedgerHash)
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
"nft_id": "{}",
"ledger_hash": 123
}})",
kNFT_ID
@@ -164,8 +164,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, InvalidLedgerIndexString)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"ledger_index": "notvalidated"
}})",
kNFT_ID
@@ -184,7 +184,7 @@ TEST_F(RPCNFTSellOffersHandlerTest, NFTIDInvalidFormat)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"nft_id": "00080000B4F4AFC5FBCBD76873F18006173D2193467D3EE7"
})");
auto const output = handler.process(input, Context{.yield = yield});
@@ -200,7 +200,7 @@ TEST_F(RPCNFTSellOffersHandlerTest, NFTIDNotString)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"nft_id": 12
})");
auto const output = handler.process(input, Context{.yield = yield});
@@ -246,7 +246,7 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonExistLedgerViaLedgerIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_index": "4"
}})",
@@ -271,7 +271,7 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonExistLedgerViaLedgerHash2)
ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader));
EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_hash": "{}"
}})",
@@ -295,7 +295,7 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonExistLedgerViaLedgerIndex2)
// differ from previous logic
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"ledger_index": "31"
}})",
@@ -342,8 +342,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, MarkerNotString)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"marker": 9
}})",
kNFT_ID
@@ -364,7 +364,7 @@ TEST_F(RPCNFTSellOffersHandlerTest, InvalidMarker)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
R"({{
"nft_id": "{}",
"marker": "123invalid"
}})",
@@ -380,8 +380,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, InvalidMarker)
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"marker": 250
}})",
kNFT_ID
@@ -617,8 +617,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, ResultsWithoutMarkerForInputWithMarkerAndLim
runSpawn([this](auto yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": 49
}})",
kNFT_ID
@@ -630,8 +630,8 @@ TEST_F(RPCNFTSellOffersHandlerTest, ResultsWithoutMarkerForInputWithMarkerAndLim
runSpawn([this](auto yield) {
auto const handler = AnyHandler{NFTSellOffersHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"nft_id": "{}",
R"({{
"nft_id": "{}",
"limit": 501
}})",
kNFT_ID

View File

@@ -106,8 +106,8 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonHexLedgerHash)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"issuer": "{}",
R"({{
"issuer": "{}",
"ledger_hash": "xxx"
}})",
kACCOUNT
@@ -127,7 +127,7 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonStringLedgerHash)
auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"issuer": "{}",
"issuer": "{}",
"ledger_hash": 123
}})",
kACCOUNT
@@ -146,8 +146,8 @@ TEST_F(RPCNFTsByIssuerHandlerTest, InvalidLedgerIndexString)
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}};
auto const input = json::parse(fmt::format(
R"({{
"issuer": "{}",
R"({{
"issuer": "{}",
"ledger_index": "notvalidated"
}})",
kACCOUNT
@@ -166,7 +166,7 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NFTIssuerInvalidFormat)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"issuer": "xxx"
})");
auto const output = handler.process(input, Context{.yield = std::ref(yield)});
@@ -196,7 +196,7 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NFTIssuerNotString)
{
runSpawn([this](boost::asio::yield_context yield) {
auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}};
auto const input = json::parse(R"({
auto const input = json::parse(R"({
"issuer": 12
})");
auto const output = handler.process(input, Context{.yield = std::ref(yield)});
@@ -241,7 +241,7 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerStringIndex)
// mock fetchLedgerBySequence return empty
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional<ripple::LedgerHeader>{}));
auto const input = json::parse(fmt::format(
R"({{
R"({{
"issuer": "{}",
"ledger_index": "4"
}})",
@@ -262,7 +262,7 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerIntIndex)
// mock fetchLedgerBySequence return empty
EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional<ripple::LedgerHeader>{}));
auto const input = json::parse(fmt::format(
R"({{
R"({{
"issuer": "{}",
"ledger_index": 4
}})",
@@ -287,7 +287,7 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerHash2)
ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader));
EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"issuer": "{}",
"ledger_hash": "{}"
}})",
@@ -311,7 +311,7 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerIndex2)
// differ from previous logic
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"issuer": "{}",
"ledger_index": "31"
}})",

View File

@@ -205,7 +205,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -221,7 +221,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -237,7 +237,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -253,7 +253,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -269,7 +269,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -288,7 +288,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -307,7 +307,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -326,7 +326,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -345,7 +345,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -363,7 +363,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -381,7 +381,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -400,7 +400,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -419,7 +419,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -438,7 +438,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -457,7 +457,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "USD",
"issuer": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn"
@@ -477,7 +477,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP",
"issuer": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn"
@@ -497,7 +497,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -515,7 +515,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -535,7 +535,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -555,7 +555,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -575,7 +575,7 @@ generateTestValuesForParametersTest()
.testJson = R"({
"books": [
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -654,7 +654,7 @@ TEST_F(RPCSubscribeHandlerTest, StreamsWithoutLedger)
TEST_F(RPCSubscribeHandlerTest, StreamsLedger)
{
static constexpr auto kEXPECTED_OUTPUT =
R"({
R"({
"validated_ledgers":"10-30",
"ledger_index":30,
"ledger_hash":"4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A652",
@@ -734,14 +734,14 @@ TEST_F(RPCSubscribeHandlerTest, JustBooks)
{
auto const input = json::parse(fmt::format(
R"({{
"books":
"books":
[
{{
"taker_pays":
"taker_pays":
{{
"currency": "XRP"
}},
"taker_gets":
"taker_gets":
{{
"currency": "USD",
"issuer": "{}"
@@ -766,14 +766,14 @@ TEST_F(RPCSubscribeHandlerTest, BooksBothSet)
{
auto const input = json::parse(fmt::format(
R"({{
"books":
"books":
[
{{
"taker_pays":
"taker_pays":
{{
"currency": "XRP"
}},
"taker_gets":
"taker_gets":
{{
"currency": "USD",
"issuer": "{}"
@@ -799,14 +799,14 @@ TEST_F(RPCSubscribeHandlerTest, BooksBothSnapshotSet)
{
auto const input = json::parse(fmt::format(
R"({{
"books":
"books":
[
{{
"taker_gets":
"taker_gets":
{{
"currency": "XRP"
}},
"taker_pays":
"taker_pays":
{{
"currency": "USD",
"issuer": "{}"
@@ -970,14 +970,14 @@ TEST_F(RPCSubscribeHandlerTest, BooksBothUnsetSnapshotSet)
{
auto const input = json::parse(fmt::format(
R"({{
"books":
"books":
[
{{
"taker_gets":
"taker_gets":
{{
"currency": "XRP"
}},
"taker_pays":
"taker_pays":
{{
"currency": "USD",
"issuer": "{}"

View File

@@ -40,8 +40,8 @@ TEST_F(RPCTestHandlerTest, HandlerSuccess)
{
runSpawn([](auto yield) {
auto const handler = AnyHandler{HandlerFake{}};
auto const input = json::parse(R"({
"hello": "world",
auto const input = json::parse(R"({
"hello": "world",
"limit": 10
})");
@@ -69,8 +69,8 @@ TEST_F(RPCTestHandlerTest, HandlerErrorHandling)
{
runSpawn([](auto yield) {
auto const handler = AnyHandler{HandlerFake{}};
auto const input = json::parse(R"({
"hello": "not world",
auto const input = json::parse(R"({
"hello": "not world",
"limit": 10
})");
@@ -88,8 +88,8 @@ TEST_F(RPCTestHandlerTest, HandlerInnerErrorHandling)
{
runSpawn([](auto yield) {
auto const handler = AnyHandler{FailingHandlerFake{}};
auto const input = json::parse(R"({
"hello": "world",
auto const input = json::parse(R"({
"hello": "world",
"limit": 10
})");

View File

@@ -112,7 +112,7 @@ TEST_F(RPCTransactionEntryHandlerTest, NonExistLedgerViaLedgerIndex)
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional<ripple::LedgerHeader>{}));
EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1);
auto const input = json::parse(fmt::format(
R"({{
R"({{
"ledger_index": "4",
"tx_hash": "{}"
}})",
@@ -138,7 +138,7 @@ TEST_F(RPCTransactionEntryHandlerTest, TXNotFound)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{TransactionEntryHandler{backend_}};
auto const req = json::parse(fmt::format(
R"({{
R"({{
"tx_hash": "{}"
}})",
kTXN_ID
@@ -168,7 +168,7 @@ TEST_F(RPCTransactionEntryHandlerTest, LedgerSeqNotMatch)
runSpawn([this](auto yield) {
auto const handler = AnyHandler{TransactionEntryHandler{backend_}};
auto const req = json::parse(fmt::format(
R"({{
R"({{
"tx_hash": "{}",
"ledger_index": "30"
}})",
@@ -245,7 +245,7 @@ TEST_F(RPCTransactionEntryHandlerTest, NormalPath)
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{TransactionEntryHandler{backend_}};
auto const req = json::parse(fmt::format(
R"({{
R"({{
"tx_hash": "{}",
"ledger_index": {}
}})",
@@ -319,7 +319,7 @@ TEST_F(RPCTransactionEntryHandlerTest, NormalPathV2)
runSpawn([&, this](auto yield) {
auto const handler = AnyHandler{TransactionEntryHandler{backend_}};
auto const req = json::parse(fmt::format(
R"({{
R"({{
"tx_hash": "{}",
"ledger_index": {}
}})",

View File

@@ -167,10 +167,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerGetsNotObject",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -184,10 +184,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerPaysNotObject",
.testJson = R"({
"books":
"books":
[
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -201,10 +201,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerPaysMissingCurrency",
.testJson = R"({
"books":
"books":
[
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -218,10 +218,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerGetsMissingCurrency",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -235,10 +235,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerPaysCurrencyNotString",
.testJson = R"({
"books":
"books":
[
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -255,10 +255,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerGetsCurrencyNotString",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -275,10 +275,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerPaysInvalidCurrency",
.testJson = R"({
"books":
"books":
[
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -295,10 +295,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerGetsInvalidCurrency",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -315,10 +315,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerPaysMissingIssuer",
.testJson = R"({
"books":
"books":
[
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -334,10 +334,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerGetsMissingIssuer",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -353,10 +353,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerPaysIssuerNotString",
.testJson = R"({
"books":
"books":
[
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -373,10 +373,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerGetsIssuerNotString",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -393,10 +393,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerPaysInvalidIssuer",
.testJson = R"({
"books":
"books":
[
{
"taker_gets":
"taker_gets":
{
"currency": "XRP"
},
@@ -413,10 +413,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerGetsInvalidIssuer",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -433,10 +433,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerGetsXRPHasIssuer",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "USD",
"issuer": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn"
@@ -454,10 +454,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemTakerPaysXRPHasIssuer",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP",
"issuer": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn"
@@ -475,10 +475,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemBadMartket",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},
@@ -494,10 +494,10 @@ generateTestValuesForParametersTest()
UnsubscribeParamTestCaseBundle{
.testName = "BooksItemInvalidBoth",
.testJson = R"({
"books":
"books":
[
{
"taker_pays":
"taker_pays":
{
"currency": "XRP"
},

0
tools/cassandra_delete_range/cassandra_delete_range.go Executable file → Normal file
View File

View File

@@ -37,7 +37,7 @@ foreach (proto ${PROTO_FILES})
add_custom_command(
OUTPUT ${GO_SOURCE_DIR}/${GO_IMPORT_PATH}/${proto_name}.pb.go
COMMAND
protoc ${GO_OPTS} ${GRPC_OPTS}
protoc ${GO_OPTS} ${GRPC_OPTS}
--go-grpc_out=${GO_SOURCE_DIR} -I${PROTO_INC_DIR} ${proto} --plugin=${GOPATH_VALUE}/bin/protoc-gen-go
--plugin=${GOPATH_VALUE}/bin/protoc-gen-go-grpc --go_out=${GO_SOURCE_DIR}/
DEPENDS ${proto}