diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b86842f4..6dba887f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,12 +26,12 @@ repos: # Autoformat: YAML, JSON, Markdown, etc. - repo: https://github.com/rbubley/mirrors-prettier - rev: 787fb9f542b140ba0b2aced38e6a3e68021647a3 # frozen: v3.5.3 + rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2 hooks: - id: prettier - repo: https://github.com/igorshubovych/markdownlint-cli - rev: 586c3ea3f51230da42bab657c6a32e9e66c364f0 # frozen: v0.44.0 + rev: 192ad822316c3a22fb3d3cc8aa6eafa0b8488360 # frozen: v0.45.0 hooks: - id: markdownlint-fix exclude: LICENSE.md @@ -80,7 +80,7 @@ repos: language: script - repo: https://github.com/pre-commit/mirrors-clang-format - rev: f9a52e87b6cdcb01b0a62b8611d9ba9f2dad0067 # frozen: v19.1.7 + rev: 6b9072cd80691b1b48d80046d884409fb1d962d1 # frozen: v20.1.7 hooks: - id: clang-format args: [--style=file] diff --git a/docs/configure-clio.md b/docs/configure-clio.md index b7705bac..3b4557c6 100644 --- a/docs/configure-clio.md +++ b/docs/configure-clio.md @@ -5,7 +5,6 @@ Clio needs access to a `rippled` server in order to work. The following configurations are required for Clio and `rippled` to communicate: 1. In the Clio config file, provide the following: - - The IP of the `rippled` server - The port on which `rippled` is accepting unencrypted WebSocket connections @@ -13,7 +12,6 @@ Clio needs access to a `rippled` server in order to work. The following configur - The port on which `rippled` is handling gRPC requests 2. In the `rippled` config file, you need to open: - - A port to accept unencrypted WebSocket connections - A port to handle gRPC requests, with the IP(s) of Clio specified in the `secure_gateway` entry diff --git a/src/data/AmendmentCenter.cpp b/src/data/AmendmentCenter.cpp index f2caeeca..eb7c14ea 100644 --- a/src/data/AmendmentCenter.cpp +++ b/src/data/AmendmentCenter.cpp @@ -78,17 +78,20 @@ WritingAmendmentKey::WritingAmendmentKey(std::string amendmentName) : AmendmentK } // namespace impl -AmendmentKey::operator std::string const&() const +AmendmentKey:: +operator std::string const&() const { return name; } -AmendmentKey::operator std::string_view() const +AmendmentKey:: +operator std::string_view() const { return name; } -AmendmentKey::operator ripple::uint256() const +AmendmentKey:: +operator ripple::uint256() const { return Amendment::getAmendmentId(name); } diff --git a/src/data/BackendCounters.cpp b/src/data/BackendCounters.cpp index 5fcc2d03..819ea7e6 100644 --- a/src/data/BackendCounters.cpp +++ b/src/data/BackendCounters.cpp @@ -49,35 +49,45 @@ durationInMillisecondsSince(std::chrono::steady_clock::time_point const startTim using namespace util::prometheus; BackendCounters::BackendCounters() - : tooBusyCounter_(PrometheusService::counterInt( - "backend_too_busy_total_number", - Labels(), - "The total number of times the backend was too busy to process a request" - )) - , writeSyncCounter_(PrometheusService::counterInt( - "backend_operations_total_number", - Labels({Label{"operation", "write_sync"}}), - "The total number of times the backend had to write synchronously" - )) - , writeSyncRetryCounter_(PrometheusService::counterInt( - "backend_operations_total_number", - Labels({Label{"operation", "write_sync_retry"}}), - "The total number of times the backend had to retry a synchronous write" - )) + : tooBusyCounter_( + PrometheusService::counterInt( + "backend_too_busy_total_number", + Labels(), + "The total number of times the backend 
was too busy to process a request" + ) + ) + , writeSyncCounter_( + PrometheusService::counterInt( + "backend_operations_total_number", + Labels({Label{"operation", "write_sync"}}), + "The total number of times the backend had to write synchronously" + ) + ) + , writeSyncRetryCounter_( + PrometheusService::counterInt( + "backend_operations_total_number", + Labels({Label{"operation", "write_sync_retry"}}), + "The total number of times the backend had to retry a synchronous write" + ) + ) , asyncWriteCounters_{"write_async"} , asyncReadCounters_{"read_async"} - , readDurationHistogram_(PrometheusService::histogramInt( - "backend_duration_milliseconds_histogram", - Labels({Label{"operation", "read"}}), - kHISTOGRAM_BUCKETS, - "The duration of backend read operations including retries" - )) - , writeDurationHistogram_(PrometheusService::histogramInt( - "backend_duration_milliseconds_histogram", - Labels({Label{"operation", "write"}}), - kHISTOGRAM_BUCKETS, - "The duration of backend write operations including retries" - )) + , readDurationHistogram_( + PrometheusService::histogramInt( + "backend_duration_milliseconds_histogram", + Labels({Label{"operation", "read"}}), + kHISTOGRAM_BUCKETS, + "The duration of backend read operations including retries" + ) + ) + , writeDurationHistogram_( + PrometheusService::histogramInt( + "backend_duration_milliseconds_histogram", + Labels({Label{"operation", "write"}}), + kHISTOGRAM_BUCKETS, + "The duration of backend write operations including retries" + ) + ) { } @@ -170,26 +180,34 @@ BackendCounters::report() const BackendCounters::AsyncOperationCounters::AsyncOperationCounters(std::string name) : name_(std::move(name)) - , pendingCounter_(PrometheusService::gaugeInt( - "backend_operations_current_number", - Labels({{"operation", name_}, {"status", "pending"}}), - "The current number of pending " + name_ + " operations" - )) - , completedCounter_(PrometheusService::counterInt( - "backend_operations_total_number", - Labels({{"operation", name_}, {"status", "completed"}}), - "The total number of completed " + name_ + " operations" - )) - , retryCounter_(PrometheusService::counterInt( - "backend_operations_total_number", - Labels({{"operation", name_}, {"status", "retry"}}), - "The total number of retried " + name_ + " operations" - )) - , errorCounter_(PrometheusService::counterInt( - "backend_operations_total_number", - Labels({{"operation", name_}, {"status", "error"}}), - "The total number of errored " + name_ + " operations" - )) + , pendingCounter_( + PrometheusService::gaugeInt( + "backend_operations_current_number", + Labels({{"operation", name_}, {"status", "pending"}}), + "The current number of pending " + name_ + " operations" + ) + ) + , completedCounter_( + PrometheusService::counterInt( + "backend_operations_total_number", + Labels({{"operation", name_}, {"status", "completed"}}), + "The total number of completed " + name_ + " operations" + ) + ) + , retryCounter_( + PrometheusService::counterInt( + "backend_operations_total_number", + Labels({{"operation", name_}, {"status", "retry"}}), + "The total number of retried " + name_ + " operations" + ) + ) + , errorCounter_( + PrometheusService::counterInt( + "backend_operations_total_number", + Labels({{"operation", name_}, {"status", "error"}}), + "The total number of errored " + name_ + " operations" + ) + ) { } diff --git a/src/data/BackendInterface.hpp b/src/data/BackendInterface.hpp index b2e31c3e..df5dc2bd 100644 --- a/src/data/BackendInterface.hpp +++ b/src/data/BackendInterface.hpp @@ 
-234,8 +234,12 @@ public: * @return A vector of ripple::uint256 representing the account roots */ virtual std::vector - fetchAccountRoots(std::uint32_t number, std::uint32_t pageSize, std::uint32_t seq, boost::asio::yield_context yield) - const = 0; + fetchAccountRoots( + std::uint32_t number, + std::uint32_t pageSize, + std::uint32_t seq, + boost::asio::yield_context yield + ) const = 0; /** * @brief Updates the range of sequences that are stored in the DB. @@ -459,8 +463,11 @@ public: * @return The sequence in unit32_t on success; nullopt otherwise */ virtual std::optional - doFetchLedgerObjectSeq(ripple::uint256 const& key, std::uint32_t sequence, boost::asio::yield_context yield) - const = 0; + doFetchLedgerObjectSeq( + ripple::uint256 const& key, + std::uint32_t sequence, + boost::asio::yield_context yield + ) const = 0; /** * @brief The database-specific implementation for fetching ledger objects. diff --git a/src/data/CassandraBackend.hpp b/src/data/CassandraBackend.hpp index 06651a70..00e5be6e 100644 --- a/src/data/CassandraBackend.hpp +++ b/src/data/CassandraBackend.hpp @@ -361,8 +361,10 @@ public: } std::vector - fetchAllTransactionHashesInLedger(std::uint32_t const ledgerSequence, boost::asio::yield_context yield) - const override + fetchAllTransactionHashesInLedger( + std::uint32_t const ledgerSequence, + boost::asio::yield_context yield + ) const override { auto start = std::chrono::system_clock::now(); auto const res = executor_.read(yield, schema_->selectAllTransactionHashesInLedger, ledgerSequence); @@ -392,8 +394,11 @@ public: } std::optional - fetchNFT(ripple::uint256 const& tokenID, std::uint32_t const ledgerSequence, boost::asio::yield_context yield) - const override + fetchNFT( + ripple::uint256 const& tokenID, + std::uint32_t const ledgerSequence, + boost::asio::yield_context yield + ) const override { auto const res = executor_.read(yield, schema_->selectNFT, tokenID, ledgerSequence); if (not res) @@ -554,10 +559,9 @@ public: selectNFTStatements.reserve(nftIDs.size()); std::transform( - std::cbegin(nftIDs), - std::cend(nftIDs), - std::back_inserter(selectNFTStatements), - [&](auto const& nftID) { return schema_->selectNFT.bind(nftID, ledgerSequence); } + std::cbegin(nftIDs), std::cend(nftIDs), std::back_inserter(selectNFTStatements), [&](auto const& nftID) { + return schema_->selectNFT.bind(nftID, ledgerSequence); + } ); auto const nftInfos = executor_.readEach(yield, selectNFTStatements); @@ -566,10 +570,9 @@ public: selectNFTURIStatements.reserve(nftIDs.size()); std::transform( - std::cbegin(nftIDs), - std::cend(nftIDs), - std::back_inserter(selectNFTURIStatements), - [&](auto const& nftID) { return schema_->selectNFTURI.bind(nftID, ledgerSequence); } + std::cbegin(nftIDs), std::cend(nftIDs), std::back_inserter(selectNFTURIStatements), [&](auto const& nftID) { + return schema_->selectNFTURI.bind(nftID, ledgerSequence); + } ); auto const nftUris = executor_.readEach(yield, selectNFTURIStatements); @@ -626,8 +629,11 @@ public: } std::optional - doFetchLedgerObject(ripple::uint256 const& key, std::uint32_t const sequence, boost::asio::yield_context yield) - const override + doFetchLedgerObject( + ripple::uint256 const& key, + std::uint32_t const sequence, + boost::asio::yield_context yield + ) const override { LOG(log_.debug()) << "Fetching ledger object for seq " << sequence << ", key = " << ripple::to_string(key); if (auto const res = executor_.read(yield, schema_->selectObject, key, sequence); res) { @@ -645,8 +651,11 @@ public: } std::optional - 
doFetchLedgerObjectSeq(ripple::uint256 const& key, std::uint32_t const sequence, boost::asio::yield_context yield) - const override + doFetchLedgerObjectSeq( + ripple::uint256 const& key, + std::uint32_t const sequence, + boost::asio::yield_context yield + ) const override { LOG(log_.debug()) << "Fetching ledger object for seq " << sequence << ", key = " << ripple::to_string(key); if (auto const res = executor_.read(yield, schema_->selectObject, key, sequence); res) { @@ -680,8 +689,11 @@ public: } std::optional - doFetchSuccessorKey(ripple::uint256 key, std::uint32_t const ledgerSequence, boost::asio::yield_context yield) - const override + doFetchSuccessorKey( + ripple::uint256 key, + std::uint32_t const ledgerSequence, + boost::asio::yield_context yield + ) const override { if (auto const res = executor_.read(yield, schema_->selectSuccessor, key, ledgerSequence); res) { if (auto const result = res->template get(); result) { @@ -714,10 +726,9 @@ public: auto const timeDiff = util::timed([this, yield, &results, &hashes, &statements]() { // TODO: seems like a job for "hash IN (list of hashes)" instead? std::transform( - std::cbegin(hashes), - std::cend(hashes), - std::back_inserter(statements), - [this](auto const& hash) { return schema_->selectTransaction.bind(hash); } + std::cbegin(hashes), std::cend(hashes), std::back_inserter(statements), [this](auto const& hash) { + return schema_->selectTransaction.bind(hash); + } ); auto const entries = executor_.readEach(yield, statements); @@ -761,18 +772,14 @@ public: // TODO: seems like a job for "key IN (list of keys)" instead? std::transform( - std::cbegin(keys), - std::cend(keys), - std::back_inserter(statements), - [this, &sequence](auto const& key) { return schema_->selectObject.bind(key, sequence); } + std::cbegin(keys), std::cend(keys), std::back_inserter(statements), [this, &sequence](auto const& key) { + return schema_->selectObject.bind(key, sequence); + } ); auto const entries = executor_.readEach(yield, statements); std::transform( - std::cbegin(entries), - std::cend(entries), - std::back_inserter(results), - [](auto const& res) -> Blob { + std::cbegin(entries), std::cend(entries), std::back_inserter(results), [](auto const& res) -> Blob { if (auto const maybeValue = res.template get(); maybeValue) return *maybeValue; @@ -785,8 +792,12 @@ public: } std::vector - fetchAccountRoots(std::uint32_t number, std::uint32_t pageSize, std::uint32_t seq, boost::asio::yield_context yield) - const override + fetchAccountRoots( + std::uint32_t number, + std::uint32_t pageSize, + std::uint32_t seq, + boost::asio::yield_context yield + ) const override { std::vector liveAccounts; std::optional lastItem; diff --git a/src/data/cassandra/Schema.hpp b/src/data/cassandra/Schema.hpp index c696a139..0ead25db 100644 --- a/src/data/cassandra/Schema.hpp +++ b/src/data/cassandra/Schema.hpp @@ -88,8 +88,9 @@ public: std::vector createSchema = [this]() { std::vector statements; - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( key blob, @@ -99,11 +100,13 @@ public: ) WITH CLUSTERING ORDER BY (sequence DESC) )", - qualifiedTableName(settingsProvider_.get(), "objects") - )); + qualifiedTableName(settingsProvider_.get(), "objects") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( hash blob PRIMARY KEY, @@ -113,11 +116,13 @@ public: metadata blob ) )", - qualifiedTableName(settingsProvider_.get(), 
"transactions") - )); + qualifiedTableName(settingsProvider_.get(), "transactions") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( ledger_sequence bigint, @@ -125,11 +130,13 @@ public: PRIMARY KEY (ledger_sequence, hash) ) )", - qualifiedTableName(settingsProvider_.get(), "ledger_transactions") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_transactions") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( key blob, @@ -138,11 +145,13 @@ public: PRIMARY KEY (key, seq) ) )", - qualifiedTableName(settingsProvider_.get(), "successor") - )); + qualifiedTableName(settingsProvider_.get(), "successor") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( seq bigint, @@ -150,11 +159,13 @@ public: PRIMARY KEY (seq, key) ) )", - qualifiedTableName(settingsProvider_.get(), "diff") - )); + qualifiedTableName(settingsProvider_.get(), "diff") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( account blob, @@ -164,44 +175,52 @@ public: ) WITH CLUSTERING ORDER BY (seq_idx DESC) )", - qualifiedTableName(settingsProvider_.get(), "account_tx") - )); + qualifiedTableName(settingsProvider_.get(), "account_tx") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( sequence bigint PRIMARY KEY, header blob ) )", - qualifiedTableName(settingsProvider_.get(), "ledgers") - )); + qualifiedTableName(settingsProvider_.get(), "ledgers") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( hash blob PRIMARY KEY, sequence bigint ) )", - qualifiedTableName(settingsProvider_.get(), "ledger_hashes") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_hashes") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( is_latest boolean PRIMARY KEY, sequence bigint ) )", - qualifiedTableName(settingsProvider_.get(), "ledger_range") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_range") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( token_id blob, @@ -212,11 +231,13 @@ public: ) WITH CLUSTERING ORDER BY (sequence DESC) )", - qualifiedTableName(settingsProvider_.get(), "nf_tokens") - )); + qualifiedTableName(settingsProvider_.get(), "nf_tokens") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( issuer blob, @@ -226,11 +247,13 @@ public: ) WITH CLUSTERING ORDER BY (taxon ASC, token_id ASC) )", - qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2") - )); + qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( token_id blob, @@ -240,11 +263,13 @@ public: ) WITH CLUSTERING ORDER BY (sequence DESC) )", - qualifiedTableName(settingsProvider_.get(), "nf_token_uris") - )); + qualifiedTableName(settingsProvider_.get(), "nf_token_uris") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + 
fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( token_id blob, @@ -254,11 +279,13 @@ public: ) WITH CLUSTERING ORDER BY (seq_idx DESC) )", - qualifiedTableName(settingsProvider_.get(), "nf_token_transactions") - )); + qualifiedTableName(settingsProvider_.get(), "nf_token_transactions") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( mpt_id blob, @@ -267,11 +294,13 @@ public: ) WITH CLUSTERING ORDER BY (holder ASC) )", - qualifiedTableName(settingsProvider_.get(), "mp_token_holders") - )); + qualifiedTableName(settingsProvider_.get(), "mp_token_holders") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( migrator_name TEXT, @@ -279,11 +308,13 @@ public: PRIMARY KEY (migrator_name) ) )", - qualifiedTableName(settingsProvider_.get(), "migrator_status") - )); + qualifiedTableName(settingsProvider_.get(), "migrator_status") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( node_id UUID, @@ -292,8 +323,9 @@ public: ) WITH default_time_to_live = 2 )", - qualifiedTableName(settingsProvider_.get(), "nodes_chat") - )); + qualifiedTableName(settingsProvider_.get(), "nodes_chat") + ) + ); return statements; }(); @@ -322,146 +354,172 @@ public: // PreparedStatement insertObject = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (key, sequence, object) VALUES (?, ?, ?) )", - qualifiedTableName(settingsProvider_.get(), "objects") - )); + qualifiedTableName(settingsProvider_.get(), "objects") + ) + ); }(); PreparedStatement insertTransaction = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (hash, ledger_sequence, date, transaction, metadata) VALUES (?, ?, ?, ?, ?) )", - qualifiedTableName(settingsProvider_.get(), "transactions") - )); + qualifiedTableName(settingsProvider_.get(), "transactions") + ) + ); }(); PreparedStatement insertLedgerTransaction = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (ledger_sequence, hash) VALUES (?, ?) )", - qualifiedTableName(settingsProvider_.get(), "ledger_transactions") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_transactions") + ) + ); }(); PreparedStatement insertSuccessor = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (key, seq, next) VALUES (?, ?, ?) )", - qualifiedTableName(settingsProvider_.get(), "successor") - )); + qualifiedTableName(settingsProvider_.get(), "successor") + ) + ); }(); PreparedStatement insertDiff = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (seq, key) VALUES (?, ?) )", - qualifiedTableName(settingsProvider_.get(), "diff") - )); + qualifiedTableName(settingsProvider_.get(), "diff") + ) + ); }(); PreparedStatement insertAccountTx = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (account, seq_idx, hash) VALUES (?, ?, ?) 
)", - qualifiedTableName(settingsProvider_.get(), "account_tx") - )); + qualifiedTableName(settingsProvider_.get(), "account_tx") + ) + ); }(); PreparedStatement insertNFT = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (token_id, sequence, owner, is_burned) VALUES (?, ?, ?, ?) )", - qualifiedTableName(settingsProvider_.get(), "nf_tokens") - )); + qualifiedTableName(settingsProvider_.get(), "nf_tokens") + ) + ); }(); PreparedStatement insertIssuerNFT = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (issuer, taxon, token_id) VALUES (?, ?, ?) )", - qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2") - )); + qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2") + ) + ); }(); PreparedStatement insertNFTURI = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (token_id, sequence, uri) VALUES (?, ?, ?) )", - qualifiedTableName(settingsProvider_.get(), "nf_token_uris") - )); + qualifiedTableName(settingsProvider_.get(), "nf_token_uris") + ) + ); }(); PreparedStatement insertNFTTx = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (token_id, seq_idx, hash) VALUES (?, ?, ?) )", - qualifiedTableName(settingsProvider_.get(), "nf_token_transactions") - )); + qualifiedTableName(settingsProvider_.get(), "nf_token_transactions") + ) + ); }(); PreparedStatement insertMPTHolder = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (mpt_id, holder) VALUES (?, ?) )", - qualifiedTableName(settingsProvider_.get(), "mp_token_holders") - )); + qualifiedTableName(settingsProvider_.get(), "mp_token_holders") + ) + ); }(); PreparedStatement insertLedgerHeader = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (sequence, header) VALUES (?, ?) )", - qualifiedTableName(settingsProvider_.get(), "ledgers") - )); + qualifiedTableName(settingsProvider_.get(), "ledgers") + ) + ); }(); PreparedStatement insertLedgerHash = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (hash, sequence) VALUES (?, ?) )", - qualifiedTableName(settingsProvider_.get(), "ledger_hashes") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_hashes") + ) + ); }(); // @@ -469,48 +527,56 @@ public: // PreparedStatement updateLedgerRange = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( UPDATE {} SET sequence = ? WHERE is_latest = ? IF sequence IN (?, null) )", - qualifiedTableName(settingsProvider_.get(), "ledger_range") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_range") + ) + ); }(); PreparedStatement deleteLedgerRange = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( UPDATE {} SET sequence = ? 
WHERE is_latest = False )", - qualifiedTableName(settingsProvider_.get(), "ledger_range") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_range") + ) + ); }(); PreparedStatement insertMigratorStatus = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( INSERT INTO {} (migrator_name, status) VALUES (?, ?) )", - qualifiedTableName(settingsProvider_.get(), "migrator_status") - )); + qualifiedTableName(settingsProvider_.get(), "migrator_status") + ) + ); }(); PreparedStatement updateClioNodeMessage = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( UPDATE {} SET message = ? WHERE node_id = ? )", - qualifiedTableName(settingsProvider_.get(), "nodes_chat") - )); + qualifiedTableName(settingsProvider_.get(), "nodes_chat") + ) + ); }(); // @@ -518,8 +584,9 @@ public: // PreparedStatement selectSuccessor = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT next FROM {} WHERE key = ? @@ -527,24 +594,28 @@ public: ORDER BY seq DESC LIMIT 1 )", - qualifiedTableName(settingsProvider_.get(), "successor") - )); + qualifiedTableName(settingsProvider_.get(), "successor") + ) + ); }(); PreparedStatement selectDiff = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT key FROM {} WHERE seq = ? )", - qualifiedTableName(settingsProvider_.get(), "diff") - )); + qualifiedTableName(settingsProvider_.get(), "diff") + ) + ); }(); PreparedStatement selectObject = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT object, sequence FROM {} WHERE key = ? @@ -552,35 +623,41 @@ public: ORDER BY sequence DESC LIMIT 1 )", - qualifiedTableName(settingsProvider_.get(), "objects") - )); + qualifiedTableName(settingsProvider_.get(), "objects") + ) + ); }(); PreparedStatement selectTransaction = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT transaction, metadata, ledger_sequence, date FROM {} WHERE hash = ? )", - qualifiedTableName(settingsProvider_.get(), "transactions") - )); + qualifiedTableName(settingsProvider_.get(), "transactions") + ) + ); }(); PreparedStatement selectAllTransactionHashesInLedger = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT hash FROM {} WHERE ledger_sequence = ? )", - qualifiedTableName(settingsProvider_.get(), "ledger_transactions") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_transactions") + ) + ); }(); PreparedStatement selectLedgerPageKeys = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT key FROM {} WHERE TOKEN(key) >= ? @@ -589,13 +666,15 @@ public: LIMIT ? ALLOW FILTERING )", - qualifiedTableName(settingsProvider_.get(), "objects") - )); + qualifiedTableName(settingsProvider_.get(), "objects") + ) + ); }(); PreparedStatement selectLedgerPage = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT object, key FROM {} WHERE TOKEN(key) >= ? @@ -604,64 +683,74 @@ public: LIMIT ? 
ALLOW FILTERING )", - qualifiedTableName(settingsProvider_.get(), "objects") - )); + qualifiedTableName(settingsProvider_.get(), "objects") + ) + ); }(); PreparedStatement getToken = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT TOKEN(key) FROM {} WHERE key = ? LIMIT 1 )", - qualifiedTableName(settingsProvider_.get(), "objects") - )); + qualifiedTableName(settingsProvider_.get(), "objects") + ) + ); }(); PreparedStatement selectAccountTx = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT hash, seq_idx FROM {} WHERE account = ? AND seq_idx < ? LIMIT ? )", - qualifiedTableName(settingsProvider_.get(), "account_tx") - )); + qualifiedTableName(settingsProvider_.get(), "account_tx") + ) + ); }(); PreparedStatement selectAccountFromBeginning = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT account FROM {} WHERE token(account) > 0 PER PARTITION LIMIT 1 LIMIT ? )", - qualifiedTableName(settingsProvider_.get(), "account_tx") - )); + qualifiedTableName(settingsProvider_.get(), "account_tx") + ) + ); }(); PreparedStatement selectAccountFromToken = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT account FROM {} WHERE token(account) > token(?) PER PARTITION LIMIT 1 LIMIT ? )", - qualifiedTableName(settingsProvider_.get(), "account_tx") - )); + qualifiedTableName(settingsProvider_.get(), "account_tx") + ) + ); }(); PreparedStatement selectAccountTxForward = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT hash, seq_idx FROM {} WHERE account = ? @@ -669,13 +758,15 @@ public: ORDER BY seq_idx ASC LIMIT ? )", - qualifiedTableName(settingsProvider_.get(), "account_tx") - )); + qualifiedTableName(settingsProvider_.get(), "account_tx") + ) + ); }(); PreparedStatement selectNFT = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT sequence, owner, is_burned FROM {} WHERE token_id = ? @@ -683,13 +774,15 @@ public: ORDER BY sequence DESC LIMIT 1 )", - qualifiedTableName(settingsProvider_.get(), "nf_tokens") - )); + qualifiedTableName(settingsProvider_.get(), "nf_tokens") + ) + ); }(); PreparedStatement selectNFTURI = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT uri FROM {} WHERE token_id = ? @@ -697,13 +790,15 @@ public: ORDER BY sequence DESC LIMIT 1 )", - qualifiedTableName(settingsProvider_.get(), "nf_token_uris") - )); + qualifiedTableName(settingsProvider_.get(), "nf_token_uris") + ) + ); }(); PreparedStatement selectNFTTx = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT hash, seq_idx FROM {} WHERE token_id = ? @@ -711,13 +806,15 @@ public: ORDER BY seq_idx DESC LIMIT ? )", - qualifiedTableName(settingsProvider_.get(), "nf_token_transactions") - )); + qualifiedTableName(settingsProvider_.get(), "nf_token_transactions") + ) + ); }(); PreparedStatement selectNFTTxForward = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT hash, seq_idx FROM {} WHERE token_id = ? @@ -725,13 +822,15 @@ public: ORDER BY seq_idx ASC LIMIT ? 
)", - qualifiedTableName(settingsProvider_.get(), "nf_token_transactions") - )); + qualifiedTableName(settingsProvider_.get(), "nf_token_transactions") + ) + ); }(); PreparedStatement selectNFTIDsByIssuer = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT token_id FROM {} WHERE issuer = ? @@ -739,13 +838,15 @@ public: ORDER BY taxon ASC, token_id ASC LIMIT ? )", - qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2") - )); + qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2") + ) + ); }(); PreparedStatement selectNFTIDsByIssuerTaxon = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT token_id FROM {} WHERE issuer = ? @@ -754,13 +855,15 @@ public: ORDER BY taxon ASC, token_id ASC LIMIT ? )", - qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2") - )); + qualifiedTableName(settingsProvider_.get(), "issuer_nf_tokens_v2") + ) + ); }(); PreparedStatement selectMPTHolders = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT holder FROM {} WHERE mpt_id = ? @@ -768,74 +871,87 @@ public: ORDER BY holder ASC LIMIT ? )", - qualifiedTableName(settingsProvider_.get(), "mp_token_holders") - )); + qualifiedTableName(settingsProvider_.get(), "mp_token_holders") + ) + ); }(); PreparedStatement selectLedgerByHash = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT sequence FROM {} WHERE hash = ? LIMIT 1 )", - qualifiedTableName(settingsProvider_.get(), "ledger_hashes") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_hashes") + ) + ); }(); PreparedStatement selectLedgerBySeq = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT header FROM {} WHERE sequence = ? )", - qualifiedTableName(settingsProvider_.get(), "ledgers") - )); + qualifiedTableName(settingsProvider_.get(), "ledgers") + ) + ); }(); PreparedStatement selectLatestLedger = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT sequence FROM {} WHERE is_latest = True )", - qualifiedTableName(settingsProvider_.get(), "ledger_range") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_range") + ) + ); }(); PreparedStatement selectLedgerRange = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT sequence FROM {} WHERE is_latest in (True, False) )", - qualifiedTableName(settingsProvider_.get(), "ledger_range") - )); + qualifiedTableName(settingsProvider_.get(), "ledger_range") + ) + ); }(); PreparedStatement selectMigratorStatus = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT status FROM {} WHERE migrator_name = ? 
)", - qualifiedTableName(settingsProvider_.get(), "migrator_status") - )); + qualifiedTableName(settingsProvider_.get(), "migrator_status") + ) + ); }(); PreparedStatement selectClioNodesData = [this]() { - return handle_.get().prepare(fmt::format( - R"( + return handle_.get().prepare( + fmt::format( + R"( SELECT node_id, message FROM {} )", - qualifiedTableName(settingsProvider_.get(), "nodes_chat") - )); + qualifiedTableName(settingsProvider_.get(), "nodes_chat") + ) + ); }(); }; diff --git a/src/data/cassandra/impl/Cluster.cpp b/src/data/cassandra/impl/Cluster.cpp index d793df16..05afc145 100644 --- a/src/data/cassandra/impl/Cluster.cpp +++ b/src/data/cassandra/impl/Cluster.cpp @@ -45,7 +45,8 @@ Cluster::Cluster(Settings const& settings) : ManagedObject{cass_cluster_new(), k cass_cluster_set_token_aware_routing(*this, cass_true); if (auto const rc = cass_cluster_set_protocol_version(*this, CASS_PROTOCOL_VERSION_V4); rc != CASS_OK) { - throw std::runtime_error(fmt::format("Error setting cassandra protocol version to v4: {}", cass_error_desc(rc)) + throw std::runtime_error( + fmt::format("Error setting cassandra protocol version to v4: {}", cass_error_desc(rc)) ); } diff --git a/src/data/cassandra/impl/RetryPolicy.hpp b/src/data/cassandra/impl/RetryPolicy.hpp index cc5e63c2..8ad1fa56 100644 --- a/src/data/cassandra/impl/RetryPolicy.hpp +++ b/src/data/cassandra/impl/RetryPolicy.hpp @@ -45,11 +45,13 @@ public: * @brief Create a new retry policy instance with the io_context provided */ ExponentialBackoffRetryPolicy(boost::asio::io_context& ioc) - : retry_(util::makeRetryExponentialBackoff( - std::chrono::milliseconds(1), - std::chrono::seconds(1), - boost::asio::make_strand(ioc) - )) + : retry_( + util::makeRetryExponentialBackoff( + std::chrono::milliseconds(1), + std::chrono::seconds(1), + boost::asio::make_strand(ioc) + ) + ) { } diff --git a/src/etl/ETLService.cpp b/src/etl/ETLService.cpp index 2db7b05a..5985c8c4 100644 --- a/src/etl/ETLService.cpp +++ b/src/etl/ETLService.cpp @@ -171,9 +171,11 @@ ETLService::runETLPipeline(uint32_t startSequence, uint32_t numExtractors) auto pipe = DataPipeType{numExtractors, startSequence}; for (auto i = 0u; i < numExtractors; ++i) { - extractors.push_back(std::make_unique( - pipe, networkValidatedLedgers_, ledgerFetcher_, startSequence + i, finishSequence_, state_ - )); + extractors.push_back( + std::make_unique( + pipe, networkValidatedLedgers_, ledgerFetcher_, startSequence + i, finishSequence_, state_ + ) + ); } auto transformer = diff --git a/src/etl/LoadBalancer.cpp b/src/etl/LoadBalancer.cpp index 3e07b4b1..50fe7877 100644 --- a/src/etl/LoadBalancer.cpp +++ b/src/etl/LoadBalancer.cpp @@ -184,11 +184,14 @@ LoadBalancer::LoadBalancer( LOG(log_.warn()) << "Failed to fetch ETL state from source = " << source->toString() << " Please check the configuration and network"; } else if (etlState_ && etlState_->networkID != stateOpt->networkID) { - checkOnETLFailure(fmt::format( - "ETL sources must be on the same network. Source network id = {} does not match others network id = {}", - stateOpt->networkID, - etlState_->networkID - )); + checkOnETLFailure( + fmt::format( + "ETL sources must be on the same network. 
Source network id = {} does not match others network id " + "= {}", + stateOpt->networkID, + etlState_->networkID + ) + ); } else { etlState_ = stateOpt; } @@ -278,9 +281,8 @@ LoadBalancer::forwardToRippled( if (forwardingCache_ and forwardingCache_->shouldCache(cmd)) { bool servedFromCache = true; - auto updater = - [this, &request, &clientIp, &servedFromCache, isAdmin](boost::asio::yield_context yield - ) -> std::expected { + auto updater = [this, &request, &clientIp, &servedFromCache, isAdmin](boost::asio::yield_context yield) + -> std::expected { servedFromCache = false; auto result = forwardToRippledImpl(request, clientIp, isAdmin, yield); if (result.has_value()) { @@ -294,10 +296,9 @@ LoadBalancer::forwardToRippled( }; auto result = forwardingCache_->getOrUpdate( - yield, - cmd, - std::move(updater), - [](util::ResponseExpirationCache::EntryData const& entry) { return not entry.response.contains("error"); } + yield, cmd, std::move(updater), [](util::ResponseExpirationCache::EntryData const& entry) { + return not entry.response.contains("error"); + } ); if (servedFromCache) { ++forwardingCounters_.cacheHit.get(); diff --git a/src/etl/LoadBalancer.hpp b/src/etl/LoadBalancer.hpp index 8edb01bb..2affb52b 100644 --- a/src/etl/LoadBalancer.hpp +++ b/src/etl/LoadBalancer.hpp @@ -172,8 +172,10 @@ public: * @return A std::vector The ledger data */ std::vector - loadInitialLedger(uint32_t sequence, std::chrono::steady_clock::duration retryAfter = std::chrono::seconds{2}) - override; + loadInitialLedger( + uint32_t sequence, + std::chrono::steady_clock::duration retryAfter = std::chrono::seconds{2} + ) override; /** * @brief Load the initial ledger, writing data to the queue. diff --git a/src/etl/NFTHelpers.cpp b/src/etl/NFTHelpers.cpp index 5e5e82b0..4a668056 100644 --- a/src/etl/NFTHelpers.cpp +++ b/src/etl/NFTHelpers.cpp @@ -138,7 +138,8 @@ getNFTokenMintData(ripple::TxMeta const& txMeta, ripple::STTx const& sttx) // There should always be a difference so the returned finalIDs // iterator should never be end(). But better safe than sorry. 
if (finalIDs.size() != prevIDs.size() + 1 || diff.first == finalIDs.end() || !owner) { - throw std::runtime_error(fmt::format(" - unexpected NFTokenMint data in tx {}", strHex(sttx.getTransactionID())) + throw std::runtime_error( + fmt::format(" - unexpected NFTokenMint data in tx {}", strHex(sttx.getTransactionID())) ); } diff --git a/src/etl/impl/Extractor.hpp b/src/etl/impl/Extractor.hpp index a71cf9a2..30c7fae0 100644 --- a/src/etl/impl/Extractor.hpp +++ b/src/etl/impl/Extractor.hpp @@ -94,8 +94,8 @@ private: double totalTime = 0.0; auto currentSequence = startSequence_; - while (!shouldFinish(currentSequence) && networkValidatedLedgers_->waitUntilValidatedByNetwork(currentSequence) - ) { + while (!shouldFinish(currentSequence) && + networkValidatedLedgers_->waitUntilValidatedByNetwork(currentSequence)) { auto [fetchResponse, time] = ::util::timed>([this, currentSequence]() { return ledgerFetcher_.get().fetchDataAndDiff(currentSequence); }); diff --git a/src/etl/impl/LedgerLoader.hpp b/src/etl/impl/LedgerLoader.hpp index 2ca7bdf1..876e61c1 100644 --- a/src/etl/impl/LedgerLoader.hpp +++ b/src/etl/impl/LedgerLoader.hpp @@ -209,47 +209,49 @@ public: size_t numWrites = 0; backend_->cache().setFull(); - auto seconds = ::util::timed([this, keys = std::move(edgeKeys), sequence, &numWrites]( - ) mutable { - for (auto& key : keys) { - LOG(log_.debug()) << "Writing edge key = " << ripple::strHex(key); - auto succ = backend_->cache().getSuccessor(*ripple::uint256::fromVoidChecked(key), sequence); - if (succ) - backend_->writeSuccessor(std::move(key), sequence, uint256ToString(succ->key)); - } - - ripple::uint256 prev = data::kFIRST_KEY; - while (auto cur = backend_->cache().getSuccessor(prev, sequence)) { - ASSERT(cur.has_value(), "Successor for key {} must exist", ripple::strHex(prev)); - if (prev == data::kFIRST_KEY) - backend_->writeSuccessor(uint256ToString(prev), sequence, uint256ToString(cur->key)); - - if (isBookDir(cur->key, cur->blob)) { - auto base = getBookBase(cur->key); - // make sure the base is not an actual object - if (!backend_->cache().get(base, sequence)) { - auto succ = backend_->cache().getSuccessor(base, sequence); - ASSERT(succ.has_value(), "Book base {} must have a successor", ripple::strHex(base)); - if (succ->key == cur->key) { - LOG(log_.debug()) << "Writing book successor = " << ripple::strHex(base) << " - " - << ripple::strHex(cur->key); - - backend_->writeSuccessor(uint256ToString(base), sequence, uint256ToString(cur->key)); - } - } - - ++numWrites; + auto seconds = + ::util::timed([this, keys = std::move(edgeKeys), sequence, &numWrites]() mutable { + for (auto& key : keys) { + LOG(log_.debug()) << "Writing edge key = " << ripple::strHex(key); + auto succ = backend_->cache().getSuccessor(*ripple::uint256::fromVoidChecked(key), sequence); + if (succ) + backend_->writeSuccessor(std::move(key), sequence, uint256ToString(succ->key)); } - prev = cur->key; - static constexpr std::size_t kLOG_STRIDE = 100000; - if (numWrites % kLOG_STRIDE == 0 && numWrites != 0) - LOG(log_.info()) << "Wrote " << numWrites << " book successors"; - } + ripple::uint256 prev = data::kFIRST_KEY; + while (auto cur = backend_->cache().getSuccessor(prev, sequence)) { + ASSERT(cur.has_value(), "Successor for key {} must exist", ripple::strHex(prev)); + if (prev == data::kFIRST_KEY) + backend_->writeSuccessor(uint256ToString(prev), sequence, uint256ToString(cur->key)); - backend_->writeSuccessor(uint256ToString(prev), sequence, uint256ToString(data::kLAST_KEY)); - ++numWrites; - }); + if 
(isBookDir(cur->key, cur->blob)) { + auto base = getBookBase(cur->key); + // make sure the base is not an actual object + if (!backend_->cache().get(base, sequence)) { + auto succ = backend_->cache().getSuccessor(base, sequence); + ASSERT(succ.has_value(), "Book base {} must have a successor", ripple::strHex(base)); + if (succ->key == cur->key) { + LOG(log_.debug()) << "Writing book successor = " << ripple::strHex(base) << " - " + << ripple::strHex(cur->key); + + backend_->writeSuccessor( + uint256ToString(base), sequence, uint256ToString(cur->key) + ); + } + } + + ++numWrites; + } + + prev = cur->key; + static constexpr std::size_t kLOG_STRIDE = 100000; + if (numWrites % kLOG_STRIDE == 0 && numWrites != 0) + LOG(log_.info()) << "Wrote " << numWrites << " book successors"; + } + + backend_->writeSuccessor(uint256ToString(prev), sequence, uint256ToString(data::kLAST_KEY)); + ++numWrites; + }); LOG(log_.info()) << "Looping through cache and submitting all writes took " << seconds << " seconds. numWrites = " << std::to_string(numWrites); diff --git a/src/etl/impl/LedgerPublisher.hpp b/src/etl/impl/LedgerPublisher.hpp index e0960c5c..70aa7645 100644 --- a/src/etl/impl/LedgerPublisher.hpp +++ b/src/etl/impl/LedgerPublisher.hpp @@ -249,8 +249,9 @@ public: std::chrono::time_point getLastPublish() const override { - return std::chrono::time_point{std::chrono::seconds{lastPublishSeconds_.get().value() - }}; + return std::chrono::time_point{ + std::chrono::seconds{lastPublishSeconds_.get().value()} + }; } /** diff --git a/src/etl/impl/SubscriptionSource.cpp b/src/etl/impl/SubscriptionSource.cpp index 58ed9014..0f6beff6 100644 --- a/src/etl/impl/SubscriptionSource.cpp +++ b/src/etl/impl/SubscriptionSource.cpp @@ -79,11 +79,13 @@ SubscriptionSource::SubscriptionSource( , onConnect_(std::move(onConnect)) , onDisconnect_(std::move(onDisconnect)) , onLedgerClosed_(std::move(onLedgerClosed)) - , lastMessageTimeSecondsSinceEpoch_(PrometheusService::gaugeInt( - "subscription_source_last_message_time", - util::prometheus::Labels({{"source", fmt::format("{}:{}", ip, wsPort)}}), - "Seconds since epoch of the last message received from rippled subscription streams" - )) + , lastMessageTimeSecondsSinceEpoch_( + PrometheusService::gaugeInt( + "subscription_source_last_message_time", + util::prometheus::Labels({{"source", fmt::format("{}:{}", ip, wsPort)}}), + "Seconds since epoch of the last message received from rippled subscription streams" + ) + ) { wsConnectionBuilder_.addHeader({boost::beast::http::field::user_agent, "clio-client"}) .addHeader({"X-User", "clio-client"}) @@ -329,9 +331,13 @@ SubscriptionSource::setValidatedRange(std::string range) pairs.emplace_back(sequence, sequence); } else { if (minAndMax.size() != 2) { - throw std::runtime_error(fmt::format( - "Error parsing range: {}.Min and max should be of size 2. Got size = {}", range, minAndMax.size() - )); + throw std::runtime_error( + fmt::format( + "Error parsing range: {}.Min and max should be of size 2. 
Got size = {}", + range, + minAndMax.size() + ) + ); } uint32_t const min = std::stoll(minAndMax[0]); uint32_t const max = std::stoll(minAndMax[1]); diff --git a/src/etlng/LoadBalancer.cpp b/src/etlng/LoadBalancer.cpp index b7b99383..e3ae4bb6 100644 --- a/src/etlng/LoadBalancer.cpp +++ b/src/etlng/LoadBalancer.cpp @@ -184,11 +184,14 @@ LoadBalancer::LoadBalancer( LOG(log_.warn()) << "Failed to fetch ETL state from source = " << source->toString() << " Please check the configuration and network"; } else if (etlState_ && etlState_->networkID != stateOpt->networkID) { - checkOnETLFailure(fmt::format( - "ETL sources must be on the same network. Source network id = {} does not match others network id = {}", - stateOpt->networkID, - etlState_->networkID - )); + checkOnETLFailure( + fmt::format( + "ETL sources must be on the same network. Source network id = {} does not match others network id " + "= {}", + stateOpt->networkID, + etlState_->networkID + ) + ); } else { etlState_ = stateOpt; } @@ -284,9 +287,8 @@ LoadBalancer::forwardToRippled( if (forwardingCache_ and forwardingCache_->shouldCache(cmd)) { bool servedFromCache = true; - auto updater = - [this, &request, &clientIp, &servedFromCache, isAdmin](boost::asio::yield_context yield - ) -> std::expected { + auto updater = [this, &request, &clientIp, &servedFromCache, isAdmin](boost::asio::yield_context yield) + -> std::expected { servedFromCache = false; auto result = forwardToRippledImpl(request, clientIp, isAdmin, yield); if (result.has_value()) { @@ -300,10 +302,9 @@ LoadBalancer::forwardToRippled( }; auto result = forwardingCache_->getOrUpdate( - yield, - cmd, - std::move(updater), - [](util::ResponseExpirationCache::EntryData const& entry) { return not entry.response.contains("error"); } + yield, cmd, std::move(updater), [](util::ResponseExpirationCache::EntryData const& entry) { + return not entry.response.contains("error"); + } ); if (servedFromCache) { ++forwardingCounters_.cacheHit.get(); diff --git a/src/etlng/impl/LedgerPublisher.hpp b/src/etlng/impl/LedgerPublisher.hpp index 9ac634e8..9b07432e 100644 --- a/src/etlng/impl/LedgerPublisher.hpp +++ b/src/etlng/impl/LedgerPublisher.hpp @@ -227,8 +227,9 @@ public: std::chrono::time_point getLastPublish() const override { - return std::chrono::time_point{std::chrono::seconds{lastPublishSeconds_.get().value() - }}; + return std::chrono::time_point{ + std::chrono::seconds{lastPublishSeconds_.get().value()} + }; } /** diff --git a/src/feed/SubscriptionManager.hpp b/src/feed/SubscriptionManager.hpp index b227625d..26fa630e 100644 --- a/src/feed/SubscriptionManager.hpp +++ b/src/feed/SubscriptionManager.hpp @@ -159,8 +159,10 @@ public: * @param transactions The transactions in the current ledger. */ void - pubBookChanges(ripple::LedgerHeader const& lgrInfo, std::vector const& transactions) - final; + pubBookChanges( + ripple::LedgerHeader const& lgrInfo, + std::vector const& transactions + ) final; /** * @brief Subscribe to the proposed transactions feed. 
diff --git a/src/feed/impl/ProposedTransactionFeed.cpp b/src/feed/impl/ProposedTransactionFeed.cpp index 780d16e8..6e305a8c 100644 --- a/src/feed/impl/ProposedTransactionFeed.cpp +++ b/src/feed/impl/ProposedTransactionFeed.cpp @@ -57,9 +57,7 @@ ProposedTransactionFeed::sub(ripple::AccountID const& account, SubscriberSharedP { auto const weakPtr = std::weak_ptr(subscriber); auto const added = accountSignal_.connectTrackableSlot( - subscriber, - account, - [this, weakPtr](std::shared_ptr const& msg) { + subscriber, account, [this, weakPtr](std::shared_ptr const& msg) { if (auto connectionPtr = weakPtr.lock()) { // Check if this connection already sent if (notified_.contains(connectionPtr.get())) diff --git a/src/migration/README.md b/src/migration/README.md index ae01b880..9f0d5455 100644 --- a/src/migration/README.md +++ b/src/migration/README.md @@ -60,7 +60,6 @@ Most indexes are based on either ledger states or transactions. We provide the ` If you need to do full scan against other table, you can follow below steps: - Describe the table which needs full scan in a struct. It has to satisfy the `TableSpec`(cassandra/Spec.hpp) concept, containing static member: - - Tuple type `Row`, it's the type of each field in a row. The order of types should match what database will return in a row. Key types should come first, followed by other field types sorted in alphabetical order. - `kPARTITION_KEY`, it's the name of the partition key of the table. - `kTABLE_NAME` diff --git a/src/migration/cassandra/impl/CassandraMigrationSchema.hpp b/src/migration/cassandra/impl/CassandraMigrationSchema.hpp index 9d0afc9d..154fdeda 100644 --- a/src/migration/cassandra/impl/CassandraMigrationSchema.hpp +++ b/src/migration/cassandra/impl/CassandraMigrationSchema.hpp @@ -63,16 +63,18 @@ public: std::string const& key ) { - return handler.prepare(fmt::format( - R"( + return handler.prepare( + fmt::format( + R"( SELECT * FROM {} WHERE TOKEN({}) >= ? AND TOKEN({}) <= ? )", - data::cassandra::qualifiedTableName(settingsProvider_.get(), tableName), - key, - key - )); + data::cassandra::qualifiedTableName(settingsProvider_.get(), tableName), + key, + key + ) + ); } /** @@ -84,14 +86,16 @@ public: data::cassandra::PreparedStatement const& getPreparedInsertMigratedMigrator(data::cassandra::Handle const& handler) { - static auto kPREPARED = handler.prepare(fmt::format( - R"( + static auto kPREPARED = handler.prepare( + fmt::format( + R"( INSERT INTO {} (migrator_name, status) VALUES (?, ?) 
)", - data::cassandra::qualifiedTableName(settingsProvider_.get(), "migrator_status") - )); + data::cassandra::qualifiedTableName(settingsProvider_.get(), "migrator_status") + ) + ); return kPREPARED; } }; diff --git a/src/rpc/Counters.cpp b/src/rpc/Counters.cpp index b6593c19..afb6b489 100644 --- a/src/rpc/Counters.cpp +++ b/src/rpc/Counters.cpp @@ -40,41 +40,55 @@ using util::prometheus::Label; using util::prometheus::Labels; Counters::MethodInfo::MethodInfo(std::string const& method) - : started(PrometheusService::counterInt( - "rpc_method_total_number", - Labels{{{"status", "started"}, {"method", method}}}, - fmt::format("Total number of started calls to the method {}", method) - )) - , finished(PrometheusService::counterInt( - "rpc_method_total_number", - Labels{{{"status", "finished"}, {"method", method}}}, - fmt::format("Total number of finished calls to the method {}", method) - )) - , failed(PrometheusService::counterInt( - "rpc_method_total_number", - Labels{{{"status", "failed"}, {"method", method}}}, - fmt::format("Total number of failed calls to the method {}", method) - )) - , errored(PrometheusService::counterInt( - "rpc_method_total_number", - Labels{{{"status", "errored"}, {"method", method}}}, - fmt::format("Total number of errored calls to the method {}", method) - )) - , forwarded(PrometheusService::counterInt( - "rpc_method_total_number", - Labels{{{"status", "forwarded"}, {"method", method}}}, - fmt::format("Total number of forwarded calls to the method {}", method) - )) - , failedForward(PrometheusService::counterInt( - "rpc_method_total_number", - Labels{{{"status", "failed_forward"}, {"method", method}}}, - fmt::format("Total number of failed forwarded calls to the method {}", method) - )) - , duration(PrometheusService::counterInt( - "rpc_method_duration_us", - Labels({util::prometheus::Label{"method", method}}), - fmt::format("Total duration of calls to the method {}", method) - )) + : started( + PrometheusService::counterInt( + "rpc_method_total_number", + Labels{{{"status", "started"}, {"method", method}}}, + fmt::format("Total number of started calls to the method {}", method) + ) + ) + , finished( + PrometheusService::counterInt( + "rpc_method_total_number", + Labels{{{"status", "finished"}, {"method", method}}}, + fmt::format("Total number of finished calls to the method {}", method) + ) + ) + , failed( + PrometheusService::counterInt( + "rpc_method_total_number", + Labels{{{"status", "failed"}, {"method", method}}}, + fmt::format("Total number of failed calls to the method {}", method) + ) + ) + , errored( + PrometheusService::counterInt( + "rpc_method_total_number", + Labels{{{"status", "errored"}, {"method", method}}}, + fmt::format("Total number of errored calls to the method {}", method) + ) + ) + , forwarded( + PrometheusService::counterInt( + "rpc_method_total_number", + Labels{{{"status", "forwarded"}, {"method", method}}}, + fmt::format("Total number of forwarded calls to the method {}", method) + ) + ) + , failedForward( + PrometheusService::counterInt( + "rpc_method_total_number", + Labels{{{"status", "failed_forward"}, {"method", method}}}, + fmt::format("Total number of failed forwarded calls to the method {}", method) + ) + ) + , duration( + PrometheusService::counterInt( + "rpc_method_duration_us", + Labels({util::prometheus::Label{"method", method}}), + fmt::format("Total duration of calls to the method {}", method) + ) + ) { } @@ -89,31 +103,41 @@ Counters::getMethodInfo(std::string const& method) } Counters::Counters(WorkQueue const& wq) - 
: tooBusyCounter_(PrometheusService::counterInt( - "rpc_error_total_number", - Labels({Label{"error_type", "too_busy"}}), - "Total number of too busy errors" - )) - , notReadyCounter_(PrometheusService::counterInt( - "rpc_error_total_number", - Labels({Label{"error_type", "not_ready"}}), - "Total number of not ready replyes" - )) - , badSyntaxCounter_(PrometheusService::counterInt( - "rpc_error_total_number", - Labels({Label{"error_type", "bad_syntax"}}), - "Total number of bad syntax replyes" - )) - , unknownCommandCounter_(PrometheusService::counterInt( - "rpc_error_total_number", - Labels({Label{"error_type", "unknown_command"}}), - "Total number of unknown command replyes" - )) - , internalErrorCounter_(PrometheusService::counterInt( - "rpc_error_total_number", - Labels({Label{"error_type", "internal_error"}}), - "Total number of internal errors" - )) + : tooBusyCounter_( + PrometheusService::counterInt( + "rpc_error_total_number", + Labels({Label{"error_type", "too_busy"}}), + "Total number of too busy errors" + ) + ) + , notReadyCounter_( + PrometheusService::counterInt( + "rpc_error_total_number", + Labels({Label{"error_type", "not_ready"}}), + "Total number of not ready replyes" + ) + ) + , badSyntaxCounter_( + PrometheusService::counterInt( + "rpc_error_total_number", + Labels({Label{"error_type", "bad_syntax"}}), + "Total number of bad syntax replyes" + ) + ) + , unknownCommandCounter_( + PrometheusService::counterInt( + "rpc_error_total_number", + Labels({Label{"error_type", "unknown_command"}}), + "Total number of unknown command replyes" + ) + ) + , internalErrorCounter_( + PrometheusService::counterInt( + "rpc_error_total_number", + Labels({Label{"error_type", "internal_error"}}), + "Total number of internal errors" + ) + ) , workQueue_(std::cref(wq)) , startupTime_{std::chrono::system_clock::now()} { diff --git a/src/rpc/README.md b/src/rpc/README.md index 7de7fbe8..bc1a3eb2 100644 --- a/src/rpc/README.md +++ b/src/rpc/README.md @@ -17,7 +17,6 @@ See [tests/unit/rpc](https://github.com/XRPLF/clio/tree/develop/tests/unit/rpc) Handlers need to fulfil the requirements specified by the `SomeHandler` concept (see `rpc/common/Concepts.hpp`): - Expose types: - - `Input` - The POD struct which acts as input for the handler - `Output` - The POD struct which acts as output of a valid handler invocation diff --git a/src/rpc/RPCEngine.hpp b/src/rpc/RPCEngine.hpp index 81e7b960..0ae8596d 100644 --- a/src/rpc/RPCEngine.hpp +++ b/src/rpc/RPCEngine.hpp @@ -158,9 +158,8 @@ public: } if (not ctx.isAdmin and responseCache_ and responseCache_->shouldCache(ctx.method)) { - auto updater = - [this, &ctx](boost::asio::yield_context - ) -> std::expected { + auto updater = [this, &ctx](boost::asio::yield_context) + -> std::expected { auto result = buildResponseImpl(ctx); auto const extracted = diff --git a/src/rpc/RPCHelpers.cpp b/src/rpc/RPCHelpers.cpp index 4c25cd50..1a1c2260 100644 --- a/src/rpc/RPCHelpers.cpp +++ b/src/rpc/RPCHelpers.cpp @@ -1283,9 +1283,10 @@ postProcessOrderBook( } else { saTakerGetsFunded = saOwnerFundsLimit; offerJson["taker_gets_funded"] = toBoostJson(saTakerGetsFunded.getJson(ripple::JsonOptions::none)); - offerJson["taker_pays_funded"] = - toBoostJson(std::min(saTakerPays, ripple::multiply(saTakerGetsFunded, dirRate, saTakerPays.issue())) - .getJson(ripple::JsonOptions::none)); + offerJson["taker_pays_funded"] = toBoostJson( + std::min(saTakerPays, ripple::multiply(saTakerGetsFunded, dirRate, saTakerPays.issue())) + .getJson(ripple::JsonOptions::none) + ); } 
ripple::STAmount const saOwnerPays = (ripple::parityRate == offerRate) diff --git a/src/rpc/WorkQueue.cpp b/src/rpc/WorkQueue.cpp index b19ccc7f..64412066 100644 --- a/src/rpc/WorkQueue.cpp +++ b/src/rpc/WorkQueue.cpp @@ -46,7 +46,8 @@ WorkQueue::OneTimeCallable::operator()() called_ = true; } } -WorkQueue::OneTimeCallable::operator bool() const +WorkQueue::OneTimeCallable:: +operator bool() const { return func_.operator bool(); } diff --git a/src/rpc/common/MetaProcessors.hpp b/src/rpc/common/MetaProcessors.hpp index c357b04a..9264b5ea 100644 --- a/src/rpc/common/MetaProcessors.hpp +++ b/src/rpc/common/MetaProcessors.hpp @@ -107,8 +107,8 @@ public: template explicit IfType(Requirements&&... requirements) : processor_( - [... r = std::forward(requirements - )](boost::json::value& j, std::string_view key) -> MaybeError { + [... r = std::forward(requirements)](boost::json::value& j, std::string_view key) + -> MaybeError { std::optional firstFailure = std::nullopt; // the check logic is the same as fieldspec diff --git a/src/rpc/common/Validators.hpp b/src/rpc/common/Validators.hpp index b6a5c1c7..3c8258c1 100644 --- a/src/rpc/common/Validators.hpp +++ b/src/rpc/common/Validators.hpp @@ -457,7 +457,10 @@ public: checkIsU32Numeric(std::string_view sv); template - requires(std::is_same_v || std::is_same_v || std::is_same_v) + requires( + std::is_same_v || std::is_same_v || + std::is_same_v + ) MaybeError makeHexStringValidator(boost::json::value const& value, std::string_view key) { diff --git a/src/rpc/handlers/GetAggregatePrice.cpp b/src/rpc/handlers/GetAggregatePrice.cpp index 9a27a845..bef9b250 100644 --- a/src/rpc/handlers/GetAggregatePrice.cpp +++ b/src/rpc/handlers/GetAggregatePrice.cpp @@ -108,9 +108,11 @@ GetAggregatePriceHandler::process(GetAggregatePriceHandler::Input input, Context auto const scale = iter->isFieldPresent(ripple::sfScale) ? 
-static_cast(iter->getFieldU8(ripple::sfScale)) : 0; - timestampPricesBiMap.insert(TimestampPricesBiMap::value_type( - node.getFieldU32(ripple::sfLastUpdateTime), ripple::STAmount{ripple::noIssue(), price, scale} - )); + timestampPricesBiMap.insert( + TimestampPricesBiMap::value_type( + node.getFieldU32(ripple::sfLastUpdateTime), ripple::STAmount{ripple::noIssue(), price, scale} + ) + ); return true; } return false; @@ -263,12 +265,14 @@ tag_invoke(boost::json::value_to_tag, boost::js } for (auto const& oracle : jsonObject.at(JS(oracles)).as_array()) { - input.oracles.push_back(GetAggregatePriceHandler::Oracle{ - .documentId = boost::json::value_to(oracle.as_object().at(JS(oracle_document_id))), - .account = *util::parseBase58Wrapper( - boost::json::value_to(oracle.as_object().at(JS(account))) - ) - }); + input.oracles.push_back( + GetAggregatePriceHandler::Oracle{ + .documentId = boost::json::value_to(oracle.as_object().at(JS(oracle_document_id))), + .account = *util::parseBase58Wrapper( + boost::json::value_to(oracle.as_object().at(JS(account))) + ) + } + ); } input.baseAsset = boost::json::value_to(jv.at(JS(base_asset))); input.quoteAsset = boost::json::value_to(jv.at(JS(quote_asset))); diff --git a/src/rpc/handlers/LedgerEntry.cpp b/src/rpc/handlers/LedgerEntry.cpp index c7b3812e..d8023959 100644 --- a/src/rpc/handlers/LedgerEntry.cpp +++ b/src/rpc/handlers/LedgerEntry.cpp @@ -75,9 +75,9 @@ LedgerEntryHandler::process(LedgerEntryHandler::Input input, Context const& ctx) key = expectedkey.value(); } else if (input.offer) { - auto const id = - util::parseBase58Wrapper(boost::json::value_to(input.offer->at(JS(account))) - ); + auto const id = util::parseBase58Wrapper( + boost::json::value_to(input.offer->at(JS(account))) + ); key = ripple::keylet::offer(*id, boost::json::value_to(input.offer->at(JS(seq)))).key; } else if (input.rippleStateAccount) { auto const id1 = util::parseBase58Wrapper( @@ -91,9 +91,9 @@ LedgerEntryHandler::process(LedgerEntryHandler::Input input, Context const& ctx) key = ripple::keylet::line(*id1, *id2, currency).key; } else if (input.escrow) { - auto const id = - util::parseBase58Wrapper(boost::json::value_to(input.escrow->at(JS(owner))) - ); + auto const id = util::parseBase58Wrapper( + boost::json::value_to(input.escrow->at(JS(owner))) + ); key = ripple::keylet::escrow(*id, input.escrow->at(JS(seq)).as_int64()).key; } else if (input.depositPreauth) { auto const owner = util::parseBase58Wrapper( @@ -124,9 +124,9 @@ LedgerEntryHandler::process(LedgerEntryHandler::Input input, Context const& ctx) key = ripple::keylet::depositPreauth(owner.value(), authCreds).key; } } else if (input.ticket) { - auto const id = - util::parseBase58Wrapper(boost::json::value_to(input.ticket->at(JS(account)) - )); + auto const id = util::parseBase58Wrapper( + boost::json::value_to(input.ticket->at(JS(account))) + ); key = ripple::getTicketIndex(*id, input.ticket->at(JS(ticket_seq)).as_int64()); } else if (input.amm) { @@ -136,9 +136,9 @@ LedgerEntryHandler::process(LedgerEntryHandler::Input input, Context const& ctx) if (ripple::isXRP(currency)) { return ripple::xrpIssue(); } - auto const issuer = - util::parseBase58Wrapper(boost::json::value_to(assetJson.at(JS(issuer))) - ); + auto const issuer = util::parseBase58Wrapper( + boost::json::value_to(assetJson.at(JS(issuer))) + ); return ripple::Issue{currency, *issuer}; }; @@ -174,9 +174,9 @@ LedgerEntryHandler::process(LedgerEntryHandler::Input input, Context const& ctx) } else if (input.mptoken) { auto const holder = 
ripple::parseBase58(boost::json::value_to(input.mptoken->at(JS(account)))); - auto const mptIssuanceID = - ripple::uint192{std::string_view(boost::json::value_to(input.mptoken->at(JS(mpt_issuance_id)))) - }; + auto const mptIssuanceID = ripple::uint192{ + std::string_view(boost::json::value_to(input.mptoken->at(JS(mpt_issuance_id)))) + }; key = ripple::keylet::mptoken(mptIssuanceID, *holder).key; } else if (input.permissionedDomain) { auto const account = ripple::parseBase58( @@ -192,9 +192,9 @@ LedgerEntryHandler::process(LedgerEntryHandler::Input input, Context const& ctx) } else if (input.delegate) { auto const account = ripple::parseBase58(boost::json::value_to(input.delegate->at(JS(account)))); - auto const authorize = - ripple::parseBase58(boost::json::value_to(input.delegate->at(JS(authorize))) - ); + auto const authorize = ripple::parseBase58( + boost::json::value_to(input.delegate->at(JS(authorize))) + ); key = ripple::keylet::delegate(*account, *authorize).key; } else { // Must specify 1 of the following fields to indicate what type diff --git a/src/rpc/handlers/LedgerEntry.hpp b/src/rpc/handlers/LedgerEntry.hpp index 970d4f7b..d2f50145 100644 --- a/src/rpc/handlers/LedgerEntry.hpp +++ b/src/rpc/handlers/LedgerEntry.hpp @@ -146,12 +146,12 @@ public: return Error{Status{RippledError::rpcINVALID_PARAMS, "malformedAccounts"}}; } - auto const id1 = - util::parseBase58Wrapper(boost::json::value_to(value.as_array()[0]) - ); - auto const id2 = - util::parseBase58Wrapper(boost::json::value_to(value.as_array()[1]) - ); + auto const id1 = util::parseBase58Wrapper( + boost::json::value_to(value.as_array()[0]) + ); + auto const id2 = util::parseBase58Wrapper( + boost::json::value_to(value.as_array()[1]) + ); if (!id1 || !id2) return Error{Status{ClioError::RpcMalformedAddress, "malformedAddresses"}}; diff --git a/src/rpc/handlers/NFTOffersCommon.cpp b/src/rpc/handlers/NFTOffersCommon.cpp index ddb0b327..5496f00a 100644 --- a/src/rpc/handlers/NFTOffersCommon.cpp +++ b/src/rpc/handlers/NFTOffersCommon.cpp @@ -138,14 +138,7 @@ NFTOffersHandlerBase::iterateOfferDirectory( } auto result = traverseOwnedNodes( - *sharedPtrBackend_, - directory, - cursor, - startHint, - lgrInfo.seq, - reserve, - yield, - [&offers](ripple::SLE&& offer) { + *sharedPtrBackend_, directory, cursor, startHint, lgrInfo.seq, reserve, yield, [&offers](ripple::SLE&& offer) { if (offer.getType() == ripple::ltNFTOKEN_OFFER) { offers.push_back(std::move(offer)); return true; diff --git a/src/rpc/handlers/Subscribe.hpp b/src/rpc/handlers/Subscribe.hpp index 3e3e16af..269353c1 100644 --- a/src/rpc/handlers/Subscribe.hpp +++ b/src/rpc/handlers/Subscribe.hpp @@ -140,8 +140,10 @@ private: subscribeToAccounts(std::vector const& accounts, feed::SubscriberSharedPtr const& session) const; void - subscribeToAccountsProposed(std::vector const& accounts, feed::SubscriberSharedPtr const& session) - const; + subscribeToAccountsProposed( + std::vector const& accounts, + feed::SubscriberSharedPtr const& session + ) const; void subscribeToBooks( diff --git a/src/rpc/handlers/Unsubscribe.cpp b/src/rpc/handlers/Unsubscribe.cpp index 4dc68766..3cf9ee20 100644 --- a/src/rpc/handlers/Unsubscribe.cpp +++ b/src/rpc/handlers/Unsubscribe.cpp @@ -129,8 +129,10 @@ UnsubscribeHandler::unsubscribeFromStreams( } void -UnsubscribeHandler::unsubscribeFromAccounts(std::vector accounts, feed::SubscriberSharedPtr const& session) - const +UnsubscribeHandler::unsubscribeFromAccounts( + std::vector accounts, + feed::SubscriberSharedPtr const& session +) const { for 
(auto const& account : accounts) { auto const accountID = accountFromStringStrict(account); @@ -150,8 +152,10 @@ UnsubscribeHandler::unsubscribeFromProposedAccounts( } } void -UnsubscribeHandler::unsubscribeFromBooks(std::vector const& books, feed::SubscriberSharedPtr const& session) - const +UnsubscribeHandler::unsubscribeFromBooks( + std::vector const& books, + feed::SubscriberSharedPtr const& session +) const { for (auto const& orderBook : books) { subscriptions_->unsubBook(orderBook.book, session); diff --git a/src/rpc/handlers/Unsubscribe.hpp b/src/rpc/handlers/Unsubscribe.hpp index 6814376c..1e472591 100644 --- a/src/rpc/handlers/Unsubscribe.hpp +++ b/src/rpc/handlers/Unsubscribe.hpp @@ -106,8 +106,10 @@ private: unsubscribeFromAccounts(std::vector accounts, feed::SubscriberSharedPtr const& session) const; void - unsubscribeFromProposedAccounts(std::vector accountsProposed, feed::SubscriberSharedPtr const& session) - const; + unsubscribeFromProposedAccounts( + std::vector accountsProposed, + feed::SubscriberSharedPtr const& session + ) const; void unsubscribeFromBooks(std::vector const& books, feed::SubscriberSharedPtr const& session) const; diff --git a/src/util/Coroutine.hpp b/src/util/Coroutine.hpp index cb4317bf..91b615a1 100644 --- a/src/util/Coroutine.hpp +++ b/src/util/Coroutine.hpp @@ -134,8 +134,7 @@ public: return; boost::asio::spawn( - yield_, - [signal = familySignal_, fn = std::move(fn)](boost::asio::yield_context yield) mutable { + yield_, [signal = familySignal_, fn = std::move(fn)](boost::asio::yield_context yield) mutable { Coroutine coroutine(std::move(yield), std::move(signal)); fn(coroutine); } diff --git a/src/util/SignalsHandler.cpp b/src/util/SignalsHandler.cpp index 9bd91465..37a64da5 100644 --- a/src/util/SignalsHandler.cpp +++ b/src/util/SignalsHandler.cpp @@ -78,8 +78,7 @@ SignalsHandler::SignalsHandler(config::ClioConfigDefinition const& config, std:: << " milliseconds."; setHandler(impl::SignalsHandlerStatic::handleSecondSignal); timer_.emplace(context_.scheduleAfter( - gracefulPeriod_, - [forceExitHandler = std::move(forceExitHandler)](auto&& stopToken, bool canceled) { + gracefulPeriod_, [forceExitHandler = std::move(forceExitHandler)](auto&& stopToken, bool canceled) { // TODO: Update this after https://github.com/XRPLF/clio/issues/1380 if (not stopToken.isStopRequested() and not canceled) { LOG(LogService::warn()) << "Force exit at the end of graceful period."; diff --git a/src/util/async/AnyExecutionContext.hpp b/src/util/async/AnyExecutionContext.hpp index 7b9ffe8c..7baefe77 100644 --- a/src/util/async/AnyExecutionContext.hpp +++ b/src/util/async/AnyExecutionContext.hpp @@ -166,17 +166,16 @@ public: static_assert(not std::is_same_v); auto const millis = std::chrono::duration_cast(delay); - return AnyOperation(pimpl_->scheduleAfter( - millis, - [fn = std::forward(fn)](auto stopToken) -> std::any { + return AnyOperation( + pimpl_->scheduleAfter(millis, [fn = std::forward(fn)](auto stopToken) -> std::any { if constexpr (std::is_void_v) { fn(std::move(stopToken)); return {}; } else { return std::make_any(fn(std::move(stopToken))); } - } - )); + }) + ); } /** @@ -197,8 +196,7 @@ public: auto const millis = std::chrono::duration_cast(delay); return AnyOperation(pimpl_->scheduleAfter( - millis, - [fn = std::forward(fn)](auto stopToken, auto cancelled) -> std::any { + millis, [fn = std::forward(fn)](auto stopToken, auto cancelled) -> std::any { if constexpr (std::is_void_v) { fn(std::move(stopToken), cancelled); return {}; @@ -224,13 +222,10 @@ public: auto 
const millis = std::chrono::duration_cast(interval); return AnyOperation( // - pimpl_->executeRepeatedly( - millis, - [fn = std::forward(fn)] -> std::any { - fn(); - return {}; - } - ) + pimpl_->executeRepeatedly(millis, [fn = std::forward(fn)] -> std::any { + fn(); + return {}; + }) ); } diff --git a/src/util/async/AnyStrand.hpp b/src/util/async/AnyStrand.hpp index f2d7b8ed..170721c2 100644 --- a/src/util/async/AnyStrand.hpp +++ b/src/util/async/AnyStrand.hpp @@ -146,13 +146,10 @@ public: auto const millis = std::chrono::duration_cast(interval); return AnyOperation( // - pimpl_->executeRepeatedly( - millis, - [fn = std::forward(fn)] -> std::any { - fn(); - return {}; - } - ) + pimpl_->executeRepeatedly(millis, [fn = std::forward(fn)] -> std::any { + fn(); + return {}; + }) ); } diff --git a/src/util/async/Concepts.hpp b/src/util/async/Concepts.hpp index a4986531..7a50999f 100644 --- a/src/util/async/Concepts.hpp +++ b/src/util/async/Concepts.hpp @@ -170,7 +170,7 @@ template concept SomeStdDuration = requires { // Thank you Ed Catmur for this trick. // See https://stackoverflow.com/questions/74383254/concept-that-models-only-the-stdchrono-duration-types - []( // + []( // std::type_identity> ) {}(std::type_identity>()); }; @@ -180,7 +180,7 @@ concept SomeStdDuration = requires { */ template concept SomeStdOptional = requires { - []( // + []( // std::type_identity> ) {}(std::type_identity>()); }; diff --git a/src/util/config/ConfigDefinition.hpp b/src/util/config/ConfigDefinition.hpp index 558ccb7c..6e326939 100644 --- a/src/util/config/ConfigDefinition.hpp +++ b/src/util/config/ConfigDefinition.hpp @@ -259,8 +259,8 @@ private: * without default values must be present in the user's config file. */ static ClioConfigDefinition gClioConfig = ClioConfigDefinition{ - {{"database.type", ConfigValue{ConfigType::String}.defaultValue("cassandra").withConstraint(gValidateCassandraName) - }, + {{"database.type", + ConfigValue{ConfigType::String}.defaultValue("cassandra").withConstraint(gValidateCassandraName)}, {"database.cassandra.contact_points", ConfigValue{ConfigType::String}.defaultValue("localhost")}, {"database.cassandra.secure_connect_bundle", ConfigValue{ConfigType::String}.optional()}, {"database.cassandra.port", ConfigValue{ConfigType::Integer}.withConstraint(gValidatePort).optional()}, @@ -284,10 +284,10 @@ static ClioConfigDefinition gClioConfig = ClioConfigDefinition{ {"database.cassandra.queue_size_io", ConfigValue{ConfigType::Integer}.optional().withConstraint(gValidateUint16)}, {"database.cassandra.write_batch_size", ConfigValue{ConfigType::Integer}.defaultValue(20).withConstraint(gValidateUint16)}, - {"database.cassandra.connect_timeout", ConfigValue{ConfigType::Integer}.optional().withConstraint(gValidateUint32) - }, - {"database.cassandra.request_timeout", ConfigValue{ConfigType::Integer}.optional().withConstraint(gValidateUint32) - }, + {"database.cassandra.connect_timeout", + ConfigValue{ConfigType::Integer}.optional().withConstraint(gValidateUint32)}, + {"database.cassandra.request_timeout", + ConfigValue{ConfigType::Integer}.optional().withConstraint(gValidateUint32)}, {"database.cassandra.username", ConfigValue{ConfigType::String}.optional()}, {"database.cassandra.password", ConfigValue{ConfigType::String}.optional()}, {"database.cassandra.certfile", ConfigValue{ConfigType::String}.optional()}, @@ -308,8 +308,8 @@ static ClioConfigDefinition gClioConfig = ClioConfigDefinition{ {"num_markers", ConfigValue{ConfigType::Integer}.optional().withConstraint(gValidateNumMarkers)}, 
{"dos_guard.whitelist.[]", Array{ConfigValue{ConfigType::String}.optional()}}, - {"dos_guard.max_fetches", ConfigValue{ConfigType::Integer}.defaultValue(1000'000u).withConstraint(gValidateUint32) - }, + {"dos_guard.max_fetches", + ConfigValue{ConfigType::Integer}.defaultValue(1000'000u).withConstraint(gValidateUint32)}, {"dos_guard.max_connections", ConfigValue{ConfigType::Integer}.defaultValue(20u).withConstraint(gValidateUint32)}, {"dos_guard.max_requests", ConfigValue{ConfigType::Integer}.defaultValue(20u).withConstraint(gValidateUint32)}, {"dos_guard.sweep_interval", @@ -358,8 +358,8 @@ static ClioConfigDefinition gClioConfig = ClioConfigDefinition{ {"cache.page_fetch_size", ConfigValue{ConfigType::Integer}.defaultValue(512).withConstraint(gValidateUint16)}, {"cache.load", ConfigValue{ConfigType::String}.defaultValue("async").withConstraint(gValidateLoadMode)}, - {"log_channels.[].channel", Array{ConfigValue{ConfigType::String}.optional().withConstraint(gValidateChannelName)} - }, + {"log_channels.[].channel", + Array{ConfigValue{ConfigType::String}.optional().withConstraint(gValidateChannelName)}}, {"log_channels.[].log_level", Array{ConfigValue{ConfigType::String}.optional().withConstraint(gValidateLogLevelName)}}, @@ -376,8 +376,8 @@ static ClioConfigDefinition gClioConfig = ClioConfigDefinition{ {"log_rotation_size", ConfigValue{ConfigType::Integer}.defaultValue(2048).withConstraint(gValidateUint32)}, - {"log_directory_max_size", ConfigValue{ConfigType::Integer}.defaultValue(50 * 1024).withConstraint(gValidateUint32) - }, + {"log_directory_max_size", + ConfigValue{ConfigType::Integer}.defaultValue(50 * 1024).withConstraint(gValidateUint32)}, {"log_rotation_hour_interval", ConfigValue{ConfigType::Integer}.defaultValue(12).withConstraint(gValidateUint32)}, diff --git a/src/util/config/ConfigDescription.hpp b/src/util/config/ConfigDescription.hpp index f01e234e..3f14525e 100644 --- a/src/util/config/ConfigDescription.hpp +++ b/src/util/config/ConfigDescription.hpp @@ -134,10 +134,11 @@ public: private: static constexpr auto kCONFIG_DESCRIPTION = std::array{ - KV{.key = "database.type", - .value = - "Specifies the type of database used for storing and retrieving data required by the Clio server. Both " - "ScyllaDB and Cassandra can serve as backends for Clio; however, this value must be set to `cassandra`." + KV{ + .key = "database.type", + .value = + "Specifies the type of database used for storing and retrieving data required by the Clio server. Both " + "ScyllaDB and Cassandra can serve as backends for Clio; however, this value must be set to `cassandra`." }, KV{.key = "database.cassandra.contact_points", .value = "A list of IP addresses or hostnames for the initial cluster nodes (Cassandra or ScyllaDB) that " @@ -190,9 +191,8 @@ private: .value = "Specifies the timeout duration (in seconds) for the forwarding cache used in `rippled` " "communication. A value of `0` means disabling this feature."}, KV{.key = "forwarding.request_timeout", - .value = - "Specifies the timeout duration (in seconds) for the forwarding request used in `rippled` communication." - }, + .value = "Specifies the timeout duration (in seconds) for the forwarding request used in `rippled` " + "communication."}, KV{.key = "rpc.cache_timeout", .value = "Specifies the timeout duration (in seconds) for RPC cache response to timeout. 
A value of `0` " "means disabling this feature."}, @@ -201,16 +201,15 @@ private: KV{.key = "dos_guard.max_fetches", .value = "The maximum number of fetch operations allowed by DOS guard."}, KV{.key = "dos_guard.max_connections", .value = "The maximum number of concurrent connections for a specific IP address."}, - KV{.key = "dos_guard.max_requests", .value = "The maximum number of requests allowed for a specific IP address." - }, + KV{.key = "dos_guard.max_requests", + .value = "The maximum number of requests allowed for a specific IP address."}, KV{.key = "dos_guard.sweep_interval", .value = "Interval in seconds for DOS guard to sweep(clear) its state."}, KV{.key = "workers", .value = "The number of threads used to process RPC requests."}, KV{.key = "server.ip", .value = "The IP address of the Clio HTTP server."}, KV{.key = "server.port", .value = "The port number of the Clio HTTP server."}, KV{.key = "server.max_queue_size", - .value = - "The maximum size of the server's request queue. If set to `0`, this means there is no queue size limit." - }, + .value = "The maximum size of the server's request queue. If set to `0`, this means there is no queue size " + "limit."}, KV{.key = "server.local_admin", .value = "Indicates if requests from `localhost` are allowed to call Clio admin-only APIs. Note that this " "setting cannot be enabled " @@ -232,8 +231,8 @@ private: "client is slow to receive it, ensuring delivery once the client is ready."}, KV{.key = "prometheus.enabled", .value = "Enables or disables Prometheus metrics."}, KV{.key = "prometheus.compress_reply", .value = "Enables or disables compression of Prometheus responses."}, - KV{.key = "io_threads", .value = "The number of input/output (I/O) threads. The value cannot be less than `1`." - }, + KV{.key = "io_threads", + .value = "The number of input/output (I/O) threads. 
The value cannot be less than `1`."}, KV{.key = "subscription_workers", .value = "The number of worker threads or processes that are responsible for managing and processing " "subscription-based tasks from `rippled`."}, diff --git a/src/util/prometheus/MetricBuilder.hpp b/src/util/prometheus/MetricBuilder.hpp index 30da7714..11507d71 100644 --- a/src/util/prometheus/MetricBuilder.hpp +++ b/src/util/prometheus/MetricBuilder.hpp @@ -78,8 +78,12 @@ public: ) override; std::unique_ptr - operator()(std::string name, std::string labelsString, MetricType type, std::vector const& buckets) - override; + operator()( + std::string name, + std::string labelsString, + MetricType type, + std::vector const& buckets + ) override; private: static std::unique_ptr diff --git a/src/util/prometheus/impl/HistogramImpl.hpp b/src/util/prometheus/impl/HistogramImpl.hpp index 42b359cd..bcfb3d45 100644 --- a/src/util/prometheus/impl/HistogramImpl.hpp +++ b/src/util/prometheus/impl/HistogramImpl.hpp @@ -75,10 +75,9 @@ public: { auto data = data_->template lock(); auto const bucket = std::lower_bound( - data->buckets.begin(), - data->buckets.end(), - value, - [](Bucket const& bucket, ValueType const& value) { return bucket.upperBound < value; } + data->buckets.begin(), data->buckets.end(), value, [](Bucket const& bucket, ValueType const& value) { + return bucket.upperBound < value; + } ); if (bucket != data->buckets.end()) { ++bucket->count; diff --git a/src/util/requests/impl/WsConnectionImpl.hpp b/src/util/requests/impl/WsConnectionImpl.hpp index 581d3870..341c118d 100644 --- a/src/util/requests/impl/WsConnectionImpl.hpp +++ b/src/util/requests/impl/WsConnectionImpl.hpp @@ -60,8 +60,10 @@ public: } std::expected - read(boost::asio::yield_context yield, std::optional timeout = std::nullopt) - override + read( + boost::asio::yield_context yield, + std::optional timeout = std::nullopt + ) override { boost::beast::error_code errorCode; boost::beast::flat_buffer buffer; @@ -101,8 +103,10 @@ public: } std::optional - close(boost::asio::yield_context yield, std::chrono::steady_clock::duration const timeout = kDEFAULT_TIMEOUT) - override + close( + boost::asio::yield_context yield, + std::chrono::steady_clock::duration const timeout = kDEFAULT_TIMEOUT + ) override { // Set the timeout for closing the connection boost::beast::websocket::stream_base::timeout wsTimeout{}; diff --git a/src/web/impl/WsBase.hpp b/src/web/impl/WsBase.hpp index 0538a34e..0f672e1d 100644 --- a/src/web/impl/WsBase.hpp +++ b/src/web/impl/WsBase.hpp @@ -181,8 +181,7 @@ public: { // Note: post used instead of dispatch to guarantee async behavior of wsFail and maybeSendNext boost::asio::post( - derived().ws().get_executor(), - [this, self = derived().shared_from_this(), msg = std::move(msg)]() { + derived().ws().get_executor(), [this, self = derived().shared_from_this(), msg = std::move(msg)]() { if (messages_.size() > maxSendingQueueSize_) { wsFail(boost::asio::error::timed_out, "Client is too slow"); return; diff --git a/src/web/ng/Server.cpp b/src/web/ng/Server.cpp index 5a5a3d69..c6a0cbde 100644 --- a/src/web/ng/Server.cpp +++ b/src/web/ng/Server.cpp @@ -187,7 +187,8 @@ tryUpgradeConnection( if (expectedUpgradedConnection.has_value()) return std::move(expectedUpgradedConnection).value(); - return std::unexpected{fmt::format("Error upgrading connection: {}", expectedUpgradedConnection.error().what()) + return std::unexpected{ + fmt::format("Error upgrading connection: {}", expectedUpgradedConnection.error().what()) }; } @@ -247,8 +248,7 @@ 
Server::run() running_ = true; boost::asio::spawn( - ctx_.get(), - [this, acceptor = std::move(acceptor).value()](boost::asio::yield_context yield) mutable { + ctx_.get(), [this, acceptor = std::move(acceptor).value()](boost::asio::yield_context yield) mutable { while (true) { boost::beast::error_code errorCode; boost::asio::ip::tcp::socket socket{ctx_.get().get_executor()}; @@ -314,8 +314,7 @@ Server::handleConnection(boost::asio::ip::tcp::socket socket, boost::asio::yield if (connectionHandler_.isStopping()) { boost::asio::spawn( - ctx_.get(), - [connection = std::move(connectionExpected).value()](boost::asio::yield_context yield) { + ctx_.get(), [connection = std::move(connectionExpected).value()](boost::asio::yield_context yield) { web::ng::impl::ConnectionHandler::stopConnection(*connection, yield); } ); @@ -329,8 +328,7 @@ Server::handleConnection(boost::asio::ip::tcp::socket socket, boost::asio::yield } boost::asio::spawn( - ctx_.get(), - [this, connection = std::move(connection).value()](boost::asio::yield_context yield) mutable { + ctx_.get(), [this, connection = std::move(connection).value()](boost::asio::yield_context yield) mutable { connectionHandler_.processConnection(std::move(connection), yield); } ); diff --git a/src/web/ng/impl/ConnectionHandler.cpp b/src/web/ng/impl/ConnectionHandler.cpp index 838c4bfb..82df27b2 100644 --- a/src/web/ng/impl/ConnectionHandler.cpp +++ b/src/web/ng/impl/ConnectionHandler.cpp @@ -146,11 +146,9 @@ ConnectionHandler::processConnection(ConnectionPtr connectionPtr, boost::asio::y auto* ptr = dynamic_cast(connectionPtr.get()); ASSERT(ptr != nullptr, "Casted not websocket connection"); subscriptionContext = std::make_shared( - tagFactory_, - *ptr, - maxSubscriptionSendQueueSize_, - yield, - [this](Error const& e, Connection const& c) { return handleError(e, c); } + tagFactory_, *ptr, maxSubscriptionSendQueueSize_, yield, [this](Error const& e, Connection const& c) { + return handleError(e, c); + } ); LOG(log_.trace()) << connectionRef.tag() << "Created SubscriptionContext for the connection"; } diff --git a/src/web/ng/impl/HttpConnection.hpp b/src/web/ng/impl/HttpConnection.hpp index 0cad3438..f9db0ec6 100644 --- a/src/web/ng/impl/HttpConnection.hpp +++ b/src/web/ng/impl/HttpConnection.hpp @@ -125,8 +125,10 @@ public: } std::optional - sendRaw(boost::beast::http::response response, boost::asio::yield_context yield) - override + sendRaw( + boost::beast::http::response response, + boost::asio::yield_context yield + ) override { boost::system::error_code error; boost::beast::get_lowest_layer(stream_).expires_after(timeout_); diff --git a/tests/common/util/MockPrometheus.hpp b/tests/common/util/MockPrometheus.hpp index 235cf792..0b88efa3 100644 --- a/tests/common/util/MockPrometheus.hpp +++ b/tests/common/util/MockPrometheus.hpp @@ -107,13 +107,15 @@ struct MockPrometheusImpl : PrometheusInterface { }); EXPECT_CALL(*this, histogramInt) .WillRepeatedly( - [this](std::string name, Labels labels, std::vector const&, std::optional) - -> HistogramInt& { return getMetric(std::move(name), std::move(labels)); } + [this]( + std::string name, Labels labels, std::vector const&, std::optional + ) -> HistogramInt& { return getMetric(std::move(name), std::move(labels)); } ); EXPECT_CALL(*this, histogramDouble) .WillRepeatedly( - [this](std::string name, Labels labels, std::vector const&, std::optional) - -> HistogramDouble& { return getMetric(std::move(name), std::move(labels)); } + [this]( + std::string name, Labels labels, std::vector const&, std::optional + ) -> 
HistogramDouble& { return getMetric(std::move(name), std::move(labels)); } ); } diff --git a/tests/common/util/TestHttpServer.hpp b/tests/common/util/TestHttpServer.hpp index bb057489..a8c6b31e 100644 --- a/tests/common/util/TestHttpServer.hpp +++ b/tests/common/util/TestHttpServer.hpp @@ -35,8 +35,9 @@ */ class TestHttpServer { public: - using RequestHandler = std::function>(boost::beast::http::request)>; + using RequestHandler = std::function>( + boost::beast::http::request + )>; /** * @brief Construct a new TestHttpServer diff --git a/tests/common/util/TestWebSocketClient.cpp b/tests/common/util/TestWebSocketClient.cpp index bea673c8..798f9852 100644 --- a/tests/common/util/TestWebSocketClient.cpp +++ b/tests/common/util/TestWebSocketClient.cpp @@ -64,13 +64,16 @@ WebSocketSyncClient::connect(std::string const& host, std::string const& port, s // See https://tools.ietf.org/html/rfc7230#section-5.4 auto const hostPort = host + ':' + std::to_string(ep.port()); - ws_.set_option(boost::beast::websocket::stream_base::decorator([additionalHeaders = std::move(additionalHeaders - )](boost::beast::websocket::request_type& req) { - req.set(http::field::user_agent, std::string(BOOST_BEAST_VERSION_STRING) + " websocket-client-coro"); - for (auto const& header : additionalHeaders) { - req.set(header.name, header.value); - } - })); + ws_.set_option( + boost::beast::websocket::stream_base::decorator( + [additionalHeaders = std::move(additionalHeaders)](boost::beast::websocket::request_type& req) { + req.set(http::field::user_agent, std::string(BOOST_BEAST_VERSION_STRING) + " websocket-client-coro"); + for (auto const& header : additionalHeaders) { + req.set(header.name, header.value); + } + } + ) + ); ws_.handshake(hostPort, "/"); } @@ -157,12 +160,15 @@ WebSocketAsyncClient::connect( stream_.set_option(wsTimeout); boost::beast::get_lowest_layer(stream_).expires_never(); - stream_.set_option(boost::beast::websocket::stream_base::decorator([additionalHeaders = std::move(additionalHeaders - )](boost::beast::websocket::request_type& req) { - for (auto const& header : additionalHeaders) { - req.set(header.name, header.value); - } - })); + stream_.set_option( + boost::beast::websocket::stream_base::decorator( + [additionalHeaders = std::move(additionalHeaders)](boost::beast::websocket::request_type& req) { + for (auto const& header : additionalHeaders) { + req.set(header.name, header.value); + } + } + ) + ); stream_.async_handshake(fmt::format("{}:{}", host, port), "/", yield[error]); if (error) diff --git a/tests/integration/data/cassandra/BackendTests.cpp b/tests/integration/data/cassandra/BackendTests.cpp index e0f87cb1..37af5a04 100644 --- a/tests/integration/data/cassandra/BackendTests.cpp +++ b/tests/integration/data/cassandra/BackendTests.cpp @@ -390,10 +390,10 @@ TEST_F(BackendCassandraTest, Basic) "6C7F69A6D25A13AC4A2E9145999F45D4674F939900017A96885FDC2757" "E9284E"; ripple::uint256 nftID; - EXPECT_TRUE( - nftID.parseHex("000800006203F49C21D5D6E022CB16DE3538F248662" - "FC73CEF7FF5C60000002C") - ); + EXPECT_TRUE(nftID.parseHex( + "000800006203F49C21D5D6E022CB16DE3538F248662" + "FC73CEF7FF5C60000002C" + )); std::string metaBlob = hexStringToBinaryString(metaHex); std::string txnBlob = hexStringToBinaryString(txnHex); diff --git a/tests/integration/migration/cassandra/CassandraMigrationManagerTests.cpp b/tests/integration/migration/cassandra/CassandraMigrationManagerTests.cpp index c6771656..d8528aa7 100644 --- a/tests/integration/migration/cassandra/CassandraMigrationManagerTests.cpp +++ 
b/tests/integration/migration/cassandra/CassandraMigrationManagerTests.cpp @@ -313,8 +313,9 @@ TEST_F(MigrationCassandraManagerLedgerTableTest, MigrateExampleLedgerMigrator) EXPECT_EQ(newTableSize, gLedgerHeaderRawData.size()); auto const getAccountHash = [this](std::uint32_t seq) { - return data::synchronous([&](auto ctx) { return testMigrationBackend_->fetchAccountHashViaSequence(seq, ctx); } - ); + return data::synchronous([&](auto ctx) { + return testMigrationBackend_->fetchAccountHashViaSequence(seq, ctx); + }); }; EXPECT_EQ( diff --git a/tests/integration/migration/cassandra/CassandraMigrationTestBackend.hpp b/tests/integration/migration/cassandra/CassandraMigrationTestBackend.hpp index dbc0b0d2..f66a9e48 100644 --- a/tests/integration/migration/cassandra/CassandraMigrationTestBackend.hpp +++ b/tests/integration/migration/cassandra/CassandraMigrationTestBackend.hpp @@ -73,14 +73,16 @@ public: writeTxIndexExample(std::string const& hash, std::string const& txType) { static auto kINSERT_TX_INDEX_EXAMPLE = [this]() { - return handle_.prepare(fmt::format( - R"( + return handle_.prepare( + fmt::format( + R"( INSERT INTO {} (hash, tx_type) VALUES (?, ?) )", - data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example") + ) + ); }(); executor_.writeSync(kINSERT_TX_INDEX_EXAMPLE.bind(hash, data::cassandra::Text(txType))); } @@ -97,12 +99,14 @@ public: fetchTxTypeViaID(std::string const& hash, boost::asio::yield_context ctx) { static auto kFETCH_TX_TYPE = [this]() { - return handle_.prepare(fmt::format( - R"( + return handle_.prepare( + fmt::format( + R"( SELECT tx_type FROM {} WHERE hash = ? )", - data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example") + ) + ); }(); auto const res = executor_.read(ctx, kFETCH_TX_TYPE.bind(hash)); if (not res) { @@ -130,12 +134,14 @@ public: fetchTxIndexTableSize(boost::asio::yield_context ctx) { static auto kINSERT_TX_INDEX_EXAMPLE = [this]() { - return handle_.prepare(fmt::format( - R"( + return handle_.prepare( + fmt::format( + R"( SELECT COUNT(*) FROM {} )", - data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example") + ) + ); }(); // This function will be called after table being dropped, catch the exception @@ -169,14 +175,16 @@ public: writeLedgerAccountHash(std::uint64_t sequence, std::string const& accountHash) { static auto kINSERT_LEDGER_EXAMPLE = [this]() { - return handle_.prepare(fmt::format( - R"( + return handle_.prepare( + fmt::format( + R"( INSERT INTO {} (sequence, account_hash) VALUES (?, ?) )", - data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example") + ) + ); }(); executor_.writeSync(kINSERT_LEDGER_EXAMPLE.bind(sequence, accountHash)); } @@ -193,12 +201,14 @@ public: fetchAccountHashViaSequence(std::uint64_t sequence, boost::asio::yield_context ctx) { static auto kFETCH_ACCOUNT_HASH = [this]() { - return handle_.prepare(fmt::format( - R"( + return handle_.prepare( + fmt::format( + R"( SELECT account_hash FROM {} WHERE sequence = ? 
)", - data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example") + ) + ); }(); auto const res = executor_.read(ctx, kFETCH_ACCOUNT_HASH.bind(sequence)); if (not res) { @@ -226,12 +236,14 @@ public: fetchLedgerTableSize(boost::asio::yield_context ctx) { static auto kINSERT_LEDGER_EXAMPLE = [this]() { - return handle_.prepare(fmt::format( - R"( + return handle_.prepare( + fmt::format( + R"( SELECT COUNT(*) FROM {} )", - data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example") + ) + ); }(); // This function will be called after table being dropped, catch the exception @@ -263,12 +275,14 @@ public: auto dropDiffTable() { - return handle_.execute(fmt::format( - R"( + return handle_.execute( + fmt::format( + R"( DROP TABLE IF EXISTS {} )", - data::cassandra::qualifiedTableName(settingsProvider_, "diff") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "diff") + ) + ); } /** @@ -281,12 +295,14 @@ public: fetchDiffTableSize(boost::asio::yield_context ctx) { static auto kCOUNT_DIFF = [this]() { - return handle_.prepare(fmt::format( - R"( + return handle_.prepare( + fmt::format( + R"( SELECT COUNT(*) FROM {} )", - data::cassandra::qualifiedTableName(settingsProvider_, "diff") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "diff") + ) + ); }(); // This function will be called after table being dropped, catch the exception @@ -316,8 +332,9 @@ private: { std::vector statements; - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( hash blob, @@ -325,11 +342,13 @@ private: PRIMARY KEY (hash) ) )", - data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "tx_index_example") + ) + ); - statements.emplace_back(fmt::format( - R"( + statements.emplace_back( + fmt::format( + R"( CREATE TABLE IF NOT EXISTS {} ( sequence bigint, @@ -337,8 +356,9 @@ private: PRIMARY KEY (sequence) ) )", - data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example") - )); + data::cassandra::qualifiedTableName(settingsProvider_, "ledger_example") + ) + ); return statements; } }; diff --git a/tests/integration/migration/cassandra/ExampleObjectsMigrator.cpp b/tests/integration/migration/cassandra/ExampleObjectsMigrator.cpp index e6281792..01c01b9d 100644 --- a/tests/integration/migration/cassandra/ExampleObjectsMigrator.cpp +++ b/tests/integration/migration/cassandra/ExampleObjectsMigrator.cpp @@ -46,20 +46,17 @@ ExampleObjectsMigrator::runMigration(std::shared_ptr const& backend, ut std::unordered_set idx; migration::cassandra::impl::ObjectsScanner scanner( {.ctxThreadsNum = ctxFullScanThreads, .jobsNum = jobsFullScan, .cursorsPerJob = cursorPerJobsFullScan}, - migration::cassandra::impl::ObjectsAdapter( - backend, - [&](std::uint32_t, std::optional sle) { - if (sle.has_value()) { - if (sle->getType() == ripple::ltACCOUNT_ROOT) { - if (!idx.contains(sle->key())) { - ExampleObjectsMigrator::accountCount++; - } + migration::cassandra::impl::ObjectsAdapter(backend, [&](std::uint32_t, std::optional sle) { + if (sle.has_value()) { + if (sle->getType() == ripple::ltACCOUNT_ROOT) { + if (!idx.contains(sle->key())) { + ExampleObjectsMigrator::accountCount++; } - idx.insert(sle->key()); - ExampleObjectsMigrator::count++; } + idx.insert(sle->key()); + 
ExampleObjectsMigrator::count++; } - ) + }) ); scanner.wait(); } diff --git a/tests/integration/migration/cassandra/ExampleTransactionsMigrator.cpp b/tests/integration/migration/cassandra/ExampleTransactionsMigrator.cpp index 9f39f780..bcf8c9fb 100644 --- a/tests/integration/migration/cassandra/ExampleTransactionsMigrator.cpp +++ b/tests/integration/migration/cassandra/ExampleTransactionsMigrator.cpp @@ -51,15 +51,12 @@ ExampleTransactionsMigrator::runMigration( util::Mutex hashSet; migration::cassandra::impl::TransactionsScanner scanner( {.ctxThreadsNum = ctxFullScanThreads, .jobsNum = jobsFullScan, .cursorsPerJob = cursorPerJobsFullScan}, - migration::cassandra::impl::TransactionsAdapter( - backend, - [&](ripple::STTx const& tx, ripple::TxMeta const&) { - hashSet.lock()->insert(ripple::to_string(tx.getTransactionID())); - auto const json = tx.getJson(ripple::JsonOptions::none); - auto const txType = json["TransactionType"].asString(); - backend->writeTxIndexExample(uint256ToString(tx.getTransactionID()), txType); - } - ) + migration::cassandra::impl::TransactionsAdapter(backend, [&](ripple::STTx const& tx, ripple::TxMeta const&) { + hashSet.lock()->insert(ripple::to_string(tx.getTransactionID())); + auto const json = tx.getJson(ripple::JsonOptions::none); + auto const txType = json["TransactionType"].asString(); + backend->writeTxIndexExample(uint256ToString(tx.getTransactionID()), txType); + }) ); scanner.wait(); count = hashSet.lock()->size(); diff --git a/tests/unit/data/BackendInterfaceTests.cpp b/tests/unit/data/BackendInterfaceTests.cpp index afbe2b67..a6430348 100644 --- a/tests/unit/data/BackendInterfaceTests.cpp +++ b/tests/unit/data/BackendInterfaceTests.cpp @@ -117,9 +117,20 @@ TEST_F(BackendInterfaceTest, FetchLedgerPageDisablesCacheOnMissingData) .Times(10) .WillRepeatedly(Return(uint256{"1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"})); EXPECT_CALL(*backend_, doFetchLedgerObjects(_, _, _)) - .WillOnce(Return(std::vector{ - Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{} - })); + .WillOnce(Return( + std::vector{ + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{} + } + )); runSpawn([this](auto yield) { backend_->fetchLedgerPage(std::nullopt, kMAX_SEQ, 10, false, yield); }); EXPECT_TRUE(backend_->cache().isDisabled()); @@ -134,9 +145,20 @@ TEST_F(BackendInterfaceTest, FetchLedgerPageWithoutCorruptionDetectorDoesNotDisa .Times(10) .WillRepeatedly(Return(uint256{"1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"})); EXPECT_CALL(*backend_, doFetchLedgerObjects(_, _, _)) - .WillOnce(Return(std::vector{ - Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{'s'}, Blob{} - })); + .WillOnce(Return( + std::vector{ + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{'s'}, + Blob{} + } + )); runSpawn([this](auto yield) { backend_->fetchLedgerPage(std::nullopt, kMAX_SEQ, 10, false, yield); }); EXPECT_FALSE(backend_->cache().isDisabled()); diff --git a/tests/unit/data/cassandra/SettingsProviderTests.cpp b/tests/unit/data/cassandra/SettingsProviderTests.cpp index 2aafb8b8..54c7570b 100644 --- a/tests/unit/data/cassandra/SettingsProviderTests.cpp +++ b/tests/unit/data/cassandra/SettingsProviderTests.cpp @@ -160,13 +160,17 @@ TEST_F(SettingsProviderTest, SecureBundleConfig) TEST_F(SettingsProviderTest, CertificateConfig) { 
TmpFile const file{"certificateData"}; - auto const cfg = getParseSettingsConfig(json::parse(fmt::format( - R"JSON({{ + auto const cfg = getParseSettingsConfig( + json::parse( + fmt::format( + R"JSON({{ "database.cassandra.contact_points": "127.0.0.1", "database.cassandra.certfile": "{}" }})JSON", - file.path - ))); + file.path + ) + ) + ); SettingsProvider const provider{cfg.getObject("database.cassandra")}; auto const settings = provider.getSettings(); diff --git a/tests/unit/etl/CursorFromDiffProviderTests.cpp b/tests/unit/etl/CursorFromDiffProviderTests.cpp index 87a33b92..27d5ee99 100644 --- a/tests/unit/etl/CursorFromDiffProviderTests.cpp +++ b/tests/unit/etl/CursorFromDiffProviderTests.cpp @@ -38,8 +38,8 @@ namespace { constexpr auto kSEQ = 30; std::vector const kDIFFS_FOR_SEQ = { - {.key = ripple::uint256{"05E1EAC2574BE082B00B16F907CE32E6058DEB8F9E81CF34A00E80A5D71FA4FE"}, .blob = Blob{} - }, // This object is removed in Seq while it exists in Seq-1 + {.key = ripple::uint256{"05E1EAC2574BE082B00B16F907CE32E6058DEB8F9E81CF34A00E80A5D71FA4FE"}, + .blob = Blob{}}, // This object is removed in Seq while it exists in Seq-1 {.key = ripple::uint256{"110872C7196EE6EF7032952F1852B11BB461A96FF2D7E06A8003B4BB30FD130B"}, .blob = Blob{'s'}}, {.key = ripple::uint256{"3B3A84E850C724E914293271785A31D0BFC8B9DD1B6332E527B149AD72E80E18"}, .blob = Blob{'s'}}, {.key = ripple::uint256{"4EC98C5C3F34C44409BC058998CBD64F6AED3FF6C0CAAEC15F7F42DF14EE9F04"}, .blob = Blob{'s'}}, @@ -57,8 +57,8 @@ std::vector const kDIFFS_FOR_SEQ_MINUS1 = { {.key = ripple::uint256{"110872C7196EE6EF7032952F1852B11BB461A96FF2D7E06A8003B4BB30FD1301"}, .blob = Blob{'s'}}, {.key = ripple::uint256{"3B3A84E850C724E914293271785A31D0BFC8B9DD1B6332E527B149AD72E80E12"}, .blob = Blob{'s'}}, {.key = ripple::uint256{"4EC98C5C3F34C44409BC058998CBD64F6AED3FF6C0CAAEC15F7F42DF14EE9F03"}, .blob = Blob{'s'}}, - {.key = ripple::uint256{"58CEC9F17733EA7BA68C88E6179B8F207D001EE04D4E0366F958CC04FF6AB834"}, .blob = Blob{'s'} - }, // This object is changed in both Seq and Seq-1 + {.key = ripple::uint256{"58CEC9F17733EA7BA68C88E6179B8F207D001EE04D4E0366F958CC04FF6AB834"}, + .blob = Blob{'s'}}, // This object is changed in both Seq and Seq-1 {.key = ripple::uint256{"64FB1712146BA604C274CC335C5DE7ADFE52D1F8C3E904A9F9765FE8158A3E05"}, .blob = Blob{'s'}}, {.key = ripple::uint256{"700BE23B1D9EE3E6BF52543D05843D5345B85D9EDB3D33BBD6B4C3A13C54B386"}, .blob = Blob{'s'}}, {.key = ripple::uint256{"82C297FCBCD634C4424F263D17480AA2F13975DF5846A5BB57246022CEEBE447"}, .blob = Blob{'s'}}, diff --git a/tests/unit/etlng/RegistryTests.cpp b/tests/unit/etlng/RegistryTests.cpp index b75b044d..299ee0a1 100644 --- a/tests/unit/etlng/RegistryTests.cpp +++ b/tests/unit/etlng/RegistryTests.cpp @@ -282,15 +282,17 @@ TEST_F(RegistryTest, FilteringOfTxWorksCorrectlyForInitialTransaction) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extBurn, extOffer); - reg.dispatchInitialData(etlng::model::LedgerData{ - .transactions = transactions, - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ, - }); + reg.dispatchInitialData( + etlng::model::LedgerData{ + .transactions = transactions, + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ, + } + ); } TEST_F(RegistryTest, FilteringOfTxWorksCorrectlyForTransaction) @@ -309,15 +311,17 @@ TEST_F(RegistryTest, FilteringOfTxWorksCorrectlyForTransaction) auto const header = 
createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extBurn, extOffer); - reg.dispatch(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, InitialObjectsEmpty) @@ -352,15 +356,17 @@ TEST_F(RegistryTest, ObjectsDispatched) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extObj); - reg.dispatch(etlng::model::LedgerData{ - .transactions = {}, - .objects = {util::createObject(), util::createObject(), util::createObject()}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = {}, + .objects = {util::createObject(), util::createObject(), util::createObject()}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, OnLedgerDataForBatch) @@ -377,15 +383,17 @@ TEST_F(RegistryTest, OnLedgerDataForBatch) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, ext); - reg.dispatch(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, InitialObjectsCorrectOrderOfHookCalls) @@ -418,15 +426,17 @@ TEST_F(RegistryTest, InitialDataCorrectOrderOfHookCalls) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extInitialTransaction, extInitialData); - reg.dispatchInitialData(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatchInitialData( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, LedgerDataCorrectOrderOfHookCalls) @@ -456,15 +466,17 @@ TEST_F(RegistryTest, LedgerDataCorrectOrderOfHookCalls) auto reg = Registry( state_, extOnObject, extOnTransaction, extLedgerData ); - reg.dispatch(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = std::move(objects), - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = std::move(objects), + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, ReadonlyModeLedgerDataAllowed) @@ -481,15 +493,17 @@ TEST_F(RegistryTest, ReadonlyModeLedgerDataAllowed) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, ext); - reg.dispatch(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + 
reg.dispatch( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, ReadonlyModeTransactionAllowed) @@ -506,15 +520,17 @@ TEST_F(RegistryTest, ReadonlyModeTransactionAllowed) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extTx); - reg.dispatch(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, ReadonlyModeObjectAllowed) @@ -532,15 +548,17 @@ TEST_F(RegistryTest, ReadonlyModeObjectAllowed) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extObj); - reg.dispatch(etlng::model::LedgerData{ - .transactions = {}, - .objects = std::move(objects), - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = {}, + .objects = std::move(objects), + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, ReadonlyModeInitialDataAllowed) @@ -557,15 +575,17 @@ TEST_F(RegistryTest, ReadonlyModeInitialDataAllowed) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extInitialData); - reg.dispatchInitialData(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatchInitialData( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, ReadonlyModeInitialTransactionAllowed) @@ -582,15 +602,17 @@ TEST_F(RegistryTest, ReadonlyModeInitialTransactionAllowed) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extTx); - reg.dispatchInitialData(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatchInitialData( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, ReadonlyModeInitialObjectAllowed) @@ -630,15 +652,17 @@ TEST_F(RegistryTest, ReadonlyModeRegularExtensionsNotCalled) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extLedgerData); - reg.dispatch(etlng::model::LedgerData{ - .transactions = {}, - .objects = std::move(objects), - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = {}, + .objects = std::move(objects), + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, MixedReadonlyAndRegularExtensions) @@ -658,15 +682,17 @@ TEST_F(RegistryTest, MixedReadonlyAndRegularExtensions) auto const header = 
createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, extReadonly, extRegular); - reg.dispatch(etlng::model::LedgerData{ - .transactions = {}, - .objects = std::move(objects), - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = {}, + .objects = std::move(objects), + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, MonitorInterfaceExecution) @@ -715,15 +741,17 @@ TEST_F(RegistryTest, ReadonlyModeWithAllowInReadonlyTest) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, ext); - reg.dispatch(etlng::model::LedgerData{ - .transactions = {}, - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = {}, + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); } TEST_F(RegistryTest, ReadonlyModeExecutePluralHooksIfAllowedPaths) @@ -757,25 +785,29 @@ TEST_F(RegistryTest, ReadonlyModeExecutePluralHooksIfAllowedPaths) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, ext); - reg.dispatch(etlng::model::LedgerData{ - .transactions = transactions, - .objects = objects, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = transactions, + .objects = objects, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); - reg.dispatchInitialData(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatchInitialData( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); reg.dispatchInitialObjects(kSEQ, objects, {}); } @@ -815,25 +847,29 @@ TEST_F(RegistryTest, ReadonlyModeExecuteByOneHooksIfAllowedPaths) auto const header = createLedgerHeader(kLEDGER_HASH, kSEQ); auto reg = Registry(state_, ext); - reg.dispatch(etlng::model::LedgerData{ - .transactions = transactions, - .objects = objects, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatch( + etlng::model::LedgerData{ + .transactions = transactions, + .objects = objects, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); - reg.dispatchInitialData(etlng::model::LedgerData{ - .transactions = std::move(transactions), - .objects = {}, - .successors = {}, - .edgeKeys = {}, - .header = header, - .rawHeader = {}, - .seq = kSEQ - }); + reg.dispatchInitialData( + etlng::model::LedgerData{ + .transactions = std::move(transactions), + .objects = {}, + .successors = {}, + .edgeKeys = {}, + .header = header, + .rawHeader = {}, + .seq = kSEQ + } + ); reg.dispatchInitialObjects(kSEQ, objects, {}); } diff --git a/tests/unit/etlng/ext/SuccessorTests.cpp b/tests/unit/etlng/ext/SuccessorTests.cpp index 981afac5..af08dcc7 100644 --- a/tests/unit/etlng/ext/SuccessorTests.cpp +++ b/tests/unit/etlng/ext/SuccessorTests.cpp @@ -294,12 +294,12 @@ TEST_F(SuccessorExtTests, OnLedgerDataWithDeletedObjectAndWithCachedPredecessorA 
using namespace etlng::model; auto const objKey = "B00AA769C00726371689ED66A7CF57C2502F1BF4BDFF2ACADF67A2A7B5E8960D"; - auto const predKey = - binaryStringToUint256(hexStringToBinaryString("B00AA769C00726371689ED66A7CF57C2502F1BF4BDFF2ACADF67A2A7B5E8960C" - )); - auto const succKey = - binaryStringToUint256(hexStringToBinaryString("B00AA769C00726371689ED66A7CF57C2502F1BF4BDFF2ACADF67A2A7B5E8960E" - )); + auto const predKey = binaryStringToUint256( + hexStringToBinaryString("B00AA769C00726371689ED66A7CF57C2502F1BF4BDFF2ACADF67A2A7B5E8960C") + ); + auto const succKey = binaryStringToUint256( + hexStringToBinaryString("B00AA769C00726371689ED66A7CF57C2502F1BF4BDFF2ACADF67A2A7B5E8960E") + ); auto const createdObj = util::createObject(Object::ModType::Created, objKey); auto const data = createTestData({ createdObj, diff --git a/tests/unit/rpc/JsonBoolTests.cpp b/tests/unit/rpc/JsonBoolTests.cpp index 2c88ec2c..ca7f96e6 100644 --- a/tests/unit/rpc/JsonBoolTests.cpp +++ b/tests/unit/rpc/JsonBoolTests.cpp @@ -53,8 +53,9 @@ public: {.testName = "StringFalseValue", .json = R"JSON({ "test_bool": "false" })JSON", .expectedBool = true}, {.testName = "ArrayTrueValue", .json = R"JSON({ "test_bool": [0] })JSON", .expectedBool = true}, {.testName = "ArrayFalseValue", .json = R"JSON({ "test_bool": [] })JSON", .expectedBool = false}, - {.testName = "ObjectTrueValue", .json = R"JSON({ "test_bool": { "key": null } })JSON", .expectedBool = true - }, + {.testName = "ObjectTrueValue", + .json = R"JSON({ "test_bool": { "key": null } })JSON", + .expectedBool = true}, {.testName = "ObjectFalseValue", .json = R"JSON({ "test_bool": {} })JSON", .expectedBool = false} }; } diff --git a/tests/unit/rpc/RPCEngineTests.cpp b/tests/unit/rpc/RPCEngineTests.cpp index 30192fe5..f53496dd 100644 --- a/tests/unit/rpc/RPCEngineTests.cpp +++ b/tests/unit/rpc/RPCEngineTests.cpp @@ -78,14 +78,14 @@ generateDefaultRPCEngineConfig() return ClioConfigDefinition{ {"server.max_queue_size", ConfigValue{ConfigType::Integer}.defaultValue(2)}, {"workers", ConfigValue{ConfigType::Integer}.defaultValue(4).withConstraint(gValidateUint16)}, - {"rpc.cache_timeout", ConfigValue{ConfigType::Double}.defaultValue(0.0).withConstraint(gValidatePositiveDouble) - }, + {"rpc.cache_timeout", + ConfigValue{ConfigType::Double}.defaultValue(0.0).withConstraint(gValidatePositiveDouble)}, {"log_tag_style", ConfigValue{ConfigType::String}.defaultValue("uint")}, {"dos_guard.whitelist.[]", Array{ConfigValue{ConfigType::String}.optional()}}, {"dos_guard.max_fetches", ConfigValue{ConfigType::Integer}.defaultValue(1000'000u).withConstraint(gValidateUint32)}, - {"dos_guard.max_connections", ConfigValue{ConfigType::Integer}.defaultValue(20u).withConstraint(gValidateUint32) - }, + {"dos_guard.max_connections", + ConfigValue{ConfigType::Integer}.defaultValue(20u).withConstraint(gValidateUint32)}, {"dos_guard.max_requests", ConfigValue{ConfigType::Integer}.defaultValue(20u).withConstraint(gValidateUint32)} }; } @@ -212,8 +212,9 @@ TEST_P(RPCEngineFlowParameterTest, Test) if (testBundle.forwarded) { EXPECT_CALL(*mockLoadBalancerPtr_, forwardToRippled) - .WillOnce(Return(std::expected(json::parse(kFORWARD_REPLY).as_object()) - )); + .WillOnce( + Return(std::expected(json::parse(kFORWARD_REPLY).as_object())) + ); EXPECT_CALL(*handlerProvider, contains).WillOnce(Return(true)); EXPECT_CALL(*mockCountersPtr_, rpcForwarded(testBundle.method)); } @@ -460,8 +461,8 @@ TEST_F(RPCEngineTest, NotCacheIfErrorHappen) auto const cfgCache = ClioConfigDefinition{ {"server.max_queue_size", 
ConfigValue{ConfigType::Integer}.defaultValue(2)}, {"workers", ConfigValue{ConfigType::Integer}.defaultValue(4).withConstraint(gValidateUint16)}, - {"rpc.cache_timeout", ConfigValue{ConfigType::Double}.defaultValue(10.0).withConstraint(gValidatePositiveDouble) - } + {"rpc.cache_timeout", + ConfigValue{ConfigType::Double}.defaultValue(10.0).withConstraint(gValidatePositiveDouble)} }; auto const notAdmin = false; diff --git a/tests/unit/rpc/RPCHelpersTests.cpp b/tests/unit/rpc/RPCHelpersTests.cpp index db12bc46..244bb0d0 100644 --- a/tests/unit/rpc/RPCHelpersTests.cpp +++ b/tests/unit/rpc/RPCHelpersTests.cpp @@ -523,13 +523,15 @@ TEST_F(RPCHelpersTest, TransactionAndMetadataBinaryJsonV2) TEST_F(RPCHelpersTest, ParseIssue) { - auto issue = parseIssue(boost::json::parse( - R"JSON({ + auto issue = parseIssue( + boost::json::parse( + R"JSON({ "issuer": "rLEsXccBGNR3UPuPu2hUXPjziKC3qKSBun", "currency": "JPY" })JSON" - ) - .as_object()); + ) + .as_object() + ); EXPECT_TRUE(issue.account == getAccountIdWithString(kACCOUNT2)); issue = parseIssue(boost::json::parse(R"JSON({"currency": "XRP"})JSON").as_object()); @@ -540,13 +542,15 @@ TEST_F(RPCHelpersTest, ParseIssue) EXPECT_THROW(parseIssue(boost::json::parse(R"JSON({"currency": "XRP2"})JSON").as_object()), std::runtime_error); EXPECT_THROW( - parseIssue(boost::json::parse( - R"JSON({ + parseIssue( + boost::json::parse( + R"JSON({ "issuer": "abcd", "currency": "JPY" })JSON" - ) - .as_object()), + ) + .as_object() + ), std::runtime_error ); @@ -661,7 +665,8 @@ TEST_F(RPCHelpersTest, isDeepFrozen_TrustLineIsNotDeepFrozen) .WillByDefault(Return(trustlineFrozen.getSerializer().peekData())); runSpawn([&](boost::asio::yield_context yield) { - EXPECT_FALSE(isDeepFrozen(*backend_, kLEDGER_SEQ_OBJECT, account, ripple::Currency{kCURRENCY}, account2, yield) + EXPECT_FALSE( + isDeepFrozen(*backend_, kLEDGER_SEQ_OBJECT, account, ripple::Currency{kCURRENCY}, account2, yield) ); }); } @@ -1102,8 +1107,10 @@ generateTestValuesForParametersTest() {.testName = "ledger", .method = "ledger", .testJson = R"JSON({})JSON", .expected = false}, {.testName = "ledgerWithType", .method = "ledger", .testJson = R"JSON({"type": "fee"})JSON", .expected = false}, {.testName = "ledgerFullTrue", .method = "ledger", .testJson = R"JSON({"full": true})JSON", .expected = true}, - {.testName = "ledgerFullFalse", .method = "ledger", .testJson = R"JSON({"full": false})JSON", .expected = false - }, + {.testName = "ledgerFullFalse", + .method = "ledger", + .testJson = R"JSON({"full": false})JSON", + .expected = false}, {.testName = "ledgerFullIsStr", .method = "ledger", .testJson = R"JSON({"full": "String"})JSON", @@ -1113,14 +1120,22 @@ generateTestValuesForParametersTest() .testJson = R"JSON({"full": ""})JSON", .expected = false}, {.testName = "ledgerFullIsNumber1", .method = "ledger", .testJson = R"JSON({"full": 1})JSON", .expected = true}, - {.testName = "ledgerFullIsNumber0", .method = "ledger", .testJson = R"JSON({"full": 0})JSON", .expected = false - }, - {.testName = "ledgerFullIsNull", .method = "ledger", .testJson = R"JSON({"full": null})JSON", .expected = false - }, - {.testName = "ledgerFullIsFloat0", .method = "ledger", .testJson = R"JSON({"full": 0.0})JSON", .expected = false - }, - {.testName = "ledgerFullIsFloat1", .method = "ledger", .testJson = R"JSON({"full": 0.1})JSON", .expected = true - }, + {.testName = "ledgerFullIsNumber0", + .method = "ledger", + .testJson = R"JSON({"full": 0})JSON", + .expected = false}, + {.testName = "ledgerFullIsNull", + .method = "ledger", + 
.testJson = R"JSON({"full": null})JSON", + .expected = false}, + {.testName = "ledgerFullIsFloat0", + .method = "ledger", + .testJson = R"JSON({"full": 0.0})JSON", + .expected = false}, + {.testName = "ledgerFullIsFloat1", + .method = "ledger", + .testJson = R"JSON({"full": 0.1})JSON", + .expected = true}, {.testName = "ledgerFullIsArray", .method = "ledger", .testJson = R"JSON({"full": [1]})JSON", .expected = true}, {.testName = "ledgerFullIsEmptyArray", .method = "ledger", diff --git a/tests/unit/rpc/handlers/AMMInfoTests.cpp b/tests/unit/rpc/handlers/AMMInfoTests.cpp index 9a1dd3cd..bfee3dfd 100644 --- a/tests/unit/rpc/handlers/AMMInfoTests.cpp +++ b/tests/unit/rpc/handlers/AMMInfoTests.cpp @@ -179,14 +179,16 @@ TEST_F(RPCAMMInfoHandlerTest, AccountNotFound) ON_CALL(*backend_, doFetchLedgerObject(accountKey, testing::_, testing::_)) .WillByDefault(Return(accountRoot.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}", "account": "{}" }})JSON", - kAMM_ACCOUNT, - kNOTFOUND_ACCOUNT - )); + kAMM_ACCOUNT, + kNOTFOUND_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { @@ -205,12 +207,14 @@ TEST_F(RPCAMMInfoHandlerTest, AMMAccountNotExist) ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(lgrInfo)); ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kWRONG_AMM_ACCOUNT - )); + kWRONG_AMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { @@ -228,12 +232,14 @@ TEST_F(RPCAMMInfoHandlerTest, AMMAccountNotInDBIsMalformed) ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(lgrInfo)); ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { @@ -254,12 +260,14 @@ TEST_F(RPCAMMInfoHandlerTest, AMMAccountNotFoundMissingAmmField) ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(lgrInfo)); ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(accountRoot.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { @@ -289,12 +297,14 @@ TEST_F(RPCAMMInfoHandlerTest, AMMAccountAmmBlobNotFound) ON_CALL(*backend_, doFetchLedgerObject(ammKeylet.key, testing::_, testing::_)) .WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { @@ -328,12 +338,14 @@ 
TEST_F(RPCAMMInfoHandlerTest, AMMAccountAccBlobNotFound) ON_CALL(*backend_, doFetchLedgerObject(account2Key, testing::_, testing::_)) .WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { @@ -373,18 +385,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathMinimalFirstXRPNoTrustline) ON_CALL(*backend_, doFetchLedgerObject(feesKey, kSEQ, _)).WillByDefault(Return(feesObj)); ON_CALL(*backend_, doFetchLedgerObject(issue2LineKey, kSEQ, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto expectedResult = json::parse(fmt::format( - R"JSON({{ + auto expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -405,13 +420,14 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathMinimalFirstXRPNoTrustline) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -453,20 +469,23 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAccount) ON_CALL(*backend_, doFetchLedgerObject(accountHoldsKeylet.key, kSEQ, _)) .WillByDefault(Return(trustline.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}", "account": "{}" }})JSON", - kAMM_ACCOUNT, - kAMM_ACCOUNT2 - )); + kAMM_ACCOUNT, + kAMM_ACCOUNT2 + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto const expectedResult = json::parse(fmt::format( - R"JSON({{ + auto const expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -487,13 +506,14 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAccount) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT2, - "JPY", - kAMM_ACCOUNT, - kAMM_ACCOUNT2, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT2, + "JPY", + kAMM_ACCOUNT, + kAMM_ACCOUNT2, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -527,18 +547,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathMinimalSecondXRPNoTrustline) ON_CALL(*backend_, doFetchLedgerObject(feesKey, kSEQ, _)).WillByDefault(Return(feesObj)); ON_CALL(*backend_, doFetchLedgerObject(issue2LineKey, kSEQ, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; 
runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto const expectedResult = json::parse(fmt::format( - R"JSON({{ + auto const expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -559,13 +582,14 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathMinimalSecondXRPNoTrustline) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -597,18 +621,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathNonXRPNoTrustlines) ON_CALL(*backend_, doFetchLedgerObject(feesKey, kSEQ, _)).WillByDefault(Return(feesObj)); ON_CALL(*backend_, doFetchLedgerObject(issue2LineKey, kSEQ, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto const expectedResult = json::parse(fmt::format( - R"JSON({{ + auto const expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -634,15 +661,16 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathNonXRPNoTrustlines) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "USD", - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "USD", + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -686,18 +714,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathFrozen) ON_CALL(*backend_, doFetchLedgerObject(issue2LineKey, kSEQ, _)) .WillByDefault(Return(trustline2BalanceFrozen.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto const expectedResult = json::parse(fmt::format( - R"JSON({{ + auto const expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -723,15 +754,16 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathFrozen) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "USD", - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "USD", + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -776,18 +808,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathFrozenIssuer) ON_CALL(*backend_, doFetchLedgerObject(issue2LineKey, kSEQ, _)) .WillByDefault(Return(trustline2BalanceFrozen.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ 
"amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto const expectedResult = json::parse(fmt::format( - R"JSON({{ + auto const expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -813,15 +848,16 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathFrozenIssuer) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "USD", - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "USD", + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -858,18 +894,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithTrustline) ON_CALL(*backend_, doFetchLedgerObject(issue2LineKey, kSEQ, _)) .WillByDefault(Return(trustlineBalance.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto expectedResult = json::parse(fmt::format( - R"JSON({{ + auto expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -890,13 +929,14 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithTrustline) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -935,18 +975,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithVoteSlots) ON_CALL(*backend_, doFetchLedgerObject(issue2LineKey, kSEQ, _)) .WillByDefault(Return(trustlineBalance.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto expectedResult = json::parse(fmt::format( - R"JSON({{ + auto expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -979,15 +1022,16 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithVoteSlots) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kAMM_ACCOUNT, - kAMM_ACCOUNT2, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kAMM_ACCOUNT, + kAMM_ACCOUNT2, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -1028,18 +1072,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAuctionSlot) ON_CALL(*backend_, doFetchLedgerObject(issue2LineKey, kSEQ, _)) .WillByDefault(Return(trustlineBalance.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static 
auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "amm_account": "{}" }})JSON", - kAMM_ACCOUNT - )); + kAMM_ACCOUNT + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto expectedResult = json::parse(fmt::format( - R"JSON({{ + auto expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -1075,16 +1122,17 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAuctionSlot) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kAMM_ACCOUNT2, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kAMM_ACCOUNT2, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -1116,8 +1164,9 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAssetsMatchingInputOrder) ON_CALL(*backend_, doFetchLedgerObject(ammKeylet.key, testing::_, testing::_)) .WillByDefault(Return(ammObj.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "asset": {{ "currency": "JPY", "issuer": "{}" @@ -1127,15 +1176,17 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAssetsMatchingInputOrder) "issuer": "{}" }} }})JSON", - kAMM_ACCOUNT, - kAMM_ACCOUNT2 - )); + kAMM_ACCOUNT, + kAMM_ACCOUNT2 + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto expectedResult = json::parse(fmt::format( - R"JSON({{ + auto expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -1180,20 +1231,21 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAssetsMatchingInputOrder) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT, - "USD", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kAMM_ACCOUNT2, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT, + "USD", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kAMM_ACCOUNT2, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); @@ -1226,8 +1278,9 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAssetsPreservesInputOrder) ON_CALL(*backend_, doFetchLedgerObject(ammKeylet.key, testing::_, testing::_)) .WillByDefault(Return(ammObj.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "asset": {{ "currency": "USD", "issuer": "{}" @@ -1237,15 +1290,17 @@ TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAssetsPreservesInputOrder) "issuer": "{}" }} }})JSON", - kAMM_ACCOUNT, - kAMM_ACCOUNT2 - )); + kAMM_ACCOUNT, + kAMM_ACCOUNT2 + ) + ); auto const handler = AnyHandler{AMMInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); - auto expectedResult = json::parse(fmt::format( - R"JSON({{ + auto expectedResult = json::parse( + fmt::format( + R"JSON({{ "amm": {{ "lp_token": {{ "currency": "{}", @@ -1290,20 +1345,21 @@ 
TEST_F(RPCAMMInfoHandlerTest, HappyPathWithAssetsPreservesInputOrder) "ledger_hash": "{}", "validated": true }})JSON", - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - "USD", - kAMM_ACCOUNT, - "JPY", - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kLP_ISSUE_CURRENCY, - kAMM_ACCOUNT, - kAMM_ACCOUNT2, - kAMM_ACCOUNT, - kAMM_ACCOUNT2, - kLEDGER_HASH - )); + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + "USD", + kAMM_ACCOUNT, + "JPY", + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kLP_ISSUE_CURRENCY, + kAMM_ACCOUNT, + kAMM_ACCOUNT2, + kAMM_ACCOUNT, + kAMM_ACCOUNT2, + kLEDGER_HASH + ) + ); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), expectedResult); diff --git a/tests/unit/rpc/handlers/AccountChannelsTests.cpp b/tests/unit/rpc/handlers/AccountChannelsTests.cpp index e375eb23..5bc57e6f 100644 --- a/tests/unit/rpc/handlers/AccountChannelsTests.cpp +++ b/tests/unit/rpc/handlers/AccountChannelsTests.cpp @@ -68,13 +68,15 @@ TEST_F(RPCAccountChannelsHandlerTest, LimitNotInt) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": "t" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -87,13 +89,15 @@ TEST_F(RPCAccountChannelsHandlerTest, LimitNegative) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": -1 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -106,13 +110,15 @@ TEST_F(RPCAccountChannelsHandlerTest, LimitZero) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 0 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -125,14 +131,16 @@ TEST_F(RPCAccountChannelsHandlerTest, NonHexLedgerHash) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 10, "ledger_hash": "xxx" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -146,14 +154,16 @@ TEST_F(RPCAccountChannelsHandlerTest, NonStringLedgerHash) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 10, "ledger_hash": 123 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -167,14 +177,16 @@ TEST_F(RPCAccountChannelsHandlerTest, InvalidLedgerIndexString) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 10, "ledger_index": "notvalidated" }})JSON", - 
kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -188,13 +200,15 @@ TEST_F(RPCAccountChannelsHandlerTest, MarkerNotString) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": 9 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -211,13 +225,15 @@ TEST_F(RPCAccountChannelsHandlerTest, InvalidMarker) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "123invalid" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -227,13 +243,15 @@ TEST_F(RPCAccountChannelsHandlerTest, InvalidMarker) }); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": 401 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -283,14 +301,16 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerHash) .WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -308,13 +328,15 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerStringIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "4" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -330,13 +352,15 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerIntIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": 4 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -355,14 +379,16 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerHash2) auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 31); ON_CALL(*backend_, 
fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader)); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -379,13 +405,15 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistLedgerViaLedgerIndex2) // no need to check from db,call fetchLedgerBySequence 0 time // differ from previous logic EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "31" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -405,14 +433,16 @@ TEST_F(RPCAccountChannelsHandlerTest, NonExistAccount) // fetch account object return empty ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -483,12 +513,14 @@ TEST_F(RPCAccountChannelsHandlerTest, DefaultParameterTest) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -534,13 +566,15 @@ TEST_F(RPCAccountChannelsHandlerTest, UseLimit) runSpawn([this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 20 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); @@ -550,26 +584,30 @@ TEST_F(RPCAccountChannelsHandlerTest, UseLimit) runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 9 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); // todo: check limit? 
}); runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountChannelsHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 401 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); // todo: check limit? }); @@ -619,15 +657,17 @@ TEST_F(RPCAccountChannelsHandlerTest, UseDestination) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 30, "destination_account": "{}" }})JSON", - kACCOUNT, - kACCOUNT3 - )); + kACCOUNT, + kACCOUNT3 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -656,12 +696,14 @@ TEST_F(RPCAccountChannelsHandlerTest, EmptyChannel) ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, testing::_, testing::_)) .WillByDefault(Return(ownerDir.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -741,12 +783,14 @@ TEST_F(RPCAccountChannelsHandlerTest, OptionalResponseField) bbs.push_back(channel1.getSerializer().peekData()); ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -803,14 +847,16 @@ TEST_F(RPCAccountChannelsHandlerTest, MarkerOutput) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - kLIMIT - )); + kACCOUNT, + kLIMIT + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -858,17 +904,19 @@ TEST_F(RPCAccountChannelsHandlerTest, MarkerInput) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {}, "marker": "{},{}" }})JSON", - kACCOUNT, - kLIMIT, - kINDEX1, - kNEXT_PAGE - )); + kACCOUNT, + kLIMIT, + kINDEX1, + kNEXT_PAGE + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -909,14 +957,16 @@ TEST_F(RPCAccountChannelsHandlerTest, LimitLessThanMin) ON_CALL(*backend_, 
doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountChannelsHandler::kLIMIT_MIN - 1 - )); + kACCOUNT, + AccountChannelsHandler::kLIMIT_MIN - 1 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -955,14 +1005,16 @@ TEST_F(RPCAccountChannelsHandlerTest, LimitMoreThanMax) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountChannelsHandler::kLIMIT_MAX + 1 - )); + kACCOUNT, + AccountChannelsHandler::kLIMIT_MAX + 1 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountChannelsHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); diff --git a/tests/unit/rpc/handlers/AccountCurrenciesTests.cpp b/tests/unit/rpc/handlers/AccountCurrenciesTests.cpp index 2835e59a..b6d2278e 100644 --- a/tests/unit/rpc/handlers/AccountCurrenciesTests.cpp +++ b/tests/unit/rpc/handlers/AccountCurrenciesTests.cpp @@ -73,12 +73,14 @@ TEST_F(RPCAccountCurrenciesHandlerTest, AccountNotExist) ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountCurrenciesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -95,12 +97,14 @@ TEST_F(RPCAccountCurrenciesHandlerTest, LedgerNonExistViaIntSequence) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(30, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountCurrenciesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -119,14 +123,16 @@ TEST_F(RPCAccountCurrenciesHandlerTest, LedgerNonExistViaStringSequence) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(12, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "{}" }})JSON", - kACCOUNT, - kSEQ - )); + kACCOUNT, + kSEQ + ) + ); auto const handler = AnyHandler{AccountCurrenciesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -144,14 +150,16 @@ TEST_F(RPCAccountCurrenciesHandlerTest, LedgerNonExistViaHash) ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" 
}})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); auto const handler = AnyHandler{AccountCurrenciesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -210,12 +218,14 @@ TEST_F(RPCAccountCurrenciesHandlerTest, DefaultParameter) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountCurrenciesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -245,14 +255,16 @@ TEST_F(RPCAccountCurrenciesHandlerTest, RequestViaLegderHash) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); auto const handler = AnyHandler{AccountCurrenciesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -282,14 +294,16 @@ TEST_F(RPCAccountCurrenciesHandlerTest, RequestViaLegderSeq) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": {} }})JSON", - kACCOUNT, - ledgerSeq - )); + kACCOUNT, + ledgerSeq + ) + ); auto const handler = AnyHandler{AccountCurrenciesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); diff --git a/tests/unit/rpc/handlers/AccountInfoTests.cpp b/tests/unit/rpc/handlers/AccountInfoTests.cpp index f784b108..5e9b05d2 100644 --- a/tests/unit/rpc/handlers/AccountInfoTests.cpp +++ b/tests/unit/rpc/handlers/AccountInfoTests.cpp @@ -187,13 +187,15 @@ TEST_F(RPCAccountInfoHandlerTest, LedgerNonExistViaIntSequence) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(30, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": 30 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -210,13 +212,15 @@ TEST_F(RPCAccountInfoHandlerTest, LedgerNonExistViaStringSequence) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(30, _)).WillByDefault(Return(std::nullopt)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "30" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -234,14 +238,16 @@ TEST_F(RPCAccountInfoHandlerTest, LedgerNonExistViaHash) ON_CALL(*backend_, 
fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -261,12 +267,14 @@ TEST_F(RPCAccountInfoHandlerTest, AccountNotExist) ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -287,12 +295,14 @@ TEST_F(RPCAccountInfoHandlerTest, AccountInvalid) ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(createLegacyFeeSettingBlob(1, 2, 3, 4, 0))); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -321,13 +331,15 @@ TEST_F(RPCAccountInfoHandlerTest, SignerListsInvalid) EXPECT_CALL(*mockAmendmentCenterPtr_, isEnabled(_, Amendments::Clawback, _)).WillOnce(Return(false)); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "signer_lists": true }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -424,13 +436,15 @@ TEST_F(RPCAccountInfoHandlerTest, SignerListsTrueV2) EXPECT_CALL(*mockAmendmentCenterPtr_, isEnabled(_, Amendments::Clawback, _)).WillOnce(Return(false)); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "signer_lists": true }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{.yield = yield, .apiVersion = 2}); @@ -525,13 +539,15 @@ TEST_F(RPCAccountInfoHandlerTest, SignerListsTrueV1) EXPECT_CALL(*mockAmendmentCenterPtr_, isEnabled(_, Amendments::Clawback, _)).WillOnce(Return(false)); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "signer_lists": true }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { 
auto const output = handler.process(kINPUT, Context{.yield = yield, .apiVersion = 1}); @@ -599,12 +615,14 @@ TEST_F(RPCAccountInfoHandlerTest, Flags) EXPECT_CALL(*mockAmendmentCenterPtr_, isEnabled(_, Amendments::Clawback, _)).WillOnce(Return(false)); EXPECT_CALL(*backend_, doFetchLedgerObject); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -628,12 +646,14 @@ TEST_F(RPCAccountInfoHandlerTest, IdentAndSignerListsFalse) EXPECT_CALL(*mockAmendmentCenterPtr_, isEnabled(_, Amendments::Clawback, _)).WillOnce(Return(false)); EXPECT_CALL(*backend_, doFetchLedgerObject); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "ident": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -706,12 +726,14 @@ TEST_F(RPCAccountInfoHandlerTest, DisallowIncoming) EXPECT_CALL(*mockAmendmentCenterPtr_, isEnabled(_, Amendments::Clawback, _)).WillOnce(Return(false)); EXPECT_CALL(*backend_, doFetchLedgerObject); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -780,12 +802,14 @@ TEST_F(RPCAccountInfoHandlerTest, Clawback) EXPECT_CALL(*mockAmendmentCenterPtr_, isEnabled(_, Amendments::Clawback, _)).WillOnce(Return(true)); EXPECT_CALL(*backend_, doFetchLedgerObject); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountInfoHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); diff --git a/tests/unit/rpc/handlers/AccountLinesTests.cpp b/tests/unit/rpc/handlers/AccountLinesTests.cpp index 06a08716..f8c86c9c 100644 --- a/tests/unit/rpc/handlers/AccountLinesTests.cpp +++ b/tests/unit/rpc/handlers/AccountLinesTests.cpp @@ -76,14 +76,16 @@ TEST_F(RPCAccountLinesHandlerTest, NonHexLedgerHash) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 10, "ledger_hash": "xxx" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -97,14 +99,16 @@ TEST_F(RPCAccountLinesHandlerTest, NonStringLedgerHash) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 10, "ledger_hash": 123 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); 
auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -118,14 +122,16 @@ TEST_F(RPCAccountLinesHandlerTest, InvalidLedgerIndexString) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 10, "ledger_index": "notvalidated" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -139,13 +145,15 @@ TEST_F(RPCAccountLinesHandlerTest, MarkerNotString) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": 9 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -162,13 +170,15 @@ TEST_F(RPCAccountLinesHandlerTest, InvalidMarker) { runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "123invalid" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -178,13 +188,15 @@ TEST_F(RPCAccountLinesHandlerTest, InvalidMarker) }); runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": 401 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -329,14 +341,16 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerHash) .WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -354,13 +368,15 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerStringIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "4" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -376,13 +392,15 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerIntIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": 4 
}})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -401,14 +419,16 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerHash2) auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 31); ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader)); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -425,13 +445,15 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistLedgerViaLedgerIndex2) // no need to check from db, call fetchLedgerBySequence 0 time // differ from previous logic EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "31" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -451,14 +473,16 @@ TEST_F(RPCAccountLinesHandlerTest, NonExistAccount) // fetch account object return empty ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -501,12 +525,14 @@ TEST_F(RPCAccountLinesHandlerTest, DefaultParameterTest) EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); runSpawn([this](auto yield) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const correctOutput = R"JSON({ "account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn", @@ -580,13 +606,15 @@ TEST_F(RPCAccountLinesHandlerTest, UseLimit) runSpawn([this](auto yield) { auto handler = AnyHandler{AccountLinesHandler{this->backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 20 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); @@ -596,26 +624,30 @@ TEST_F(RPCAccountLinesHandlerTest, UseLimit) runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 9 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); // todo: check limit somehow? 
}); runSpawn([this](auto yield) { auto const handler = AnyHandler{AccountLinesHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 401 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); // todo: check limit somehow? }); @@ -665,15 +697,17 @@ TEST_F(RPCAccountLinesHandlerTest, UseDestination) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 30, "peer": "{}" }})JSON", - kACCOUNT, - kACCOUNT3 - )); + kACCOUNT, + kACCOUNT3 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountLinesHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -702,12 +736,14 @@ TEST_F(RPCAccountLinesHandlerTest, EmptyChannel) ON_CALL(*backend_, doFetchLedgerObject(owneDirKk, testing::_, testing::_)) .WillByDefault(Return(ownerDir.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountLinesHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -792,12 +828,14 @@ TEST_F(RPCAccountLinesHandlerTest, OptionalResponseFieldWithDeepFreeze) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountLinesHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -872,12 +910,14 @@ TEST_F(RPCAccountLinesHandlerTest, FrozenTrustLineResponse) bbs.push_back(line2.getSerializer().peekData()); EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountLinesHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -935,14 +975,16 @@ TEST_F(RPCAccountLinesHandlerTest, MarkerOutput) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - kLIMIT - )); + kACCOUNT, + kLIMIT + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountLinesHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -990,17 +1032,19 @@ TEST_F(RPCAccountLinesHandlerTest, MarkerInput) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + 
fmt::format( + R"JSON({{ "account": "{}", "limit": {}, "marker": "{},{}" }})JSON", - kACCOUNT, - kLIMIT, - kINDEX1, - kNEXT_PAGE - )); + kACCOUNT, + kLIMIT, + kINDEX1, + kNEXT_PAGE + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{AccountLinesHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -1043,14 +1087,16 @@ TEST_F(RPCAccountLinesHandlerTest, LimitLessThanMin) EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); runSpawn([this](auto yield) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountLinesHandler::kLIMIT_MIN - 1 - )); + kACCOUNT, + AccountLinesHandler::kLIMIT_MIN - 1 + ) + ); auto const correctOutput = fmt::format( R"JSON({{ "account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn", @@ -1120,14 +1166,16 @@ TEST_F(RPCAccountLinesHandlerTest, LimitMoreThanMax) EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); runSpawn([this](auto yield) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountLinesHandler::kLIMIT_MAX + 1 - )); + kACCOUNT, + AccountLinesHandler::kLIMIT_MAX + 1 + ) + ); auto const correctOutput = fmt::format( R"JSON({{ "account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jpn", diff --git a/tests/unit/rpc/handlers/AccountNFTsTests.cpp b/tests/unit/rpc/handlers/AccountNFTsTests.cpp index 7c3b2e99..d88c32b9 100644 --- a/tests/unit/rpc/handlers/AccountNFTsTests.cpp +++ b/tests/unit/rpc/handlers/AccountNFTsTests.cpp @@ -181,14 +181,16 @@ TEST_F(RPCAccountNFTsHandlerTest, LedgerNotFoundViaHash) ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -207,14 +209,16 @@ TEST_F(RPCAccountNFTsHandlerTest, LedgerNotFoundViaStringIndex) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(kSEQ, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "{}" }})JSON", - kACCOUNT, - kSEQ - )); + kACCOUNT, + kSEQ + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -233,14 +237,16 @@ TEST_F(RPCAccountNFTsHandlerTest, LedgerNotFoundViaIntIndex) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(kSEQ, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": {} }})JSON", - kACCOUNT, - kSEQ - )); + kACCOUNT, + kSEQ + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -260,12 +266,14 @@ TEST_F(RPCAccountNFTsHandlerTest, AccountNotFound) ON_CALL(*backend_, 
doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -323,12 +331,14 @@ TEST_F(RPCAccountNFTsHandlerTest, NormalPath) .WillByDefault(Return(pageObject.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -357,14 +367,16 @@ TEST_F(RPCAccountNFTsHandlerTest, Limit) .WillByDefault(Return(pageObject.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1 + kLIMIT); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - kLIMIT - )); + kACCOUNT, + kLIMIT + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -392,14 +404,16 @@ TEST_F(RPCAccountNFTsHandlerTest, Marker) .WillByDefault(Return(pageObject.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{}" }})JSON", - kACCOUNT, - kPAGE - )); + kACCOUNT, + kPAGE + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -419,14 +433,16 @@ TEST_F(RPCAccountNFTsHandlerTest, InvalidMarker) ON_CALL(*backend_, doFetchLedgerObject(ripple::keylet::account(accountID).key, 30, _)) .WillByDefault(Return(accountObject.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{}" }})JSON", - kACCOUNT, - kINVALID_PAGE - )); + kACCOUNT, + kINVALID_PAGE + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -448,12 +464,14 @@ TEST_F(RPCAccountNFTsHandlerTest, AccountWithNoNFT) ON_CALL(*backend_, doFetchLedgerObject(ripple::keylet::account(accountID).key, 30, _)) .WillByDefault(Return(accountObject.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -480,14 +498,16 @@ TEST_F(RPCAccountNFTsHandlerTest, invalidPage) .WillByDefault(Return(accountObject.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = 
json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{}" }})JSON", - kACCOUNT, - kPAGE - )); + kACCOUNT, + kPAGE + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -546,14 +566,16 @@ TEST_F(RPCAccountNFTsHandlerTest, LimitLessThanMin) .WillByDefault(Return(pageObject.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountNFTsHandler::kLIMIT_MIN - 1 - )); + kACCOUNT, + AccountNFTsHandler::kLIMIT_MIN - 1 + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -610,14 +632,16 @@ TEST_F(RPCAccountNFTsHandlerTest, LimitMoreThanMax) .WillByDefault(Return(pageObject.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountNFTsHandler::kLIMIT_MAX + 1 - )); + kACCOUNT, + AccountNFTsHandler::kLIMIT_MAX + 1 + ) + ); auto const handler = AnyHandler{AccountNFTsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); diff --git a/tests/unit/rpc/handlers/AccountObjectsTests.cpp b/tests/unit/rpc/handlers/AccountObjectsTests.cpp index b1fc5c98..734d92c2 100644 --- a/tests/unit/rpc/handlers/AccountObjectsTests.cpp +++ b/tests/unit/rpc/handlers/AccountObjectsTests.cpp @@ -223,13 +223,15 @@ TEST_F(RPCAccountObjectsHandlerTest, LedgerNonExistViaIntSequence) // return empty ledgerHeader EXPECT_CALL(*backend_, fetchLedgerBySequence(kMAX_SEQ, _)).WillOnce(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": 30 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -245,13 +247,15 @@ TEST_F(RPCAccountObjectsHandlerTest, LedgerNonExistViaStringSequence) // return empty ledgerHeader EXPECT_CALL(*backend_, fetchLedgerBySequence(kMAX_SEQ, _)).WillOnce(Return(std::nullopt)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "30" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -268,14 +272,16 @@ TEST_F(RPCAccountObjectsHandlerTest, LedgerNonExistViaHash) EXPECT_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillOnce(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; 
runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -293,12 +299,14 @@ TEST_F(RPCAccountObjectsHandlerTest, AccountNotExist) EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader)); EXPECT_CALL(*backend_, doFetchLedgerObject).WillOnce(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -365,12 +373,14 @@ TEST_F(RPCAccountObjectsHandlerTest, DefaultParameterNoNFTFound) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -409,14 +419,16 @@ TEST_F(RPCAccountObjectsHandlerTest, Limit) } EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - kLIMIT - )); + kACCOUNT, + kLIMIT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -453,15 +465,17 @@ TEST_F(RPCAccountObjectsHandlerTest, Marker) } EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{},{}" }})JSON", - kACCOUNT, - kINDEX1, - kPAGE - )); + kACCOUNT, + kINDEX1, + kPAGE + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -507,14 +521,16 @@ TEST_F(RPCAccountObjectsHandlerTest, MultipleDirNoNFT) } EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - 2 * kCOUNT - )); + kACCOUNT, + 2 * kCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -561,13 +577,15 @@ TEST_F(RPCAccountObjectsHandlerTest, TypeFilter) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "type": "offer" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -605,13 +623,15 @@ TEST_F(RPCAccountObjectsHandlerTest, TypeFilterAmmType) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "type": "amm" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -658,13 +678,15 @@ TEST_F(RPCAccountObjectsHandlerTest, TypeFilterReturnEmpty) EXPECT_CALL(*backend_, 
doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "type": "check" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -713,13 +735,15 @@ TEST_F(RPCAccountObjectsHandlerTest, DeletionBlockersOnlyFilter) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "deletion_blockers_only": true }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -756,14 +780,16 @@ TEST_F(RPCAccountObjectsHandlerTest, DeletionBlockersOnlyFilterWithTypeFilter) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "deletion_blockers_only": true, "type": "payment_channel" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -818,13 +844,15 @@ TEST_F(RPCAccountObjectsHandlerTest, DeletionBlockersOnlyFilterEmptyResult) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "deletion_blockers_only": true }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -878,14 +906,16 @@ TEST_F(RPCAccountObjectsHandlerTest, DeletionBlockersOnlyFilterWithIncompatibleT EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "deletion_blockers_only": true, "type": "offer" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -992,12 +1022,14 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTMixOtherObjects) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1031,14 +1063,16 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTReachLimitReturnMarker) current = previous; } - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - 10 - )); + kACCOUNT, + 10 + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1080,14 +1114,16 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTReachLimitNoMarker) ); EXPECT_CALL(*backend_, doFetchLedgerObject(current, 30, _)).WillOnce(Return(nftpage11.getSerializer().peekData())); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ 
"account": "{}", "limit": {} }})JSON", - kACCOUNT, - 11 - )); + kACCOUNT, + 11 + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1158,15 +1194,17 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTMarker) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{},{}" }})JSON", - kACCOUNT, - ripple::strHex(marker), - std::numeric_limits::max() - )); + kACCOUNT, + ripple::strHex(marker), + std::numeric_limits::max() + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1214,15 +1252,17 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTMarkerNoMoreNFT) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{},{}" }})JSON", - kACCOUNT, - ripple::strHex(ripple::uint256{beast::zero}), - std::numeric_limits::max() - )); + kACCOUNT, + ripple::strHex(ripple::uint256{beast::zero}), + std::numeric_limits::max() + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1242,15 +1282,17 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTMarkerNotInRange) auto const accountKk = ripple::keylet::account(account).key; EXPECT_CALL(*backend_, doFetchLedgerObject(accountKk, kMAX_SEQ, _)).WillOnce(Return(Blob{'f', 'a', 'k', 'e'})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{},{}" }})JSON", - kACCOUNT, - kINDEX1, - std::numeric_limits::max() - )); + kACCOUNT, + kINDEX1, + std::numeric_limits::max() + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1275,15 +1317,17 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTMarkerNotExist) auto const accountNftMax = ripple::keylet::nftpage_max(account).key; EXPECT_CALL(*backend_, doFetchLedgerObject(accountNftMax, kMAX_SEQ, _)).WillOnce(Return(std::nullopt)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{},{}" }})JSON", - kACCOUNT, - ripple::strHex(accountNftMax), - std::numeric_limits::max() - )); + kACCOUNT, + ripple::strHex(accountNftMax), + std::numeric_limits::max() + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1349,16 +1393,18 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTLimitAdjust) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{},{}", "limit": 12 }})JSON", - kACCOUNT, - ripple::strHex(marker), - std::numeric_limits::max() - )); + kACCOUNT, + ripple::strHex(marker), + std::numeric_limits::max() + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1445,13 +1491,15 @@ TEST_F(RPCAccountObjectsHandlerTest, FilterNFT) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ 
"account": "{}", "type": "nft_page" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1486,17 +1534,19 @@ TEST_F(RPCAccountObjectsHandlerTest, NFTZeroMarkerNotAffectOtherMarker) } EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {}, "marker": "{},{}" }})JSON", - kACCOUNT, - kLIMIT, - ripple::strHex(ripple::uint256{beast::zero}), - std::numeric_limits::max() - )); + kACCOUNT, + kLIMIT, + ripple::strHex(ripple::uint256{beast::zero}), + std::numeric_limits::max() + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1566,14 +1616,16 @@ TEST_F(RPCAccountObjectsHandlerTest, LimitLessThanMin) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountObjectsHandler::kLIMIT_MIN - 1 - )); + kACCOUNT, + AccountObjectsHandler::kLIMIT_MIN - 1 + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1642,14 +1694,16 @@ TEST_F(RPCAccountObjectsHandlerTest, LimitMoreThanMax) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountObjectsHandler::kLIMIT_MAX + 1 - )); + kACCOUNT, + AccountObjectsHandler::kLIMIT_MAX + 1 + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1684,13 +1738,15 @@ TEST_F(RPCAccountObjectsHandlerTest, TypeFilterMPTIssuanceType) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "type": "mpt_issuance" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { @@ -1733,13 +1789,15 @@ TEST_F(RPCAccountObjectsHandlerTest, TypeFilterMPTokenType) EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "type": "mptoken" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountObjectsHandler{backend_}}; runSpawn([&](auto yield) { diff --git a/tests/unit/rpc/handlers/AccountOffersTests.cpp b/tests/unit/rpc/handlers/AccountOffersTests.cpp index faa88495..b1a9bfeb 100644 --- a/tests/unit/rpc/handlers/AccountOffersTests.cpp +++ b/tests/unit/rpc/handlers/AccountOffersTests.cpp @@ -176,14 +176,16 @@ TEST_F(RPCAccountOffersHandlerTest, LedgerNotFoundViaHash) ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); auto const handler = 
AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -202,14 +204,16 @@ TEST_F(RPCAccountOffersHandlerTest, LedgerNotFoundViaStringIndex) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(kSEQ, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "{}" }})JSON", - kACCOUNT, - kSEQ - )); + kACCOUNT, + kSEQ + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -228,14 +232,16 @@ TEST_F(RPCAccountOffersHandlerTest, LedgerNotFoundViaIntIndex) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(kSEQ, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": {} }})JSON", - kACCOUNT, - kSEQ - )); + kACCOUNT, + kSEQ + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -255,12 +261,14 @@ TEST_F(RPCAccountOffersHandlerTest, AccountNotFound) ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -332,12 +340,14 @@ TEST_F(RPCAccountOffersHandlerTest, DefaultParams) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -380,13 +390,15 @@ TEST_F(RPCAccountOffersHandlerTest, Limit) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": 10 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -433,15 +445,17 @@ TEST_F(RPCAccountOffersHandlerTest, Marker) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{},{}" }})JSON", - kACCOUNT, - kINDEX1, - startPage - )); + kACCOUNT, + kINDEX1, + startPage + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = 
handler.process(kINPUT, Context{yield}); @@ -469,15 +483,17 @@ TEST_F(RPCAccountOffersHandlerTest, MarkerNotExists) ON_CALL(*backend_, doFetchLedgerObject(hintIndex, kLEDGER_SEQ, _)).WillByDefault(Return(std::nullopt)); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "marker": "{},{}" }})JSON", - kACCOUNT, - kINDEX1, - startPage - )); + kACCOUNT, + kINDEX1, + startPage + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -527,14 +543,16 @@ TEST_F(RPCAccountOffersHandlerTest, LimitLessThanMin) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountOffersHandler::kLIMIT_MIN - 1 - )); + kACCOUNT, + AccountOffersHandler::kLIMIT_MIN - 1 + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -582,14 +600,16 @@ TEST_F(RPCAccountOffersHandlerTest, LimitMoreThanMax) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "limit": {} }})JSON", - kACCOUNT, - AccountOffersHandler::kLIMIT_MAX + 1 - )); + kACCOUNT, + AccountOffersHandler::kLIMIT_MAX + 1 + ) + ); auto const handler = AnyHandler{AccountOffersHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); diff --git a/tests/unit/rpc/handlers/AccountTxTests.cpp b/tests/unit/rpc/handlers/AccountTxTests.cpp index 7b17803f..4d39a68e 100644 --- a/tests/unit/rpc/handlers/AccountTxTests.cpp +++ b/tests/unit/rpc/handlers/AccountTxTests.cpp @@ -512,17 +512,19 @@ TEST_F(RPCAccountTxHandlerTest, IndexSpecificForwardTrue) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": true }})JSON", - kACCOUNT, - kMIN_SEQ + 1, - kMAX_SEQ - 1 - )); + kACCOUNT, + kMIN_SEQ + 1, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -554,17 +556,19 @@ TEST_F(RPCAccountTxHandlerTest, IndexSpecificForwardFalse) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kACCOUNT, - kMIN_SEQ + 1, - kMAX_SEQ - 1 - )); + kACCOUNT, + kMIN_SEQ + 1, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ 
-596,17 +600,19 @@ TEST_F(RPCAccountTxHandlerTest, IndexNotSpecificForwardTrue) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": true }})JSON", - kACCOUNT, - -1, - -1 - )); + kACCOUNT, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -638,17 +644,19 @@ TEST_F(RPCAccountTxHandlerTest, IndexNotSpecificForwardFalse) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kACCOUNT, - -1, - -1 - )); + kACCOUNT, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -678,17 +686,19 @@ TEST_F(RPCAccountTxHandlerTest, BinaryTrue) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "binary": true }})JSON", - kACCOUNT, - -1, - -1 - )); + kACCOUNT, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -732,17 +742,19 @@ TEST_F(RPCAccountTxHandlerTest, BinaryTrueV2) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "binary": true }})JSON", - kACCOUNT, - -1, - -1 - )); + kACCOUNT, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{.yield = yield, .apiVersion = 2u}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -784,8 +796,9 @@ TEST_F(RPCAccountTxHandlerTest, LimitAndMarker) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, @@ -793,10 +806,11 @@ TEST_F(RPCAccountTxHandlerTest, LimitAndMarker) "forward": false, "marker": {{"ledger": 10, "seq": 11}} }})JSON", - kACCOUNT, - -1, - -1 - )); + kACCOUNT, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -818,18 +832,20 @@ TEST_F(RPCAccountTxHandlerTest, LimitIsCapped) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ 
"account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "limit": 100000, "forward": false }})JSON", - kACCOUNT, - -1, - -1 - )); + kACCOUNT, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -850,19 +866,21 @@ TEST_F(RPCAccountTxHandlerTest, LimitAllowedUpToCap) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "limit": {}, "forward": false }})JSON", - kACCOUNT, - -1, - -1, - AccountTxHandler::kLIMIT_MAX - 1 - )); + kACCOUNT, + -1, + -1, + AccountTxHandler::kLIMIT_MAX - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -898,14 +916,16 @@ TEST_F(RPCAccountTxHandlerTest, SpecificLedgerIndex) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": {} }})JSON", - kACCOUNT, - kMAX_SEQ - 1 - )); + kACCOUNT, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -924,14 +944,16 @@ TEST_F(RPCAccountTxHandlerTest, SpecificNonexistLedgerIntIndex) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": {} }})JSON", - kACCOUNT, - kMAX_SEQ - 1 - )); + kACCOUNT, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -947,14 +969,16 @@ TEST_F(RPCAccountTxHandlerTest, SpecificNonexistLedgerStringIndex) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "{}" }})JSON", - kACCOUNT, - kMAX_SEQ - 1 - )); + kACCOUNT, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -989,14 +1013,16 @@ TEST_F(RPCAccountTxHandlerTest, SpecificLedgerHash) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -1033,13 +1059,15 @@ TEST_F(RPCAccountTxHandlerTest, SpecificLedgerIndexValidated) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, 
mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "validated" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -1071,17 +1099,19 @@ TEST_F(RPCAccountTxHandlerTest, TxLessThanMinSeq) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kACCOUNT, - kMIN_SEQ + 2, - kMAX_SEQ - 1 - )); + kACCOUNT, + kMIN_SEQ + 2, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -1113,17 +1143,19 @@ TEST_F(RPCAccountTxHandlerTest, TxLargerThanMaxSeq) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kACCOUNT, - kMIN_SEQ + 1, - kMAX_SEQ - 2 - )); + kACCOUNT, + kMIN_SEQ + 1, + kMAX_SEQ - 2 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("account").as_string(), kACCOUNT); @@ -1355,18 +1387,20 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v1) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false, "marker": {{"ledger": 10, "seq": 11}} }})JSON", - kACCOUNT, - -1, - -1 - )); + kACCOUNT, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{.yield = yield, .apiVersion = 1u}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(out)); @@ -1603,18 +1637,20 @@ TEST_F(RPCAccountTxHandlerTest, NFTTxs_API_v2) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{AccountTxHandler{backend_, mockETLServicePtr_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false, "marker": {{"ledger": 10, "seq": 11}} }})JSON", - kACCOUNT, - -1, - -1 - )); + kACCOUNT, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{.yield = yield, .apiVersion = 2u}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(out)); diff --git a/tests/unit/rpc/handlers/BookChangesTests.cpp b/tests/unit/rpc/handlers/BookChangesTests.cpp index 54968ba5..681513ca 100644 --- a/tests/unit/rpc/handlers/BookChangesTests.cpp +++ b/tests/unit/rpc/handlers/BookChangesTests.cpp @@ -160,12 +160,14 @@ TEST_F(RPCBookChangesHandlerTest, LedgerNonExistViaHash) EXPECT_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillOnce(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static 
auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "ledger_hash": "{}" }})JSON", - kLEDGER_HASH - )); + kLEDGER_HASH + ) + ); auto const handler = AnyHandler{BookChangesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); diff --git a/tests/unit/rpc/handlers/BookOffersTests.cpp b/tests/unit/rpc/handlers/BookOffersTests.cpp index 1d0e52e7..26f2f79d 100644 --- a/tests/unit/rpc/handlers/BookOffersTests.cpp +++ b/tests/unit/rpc/handlers/BookOffersTests.cpp @@ -1152,11 +1152,12 @@ generateNormalPathBookOffersTestBundles() }, .ledgerObjectCalls = 6, .mockedOffers = - std::vector{// After offer1, balance is 30 - 2*10 = 10 - gets10USDPays20XRPOffer, - // offer2 not fully funded, balance is 10, rate is 2, so only - // gets 5 - gets10USDPays20XRPOffer + std::vector{ + // After offer1, balance is 30 - 2*10 = 10 + gets10USDPays20XRPOffer, + // offer2 not fully funded, balance is 10, rate is 2, so only + // gets 5 + gets10USDPays20XRPOffer }, .expectedJson = fmt::format( R"JSON({{ @@ -1495,8 +1496,9 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaIntSequence) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(30, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "ledger_index": 30, "taker_gets": {{ @@ -1508,8 +1510,9 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaIntSequence) "issuer": "{}" }} }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{BookOffersHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](boost::asio::yield_context yield) { auto const output = handler.process(kINPUT, Context{.yield = yield}); @@ -1526,8 +1529,9 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaSequence) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(30, _)).WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "ledger_index": "30", "taker_gets": {{ @@ -1539,8 +1543,9 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaSequence) "issuer": "{}" }} }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{BookOffersHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](boost::asio::yield_context yield) { auto const output = handler.process(kINPUT, Context{.yield = yield}); @@ -1558,8 +1563,9 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaHash) ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillByDefault(Return(std::optional{})); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "ledger_hash": "{}", "taker_gets": {{ @@ -1571,9 +1577,10 @@ TEST_F(RPCBookOffersHandlerTest, LedgerNonExistViaHash) "issuer": "{}" }} }})JSON", - kLEDGER_HASH, - kACCOUNT - )); + kLEDGER_HASH, + kACCOUNT + ) + ); auto const handler = AnyHandler{BookOffersHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](boost::asio::yield_context yield) { auto const output = handler.process(kINPUT, Context{.yield = yield}); @@ -1635,8 +1642,9 @@ TEST_F(RPCBookOffersHandlerTest, Limit) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + 
fmt::format( + R"JSON({{ "taker_gets": {{ "currency": "XRP" @@ -1648,8 +1656,9 @@ TEST_F(RPCBookOffersHandlerTest, Limit) }}, "limit": 5 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const handler = AnyHandler{BookOffersHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](boost::asio::yield_context yield) { auto const output = handler.process(kINPUT, Context{.yield = yield}); @@ -1709,8 +1718,9 @@ TEST_F(RPCBookOffersHandlerTest, LimitMoreThanMax) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "taker_gets": {{ "currency": "XRP" @@ -1722,9 +1732,10 @@ TEST_F(RPCBookOffersHandlerTest, LimitMoreThanMax) }}, "limit": {} }})JSON", - kACCOUNT, - BookOffersHandler::kLIMIT_MAX + 1 - )); + kACCOUNT, + BookOffersHandler::kLIMIT_MAX + 1 + ) + ); auto const handler = AnyHandler{BookOffersHandler{backend_, mockAmendmentCenterPtr_}}; runSpawn([&](boost::asio::yield_context yield) { auto const output = handler.process(kINPUT, Context{.yield = yield}); diff --git a/tests/unit/rpc/handlers/DepositAuthorizedTests.cpp b/tests/unit/rpc/handlers/DepositAuthorizedTests.cpp index 63d4a000..a319a2da 100644 --- a/tests/unit/rpc/handlers/DepositAuthorizedTests.cpp +++ b/tests/unit/rpc/handlers/DepositAuthorizedTests.cpp @@ -240,16 +240,18 @@ TEST_F(RPCDepositAuthorizedTest, LedgerNotExistViaIntSequence) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_index": {} }})JSON", - kACCOUNT, - kACCOUNT2, - kRANGE_MAX - )); + kACCOUNT, + kACCOUNT2, + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -267,16 +269,18 @@ TEST_F(RPCDepositAuthorizedTest, LedgerNotExistViaStringSequence) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_index": "{}" }})JSON", - kACCOUNT, - kACCOUNT2, - kRANGE_MAX - )); + kACCOUNT, + kACCOUNT2, + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -294,16 +298,18 @@ TEST_F(RPCDepositAuthorizedTest, LedgerNotExistViaHash) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -324,16 +330,18 @@ TEST_F(RPCDepositAuthorizedTest, SourceAccountDoesNotExist) ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kACCOUNT2, - 
kLEDGER_HASH - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -361,16 +369,18 @@ TEST_F(RPCDepositAuthorizedTest, DestinationAccountDoesNotExist) EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -405,16 +415,18 @@ TEST_F(RPCDepositAuthorizedTest, AccountsAreEqual) ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(accountRoot.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -451,16 +463,18 @@ TEST_F(RPCDepositAuthorizedTest, DifferentAccountsNoDepositAuthFlag) .WillByDefault(Return(account2Root.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -498,16 +512,18 @@ TEST_F(RPCDepositAuthorizedTest, DifferentAccountsWithDepositAuthFlagReturnsFals .WillByDefault(Return(account2Root.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -545,16 +561,18 @@ TEST_F(RPCDepositAuthorizedTest, DifferentAccountsWithDepositAuthFlagReturnsTrue .WillByDefault(Return(account2Root.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -603,18 +621,20 @@ TEST_F(RPCDepositAuthorizedTest, CredentialAcceptedAndNotExpiredReturnsTrue) .WillByDefault(Return(credential.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(4); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", 
"destination_account": "{}", "ledger_hash": "{}", "credentials": ["{}"] }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH, - ripple::strHex(credentialIndex) - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH, + ripple::strHex(credentialIndex) + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -651,18 +671,20 @@ TEST_F(RPCDepositAuthorizedTest, CredentialNotAuthorizedReturnsFalse) EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}", "credentials": ["{}"] }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH, - ripple::strHex(credentialIndex) - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH, + ripple::strHex(credentialIndex) + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -707,18 +729,20 @@ TEST_F(RPCDepositAuthorizedTest, CredentialExpiredReturnsFalse) EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}", "credentials": ["{}"] }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH, - ripple::strHex(credentialIndex) - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH, + ripple::strHex(credentialIndex) + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -757,19 +781,21 @@ TEST_F(RPCDepositAuthorizedTest, DuplicateCredentialsReturnsFalse) EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}", "credentials": ["{}", "{}"] }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH, - ripple::strHex(credentialIndex), - ripple::strHex(credentialIndex) - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH, + ripple::strHex(credentialIndex), + ripple::strHex(credentialIndex) + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -799,17 +825,19 @@ TEST_F(RPCDepositAuthorizedTest, NoElementsInCredentialsReturnsFalse) EXPECT_CALL(*backend_, doFetchLedgerObject).Times(2); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}", "credentials": [] }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -850,21 +878,24 @@ TEST_F(RPCDepositAuthorizedTest, MoreThanMaxNumberOfCredentialsReturnsFalse) std::vector credentials(9, ripple::strHex(credentialIndex)); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}", "credentials": [{}] }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH, - fmt::join( - credentials | std::views::transform([](std::string const& cred) { return fmt::format("\"{}\"", cred); }), - ", " + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH, + fmt::join( + credentials | + std::views::transform([](std::string 
const& cred) { return fmt::format("\"{}\"", cred); }), + ", " + ) ) - )); + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; @@ -904,18 +935,20 @@ TEST_F(RPCDepositAuthorizedTest, DifferentSubjectAccountForCredentialReturnsFals .WillByDefault(Return(credential.getSerializer().peekData())); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "source_account": "{}", "destination_account": "{}", "ledger_hash": "{}", "credentials": ["{}"] }})JSON", - kACCOUNT, - kACCOUNT2, - kLEDGER_HASH, - ripple::strHex(credentialIndex) - )); + kACCOUNT, + kACCOUNT2, + kLEDGER_HASH, + ripple::strHex(credentialIndex) + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{DepositAuthorizedHandler{backend_}}; diff --git a/tests/unit/rpc/handlers/FeatureTests.cpp b/tests/unit/rpc/handlers/FeatureTests.cpp index afbb06d6..f50314cb 100644 --- a/tests/unit/rpc/handlers/FeatureTests.cpp +++ b/tests/unit/rpc/handlers/FeatureTests.cpp @@ -181,12 +181,14 @@ TEST_F(RPCFeatureHandlerTest, LedgerNotExistViaIntSequence) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{FeatureHandler{backend_, mockAmendmentCenterPtr_}}; - auto const req = boost::json::parse(fmt::format( - R"JSON({{ + auto const req = boost::json::parse( + fmt::format( + R"JSON({{ "ledger_index": {} }})JSON", - kRANGE_MAX - )); + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -201,12 +203,14 @@ TEST_F(RPCFeatureHandlerTest, LedgerNotExistViaStringSequence) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{FeatureHandler{backend_, mockAmendmentCenterPtr_}}; - auto const req = boost::json::parse(fmt::format( - R"JSON({{ + auto const req = boost::json::parse( + fmt::format( + R"JSON({{ "ledger_index": "{}" }})JSON", - kRANGE_MAX - )); + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -222,12 +226,14 @@ TEST_F(RPCFeatureHandlerTest, LedgerNotExistViaHash) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{FeatureHandler{backend_, mockAmendmentCenterPtr_}}; - auto const req = boost::json::parse(fmt::format( - R"JSON({{ + auto const req = boost::json::parse( + fmt::format( + R"JSON({{ "ledger_hash": "{}" }})JSON", - kLEDGER_HASH - )); + kLEDGER_HASH + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); diff --git a/tests/unit/rpc/handlers/GatewayBalancesTests.cpp b/tests/unit/rpc/handlers/GatewayBalancesTests.cpp index 615c56c9..ccdfe094 100644 --- a/tests/unit/rpc/handlers/GatewayBalancesTests.cpp +++ b/tests/unit/rpc/handlers/GatewayBalancesTests.cpp @@ -275,14 +275,16 @@ TEST_F(RPCGatewayBalancesHandlerTest, LedgerNotFoundViaStringIndex) auto const handler = AnyHandler{GatewayBalancesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process( - json::parse(fmt::format( - R"JSON({{ + json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": "{}" }})JSON", - kACCOUNT, - seq - )), + kACCOUNT, + seq + ) + ), Context{yield} ); ASSERT_FALSE(output); @@ -301,14 +303,16 @@ TEST_F(RPCGatewayBalancesHandlerTest, LedgerNotFoundViaIntIndex) auto const handler = 
AnyHandler{GatewayBalancesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process( - json::parse(fmt::format( - R"JSON({{ + json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_index": {} }})JSON", - kACCOUNT, - seq - )), + kACCOUNT, + seq + ) + ), Context{yield} ); ASSERT_FALSE(output); @@ -326,14 +330,16 @@ TEST_F(RPCGatewayBalancesHandlerTest, LedgerNotFoundViaHash) auto const handler = AnyHandler{GatewayBalancesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process( - json::parse(fmt::format( - R"JSON({{ + json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )), + kACCOUNT, + kLEDGER_HASH + ) + ), Context{yield} ); ASSERT_FALSE(output); @@ -356,12 +362,14 @@ TEST_F(RPCGatewayBalancesHandlerTest, AccountNotFound) auto const handler = AnyHandler{GatewayBalancesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process( - json::parse(fmt::format( - R"JSON({{ + json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )), + kACCOUNT + ) + ), Context{yield} ); ASSERT_FALSE(output); @@ -409,14 +417,16 @@ TEST_P(NormalPathTest, CheckOutput) auto const handler = AnyHandler{GatewayBalancesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process( - json::parse(fmt::format( - R"JSON({{ + json::parse( + fmt::format( + R"JSON({{ "account": "{}", {} }})JSON", - kACCOUNT, - bundle.hotwallet - )), + kACCOUNT, + bundle.hotwallet + ) + ), Context{yield} ); ASSERT_TRUE(output); @@ -547,8 +557,10 @@ generateNormalPathTestBundles() NormalTestBundle{ .testName = "HighID", .mockedDir = createOwnerDirLedgerObject( - {ripple::uint256{kINDEX2}, ripple::uint256{kINDEX2}, ripple::uint256{kINDEX2}, ripple::uint256{kINDEX2} - }, + {ripple::uint256{kINDEX2}, + ripple::uint256{kINDEX2}, + ripple::uint256{kINDEX2}, + ripple::uint256{kINDEX2}}, kINDEX1 ), .mockedObjects = @@ -680,12 +692,14 @@ TEST_P(EscrowTest, CheckEscrowOutput) auto const handler = AnyHandler{GatewayBalancesHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process( - json::parse(fmt::format( - R"JSON({{ + json::parse( + fmt::format( + R"JSON({{ "account": "{}" }})JSON", - kACCOUNT - )), + kACCOUNT + ) + ), Context{yield} ); ASSERT_TRUE(output); diff --git a/tests/unit/rpc/handlers/GetAggregatePriceTests.cpp b/tests/unit/rpc/handlers/GetAggregatePriceTests.cpp index 38c5b812..4ed06942 100644 --- a/tests/unit/rpc/handlers/GetAggregatePriceTests.cpp +++ b/tests/unit/rpc/handlers/GetAggregatePriceTests.cpp @@ -457,8 +457,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, LedgerNotFound) { EXPECT_CALL(*backend_, fetchLedgerBySequence(kRANGE_MAX, _)).WillOnce(Return(std::nullopt)); constexpr auto kDOCUMENT_ID = 1; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "oracles": @@ -469,9 +470,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, LedgerNotFound) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); @@ -491,8 +493,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntrySinglePriceData) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID, kTX1, 1e3, 2); // 10 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = 
json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "oracles": @@ -503,12 +506,14 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntrySinglePriceData) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "10", @@ -521,9 +526,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntrySinglePriceData) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -540,8 +546,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryStrOracleDocumentId) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID, kTX1, 1e3, 2); // 10 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "oracles": @@ -552,12 +559,14 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryStrOracleDocumentId) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "10", @@ -570,9 +579,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryStrOracleDocumentId) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -589,8 +599,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, PreviousTxNotFound) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID, kTX1, 1e3, 2); // 10 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "JPY", "quote_asset": "XRP", "oracles": @@ -601,12 +612,14 @@ TEST_F(RPCGetAggregatePriceHandlerTest, PreviousTxNotFound) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "10", @@ -619,9 +632,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, PreviousTxNotFound) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -646,16 +660,18 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NewLedgerObjectHasNoPricePair) 123, 1, 4321u, - createPriceDataSeries({createOraclePriceData(1e3, ripple::to_currency("EUR"), ripple::to_currency("XRP"), 2) - }), + createPriceDataSeries( + {createOraclePriceData(1e3, ripple::to_currency("EUR"), ripple::to_currency("XRP"), 2)} + ), kINDEX, true, kTX2 ))); auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "JPY", "quote_asset": "XRP", "oracles": @@ -666,12 +682,14 @@ 
TEST_F(RPCGetAggregatePriceHandlerTest, NewLedgerObjectHasNoPricePair) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "10", @@ -684,9 +702,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NewLedgerObjectHasNoPricePair) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -710,8 +729,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesOdd) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID3, kTX1, 3e3, 1); // 300 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "oracles": @@ -730,16 +750,18 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesOdd) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID1, - kACCOUNT, - kDOCUMENT_ID2, - kACCOUNT, - kDOCUMENT_ID3 - )); + kACCOUNT, + kDOCUMENT_ID1, + kACCOUNT, + kDOCUMENT_ID2, + kACCOUNT, + kDOCUMENT_ID3 + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "110", @@ -752,9 +774,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesOdd) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -778,8 +801,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesEven) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID3, kTX1, 3e3, 1); // 300 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "oracles": @@ -802,18 +826,20 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesEven) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID1, - kACCOUNT, - kDOCUMENT_ID2, - kACCOUNT, - kDOCUMENT_ID3, - kACCOUNT, - kDOCUMENT_ID4 - )); + kACCOUNT, + kDOCUMENT_ID1, + kACCOUNT, + kDOCUMENT_ID2, + kACCOUNT, + kDOCUMENT_ID3, + kACCOUNT, + kDOCUMENT_ID4 + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "92.5", @@ -826,9 +852,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryMultipleOraclesEven) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -852,8 +879,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryTrim) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID3, kTX1, 3e3, 1); // 300 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "trim": {}, @@ -877,19 +905,21 @@ TEST_F(RPCGetAggregatePriceHandlerTest, 
OracleLedgerEntryTrim) }} ] }})JSON", - 25, - kACCOUNT, - kDOCUMENT_ID1, - kACCOUNT, - kDOCUMENT_ID2, - kACCOUNT, - kDOCUMENT_ID3, - kACCOUNT, - kDOCUMENT_ID4 - )); + 25, + kACCOUNT, + kDOCUMENT_ID1, + kACCOUNT, + kDOCUMENT_ID2, + kACCOUNT, + kDOCUMENT_ID3, + kACCOUNT, + kDOCUMENT_ID4 + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "92.5", @@ -908,9 +938,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, OracleLedgerEntryTrim) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -928,8 +959,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NoOracleEntryFound) EXPECT_CALL(*backend_, doFetchLedgerObject(oracleIndex, kRANGE_MAX, _)).WillOnce(Return(std::nullopt)); auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "oracles": @@ -940,9 +972,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NoOracleEntryFound) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); @@ -962,8 +995,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NoMatchAssetPair) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID, kTX1, 1e3, 2); // 10 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "JPY", "quote_asset": "XRP", "oracles": @@ -974,9 +1008,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NoMatchAssetPair) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); @@ -1006,8 +1041,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIsZero) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID3, kTX1, 3e3, 1, kTIMESTAMP4); // 300 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "time_threshold": {}, @@ -1031,19 +1067,21 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIsZero) }} ] }})JSON", - 0, - kACCOUNT, - kDOCUMENT_ID1, - kACCOUNT, - kDOCUMENT_ID2, - kACCOUNT, - kDOCUMENT_ID3, - kACCOUNT, - kDOCUMENT_ID4 - )); + 0, + kACCOUNT, + kDOCUMENT_ID1, + kACCOUNT, + kDOCUMENT_ID2, + kACCOUNT, + kDOCUMENT_ID3, + kACCOUNT, + kDOCUMENT_ID4 + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "10", @@ -1056,10 +1094,11 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIsZero) "ledger_hash": "{}", "validated": true }})JSON", - kTIMESTAMP1, - kRANGE_MAX, - kLEDGER_HASH - )); + kTIMESTAMP1, + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -1086,8 +1125,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, ValidTimeThreshold) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID3, kTX1, 3e3, 1, kTIMESTAMP4); // 300 auto const handler = 
AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "time_threshold": {}, @@ -1111,19 +1151,21 @@ TEST_F(RPCGetAggregatePriceHandlerTest, ValidTimeThreshold) }} ] }})JSON", - kTIMESTAMP1 - kTIMESTAMP2, - kACCOUNT, - kDOCUMENT_ID1, - kACCOUNT, - kDOCUMENT_ID2, - kACCOUNT, - kDOCUMENT_ID3, - kACCOUNT, - kDOCUMENT_ID4 - )); + kTIMESTAMP1 - kTIMESTAMP2, + kACCOUNT, + kDOCUMENT_ID1, + kACCOUNT, + kDOCUMENT_ID2, + kACCOUNT, + kDOCUMENT_ID3, + kACCOUNT, + kDOCUMENT_ID4 + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "15", @@ -1136,10 +1178,11 @@ TEST_F(RPCGetAggregatePriceHandlerTest, ValidTimeThreshold) "ledger_hash": "{}", "validated": true }})JSON", - kTIMESTAMP1, - kRANGE_MAX, - kLEDGER_HASH - )); + kTIMESTAMP1, + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -1166,8 +1209,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdTooLong) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID3, kTX1, 3e3, 1, kTIMESTAMP4); // 300 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "time_threshold": {}, @@ -1191,19 +1235,21 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdTooLong) }} ] }})JSON", - kTIMESTAMP1 + 1, - kACCOUNT, - kDOCUMENT_ID1, - kACCOUNT, - kDOCUMENT_ID2, - kACCOUNT, - kDOCUMENT_ID3, - kACCOUNT, - kDOCUMENT_ID4 - )); + kTIMESTAMP1 + 1, + kACCOUNT, + kDOCUMENT_ID1, + kACCOUNT, + kDOCUMENT_ID2, + kACCOUNT, + kDOCUMENT_ID3, + kACCOUNT, + kDOCUMENT_ID4 + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "92.5", @@ -1216,9 +1262,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdTooLong) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -1245,8 +1292,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIncludeOldest) mockLedgerObject(*backend_, kACCOUNT, kDOCUMENT_ID3, kTX1, 3e3, 1, kTIMESTAMP4); // 300 auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "USD", "quote_asset": "XRP", "time_threshold": {}, @@ -1270,19 +1318,21 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIncludeOldest) }} ] }})JSON", - kTIMESTAMP4 - kTIMESTAMP1, - kACCOUNT, - kDOCUMENT_ID1, - kACCOUNT, - kDOCUMENT_ID2, - kACCOUNT, - kDOCUMENT_ID3, - kACCOUNT, - kDOCUMENT_ID4 - )); + kTIMESTAMP4 - kTIMESTAMP1, + kACCOUNT, + kDOCUMENT_ID1, + kACCOUNT, + kDOCUMENT_ID2, + kACCOUNT, + kDOCUMENT_ID3, + kACCOUNT, + kDOCUMENT_ID4 + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "92.5", @@ -1295,9 +1345,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, TimeThresholdIncludeOldest) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - 
kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -1322,16 +1373,18 @@ TEST_F(RPCGetAggregatePriceHandlerTest, FromTx) 123, 1, 4321u, - createPriceDataSeries({createOraclePriceData(1e3, ripple::to_currency("JPY"), ripple::to_currency("XRP"), 2) - }), + createPriceDataSeries( + {createOraclePriceData(1e3, ripple::to_currency("JPY"), ripple::to_currency("XRP"), 2)} + ), ripple::to_string(oracleIndex), false, kTX1 ))); auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "JPY", "quote_asset": "XRP", "oracles": @@ -1342,12 +1395,14 @@ TEST_F(RPCGetAggregatePriceHandlerTest, FromTx) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); - auto const expected = json::parse(fmt::format( - R"JSON({{ + auto const expected = json::parse( + fmt::format( + R"JSON({{ "entire_set": {{ "mean": "10", @@ -1360,9 +1415,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, FromTx) "ledger_hash": "{}", "validated": true }})JSON", - kRANGE_MAX, - kLEDGER_HASH - )); + kRANGE_MAX, + kLEDGER_HASH + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -1385,8 +1441,9 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NotFoundInTxHistory) 123, 1, 4321u, - createPriceDataSeries({createOraclePriceData(1e3, ripple::to_currency("EUR"), ripple::to_currency("XRP"), 2) - }), + createPriceDataSeries( + {createOraclePriceData(1e3, ripple::to_currency("EUR"), ripple::to_currency("XRP"), 2)} + ), ripple::to_string(oracleIndex), false, kTX2 @@ -1399,16 +1456,18 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NotFoundInTxHistory) 123, 1, 4321u, - createPriceDataSeries({createOraclePriceData(1e3, ripple::to_currency("EUR"), ripple::to_currency("XRP"), 2) - }), + createPriceDataSeries( + {createOraclePriceData(1e3, ripple::to_currency("EUR"), ripple::to_currency("XRP"), 2)} + ), ripple::to_string(oracleIndex), false, kTX2 ))); auto const handler = AnyHandler{GetAggregatePriceHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "base_asset": "JPY", "quote_asset": "XRP", "oracles": @@ -1419,9 +1478,10 @@ TEST_F(RPCGetAggregatePriceHandlerTest, NotFoundInTxHistory) }} ] }})JSON", - kACCOUNT, - kDOCUMENT_ID - )); + kACCOUNT, + kDOCUMENT_ID + ) + ); runSpawn([&](auto yield) { auto const output = handler.process(req, Context{yield}); diff --git a/tests/unit/rpc/handlers/LedgerDataTests.cpp b/tests/unit/rpc/handlers/LedgerDataTests.cpp index b1e87700..1aecd1ac 100644 --- a/tests/unit/rpc/handlers/LedgerDataTests.cpp +++ b/tests/unit/rpc/handlers/LedgerDataTests.cpp @@ -186,12 +186,14 @@ TEST_F(RPCLedgerDataHandlerTest, LedgerNotExistViaIntSequence) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerDataHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "ledger_index": {} }})JSON", - kRANGE_MAX - )); + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -207,12 +209,14 @@ TEST_F(RPCLedgerDataHandlerTest, LedgerNotExistViaStringSequence) runSpawn([&, this](auto yield) { auto const handler = 
AnyHandler{LedgerDataHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "ledger_index": "{}" }})JSON", - kRANGE_MAX - )); + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -228,12 +232,14 @@ TEST_F(RPCLedgerDataHandlerTest, LedgerNotExistViaHash) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerDataHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "ledger_hash": "{}" }})JSON", - kLEDGER_HASH - )); + kLEDGER_HASH + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -254,12 +260,14 @@ TEST_F(RPCLedgerDataHandlerTest, MarkerNotExist) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerDataHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "marker": "{}" }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -588,13 +596,15 @@ TEST_F(RPCLedgerDataHandlerTest, Marker) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerDataHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "limit": 10, "marker": "{}" }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_FALSE(output.result->as_object().contains("ledger")); @@ -621,7 +631,8 @@ TEST_F(RPCLedgerDataHandlerTest, DiffMarker) auto const line = createRippleStateLedgerObject("USD", kACCOUNT2, 10, kACCOUNT, 100, kACCOUNT2, 200, kTXN_ID, 123); bbs.push_back(line.getSerializer().peekData()); - los.emplace_back(LedgerObject{.key = ripple::uint256{kINDEX2}, .blob = Blob{}} + los.emplace_back( + LedgerObject{.key = ripple::uint256{kINDEX2}, .blob = Blob{}} ); // NOLINT(modernize-use-emplace) } ON_CALL(*backend_, fetchLedgerDiff(kRANGE_MAX, _)).WillByDefault(Return(los)); @@ -631,14 +642,16 @@ TEST_F(RPCLedgerDataHandlerTest, DiffMarker) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerDataHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "limit": 10, "marker": {}, "out_of_order": true }})JSON", - kRANGE_MAX - )); + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_FALSE(output.result->as_object().contains("ledger")); @@ -712,13 +725,15 @@ TEST_F(RPCLedgerDataHandlerTest, BinaryLimitMoreThanMax) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerDataHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "limit": {}, "binary": true }})JSON", - LedgerDataHandler::kLIMIT_BINARY + 1 - )); + LedgerDataHandler::kLIMIT_BINARY + 1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_TRUE(output.result->as_object().contains("ledger")); @@ -753,13 +768,15 @@ TEST_F(RPCLedgerDataHandlerTest, JsonLimitMoreThanMax) runSpawn([&, this](auto yield) { auto const handler = 
AnyHandler{LedgerDataHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "limit": {}, "binary": false }})JSON", - LedgerDataHandler::kLIMIT_JSON + 1 - )); + LedgerDataHandler::kLIMIT_JSON + 1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_TRUE(output.result->as_object().contains("ledger")); diff --git a/tests/unit/rpc/handlers/LedgerEntryTests.cpp b/tests/unit/rpc/handlers/LedgerEntryTests.cpp index af522450..136002ab 100644 --- a/tests/unit/rpc/handlers/LedgerEntryTests.cpp +++ b/tests/unit/rpc/handlers/LedgerEntryTests.cpp @@ -2415,12 +2415,14 @@ TEST_P(IndexTest, InvalidIndexUint256) auto const index = GetParam(); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "{}": "invalid" }})JSON", - index - )); + index + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -2435,12 +2437,14 @@ TEST_P(IndexTest, InvalidIndexNotString) auto const index = GetParam(); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "{}": 123 }})JSON", - index - )); + index + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -2462,12 +2466,14 @@ TEST_F(RPCLedgerEntryTest, LedgerEntryNotFound) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "account_root": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -3283,12 +3289,14 @@ TEST_F(RPCLedgerEntryTest, BinaryFalse) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "payment_channel": "{}" }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -3327,17 +3335,19 @@ TEST_F(RPCLedgerEntryTest, Vault_BinaryFalse) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "binary": false, "vault": {{ "owner": "{}", "seq": {} }} }})JSON", - kACCOUNT, - kRANGE_MAX - )); + kACCOUNT, + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -3359,12 +3369,14 @@ TEST_F(RPCLedgerEntryTest, UnexpectedLedgerType) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "check": "{}" }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -3378,14 +3390,16 @@ TEST_F(RPCLedgerEntryTest, LedgerNotExistViaIntSequence) 
runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "check": "{}", "ledger_index": {} }})JSON", - kINDEX1, - kRANGE_MAX - )); + kINDEX1, + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -3400,14 +3414,16 @@ TEST_F(RPCLedgerEntryTest, LedgerNotExistViaStringSequence) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "check": "{}", "ledger_index": "{}" }})JSON", - kINDEX1, - kRANGE_MAX - )); + kINDEX1, + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -3422,14 +3438,16 @@ TEST_F(RPCLedgerEntryTest, LedgerNotExistViaHash) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "check": "{}", "ledger_hash": "{}" }})JSON", - kINDEX1, - kLEDGER_HASH - )); + kINDEX1, + kLEDGER_HASH + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -3514,13 +3532,15 @@ TEST_F(RPCLedgerEntryTest, BinaryFalseIncludeDeleted) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "index": "{}", "include_deleted": true }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -3562,13 +3582,15 @@ TEST_F(RPCLedgerEntryTest, LedgerEntryDeleted) .WillOnce(Return(offer.getSerializer().peekData())); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "index": "{}", "include_deleted": true }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -3590,13 +3612,15 @@ TEST_F(RPCLedgerEntryTest, LedgerEntryNotExist) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "index": "{}", "include_deleted": true }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -3641,13 +3665,15 @@ TEST_F(RPCLedgerEntryTest, BinaryFalseIncludeDeleteFalse) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "payment_channel": "{}", "include_deleted": false }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); 
EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -3701,13 +3727,15 @@ TEST_F(RPCLedgerEntryTest, ObjectUpdateIncludeDelete) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "index": "{}", "include_deleted": true }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -3749,13 +3777,15 @@ TEST_F(RPCLedgerEntryTest, ObjectDeletedPreviously) .WillOnce(Return(offer.getSerializer().peekData())); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "index": "{}", "include_deleted": true }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -3775,13 +3805,15 @@ TEST_F(RPCLedgerEntryTest, ObjectSeqNotExist) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "index": "{}", "include_deleted": true }})JSON", - kINDEX1 - )); + kINDEX1 + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -3827,12 +3859,14 @@ TEST_F(RPCLedgerEntryTest, SyntheticMPTIssuanceID) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance": "{}" }})JSON", - ripple::to_string(mptId) - )); + ripple::to_string(mptId) + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); diff --git a/tests/unit/rpc/handlers/LedgerTests.cpp b/tests/unit/rpc/handlers/LedgerTests.cpp index 75224583..5c9d4b1a 100644 --- a/tests/unit/rpc/handlers/LedgerTests.cpp +++ b/tests/unit/rpc/handlers/LedgerTests.cpp @@ -204,12 +204,14 @@ TEST_F(RPCLedgerHandlerTest, LedgerNotExistViaIntSequence) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerHandler{backend_, mockAmendmentCenterPtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "ledger_index": {} }})JSON", - kRANGE_MAX - )); + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -225,12 +227,14 @@ TEST_F(RPCLedgerHandlerTest, LedgerNotExistViaStringSequence) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerHandler{backend_, mockAmendmentCenterPtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "ledger_index": "{}" }})JSON", - kRANGE_MAX - )); + kRANGE_MAX + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -246,12 +250,14 @@ TEST_F(RPCLedgerHandlerTest, LedgerNotExistViaHash) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{LedgerHandler{backend_, mockAmendmentCenterPtr_}}; 
- auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "ledger_hash": "{}" }})JSON", - kLEDGER_HASH - )); + kLEDGER_HASH + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -694,15 +700,17 @@ TEST_F(RPCLedgerHandlerTest, TwoRequestInARowTransactionsExpandNotBinaryV2) auto output = handler.process(req, Context{.yield = yield, .apiVersion = 2u}); ASSERT_TRUE(output); - auto const req2 = json::parse(fmt::format( - R"JSON({{ + auto const req2 = json::parse( + fmt::format( + R"JSON({{ "binary": false, "expand": true, "transactions": true, "ledger_index": {} }})JSON", - kRANGE_MAX - 1 - )); + kRANGE_MAX - 1 + ) + ); auto output2 = handler.process(req2, Context{.yield = yield, .apiVersion = 2u}); ASSERT_TRUE(output2); EXPECT_NE( @@ -772,11 +780,14 @@ TEST_F(RPCLedgerHandlerTest, DiffNotBinary) EXPECT_CALL(*backend_, fetchLedgerDiff).Times(1); los.push_back(LedgerObject{.key = ripple::uint256{kINDEX2}, .blob = Blob{}}); // NOLINT(modernize-use-emplace) - los.push_back(LedgerObject{ - .key = ripple::uint256{kINDEX1}, - .blob = - createAccountRootObject(kACCOUNT, ripple::lsfGlobalFreeze, 1, 10, 2, kINDEX1, 3).getSerializer().peekData() - }); + los.push_back( + LedgerObject{ + .key = ripple::uint256{kINDEX1}, + .blob = createAccountRootObject(kACCOUNT, ripple::lsfGlobalFreeze, 1, 10, 2, kINDEX1, 3) + .getSerializer() + .peekData() + } + ); ON_CALL(*backend_, fetchLedgerDiff(kRANGE_MAX, _)).WillByDefault(Return(los)); @@ -816,11 +827,14 @@ TEST_F(RPCLedgerHandlerTest, DiffBinary) EXPECT_CALL(*backend_, fetchLedgerDiff).Times(1); los.push_back(LedgerObject{.key = ripple::uint256{kINDEX2}, .blob = Blob{}}); // NOLINT(modernize-use-emplace) - los.push_back(LedgerObject{ - .key = ripple::uint256{kINDEX1}, - .blob = - createAccountRootObject(kACCOUNT, ripple::lsfGlobalFreeze, 1, 10, 2, kINDEX1, 3).getSerializer().peekData() - }); + los.push_back( + LedgerObject{ + .key = ripple::uint256{kINDEX1}, + .blob = createAccountRootObject(kACCOUNT, ripple::lsfGlobalFreeze, 1, 10, 2, kINDEX1, 3) + .getSerializer() + .peekData() + } + ); ON_CALL(*backend_, fetchLedgerDiff(kRANGE_MAX, _)).WillByDefault(Return(los)); diff --git a/tests/unit/rpc/handlers/MPTHoldersTests.cpp b/tests/unit/rpc/handlers/MPTHoldersTests.cpp index a8217091..4103cbe1 100644 --- a/tests/unit/rpc/handlers/MPTHoldersTests.cpp +++ b/tests/unit/rpc/handlers/MPTHoldersTests.cpp @@ -81,13 +81,15 @@ TEST_F(RPCMPTHoldersHandlerTest, NonHexLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_hash": "xxx" }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); auto const output = handler.process(input, Context{.yield = std::ref(yield)}); ASSERT_FALSE(output); @@ -101,13 +103,15 @@ TEST_F(RPCMPTHoldersHandlerTest, NonStringLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_hash": 123 }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); auto const output = handler.process(input, Context{.yield = std::ref(yield)}); ASSERT_FALSE(output); @@ -121,13 +125,15 @@ 
TEST_F(RPCMPTHoldersHandlerTest, InvalidLedgerIndexString) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_index": "notvalidated" }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); auto const output = handler.process(input, Context{.yield = std::ref(yield)}); ASSERT_FALSE(output); @@ -189,13 +195,15 @@ TEST_F(RPCMPTHoldersHandlerTest, MarkerInvalidFormat) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "marker": "xxx" }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); auto const output = handler.process(input, Context{.yield = std::ref(yield)}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -209,13 +217,15 @@ TEST_F(RPCMPTHoldersHandlerTest, MarkerNotString) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "marker": 1 }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); auto const output = handler.process(input, Context{.yield = std::ref(yield)}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -232,14 +242,16 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerHash) ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillByDefault(Return(std::optional{})); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_hash": "{}" }})JSON", - kMPT_ID, - kLEDGER_HASH - )); + kMPT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -256,13 +268,15 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerStringIndex) { // mock fetchLedgerBySequence return empty EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional{})); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_index": "4" }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -277,13 +291,15 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerIntIndex) { // mock fetchLedgerBySequence return empty EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional{})); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_index": 4 }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -302,14 +318,16 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerHash2) 
auto ledgerinfo = createLedgerHeader(kLEDGER_HASH, 31); ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerinfo)); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_hash": "{}" }})JSON", - kMPT_ID, - kLEDGER_HASH - )); + kMPT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -326,13 +344,15 @@ TEST_F(RPCMPTHoldersHandlerTest, NonExistLedgerViaLedgerIndex2) // no need to check from db,call fetchLedgerBySequence 0 time // differ from previous logic EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_index": "31" }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{MPTHoldersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -352,14 +372,16 @@ TEST_F(RPCMPTHoldersHandlerTest, MPTNotFound) ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_hash": "{}" }})JSON", - kMPT_ID, - kLEDGER_HASH - )); + kMPT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto handler = AnyHandler{MPTHoldersHandler{this->backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -399,12 +421,14 @@ TEST_F(RPCMPTHoldersHandlerTest, DefaultParameters) ) .Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}" }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{MPTHoldersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -447,12 +471,14 @@ TEST_F(RPCMPTHoldersHandlerTest, CustomAmounts) ) .Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}" }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{MPTHoldersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -495,14 +521,16 @@ TEST_F(RPCMPTHoldersHandlerTest, SpecificLedgerIndex) ) .Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "ledger_index": {} }})JSON", - kMPT_ID, - specificLedger - )); + kMPT_ID, + specificLedger + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{MPTHoldersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -542,14 +570,16 @@ TEST_F(RPCMPTHoldersHandlerTest, MarkerParameter) .Times(1); auto const holder1AccountId = ripple::strHex(getAccountIdWithString(kHOLDE_R1_ACCOUNT)); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + 
R"JSON({{ "mpt_issuance_id": "{}", "marker": "{}" }})JSON", - kMPT_ID, - holder1AccountId - )); + kMPT_ID, + holder1AccountId + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{MPTHoldersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -588,12 +618,14 @@ TEST_F(RPCMPTHoldersHandlerTest, MultipleMPTs) ) .Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}" }})JSON", - kMPT_ID - )); + kMPT_ID + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{MPTHoldersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -636,14 +668,16 @@ TEST_F(RPCMPTHoldersHandlerTest, LimitMoreThanMAx) ) .Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "mpt_issuance_id": "{}", "limit": {} }})JSON", - kMPT_ID, - MPTHoldersHandler::kLIMIT_MAX + 1 - )); + kMPT_ID, + MPTHoldersHandler::kLIMIT_MAX + 1 + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{MPTHoldersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); diff --git a/tests/unit/rpc/handlers/NFTBuyOffersTests.cpp b/tests/unit/rpc/handlers/NFTBuyOffersTests.cpp index 16abca8e..e20eaa22 100644 --- a/tests/unit/rpc/handlers/NFTBuyOffersTests.cpp +++ b/tests/unit/rpc/handlers/NFTBuyOffersTests.cpp @@ -66,13 +66,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonHexLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "xxx" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -86,13 +88,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, LimitNotInt) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": "xxx" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -105,13 +109,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, LimitNegative) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": -1 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -124,13 +130,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, LimitZero) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": 0 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -143,13 +151,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonStringLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = 
AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": 123 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -163,13 +173,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, InvalidLedgerIndexString) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "notvalidated" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -220,14 +232,16 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonExistLedgerViaLedgerHash) .WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -245,13 +259,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonExistLedgerViaLedgerIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "4" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -270,14 +286,16 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonExistLedgerViaLedgerHash2) auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 31); ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader)); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -294,13 +312,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NonExistLedgerViaLedgerIndex2) // no need to check from db, call fetchLedgerBySequence 0 time // differ from previous logic EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "31" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -319,14 +339,16 @@ TEST_F(RPCNFTBuyOffersHandlerTest, NoNFT) EXPECT_CALL(*backend_, 
fetchLedgerByHash).Times(1); ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::nullopt)); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -341,13 +363,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, MarkerNotString) { runSpawn([this](auto yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": 9 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -363,13 +387,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, InvalidMarker) { runSpawn([this](auto yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": "123invalid" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -379,13 +405,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, InvalidMarker) }); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": 250 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -436,12 +464,14 @@ TEST_F(RPCNFTBuyOffersHandlerTest, DefaultParameters) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTBuyOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -476,13 +506,15 @@ TEST_F(RPCNFTBuyOffersHandlerTest, MultipleResultsWithMarkerAndLimitOutput) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": 50 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTBuyOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -534,14 +566,16 @@ TEST_F(RPCNFTBuyOffersHandlerTest, ResultsForInputWithMarkerAndLimit) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": "E6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC353", "limit": 50 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { 
auto handler = AnyHandler{NFTBuyOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -597,14 +631,16 @@ TEST_F(RPCNFTBuyOffersHandlerTest, ResultsWithoutMarkerForInputWithMarkerAndLimi runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTBuyOffersHandler{this->backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": "E6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC353", "limit": 50 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); @@ -616,26 +652,30 @@ TEST_F(RPCNFTBuyOffersHandlerTest, ResultsWithoutMarkerForInputWithMarkerAndLimi runSpawn([this](auto yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": 49 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); // todo: check limit somehow? }); runSpawn([this](auto yield) { auto const handler = AnyHandler{NFTBuyOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": 501 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); // todo: check limit somehow? }); @@ -665,14 +705,16 @@ TEST_F(RPCNFTBuyOffersHandlerTest, LimitLessThanMin) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": {} }})JSON", - kNFT_ID, - NFTBuyOffersHandler::kLIMIT_MIN - 1 - )); + kNFT_ID, + NFTBuyOffersHandler::kLIMIT_MIN - 1 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTBuyOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -707,14 +749,16 @@ TEST_F(RPCNFTBuyOffersHandlerTest, LimitMoreThanMax) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": {} }})JSON", - kNFT_ID, - NFTBuyOffersHandler::kLIMIT_MAX + 1 - )); + kNFT_ID, + NFTBuyOffersHandler::kLIMIT_MAX + 1 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTBuyOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); diff --git a/tests/unit/rpc/handlers/NFTHistoryTests.cpp b/tests/unit/rpc/handlers/NFTHistoryTests.cpp index 838b7650..7f742c64 100644 --- a/tests/unit/rpc/handlers/NFTHistoryTests.cpp +++ b/tests/unit/rpc/handlers/NFTHistoryTests.cpp @@ -310,17 +310,19 @@ TEST_F(RPCNFTHistoryHandlerTest, IndexSpecificForwardTrue) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": true }})JSON", - kNFT_ID, - kMIN_SEQ + 1, - kMAX_SEQ - 1 - )); + kNFT_ID, + kMIN_SEQ + 1, + kMAX_SEQ 
- 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -454,17 +456,19 @@ TEST_F(RPCNFTHistoryHandlerTest, IndexSpecificForwardFalseV1) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kNFT_ID, - kMIN_SEQ + 1, - kMAX_SEQ - 1 - )); + kNFT_ID, + kMIN_SEQ + 1, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), boost::json::parse(kOUTPUT)); @@ -608,17 +612,19 @@ TEST_F(RPCNFTHistoryHandlerTest, IndexSpecificForwardFalseV2) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kNFT_ID, - kMIN_SEQ + 1, - kMAX_SEQ - 1 - )); + kNFT_ID, + kMIN_SEQ + 1, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{.yield = yield, .apiVersion = 2u}); ASSERT_TRUE(output); EXPECT_EQ(output.result.value(), boost::json::parse(kOUTPUT)); @@ -640,17 +646,19 @@ TEST_F(RPCNFTHistoryHandlerTest, IndexNotSpecificForwardTrue) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": true }})JSON", - kNFT_ID, - -1, - -1 - )); + kNFT_ID, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -681,17 +689,19 @@ TEST_F(RPCNFTHistoryHandlerTest, IndexNotSpecificForwardFalse) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kNFT_ID, - -1, - -1 - )); + kNFT_ID, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -722,17 +732,19 @@ TEST_F(RPCNFTHistoryHandlerTest, BinaryTrueV1) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "binary": true }})JSON", - kNFT_ID, - -1, - -1 - )); + kNFT_ID, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -776,17 +788,19 @@ TEST_F(RPCNFTHistoryHandlerTest, BinaryTrueV2) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = 
json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "binary": true }})JSON", - kNFT_ID, - -1, - -1 - )); + kNFT_ID, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{.yield = yield, .apiVersion = 2u}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -827,8 +841,9 @@ TEST_F(RPCNFTHistoryHandlerTest, LimitAndMarker) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, @@ -836,10 +851,11 @@ TEST_F(RPCNFTHistoryHandlerTest, LimitAndMarker) "forward": false, "marker": {{"ledger": 10, "seq": 11}} }})JSON", - kNFT_ID, - -1, - -1 - )); + kNFT_ID, + -1, + -1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -875,14 +891,16 @@ TEST_F(RPCNFTHistoryHandlerTest, SpecificLedgerIndex) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": {} }})JSON", - kNFT_ID, - kMAX_SEQ - 1 - )); + kNFT_ID, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -901,14 +919,16 @@ TEST_F(RPCNFTHistoryHandlerTest, SpecificNonexistLedgerIntIndex) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": {} }})JSON", - kNFT_ID, - kMAX_SEQ - 1 - )); + kNFT_ID, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -924,14 +944,16 @@ TEST_F(RPCNFTHistoryHandlerTest, SpecificNonexistLedgerStringIndex) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "{}" }})JSON", - kNFT_ID, - kMAX_SEQ - 1 - )); + kNFT_ID, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -964,14 +986,16 @@ TEST_F(RPCNFTHistoryHandlerTest, SpecificLedgerHash) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -1002,17 +1026,19 @@ TEST_F(RPCNFTHistoryHandlerTest, TxLessThanMinSeq) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - 
R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kNFT_ID, - kMIN_SEQ + 2, - kMAX_SEQ - 1 - )); + kNFT_ID, + kMIN_SEQ + 2, + kMAX_SEQ - 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -1043,17 +1069,19 @@ TEST_F(RPCNFTHistoryHandlerTest, TxLargerThanMaxSeq) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false }})JSON", - kNFT_ID, - kMIN_SEQ + 1, - kMAX_SEQ - 2 - )); + kNFT_ID, + kMIN_SEQ + 1, + kMAX_SEQ - 2 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); @@ -1084,19 +1112,21 @@ TEST_F(RPCNFTHistoryHandlerTest, LimitMoreThanMax) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTHistoryHandler{backend_}}; - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index_min": {}, "ledger_index_max": {}, "forward": false, "limit": {} }})JSON", - kNFT_ID, - kMIN_SEQ + 1, - kMAX_SEQ - 1, - NFTHistoryHandler::kLIMIT_MAX + 1 - )); + kNFT_ID, + kMIN_SEQ + 1, + kMAX_SEQ - 1, + NFTHistoryHandler::kLIMIT_MAX + 1 + ) + ); auto const output = handler.process(kINPUT, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("nft_id").as_string(), kNFT_ID); diff --git a/tests/unit/rpc/handlers/NFTInfoTests.cpp b/tests/unit/rpc/handlers/NFTInfoTests.cpp index 02fe9141..d568a715 100644 --- a/tests/unit/rpc/handlers/NFTInfoTests.cpp +++ b/tests/unit/rpc/handlers/NFTInfoTests.cpp @@ -61,13 +61,15 @@ TEST_F(RPCNFTInfoHandlerTest, NonHexLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "xxx" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -81,13 +83,15 @@ TEST_F(RPCNFTInfoHandlerTest, NonStringLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": 123 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -101,13 +105,15 @@ TEST_F(RPCNFTInfoHandlerTest, InvalidLedgerIndexString) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "notvalidated" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -158,14 +164,16 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerHash) 
.WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -183,13 +191,15 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerStringIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "4" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -205,13 +215,15 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerIntIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": 4 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -230,14 +242,16 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerHash2) auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 31); ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader)); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -254,13 +268,15 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistLedgerViaLedgerIndex2) // no need to check from db,call fetchLedgerBySequence 0 time // differ from previous logic EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "31" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -280,14 +296,16 @@ TEST_F(RPCNFTInfoHandlerTest, NonExistNFT) // fetch nft return empty ON_CALL(*backend_, fetchNFT).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchNFT(ripple::uint256{kNFT_ID}, 30, _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - 
kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTInfoHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -324,12 +342,14 @@ TEST_F(RPCNFTInfoHandlerTest, DefaultParameters) ON_CALL(*backend_, fetchNFT).WillByDefault(Return(nft)); EXPECT_CALL(*backend_, fetchNFT(ripple::uint256{kNFT_ID}, 30, _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTInfoHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -365,12 +385,14 @@ TEST_F(RPCNFTInfoHandlerTest, BurnedNFT) ON_CALL(*backend_, fetchNFT).WillByDefault(Return(nft)); EXPECT_CALL(*backend_, fetchNFT(ripple::uint256{kNFT_ID}, 30, _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTInfoHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -405,12 +427,14 @@ TEST_F(RPCNFTInfoHandlerTest, NotBurnedNFTWithoutURI) ON_CALL(*backend_, fetchNFT).WillByDefault(Return(nft)); EXPECT_CALL(*backend_, fetchNFT(ripple::uint256{kNFT_ID}, 30, _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTInfoHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -445,12 +469,14 @@ TEST_F(RPCNFTInfoHandlerTest, NFTWithExtraFieldsSet) ON_CALL(*backend_, fetchNFT).WillByDefault(Return(nft)); EXPECT_CALL(*backend_, fetchNFT(ripple::uint256{kNFT_ID2}, 30, _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}" }})JSON", - kNFT_ID2 - )); + kNFT_ID2 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTInfoHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); diff --git a/tests/unit/rpc/handlers/NFTSellOffersTests.cpp b/tests/unit/rpc/handlers/NFTSellOffersTests.cpp index 356cb909..a0b41656 100644 --- a/tests/unit/rpc/handlers/NFTSellOffersTests.cpp +++ b/tests/unit/rpc/handlers/NFTSellOffersTests.cpp @@ -66,13 +66,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, LimitNotInt) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": "xxx" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -85,13 +87,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, LimitNegative) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": -1 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); 
ASSERT_FALSE(output); @@ -104,13 +108,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, LimitZero) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": 0 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -123,13 +129,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonHexLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "xxx" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -143,13 +151,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonStringLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": 123 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -163,13 +173,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, InvalidLedgerIndexString) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "notvalidated" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{.yield = yield}); ASSERT_FALSE(output); @@ -220,14 +232,16 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonExistLedgerViaLedgerHash) .WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -245,13 +259,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonExistLedgerViaLedgerIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "4" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -270,14 +286,16 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonExistLedgerViaLedgerHash2) auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 31); ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader)); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const 
input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -294,13 +312,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, NonExistLedgerViaLedgerIndex2) // no need to check from db, call fetchLedgerBySequence 0 time // differ from previous logic EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_index": "31" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -319,14 +339,16 @@ TEST_F(RPCNFTSellOffersHandlerTest, NoNFT) EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::nullopt)); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "ledger_hash": "{}" }})JSON", - kNFT_ID, - kLEDGER_HASH - )); + kNFT_ID, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -341,13 +363,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, MarkerNotString) { runSpawn([this](auto yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": 9 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -363,13 +387,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, InvalidMarker) { runSpawn([this](auto yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": "123invalid" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -379,13 +405,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, InvalidMarker) }); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": 250 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_FALSE(output); @@ -436,12 +464,14 @@ TEST_F(RPCNFTSellOffersHandlerTest, DefaultParameters) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}" }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTSellOffersHandler{this->backend_}}; auto const output = 
handler.process(input, Context{yield}); @@ -476,13 +506,15 @@ TEST_F(RPCNFTSellOffersHandlerTest, MultipleResultsWithMarkerAndLimitOutput) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": 50 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTSellOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -534,14 +566,16 @@ TEST_F(RPCNFTSellOffersHandlerTest, ResultsForInputWithMarkerAndLimit) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": "E6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC353", "limit": 50 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTSellOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -597,14 +631,16 @@ TEST_F(RPCNFTSellOffersHandlerTest, ResultsWithoutMarkerForInputWithMarkerAndLim runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTSellOffersHandler{this->backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "marker": "E6DBAFC99223B42257915A63DFC6B0C032D4070F9A574B255AD97466726FC353", "limit": 50 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); @@ -616,26 +652,30 @@ TEST_F(RPCNFTSellOffersHandlerTest, ResultsWithoutMarkerForInputWithMarkerAndLim runSpawn([this](auto yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": 49 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); // todo: check limit? }); runSpawn([this](auto yield) { auto const handler = AnyHandler{NFTSellOffersHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": 501 }})JSON", - kNFT_ID - )); + kNFT_ID + ) + ); auto const output = handler.process(input, Context{yield}); ASSERT_TRUE(output); // todo: check limit? 
}); @@ -666,14 +706,16 @@ TEST_F(RPCNFTSellOffersHandlerTest, LimitLessThanMin) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": {} }})JSON", - kNFT_ID, - NFTSellOffersHandler::kLIMIT_MIN - 1 - )); + kNFT_ID, + NFTSellOffersHandler::kLIMIT_MIN - 1 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTSellOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -709,14 +751,16 @@ TEST_F(RPCNFTSellOffersHandlerTest, LimitMoreThanMax) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "nft_id": "{}", "limit": {} }})JSON", - kNFT_ID, - NFTSellOffersHandler::kLIMIT_MAX + 1 - )); + kNFT_ID, + NFTSellOffersHandler::kLIMIT_MAX + 1 + ) + ); runSpawn([&, this](auto yield) { auto handler = AnyHandler{NFTSellOffersHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); diff --git a/tests/unit/rpc/handlers/NFTsByIssuerTest.cpp b/tests/unit/rpc/handlers/NFTsByIssuerTest.cpp index 46fd9796..3b5bbbae 100644 --- a/tests/unit/rpc/handlers/NFTsByIssuerTest.cpp +++ b/tests/unit/rpc/handlers/NFTsByIssuerTest.cpp @@ -105,13 +105,15 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonHexLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_hash": "xxx" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{.yield = std::ref(yield)}); ASSERT_FALSE(output); @@ -125,13 +127,15 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonStringLedgerHash) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_hash": 123 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{.yield = std::ref(yield)}); ASSERT_FALSE(output); @@ -145,13 +149,15 @@ TEST_F(RPCNFTsByIssuerHandlerTest, InvalidLedgerIndexString) { runSpawn([this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}}; - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_index": "notvalidated" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const output = handler.process(input, Context{.yield = std::ref(yield)}); ASSERT_FALSE(output); @@ -216,14 +222,16 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerHash) ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)) .WillByDefault(Return(std::optional{})); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = 
AnyHandler{NFTsByIssuerHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -240,13 +248,15 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerStringIndex) { // mock fetchLedgerBySequence return empty EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional{})); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_index": "4" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -261,13 +271,15 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerIntIndex) { // mock fetchLedgerBySequence return empty EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(std::optional{})); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_index": 4 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -286,14 +298,16 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerHash2) auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 31); ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(ledgerHeader)); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -310,13 +324,15 @@ TEST_F(RPCNFTsByIssuerHandlerTest, NonExistLedgerViaLedgerIndex2) // no need to check from db,call fetchLedgerBySequence 0 time // differ from previous logic EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(0); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_index": "31" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto const handler = AnyHandler{NFTsByIssuerHandler{backend_}}; auto const output = handler.process(input, Context{.yield = std::ref(yield)}); @@ -336,14 +352,16 @@ TEST_F(RPCNFTsByIssuerHandlerTest, AccountNotFound) ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](boost::asio::yield_context yield) { auto handler = AnyHandler{NFTsByIssuerHandler{this->backend_}}; auto const output = handler.process(input, Context{.yield = yield}); @@ -379,12 +397,14 @@ TEST_F(RPCNFTsByIssuerHandlerTest, DefaultParameters) ON_CALL(*backend_, fetchNFTsByIssuer).WillByDefault(Return(NFTsAndCursor{.nfts = nfts, .cursor = {}})); 
EXPECT_CALL(*backend_, fetchNFTsByIssuer(account, Eq(std::nullopt), Const(30), _, Eq(std::nullopt), _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{NFTsByIssuerHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -432,14 +452,16 @@ TEST_F(RPCNFTsByIssuerHandlerTest, SpecificLedgerIndex) EXPECT_CALL(*backend_, fetchNFTsByIssuer(account, Eq(std::nullopt), Const(specificLedger), _, Eq(std::nullopt), _)) .Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "ledger_index": {} }})JSON", - kACCOUNT, - specificLedger - )); + kACCOUNT, + specificLedger + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{NFTsByIssuerHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -473,13 +495,15 @@ TEST_F(RPCNFTsByIssuerHandlerTest, TaxonParameter) ON_CALL(*backend_, fetchNFTsByIssuer).WillByDefault(Return(NFTsAndCursor{.nfts = nfts, .cursor = {}})); EXPECT_CALL(*backend_, fetchNFTsByIssuer(account, Optional(0), Const(30), _, Eq(std::nullopt), _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "nft_taxon": 0 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{NFTsByIssuerHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -514,14 +538,16 @@ TEST_F(RPCNFTsByIssuerHandlerTest, MarkerParameter) .WillByDefault(Return(NFTsAndCursor{.nfts = nfts, .cursor = ripple::uint256{kNFT_ID3}})); EXPECT_CALL(*backend_, fetchNFTsByIssuer(account, _, Const(30), _, Eq(ripple::uint256{kNFT_ID1}), _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "marker": "{}" }})JSON", - kACCOUNT, - kNFT_ID1 - )); + kACCOUNT, + kNFT_ID1 + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{NFTsByIssuerHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -558,12 +584,14 @@ TEST_F(RPCNFTsByIssuerHandlerTest, MultipleNFTs) ON_CALL(*backend_, fetchNFTsByIssuer).WillByDefault(Return(NFTsAndCursor{.nfts = nfts, .cursor = {}})); EXPECT_CALL(*backend_, fetchNFTsByIssuer(account, Eq(std::nullopt), Const(30), _, Eq(std::nullopt), _)).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}" }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{NFTsByIssuerHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); @@ -600,14 +628,16 @@ TEST_F(RPCNFTsByIssuerHandlerTest, LimitMoreThanMax) ) .Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "issuer": "{}", "limit": {} }})JSON", - kACCOUNT, - NFTsByIssuerHandler::kLIMIT_MAX + 1 - )); + kACCOUNT, + NFTsByIssuerHandler::kLIMIT_MAX + 1 + ) + ); runSpawn([&, this](auto& yield) { auto handler = AnyHandler{NFTsByIssuerHandler{this->backend_}}; auto const output = handler.process(input, Context{yield}); diff --git a/tests/unit/rpc/handlers/NoRippleCheckTests.cpp 
b/tests/unit/rpc/handlers/NoRippleCheckTests.cpp index b67e0203..2298c8f4 100644 --- a/tests/unit/rpc/handlers/NoRippleCheckTests.cpp +++ b/tests/unit/rpc/handlers/NoRippleCheckTests.cpp @@ -213,15 +213,17 @@ TEST_F(RPCNoRippleCheckTest, LedgerNotExistViaHash) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerByHash(ripple::uint256{kLEDGER_HASH}, _)).WillByDefault(Return(std::nullopt)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "role": "gateway", "ledger_hash": "{}" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -240,15 +242,17 @@ TEST_F(RPCNoRippleCheckTest, LedgerNotExistViaIntIndex) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(kSEQ, _)).WillByDefault(Return(std::nullopt)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "role": "gateway", "ledger_index": {} }})JSON", - kACCOUNT, - kSEQ - )); + kACCOUNT, + kSEQ + ) + ); auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -267,15 +271,17 @@ TEST_F(RPCNoRippleCheckTest, LedgerNotExistViaStringIndex) // return empty ledgerHeader ON_CALL(*backend_, fetchLedgerBySequence(kSEQ, _)).WillByDefault(Return(std::nullopt)); - static auto const kINPUT = json::parse(fmt::format( - R"JSON({{ + static auto const kINPUT = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "role": "gateway", "ledger_index": "{}" }})JSON", - kACCOUNT, - kSEQ - )); + kACCOUNT, + kSEQ + ) + ); auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; runSpawn([&](auto yield) { auto const output = handler.process(kINPUT, Context{yield}); @@ -294,15 +300,17 @@ TEST_F(RPCNoRippleCheckTest, AccountNotExist) // fetch account object return empty ON_CALL(*backend_, doFetchLedgerObject).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, doFetchLedgerObject).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "gateway" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -357,15 +365,17 @@ TEST_F(RPCNoRippleCheckTest, NormalPathRoleUserDefaultRippleSetTrustLineNoRipple ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "user" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -412,15 +422,17 @@ TEST_F(RPCNoRippleCheckTest, NormalPathRoleUserDefaultRippleUnsetTrustLineNoRipp ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto 
const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "user" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -474,15 +486,17 @@ TEST_F(RPCNoRippleCheckTest, NormalPathRoleGatewayDefaultRippleSetTrustLineNoRip ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "gateway" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -529,15 +543,17 @@ TEST_F(RPCNoRippleCheckTest, NormalPathRoleGatewayDefaultRippleUnsetTrustLineNoR ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "gateway" }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -576,16 +592,18 @@ TEST_F(RPCNoRippleCheckTest, NormalPathRoleGatewayDefaultRippleUnsetTrustLineNoR ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "gateway", "transactions": true }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -629,16 +647,18 @@ TEST_F(RPCNoRippleCheckTest, NormalPathLimit) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "gateway", "limit": 1 }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -734,16 +754,18 @@ TEST_F(RPCNoRippleCheckTest, NormalPathTransactions) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "gateway", "transactions": true }})JSON", - kACCOUNT, - kLEDGER_HASH - )); + kACCOUNT, + kLEDGER_HASH + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = 
handler.process(input, Context{yield}); @@ -786,17 +808,19 @@ TEST_F(RPCNoRippleCheckTest, LimitMoreThanMax) ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs)); EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "account": "{}", "ledger_hash": "{}", "role": "gateway", "limit": {} }})JSON", - kACCOUNT, - kLEDGER_HASH, - NoRippleCheckHandler::kLIMIT_MAX + 1 - )); + kACCOUNT, + kLEDGER_HASH, + NoRippleCheckHandler::kLIMIT_MAX + 1 + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{NoRippleCheckHandler{backend_}}; auto const output = handler.process(input, Context{yield}); diff --git a/tests/unit/rpc/handlers/SubscribeTests.cpp b/tests/unit/rpc/handlers/SubscribeTests.cpp index b4400f41..2d307172 100644 --- a/tests/unit/rpc/handlers/SubscribeTests.cpp +++ b/tests/unit/rpc/handlers/SubscribeTests.cpp @@ -684,14 +684,16 @@ TEST_F(RPCSubscribeHandlerTest, StreamsLedger) TEST_F(RPCSubscribeHandlerTest, Accounts) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "accounts": ["{}", "{}", "{}"] }})JSON", - kACCOUNT, - kACCOUNT2, - kACCOUNT2 - )); + kACCOUNT, + kACCOUNT2, + kACCOUNT2 + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{SubscribeHandler{backend_, mockAmendmentCenterPtr_, mockSubscriptionManagerPtr_}}; @@ -707,14 +709,16 @@ TEST_F(RPCSubscribeHandlerTest, Accounts) TEST_F(RPCSubscribeHandlerTest, AccountsProposed) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "accounts_proposed": ["{}", "{}", "{}"] }})JSON", - kACCOUNT, - kACCOUNT2, - kACCOUNT2 - )); + kACCOUNT, + kACCOUNT2, + kACCOUNT2 + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{SubscribeHandler{backend_, mockAmendmentCenterPtr_, mockSubscriptionManagerPtr_}}; @@ -731,8 +735,9 @@ TEST_F(RPCSubscribeHandlerTest, AccountsProposed) TEST_F(RPCSubscribeHandlerTest, JustBooks) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "books": [ {{ @@ -748,8 +753,9 @@ TEST_F(RPCSubscribeHandlerTest, JustBooks) }} ] }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{SubscribeHandler{backend_, mockAmendmentCenterPtr_, mockSubscriptionManagerPtr_}}; @@ -763,8 +769,9 @@ TEST_F(RPCSubscribeHandlerTest, JustBooks) TEST_F(RPCSubscribeHandlerTest, BooksBothSet) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "books": [ {{ @@ -781,8 +788,9 @@ TEST_F(RPCSubscribeHandlerTest, BooksBothSet) }} ] }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{SubscribeHandler{backend_, mockAmendmentCenterPtr_, mockSubscriptionManagerPtr_}}; @@ -796,8 +804,9 @@ TEST_F(RPCSubscribeHandlerTest, BooksBothSet) TEST_F(RPCSubscribeHandlerTest, BooksBothSnapshotSet) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "books": [ {{ @@ -815,8 +824,9 @@ TEST_F(RPCSubscribeHandlerTest, BooksBothSnapshotSet) }} ] }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); backend_->setRange(kMIN_SEQ, kMAX_SEQ); auto const issuer = getAccountIdWithString(kACCOUNT); @@ -969,8 +979,9 @@ TEST_F(RPCSubscribeHandlerTest, 
BooksBothSnapshotSet) TEST_F(RPCSubscribeHandlerTest, BooksBothUnsetSnapshotSet) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "books": [ {{ @@ -987,8 +998,9 @@ TEST_F(RPCSubscribeHandlerTest, BooksBothUnsetSnapshotSet) }} ] }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); backend_->setRange(kMIN_SEQ, kMAX_SEQ); auto const issuer = getAccountIdWithString(kACCOUNT); diff --git a/tests/unit/rpc/handlers/TransactionEntryTests.cpp b/tests/unit/rpc/handlers/TransactionEntryTests.cpp index 98720a10..994ca5de 100644 --- a/tests/unit/rpc/handlers/TransactionEntryTests.cpp +++ b/tests/unit/rpc/handlers/TransactionEntryTests.cpp @@ -87,14 +87,16 @@ TEST_F(RPCTransactionEntryHandlerTest, NonExistLedgerViaLedgerHash) .WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerByHash).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "ledger_hash": "{}", "tx_hash": "{}" }})JSON", - kINDEX, - kTXN_ID - )); + kINDEX, + kTXN_ID + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{TransactionEntryHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -111,13 +113,15 @@ TEST_F(RPCTransactionEntryHandlerTest, NonExistLedgerViaLedgerIndex) // mock fetchLedgerBySequence return empty ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(std::optional{})); EXPECT_CALL(*backend_, fetchLedgerBySequence).Times(1); - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "ledger_index": "4", "tx_hash": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); runSpawn([&, this](auto yield) { auto const handler = AnyHandler{TransactionEntryHandler{backend_}}; auto const output = handler.process(input, Context{yield}); @@ -137,12 +141,14 @@ TEST_F(RPCTransactionEntryHandlerTest, TXNotFound) EXPECT_CALL(*backend_, fetchTransaction).Times(1); runSpawn([this](auto yield) { auto const handler = AnyHandler{TransactionEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "tx_hash": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -167,13 +173,15 @@ TEST_F(RPCTransactionEntryHandlerTest, LedgerSeqNotMatch) runSpawn([this](auto yield) { auto const handler = AnyHandler{TransactionEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "tx_hash": "{}", "ledger_index": "30" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); auto const err = rpc::makeError(output.result.error()); @@ -244,14 +252,16 @@ TEST_F(RPCTransactionEntryHandlerTest, NormalPath) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{TransactionEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "tx_hash": "{}", "ledger_index": {} }})JSON", - kTXN_ID, - tx.ledgerSequence - )); + kTXN_ID, + tx.ledgerSequence + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(json::parse(kOUTPUT), *output.result); @@ -318,14 +328,16 @@ TEST_F(RPCTransactionEntryHandlerTest, 
NormalPathV2) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{TransactionEntryHandler{backend_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "tx_hash": "{}", "ledger_index": {} }})JSON", - kTXN_ID, - tx.ledgerSequence - )); + kTXN_ID, + tx.ledgerSequence + ) + ); auto const output = handler.process(req, Context{.yield = yield, .apiVersion = 2}); ASSERT_TRUE(output); EXPECT_EQ(json::parse(kOUTPUT), *output.result); diff --git a/tests/unit/rpc/handlers/TxTests.cpp b/tests/unit/rpc/handlers/TxTests.cpp index 912509be..60c36eb5 100644 --- a/tests/unit/rpc/handlers/TxTests.cpp +++ b/tests/unit/rpc/handlers/TxTests.cpp @@ -150,15 +150,17 @@ TEST_F(RPCTxTest, ExcessiveLgrRange) { runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "min_ledger": 1, "max_ledger": 1002 }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -184,14 +186,16 @@ TEST_F(RPCTxTest, InvalidBinaryV1) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "binary": 12 }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{.yield = yield, .apiVersion = 1u}); ASSERT_TRUE(output); }); @@ -201,14 +205,16 @@ TEST_F(RPCTxTest, InvalidBinaryV2) { runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "binary": 12 }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{.yield = yield, .apiVersion = 2u}); ASSERT_FALSE(output); @@ -222,15 +228,17 @@ TEST_F(RPCTxTest, InvalidLgrRange) { runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "max_ledger": 1, "min_ledger": 10 }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -251,13 +259,15 @@ TEST_F(RPCTxTest, TxnNotFound) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -279,15 +289,17 @@ TEST_F(RPCTxTest, TxnNotFoundInGivenRangeSearchAllFalse) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "min_ledger": 1, "max_ledger": 1000 }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, 
Context{yield}); ASSERT_FALSE(output); @@ -310,15 +322,17 @@ TEST_F(RPCTxTest, TxnNotFoundInGivenRangeSearchAllTrue) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "min_ledger": 1, "max_ledger": 1000 }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -342,14 +356,16 @@ TEST_F(RPCTxTest, CtidNotFoundSearchAllFalse) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "ctid": "{}", "min_ledger": 1, "max_ledger": 1000 }})JSON", - kCTID - )); + kCTID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -377,13 +393,15 @@ TEST_F(RPCTxTest, DefaultParameter_API_v1) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{.yield = yield, .apiVersion = 1u}); ASSERT_TRUE(output); @@ -407,13 +425,15 @@ TEST_F(RPCTxTest, PaymentTx_API_v1) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{.yield = yield, .apiVersion = 1u}); ASSERT_TRUE(output); EXPECT_TRUE(output.result->as_object().contains("DeliverMax")); @@ -438,13 +458,15 @@ TEST_F(RPCTxTest, PaymentTx_API_v2) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{.yield = yield, .apiVersion = 2u}); ASSERT_TRUE(output); EXPECT_TRUE(output.result->as_object().contains("tx_json")); @@ -472,13 +494,15 @@ TEST_F(RPCTxTest, DefaultParameter_API_v2) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{.yield = yield, .apiVersion = 2u}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kDEFAULT_OUT2)); @@ -513,14 +537,16 @@ TEST_F(RPCTxTest, ReturnBinary) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "binary": true }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, 
json::parse(kOUT)); @@ -556,14 +582,16 @@ TEST_F(RPCTxTest, ReturnBinaryWithCTID) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "binary": true }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -644,13 +672,15 @@ TEST_F(RPCTxTest, MintNFT) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -671,13 +701,15 @@ TEST_F(RPCTxTest, NFTAcceptOffer) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("meta").at("nftoken_id").as_string(), kNFT_ID); @@ -699,13 +731,15 @@ TEST_F(RPCTxTest, NFTCancelOffer) runSpawn([this, &ids](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); @@ -734,13 +768,15 @@ TEST_F(RPCTxTest, NFTCreateOffer) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_TRUE(output.result->at("meta").at("offer_id").as_string() == kNFT_ID2); @@ -751,15 +787,17 @@ TEST_F(RPCTxTest, CTIDAndTransactionBothProvided) { runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}", "ctid": "{}" }})JSON", - kTXN_ID, - kCTID - )); + kTXN_ID, + kCTID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -823,13 +861,15 @@ TEST_F(RPCTxTest, CTIDNotMatch) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "ctid": "{}" }})JSON", - kCTID - )); + kCTID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_FALSE(output); @@ -904,13 +944,15 @@ TEST_F(RPCTxTest, ReturnCTIDForTxInput) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, 
mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -977,13 +1019,15 @@ TEST_F(RPCTxTest, NotReturnCTIDIfETLNotAvailable) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "transaction": "{}" }})JSON", - kTXN_ID - )); + kTXN_ID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -1062,13 +1106,15 @@ TEST_F(RPCTxTest, ViaCTID) runSpawn([this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "ctid": "{}" }})JSON", - kCTID - )); + kCTID + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(*output.result, json::parse(kOUT)); @@ -1100,13 +1146,15 @@ TEST_F(RPCTxTest, ViaLowercaseCTID) runSpawn([&, this](auto yield) { auto const handler = AnyHandler{TestTxHandler{backend_, mockETLServicePtr_}}; - auto const req = json::parse(fmt::format( - R"JSON({{ + auto const req = json::parse( + fmt::format( + R"JSON({{ "command": "tx", "ctid": "{}" }})JSON", - ctid - )); + ctid + ) + ); auto const output = handler.process(req, Context{yield}); ASSERT_TRUE(output); EXPECT_EQ(output.result->at("ctid").as_string(), kCTID); diff --git a/tests/unit/rpc/handlers/UnsubscribeTests.cpp b/tests/unit/rpc/handlers/UnsubscribeTests.cpp index a745da03..7de26634 100644 --- a/tests/unit/rpc/handlers/UnsubscribeTests.cpp +++ b/tests/unit/rpc/handlers/UnsubscribeTests.cpp @@ -589,13 +589,15 @@ TEST_F(RPCUnsubscribeTest, Streams) TEST_F(RPCUnsubscribeTest, Accounts) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "accounts": ["{}", "{}"] }})JSON", - kACCOUNT, - kACCOUNT2 - )); + kACCOUNT, + kACCOUNT2 + ) + ); EXPECT_CALL(*mockSubscriptionManagerPtr_, unsubAccount(rpc::accountFromStringStrict(kACCOUNT).value(), _)).Times(1); EXPECT_CALL(*mockSubscriptionManagerPtr_, unsubAccount(rpc::accountFromStringStrict(kACCOUNT2).value(), _)) @@ -611,13 +613,15 @@ TEST_F(RPCUnsubscribeTest, Accounts) TEST_F(RPCUnsubscribeTest, AccountsProposed) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "accounts_proposed": ["{}", "{}"] }})JSON", - kACCOUNT, - kACCOUNT2 - )); + kACCOUNT, + kACCOUNT2 + ) + ); EXPECT_CALL(*mockSubscriptionManagerPtr_, unsubProposedAccount(rpc::accountFromStringStrict(kACCOUNT).value(), _)) .Times(1); @@ -634,8 +638,9 @@ TEST_F(RPCUnsubscribeTest, AccountsProposed) TEST_F(RPCUnsubscribeTest, Books) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "books": [ {{ "taker_pays": {{ @@ -649,8 +654,9 @@ TEST_F(RPCUnsubscribeTest, Books) }} ] }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const parsedBookMaybe = rpc::parseBook(input.as_object().at("books").as_array()[0].as_object()); auto const book = parsedBookMaybe.value(); @@ 
-668,8 +674,9 @@ TEST_F(RPCUnsubscribeTest, Books) TEST_F(RPCUnsubscribeTest, SingleBooks) { - auto const input = json::parse(fmt::format( - R"JSON({{ + auto const input = json::parse( + fmt::format( + R"JSON({{ "books": [ {{ "taker_pays": {{ @@ -682,8 +689,9 @@ TEST_F(RPCUnsubscribeTest, SingleBooks) }} ] }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); auto const parsedBookMaybe = rpc::parseBook(input.as_object().at("books").as_array()[0].as_object()); auto const book = parsedBookMaybe.value(); diff --git a/tests/unit/rpc/handlers/VaultInfoTests.cpp b/tests/unit/rpc/handlers/VaultInfoTests.cpp index c6999b6f..3ff058da 100644 --- a/tests/unit/rpc/handlers/VaultInfoTests.cpp +++ b/tests/unit/rpc/handlers/VaultInfoTests.cpp @@ -191,13 +191,15 @@ TEST_F(RPCVaultInfoHandlerTest, InputHasOwnerButNotFoundResultsInError) EXPECT_CALL(*backend_, fetchLedgerBySequence).WillOnce(Return(ledgerHeader)); // Input JSON using vault object - auto static const kINPUT = boost::json::parse(fmt::format( - R"JSON({{ + auto static const kINPUT = boost::json::parse( + fmt::format( + R"JSON({{ "owner": "{}", "seq": 3 }})JSON", - kACCOUNT - )); + kACCOUNT + ) + ); // Run the handler auto const handler = AnyHandler{VaultInfoHandler{backend_}}; @@ -219,12 +221,14 @@ TEST_F(RPCVaultInfoHandlerTest, VaultIDFailsVaultDeserializationReturnsEntryNotF EXPECT_CALL(*backend_, doFetchLedgerObject(vaultKey, kSEQ, _)) .WillOnce(Return(std::nullopt)); // intentionally invalid vault - auto const kINPUT = boost::json::parse(fmt::format( - R"({{ + auto const kINPUT = boost::json::parse( + fmt::format( + R"({{ "vault_id": "{}" }})", - kVAULT_ID - )); + kVAULT_ID + ) + ); auto const handler = AnyHandler{VaultInfoHandler{backend_}}; runSpawn([&](auto yield) { @@ -258,12 +262,14 @@ TEST_F(RPCVaultInfoHandlerTest, MissingIssuanceObject) EXPECT_CALL(*backend_, doFetchLedgerObject(mptIssuance, kSEQ, _)) .WillOnce(Return(std::nullopt)); // Missing issuance - auto static const kINPUT = boost::json::parse(fmt::format( - R"({{ + auto static const kINPUT = boost::json::parse( + fmt::format( + R"({{ "vault_id": "{}" }})", - kVAULT_ID - )); + kVAULT_ID + ) + ); auto const handler = AnyHandler{VaultInfoHandler{backend_}}; runSpawn([&](auto yield) { @@ -340,12 +346,14 @@ TEST_F(RPCVaultInfoHandlerTest, ValidVaultObjectQueryByVaultID) .WillOnce(Return(issuance.getSerializer().peekData())); // Input JSON using vault_id - auto static const kINPUT = boost::json::parse(fmt::format( - R"({{ + auto static const kINPUT = boost::json::parse( + fmt::format( + R"({{ "vault_id": "{}" }})", - kVAULT_ID - )); + kVAULT_ID + ) + ); // Run the handler auto const handler = AnyHandler{VaultInfoHandler{backend_}}; @@ -427,15 +435,17 @@ TEST_F(RPCVaultInfoHandlerTest, ValidVaultObjectQueryByOwnerAndSeq) .WillOnce(Return(issuance.getSerializer().peekData())); // Input JSON using vault object - auto static const kINPUT = boost::json::parse(fmt::format( - R"JSON({{ + auto static const kINPUT = boost::json::parse( + fmt::format( + R"JSON({{ "owner": "{}", "seq": {}, "ledger_index": 30 }})JSON", - kACCOUNT, - kSEQ - )); + kACCOUNT, + kSEQ + ) + ); // Run the handler auto const handler = AnyHandler{VaultInfoHandler{backend_}}; diff --git a/tests/unit/rpc/handlers/VersionHandlerTests.cpp b/tests/unit/rpc/handlers/VersionHandlerTests.cpp index 62852eff..4dd8eb56 100644 --- a/tests/unit/rpc/handlers/VersionHandlerTests.cpp +++ b/tests/unit/rpc/handlers/VersionHandlerTests.cpp @@ -52,16 +52,18 @@ TEST_F(RPCVersionHandlerTest, Default) {"api_version.default", 
ConfigValue{ConfigType::Integer}.defaultValue(kDEFAULT_API_VERSION)} }; - boost::json::value jsonData = boost::json::parse(fmt::format( - R"JSON({{ + boost::json::value jsonData = boost::json::parse( + fmt::format( + R"JSON({{ "api_version.min": {}, "api_version.max": {}, "api_version.default": {} }})JSON", - kMIN_API_VERSION, - kMAX_API_VERSION, - kDEFAULT_API_VERSION - )); + kMIN_API_VERSION, + kMAX_API_VERSION, + kDEFAULT_API_VERSION + ) + ); runSpawn([&](auto yield) { auto const handler = AnyHandler{VersionHandler{cfg}}; diff --git a/tests/unit/util/AccountUtilsTests.cpp b/tests/unit/util/AccountUtilsTests.cpp index b3438b06..b2ea54d7 100644 --- a/tests/unit/util/AccountUtilsTests.cpp +++ b/tests/unit/util/AccountUtilsTests.cpp @@ -33,10 +33,14 @@ TEST(AccountUtils, parseBase58Wrapper) EXPECT_FALSE(util::parseBase58Wrapper<ripple::AccountID>("rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp!")); EXPECT_TRUE(util::parseBase58Wrapper<ripple::AccountID>(kACCOUNT)); - EXPECT_TRUE(util::parseBase58Wrapper<ripple::SecretKey>( - ripple::TokenType::NodePrivate, "paQmjZ37pKKPMrgadBLsuf9ab7Y7EUNzh27LQrZqoexpAs31nJi" - )); - EXPECT_FALSE(util::parseBase58Wrapper<ripple::SecretKey>( - ripple::TokenType::NodePrivate, "??paQmjZ37pKKPMrgadBLsuf9ab7Y7EUNzh27LQrZqoexpAs31n" - )); + EXPECT_TRUE( + util::parseBase58Wrapper<ripple::SecretKey>( + ripple::TokenType::NodePrivate, "paQmjZ37pKKPMrgadBLsuf9ab7Y7EUNzh27LQrZqoexpAs31nJi" + ) + ); + EXPECT_FALSE( + util::parseBase58Wrapper<ripple::SecretKey>( + ripple::TokenType::NodePrivate, "??paQmjZ37pKKPMrgadBLsuf9ab7Y7EUNzh27LQrZqoexpAs31n" + ) + ); } diff --git a/tests/unit/util/ResponseExpirationCacheTests.cpp b/tests/unit/util/ResponseExpirationCacheTests.cpp index 0c02bdc3..0038f709 100644 --- a/tests/unit/util/ResponseExpirationCacheTests.cpp +++ b/tests/unit/util/ResponseExpirationCacheTests.cpp @@ -90,10 +90,12 @@ TEST_F(ResponseExpirationCacheTest, GetOrUpdateNoValueInCacheCallsUpdaterAndVeri runSpawn([&](boost::asio::yield_context yield) { EXPECT_CALL(mockUpdater, Call) - .WillOnce(Return(ResponseExpirationCache::EntryData{ - .lastUpdated = std::chrono::steady_clock::now(), - .response = obj, - })); + .WillOnce(Return( + ResponseExpirationCache::EntryData{ + .lastUpdated = std::chrono::steady_clock::now(), + .response = obj, + } + )); EXPECT_CALL(mockVerifier, Call).WillOnce(Return(true)); auto result = @@ -111,10 +113,12 @@ TEST_F(ResponseExpirationCacheTest, GetOrUpdateExpiredValueInCacheCallsUpdaterAn runSpawn([&](boost::asio::yield_context yield) { boost::json::object const expiredObject = {{"some key", "expired value"}}; EXPECT_CALL(mockUpdater, Call) - .WillOnce(Return(ResponseExpirationCache::EntryData{ - .lastUpdated = std::chrono::steady_clock::now(), - .response = expiredObject, - })); + .WillOnce(Return( + ResponseExpirationCache::EntryData{ + .lastUpdated = std::chrono::steady_clock::now(), + .response = expiredObject, + } + )); EXPECT_CALL(mockVerifier, Call).WillOnce(Return(true)); auto result = @@ -145,10 +149,12 @@ TEST_F(ResponseExpirationCacheTest, GetOrUpdateCachedValueNotExpiredDoesNotCallU runSpawn([&](boost::asio::yield_context yield) { // First call to populate cache EXPECT_CALL(mockUpdater, Call) - .WillOnce(Return(ResponseExpirationCache::EntryData{ - .lastUpdated = std::chrono::steady_clock::now(), - .response = obj, - })); + .WillOnce(Return( + ResponseExpirationCache::EntryData{ + .lastUpdated = std::chrono::steady_clock::now(), + .response = obj, + } + )); EXPECT_CALL(mockVerifier, Call).WillOnce(Return(true)); auto result = @@ -190,10 +196,12 @@ TEST_F(ResponseExpirationCacheTest, GetOrUpdateVerifierRejection) 
runSpawn([&](boost::asio::yield_context yield) { EXPECT_CALL(mockUpdater, Call) - .WillOnce(Return(ResponseExpirationCache::EntryData{ - .lastUpdated = std::chrono::steady_clock::now(), - .response = obj, - })); + .WillOnce(Return( + ResponseExpirationCache::EntryData{ + .lastUpdated = std::chrono::steady_clock::now(), + .response = obj, + } + )); EXPECT_CALL(mockVerifier, Call).WillOnce(Return(false)); auto result = @@ -204,10 +212,12 @@ TEST_F(ResponseExpirationCacheTest, GetOrUpdateVerifierRejection) boost::json::object const anotherObj = {{"some key", "another value"}}; EXPECT_CALL(mockUpdater, Call) - .WillOnce(Return(ResponseExpirationCache::EntryData{ - .lastUpdated = std::chrono::steady_clock::now(), - .response = anotherObj, - })); + .WillOnce(Return( + ResponseExpirationCache::EntryData{ + .lastUpdated = std::chrono::steady_clock::now(), + .response = anotherObj, + } + )); EXPECT_CALL(mockVerifier, Call).WillOnce(Return(true)); result = cache.getOrUpdate(yield, "server_info", mockUpdater.AsStdFunction(), mockVerifier.AsStdFunction()); @@ -233,7 +243,8 @@ TEST_F(ResponseExpirationCacheTest, GetOrUpdateMultipleConcurrentUpdates) EXPECT_CALL(mockUpdater, Call) .WillOnce( - [this, &waitingCoroutine](boost::asio::yield_context yield + [this, &waitingCoroutine]( + boost::asio::yield_context yield ) -> std::expected { boost::asio::spawn(yield, waitingCoroutine); return ResponseExpirationCache::EntryData{ @@ -261,10 +272,12 @@ TEST_F(ResponseExpirationCacheTest, InvalidateForcesRefresh) runSpawn([&](boost::asio::yield_context yield) { boost::json::object const oldObject = {{"some key", "old value"}}; EXPECT_CALL(mockUpdater, Call) - .WillOnce(Return(ResponseExpirationCache::EntryData{ - .lastUpdated = std::chrono::steady_clock::now(), - .response = oldObject, - })); + .WillOnce(Return( + ResponseExpirationCache::EntryData{ + .lastUpdated = std::chrono::steady_clock::now(), + .response = oldObject, + } + )); EXPECT_CALL(mockVerifier, Call).WillOnce(Return(true)); auto result = @@ -276,10 +289,12 @@ TEST_F(ResponseExpirationCacheTest, InvalidateForcesRefresh) cache.invalidate(); EXPECT_CALL(mockUpdater, Call) - .WillOnce(Return(ResponseExpirationCache::EntryData{ - .lastUpdated = std::chrono::steady_clock::now(), - .response = obj, - })); + .WillOnce(Return( + ResponseExpirationCache::EntryData{ + .lastUpdated = std::chrono::steady_clock::now(), + .response = obj, + } + )); EXPECT_CALL(mockVerifier, Call).WillOnce(Return(true)); result = cache.getOrUpdate(yield, "server_info", mockUpdater.AsStdFunction(), mockVerifier.AsStdFunction()); diff --git a/tests/unit/util/TxUtilTests.cpp b/tests/unit/util/TxUtilTests.cpp index 051da05f..9f979e0a 100644 --- a/tests/unit/util/TxUtilTests.cpp +++ b/tests/unit/util/TxUtilTests.cpp @@ -36,8 +36,8 @@ TEST(TxUtilTests, txTypesInLowercase) ); std::for_each( - ripple::TxFormats::getInstance().begin(), - ripple::TxFormats::getInstance().end(), - [&](auto const& pair) { EXPECT_TRUE(types.find(util::toLower(pair.getName())) != types.end()); } + ripple::TxFormats::getInstance().begin(), ripple::TxFormats::getInstance().end(), [&](auto const& pair) { + EXPECT_TRUE(types.find(util::toLower(pair.getName())) != types.end()); + } ); } diff --git a/tests/unit/util/async/AsyncExecutionContextTests.cpp b/tests/unit/util/async/AsyncExecutionContextTests.cpp index 5bb2c51e..29166633 100644 --- a/tests/unit/util/async/AsyncExecutionContextTests.cpp +++ b/tests/unit/util/async/AsyncExecutionContextTests.cpp @@ -143,8 +143,7 @@ TYPED_TEST(ExecutionContextTests, 
timerCancel) std::binary_semaphore sem{0}; auto res = this->ctx.scheduleAfter( - std::chrono::milliseconds(10), - [&value, &sem]([[maybe_unused]] auto stopRequested, auto cancelled) { + std::chrono::milliseconds(10), [&value, &sem]([[maybe_unused]] auto stopRequested, auto cancelled) { if (cancelled) value = 42; @@ -163,8 +162,7 @@ TYPED_TEST(ExecutionContextTests, timerAutoCancels) std::binary_semaphore sem{0}; { auto res = this->ctx.scheduleAfter( - std::chrono::milliseconds(1), - [&value, &sem]([[maybe_unused]] auto stopRequested, auto cancelled) { + std::chrono::milliseconds(1), [&value, &sem]([[maybe_unused]] auto stopRequested, auto cancelled) { if (cancelled) value = 42; diff --git a/tests/unit/util/prometheus/HttpTests.cpp b/tests/unit/util/prometheus/HttpTests.cpp index 309f8f12..8624974b 100644 --- a/tests/unit/util/prometheus/HttpTests.cpp +++ b/tests/unit/util/prometheus/HttpTests.cpp @@ -228,10 +228,12 @@ TEST_F(PrometheusHandleRequestTests, responseWithCounterAndGauge) TEST_F(PrometheusHandleRequestTests, compressReply) { - PrometheusService::init(ClioConfigDefinition{ - {"prometheus.compress_reply", ConfigValue{ConfigType::Boolean}.defaultValue(true)}, - {"prometheus.enabled", ConfigValue{ConfigType::Boolean}.defaultValue(true)}, - }); + PrometheusService::init( + ClioConfigDefinition{ + {"prometheus.compress_reply", ConfigValue{ConfigType::Boolean}.defaultValue(true)}, + {"prometheus.enabled", ConfigValue{ConfigType::Boolean}.defaultValue(true)}, + } + ); auto& gauge = PrometheusService::gaugeInt("test_gauge", Labels{}); ++gauge; diff --git a/tests/unit/web/RPCServerHandlerTests.cpp b/tests/unit/web/RPCServerHandlerTests.cpp index 00573683..13d17df0 100644 --- a/tests/unit/web/RPCServerHandlerTests.cpp +++ b/tests/unit/web/RPCServerHandlerTests.cpp @@ -477,8 +477,9 @@ TEST_F(WebRPCServerHandlerTest, HTTPErrorPath) .WillOnce(testing::Return(true)); EXPECT_CALL(*rpcEngine, buildResponse(testing::_)) - .WillOnce(testing::Return(rpc::Result{rpc::Status{rpc::RippledError::rpcINVALID_PARAMS, "ledgerIndexMalformed"}} - )); + .WillOnce( + testing::Return(rpc::Result{rpc::Status{rpc::RippledError::rpcINVALID_PARAMS, "ledgerIndexMalformed"}}) + ); EXPECT_CALL(*etl, lastCloseAgeSeconds()).WillOnce(testing::Return(45)); @@ -525,8 +526,9 @@ TEST_F(WebRPCServerHandlerTest, WsErrorPath) .WillOnce(testing::Return(true)); EXPECT_CALL(*rpcEngine, buildResponse(testing::_)) - .WillOnce(testing::Return(rpc::Result{rpc::Status{rpc::RippledError::rpcINVALID_PARAMS, "ledgerIndexMalformed"}} - )); + .WillOnce( + testing::Return(rpc::Result{rpc::Status{rpc::RippledError::rpcINVALID_PARAMS, "ledgerIndexMalformed"}}) + ); EXPECT_CALL(*etl, lastCloseAgeSeconds()).WillOnce(testing::Return(45)); @@ -1026,8 +1028,8 @@ generateInvalidVersions() .wsMessage = fmt::format("Requested API version is lower than minimum supported ({})", rpc::kAPI_VERSION_MIN)}, {.testName = "v4", .version = "4", - .wsMessage = fmt::format("Requested API version is higher than maximum supported ({})", rpc::kAPI_VERSION_MAX) - }, + .wsMessage = + fmt::format("Requested API version is higher than maximum supported ({})", rpc::kAPI_VERSION_MAX)}, {.testName = "null", .version = "null", .wsMessage = "API version must be an integer"}, {.testName = "str", .version = "\"bogus\"", .wsMessage = "API version must be an integer"}, {.testName = "bool", .version = "false", .wsMessage = "API version must be an integer"}, diff --git a/tests/unit/web/ServerTests.cpp b/tests/unit/web/ServerTests.cpp index eb3fadad..790f81d2 100644 --- 
a/tests/unit/web/ServerTests.cpp +++ b/tests/unit/web/ServerTests.cpp @@ -73,8 +73,9 @@ using namespace web; static boost::json::value generateJSONWithDynamicPort(std::string_view port) { - return boost::json::parse(fmt::format( - R"JSON({{ + return boost::json::parse( + fmt::format( + R"JSON({{ "server": {{ "ip": "0.0.0.0", "port": {} @@ -87,15 +88,17 @@ generateJSONWithDynamicPort(std::string_view port) "whitelist": ["127.0.0.1"] }} }})JSON", - port - )); + port + ) + ); } static boost::json::value generateJSONDataOverload(std::string_view port) { - return boost::json::parse(fmt::format( - R"JSON({{ + return boost::json::parse( + fmt::format( + R"JSON({{ "server": {{ "ip": "0.0.0.0", "port": {} @@ -107,8 +110,9 @@ generateJSONDataOverload(std::string_view port) "max_requests": 1 }} }})JSON", - port - )); + port + ) + ); } inline static ClioConfigDefinition diff --git a/tests/unit/web/dosguard/WeightsTests.cpp b/tests/unit/web/dosguard/WeightsTests.cpp index 5b585a71..5f193b99 100644 --- a/tests/unit/web/dosguard/WeightsTests.cpp +++ b/tests/unit/web/dosguard/WeightsTests.cpp @@ -202,8 +202,8 @@ INSTANTIATE_TEST_SUITE_P( TEST(WeightsMakeTest, CreateFromConfig) { util::config::ClioConfigDefinition mockConfig{ - {"dos_guard.__ng_default_weight", util::config::ConfigValue{util::config::ConfigType::Integer}.defaultValue(10) - }, + {"dos_guard.__ng_default_weight", + util::config::ConfigValue{util::config::ConfigType::Integer}.defaultValue(10)}, {"dos_guard.__ng_weights.[].method", util::config::Array{util::config::ConfigValue{util::config::ConfigType::String}}}, {"dos_guard.__ng_weights.[].weight", diff --git a/tests/unit/web/ng/RPCServerHandlerTests.cpp b/tests/unit/web/ng/RPCServerHandlerTests.cpp index 2d121364..85606cb3 100644 --- a/tests/unit/web/ng/RPCServerHandlerTests.cpp +++ b/tests/unit/web/ng/RPCServerHandlerTests.cpp @@ -174,8 +174,7 @@ TEST_F(NgRpcServerHandlerTest, CoroutineSleepsUntilRpcEngineFinishes) EXPECT_CALL(dosguard_, add(ip_, testing::_)).WillOnce(Return(true)); EXPECT_CALL(*rpcEngine_, post).WillOnce([&](auto&& fn, auto&&) { boost::asio::spawn( - ctx_, - [this, &rpcEngineDone, fn = std::forward<decltype(fn)>(fn)](boost::asio::yield_context yield) { + ctx_, [this, &rpcEngineDone, fn = std::forward<decltype(fn)>(fn)](boost::asio::yield_context yield) { EXPECT_CALL(*rpcEngine_, notifyBadSyntax); fn(yield); rpcEngineDone.Call(); @@ -440,9 +439,9 @@ TEST_F(NgRpcServerHandlerTest, HandleRequest_OutdatedWarning) std::unordered_set<int64_t> warningCodes; std::ranges::transform( - jsonResponse.at("warnings").as_array(), - std::inserter(warningCodes, warningCodes.end()), - [](auto const& w) { return w.as_object().at("id").as_int64(); } + jsonResponse.at("warnings").as_array(), std::inserter(warningCodes, warningCodes.end()), [](auto const& w) { + return w.as_object().at("id").as_int64(); + } ); EXPECT_EQ(warningCodes.size(), 2); @@ -464,9 +463,11 @@ TEST_F(NgRpcServerHandlerTest, HandleRequest_Successful_HttpRequest_Forwarded) EXPECT_CALL(*rpcEngine_, post).WillOnce([&](auto&& fn, auto&&) { EXPECT_CALL(connectionMetadata_, wasUpgraded).WillRepeatedly(Return(not request.isHttp())); EXPECT_CALL(*rpcEngine_, buildResponse) - .WillOnce(Return(rpc::Result{rpc::ReturnType{boost::json::object{ - {"result", boost::json::object{{"some key", "some value"}}}, {"forwarded", true} - }}})); + .WillOnce(Return( + rpc::Result{rpc::ReturnType{boost::json::object{ + {"result", boost::json::object{{"some key", "some value"}}}, {"forwarded", true} + }}} + )); EXPECT_CALL(*rpcEngine_, notifyComplete); EXPECT_CALL(*etl_, 
lastCloseAgeSeconds).WillOnce(Return(1)); fn(yield); @@ -500,9 +501,11 @@ TEST_F(NgRpcServerHandlerTest, HandleRequest_Successful_HttpRequest_HasError) EXPECT_CALL(*rpcEngine_, post).WillOnce([&](auto&& fn, auto&&) { EXPECT_CALL(connectionMetadata_, wasUpgraded).WillRepeatedly(Return(not request.isHttp())); EXPECT_CALL(*rpcEngine_, buildResponse) - .WillOnce(Return(rpc::Result{ - rpc::ReturnType{boost::json::object{{"some key", "some value"}, {"error", "some error"}}} - })); + .WillOnce(Return( + rpc::Result{ + rpc::ReturnType{boost::json::object{{"some key", "some value"}, {"error", "some error"}}} + } + )); EXPECT_CALL(*rpcEngine_, notifyComplete); EXPECT_CALL(*etl_, lastCloseAgeSeconds).WillOnce(Return(1)); fn(yield); @@ -587,9 +590,11 @@ TEST_F(NgRpcServerHandlerWsTest, HandleRequest_Successful_WsRequest_HasError) EXPECT_CALL(*rpcEngine_, post).WillOnce([&](auto&& fn, auto&&) { EXPECT_CALL(connectionMetadata_, wasUpgraded).WillRepeatedly(Return(not request.isHttp())); EXPECT_CALL(*rpcEngine_, buildResponse) - .WillOnce(Return(rpc::Result{ - rpc::ReturnType{boost::json::object{{"some key", "some value"}, {"error", "some error"}}} - })); + .WillOnce(Return( + rpc::Result{ + rpc::ReturnType{boost::json::object{{"some key", "some value"}, {"error", "some error"}}} + } + )); EXPECT_CALL(*rpcEngine_, notifyComplete); EXPECT_CALL(*etl_, lastCloseAgeSeconds).WillOnce(Return(1)); fn(yield);