Mirror of https://github.com/Xahau/xahaud.git (synced 2025-11-07 12:15:49 +00:00)
Compare commits: patch-tsh...fix-warnin (2 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 44d7d6fef7 |  |
|  | 7b581443d1 |  |
@@ -311,10 +311,10 @@ Ledger::Ledger(
     Family& family,
     SHAMap const& baseState)
     : mImmutable(false)
-    , info_(info)
     , txMap_(SHAMapType::TRANSACTION, family)
     , stateMap_(baseState, true)
     , rules_{config.features}
+    , info_(info)
     , j_(beast::Journal(beast::Journal::getNullSink()))
 {
 }
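The initializer-list reshuffle above matches the pattern clang and gcc flag with `-Wreorder`: data members are always initialized in declaration order, regardless of the order written in the mem-initializer list, so the list is rewritten to match the declarations. A minimal standalone sketch of the warning (the class and members here are illustrative, not taken from the xahaud sources):

```cpp
#include <iostream>

struct Example
{
    int first;   // declared first, so initialized first
    int second;  // declared second, so initialized second

    // -Wreorder: the list names `second` before `first`, but the members
    // are still initialized in declaration order; rewriting the list in
    // declaration order silences the warning without changing behaviour.
    Example() : second(2), first(1)
    {
    }
};

int main()
{
    Example e;
    std::cout << e.first << ' ' << e.second << '\n';  // prints "1 2"
}
```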
@@ -199,7 +199,7 @@ public:
     strOperatingMode(bool const admin = false) const override;

     StateAccounting::CounterData
-    getStateAccountingData();
+    getStateAccountingData() override;

     //
     // Transaction operations.
@@ -711,10 +711,10 @@ private:
     std::mutex validationsMutex_;

     RCLConsensus&
-    getConsensus();
+    getConsensus() override;

     LedgerMaster&
-    getLedgerMaster();
+    getLedgerMaster() override;

 private:
     struct Stats
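The two hunks above add `override` to virtual member functions, the usual way to satisfy clang's `-Winconsistent-missing-override` (or gcc's `-Wsuggest-override`). Besides silencing the warning, `override` turns an accidental signature mismatch into a hard error; a small sketch with invented class names:

```cpp
struct Base
{
    virtual ~Base() = default;
    virtual int value() const = 0;
};

struct Derived : Base
{
    // Matches the base signature; `override` documents and enforces that.
    int value() const override
    {
        return 42;
    }

    // Uncommenting this would be a compile error: without `const` it does
    // not override Base::value, and `override` refuses to let it silently
    // become a brand-new function.
    // int value() override { return 0; }
};
```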
@@ -296,8 +296,6 @@ SHAMapStoreImp::run()
     fullBelowCache_ = &(*app_.getNodeFamily().getFullBelowCache(0));
     treeNodeCache_ = &(*app_.getNodeFamily().getTreeNodeCache(0));

-    bool const isMem = app_.config().mem_backend();
-
     if (advisoryDelete_)
         canDelete_ = state_db_.getCanDelete();

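Deleting the `isMem` local is the typical response to `-Wunused-variable`: the value is computed but never read, so the simplest fix is to drop it (or fold the call into the single place that needs it). A generic illustration, not taken from the xahaud code:

```cpp
// Compile with -Wall; the comment marks where the diagnostic would fire.
namespace {
int configuredLimit()
{
    return 256;  // stand-in for a value read from configuration
}
}  // namespace

int effectiveLimit()
{
    bool const isLarge = configuredLimit() > 128;  // -Wunused-variable: never read
    return configuredLimit();
}

int main()
{
    return effectiveLimit() == 256 ? 0 : 1;
}
```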
@@ -52,8 +52,6 @@ private:
     };

     Application& app_;
-    Config const& config_;
-    JobQueue& jobQueue_;

     boost::unordered::concurrent_flat_map<LedgerIndex, LedgerData> ledgers_;
     boost::unordered::
@@ -67,7 +65,7 @@ private:

 public:
     FlatmapDatabase(Application& app, Config const& config, JobQueue& jobQueue)
-        : app_(app), config_(config), jobQueue_(jobQueue)
+        : app_(app)
     {
     }

@@ -794,7 +794,6 @@ public:
         // reached before the result set has been exhausted (we always query for
         // one more than the limit), then we return an opaque marker that can be
         // supplied in a subsequent query.
         std::uint32_t queryLimit = numberOfResults + 1;
-        std::uint32_t findLedger = 0, findSeq = 0;

         if (lookingForMarker)
@@ -446,8 +446,8 @@ EscrowFinish::preflight(PreflightContext const& ctx)
     {
         if ((!ctx.tx.isFieldPresent(sfEscrowID) &&
              !ctx.tx.isFieldPresent(sfOfferSequence)) ||
-            ctx.tx.isFieldPresent(sfEscrowID) &&
-                ctx.tx.isFieldPresent(sfOfferSequence))
+            (ctx.tx.isFieldPresent(sfEscrowID) &&
+             ctx.tx.isFieldPresent(sfOfferSequence)))
             return temMALFORMED;
     }

@@ -729,8 +729,8 @@ EscrowCancel::preflight(PreflightContext const& ctx)
     {
         if ((!ctx.tx.isFieldPresent(sfEscrowID) &&
              !ctx.tx.isFieldPresent(sfOfferSequence)) ||
-            ctx.tx.isFieldPresent(sfEscrowID) &&
-                ctx.tx.isFieldPresent(sfOfferSequence))
+            (ctx.tx.isFieldPresent(sfEscrowID) &&
+             ctx.tx.isFieldPresent(sfOfferSequence)))
             return temMALFORMED;
     }

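Both Escrow hunks wrap the second `&&` clause in its own parentheses. Since `&&` already binds tighter than `||`, the meaning is unchanged; the parentheses exist to satisfy `-Wlogical-op-parentheses` / `-Wparentheses`, which ask for explicit grouping when `&&` appears inside `||`. A condensed sketch of the same condition with placeholder booleans:

```cpp
// Malformed when the transaction carries neither field or both fields.
bool malformed(bool hasEscrowID, bool hasOfferSequence)
{
    // Without the inner parentheses clang warns:
    //   '&&' within '||'; place parentheses around the '&&' expression
    // The grouping below is what the operators mean anyway; writing it out
    // makes the "exactly one of the two" intent explicit.
    return (!hasEscrowID && !hasOfferSequence) ||
        (hasEscrowID && hasOfferSequence);
}
```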
@@ -924,9 +924,8 @@ Reader::getLocationLineAndColumn(Location location) const
 {
     int line, column;
     getLocationLineAndColumn(location, line, column);
-    char buffer[18 + 16 + 16 + 1];
-    sprintf(buffer, "Line %d, Column %d", line, column);
-    return buffer;
+    return "Line " + std::to_string(line) + ", Column " +
+        std::to_string(column);
 }

 std::string
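The JSON reader change drops the fixed-size `char` buffer and `sprintf` in favour of `std::string` concatenation with `std::to_string`, removing the deprecation/insecurity warning around `sprintf` along with any risk of under-sizing the buffer. A standalone sketch of the same transformation:

```cpp
#include <string>

// Before: sprintf into a hand-sized stack buffer, returned as a std::string.
// After: let std::string own the formatting and the memory.
std::string
describePosition(int line, int column)
{
    return "Line " + std::to_string(line) + ", Column " +
        std::to_string(column);
}
```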
@@ -24,7 +24,11 @@
 #include <ripple/core/Config.h>
 #include <boost/asio/io_service.hpp>
 #include <boost/asio/streambuf.hpp>

+#include <chrono>
+#include <deque>
+#include <functional>
+#include <string>

 namespace ripple {

@@ -25,7 +25,9 @@
 #include <ripple/beast/rfc2616.h>
 #include <ripple/overlay/impl/Handshake.h>
 #include <ripple/protocol/digest.h>

 #include <boost/regex.hpp>

+#include <algorithm>
+#include <chrono>

@@ -24,6 +24,7 @@
 #include <ripple/protocol/impl/secp256k1.h>
 #include <boost/multiprecision/cpp_int.hpp>
 #include <ed25519-donna/ed25519.h>
+#include <type_traits>

 namespace ripple {

@@ -69,7 +69,7 @@ static constexpr uint16_t CATALOGUE_VERSION_MASK =
     0x00FF;  // Lower 8 bits for version
 static constexpr uint16_t CATALOGUE_COMPRESS_LEVEL_MASK =
     0x0F00;  // Bits 8-11: compression level
-static constexpr uint16_t CATALOGUE_RESERVED_MASK =
+[[maybe_unused]] static constexpr uint16_t CATALOGUE_RESERVED_MASK =
     0xF000;  // Bits 12-15: reserved

 std::string
@@ -229,7 +229,7 @@ class CatalogueSizePredictor
 private:
     uint32_t minLedger_;
     uint32_t maxLedger_;
-    uint64_t headerSize_;
+    [[maybe_unused]] uint64_t headerSize_;

     // Keep track of actual bytes
     uint64_t totalBytesWritten_;
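Where a declaration has to stay even though nothing references it yet, as with the reserved bit mask and `headerSize_` above, the change annotates it with `[[maybe_unused]]` instead of deleting it; that suppresses `-Wunused-const-variable` / `-Wunused-private-field` without altering behaviour. A minimal illustration with invented names:

```cpp
#include <cstdint>

// Kept for documentation and forward compatibility; the attribute tells the
// compiler the absence of uses is intentional.
[[maybe_unused]] static constexpr std::uint16_t kReservedMask = 0xF000;

class Tracker
{
private:
    [[maybe_unused]] std::uint64_t reservedSlot_ = 0;  // not read anywhere yet
    std::uint64_t bytesWritten_ = 0;

public:
    void
    add(std::uint64_t n)
    {
        bytesWritten_ += n;
    }

    std::uint64_t
    total() const
    {
        return bytesWritten_;
    }
};
```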
@@ -246,9 +246,9 @@ public:
         : minLedger_(minLedger)
         , maxLedger_(maxLedger)
         , headerSize_(headerSize)
-        , processedLedgers_(0)
         , totalBytesWritten_(headerSize)
         , firstLedgerSize_(0)
+        , processedLedgers_(0)
     {
     }

@@ -499,11 +499,6 @@ ServerHandlerImp::processUDP(
             return jr;
         }

-        auto required = RPC::roleRequired(
-            apiVersion,
-            app_.config().BETA_RPC_API,
-            jv.isMember(jss::command) ? jv[jss::command].asString()
-                                      : jv[jss::method].asString());
         if (Role::FORBID == role)
         {
             jr[jss::result] = rpcError(rpcFORBIDDEN);
@@ -1301,10 +1301,8 @@ SHAMap::serializeToStream(

     std::size_t nodeCount = 0;

-    auto serializeLeaf = [&stream,
-                          &localBytesWritten,
-                          flushThreshold,
-                          &tryFlush](SHAMapLeafNode const& node) -> bool {
+    auto serializeLeaf = [&stream, &localBytesWritten, &tryFlush](
+                             SHAMapLeafNode const& node) -> bool {
         // write the node type
         auto t = node.getType();
         stream.write(reinterpret_cast<char const*>(&t), 1);
@@ -1335,10 +1333,8 @@ SHAMap::serializeToStream(
         return !stream.fail();
     };

-    auto serializeRemovedLeaf = [&stream,
-                                 &localBytesWritten,
-                                 flushThreshold,
-                                 &tryFlush](uint256 const& key) -> bool {
+    auto serializeRemovedLeaf =
+        [&stream, &localBytesWritten, &tryFlush](uint256 const& key) -> bool {
         // to indicate a node is removed it is written with a removal type
         auto t = SHAMapNodeType::tnREMOVE;
         stream.write(reinterpret_cast<char const*>(&t), 1);
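Both serializer lambdas drop `flushThreshold` from their capture lists; clang's `-Wunused-lambda-capture` flags captures the body never touches, and removing them also lets the captures fit on fewer lines. A small self-contained sketch:

```cpp
#include <cstddef>
#include <sstream>

std::size_t
writeAll(std::ostringstream& stream)
{
    std::size_t bytesWritten = 0;

    // Capture only what the body actually reads; an extra name in the
    // capture list (say, a flush threshold the lambda never uses) would
    // trigger -Wunused-lambda-capture under clang.
    auto writeByte = [&stream, &bytesWritten](char byte) -> bool {
        stream.write(&byte, 1);
        ++bytesWritten;
        return !stream.fail();
    };

    for (char c : {'a', 'b', 'c'})
        if (!writeByte(c))
            break;

    return bytesWritten;
}
```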
@@ -1110,7 +1110,6 @@ public:
         env.fund(XRP(10000000), bob);

-        auto const preHookCount = (*env.le(alice))[sfHookStateCount];
         auto const preOwnerCount = (*env.le(alice))[sfOwnerCount];

         std::string hook =
             "0061736D01000000012A0660057F7F7F7F7F017E60027F7F017E60027F7F017F60"
@@ -643,7 +643,7 @@ private:
                 auto const sleep_sec =
                     boost::lexical_cast<unsigned int>(path.substr(7));
                 std::this_thread::sleep_for(
-                    std::chrono::seconds{sleep_sec});
+                    std::chrono::seconds(sleep_sec));
             }
             else if (boost::starts_with(path, "/redirect"))
             {
@@ -110,14 +110,6 @@ public:
         }
         // test errors on marker
         {
-            auto const key = uint256::fromVoid(
-                (std::array<uint8_t, 32>{
-                     0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U,
-                     0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U,
-                     0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U, 0x00U,
-                     0x00U, 0x00U, 0x00U, 0x00U, 'k', 'e', 'y', 0x00U})
-                    .data());
-
             auto const ns = uint256::fromVoid(
                 (std::array<uint8_t, 32>{
                      0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU,
@@ -126,14 +118,6 @@ public:
                      0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU})
                     .data());

-            auto const nons = uint256::fromVoid(
-                (std::array<uint8_t, 32>{
-                     0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU,
-                     0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU,
-                     0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU,
-                     0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFEU, 0xCAU, 0xFFU})
-                    .data());
-
             // Lambda to create a hook.
             auto setHook = [](test::jtx::Account const& account) {
                 std::string const createCodeHex =
@@ -401,18 +401,8 @@ class Catalogue_test : public beast::unit_test::suite
             sourceLedger->info().accepted == loadedLedger->info().accepted);

         // Check SLE counts
-        std::size_t sourceCount = 0;
-        std::size_t loadedCount = 0;
-
-        for (auto const& sle : sourceLedger->sles)
-        {
-            sourceCount++;
-        }
-
-        for (auto const& sle : loadedLedger->sles)
-        {
-            loadedCount++;
-        }
+        std::size_t sourceCount = std::ranges::distance(sourceLedger->sles);
+        std::size_t loadedCount = std::ranges::distance(loadedLedger->sles);

         BEAST_EXPECT(sourceCount == loadedCount);

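Replacing the two counting loops with `std::ranges::distance` (C++20, declared in `<iterator>`) removes the loop variable `sle` that was never read, which is what the compiler had been warning about, and states the intent in one line per count. A sketch against a plain container rather than the ledger's SLE range:

```cpp
#include <cstddef>
#include <iterator>
#include <list>

int main()
{
    std::list<int> items{1, 2, 3, 4};

    // Equivalent to a hand-written counting loop, but with no unused loop
    // variable; for ranges that are not sized it simply walks them once.
    std::size_t const count = std::ranges::distance(items);

    return count == 4 ? 0 : 1;
}
```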
@@ -561,7 +551,13 @@ class Catalogue_test : public beast::unit_test::suite
         using namespace test::jtx;

         // Create environment and test data
-        Env env{*this, envconfig(), features};
+        Env env{
+            *this,
+            envconfig(),
+            features,
+            nullptr,
+            beast::severities::kDisabled,
+        };
         prepareLedgerData(env, 3);

         boost::filesystem::path tempDir =
@@ -652,7 +648,13 @@ class Catalogue_test : public beast::unit_test::suite
         using namespace test::jtx;

         // Create environment and test data
-        Env env{*this, envconfig(), features};
+        Env env{
+            *this,
+            envconfig(),
+            features,
+            nullptr,
+            beast::severities::kDisabled,
+        };
         prepareLedgerData(env, 3);

         boost::filesystem::path tempDir =
@@ -829,7 +831,7 @@ class Catalogue_test : public beast::unit_test::suite
         {
             auto result = env.client().invoke(
                 "catalogue_status", Json::objectValue)[jss::result];
-            std::cout << to_string(result) << "\n";
+            // std::cout << to_string(result) << "\n";
             BEAST_EXPECT(result[jss::job_status] == "no_job_running");
         }
