Make the SHAMap hash a distinct type from a uint256.

* Implement a type-safe distinction between a node hash and a node ID.
* This is just the first phase, limited to SHAMap's internals.
Howard Hinnant
2015-11-10 13:51:28 -05:00
committed by Nik Bougalis
parent 49c4a063c1
commit 21a7b62c2b
17 changed files with 182 additions and 134 deletions
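
The idea behind the change is a strong wrapper type: SHAMapHash holds a uint256 but does not convert to or from it implicitly, so a node hash can no longer be confused with a node ID or any other raw 256-bit value. The actual class appears in the header hunk below; what follows is only a minimal, self-contained sketch of the caller-side pattern the diff adopts (explicit wrapping at call sites, as_uint256() where a raw value is still required). The Hash256 alias, NodeHash class, and fetchRoot function here are stand-ins invented for illustration, not rippled code.

#include <array>
#include <cstdint>
#include <iostream>

using Hash256 = std::array<std::uint8_t, 32>;   // stand-in for ripple's uint256

// Plays the role of SHAMapHash: same storage as Hash256, distinct type.
class NodeHash
{
    Hash256 hash_{};
public:
    NodeHash() = default;
    explicit NodeHash(Hash256 const& h) : hash_(h) {}     // explicit: no silent conversion
    Hash256 const& as_uint256() const { return hash_; }   // unwrap at API boundaries
    friend bool operator==(NodeHash const& x, NodeHash const& y)
    {
        return x.hash_ == y.hash_;
    }
};

// A function that wants a node *hash*, never a node ID or other raw value
// (mirrors signatures such as fetchRoot(SHAMapHash const&, ...) in the diff below).
bool fetchRoot(NodeHash const& hash)
{
    return hash.as_uint256()[0] == 0;   // dummy body for the sketch
}

int main()
{
    Hash256 raw{};                 // e.g. a ledger's txHash, still stored as a plain uint256
    // fetchRoot(raw);             // does not compile: a raw Hash256 is not a NodeHash
    NodeHash wrapped{raw};         // callers wrap explicitly, as in SHAMapHash{info_.txHash}
    std::cout << std::boolalpha << fetchRoot(wrapped) << '\n';
    Hash256 const& back = wrapped.as_uint256();   // explicit unwrap where uint256 is required
    std::cout << back.size() << " bytes\n";
}

Because the constructor is explicit and there is no implicit conversion back, every place that crosses the boundary between the two types has to say so in the source, which is exactly the churn visible in the hunks below.
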

View File

@@ -225,14 +225,14 @@ Ledger::Ledger (uint256 const& parentHash,
loaded = true;
if (info_.txHash.isNonZero () &&
!txMap_->fetchRoot (info_.txHash, nullptr))
!txMap_->fetchRoot (SHAMapHash{info_.txHash}, nullptr))
{
loaded = false;
JLOG (j.warning) << "Don't have TX root for ledger";
}
if (info_.accountHash.isNonZero () &&
!stateMap_->fetchRoot (info_.accountHash, nullptr))
!stateMap_->fetchRoot (SHAMapHash{info_.accountHash}, nullptr))
{
loaded = false;
JLOG (j.warning) << "Don't have AS root for ledger";
@@ -350,12 +350,12 @@ void Ledger::updateHash()
if (! mImmutable)
{
if (txMap_)
info_.txHash = txMap_->getHash ();
info_.txHash = txMap_->getHash ().as_uint256();
else
info_.txHash.zero ();
if (stateMap_)
info_.accountHash = stateMap_->getHash ();
info_.accountHash = stateMap_->getHash ().as_uint256();
else
info_.accountHash.zero ();
}
@@ -609,12 +609,12 @@ auto
Ledger::digest (key_type const& key) const ->
boost::optional<digest_type>
{
digest_type digest;
SHAMapHash digest;
// VFALCO Unfortunately this loads the item
// from the NodeStore needlessly.
if (! stateMap_->peekItem(key, digest))
return boost::none;
return digest;
return digest.as_uint256();
}
//------------------------------------------------------------------------------
@@ -794,9 +794,9 @@ bool Ledger::walkLedger (beast::Journal j) const
if (stateMap_->getHash().isZero() &&
! info_.accountHash.isZero() &&
! stateMap_->fetchRoot (info_.accountHash, nullptr))
! stateMap_->fetchRoot (SHAMapHash{info_.accountHash}, nullptr))
{
missingNodes1.emplace_back (SHAMapType::STATE, info_.accountHash);
missingNodes1.emplace_back (SHAMapType::STATE, SHAMapHash{info_.accountHash});
}
else
{
@@ -813,9 +813,9 @@ bool Ledger::walkLedger (beast::Journal j) const
if (txMap_->getHash().isZero() &&
info_.txHash.isNonZero() &&
! txMap_->fetchRoot (info_.txHash, nullptr))
! txMap_->fetchRoot (SHAMapHash{info_.txHash}, nullptr))
{
missingNodes2.emplace_back (SHAMapType::TRANSACTION, info_.txHash);
missingNodes2.emplace_back (SHAMapType::TRANSACTION, SHAMapHash{info_.txHash});
}
else
{
@@ -839,8 +839,8 @@ bool Ledger::assertSane (beast::Journal ledgerJ)
info_.accountHash.isNonZero () &&
stateMap_ &&
txMap_ &&
(info_.accountHash == stateMap_->getHash ()) &&
(info_.txHash == txMap_->getHash ()))
(info_.accountHash == stateMap_->getHash ().as_uint256()) &&
(info_.txHash == txMap_->getHash ().as_uint256()))
{
return true;
}
@@ -968,7 +968,7 @@ static bool saveValidatedLedger (
assert (false);
}
if (ledger->info().accountHash != ledger->stateMap().getHash ())
if (ledger->info().accountHash != ledger->stateMap().getHash ().as_uint256())
{
JLOG (j.fatal) << "sAL: " << ledger->info().accountHash
<< " != " << ledger->stateMap().getHash ();
@@ -977,7 +977,7 @@ static bool saveValidatedLedger (
assert (false);
}
assert (ledger->info().txHash == ledger->txMap().getHash ());
assert (ledger->info().txHash == ledger->txMap().getHash ().as_uint256());
// Save the ledger header in the hashed object store
{

View File

@@ -148,7 +148,7 @@ ConsensusImp::storeProposal (
void
ConsensusImp::takePosition (int seq, std::shared_ptr<SHAMap> const& position)
{
recentPositions_[position->getHash ()] = std::make_pair (seq, position);
recentPositions_[position->getHash ().as_uint256()] = std::make_pair (seq, position);
if (recentPositions_.size () > 4)
{

View File

@@ -217,7 +217,7 @@ bool InboundLedger::tryLocal ()
TransactionStateSF filter(app_);
if (mLedger->txMap().fetchRoot (
mLedger->info().txHash, &filter))
SHAMapHash{mLedger->info().txHash}, &filter))
{
auto h (mLedger->getNeededTransactionHashes (1, &filter));
@@ -245,7 +245,7 @@ bool InboundLedger::tryLocal ()
AccountStateSF filter(app_);
if (mLedger->stateMap().fetchRoot (
mLedger->info().accountHash, &filter))
SHAMapHash{mLedger->info().accountHash}, &filter))
{
auto h (mLedger->getNeededAccountStateHashes (1, &filter));
@@ -854,7 +854,7 @@ bool InboundLedger::takeTxNode (const std::vector<SHAMapNodeID>& nodeIDs,
if (nodeIDit->isRoot ())
{
san += mLedger->txMap().addRootNode (
mLedger->info().txHash, *nodeDatait, snfWIRE, &tFilter);
SHAMapHash{mLedger->info().txHash}, *nodeDatait, snfWIRE, &tFilter);
if (!san.isGood())
return false;
}
@@ -920,7 +920,7 @@ bool InboundLedger::takeAsNode (const std::vector<SHAMapNodeID>& nodeIDs,
if (nodeIDit->isRoot ())
{
san += mLedger->stateMap().addRootNode (
mLedger->info().accountHash, *nodeDatait, snfWIRE, &tFilter);
SHAMapHash{mLedger->info().accountHash}, *nodeDatait, snfWIRE, &tFilter);
if (!san.isGood ())
{
if (m_journal.warning) m_journal.warning <<
@@ -977,7 +977,7 @@ bool InboundLedger::takeAsRootNode (Blob const& data, SHAMapAddNode& san)
AccountStateSF tFilter(app_);
san += mLedger->stateMap().addRootNode (
mLedger->info().accountHash, data, snfWIRE, &tFilter);
SHAMapHash{mLedger->info().accountHash}, data, snfWIRE, &tFilter);
return san.isGood();
}
@@ -1000,7 +1000,7 @@ bool InboundLedger::takeTxRootNode (Blob const& data, SHAMapAddNode& san)
TransactionStateSF tFilter(app_);
san += mLedger->txMap().addRootNode (
mLedger->info().txHash, data, snfWIRE, &tFilter);
SHAMapHash{mLedger->info().txHash}, data, snfWIRE, &tFilter);
return san.isGood();
}

View File

@@ -257,7 +257,7 @@ public:
auto newNode = SHAMapAbstractNode::make(
Blob (node.nodedata().begin(), node.nodedata().end()),
0, snfWIRE, uZero, false, app_.journal ("SHAMapNodeID"));
0, snfWIRE, SHAMapHash{uZero}, false, app_.journal ("SHAMapNodeID"));
if (!newNode)
return;
@@ -268,7 +268,7 @@ public:
auto blob = std::make_shared<Blob> (s.begin(), s.end());
app_.getLedgerMaster().addFetchPack(
newNode->getNodeHash(), blob);
newNode->getNodeHash().as_uint256(), blob);
}
}
catch (...)

View File

@@ -445,7 +445,7 @@ void LedgerConsensusImp::mapCompleteInternal (
return;
}
assert (hash == map->getHash ());
assert (hash == map->getHash ().as_uint256());
auto it = mAcquired.find (hash);
@@ -505,7 +505,7 @@ void LedgerConsensusImp::mapCompleteInternal (
std::vector<NodeID> peers;
for (auto& it : mPeerPositions)
{
if (it.second->getCurrentHash () == map->getHash ())
if (it.second->getCurrentHash () == map->getHash ().as_uint256())
peers.push_back (it.second->getPeerID ());
}
@@ -981,7 +981,7 @@ void LedgerConsensusImp::accept (std::shared_ptr<SHAMap> set)
if (set->getHash ().isNonZero ())
consensus_.takePosition (mPreviousLedger->info().seq, set);
assert (set->getHash () == mOurPosition->getCurrentHash ());
assert (set->getHash ().as_uint256() == mOurPosition->getCurrentHash ());
}
auto closeTime = mOurPosition->getCloseTime ();
@@ -1020,7 +1020,7 @@ void LedgerConsensusImp::accept (std::shared_ptr<SHAMap> set)
<< ", close " << closeTime << (closeTimeCorrect ? "" : "X");
// Put transactions into a deterministic, but unpredictable, order
CanonicalTXSet retriableTxs (set->getHash ());
CanonicalTXSet retriableTxs (set->getHash ().as_uint256());
// Build the new last closed ledger
auto newLCL = std::make_shared<Ledger>(
@@ -1464,7 +1464,7 @@ void LedgerConsensusImp::takeInitialPosition (
// Tell the ledger master not to acquire the ledger we're probably building
ledgerMaster_.setBuildingLedger (mPreviousLedger->info().seq + 1);
uint256 txSet = initialSet->getHash ();
auto txSet = initialSet->getHash ().as_uint256();
JLOG (j_.info) << "initial position " << txSet;
mapCompleteInternal (txSet, initialSet, false);
@@ -1489,7 +1489,7 @@ void LedgerConsensusImp::takeInitialPosition (
if (iit != mAcquired.end ())
{
mCompares.insert(iit->second->getHash());
mCompares.insert(iit->second->getHash().as_uint256());
createDisputes (initialSet, iit->second);
}
}
@@ -1680,7 +1680,7 @@ void LedgerConsensusImp::updateOurPositions ()
if (changes)
{
uint256 newHash = ourPosition->getHash ();
auto newHash = ourPosition->getHash ().as_uint256();
JLOG (j_.info)
<< "Position change: CTime " << closeTime
<< ", tx " << newHash;

View File

@@ -204,7 +204,8 @@ SHAMapAddNode TransactionAcquire::takeNodes (const std::list<SHAMapNodeID>& node
{
if (mHaveRoot)
JLOG (j_.debug) << "Got root TXS node, already have it";
else if (!mMap->addRootNode (getHash (), *nodeDatait, snfWIRE, nullptr).isGood())
else if (!mMap->addRootNode (SHAMapHash{getHash ()},
*nodeDatait, snfWIRE, nullptr).isGood())
{
JLOG (j_.warning) << "TX acquire got bad root node";
}

View File

@@ -254,7 +254,7 @@ SHAMapStoreImp::copyNode (std::uint64_t& nodeCount,
SHAMapAbstractNode const& node)
{
// Copy a single record from node to database_
database_->fetchNode (node.getNodeHash());
database_->fetchNode (node.getNodeHash().as_uint256());
if (! (++nodeCount % checkHealthInterval_))
{
if (health())

View File

@@ -190,9 +190,9 @@ private:
{
std::uint64_t check = 0;
for (uint256 it: cache.getKeys())
for (uint256 const& key: cache.getKeys())
{
database_->fetchNode (it);
database_->fetchNode (key);
if (! (++check % checkHealthInterval_) && health())
return true;
}

View File

@@ -135,14 +135,14 @@ public:
// Handles copy on write for mutable snapshots.
std::shared_ptr<SHAMap> snapShot (bool isMutable) const;
void setLedgerSeq (std::uint32_t lseq);
bool fetchRoot (uint256 const& hash, SHAMapSyncFilter * filter);
bool fetchRoot (SHAMapHash const& hash, SHAMapSyncFilter * filter);
// normal hash access functions
bool hasItem (uint256 const& id) const;
bool delItem (uint256 const& id);
bool addItem (SHAMapItem const& i, bool isTransaction, bool hasMeta);
bool addItem (SHAMapItem&& i, bool isTransaction, bool hasMeta);
uint256 getHash () const;
SHAMapHash getHash () const;
// save a copy if you have a temporary anyway
bool updateGiveItem (std::shared_ptr<SHAMapItem const> const&,
@@ -164,7 +164,7 @@ public:
// of the SHAMapItem beyond this SHAMap
std::shared_ptr<SHAMapItem const> const& peekItem (uint256 const& id) const;
std::shared_ptr<SHAMapItem const> const&
peekItem (uint256 const& id, uint256 & hash) const;
peekItem (uint256 const& id, SHAMapHash& hash) const;
std::shared_ptr<SHAMapItem const> const&
peekItem (uint256 const& id, SHAMapTreeNode::TNType & type) const;
@@ -187,8 +187,8 @@ public:
bool getRootNode (Serializer & s, SHANodeFormat format) const;
std::vector<uint256> getNeededHashes (int max, SHAMapSyncFilter * filter);
SHAMapAddNode addRootNode (uint256 const& hash, Blob const& rootNode, SHANodeFormat format,
SHAMapSyncFilter * filter);
SHAMapAddNode addRootNode (SHAMapHash const& hash, Blob const& rootNode,
SHANodeFormat format, SHAMapSyncFilter * filter);
SHAMapAddNode addRootNode (Blob const& rootNode, SHANodeFormat format,
SHAMapSyncFilter * filter);
SHAMapAddNode addKnownNode (SHAMapNodeID const& nodeID, Blob const& rawNode,
@@ -230,18 +230,18 @@ private:
int unshare ();
// tree node cache operations
std::shared_ptr<SHAMapAbstractNode> getCache (uint256 const& hash) const;
void canonicalize (uint256 const& hash, std::shared_ptr<SHAMapAbstractNode>&) const;
std::shared_ptr<SHAMapAbstractNode> getCache (SHAMapHash const& hash) const;
void canonicalize (SHAMapHash const& hash, std::shared_ptr<SHAMapAbstractNode>&) const;
// database operations
std::shared_ptr<SHAMapAbstractNode> fetchNodeFromDB (uint256 const& hash) const;
std::shared_ptr<SHAMapAbstractNode> fetchNodeNT (uint256 const& hash) const;
std::shared_ptr<SHAMapAbstractNode> fetchNodeFromDB (SHAMapHash const& hash) const;
std::shared_ptr<SHAMapAbstractNode> fetchNodeNT (SHAMapHash const& hash) const;
std::shared_ptr<SHAMapAbstractNode> fetchNodeNT (
SHAMapNodeID const& id,
uint256 const& hash,
SHAMapHash const& hash,
SHAMapSyncFilter *filter) const;
std::shared_ptr<SHAMapAbstractNode> fetchNode (uint256 const& hash) const;
std::shared_ptr<SHAMapAbstractNode> checkFilter(uint256 const& hash,
std::shared_ptr<SHAMapAbstractNode> fetchNode (SHAMapHash const& hash) const;
std::shared_ptr<SHAMapAbstractNode> checkFilter(SHAMapHash const& hash,
SHAMapNodeID const& id, SHAMapSyncFilter* filter) const;
/** Update hashes up to the root */
@@ -295,8 +295,8 @@ private:
/** If there is only one leaf below this node, get its contents */
std::shared_ptr<SHAMapItem const> const& onlyBelow (SHAMapAbstractNode*) const;
bool hasInnerNode (SHAMapNodeID const& nodeID, uint256 const& hash) const;
bool hasLeafNode (uint256 const& tag, uint256 const& hash) const;
bool hasInnerNode (SHAMapNodeID const& nodeID, SHAMapHash const& hash) const;
bool hasLeafNode (uint256 const& tag, SHAMapHash const& hash) const;
SHAMapItem const* peekFirstItem(NodeStack& stack) const;
SHAMapItem const* peekNextItem(uint256 const& id, NodeStack& stack) const;

View File

@@ -21,6 +21,7 @@
#define RIPPLE_SHAMAP_SHAMAPMISSINGNODE_H_INCLUDED
#include <ripple/basics/base_uint.h>
#include <ripple/shamap/SHAMapTreeNode.h>
#include <iosfwd>
#include <stdexcept>
@@ -38,29 +39,28 @@ class SHAMapMissingNode
{
private:
SHAMapType mType;
uint256 mNodeHash;
SHAMapHash mNodeHash;
uint256 mNodeID;
public:
SHAMapMissingNode (SHAMapType t,
uint256 const& nodeHash)
SHAMapHash const& nodeHash)
: std::runtime_error ("SHAMapMissingNode")
, mType (t)
, mNodeHash (nodeHash)
{
}
SHAMapType getMapType () const
SHAMapMissingNode (SHAMapType t,
uint256 const& nodeID)
: std::runtime_error ("SHAMapMissingNode")
, mType (t)
, mNodeID (nodeID)
{
return mType;
}
uint256 const& getNodeHash () const
{
return mNodeHash;
}
friend std::ostream& operator<< (std::ostream&, SHAMapMissingNode const&);
};
extern std::ostream& operator<< (std::ostream&, SHAMapMissingNode const&);
} // ripple
#endif

View File

@@ -39,6 +39,48 @@ enum SHANodeFormat
snfHASH = 3, // just the hash
};
// A SHAMapHash is the hash of a node in a SHAMap, and also the
// type of the hash of the entire SHAMap.
class SHAMapHash
{
uint256 hash_;
public:
SHAMapHash() = default;
explicit SHAMapHash(uint256 const& hash)
: hash_(hash)
{}
uint256 const& as_uint256() const {return hash_;}
uint256& as_uint256() {return hash_;}
bool isZero() const {return hash_.isZero();}
bool isNonZero() const {return hash_.isNonZero();}
int signum() const {return hash_.signum();}
void zero() {hash_.zero();}
friend bool operator==(SHAMapHash const& x, SHAMapHash const& y)
{
return x.hash_ == y.hash_;
}
friend bool operator<(SHAMapHash const& x, SHAMapHash const& y)
{
return x.hash_ < y.hash_;
}
friend std::ostream& operator<<(std::ostream& os, SHAMapHash const& x)
{
return os << x.hash_;
}
friend std::string to_string(SHAMapHash const& x) {return to_string(x.hash_);}
};
inline
bool operator!=(SHAMapHash const& x, SHAMapHash const& y)
{
return !(x == y);
}
class SHAMapAbstractNode
{
public:
@@ -53,7 +95,7 @@ public:
protected:
TNType mType;
uint256 mHash;
SHAMapHash mHash;
std::uint32_t mSeq;
protected:
@@ -62,12 +104,12 @@ protected:
SHAMapAbstractNode& operator=(SHAMapAbstractNode const&) = delete;
SHAMapAbstractNode(TNType type, std::uint32_t seq);
SHAMapAbstractNode(TNType type, std::uint32_t seq, uint256 const& hash);
SHAMapAbstractNode(TNType type, std::uint32_t seq, SHAMapHash const& hash);
public:
std::uint32_t getSeq () const;
void setSeq (std::uint32_t s);
uint256 const& getNodeHash () const;
SHAMapHash const& getNodeHash () const;
TNType getType () const;
bool isLeaf () const;
bool isInner () const;
@@ -81,7 +123,7 @@ public:
static std::shared_ptr<SHAMapAbstractNode>
make(Blob const& rawNode, std::uint32_t seq, SHANodeFormat format,
uint256 const& hash, bool hashValid, beast::Journal j);
SHAMapHash const& hash, bool hashValid, beast::Journal j);
// debugging
#ifdef BEAST_DEBUG
@@ -92,7 +134,7 @@ public:
class SHAMapInnerNode
: public SHAMapAbstractNode
{
uint256 mHashes[16];
SHAMapHash mHashes[16];
std::shared_ptr<SHAMapAbstractNode> mChildren[16];
int mIsBranch = 0;
std::uint32_t mFullBelowGen = 0;
@@ -105,7 +147,7 @@ public:
bool isEmpty () const;
bool isEmptyBranch (int m) const;
int getBranchCount () const;
uint256 const& getChildHash (int m) const;
SHAMapHash const& getChildHash (int m) const;
void setChild(int m, std::shared_ptr<SHAMapAbstractNode> const& child);
void shareChild (int m, std::shared_ptr<SHAMapAbstractNode> const& child);
@@ -125,7 +167,7 @@ public:
friend std::shared_ptr<SHAMapAbstractNode>
SHAMapAbstractNode::make(Blob const& rawNode, std::uint32_t seq,
SHANodeFormat format, uint256 const& hash, bool hashValid,
SHANodeFormat format, SHAMapHash const& hash, bool hashValid,
beast::Journal j);
};
@@ -143,7 +185,7 @@ public:
SHAMapTreeNode (std::shared_ptr<SHAMapItem const> const& item,
TNType type, std::uint32_t seq);
SHAMapTreeNode(std::shared_ptr<SHAMapItem const> const& item, TNType type,
std::uint32_t seq, uint256 const& hash);
std::uint32_t seq, SHAMapHash const& hash);
std::shared_ptr<SHAMapAbstractNode> clone(std::uint32_t seq) const override;
void addRaw (Serializer&, SHANodeFormat format) const override;
@@ -173,7 +215,7 @@ SHAMapAbstractNode::SHAMapAbstractNode(TNType type, std::uint32_t seq)
inline
SHAMapAbstractNode::SHAMapAbstractNode(TNType type, std::uint32_t seq,
uint256 const& hash)
SHAMapHash const& hash)
: mType(type)
, mHash(hash)
, mSeq(seq)
@@ -195,7 +237,7 @@ SHAMapAbstractNode::setSeq (std::uint32_t s)
}
inline
uint256 const&
SHAMapHash const&
SHAMapAbstractNode::getNodeHash () const
{
return mHash;
@@ -254,7 +296,7 @@ SHAMapInnerNode::isEmptyBranch (int m) const
}
inline
uint256 const&
SHAMapHash const&
SHAMapInnerNode::getChildHash (int m) const
{
assert ((m >= 0) && (m < 16) && (getType() == tnINNER));

View File

@@ -164,13 +164,13 @@ SHAMapTreeNode* SHAMap::walkToPointer (uint256 const& id) const
}
std::shared_ptr<SHAMapAbstractNode>
SHAMap::fetchNodeFromDB (uint256 const& hash) const
SHAMap::fetchNodeFromDB (SHAMapHash const& hash) const
{
std::shared_ptr<SHAMapAbstractNode> node;
if (backed_)
{
std::shared_ptr<NodeObject> obj = f_.db().fetch (hash);
std::shared_ptr<NodeObject> obj = f_.db().fetch (hash.as_uint256());
if (obj)
{
try
@@ -199,18 +199,18 @@ SHAMap::fetchNodeFromDB (uint256 const& hash) const
// See if a sync filter has a node
std::shared_ptr<SHAMapAbstractNode>
SHAMap::checkFilter(uint256 const& hash, SHAMapNodeID const& id,
SHAMap::checkFilter(SHAMapHash const& hash, SHAMapNodeID const& id,
SHAMapSyncFilter* filter) const
{
std::shared_ptr<SHAMapAbstractNode> node;
Blob nodeData;
if (filter->haveNode (id, hash, nodeData))
if (filter->haveNode (id, hash.as_uint256(), nodeData))
{
node = SHAMapAbstractNode::make(
nodeData, 0, snfPREFIX, hash, true, f_.journal ());
if (node)
{
filter->gotNode (true, id, hash, nodeData, node->getType ());
filter->gotNode (true, id, hash.as_uint256(), nodeData, node->getType ());
if (backed_)
canonicalize (hash, node);
}
@@ -222,7 +222,7 @@ SHAMap::checkFilter(uint256 const& hash, SHAMapNodeID const& id,
// Used on maps where missing nodes are expected
std::shared_ptr<SHAMapAbstractNode> SHAMap::fetchNodeNT(
SHAMapNodeID const& id,
uint256 const& hash,
SHAMapHash const& hash,
SHAMapSyncFilter* filter) const
{
std::shared_ptr<SHAMapAbstractNode> node = getCache (hash);
@@ -245,7 +245,7 @@ std::shared_ptr<SHAMapAbstractNode> SHAMap::fetchNodeNT(
return node;
}
std::shared_ptr<SHAMapAbstractNode> SHAMap::fetchNodeNT (uint256 const& hash) const
std::shared_ptr<SHAMapAbstractNode> SHAMap::fetchNodeNT (SHAMapHash const& hash) const
{
auto node = getCache (hash);
@@ -256,7 +256,7 @@ std::shared_ptr<SHAMapAbstractNode> SHAMap::fetchNodeNT (uint256 const& hash) co
}
// Throw if the node is missing
std::shared_ptr<SHAMapAbstractNode> SHAMap::fetchNode (uint256 const& hash) const
std::shared_ptr<SHAMapAbstractNode> SHAMap::fetchNode (SHAMapHash const& hash) const
{
auto node = fetchNodeNT (hash);
@@ -337,7 +337,7 @@ SHAMap::descend (SHAMapInnerNode * parent, SHAMapNodeID const& parentID,
SHAMapNodeID childID = parentID.getChildNodeID (branch);
SHAMapAbstractNode* child = parent->getChildPointer (branch);
uint256 const& childHash = parent->getChildHash (branch);
auto const& childHash = parent->getChildHash (branch);
if (!child)
{
@@ -363,7 +363,7 @@ SHAMap::descendAsync (SHAMapInnerNode* parent, int branch,
if (ret)
return ret;
uint256 const& hash = parent->getChildHash (branch);
auto const& hash = parent->getChildHash (branch);
std::shared_ptr<SHAMapAbstractNode> ptr = getCache (hash);
if (!ptr)
@@ -374,7 +374,7 @@ SHAMap::descendAsync (SHAMapInnerNode* parent, int branch,
if (!ptr && backed_)
{
std::shared_ptr<NodeObject> obj;
if (! f_.db().asyncFetch (hash, obj))
if (! f_.db().asyncFetch (hash.as_uint256(), obj))
{
pending = true;
return nullptr;
@@ -565,7 +565,7 @@ SHAMap::peekItem (uint256 const& id, SHAMapTreeNode::TNType& type) const
}
std::shared_ptr<SHAMapItem const> const&
SHAMap::peekItem (uint256 const& id, uint256& hash) const
SHAMap::peekItem (uint256 const& id, SHAMapHash& hash) const
{
SHAMapTreeNode* leaf = walkToPointer (id);
@@ -659,7 +659,7 @@ bool SHAMap::delItem (uint256 const& id)
// What gets attached to the end of the chain
// (For now, nothing, since we deleted the leaf)
uint256 prevHash;
SHAMapHash prevHash;
std::shared_ptr<SHAMapAbstractNode> prevNode;
while (!stack.empty ())
@@ -682,7 +682,7 @@ bool SHAMap::delItem (uint256 const& id)
if (bc == 0)
{
// no children below this branch
prevHash = uint256 ();
prevHash.zero();
prevNode.reset ();
}
else if (bc == 1)
@@ -808,7 +808,7 @@ SHAMap::addItem(SHAMapItem&& i, bool isTransaction, bool hasMetaData)
isTransaction, hasMetaData);
}
uint256
SHAMapHash
SHAMap::getHash () const
{
auto hash = root_->getNodeHash();
@@ -858,7 +858,7 @@ SHAMap::updateGiveItem (std::shared_ptr<SHAMapItem const> const& item,
return true;
}
bool SHAMap::fetchRoot (uint256 const& hash, SHAMapSyncFilter* filter)
bool SHAMap::fetchRoot (SHAMapHash const& hash, SHAMapSyncFilter* filter)
{
if (hash == root_->getNodeHash ())
return true;
@@ -917,7 +917,7 @@ SHAMap::writeNode (
Serializer s;
node->addRaw (s, snfPREFIX);
f_.db().store (t,
std::move (s.modData ()), node->getNodeHash ());
std::move (s.modData ()), node->getNodeHash ().as_uint256());
return node;
}
@@ -1109,21 +1109,21 @@ void SHAMap::dump (bool hash) const
leafCount << " resident leaves";
}
std::shared_ptr<SHAMapAbstractNode> SHAMap::getCache (uint256 const& hash) const
std::shared_ptr<SHAMapAbstractNode> SHAMap::getCache (SHAMapHash const& hash) const
{
auto ret = f_.treecache().fetch (hash);
auto ret = f_.treecache().fetch (hash.as_uint256());
assert (!ret || !ret->getSeq());
return ret;
}
void
SHAMap::canonicalize(uint256 const& hash, std::shared_ptr<SHAMapAbstractNode>& node) const
SHAMap::canonicalize(SHAMapHash const& hash, std::shared_ptr<SHAMapAbstractNode>& node) const
{
assert (backed_);
assert (node->getSeq() == 0);
assert (node->getNodeHash() == hash);
f_.treecache().canonicalize (hash, node);
f_.treecache().canonicalize (hash.as_uint256(), node);
}
} // ripple

View File

@@ -26,22 +26,27 @@ namespace ripple {
std::ostream&
operator<< (std::ostream& out, const SHAMapMissingNode& mn)
{
switch (mn.getMapType ())
switch (mn.mType)
{
case SHAMapType::TRANSACTION:
out << "Missing/TXN(" << mn.getNodeHash () << ")";
out << "Missing/TXN(";
break;
case SHAMapType::STATE:
out << "Missing/STA(" << mn.getNodeHash () << ")";
out << "Missing/STA(";
break;
case SHAMapType::FREE:
default:
out << "Missing/" << mn.getNodeHash ();
out << "Missing/(";
break;
};
if (mn.mNodeHash == zero)
out << "id : " << mn.mNodeID;
else
out << "hash : " << mn.mNodeHash;
out << ")";
return out;
}

View File

@@ -141,7 +141,7 @@ SHAMap::getMissingNodes(std::vector<SHAMapNodeID>& nodeIDs, std::vector<uint256>
int const maxDefer = f_.db().getDesiredAsyncReadCount ();
// Track the missing hashes we have found so far
std::set <uint256> missingHashes;
std::set <SHAMapHash> missingHashes;
while (1)
@@ -173,13 +173,13 @@ SHAMap::getMissingNodes(std::vector<SHAMapNodeID>& nodeIDs, std::vector<uint256>
int branch = (firstChild + currentChild++) % 16;
if (!node->isEmptyBranch (branch))
{
uint256 const& childHash = node->getChildHash (branch);
auto const& childHash = node->getChildHash (branch);
if (missingHashes.count (childHash) != 0)
{
fullBelow = false;
}
else if (! backed_ || ! f_.fullbelow().touch_if_exists (childHash))
else if (! backed_ || ! f_.fullbelow().touch_if_exists (childHash.as_uint256()))
{
SHAMapNodeID childID = nodeID.getChildNodeID (branch);
bool pending = false;
@@ -190,7 +190,7 @@ SHAMap::getMissingNodes(std::vector<SHAMapNodeID>& nodeIDs, std::vector<uint256>
if (!pending)
{ // node is not in the database
nodeIDs.push_back (childID);
hashes.push_back (childHash);
hashes.push_back (childHash.as_uint256());
if (--max <= 0)
return;
@@ -226,7 +226,7 @@ SHAMap::getMissingNodes(std::vector<SHAMapNodeID>& nodeIDs, std::vector<uint256>
{ // No partial node encountered below this node
node->setFullBelowGen (generation);
if (backed_)
f_.fullbelow().insert (node->getNodeHash ());
f_.fullbelow().insert (node->getNodeHash ().as_uint256());
}
if (stack.empty ())
@@ -274,7 +274,7 @@ SHAMap::getMissingNodes(std::vector<SHAMapNodeID>& nodeIDs, std::vector<uint256>
else if ((max > 0) && (missingHashes.insert (nodeHash).second))
{
nodeIDs.push_back (nodeID);
hashes.push_back (nodeHash);
hashes.push_back (nodeHash.as_uint256());
--max;
}
@@ -419,7 +419,7 @@ SHAMapAddNode SHAMap::addRootNode (Blob const& rootNode,
assert (seq_ >= 1);
auto node = SHAMapAbstractNode::make(
rootNode, 0, format, uZero, false, f_.journal ());
rootNode, 0, format, SHAMapHash{uZero}, false, f_.journal ());
if (!node || !node->isValid ())
return SHAMapAddNode::invalid ();
@@ -439,14 +439,14 @@ SHAMapAddNode SHAMap::addRootNode (Blob const& rootNode,
{
Serializer s;
root_->addRaw (s, snfPREFIX);
filter->gotNode (false, SHAMapNodeID{}, root_->getNodeHash (),
filter->gotNode (false, SHAMapNodeID{}, root_->getNodeHash ().as_uint256(),
s.modData (), root_->getType ());
}
return SHAMapAddNode::useful ();
}
SHAMapAddNode SHAMap::addRootNode (uint256 const& hash, Blob const& rootNode, SHANodeFormat format,
SHAMapAddNode SHAMap::addRootNode (SHAMapHash const& hash, Blob const& rootNode, SHANodeFormat format,
SHAMapSyncFilter* filter)
{
// we already have a root_ node
@@ -460,7 +460,7 @@ SHAMapAddNode SHAMap::addRootNode (uint256 const& hash, Blob const& rootNode, SH
assert (seq_ >= 1);
auto node = SHAMapAbstractNode::make(
rootNode, 0, format, uZero, false, f_.journal ());
rootNode, 0, format, SHAMapHash{uZero}, false, f_.journal ());
if (!node || !node->isValid() || node->getNodeHash () != hash)
return SHAMapAddNode::invalid ();
@@ -476,8 +476,8 @@ SHAMapAddNode SHAMap::addRootNode (uint256 const& hash, Blob const& rootNode, SH
{
Serializer s;
root_->addRaw (s, snfPREFIX);
filter->gotNode (false, SHAMapNodeID{}, root_->getNodeHash (), s.modData (),
root_->getType ());
filter->gotNode (false, SHAMapNodeID{}, root_->getNodeHash ().as_uint256(),
s.modData (), root_->getType ());
}
return SHAMapAddNode::useful ();
@@ -515,8 +515,8 @@ SHAMap::addKnownNode (const SHAMapNodeID& node, Blob const& rawNode,
return SHAMapAddNode::invalid ();
}
uint256 childHash = inner->getChildHash (branch);
if (f_.fullbelow().touch_if_exists (childHash))
auto childHash = inner->getChildHash (branch);
if (f_.fullbelow().touch_if_exists (childHash.as_uint256()))
return SHAMapAddNode::duplicate ();
auto prevNode = inner;
@@ -538,7 +538,7 @@ SHAMap::addKnownNode (const SHAMapNodeID& node, Blob const& rawNode,
}
auto newNode = SHAMapAbstractNode::make(
rawNode, 0, snfWIRE, uZero, false, f_.journal ());
rawNode, 0, snfWIRE, SHAMapHash{uZero}, false, f_.journal ());
if (!newNode || !newNode->isValid() || childHash != newNode->getNodeHash ())
{
@@ -563,7 +563,7 @@ SHAMap::addKnownNode (const SHAMapNodeID& node, Blob const& rawNode,
{
Serializer s;
newNode->addRaw (s, snfPREFIX);
filter->gotNode (false, node, childHash,
filter->gotNode (false, node, childHash.as_uint256(),
s.modData (), newNode->getType ());
}
@@ -653,7 +653,7 @@ bool SHAMap::deepCompare (SHAMap& other) const
*/
bool
SHAMap::hasInnerNode (SHAMapNodeID const& targetNodeID,
uint256 const& targetNodeHash) const
SHAMapHash const& targetNodeHash) const
{
auto node = root_.get();
SHAMapNodeID nodeID;
@@ -675,7 +675,7 @@ SHAMap::hasInnerNode (SHAMapNodeID const& targetNodeID,
/** Does this map have this leaf node?
*/
bool
SHAMap::hasLeafNode (uint256 const& tag, uint256 const& targetNodeHash) const
SHAMap::hasLeafNode (uint256 const& tag, SHAMapHash const& targetNodeHash) const
{
auto node = root_.get();
SHAMapNodeID nodeID;
@@ -720,7 +720,7 @@ void SHAMap::getFetchPack (SHAMap* have, bool includeLeaves, int max,
{
Serializer s;
smn.addRaw (s, snfPREFIX);
func (smn.getNodeHash(), s.peekData());
func (smn.getNodeHash().as_uint256(), s.peekData());
if (--max <= 0)
return false;
@@ -772,7 +772,7 @@ SHAMap::visitDifferences(SHAMap* have,
{
if (!node->isEmptyBranch (i))
{
uint256 const& childHash = node->getChildHash (i);
auto const& childHash = node->getChildHash (i);
SHAMapNodeID childID = nodeID.getChildNodeID (i);
auto next = descendThrow(node, i);

View File

@@ -65,7 +65,7 @@ SHAMapTreeNode::SHAMapTreeNode (std::shared_ptr<SHAMapItem const> const& item,
}
SHAMapTreeNode::SHAMapTreeNode (std::shared_ptr<SHAMapItem const> const& item,
TNType type, std::uint32_t seq, uint256 const& hash)
TNType type, std::uint32_t seq, SHAMapHash const& hash)
: SHAMapAbstractNode(type, seq, hash)
, mItem (item)
{
@@ -74,7 +74,7 @@ SHAMapTreeNode::SHAMapTreeNode (std::shared_ptr<SHAMapItem const> const& item,
std::shared_ptr<SHAMapAbstractNode>
SHAMapAbstractNode::make(Blob const& rawNode, std::uint32_t seq, SHANodeFormat format,
uint256 const& hash, bool hashValid, beast::Journal j)
SHAMapHash const& hash, bool hashValid, beast::Journal j)
{
if (format == snfWIRE)
{
@@ -125,7 +125,7 @@ SHAMapAbstractNode::make(Blob const& rawNode, std::uint32_t seq, SHANodeFormat f
auto ret = std::make_shared<SHAMapInnerNode>(seq);
for (int i = 0; i < 16; ++i)
{
s.get256 (ret->mHashes[i], i * 32);
s.get256 (ret->mHashes[i].as_uint256(), i * 32);
if (ret->mHashes[i].isNonZero ())
ret->mIsBranch |= (1 << i);
@@ -147,7 +147,7 @@ SHAMapAbstractNode::make(Blob const& rawNode, std::uint32_t seq, SHANodeFormat f
throw std::runtime_error ("short CI node");
if ((pos < 0) || (pos >= 16))
throw std::runtime_error ("invalid CI node");
s.get256 (ret->mHashes[pos], i * 33);
s.get256 (ret->mHashes[pos].as_uint256(), i * 33);
if (ret->mHashes[pos].isNonZero ())
ret->mIsBranch |= (1 << pos);
}
@@ -230,7 +230,7 @@ SHAMapAbstractNode::make(Blob const& rawNode, std::uint32_t seq, SHANodeFormat f
auto ret = std::make_shared<SHAMapInnerNode>(seq);
for (int i = 0; i < 16; ++i)
{
s.get256 (ret->mHashes[i], i * 32);
s.get256 (ret->mHashes[i].as_uint256(), i * 32);
if (ret->mHashes[i].isNonZero ())
ret->mIsBranch |= (1 << i);
@@ -276,9 +276,9 @@ SHAMapInnerNode::updateHash()
Slice(reinterpret_cast<unsigned char const*>(mHashes),
sizeof (mHashes)));
}
if (nh == mHash)
if (nh == mHash.as_uint256())
return false;
mHash = nh;
mHash = SHAMapHash{nh};
return true;
}
@@ -317,10 +317,10 @@ SHAMapTreeNode::updateHash()
else
assert (false);
if (nh == mHash)
if (nh == mHash.as_uint256())
return false;
mHash = nh;
mHash = SHAMapHash{nh};
return true;
}
@@ -334,7 +334,7 @@ SHAMapInnerNode::addRaw(Serializer& s, SHANodeFormat format) const
if (format == snfHASH)
{
s.add256 (getNodeHash ());
s.add256 (mHash.as_uint256());
}
else if (mType == tnINNER)
{
@@ -345,7 +345,7 @@ SHAMapInnerNode::addRaw(Serializer& s, SHANodeFormat format) const
s.add32 (HashPrefix::innerNode);
for (int i = 0; i < 16; ++i)
s.add256 (mHashes[i]);
s.add256 (mHashes[i].as_uint256());
}
else
{
@@ -355,7 +355,7 @@ SHAMapInnerNode::addRaw(Serializer& s, SHANodeFormat format) const
for (int i = 0; i < 16; ++i)
if (!isEmptyBranch (i))
{
s.add256 (mHashes[i]);
s.add256 (mHashes[i].as_uint256());
s.add8 (i);
}
@@ -364,7 +364,7 @@ SHAMapInnerNode::addRaw(Serializer& s, SHANodeFormat format) const
else
{
for (int i = 0; i < 16; ++i)
s.add256 (mHashes[i]);
s.add256 (mHashes[i].as_uint256());
s.add8 (2);
}
@@ -384,7 +384,7 @@ SHAMapTreeNode::addRaw(Serializer& s, SHANodeFormat format) const
if (format == snfHASH)
{
s.add256 (getNodeHash ());
s.add256 (mHash.as_uint256());
}
else if (mType == tnACCOUNT_STATE)
{

View File

@@ -87,7 +87,7 @@ public:
unexpected (i != e, "bad traverse");
testcase ("snapshot");
uint256 mapHash = sMap.getHash ();
SHAMapHash mapHash = sMap.getHash ();
std::shared_ptr<SHAMap> map2 = sMap.snapShot (false);
unexpected (sMap.getHash () != mapHash, "bad snapshot");
unexpected (map2->getHash () != mapHash, "bad snapshot");
@@ -119,19 +119,19 @@ public:
SHAMap map (SHAMapType::FREE, f);
expect (map.getHash() == uint256(), "bad initial empty map hash");
expect (map.getHash() == zero, "bad initial empty map hash");
for (int i = 0; i < keys.size(); ++i)
{
SHAMapItem item (keys[i], IntToVUC (i));
map.addItem (item, true, false);
expect (map.getHash() == hashes[i], "bad buildup map hash");
expect (map.getHash().as_uint256() == hashes[i], "bad buildup map hash");
}
for (int i = keys.size() - 1; i >= 0; --i)
{
expect (map.getHash() == hashes[i], "bad teardown hash");
expect (map.getHash().as_uint256() == hashes[i], "bad teardown hash");
map.delItem (keys[i]);
}
expect (map.getHash() == uint256(), "bad final empty map hash");
expect (map.getHash() == zero, "bad final empty map hash");
}
}
};

View File

@@ -50,7 +50,7 @@ public:
{
// add a bunch of random states to a map, then remove them
// map should be the same
uint256 beforeHash = map.getHash ();
SHAMapHash beforeHash = map.getHash ();
std::list<uint256> items;