Remove application dependency from SyncFilters

Miguel Portilla
2016-10-12 15:22:51 -04:00
committed by Brad Chase
parent cdaafeb4b6
commit 19258cf980
15 changed files with 148 additions and 96 deletions
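In short: each SHAMapSyncFilter stops taking the whole Application and instead receives only the two capabilities it uses, a Family for node-store writes and the new AbstractFetchPackContainer for fetch-pack reads, while the bool haveNode(hash, Blob&) lookup becomes getNode(hash) returning boost::optional<Blob>. The sketch below is an illustrative, self-contained analogue of that dependency-narrowing pattern, not rippled code; Store, FetchSource, and SyncFilter are stand-in names.

// Illustrative analogue of the refactoring in this commit; stand-in types, not rippled classes.
#include <boost/optional.hpp>
#include <cstdint>
#include <map>
#include <utility>
#include <vector>

using Blob = std::vector<std::uint8_t>;
using Hash = std::uint64_t; // placeholder for uint256

// Plays the role of AbstractFetchPackContainer: the one read capability a filter needs.
struct FetchSource
{
    virtual ~FetchSource() = default;
    virtual boost::optional<Blob> getFetchPack(Hash const& h) = 0;
};

// Plays the role of Family::db(): the one write capability a filter needs.
struct Store
{
    std::map<Hash, Blob> data;
    void store(Hash const& h, Blob b) { data[h] = std::move(b); }
};

// Plays the role of AccountStateSF / TransactionStateSF after the change:
// constructed from narrow interfaces instead of Application.
class SyncFilter
{
public:
    SyncFilter(Store& s, FetchSource& f) : s_(s), f_(f) {}

    // Old style was bool haveNode(Hash, Blob&); new style returns an optional.
    boost::optional<Blob> getNode(Hash const& h) const { return f_.getFetchPack(h); }

    void gotNode(Hash const& h, Blob&& b) const { s_.store(h, std::move(b)); }

private:
    Store& s_;
    FetchSource& f_;
};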

View File

@@ -789,6 +789,8 @@
   </ClInclude>
   <ClInclude Include="..\..\src\ripple\app\consensus\RCLCxTx.h">
   </ClInclude>
+  <ClInclude Include="..\..\src\ripple\app\ledger\AbstractFetchPackContainer.h">
+  </ClInclude>
   <ClCompile Include="..\..\src\ripple\app\ledger\AcceptedLedger.cpp">
     <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='debug|x64'">True</ExcludedFromBuild>
     <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='release|x64'">True</ExcludedFromBuild>

View File

@@ -1311,6 +1311,9 @@
   <ClInclude Include="..\..\src\ripple\app\consensus\RCLCxTx.h">
     <Filter>ripple\app\consensus</Filter>
   </ClInclude>
+  <ClInclude Include="..\..\src\ripple\app\ledger\AbstractFetchPackContainer.h">
+    <Filter>ripple\app\ledger</Filter>
+  </ClInclude>
   <ClCompile Include="..\..\src\ripple\app\ledger\AcceptedLedger.cpp">
     <Filter>ripple\app\ledger</Filter>
   </ClCompile>

View File

@@ -0,0 +1,48 @@
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright (c) 2012, 2013 Ripple Labs Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#ifndef RIPPLE_APP_LEDGER_ABSTRACTFETCHPACKCONTAINER_H_INCLUDED
#define RIPPLE_APP_LEDGER_ABSTRACTFETCHPACKCONTAINER_H_INCLUDED
#include <ripple/basics/base_uint.h>
#include <ripple/basics/Blob.h>
#include <boost/optional.hpp>
namespace ripple {
/** An interface facilitating retrieval of fetch packs without
    an Application or LedgerMaster object.
*/
class AbstractFetchPackContainer
{
public:
virtual ~AbstractFetchPackContainer() = default;
/** Retrieves partial ledger data, received from peers, for the corresponding hash.
    @param nodeHash The 256-bit hash of the data to fetch.
    @return `boost::none` if the hash isn't cached;
        otherwise, the data associated with the hash.
*/
virtual boost::optional<Blob> getFetchPack(uint256 const& nodeHash) = 0;
};
} // ripple
#endif
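Later in this commit LedgerMaster becomes the production implementation of this interface; for illustration, a minimal in-memory implementation (useful in tests) could look like the sketch below. The CannedFetchPacks name and its std::map storage are assumptions for the example, not part of the commit.

// Hypothetical helper, not part of this commit: serves fetch packs from a pre-filled map.
#include <ripple/app/ledger/AbstractFetchPackContainer.h>
#include <map>
#include <utility>

namespace ripple {

class CannedFetchPacks : public AbstractFetchPackContainer
{
public:
    // Pre-load data that getFetchPack() will later serve.
    void add(uint256 const& nodeHash, Blob data)
    {
        packs_[nodeHash] = std::move(data);
    }

    boost::optional<Blob>
    getFetchPack(uint256 const& nodeHash) override
    {
        auto const it = packs_.find(nodeHash);
        if (it == packs_.end())
            return boost::none;
        return it->second;
    }

private:
    std::map<uint256, Blob> packs_;
};

} // ripple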

View File

@@ -19,17 +19,12 @@
 #include <BeastConfig.h>
 #include <ripple/app/ledger/AccountStateSF.h>
-#include <ripple/app/ledger/LedgerMaster.h>
-#include <ripple/app/ledger/TransactionMaster.h>
-#include <ripple/app/main/Application.h>
-#include <ripple/app/misc/NetworkOPs.h>
-#include <ripple/nodestore/Database.h>
-#include <ripple/protocol/HashPrefix.h>
 
 namespace ripple {
 
-AccountStateSF::AccountStateSF(Application& app)
-    : app_ (app)
+AccountStateSF::AccountStateSF(Family& f, AbstractFetchPackContainer& fp)
+    : f_(f)
+    , fp_(fp)
 {
 }
@@ -38,17 +33,14 @@ void AccountStateSF::gotNode (bool fromFilter,
     Blob&& nodeData,
     SHAMapTreeNode::TNType) const
 {
-    // VFALCO SHAMapSync filters should be passed the SHAMap, the
-    // SHAMap should provide an accessor to get the injected Database,
-    // and this should use that Database instad of getNodeStore
-    app_.getNodeStore ().store (
-        hotACCOUNT_NODE, std::move (nodeData), nodeHash.as_uint256());
+    f_.db().store(hotACCOUNT_NODE, std::move(nodeData),
+        nodeHash.as_uint256());
 }
 
-bool AccountStateSF::haveNode (SHAMapHash const& nodeHash,
-    Blob& nodeData) const
+boost::optional<Blob>
+AccountStateSF::getNode(SHAMapHash const& nodeHash) const
 {
-    return app_.getLedgerMaster ().getFetchPack (nodeHash.as_uint256(), nodeData);
+    return fp_.getFetchPack(nodeHash.as_uint256());
 }
 
 } // ripple

View File

@@ -20,8 +20,9 @@
 #ifndef RIPPLE_APP_LEDGER_ACCOUNTSTATESF_H_INCLUDED
 #define RIPPLE_APP_LEDGER_ACCOUNTSTATESF_H_INCLUDED
 
-#include <ripple/app/main/Application.h>
+#include <ripple/app/ledger/AbstractFetchPackContainer.h>
 #include <ripple/shamap/SHAMapSyncFilter.h>
+#include <ripple/shamap/Family.h>
 
 namespace ripple {
@@ -31,11 +32,11 @@ class AccountStateSF
     : public SHAMapSyncFilter
 {
 private:
-    Application& app_;
+    Family& f_;
+    AbstractFetchPackContainer& fp_;
 
 public:
-    AccountStateSF (Application& app);
+    explicit AccountStateSF(Family&, AbstractFetchPackContainer&);
 
     // Note that the nodeData is overwritten by this call
     void gotNode (bool fromFilter,
@@ -43,8 +44,8 @@ public:
         Blob&& nodeData,
         SHAMapTreeNode::TNType) const override;
 
-    bool haveNode (SHAMapHash const& nodeHash,
-        Blob& nodeData) const override;
+    boost::optional<Blob>
+    getNode(SHAMapHash const& nodeHash) const override;
 };
 
 } // ripple

View File

@@ -75,11 +75,12 @@ void ConsensusTransSetSF::gotNode (
     }
 }
 
-bool ConsensusTransSetSF::haveNode (
-    SHAMapHash const& nodeHash, Blob& nodeData) const
+boost::optional<Blob>
+ConsensusTransSetSF::getNode (SHAMapHash const& nodeHash) const
 {
+    Blob nodeData;
     if (m_nodeCache.retrieve (nodeHash, nodeData))
-        return true;
+        return nodeData;
 
     auto txn = app_.getMasterTransaction().fetch(nodeHash.as_uint256(), false);
@@ -93,10 +94,10 @@ bool ConsensusTransSetSF::haveNode (
         txn->getSTransaction ()->add (s);
         assert(sha512Half(s.slice()) == nodeHash.as_uint256());
         nodeData = s.peekData ();
-        return true;
+        return nodeData;
     }
 
-    return false;
+    return boost::none;
 }
 
 } // ripple

View File

@@ -44,8 +44,8 @@ public:
         Blob&& nodeData,
         SHAMapTreeNode::TNType) const override;
 
-    bool haveNode (SHAMapHash const& nodeHash,
-        Blob& nodeData) const override;
+    boost::optional<Blob>
+    getNode (SHAMapHash const& nodeHash) const override;
 
 private:
     Application& app_;

View File

@@ -21,6 +21,7 @@
 #define RIPPLE_APP_LEDGER_LEDGERMASTER_H_INCLUDED
 
 #include <ripple/app/main/Application.h>
+#include <ripple/app/ledger/AbstractFetchPackContainer.h>
 #include <ripple/app/ledger/Ledger.h>
 #include <ripple/app/ledger/LedgerCleaner.h>
 #include <ripple/app/ledger/LedgerHistory.h>
@@ -61,6 +62,7 @@ struct LedgerReplay
 //
 class LedgerMaster
     : public Stoppable
+    , public AbstractFetchPackContainer
 {
 public:
     explicit
@@ -232,9 +234,8 @@ public:
         uint256 const& hash,
         std::shared_ptr<Blob>& data);
 
-    bool getFetchPack (
-        uint256 const& hash,
-        Blob& data);
+    boost::optional<Blob>
+    getFetchPack (uint256 const& hash) override;
 
     void makeFetchPack (
         std::weak_ptr<Peer> const& wPeer,

View File

@@ -19,40 +19,31 @@
 #include <BeastConfig.h>
 #include <ripple/app/ledger/TransactionStateSF.h>
-#include <ripple/app/ledger/LedgerMaster.h>
-#include <ripple/app/ledger/TransactionMaster.h>
-#include <ripple/app/main/Application.h>
-#include <ripple/app/misc/NetworkOPs.h>
-#include <ripple/nodestore/Database.h>
-#include <ripple/protocol/HashPrefix.h>
 
 namespace ripple {
 
-TransactionStateSF::TransactionStateSF(Application& app)
-    : app_ (app)
+TransactionStateSF::TransactionStateSF(Family& f,
+    AbstractFetchPackContainer& fp)
+    : f_(f)
+    , fp_(fp)
 {
 }
 
-// VFALCO This might be better as Blob&&
 void TransactionStateSF::gotNode (bool fromFilter,
     SHAMapHash const& nodeHash,
     Blob&& nodeData,
     SHAMapTreeNode::TNType type) const
 {
-    // VFALCO SHAMapSync filters should be passed the SHAMap, the
-    // SHAMap should provide an accessor to get the injected Database,
-    // and this should use that Database instad of getNodeStore
     assert(type !=
         SHAMapTreeNode::tnTRANSACTION_NM);
-    app_.getNodeStore().store(
-        hotTRANSACTION_NODE,
-        std::move (nodeData), nodeHash.as_uint256());
+    f_.db().store(hotTRANSACTION_NODE,
+        std::move (nodeData), nodeHash.as_uint256());
 }
 
-bool TransactionStateSF::haveNode (SHAMapHash const& nodeHash,
-    Blob& nodeData) const
+boost::optional<Blob>
+TransactionStateSF::getNode(SHAMapHash const& nodeHash) const
 {
-    return app_.getLedgerMaster ().getFetchPack (nodeHash.as_uint256(), nodeData);
+    return fp_.getFetchPack(nodeHash.as_uint256());
 }
 
 } // ripple

View File

@@ -20,8 +20,9 @@
 #ifndef RIPPLE_APP_LEDGER_TRANSACTIONSTATESF_H_INCLUDED
 #define RIPPLE_APP_LEDGER_TRANSACTIONSTATESF_H_INCLUDED
 
-#include <ripple/app/main/Application.h>
+#include <ripple/app/ledger/AbstractFetchPackContainer.h>
 #include <ripple/shamap/SHAMapSyncFilter.h>
+#include <ripple/shamap/Family.h>
 #include <cstdint>
 
 namespace ripple {
@@ -32,11 +33,12 @@ class TransactionStateSF
     : public SHAMapSyncFilter
 {
 private:
-    Application& app_;
+    Family& f_;
+    AbstractFetchPackContainer& fp_;
 
 public:
     explicit
-    TransactionStateSF(Application& app);
+    TransactionStateSF(Family&, AbstractFetchPackContainer&);
 
     // Note that the nodeData is overwritten by this call
     void gotNode (bool fromFilter,
@@ -44,8 +46,8 @@ public:
         Blob&& nodeData,
         SHAMapTreeNode::TNType) const override;
 
-    bool haveNode (SHAMapHash const& nodeHash,
-        Blob& nodeData) const override;
+    boost::optional<Blob>
+    getNode(SHAMapHash const& nodeHash) const override;
 };
 
 } // ripple

View File

@@ -246,20 +246,19 @@ bool InboundLedger::tryLocal ()
         if (!node)
         {
-            Blob data;
-            if (!app_.getLedgerMaster ().getFetchPack (mHash, data))
+            auto data = app_.getLedgerMaster().getFetchPack(mHash);
+            if (! data)
                 return false;
 
             JLOG (m_journal.trace()) <<
                 "Ledger header found in fetch pack";
             mLedger = std::make_shared<Ledger> (
-                deserializeHeader (makeSlice(data), true),
+                deserializeHeader (makeSlice(*data), true),
                 app_.family());
             app_.getNodeStore ().store (
-                hotLEDGER, std::move (data), mHash);
+                hotLEDGER, std::move (*data), mHash);
         }
         else
         {
@@ -290,7 +289,8 @@ bool InboundLedger::tryLocal ()
     }
     else
     {
-        TransactionStateSF filter(app_);
+        TransactionStateSF filter(mLedger->txMap().family(),
+            app_.getLedgerMaster());
 
         if (mLedger->txMap().fetchRoot (
             SHAMapHash{mLedger->info().txHash}, &filter))
@@ -316,7 +316,8 @@ bool InboundLedger::tryLocal ()
         return true;
     }
 
-    AccountStateSF filter(app_);
+    AccountStateSF filter(mLedger->stateMap().family(),
+        app_.getLedgerMaster());
 
     if (mLedger->stateMap().fetchRoot (
         SHAMapHash{mLedger->info().accountHash}, &filter))
@@ -607,7 +608,8 @@ void InboundLedger::trigger (std::shared_ptr<Peer> const& peer, TriggerReason re
         }
         else
         {
-            AccountStateSF filter(app_);
+            AccountStateSF filter(mLedger->stateMap().family(),
+                app_.getLedgerMaster());
 
             // Release the lock while we process the large state map
             sl.unlock();
@@ -680,7 +682,8 @@ void InboundLedger::trigger (std::shared_ptr<Peer> const& peer, TriggerReason re
         }
         else
         {
-            TransactionStateSF filter(app_);
+            TransactionStateSF filter(mLedger->txMap().family(),
+                app_.getLedgerMaster());
 
             auto nodes = mLedger->txMap().getMissingNodes (
                 missingNodesFind, &filter);
@@ -849,21 +852,23 @@ bool InboundLedger::takeTxNode (const std::vector<SHAMapNodeID>& nodeIDs,
     auto nodeIDit = nodeIDs.cbegin ();
     auto nodeDatait = data.begin ();
-    TransactionStateSF tFilter(app_);
+    TransactionStateSF filter(mLedger->txMap().family(),
+        app_.getLedgerMaster());
 
     while (nodeIDit != nodeIDs.cend ())
     {
         if (nodeIDit->isRoot ())
         {
             san += mLedger->txMap().addRootNode (
-                SHAMapHash{mLedger->info().txHash}, makeSlice(*nodeDatait), snfWIRE, &tFilter);
+                SHAMapHash{mLedger->info().txHash},
+                makeSlice(*nodeDatait), snfWIRE, &filter);
             if (!san.isGood())
                 return false;
         }
         else
         {
             san += mLedger->txMap().addKnownNode (
-                *nodeIDit, makeSlice(*nodeDatait), &tFilter);
+                *nodeIDit, makeSlice(*nodeDatait), &filter);
             if (!san.isGood())
                 return false;
         }
@@ -919,14 +924,16 @@ bool InboundLedger::takeAsNode (const std::vector<SHAMapNodeID>& nodeIDs,
     auto nodeIDit = nodeIDs.cbegin ();
     auto nodeDatait = data.begin ();
-    AccountStateSF tFilter(app_);
+    AccountStateSF filter(mLedger->stateMap().family(),
+        app_.getLedgerMaster());
 
     while (nodeIDit != nodeIDs.cend ())
     {
         if (nodeIDit->isRoot ())
         {
             san += mLedger->stateMap().addRootNode (
-                SHAMapHash{mLedger->info().accountHash}, makeSlice(*nodeDatait), snfWIRE, &tFilter);
+                SHAMapHash{mLedger->info().accountHash},
+                makeSlice(*nodeDatait), snfWIRE, &filter);
             if (!san.isGood ())
             {
                 JLOG (m_journal.warn()) <<
@@ -937,7 +944,7 @@ bool InboundLedger::takeAsNode (const std::vector<SHAMapNodeID>& nodeIDs,
         else
         {
             san += mLedger->stateMap().addKnownNode (
-                *nodeIDit, makeSlice(*nodeDatait), &tFilter);
+                *nodeIDit, makeSlice(*nodeDatait), &filter);
             if (!san.isGood ())
             {
                 JLOG (m_journal.warn()) <<
@@ -981,9 +988,10 @@ bool InboundLedger::takeAsRootNode (Slice const& data, SHAMapAddNode& san)
         return false;
     }
 
-    AccountStateSF tFilter(app_);
+    AccountStateSF filter(mLedger->stateMap().family(),
+        app_.getLedgerMaster());
 
     san += mLedger->stateMap().addRootNode (
-        SHAMapHash{mLedger->info().accountHash}, data, snfWIRE, &tFilter);
+        SHAMapHash{mLedger->info().accountHash}, data, snfWIRE, &filter);
     return san.isGood();
 }
@@ -1004,9 +1012,10 @@ bool InboundLedger::takeTxRootNode (Slice const& data, SHAMapAddNode& san)
         return false;
     }
 
-    TransactionStateSF tFilter(app_);
+    TransactionStateSF filter(mLedger->txMap().family(),
+        app_.getLedgerMaster());
 
     san += mLedger->txMap().addRootNode (
-        SHAMapHash{mLedger->info().txHash}, data, snfWIRE, &tFilter);
+        SHAMapHash{mLedger->info().txHash}, data, snfWIRE, &filter);
     return san.isGood();
 }
@@ -1024,7 +1033,8 @@ InboundLedger::getNeededHashes ()
     if (!mHaveState)
     {
-        AccountStateSF filter(app_);
+        AccountStateSF filter(mLedger->stateMap().family(),
+            app_.getLedgerMaster());
         for (auto const& h : neededStateHashes (4, &filter))
         {
             ret.push_back (std::make_pair (
@@ -1034,7 +1044,8 @@ InboundLedger::getNeededHashes ()
     if (!mHaveTransactions)
     {
-        TransactionStateSF filter(app_);
+        TransactionStateSF filter(mLedger->txMap().family(),
+            app_.getLedgerMaster());
         for (auto const& h : neededTxHashes (4, &filter))
         {
             ret.push_back (std::make_pair (

View File

@@ -1779,17 +1779,18 @@ LedgerMaster::addFetchPack (
     fetch_packs_.canonicalize (hash, data);
 }
 
-bool
+boost::optional<Blob>
 LedgerMaster::getFetchPack (
-    uint256 const& hash,
-    Blob& data)
+    uint256 const& hash)
 {
-    if (!fetch_packs_.retrieve (hash, data))
-        return false;
-    fetch_packs_.del (hash, false);
-    return hash == sha512Half(makeSlice(data));
+    Blob data;
+    if (fetch_packs_.retrieve(hash, data))
+    {
+        fetch_packs_.del(hash, false);
+        if (hash == sha512Half(makeSlice(data)))
+            return data;
+    }
+    return boost::none;
 }
 
 void

View File

@@ -40,8 +40,9 @@ public:
         Blob&& nodeData,
         SHAMapTreeNode::TNType type) const = 0;
 
-    virtual bool haveNode (SHAMapHash const& nodeHash,
-        Blob& nodeData) const = 0;
+    virtual
+    boost::optional<Blob>
+    getNode(SHAMapHash const& nodeHash) const = 0;
 };
 
 }

View File

@@ -306,15 +306,14 @@ SHAMap::checkFilter(SHAMapHash const& hash,
     SHAMapSyncFilter* filter) const
 {
     std::shared_ptr<SHAMapAbstractNode> node;
-    Blob nodeData;
-    if (filter->haveNode (hash, nodeData))
+    if (auto nodeData = filter->getNode (hash))
     {
         node = SHAMapAbstractNode::make(
-            makeSlice(nodeData), 0, snfPREFIX, hash, true, f_.journal ());
+            makeSlice(*nodeData), 0, snfPREFIX, hash, true, f_.journal ());
         if (node)
         {
             filter->gotNode (true, hash,
-                std::move(nodeData), node->getType ());
+                std::move(*nodeData), node->getType ());
             if (backed_)
                 canonicalize (hash, node);
         }

View File

@@ -66,17 +66,16 @@ public:
     {
     }
 
-    bool haveNode (SHAMapHash const& nodeHash,
-        Blob& nodeData) const override
+    boost::optional<Blob>
+    getNode (SHAMapHash const& nodeHash) const override
     {
         Map::iterator it = mMap.find (nodeHash);
         if (it == mMap.end ())
         {
             JLOG(mJournal.fatal()) << "Test filter missing node";
-            return false;
+            return boost::none;
         }
-        nodeData = it->second;
-        return true;
+        return it->second;
     }
 
     Map& mMap;