Remove application dependency from SyncFilters

Commit 19258cf980 (parent cdaafeb4b6), committed by Brad Chase, in the rippled repository (https://github.com/XRPLF/rippled.git).
@@ -789,6 +789,8 @@
    </ClInclude>
    <ClInclude Include="..\..\src\ripple\app\consensus\RCLCxTx.h">
    </ClInclude>
    <ClInclude Include="..\..\src\ripple\app\ledger\AbstractFetchPackContainer.h">
    </ClInclude>
    <ClCompile Include="..\..\src\ripple\app\ledger\AcceptedLedger.cpp">
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='debug|x64'">True</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='release|x64'">True</ExcludedFromBuild>
@@ -1311,6 +1311,9 @@
    <ClInclude Include="..\..\src\ripple\app\consensus\RCLCxTx.h">
      <Filter>ripple\app\consensus</Filter>
    </ClInclude>
    <ClInclude Include="..\..\src\ripple\app\ledger\AbstractFetchPackContainer.h">
      <Filter>ripple\app\ledger</Filter>
    </ClInclude>
    <ClCompile Include="..\..\src\ripple\app\ledger\AcceptedLedger.cpp">
      <Filter>ripple\app\ledger</Filter>
    </ClCompile>
src/ripple/app/ledger/AbstractFetchPackContainer.h (new file, 48 lines)
@@ -0,0 +1,48 @@
//------------------------------------------------------------------------------
/*
    This file is part of rippled: https://github.com/ripple/rippled
    Copyright (c) 2012, 2013 Ripple Labs Inc.

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#ifndef RIPPLE_APP_LEDGER_ABSTRACTFETCHPACKCONTAINER_H_INCLUDED
#define RIPPLE_APP_LEDGER_ABSTRACTFETCHPACKCONTAINER_H_INCLUDED

#include <ripple/basics/base_uint.h>
#include <ripple/basics/Blob.h>
#include <boost/optional.hpp>

namespace ripple {

/** An interface facilitating retrieval of fetch packs without
    an application or ledgermaster object.
*/
class AbstractFetchPackContainer
{
public:
    virtual ~AbstractFetchPackContainer() = default;

    /** Retrieves partial ledger data of the corresponding hash from peers.

        @param nodeHash The 256-bit hash of the data to fetch.
        @return `boost::none` if the hash isn't cached,
            otherwise, the data associated with the hash.
    */
    virtual boost::optional<Blob> getFetchPack(uint256 const& nodeHash) = 0;
};

} // ripple

#endif
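To make the contract concrete, here is a minimal sketch of a class that satisfies the interface. It is hypothetical (an in-memory container such as a test might use) and is not part of this commit; in the commit itself, LedgerMaster is the implementer (see the LedgerMaster.h hunk below).

// Hypothetical in-memory implementation of AbstractFetchPackContainer,
// e.g. for a unit test. Not part of this commit.
#include <ripple/app/ledger/AbstractFetchPackContainer.h>
#include <map>

namespace ripple {

class InMemoryFetchPackContainer : public AbstractFetchPackContainer
{
public:
    // Cache a fetch pack entry under its hash.
    void add(uint256 const& hash, Blob data)
    {
        map_.emplace(hash, std::move(data));
    }

    // Return the cached data for nodeHash, or boost::none if unknown.
    boost::optional<Blob>
    getFetchPack(uint256 const& nodeHash) override
    {
        auto const it = map_.find(nodeHash);
        if (it == map_.end())
            return boost::none;
        return it->second;
    }

private:
    std::map<uint256, Blob> map_;
};

} // ripple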
AccountStateSF.cpp:

@@ -19,17 +19,12 @@
#include <BeastConfig.h>
#include <ripple/app/ledger/AccountStateSF.h>
#include <ripple/app/ledger/LedgerMaster.h>
#include <ripple/app/ledger/TransactionMaster.h>
#include <ripple/app/main/Application.h>
#include <ripple/app/misc/NetworkOPs.h>
#include <ripple/nodestore/Database.h>
#include <ripple/protocol/HashPrefix.h>

namespace ripple {

AccountStateSF::AccountStateSF(Application& app)
    : app_ (app)
AccountStateSF::AccountStateSF(Family& f, AbstractFetchPackContainer& fp)
    : f_(f)
    , fp_(fp)
{
}

@@ -38,17 +33,14 @@ void AccountStateSF::gotNode (bool fromFilter,
    Blob&& nodeData,
    SHAMapTreeNode::TNType) const
{
    // VFALCO SHAMapSync filters should be passed the SHAMap, the
    // SHAMap should provide an accessor to get the injected Database,
    // and this should use that Database instead of getNodeStore
    app_.getNodeStore ().store (
        hotACCOUNT_NODE, std::move (nodeData), nodeHash.as_uint256());
    f_.db().store(hotACCOUNT_NODE, std::move(nodeData),
        nodeHash.as_uint256());
}

bool AccountStateSF::haveNode (SHAMapHash const& nodeHash,
    Blob& nodeData) const
boost::optional<Blob>
AccountStateSF::getNode(SHAMapHash const& nodeHash) const
{
    return app_.getLedgerMaster ().getFetchPack (nodeHash.as_uint256(), nodeData);
    return fp_.getFetchPack(nodeHash.as_uint256());
}

} // ripple
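With these two hunks applied, the filter no longer touches Application at all: it writes nodes through the Family's node store and reads fetch packs through the AbstractFetchPackContainer. A hedged sketch of a call site, mirroring the InboundLedger changes further down (ledger and app stand in for the shared ledger pointer and the Application):

// Construct the filter from a SHAMap's Family and the LedgerMaster,
// which now implements AbstractFetchPackContainer.
AccountStateSF filter(ledger->stateMap().family(),
                      app.getLedgerMaster());

// The lookup no longer fills a caller-supplied Blob; it returns an optional.
if (auto blob = filter.getNode(SHAMapHash{ledger->info().accountHash}))
{
    // *blob holds the fetch-pack data cached for that hash.
}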
AccountStateSF.h:

@@ -20,8 +20,9 @@
#ifndef RIPPLE_APP_LEDGER_ACCOUNTSTATESF_H_INCLUDED
#define RIPPLE_APP_LEDGER_ACCOUNTSTATESF_H_INCLUDED

#include <ripple/app/main/Application.h>
#include <ripple/app/ledger/AbstractFetchPackContainer.h>
#include <ripple/shamap/SHAMapSyncFilter.h>
#include <ripple/shamap/Family.h>

namespace ripple {

@@ -31,11 +32,11 @@ class AccountStateSF
    : public SHAMapSyncFilter
{
private:
    Application& app_;
    Family& f_;
    AbstractFetchPackContainer& fp_;

public:
    explicit
    AccountStateSF (Application& app);
    AccountStateSF(Family&, AbstractFetchPackContainer&);

    // Note that the nodeData is overwritten by this call
    void gotNode (bool fromFilter,

@@ -43,8 +44,8 @@ public:
    Blob&& nodeData,
    SHAMapTreeNode::TNType) const override;

    bool haveNode (SHAMapHash const& nodeHash,
        Blob& nodeData) const override;
    boost::optional<Blob>
    getNode(SHAMapHash const& nodeHash) const override;
};

} // ripple
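The header change above is the same mechanical transformation applied to every filter in this commit: a bool return plus a Blob out-parameter becomes a single boost::optional<Blob>. A self-contained illustration of the pattern (standalone names, not rippled API):

#include <boost/optional.hpp>
#include <map>
#include <vector>

using Blob = std::vector<unsigned char>;

// Before: success is the return value, the data travels through an
// out-parameter that the caller must declare up front.
bool lookupOld(std::map<int, Blob> const& cache, int key, Blob& out)
{
    auto const it = cache.find(key);
    if (it == cache.end())
        return false;
    out = it->second;
    return true;
}

// After: one return value carries both the data and the "not found" case.
boost::optional<Blob> lookupNew(std::map<int, Blob> const& cache, int key)
{
    auto const it = cache.find(key);
    if (it == cache.end())
        return boost::none;
    return it->second;
}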
ConsensusTransSetSF.cpp:

@@ -75,11 +75,12 @@ void ConsensusTransSetSF::gotNode (
    }
}

bool ConsensusTransSetSF::haveNode (
    SHAMapHash const& nodeHash, Blob& nodeData) const
boost::optional<Blob>
ConsensusTransSetSF::getNode (SHAMapHash const& nodeHash) const
{
    Blob nodeData;
    if (m_nodeCache.retrieve (nodeHash, nodeData))
        return true;
        return nodeData;

    auto txn = app_.getMasterTransaction().fetch(nodeHash.as_uint256(), false);

@@ -93,10 +94,10 @@ bool ConsensusTransSetSF::haveNode (
        txn->getSTransaction ()->add (s);
        assert(sha512Half(s.slice()) == nodeHash.as_uint256());
        nodeData = s.peekData ();
        return true;
        return nodeData;
    }

    return false;
    return boost::none;
}

} // ripple
ConsensusTransSetSF.h:

@@ -44,8 +44,8 @@ public:
    Blob&& nodeData,
    SHAMapTreeNode::TNType) const override;

    bool haveNode (SHAMapHash const& nodeHash,
        Blob& nodeData) const override;
    boost::optional<Blob>
    getNode (SHAMapHash const& nodeHash) const override;

private:
    Application& app_;
LedgerMaster.h:

@@ -21,6 +21,7 @@
#define RIPPLE_APP_LEDGER_LEDGERMASTER_H_INCLUDED

#include <ripple/app/main/Application.h>
#include <ripple/app/ledger/AbstractFetchPackContainer.h>
#include <ripple/app/ledger/Ledger.h>
#include <ripple/app/ledger/LedgerCleaner.h>
#include <ripple/app/ledger/LedgerHistory.h>

@@ -61,6 +62,7 @@ struct LedgerReplay
//
class LedgerMaster
    : public Stoppable
    , public AbstractFetchPackContainer
{
public:
    explicit

@@ -232,9 +234,8 @@ public:
        uint256 const& hash,
        std::shared_ptr<Blob>& data);

    bool getFetchPack (
        uint256 const& hash,
        Blob& data);
    boost::optional<Blob>
    getFetchPack (uint256 const& hash) override;

    void makeFetchPack (
        std::weak_ptr<Peer> const& wPeer,
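Because LedgerMaster now derives from AbstractFetchPackContainer and getFetchPack is an override, code that only needs fetch packs can accept the interface rather than the whole Application. A hedged sketch of that decoupling (fetchFromPack is a hypothetical helper, not rippled API):

// Any AbstractFetchPackContainer will do: the real LedgerMaster in
// production, or a lightweight stand-in in tests.
boost::optional<Blob>
fetchFromPack(AbstractFetchPackContainer& fp, uint256 const& hash)
{
    return fp.getFetchPack(hash);
}

// e.g.  auto blob = fetchFromPack(app.getLedgerMaster(), someHash);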
TransactionStateSF.cpp:

@@ -19,40 +19,31 @@
#include <BeastConfig.h>
#include <ripple/app/ledger/TransactionStateSF.h>
#include <ripple/app/ledger/LedgerMaster.h>
#include <ripple/app/ledger/TransactionMaster.h>
#include <ripple/app/main/Application.h>
#include <ripple/app/misc/NetworkOPs.h>
#include <ripple/nodestore/Database.h>
#include <ripple/protocol/HashPrefix.h>

namespace ripple {

TransactionStateSF::TransactionStateSF(Application& app)
    : app_ (app)
TransactionStateSF::TransactionStateSF(Family& f,
    AbstractFetchPackContainer& fp)
    : f_(f)
    , fp_(fp)
{
}

// VFALCO This might be better as Blob&&
void TransactionStateSF::gotNode (bool fromFilter,
    SHAMapHash const& nodeHash,
    Blob&& nodeData,
    SHAMapTreeNode::TNType type) const
{
    // VFALCO SHAMapSync filters should be passed the SHAMap, the
    // SHAMap should provide an accessor to get the injected Database,
    // and this should use that Database instead of getNodeStore
    assert(type !=
        SHAMapTreeNode::tnTRANSACTION_NM);
    app_.getNodeStore().store(
        hotTRANSACTION_NODE,
        std::move (nodeData), nodeHash.as_uint256());
    f_.db().store(hotTRANSACTION_NODE,
        std::move (nodeData), nodeHash.as_uint256());
}

bool TransactionStateSF::haveNode (SHAMapHash const& nodeHash,
    Blob& nodeData) const
boost::optional<Blob>
TransactionStateSF::getNode(SHAMapHash const& nodeHash) const
{
    return app_.getLedgerMaster ().getFetchPack (nodeHash.as_uint256(), nodeData);
    return fp_.getFetchPack(nodeHash.as_uint256());
}

} // ripple
TransactionStateSF.h:

@@ -20,8 +20,9 @@
#ifndef RIPPLE_APP_LEDGER_TRANSACTIONSTATESF_H_INCLUDED
#define RIPPLE_APP_LEDGER_TRANSACTIONSTATESF_H_INCLUDED

#include <ripple/app/main/Application.h>
#include <ripple/app/ledger/AbstractFetchPackContainer.h>
#include <ripple/shamap/SHAMapSyncFilter.h>
#include <ripple/shamap/Family.h>
#include <cstdint>

namespace ripple {

@@ -32,11 +33,12 @@ class TransactionStateSF
    : public SHAMapSyncFilter
{
private:
    Application& app_;
    Family& f_;
    AbstractFetchPackContainer& fp_;

public:
    explicit
    TransactionStateSF(Application& app);
    TransactionStateSF(Family&, AbstractFetchPackContainer&);

    // Note that the nodeData is overwritten by this call
    void gotNode (bool fromFilter,

@@ -44,8 +46,8 @@ public:
    Blob&& nodeData,
    SHAMapTreeNode::TNType) const override;

    bool haveNode (SHAMapHash const& nodeHash,
        Blob& nodeData) const override;
    boost::optional<Blob>
    getNode(SHAMapHash const& nodeHash) const override;
};

} // ripple
InboundLedger.cpp:

@@ -246,20 +246,19 @@ bool InboundLedger::tryLocal ()
    if (!node)
    {
        Blob data;

        if (!app_.getLedgerMaster ().getFetchPack (mHash, data))
        auto data = app_.getLedgerMaster().getFetchPack(mHash);
        if (! data)
            return false;

        JLOG (m_journal.trace()) <<
            "Ledger header found in fetch pack";

        mLedger = std::make_shared<Ledger> (
            deserializeHeader (makeSlice(data), true),
            deserializeHeader (makeSlice(*data), true),
            app_.family());

        app_.getNodeStore ().store (
            hotLEDGER, std::move (data), mHash);
            hotLEDGER, std::move (*data), mHash);
    }
    else
    {
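Since the hunk above interleaves the removed and added lines, the new fetch-pack path in InboundLedger::tryLocal presumably reads as follows once assembled (a reconstruction from the hunk, not text copied verbatim from the commit):

if (!node)
{
    // Ask the LedgerMaster for a fetch pack entry; boost::none means the
    // header is not cached and the ledger cannot be built locally.
    auto data = app_.getLedgerMaster().getFetchPack(mHash);
    if (! data)
        return false;

    JLOG (m_journal.trace()) <<
        "Ledger header found in fetch pack";

    mLedger = std::make_shared<Ledger> (
        deserializeHeader (makeSlice(*data), true),
        app_.family());

    app_.getNodeStore ().store (
        hotLEDGER, std::move (*data), mHash);
}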
@@ -290,7 +289,8 @@ bool InboundLedger::tryLocal ()
    }
    else
    {
        TransactionStateSF filter(app_);
        TransactionStateSF filter(mLedger->txMap().family(),
            app_.getLedgerMaster());

        if (mLedger->txMap().fetchRoot (
            SHAMapHash{mLedger->info().txHash}, &filter))

@@ -316,7 +316,8 @@ bool InboundLedger::tryLocal ()
        return true;
    }

    AccountStateSF filter(app_);
    AccountStateSF filter(mLedger->stateMap().family(),
        app_.getLedgerMaster());

    if (mLedger->stateMap().fetchRoot (
        SHAMapHash{mLedger->info().accountHash}, &filter))

@@ -607,7 +608,8 @@ void InboundLedger::trigger (std::shared_ptr<Peer> const& peer, TriggerReason re
    }
    else
    {
        AccountStateSF filter(app_);
        AccountStateSF filter(mLedger->stateMap().family(),
            app_.getLedgerMaster());

        // Release the lock while we process the large state map
        sl.unlock();

@@ -680,7 +682,8 @@ void InboundLedger::trigger (std::shared_ptr<Peer> const& peer, TriggerReason re
    }
    else
    {
        TransactionStateSF filter(app_);
        TransactionStateSF filter(mLedger->txMap().family(),
            app_.getLedgerMaster());

        auto nodes = mLedger->txMap().getMissingNodes (
            missingNodesFind, &filter);

@@ -849,21 +852,23 @@ bool InboundLedger::takeTxNode (const std::vector<SHAMapNodeID>& nodeIDs,

    auto nodeIDit = nodeIDs.cbegin ();
    auto nodeDatait = data.begin ();
    TransactionStateSF tFilter(app_);
    TransactionStateSF filter(mLedger->txMap().family(),
        app_.getLedgerMaster());

    while (nodeIDit != nodeIDs.cend ())
    {
        if (nodeIDit->isRoot ())
        {
            san += mLedger->txMap().addRootNode (
                SHAMapHash{mLedger->info().txHash}, makeSlice(*nodeDatait), snfWIRE, &tFilter);
                SHAMapHash{mLedger->info().txHash},
                makeSlice(*nodeDatait), snfWIRE, &filter);
            if (!san.isGood())
                return false;
        }
        else
        {
            san += mLedger->txMap().addKnownNode (
                *nodeIDit, makeSlice(*nodeDatait), &tFilter);
                *nodeIDit, makeSlice(*nodeDatait), &filter);
            if (!san.isGood())
                return false;
        }

@@ -919,14 +924,16 @@ bool InboundLedger::takeAsNode (const std::vector<SHAMapNodeID>& nodeIDs,

    auto nodeIDit = nodeIDs.cbegin ();
    auto nodeDatait = data.begin ();
    AccountStateSF tFilter(app_);
    AccountStateSF filter(mLedger->stateMap().family(),
        app_.getLedgerMaster());

    while (nodeIDit != nodeIDs.cend ())
    {
        if (nodeIDit->isRoot ())
        {
            san += mLedger->stateMap().addRootNode (
                SHAMapHash{mLedger->info().accountHash}, makeSlice(*nodeDatait), snfWIRE, &tFilter);
                SHAMapHash{mLedger->info().accountHash},
                makeSlice(*nodeDatait), snfWIRE, &filter);
            if (!san.isGood ())
            {
                JLOG (m_journal.warn()) <<

@@ -937,7 +944,7 @@ bool InboundLedger::takeAsNode (const std::vector<SHAMapNodeID>& nodeIDs,
        else
        {
            san += mLedger->stateMap().addKnownNode (
                *nodeIDit, makeSlice(*nodeDatait), &tFilter);
                *nodeIDit, makeSlice(*nodeDatait), &filter);
            if (!san.isGood ())
            {
                JLOG (m_journal.warn()) <<

@@ -981,9 +988,10 @@ bool InboundLedger::takeAsRootNode (Slice const& data, SHAMapAddNode& san)
        return false;
    }

    AccountStateSF tFilter(app_);
    AccountStateSF filter(mLedger->stateMap().family(),
        app_.getLedgerMaster());
    san += mLedger->stateMap().addRootNode (
        SHAMapHash{mLedger->info().accountHash}, data, snfWIRE, &tFilter);
        SHAMapHash{mLedger->info().accountHash}, data, snfWIRE, &filter);
    return san.isGood();
}

@@ -1004,9 +1012,10 @@ bool InboundLedger::takeTxRootNode (Slice const& data, SHAMapAddNode& san)
        return false;
    }

    TransactionStateSF tFilter(app_);
    TransactionStateSF filter(mLedger->txMap().family(),
        app_.getLedgerMaster());
    san += mLedger->txMap().addRootNode (
        SHAMapHash{mLedger->info().txHash}, data, snfWIRE, &tFilter);
        SHAMapHash{mLedger->info().txHash}, data, snfWIRE, &filter);
    return san.isGood();
}

@@ -1024,7 +1033,8 @@ InboundLedger::getNeededHashes ()

    if (!mHaveState)
    {
        AccountStateSF filter(app_);
        AccountStateSF filter(mLedger->stateMap().family(),
            app_.getLedgerMaster());
        for (auto const& h : neededStateHashes (4, &filter))
        {
            ret.push_back (std::make_pair (

@@ -1034,7 +1044,8 @@ InboundLedger::getNeededHashes ()

    if (!mHaveTransactions)
    {
        TransactionStateSF filter(app_);
        TransactionStateSF filter(mLedger->txMap().family(),
            app_.getLedgerMaster());
        for (auto const& h : neededTxHashes (4, &filter))
        {
            ret.push_back (std::make_pair (
LedgerMaster.cpp:

@@ -1779,17 +1779,18 @@ LedgerMaster::addFetchPack (
    fetch_packs_.canonicalize (hash, data);
}

bool
boost::optional<Blob>
LedgerMaster::getFetchPack (
    uint256 const& hash,
    Blob& data)
    uint256 const& hash)
{
    if (!fetch_packs_.retrieve (hash, data))
        return false;

    fetch_packs_.del (hash, false);

    return hash == sha512Half(makeSlice(data));
    Blob data;
    if (fetch_packs_.retrieve(hash, data))
    {
        fetch_packs_.del(hash, false);
        if (hash == sha512Half(makeSlice(data)))
            return data;
    }
    return boost::none;
}

void
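The rewritten getFetchPack keeps the old guarantees: the entry is consumed from the fetch-pack cache and is only handed back if it still hashes to the requested key. A hedged caller-side sketch of that contract, matching the InboundLedger::tryLocal usage above:

if (auto data = app.getLedgerMaster().getFetchPack(hash))
{
    // Enforced by the implementation above before the Blob is returned.
    assert(sha512Half(makeSlice(*data)) == hash);
    app.getNodeStore().store(hotLEDGER, std::move(*data), hash);
}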
SHAMapSyncFilter.h:

@@ -40,8 +40,9 @@ public:
    Blob&& nodeData,
    SHAMapTreeNode::TNType type) const = 0;

    virtual bool haveNode (SHAMapHash const& nodeHash,
        Blob& nodeData) const = 0;
    virtual
    boost::optional<Blob>
    getNode(SHAMapHash const& nodeHash) const = 0;
};

}
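Assembled from the hunk above (with the gotNode parameter list taken from TransactionStateSF.cpp, and the destructor assumed), the sync filter interface presumably reads roughly like this after the change; treat it as a reconstruction, not a verbatim copy of the header:

class SHAMapSyncFilter
{
public:
    virtual ~SHAMapSyncFilter() = default;

    // Note that the nodeData is overwritten by this call
    virtual void gotNode (bool fromFilter,
        SHAMapHash const& nodeHash,
        Blob&& nodeData,
        SHAMapTreeNode::TNType type) const = 0;

    virtual
    boost::optional<Blob>
    getNode(SHAMapHash const& nodeHash) const = 0;
};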
SHAMapSync.cpp:

@@ -306,15 +306,14 @@ SHAMap::checkFilter(SHAMapHash const& hash,
    SHAMapSyncFilter* filter) const
{
    std::shared_ptr<SHAMapAbstractNode> node;
    Blob nodeData;
    if (filter->haveNode (hash, nodeData))
    if (auto nodeData = filter->getNode (hash))
    {
        node = SHAMapAbstractNode::make(
            makeSlice(nodeData), 0, snfPREFIX, hash, true, f_.journal ());
            makeSlice(*nodeData), 0, snfPREFIX, hash, true, f_.journal ());
        if (node)
        {
            filter->gotNode (true, hash,
                std::move(nodeData), node->getType ());
                std::move(*nodeData), node->getType ());
            if (backed_)
                canonicalize (hash, node);
        }
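Dropping the removed lines, the body of SHAMap::checkFilter presumably ends up as below; again a reconstruction assembled from the hunk, not verbatim source:

std::shared_ptr<SHAMapAbstractNode> node;
if (auto nodeData = filter->getNode (hash))
{
    node = SHAMapAbstractNode::make(
        makeSlice(*nodeData), 0, snfPREFIX, hash, true, f_.journal ());
    if (node)
    {
        // Let the filter persist the node, then cache it if this map
        // is backed by the node store.
        filter->gotNode (true, hash,
            std::move(*nodeData), node->getType ());
        if (backed_)
            canonicalize (hash, node);
    }
}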
(test sync filter)

@@ -66,17 +66,16 @@
    {
    }

    bool haveNode (SHAMapHash const& nodeHash,
        Blob& nodeData) const override
    boost::optional<Blob>
    getNode (SHAMapHash const& nodeHash) const override
    {
        Map::iterator it = mMap.find (nodeHash);
        if (it == mMap.end ())
        {
            JLOG(mJournal.fatal()) << "Test filter missing node";
            return false;
            return boost::none;
        }
        nodeData = it->second;
        return true;
        return it->second;
    }

    Map& mMap;