Complete SHAMap V2 cutover

Howard Hinnant
2016-05-26 11:02:44 -04:00
committed by Miguel Portilla
parent 63a5522406
commit d88b63d4c8
9 changed files with 79 additions and 22 deletions

View File

@@ -981,11 +981,13 @@ void LedgerConsensusImp::accept (std::shared_ptr<SHAMap> set)
auto buildLCL = std::make_shared<Ledger>(
*mPreviousLedger,
app_.timeKeeper().closeTime());
-auto constexpr v2_ledger_seq_switch = 40'000'000;
-if (buildLCL->info().seq > v2_ledger_seq_switch &&
-!buildLCL->stateMap().is_v2())
+auto const v2_enabled = buildLCL->rules().enabled(featureSHAMapV2,
+app_.config().features);
+auto v2_transition = false;
+if (v2_enabled && !buildLCL->stateMap().is_v2())
{
buildLCL->make_v2();
+v2_transition = true;
}
// Set up to write SHAMap changes to our database,
@@ -1022,15 +1024,10 @@ void LedgerConsensusImp::accept (std::shared_ptr<SHAMap> set)
buildLCL->updateSkipList ();
// unshare in case a nodestore load changed the
// version back, otherwise the map is inconsistent
-if (buildLCL->info().seq > v2_ledger_seq_switch &&
-!buildLCL->stateMap().is_v2())
-{
-buildLCL->unshare();
-}
// Write the final version of all modified SHAMap
// nodes to the node store to preserve the new LCL
{
int asf = buildLCL->stateMap().flushDirty (
hotACCOUNT_NODE, buildLCL->info().seq);
int tmf = buildLCL->txMap().flushDirty (
@@ -1039,6 +1036,7 @@ void LedgerConsensusImp::accept (std::shared_ptr<SHAMap> set)
asf << " accounts and " <<
tmf << " transaction nodes";
}
+buildLCL->unshare();
// Accept ledger
buildLCL->setAccepted(closeTime, mCloseResolution,
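The hunks above change what triggers the state-map conversion: the hard-coded v2_ledger_seq_switch cutover is replaced by a check of the SHAMapV2 amendment through the ledger's rules, and the unshare() that used to sit behind the same condition now runs unconditionally after the dirty nodes are flushed. A minimal, self-contained sketch of the new gating pattern follows; Ledger, StateMap, and applyV2TransitionIfEnabled are simplified stand-ins for illustration, not the classes used in the diff.

#include <iostream>

// Simplified stand-ins for the ledger and its state map.
struct StateMap
{
    bool v2 = false;
    bool is_v2() const { return v2; }
};

struct Ledger
{
    StateMap map;
    StateMap& stateMap() { return map; }
    void make_v2() { map.v2 = true; }   // one-way upgrade to the v2 format
    void unshare() { /* drop shared node ownership; omitted here */ }
};

// The transition is driven by an enabled amendment rather than by a
// hard-coded ledger sequence.
bool applyV2TransitionIfEnabled(Ledger& ledger, bool v2AmendmentEnabled)
{
    bool v2_transition = false;
    if (v2AmendmentEnabled && !ledger.stateMap().is_v2())
    {
        ledger.make_v2();
        v2_transition = true;
    }
    // ... flush dirty SHAMap nodes to the node store here ...
    ledger.unshare();   // now unconditional, as in the later hunks
    return v2_transition;
}

int main()
{
    Ledger l;
    std::cout << applyV2TransitionIfEnabled(l, true) << '\n';   // 1: transitioned
    std::cout << applyV2TransitionIfEnabled(l, true) << '\n';   // 0: already v2
}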

View File

@@ -1006,7 +1006,7 @@ bool ApplicationImp::setup()
enabledAmendments.append (detail::preEnabledAmendments ());
m_amendmentTable = make_AmendmentTable (
-weeks(2),
+weeks{2},
MAJORITY_FRACTION,
supportedAmendments,
enabledAmendments,
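The only change in this file is the construction of the two-week amendment-majority window, which now uses brace initialization. For an integral count the two spellings build the same std::chrono duration; the standalone example below shows both forms side by side (it uses std::chrono::minutes only because this sketch does not pull in the weeks alias used at the call site).

#include <chrono>
#include <iostream>

int main()
{
    using std::chrono::minutes;

    minutes a(2);   // parenthesized construction, as before this change
    minutes b{2};   // braced construction, as used after this change

    std::cout << a.count() << ' ' << b.count() << '\n';   // prints: 2 2
}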

View File

@@ -42,6 +42,7 @@ extern uint256 const featureFeeEscalation;
extern uint256 const featureFlowV2;
extern uint256 const featureOwnerPaysFee;
extern uint256 const featureCompareFlowV1V2;
+extern uint256 const featureSHAMapV2;
} // ripple

View File

@@ -53,5 +53,6 @@ uint256 const featureFeeEscalation = feature("FeeEscalation");
uint256 const featureFlowV2 = feature("FlowV2");
uint256 const featureOwnerPaysFee = feature("OwnerPaysFee");
uint256 const featureCompareFlowV1V2 = feature("CompareFlowV1V2");
+uint256 const featureSHAMapV2 = feature("SHAMapV2");
} // ripple
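Together with the header hunk above, this registers the new amendment the same way the existing ones are registered: an extern declaration in the header and a single definition built by the feature() helper from the amendment's name. The stripped-down sketch below shows only that declaration/definition split; FeatureId and makeFeatureId are stand-ins, and the std::hash derivation is a placeholder rather than whatever hashing the real helper performs.

#include <array>
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>

// What the header would carry: one declaration shared by all users.
using FeatureId = std::array<std::uint64_t, 4>;   // stand-in for uint256
extern FeatureId const featureSHAMapV2_sketch;

// What the .cpp would carry: the single definition.
// Placeholder for the feature() helper: derive a fixed-width id from the
// amendment name.
FeatureId makeFeatureId(std::string const& name)
{
    FeatureId id{};
    for (std::size_t i = 0; i < id.size(); ++i)
        id[i] = std::hash<std::string>{}(name + std::to_string(i));
    return id;
}

FeatureId const featureSHAMapV2_sketch = makeFeatureId("SHAMapV2");

int main()
{
    std::cout << std::hex << featureSHAMapV2_sketch[0] << '\n';
}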

View File

@@ -315,6 +315,7 @@ private:
std::shared_ptr<SHAMapItem const> const& otherMapItem,
bool isFirstMap, Delta & differences, int & maxCount) const;
int walkSubTree (bool doWrite, NodeObjectType t, std::uint32_t seq);
+bool isInconsistentNode(std::shared_ptr<SHAMapAbstractNode> const& node) const;
};
inline

View File

@@ -166,7 +166,7 @@ public:
void shareChild (int m, std::shared_ptr<SHAMapAbstractNode> const& child);
SHAMapAbstractNode* getChildPointer (int branch);
std::shared_ptr<SHAMapAbstractNode> getChild (int branch);
-std::shared_ptr<SHAMapAbstractNode>
+virtual std::shared_ptr<SHAMapAbstractNode>
canonicalizeChild (int branch, std::shared_ptr<SHAMapAbstractNode> node);
// sync functions
@@ -212,6 +212,8 @@ public:
uint256 const& key() const override;
void setChildren(std::shared_ptr<SHAMapTreeNode> const& child1,
std::shared_ptr<SHAMapTreeNode> const& child2);
+std::shared_ptr<SHAMapAbstractNode>
+canonicalizeChild (int branch, std::shared_ptr<SHAMapAbstractNode> node) override;
void invariants(bool is_v2, bool is_root = false) const override;
friend std::shared_ptr<SHAMapAbstractNode>
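Declaring canonicalizeChild virtual in the base inner-node class and overriding it in SHAMapInnerNodeV2 means the descend paths in SHAMap.cpp, which work through SHAMapInnerNode pointers, automatically get the V2 behaviour when the node really is a V2 inner node. A minimal standalone illustration of that dispatch, with toy classes standing in for the real node types:

#include <iostream>
#include <memory>

// Toy stand-ins for SHAMapInnerNode / SHAMapInnerNodeV2.
struct InnerNode
{
    virtual ~InnerNode() = default;

    // Virtual, so callers holding an InnerNode* reach the derived version.
    virtual std::shared_ptr<InnerNode>
    canonicalizeChild(int branch, std::shared_ptr<InnerNode> node)
    {
        std::cout << "v1 canonicalize, branch " << branch << '\n';
        return node;
    }
};

struct InnerNodeV2 : InnerNode
{
    std::shared_ptr<InnerNode>
    canonicalizeChild(int branch, std::shared_ptr<InnerNode> node) override
    {
        std::cout << "v2 canonicalize, branch " << branch << '\n';
        return node;
    }
};

int main()
{
    std::shared_ptr<InnerNode> parent = std::make_shared<InnerNodeV2>();
    parent->canonicalizeChild(3, std::make_shared<InnerNode>());   // "v2 canonicalize, branch 3"
}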

View File

@@ -207,7 +207,8 @@ SHAMap::walkTowardsKey(uint256 const& id, SharedPtrNodeStack* stack) const
if (inNode->isInner())
{
auto n = std::dynamic_pointer_cast<SHAMapInnerNodeV2>(inNode);
-assert(n);
+if (n == nullptr)
+return nullptr;
nodeID = SHAMapNodeID{n->depth(), n->common()};
}
else
@@ -255,6 +256,11 @@ SHAMap::fetchNodeFromDB (SHAMapHash const& hash) const
{
auto root = std::dynamic_pointer_cast<SHAMapInnerNode>(root_);
assert(root);
+if (!root->isEmpty())
+{
+std::cerr << "isv2 = " << isv2 << '\n';
+std::cerr << "is_v2() = " << is_v2() << '\n';
+}
assert(root->isEmpty());
if (isv2)
{
@@ -405,7 +411,7 @@ SHAMapAbstractNode* SHAMap::descend (SHAMapInnerNode* parent, int branch) const
return ret;
std::shared_ptr<SHAMapAbstractNode> node = fetchNodeNT (parent->getChildHash (branch));
-if (!node)
+if (!node || isInconsistentNode(node))
return nullptr;
node = parent->canonicalizeChild (branch, std::move(node));
@@ -420,7 +426,7 @@ SHAMap::descend (std::shared_ptr<SHAMapInnerNode> const& parent, int branch) con
return node;
node = fetchNode (parent->getChildHash (branch));
-if (!node)
+if (!node || isInconsistentNode(node))
return nullptr;
node = parent->canonicalizeChild (branch, std::move(node));
@@ -452,6 +458,8 @@ SHAMap::descend (SHAMapInnerNode * parent, SHAMapNodeID const& parentID,
if (!child)
{
std::shared_ptr<SHAMapAbstractNode> childNode = fetchNodeNT (childHash, filter);
+if (isInconsistentNode(childNode))
+childNode = nullptr;
if (childNode)
{
@@ -518,6 +526,8 @@ SHAMap::descendAsync (SHAMapInnerNode* parent, int branch,
}
}
+if (isInconsistentNode(ptr))
+ptr = nullptr;
if (ptr)
ptr = parent->canonicalizeChild (branch, std::move(ptr));
@@ -561,7 +571,8 @@ SHAMap::firstBelow(std::shared_ptr<SHAMapAbstractNode> node,
{
if (is_v2())
{
-auto inner2 = std::static_pointer_cast<SHAMapInnerNodeV2>(inner);
+auto inner2 = std::dynamic_pointer_cast<SHAMapInnerNodeV2>(inner);
+assert(inner2 != nullptr);
stack.push({inner2, {inner2->depth(), inner2->common()}});
}
else
@@ -1373,7 +1384,20 @@ SHAMap::invariants() const
auto node = root_.get();
assert(node != nullptr);
assert(!node->isLeaf());
+SharedPtrNodeStack stack;
+for (auto leaf = peekFirstItem(stack); leaf != nullptr;
+leaf = peekNextItem(leaf->peekItem()->key(), stack))
+;
node->invariants(is_v2(), true);
}
+bool
+SHAMap::isInconsistentNode(std::shared_ptr<SHAMapAbstractNode> const& node) const
+{
+if (std::dynamic_pointer_cast<SHAMapTreeNode>(node) != nullptr)
+return false;
+bool is_node_v2 = std::dynamic_pointer_cast<SHAMapInnerNodeV2>(node) != nullptr;
+return is_v2() != is_node_v2;
+}
} // ripple
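The new isInconsistentNode helper is a runtime type check: a leaf (SHAMapTreeNode) is always acceptable, while an inner node must match the map's own V1/V2 flavour, which std::dynamic_pointer_cast can determine. The descend and fetch paths above treat a mismatched node the same as a failed fetch. A self-contained sketch of the same check, with toy classes in place of the SHAMap node hierarchy:

#include <cassert>
#include <memory>

// Toy hierarchy standing in for SHAMapAbstractNode and its subclasses.
struct AbstractNode { virtual ~AbstractNode() = default; };
struct LeafNode : AbstractNode {};    // plays the role of SHAMapTreeNode
struct InnerV1 : AbstractNode {};
struct InnerV2 : AbstractNode {};

// Mirrors SHAMap::isInconsistentNode: leaves are never inconsistent; an
// inner node is inconsistent when its version differs from the map's.
bool isInconsistentNode(bool mapIsV2, std::shared_ptr<AbstractNode> const& node)
{
    if (std::dynamic_pointer_cast<LeafNode>(node) != nullptr)
        return false;
    bool const nodeIsV2 = std::dynamic_pointer_cast<InnerV2>(node) != nullptr;
    return mapIsV2 != nodeIsV2;
}

int main()
{
    std::shared_ptr<AbstractNode> leaf = std::make_shared<LeafNode>();
    std::shared_ptr<AbstractNode> v1 = std::make_shared<InnerV1>();
    std::shared_ptr<AbstractNode> v2 = std::make_shared<InnerV2>();

    assert(!isInconsistentNode(true, leaf));   // leaves pass in either map
    assert(isInconsistentNode(true, v1));      // v1 inner node in a v2 map
    assert(!isInconsistentNode(true, v2));
    assert(isInconsistentNode(false, v2));     // v2 inner node in a v1 map
}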

View File

@@ -262,7 +262,7 @@ SHAMap::getMissingNodes(std::size_t max, SHAMapSyncFilter* filter)
auto const& nodeHash = parent->getChildHash (branch);
auto nodePtr = fetchNodeNT(nodeHash, filter);
-if (nodePtr)
+if (nodePtr && !isInconsistentNode(nodePtr))
{
++hits;
if (backed_)
@@ -523,15 +523,18 @@ SHAMap::addKnownNode (const SHAMapNodeID& node, Blob const& rawNode,
return SHAMapAddNode::useful ();
}
#ifndef NDEBUG
-if (newNode && newNode->isInner())
+if (newNode && isInconsistentNode(newNode))
{
-bool isv2 = std::dynamic_pointer_cast<SHAMapInnerNodeV2>(newNode) != nullptr;
-assert(isv2 == is_v2());
+return SHAMapAddNode::invalid();
}
#endif
if (backed_)
{
+auto temp = newNode;
canonicalize (childHash, newNode);
+if (isInconsistentNode(newNode))
+newNode = temp;
}
newNode = prevNode->canonicalizeChild (branch, std::move(newNode));
if (filter)
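Two guards are added here: an incoming node whose V1/V2 flavour does not match the map is rejected as invalid, and the canonicalize() call is protected so that if the node cache hands back an instance of the wrong flavour, the freshly built node saved in temp is used instead. The sketch below shows that keep-a-copy-and-restore pattern; the cache and Node type are simplified stand-ins, not rippled's node store.

#include <cassert>
#include <map>
#include <memory>
#include <string>

struct Node
{
    bool v2;
    explicit Node(bool isV2) : v2(isV2) {}
};

// Stand-in for the cache behind canonicalize(): return the instance already
// stored under this hash, or store the new one.
std::map<std::string, std::shared_ptr<Node>> cache;

void canonicalize(std::string const& hash, std::shared_ptr<Node>& node)
{
    auto it = cache.find(hash);
    if (it != cache.end())
        node = it->second;          // may hand back a different instance
    else
        cache.emplace(hash, node);
}

// Mirrors the guarded call above: if the canonical instance has the wrong
// flavour for this map, fall back to the node that was just built.
std::shared_ptr<Node>
canonicalizeChecked(bool mapIsV2, std::string const& hash, std::shared_ptr<Node> newNode)
{
    auto temp = newNode;
    canonicalize(hash, newNode);
    if (newNode->v2 != mapIsV2)     // inconsistent cached copy
        newNode = temp;
    return newNode;
}

int main()
{
    cache["abc"] = std::make_shared<Node>(false);    // stale v1 instance
    auto fresh = std::make_shared<Node>(true);       // newly built v2 node
    auto used = canonicalizeChecked(true, "abc", fresh);
    assert(used == fresh);   // the inconsistent cached instance is not used
}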

View File

@@ -710,6 +710,33 @@ SHAMapInnerNode::canonicalizeChild(int branch, std::shared_ptr<SHAMapAbstractNod
else
{
// Hook this node up
+// node must not be a v2 inner node
+assert(std::dynamic_pointer_cast<SHAMapInnerNodeV2>(node) == nullptr);
mChildren[branch] = node;
}
return node;
}
+std::shared_ptr<SHAMapAbstractNode>
+SHAMapInnerNodeV2::canonicalizeChild(int branch, std::shared_ptr<SHAMapAbstractNode> node)
+{
+assert (branch >= 0 && branch < 16);
+assert (isInner());
+assert (node);
+assert (node->getNodeHash() == mHashes[branch]);
+std::unique_lock <std::mutex> lock (childLock);
+if (mChildren[branch])
+{
+// There is already a node hooked up, return it
+node = mChildren[branch];
+}
+else
+{
+// Hook this node up
+// node must not be a v1 inner node
+assert(std::dynamic_pointer_cast<SHAMapInnerNodeV2>(node) != nullptr ||
+std::dynamic_pointer_cast<SHAMapTreeNode>(node) != nullptr);
+mChildren[branch] = node;
+}
+return node;
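Both canonicalizeChild bodies follow the same install-or-reuse pattern under childLock: if a child is already hooked up at that branch it wins and is returned, otherwise the supplied node is installed; the only difference between the V1 and V2 versions is which node types their asserts accept. A standalone sketch of that pattern with a toy inner node:

#include <array>
#include <cassert>
#include <memory>
#include <mutex>

struct Node { int value = 0; };   // stand-in for SHAMapAbstractNode

class Inner
{
public:
    std::shared_ptr<Node>
    canonicalizeChild(int branch, std::shared_ptr<Node> node)
    {
        assert(branch >= 0 && branch < 16);
        assert(node != nullptr);

        std::unique_lock<std::mutex> lock(childLock_);
        if (children_[branch])
            node = children_[branch];   // a child is already hooked up: reuse it
        else
            children_[branch] = node;   // first arrival: install this node
        return node;
    }

private:
    std::mutex childLock_;
    std::array<std::shared_ptr<Node>, 16> children_;
};

int main()
{
    Inner parent;
    auto first = std::make_shared<Node>();
    auto second = std::make_shared<Node>();

    auto a = parent.canonicalizeChild(5, first);
    auto b = parent.canonicalizeChild(5, second);
    assert(a == first && b == first);   // later callers get the canonical child
}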