1#include <test/unit_test/SuiteJournal.h>
3#include <xrpl/basics/IntrusivePointer.ipp>
4#include <xrpl/basics/IntrusiveRefCounts.h>
5#include <xrpl/beast/unit_test.h>
6#include <xrpl/beast/utility/Journal.h>
26 partiallyDeletedStarted,
41 assert(
id < state.size());
42 return state[id].load(std::memory_order_acquire);
45 resetStates(
bool resetCallback)
47 for (
int i = 0; i < maxStates; ++i)
50 TrackedState::uninitialized, std::memory_order_release);
52 nextId.
store(0, std::memory_order_release);
54 TIBase::tracingCallback_ = [](TrackedState,
58 struct ResetStatesGuard
60 bool resetCallback_{
false};
62 ResetStatesGuard(
bool resetCallback) : resetCallback_{resetCallback}
64 TIBase::resetStates(resetCallback_);
68 TIBase::resetStates(resetCallback_);
72 TIBase() : id_{checkoutID()}
74 assert(state.size() > id_);
75 state[id_].store(TrackedState::alive, std::memory_order_relaxed);
79 using enum TrackedState;
81 assert(state.size() > id_);
83 state[id_].load(std::memory_order_relaxed), deletedStarted);
85 assert(state.size() > id_);
89 state[id_].store(deletedStarted, std::memory_order_relaxed);
91 tracingCallback_(deletedStarted, deleted);
93 assert(state.size() > id_);
94 state[id_].store(TrackedState::deleted, std::memory_order_relaxed);
96 tracingCallback_(TrackedState::deleted, std::nullopt);
102 using enum TrackedState;
104 assert(state.size() > id_);
106 state[id_].load(std::memory_order_relaxed),
107 partiallyDeletedStarted);
109 assert(state.size() > id_);
110 state[id_].store(partiallyDeletedStarted, std::memory_order_relaxed);
112 tracingCallback_(partiallyDeletedStarted, partiallyDeleted);
114 assert(state.size() > id_);
115 state[id_].store(partiallyDeleted, std::memory_order_relaxed);
117 tracingCallback_(partiallyDeleted, std::nullopt);
129 return nextId.
fetch_add(1, std::memory_order_acq_rel);
150 TIBase::ResetStatesGuard rsg{
true};
153 BEAST_EXPECT(b.use_count() == 1);
155 BEAST_EXPECT(b.use_count() == 1);
156 auto s = b.releaseStrongRef();
158 BEAST_EXPECT(b.use_count() == 0);
162 auto w = b.releaseWeakRef();
169 TIBase::ResetStatesGuard rsg{
true};
171 using enum TrackedState;
172 auto b = make_SharedIntrusive<TIBase>();
174 BEAST_EXPECT(TIBase::getState(
id) == alive);
175 BEAST_EXPECT(b->use_count() == 1);
176 for (
int i = 0; i < 10; ++i)
181 BEAST_EXPECT(TIBase::getState(
id) == alive);
183 BEAST_EXPECT(TIBase::getState(
id) == alive);
185 BEAST_EXPECT(TIBase::getState(
id) == deleted);
187 b = make_SharedIntrusive<TIBase>();
189 BEAST_EXPECT(TIBase::getState(
id) == alive);
190 BEAST_EXPECT(b->use_count() == 1);
191 for (
int i = 0; i < 10; ++i)
194 BEAST_EXPECT(b->use_count() == 1);
196 BEAST_EXPECT(TIBase::getState(
id) == alive);
198 BEAST_EXPECT(TIBase::getState(
id) == alive);
200 BEAST_EXPECT(TIBase::getState(
id) == partiallyDeleted);
201 while (!weak.
empty())
205 BEAST_EXPECT(TIBase::getState(
id) == partiallyDeleted);
207 BEAST_EXPECT(TIBase::getState(
id) == deleted);
210 TIBase::ResetStatesGuard rsg{
true};
212 using enum TrackedState;
213 auto b = make_SharedIntrusive<TIBase>();
215 BEAST_EXPECT(TIBase::getState(
id) == alive);
217 BEAST_EXPECT(TIBase::getState(
id) == alive);
219 BEAST_EXPECT(s && s->use_count() == 2);
221 BEAST_EXPECT(TIBase::getState(
id) == alive);
222 BEAST_EXPECT(s && s->use_count() == 1);
224 BEAST_EXPECT(TIBase::getState(
id) == partiallyDeleted);
225 BEAST_EXPECT(w.expired());
231 BEAST_EXPECT(TIBase::getState(
id) == deleted);
234 TIBase::ResetStatesGuard rsg{
true};
236 using enum TrackedState;
238 swu b = make_SharedIntrusive<TIBase>();
239 BEAST_EXPECT(b.isStrong() && b.use_count() == 1);
240 auto id = b.get()->id_;
241 BEAST_EXPECT(TIBase::getState(
id) == alive);
243 BEAST_EXPECT(TIBase::getState(
id) == alive);
244 BEAST_EXPECT(w.isStrong() && b.use_count() == 2);
246 BEAST_EXPECT(w.isWeak() && b.use_count() == 1);
248 BEAST_EXPECT(s.isWeak() && b.use_count() == 1);
250 BEAST_EXPECT(s.isStrong() && b.use_count() == 2);
252 BEAST_EXPECT(TIBase::getState(
id) == alive);
253 BEAST_EXPECT(s.use_count() == 1);
254 BEAST_EXPECT(!w.expired());
256 BEAST_EXPECT(TIBase::getState(
id) == partiallyDeleted);
257 BEAST_EXPECT(w.expired());
261 BEAST_EXPECT(w.isWeak());
263 BEAST_EXPECT(TIBase::getState(
id) == deleted);
268 TIBase::ResetStatesGuard rsg{
true};
270 auto strong1 = make_SharedIntrusive<TIBase>();
271 auto strong2 = make_SharedIntrusive<TIBase>();
273 auto id1 = strong1->id_;
274 auto id2 = strong2->id_;
276 BEAST_EXPECT(id1 != id2);
283 BEAST_EXPECT(union1.
get() == strong1.get());
284 BEAST_EXPECT(union2.
get() == strong2.get());
290 BEAST_EXPECT(union1.
get() == union2.
get());
291 BEAST_EXPECT(TIBase::getState(id1) == TrackedState::alive);
292 BEAST_EXPECT(TIBase::getState(id2) == TrackedState::alive);
296 BEAST_EXPECT(TIBase::getState(id1) == TrackedState::alive);
297 int initialRefCount = strong1->use_count();
298#pragma clang diagnostic push
299#pragma clang diagnostic ignored "-Wself-assign-overloaded"
301#pragma clang diagnostic pop
303 BEAST_EXPECT(TIBase::getState(id1) == TrackedState::alive);
304 BEAST_EXPECT(strong1->use_count() == initialRefCount);
308 BEAST_EXPECT(union1.
get() ==
nullptr);
314 BEAST_EXPECT(union1.
get() ==
nullptr);
315 BEAST_EXPECT(TIBase::getState(id2) == TrackedState::deleted);
331 using enum TrackedState;
333 TIBase::ResetStatesGuard rsg{
true};
335 auto strong = make_SharedIntrusive<TIBase>();
337 bool destructorRan =
false;
338 bool partialDeleteRan =
false;
339 std::latch partialDeleteStartedSyncPoint{2};
340 strong->tracingCallback_ = [&](TrackedState cur,
342 using enum TrackedState;
343 if (next == deletedStarted)
350 BEAST_EXPECT(cur == partiallyDeleted);
352 if (next == partiallyDeletedStarted)
354 partialDeleteStartedSyncPoint.arrive_and_wait();
355 using namespace std::chrono_literals;
361 if (next == partiallyDeleted)
363 BEAST_EXPECT(!partialDeleteRan && !destructorRan);
364 partialDeleteRan =
true;
368 BEAST_EXPECT(!destructorRan);
369 destructorRan =
true;
373 partialDeleteStartedSyncPoint.arrive_and_wait();
383 BEAST_EXPECT(destructorRan && partialDeleteRan);
399 using enum TrackedState;
401 TIBase::ResetStatesGuard rsg{
true};
403 auto strong = make_SharedIntrusive<TIBase>();
405 bool destructorRan =
false;
406 bool partialDeleteRan =
false;
407 std::latch weakResetSyncPoint{2};
408 strong->tracingCallback_ = [&](TrackedState cur,
410 using enum TrackedState;
411 if (next == partiallyDeleted)
413 BEAST_EXPECT(!partialDeleteRan && !destructorRan);
414 partialDeleteRan =
true;
418 BEAST_EXPECT(!destructorRan);
419 destructorRan =
true;
424 weakResetSyncPoint.arrive_and_wait();
427 weakResetSyncPoint.arrive_and_wait();
433 BEAST_EXPECT(destructorRan && !partialDeleteRan);
439 testcase(
"Multithreaded Clear Mixed Variant");
446 using enum TrackedState;
447 TIBase::ResetStatesGuard rsg{
true};
452 int s = destructionState.load(std::memory_order_relaxed);
453 return {(s & 1) != 0, (s & 2) != 0};
455 auto setDestructorRan = [&]() ->
void {
456 destructionState.fetch_or(1, std::memory_order_acq_rel);
458 auto setPartialDeleteRan = [&]() ->
void {
459 destructionState.fetch_or(2, std::memory_order_acq_rel);
461 auto tracingCallback = [&](TrackedState cur,
463 using enum TrackedState;
464 auto [destructorRan, partialDeleteRan] = getDestructorState();
465 if (next == partiallyDeleted)
467 BEAST_EXPECT(!partialDeleteRan && !destructorRan);
468 setPartialDeleteRan();
472 BEAST_EXPECT(!destructorRan);
476 auto createVecOfPointers = [&](
auto const& toClone,
485 auto numToCreate = toCreateDist(eng);
486 result.reserve(numToCreate);
487 for (
int i = 0; i < numToCreate; ++i)
489 if (isStrongDist(eng))
500 constexpr int loopIters = 2 * 1024;
501 constexpr int numThreads = 16;
503 std::barrier loopStartSyncPoint{numThreads};
504 std::barrier postCreateToCloneSyncPoint{numThreads};
505 std::barrier postCreateVecOfPointersSyncPoint{numThreads};
510 for (
int i = 0; i < numThreads; ++i)
518 auto cloneAndDestroy = [&](
int threadId) {
519 for (
int i = 0; i < loopIters; ++i)
522 loopStartSyncPoint.arrive_and_wait();
534 auto [destructorRan, partialDeleteRan] =
535 getDestructorState();
536 BEAST_EXPECT(!i || destructorRan);
537 destructionState.store(0, std::memory_order_release);
540 toClone.
resize(numThreads);
541 auto strong = make_SharedIntrusive<TIBase>();
542 strong->tracingCallback_ = tracingCallback;
547 postCreateToCloneSyncPoint.arrive_and_wait();
550 createVecOfPointers(toClone[threadId], engines[threadId]);
551 toClone[threadId].reset();
554 postCreateVecOfPointersSyncPoint.arrive_and_wait();
560 for (
int i = 0; i < numThreads; ++i)
564 for (
int i = 0; i < numThreads; ++i)
573 testcase(
"Multithreaded Clear Mixed Union");
585 using enum TrackedState;
587 TIBase::ResetStatesGuard rsg{
true};
592 int s = destructionState.load(std::memory_order_relaxed);
593 return {(s & 1) != 0, (s & 2) != 0};
595 auto setDestructorRan = [&]() ->
void {
596 destructionState.fetch_or(1, std::memory_order_acq_rel);
598 auto setPartialDeleteRan = [&]() ->
void {
599 destructionState.fetch_or(2, std::memory_order_acq_rel);
601 auto tracingCallback = [&](TrackedState cur,
603 using enum TrackedState;
604 auto [destructorRan, partialDeleteRan] = getDestructorState();
605 if (next == partiallyDeleted)
607 BEAST_EXPECT(!partialDeleteRan && !destructorRan);
608 setPartialDeleteRan();
612 BEAST_EXPECT(!destructorRan);
616 auto createVecOfPointers = [&](
auto const& toClone,
621 auto numToCreate = toCreateDist(eng);
623 for (
int i = 0; i < numToCreate; ++i)
627 constexpr int loopIters = 2 * 1024;
628 constexpr int flipPointersLoopIters = 256;
629 constexpr int numThreads = 16;
631 std::barrier loopStartSyncPoint{numThreads};
632 std::barrier postCreateToCloneSyncPoint{numThreads};
633 std::barrier postCreateVecOfPointersSyncPoint{numThreads};
634 std::barrier postFlipPointersLoopSyncPoint{numThreads};
639 for (
int i = 0; i < numThreads; ++i)
648 auto cloneAndDestroy = [&](
int threadId) {
649 for (
int i = 0; i < loopIters; ++i)
652 loopStartSyncPoint.arrive_and_wait();
663 auto [destructorRan, partialDeleteRan] =
664 getDestructorState();
665 BEAST_EXPECT(!i || destructorRan);
666 destructionState.store(0, std::memory_order_release);
669 toClone.
resize(numThreads);
670 auto strong = make_SharedIntrusive<TIBase>();
671 strong->tracingCallback_ = tracingCallback;
676 postCreateToCloneSyncPoint.arrive_and_wait();
679 createVecOfPointers(toClone[threadId], engines[threadId]);
680 toClone[threadId].reset();
683 postCreateVecOfPointersSyncPoint.arrive_and_wait();
686 for (
int f = 0; f < flipPointersLoopIters; ++f)
690 if (isStrongDist(engines[threadId]))
702 postFlipPointersLoopSyncPoint.arrive_and_wait();
708 for (
int i = 0; i < numThreads; ++i)
712 for (
int i = 0; i < numThreads; ++i)
721 testcase(
"Multithreaded Locking Weak");
728 using enum TrackedState;
730 TIBase::ResetStatesGuard rsg{
true};
735 int s = destructionState.load(std::memory_order_relaxed);
736 return {(s & 1) != 0, (s & 2) != 0};
738 auto setDestructorRan = [&]() ->
void {
739 destructionState.fetch_or(1, std::memory_order_acq_rel);
741 auto setPartialDeleteRan = [&]() ->
void {
742 destructionState.fetch_or(2, std::memory_order_acq_rel);
744 auto tracingCallback = [&](TrackedState cur,
746 using enum TrackedState;
747 auto [destructorRan, partialDeleteRan] = getDestructorState();
748 if (next == partiallyDeleted)
750 BEAST_EXPECT(!partialDeleteRan && !destructorRan);
751 setPartialDeleteRan();
755 BEAST_EXPECT(!destructorRan);
760 constexpr int loopIters = 2 * 1024;
761 constexpr int lockWeakLoopIters = 256;
762 constexpr int numThreads = 16;
764 std::barrier loopStartSyncPoint{numThreads};
765 std::barrier postCreateToLockSyncPoint{numThreads};
766 std::barrier postLockWeakLoopSyncPoint{numThreads};
771 auto lockAndDestroy = [&](
int threadId) {
772 for (
int i = 0; i < loopIters; ++i)
775 loopStartSyncPoint.arrive_and_wait();
786 auto [destructorRan, partialDeleteRan] =
787 getDestructorState();
788 BEAST_EXPECT(!i || destructorRan);
789 destructionState.store(0, std::memory_order_release);
792 toLock.
resize(numThreads);
793 auto strong = make_SharedIntrusive<TIBase>();
794 strong->tracingCallback_ = tracingCallback;
799 postCreateToLockSyncPoint.arrive_and_wait();
804 for (
int wi = 0; wi < lockWeakLoopIters; ++wi)
806 BEAST_EXPECT(!weak.expired());
807 auto strong = weak.lock();
808 BEAST_EXPECT(strong);
812 postLockWeakLoopSyncPoint.arrive_and_wait();
814 toLock[threadId].reset();
818 for (
int i = 0; i < numThreads; ++i)
822 for (
int i = 0; i < numThreads; ++i)
840BEAST_DEFINE_TESTSUITE(IntrusiveShared, ripple_basics,
ripple);
testcase_t testcase
Memberspace for declaring test cases.
A shared intrusive pointer class that supports weak pointers.
A combination of a strong and a weak intrusive pointer stored in the space of a single pointer.
bool isStrong() const
Return true if this represents a strong pointer.
T * get() const
If this is a strong pointer, return the raw pointer.
void reset()
Set the pointer to null, decrement the appropriate ref count, and run the appropriate release action.
A weak intrusive pointer class for the SharedIntrusive pointer class.
void testMultithreadedClearMixedVariant()
void testMultithreadedClearMixedUnion()
void testMultithreadedLockingWeak()
void run() override
Runs the suite.
T emplace_back(T... args)
Use hash_* containers for keys that do not need a cryptographically secure hashing algorithm.
void partialDestructorFinished(T **o)
Implement the strong count, weak count, and bit flags for an intrusive pointer.