rippled
Loading...
Searching...
No Matches
NFTokenBurn_test.cpp
1//------------------------------------------------------------------------------
2/*
3 This file is part of rippled: https://github.com/ripple/rippled
4 Copyright (c) 2021 Ripple Labs Inc.
5
6 Permission to use, copy, modify, and/or distribute this software for any
7 purpose with or without fee is hereby granted, provided that the above
8 copyright notice and this permission notice appear in all copies.
9
10 THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
11 WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
12 MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
13 ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
14 WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
15 ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
16 OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17*/
18//==============================================================================
19
20#include <test/jtx.h>
21
22#include <xrpld/app/tx/detail/NFTokenUtils.h>
23
24#include <xrpl/protocol/Feature.h>
25#include <xrpl/protocol/jss.h>
26
27#include <random>
28
29namespace ripple {
30
32{
33 // Helper function that returns the number of nfts owned by an account.
34 static std::uint32_t
36 {
37 Json::Value params;
38 params[jss::account] = acct.human();
39 params[jss::type] = "state";
40 Json::Value nfts = env.rpc("json", "account_nfts", to_string(params));
41 return nfts[jss::result][jss::account_nfts].size();
42 };
43
44 // Helper function that returns new nft id for an account and create
45 // specified number of sell offers
48 test::jtx::Env& env,
49 test::jtx::Account const& owner,
50 std::vector<uint256>& offerIndexes,
51 size_t const tokenCancelCount)
52 {
53 using namespace test::jtx;
54 uint256 const nftokenID =
55 token::getNextID(env, owner, 0, tfTransferable);
56 env(token::mint(owner, 0),
57 token::uri(std::string(maxTokenURILength, 'u')),
58 txflags(tfTransferable));
59 env.close();
60
61 offerIndexes.reserve(tokenCancelCount);
62
63 for (uint32_t i = 0; i < tokenCancelCount; ++i)
64 {
65 // Create sell offer
66 offerIndexes.push_back(keylet::nftoffer(owner, env.seq(owner)).key);
67 env(token::createOffer(owner, nftokenID, drops(1)),
68 txflags(tfSellNFToken));
69 env.close();
70 }
71
72 return nftokenID;
73 };
74
    // printNFTPages is a helper function that may be used for debugging.
    //
    // It uses the ledger RPC command to show the NFT pages in the ledger.
    // This enum is its verbosity parameter: noisy dumps each NFTokenPage
    // in full, quiet prints only a per-page summary.
    enum Volume : bool {
        quiet = false,
        noisy = true,
    };
83
84 void
86 {
87 Json::Value jvParams;
88 jvParams[jss::ledger_index] = "current";
89 jvParams[jss::binary] = false;
90 {
91 Json::Value jrr = env.rpc(
92 "json",
93 "ledger_data",
94 boost::lexical_cast<std::string>(jvParams));
95
96 // Iterate the state and print all NFTokenPages.
97 if (!jrr.isMember(jss::result) ||
98 !jrr[jss::result].isMember(jss::state))
99 {
100 std::cout << "No ledger state found!" << std::endl;
101 return;
102 }
103 Json::Value& state = jrr[jss::result][jss::state];
104 if (!state.isArray())
105 {
106 std::cout << "Ledger state is not array!" << std::endl;
107 return;
108 }
109 for (Json::UInt i = 0; i < state.size(); ++i)
110 {
111 if (state[i].isMember(sfNFTokens.jsonName) &&
112 state[i][sfNFTokens.jsonName].isArray())
113 {
114 std::uint32_t tokenCount =
115 state[i][sfNFTokens.jsonName].size();
116 std::cout << tokenCount << " NFtokens in page "
117 << state[i][jss::index].asString() << std::endl;
118
119 if (vol == noisy)
120 {
121 std::cout << state[i].toStyledString() << std::endl;
122 }
123 else
124 {
125 if (tokenCount > 0)
126 std::cout << "first: "
127 << state[i][sfNFTokens.jsonName][0u]
129 << std::endl;
130 if (tokenCount > 1)
132 << "last: "
133 << state[i][sfNFTokens.jsonName][tokenCount - 1]
135 << std::endl;
136 }
137 }
138 }
139 }
140 }
141
142 void
144 {
145 // Exercise a number of conditions with NFT burning.
146 testcase("Burn random");
147
148 using namespace test::jtx;
149
150 Env env{*this, features};
151
152 // Keep information associated with each account together.
153 struct AcctStat
154 {
155 test::jtx::Account const acct;
157
158 AcctStat(char const* name) : acct(name)
159 {
160 }
161
162 operator test::jtx::Account() const
163 {
164 return acct;
165 }
166 };
167 AcctStat alice{"alice"};
168 AcctStat becky{"becky"};
169 AcctStat minter{"minter"};
170
171 env.fund(XRP(10000), alice, becky, minter);
172 env.close();
173
174 // Both alice and minter mint nfts in case that makes any difference.
175 env(token::setMinter(alice, minter));
176 env.close();
177
178 // Create enough NFTs that alice, becky, and minter can all have
179 // at least three pages of NFTs. This will cause more activity in
180 // the page coalescing code. If we make 210 NFTs in total, we can
181 // have alice and minter each make 105. That will allow us to
182 // distribute 70 NFTs to our three participants.
183 //
184 // Give each NFT a pseudo-randomly chosen fee so the NFTs are
185 // distributed pseudo-randomly through the pages. This should
186 // prevent alice's and minter's NFTs from clustering together
187 // in becky's directory.
188 //
189 // Use a default initialized mercenne_twister because we want the
190 // effect of random numbers, but we want the test to run the same
191 // way each time.
192 std::mt19937 engine;
194 decltype(maxTransferFee){}, maxTransferFee);
195
196 alice.nfts.reserve(105);
197 while (alice.nfts.size() < 105)
198 {
199 std::uint16_t const xferFee = feeDist(engine);
200 alice.nfts.push_back(token::getNextID(
201 env, alice, 0u, tfTransferable | tfBurnable, xferFee));
202 env(token::mint(alice),
203 txflags(tfTransferable | tfBurnable),
204 token::xferFee(xferFee));
205 env.close();
206 }
207
208 minter.nfts.reserve(105);
209 while (minter.nfts.size() < 105)
210 {
211 std::uint16_t const xferFee = feeDist(engine);
212 minter.nfts.push_back(token::getNextID(
213 env, alice, 0u, tfTransferable | tfBurnable, xferFee));
214 env(token::mint(minter),
215 txflags(tfTransferable | tfBurnable),
216 token::xferFee(xferFee),
217 token::issuer(alice));
218 env.close();
219 }
220
221 // All of the NFTs are now minted. Transfer 35 each over to becky so
222 // we end up with 70 NFTs in each account.
223 becky.nfts.reserve(70);
224 {
225 auto aliceIter = alice.nfts.begin();
226 auto minterIter = minter.nfts.begin();
227 while (becky.nfts.size() < 70)
228 {
229 // We do the same work on alice and minter, so make a lambda.
230 auto xferNFT = [&env, &becky](AcctStat& acct, auto& iter) {
231 uint256 offerIndex =
232 keylet::nftoffer(acct.acct, env.seq(acct.acct)).key;
233 env(token::createOffer(acct, *iter, XRP(0)),
234 txflags(tfSellNFToken));
235 env.close();
236 env(token::acceptSellOffer(becky, offerIndex));
237 env.close();
238 becky.nfts.push_back(*iter);
239 iter = acct.nfts.erase(iter);
240 iter += 2;
241 };
242 xferNFT(alice, aliceIter);
243 xferNFT(minter, minterIter);
244 }
245 BEAST_EXPECT(aliceIter == alice.nfts.end());
246 BEAST_EXPECT(minterIter == minter.nfts.end());
247 }
248
249 // Now all three participants have 70 NFTs.
250 BEAST_EXPECT(nftCount(env, alice.acct) == 70);
251 BEAST_EXPECT(nftCount(env, becky.acct) == 70);
252 BEAST_EXPECT(nftCount(env, minter.acct) == 70);
253
254 // Next we'll create offers for all of those NFTs. This calls for
255 // another lambda.
256 auto addOffers =
257 [&env](AcctStat& owner, AcctStat& other1, AcctStat& other2) {
258 for (uint256 nft : owner.nfts)
259 {
260 // Create sell offers for owner.
261 env(token::createOffer(owner, nft, drops(1)),
262 txflags(tfSellNFToken),
263 token::destination(other1));
264 env(token::createOffer(owner, nft, drops(1)),
265 txflags(tfSellNFToken),
266 token::destination(other2));
267 env.close();
268
269 // Create buy offers for other1 and other2.
270 env(token::createOffer(other1, nft, drops(1)),
271 token::owner(owner));
272 env(token::createOffer(other2, nft, drops(1)),
273 token::owner(owner));
274 env.close();
275
276 env(token::createOffer(other2, nft, drops(2)),
277 token::owner(owner));
278 env(token::createOffer(other1, nft, drops(2)),
279 token::owner(owner));
280 env.close();
281 }
282 };
283 addOffers(alice, becky, minter);
284 addOffers(becky, minter, alice);
285 addOffers(minter, alice, becky);
286 BEAST_EXPECT(ownerCount(env, alice) == 424);
287 BEAST_EXPECT(ownerCount(env, becky) == 424);
288 BEAST_EXPECT(ownerCount(env, minter) == 424);
289
290 // Now each of the 270 NFTs has six offers associated with it.
291 // Randomly select an NFT out of the pile and burn it. Continue
292 // the process until all NFTs are burned.
293 AcctStat* const stats[3] = {&alice, &becky, &minter};
296
297 while (stats[0]->nfts.size() > 0 || stats[1]->nfts.size() > 0 ||
298 stats[2]->nfts.size() > 0)
299 {
300 // Pick an account to burn an nft. If there are no nfts left
301 // pick again.
302 AcctStat& owner = *(stats[acctDist(engine)]);
303 if (owner.nfts.empty())
304 continue;
305
306 // Pick one of the nfts.
308 0lu, owner.nfts.size() - 1);
309 auto nftIter = owner.nfts.begin() + nftDist(engine);
310 uint256 const nft = *nftIter;
311 owner.nfts.erase(nftIter);
312
313 // Decide which of the accounts should burn the nft. If the
314 // owner is becky then any of the three accounts can burn.
315 // Otherwise either alice or minter can burn.
316 AcctStat& burner = owner.acct == becky.acct
317 ? *(stats[acctDist(engine)])
318 : mintDist(engine) ? alice
319 : minter;
320
321 if (owner.acct == burner.acct)
322 env(token::burn(burner, nft));
323 else
324 env(token::burn(burner, nft), token::owner(owner));
325 env.close();
326
327 // Every time we burn an nft, the number of nfts they hold should
328 // match the number of nfts we think they hold.
329 BEAST_EXPECT(nftCount(env, alice.acct) == alice.nfts.size());
330 BEAST_EXPECT(nftCount(env, becky.acct) == becky.nfts.size());
331 BEAST_EXPECT(nftCount(env, minter.acct) == minter.nfts.size());
332 }
333 BEAST_EXPECT(nftCount(env, alice.acct) == 0);
334 BEAST_EXPECT(nftCount(env, becky.acct) == 0);
335 BEAST_EXPECT(nftCount(env, minter.acct) == 0);
336
337 // When all nfts are burned none of the accounts should have
338 // an ownerCount.
339 BEAST_EXPECT(ownerCount(env, alice) == 0);
340 BEAST_EXPECT(ownerCount(env, becky) == 0);
341 BEAST_EXPECT(ownerCount(env, minter) == 0);
342 }
343
344 void
346 {
347 // The earlier burn test randomizes which nft is burned. There are
348 // a couple of directory merging scenarios that can only be tested by
349 // inserting and deleting in an ordered fashion. We do that testing
350 // now.
351 testcase("Burn sequential");
352
353 using namespace test::jtx;
354
355 Account const alice{"alice"};
356
357 Env env{*this, features};
358 env.fund(XRP(1000), alice);
359
360 // A lambda that generates 96 nfts packed into three pages of 32 each.
361 // Returns a sorted vector of the NFTokenIDs packed into the pages.
362 auto genPackedTokens = [this, &env, &alice]() {
364 nfts.reserve(96);
365
366 // We want to create fully packed NFT pages. This is a little
367 // tricky since the system currently in place is inclined to
368 // assign consecutive tokens to only 16 entries per page.
369 //
370 // By manipulating the internal form of the taxon we can force
371 // creation of NFT pages that are completely full. This lambda
372 // tells us the taxon value we should pass in in order for the
373 // internal representation to match the passed in value.
374 auto internalTaxon = [&env](
375 Account const& acct,
376 std::uint32_t taxon) -> std::uint32_t {
377 std::uint32_t tokenSeq =
378 env.le(acct)->at(~sfMintedNFTokens).value_or(0);
379
380 // If fixNFTokenRemint amendment is on, we must
381 // add FirstNFTokenSequence.
382 if (env.current()->rules().enabled(fixNFTokenRemint))
383 tokenSeq += env.le(acct)
384 ->at(~sfFirstNFTokenSequence)
385 .value_or(env.seq(acct));
386
387 return toUInt32(
388 nft::cipheredTaxon(tokenSeq, nft::toTaxon(taxon)));
389 };
390
391 for (std::uint32_t i = 0; i < 96; ++i)
392 {
393 // In order to fill the pages we use the taxon to break them
394 // into groups of 16 entries. By having the internal
395 // representation of the taxon go...
396 // 0, 3, 2, 5, 4, 7...
397 // in sets of 16 NFTs we can get each page to be fully
398 // populated.
399 std::uint32_t const intTaxon = (i / 16) + (i & 0b10000 ? 2 : 0);
400 uint32_t const extTaxon = internalTaxon(alice, intTaxon);
401 nfts.push_back(token::getNextID(env, alice, extTaxon));
402 env(token::mint(alice, extTaxon));
403 env.close();
404 }
405
406 // Sort the NFTs so they are listed in storage order, not
407 // creation order.
408 std::sort(nfts.begin(), nfts.end());
409
410 // Verify that the ledger does indeed contain exactly three pages
411 // of NFTs with 32 entries in each page.
412 Json::Value jvParams;
413 jvParams[jss::ledger_index] = "current";
414 jvParams[jss::binary] = false;
415 {
416 Json::Value jrr = env.rpc(
417 "json",
418 "ledger_data",
419 boost::lexical_cast<std::string>(jvParams));
420
421 Json::Value& state = jrr[jss::result][jss::state];
422
423 int pageCount = 0;
424 for (Json::UInt i = 0; i < state.size(); ++i)
425 {
426 if (state[i].isMember(sfNFTokens.jsonName) &&
427 state[i][sfNFTokens.jsonName].isArray())
428 {
429 BEAST_EXPECT(
430 state[i][sfNFTokens.jsonName].size() == 32);
431 ++pageCount;
432 }
433 }
434 // If this check fails then the internal NFT directory logic
435 // has changed.
436 BEAST_EXPECT(pageCount == 3);
437 }
438 return nfts;
439 };
440 {
441 // Generate three packed pages. Then burn the tokens in order from
442 // first to last. This exercises specific cases where coalescing
443 // pages is not possible.
444 std::vector<uint256> nfts = genPackedTokens();
445 BEAST_EXPECT(nftCount(env, alice) == 96);
446 BEAST_EXPECT(ownerCount(env, alice) == 3);
447
448 for (uint256 const& nft : nfts)
449 {
450 env(token::burn(alice, {nft}));
451 env.close();
452 }
453 BEAST_EXPECT(nftCount(env, alice) == 0);
454 BEAST_EXPECT(ownerCount(env, alice) == 0);
455 }
456
457 // A lambda verifies that the ledger no longer contains any NFT pages.
458 auto checkNoTokenPages = [this, &env]() {
459 Json::Value jvParams;
460 jvParams[jss::ledger_index] = "current";
461 jvParams[jss::binary] = false;
462 {
463 Json::Value jrr = env.rpc(
464 "json",
465 "ledger_data",
466 boost::lexical_cast<std::string>(jvParams));
467
468 Json::Value& state = jrr[jss::result][jss::state];
469
470 for (Json::UInt i = 0; i < state.size(); ++i)
471 {
472 BEAST_EXPECT(!state[i].isMember(sfNFTokens.jsonName));
473 }
474 }
475 };
476 checkNoTokenPages();
477 {
478 // Generate three packed pages. Then burn the tokens in order from
479 // last to first. This exercises different specific cases where
480 // coalescing pages is not possible.
481 std::vector<uint256> nfts = genPackedTokens();
482 BEAST_EXPECT(nftCount(env, alice) == 96);
483 BEAST_EXPECT(ownerCount(env, alice) == 3);
484
485 // Verify that that all three pages are present and remember the
486 // indexes.
487 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
488 if (!BEAST_EXPECT(lastNFTokenPage))
489 return;
490
491 uint256 const middleNFTokenPageIndex =
492 lastNFTokenPage->at(sfPreviousPageMin);
493 auto middleNFTokenPage = env.le(keylet::nftpage(
494 keylet::nftpage_min(alice), middleNFTokenPageIndex));
495 if (!BEAST_EXPECT(middleNFTokenPage))
496 return;
497
498 uint256 const firstNFTokenPageIndex =
499 middleNFTokenPage->at(sfPreviousPageMin);
500 auto firstNFTokenPage = env.le(keylet::nftpage(
501 keylet::nftpage_min(alice), firstNFTokenPageIndex));
502 if (!BEAST_EXPECT(firstNFTokenPage))
503 return;
504
505 // Burn almost all the tokens in the very last page.
506 for (int i = 0; i < 31; ++i)
507 {
508 env(token::burn(alice, {nfts.back()}));
509 nfts.pop_back();
510 env.close();
511 }
512
513 // Verify that the last page is still present and contains just one
514 // NFT.
515 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
516 if (!BEAST_EXPECT(lastNFTokenPage))
517 return;
518
519 BEAST_EXPECT(
520 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 1);
521 BEAST_EXPECT(lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
522 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
523
524 // Delete the last token from the last page.
525 env(token::burn(alice, {nfts.back()}));
526 nfts.pop_back();
527 env.close();
528
529 if (features[fixNFTokenPageLinks])
530 {
531 // Removing the last token from the last page deletes the
532 // _previous_ page because we need to preserve that last
533 // page an an anchor. The contents of the next-to-last page
534 // are moved into the last page.
535 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
536 BEAST_EXPECT(lastNFTokenPage);
537 BEAST_EXPECT(
538 lastNFTokenPage->at(~sfPreviousPageMin) ==
539 firstNFTokenPageIndex);
540 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
541 BEAST_EXPECT(
542 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
543
544 // The "middle" page should be gone.
545 middleNFTokenPage = env.le(keylet::nftpage(
546 keylet::nftpage_min(alice), middleNFTokenPageIndex));
547 BEAST_EXPECT(!middleNFTokenPage);
548
549 // The "first" page should still be present and linked to
550 // the last page.
551 firstNFTokenPage = env.le(keylet::nftpage(
552 keylet::nftpage_min(alice), firstNFTokenPageIndex));
553 BEAST_EXPECT(firstNFTokenPage);
554 BEAST_EXPECT(
555 !firstNFTokenPage->isFieldPresent(sfPreviousPageMin));
556 BEAST_EXPECT(
557 firstNFTokenPage->at(~sfNextPageMin) ==
558 lastNFTokenPage->key());
559 BEAST_EXPECT(
560 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
561 }
562 else
563 {
564 // Removing the last token from the last page deletes the last
565 // page. This is a bug. The contents of the next-to-last page
566 // should have been moved into the last page.
567 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
568 BEAST_EXPECT(!lastNFTokenPage);
569
570 // The "middle" page is still present, but has lost the
571 // NextPageMin field.
572 middleNFTokenPage = env.le(keylet::nftpage(
573 keylet::nftpage_min(alice), middleNFTokenPageIndex));
574 if (!BEAST_EXPECT(middleNFTokenPage))
575 return;
576 BEAST_EXPECT(
577 middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
578 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
579 }
580
581 // Delete the rest of the NFTokens.
582 while (!nfts.empty())
583 {
584 env(token::burn(alice, {nfts.back()}));
585 nfts.pop_back();
586 env.close();
587 }
588 BEAST_EXPECT(nftCount(env, alice) == 0);
589 BEAST_EXPECT(ownerCount(env, alice) == 0);
590 }
591 checkNoTokenPages();
592 {
593 // Generate three packed pages. Then burn all tokens in the middle
594 // page. This exercises the case where a page is removed between
595 // two fully populated pages.
596 std::vector<uint256> nfts = genPackedTokens();
597 BEAST_EXPECT(nftCount(env, alice) == 96);
598 BEAST_EXPECT(ownerCount(env, alice) == 3);
599
600 // Verify that that all three pages are present and remember the
601 // indexes.
602 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
603 if (!BEAST_EXPECT(lastNFTokenPage))
604 return;
605
606 uint256 const middleNFTokenPageIndex =
607 lastNFTokenPage->at(sfPreviousPageMin);
608 auto middleNFTokenPage = env.le(keylet::nftpage(
609 keylet::nftpage_min(alice), middleNFTokenPageIndex));
610 if (!BEAST_EXPECT(middleNFTokenPage))
611 return;
612
613 uint256 const firstNFTokenPageIndex =
614 middleNFTokenPage->at(sfPreviousPageMin);
615 auto firstNFTokenPage = env.le(keylet::nftpage(
616 keylet::nftpage_min(alice), firstNFTokenPageIndex));
617 if (!BEAST_EXPECT(firstNFTokenPage))
618 return;
619
620 for (std::size_t i = 32; i < 64; ++i)
621 {
622 env(token::burn(alice, nfts[i]));
623 env.close();
624 }
625 nfts.erase(nfts.begin() + 32, nfts.begin() + 64);
626 BEAST_EXPECT(nftCount(env, alice) == 64);
627 BEAST_EXPECT(ownerCount(env, alice) == 2);
628
629 // Verify that middle page is gone and the links in the two
630 // remaining pages are correct.
631 middleNFTokenPage = env.le(keylet::nftpage(
632 keylet::nftpage_min(alice), middleNFTokenPageIndex));
633 BEAST_EXPECT(!middleNFTokenPage);
634
635 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
636 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
637 BEAST_EXPECT(
638 lastNFTokenPage->getFieldH256(sfPreviousPageMin) ==
639 firstNFTokenPageIndex);
640
641 firstNFTokenPage = env.le(keylet::nftpage(
642 keylet::nftpage_min(alice), firstNFTokenPageIndex));
643 BEAST_EXPECT(
644 firstNFTokenPage->getFieldH256(sfNextPageMin) ==
645 keylet::nftpage_max(alice).key);
646 BEAST_EXPECT(!firstNFTokenPage->isFieldPresent(sfPreviousPageMin));
647
648 // Burn the remaining nfts.
649 for (uint256 const& nft : nfts)
650 {
651 env(token::burn(alice, {nft}));
652 env.close();
653 }
654 BEAST_EXPECT(nftCount(env, alice) == 0);
655 BEAST_EXPECT(ownerCount(env, alice) == 0);
656 }
657 checkNoTokenPages();
658 {
659 // Generate three packed pages. Then burn all the tokens in the
660 // first page followed by all the tokens in the last page. This
661 // exercises a specific case where coalescing pages is not possible.
662 std::vector<uint256> nfts = genPackedTokens();
663 BEAST_EXPECT(nftCount(env, alice) == 96);
664 BEAST_EXPECT(ownerCount(env, alice) == 3);
665
666 // Verify that that all three pages are present and remember the
667 // indexes.
668 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
669 if (!BEAST_EXPECT(lastNFTokenPage))
670 return;
671
672 uint256 const middleNFTokenPageIndex =
673 lastNFTokenPage->at(sfPreviousPageMin);
674 auto middleNFTokenPage = env.le(keylet::nftpage(
675 keylet::nftpage_min(alice), middleNFTokenPageIndex));
676 if (!BEAST_EXPECT(middleNFTokenPage))
677 return;
678
679 uint256 const firstNFTokenPageIndex =
680 middleNFTokenPage->at(sfPreviousPageMin);
681 auto firstNFTokenPage = env.le(keylet::nftpage(
682 keylet::nftpage_min(alice), firstNFTokenPageIndex));
683 if (!BEAST_EXPECT(firstNFTokenPage))
684 return;
685
686 // Burn all the tokens in the first page.
687 std::reverse(nfts.begin(), nfts.end());
688 for (int i = 0; i < 32; ++i)
689 {
690 env(token::burn(alice, {nfts.back()}));
691 nfts.pop_back();
692 env.close();
693 }
694
695 // Verify the first page is gone.
696 firstNFTokenPage = env.le(keylet::nftpage(
697 keylet::nftpage_min(alice), firstNFTokenPageIndex));
698 BEAST_EXPECT(!firstNFTokenPage);
699
700 // Check the links in the other two pages.
701 middleNFTokenPage = env.le(keylet::nftpage(
702 keylet::nftpage_min(alice), middleNFTokenPageIndex));
703 if (!BEAST_EXPECT(middleNFTokenPage))
704 return;
705 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
706 BEAST_EXPECT(middleNFTokenPage->isFieldPresent(sfNextPageMin));
707
708 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
709 if (!BEAST_EXPECT(lastNFTokenPage))
710 return;
711 BEAST_EXPECT(lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
712 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
713
714 // Burn all the tokens in the last page.
715 std::reverse(nfts.begin(), nfts.end());
716 for (int i = 0; i < 32; ++i)
717 {
718 env(token::burn(alice, {nfts.back()}));
719 nfts.pop_back();
720 env.close();
721 }
722
723 if (features[fixNFTokenPageLinks])
724 {
725 // Removing the last token from the last page deletes the
726 // _previous_ page because we need to preserve that last
727 // page an an anchor. The contents of the next-to-last page
728 // are moved into the last page.
729 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
730 BEAST_EXPECT(lastNFTokenPage);
731 BEAST_EXPECT(
732 !lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
733 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
734 BEAST_EXPECT(
735 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
736
737 // The "middle" page should be gone.
738 middleNFTokenPage = env.le(keylet::nftpage(
739 keylet::nftpage_min(alice), middleNFTokenPageIndex));
740 BEAST_EXPECT(!middleNFTokenPage);
741
742 // The "first" page should still be gone.
743 firstNFTokenPage = env.le(keylet::nftpage(
744 keylet::nftpage_min(alice), firstNFTokenPageIndex));
745 BEAST_EXPECT(!firstNFTokenPage);
746 }
747 else
748 {
749 // Removing the last token from the last page deletes the last
750 // page. This is a bug. The contents of the next-to-last page
751 // should have been moved into the last page.
752 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
753 BEAST_EXPECT(!lastNFTokenPage);
754
755 // The "middle" page is still present, but has lost the
756 // NextPageMin field.
757 middleNFTokenPage = env.le(keylet::nftpage(
758 keylet::nftpage_min(alice), middleNFTokenPageIndex));
759 if (!BEAST_EXPECT(middleNFTokenPage))
760 return;
761 BEAST_EXPECT(
762 !middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
763 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
764 }
765
766 // Delete the rest of the NFTokens.
767 while (!nfts.empty())
768 {
769 env(token::burn(alice, {nfts.back()}));
770 nfts.pop_back();
771 env.close();
772 }
773 BEAST_EXPECT(nftCount(env, alice) == 0);
774 BEAST_EXPECT(ownerCount(env, alice) == 0);
775 }
776 checkNoTokenPages();
777
778 if (features[fixNFTokenPageLinks])
779 {
780 // Exercise the invariant that the final NFTokenPage of a directory
781 // may not be removed if there are NFTokens in other pages of the
782 // directory.
783 //
784 // We're going to fire an Invariant failure that is difficult to
785 // cause. We do it here because the tools are here.
786 //
787 // See Invariants_test.cpp for examples of other invariant tests
788 // that this one is modeled after.
789
790 // Generate three closely packed NFTokenPages.
791 std::vector<uint256> nfts = genPackedTokens();
792 BEAST_EXPECT(nftCount(env, alice) == 96);
793 BEAST_EXPECT(ownerCount(env, alice) == 3);
794
795 // Burn almost all the tokens in the very last page.
796 for (int i = 0; i < 31; ++i)
797 {
798 env(token::burn(alice, {nfts.back()}));
799 nfts.pop_back();
800 env.close();
801 }
802 {
803 // Create an ApplyContext we can use to run the invariant
804 // checks. These variables must outlive the ApplyContext.
805 OpenView ov{*env.current()};
806 STTx tx{ttACCOUNT_SET, [](STObject&) {}};
808 beast::Journal jlog{sink};
809 ApplyContext ac{
810 env.app(),
811 ov,
812 tx,
814 env.current()->fees().base,
815 tapNONE,
816 jlog};
817
818 // Verify that the last page is present and contains one NFT.
819 auto lastNFTokenPage =
820 ac.view().peek(keylet::nftpage_max(alice));
821 if (!BEAST_EXPECT(lastNFTokenPage))
822 return;
823 BEAST_EXPECT(
824 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 1);
825
826 // Erase that last page.
827 ac.view().erase(lastNFTokenPage);
828
829 // Exercise the invariant.
830 TER terActual = tesSUCCESS;
831 for (TER const& terExpect :
833 {
834 terActual = ac.checkInvariants(terActual, XRPAmount{});
835 BEAST_EXPECT(terExpect == terActual);
836 BEAST_EXPECT(
837 sink.messages().str().starts_with("Invariant failed:"));
838 // uncomment to log the invariant failure message
839 // log << " --> " << sink.messages().str() << std::endl;
840 BEAST_EXPECT(
841 sink.messages().str().find(
842 "Last NFT page deleted with non-empty directory") !=
843 std::string::npos);
844 }
845 }
846 {
847 // Create an ApplyContext we can use to run the invariant
848 // checks. These variables must outlive the ApplyContext.
849 OpenView ov{*env.current()};
850 STTx tx{ttACCOUNT_SET, [](STObject&) {}};
852 beast::Journal jlog{sink};
853 ApplyContext ac{
854 env.app(),
855 ov,
856 tx,
858 env.current()->fees().base,
859 tapNONE,
860 jlog};
861
862 // Verify that the middle page is present.
863 auto lastNFTokenPage =
864 ac.view().peek(keylet::nftpage_max(alice));
865 auto middleNFTokenPage = ac.view().peek(keylet::nftpage(
866 keylet::nftpage_min(alice),
867 lastNFTokenPage->getFieldH256(sfPreviousPageMin)));
868 BEAST_EXPECT(middleNFTokenPage);
869
870 // Remove the NextMinPage link from the middle page to fire
871 // the invariant.
872 middleNFTokenPage->makeFieldAbsent(sfNextPageMin);
873 ac.view().update(middleNFTokenPage);
874
875 // Exercise the invariant.
876 TER terActual = tesSUCCESS;
877 for (TER const& terExpect :
879 {
880 terActual = ac.checkInvariants(terActual, XRPAmount{});
881 BEAST_EXPECT(terExpect == terActual);
882 BEAST_EXPECT(
883 sink.messages().str().starts_with("Invariant failed:"));
884 // uncomment to log the invariant failure message
885 // log << " --> " << sink.messages().str() << std::endl;
886 BEAST_EXPECT(
887 sink.messages().str().find("Lost NextMinPage link") !=
888 std::string::npos);
889 }
890 }
891 }
892 }
893
894 void
896 {
897 // Look at the case where too many offers prevents burning a token.
898 testcase("Burn too many offers");
899
900 using namespace test::jtx;
901
902 // Test what happens if a NFT is unburnable when there are
903 // more than 500 offers, before fixNonFungibleTokensV1_2 goes live
904 if (!features[fixNonFungibleTokensV1_2])
905 {
906 Env env{*this, features};
907
908 Account const alice("alice");
909 Account const becky("becky");
910 env.fund(XRP(1000), alice, becky);
911 env.close();
912
913 // We structure the test to try and maximize the metadata produced.
914 // This verifies that we don't create too much metadata during a
915 // maximal burn operation.
916 //
917 // 1. alice mints an nft with a full-sized URI.
918 // 2. We create 500 new accounts, each of which creates an offer
919 // for alice's nft.
920 // 3. becky creates one more offer for alice's NFT
921 // 4. Attempt to burn the nft which fails because there are too
922 // many offers.
923 // 5. Cancel becky's offer and the nft should become burnable.
924 uint256 const nftokenID =
925 token::getNextID(env, alice, 0, tfTransferable);
926 env(token::mint(alice, 0),
927 token::uri(std::string(maxTokenURILength, 'u')),
928 txflags(tfTransferable));
929 env.close();
930
931 std::vector<uint256> offerIndexes;
932 offerIndexes.reserve(maxTokenOfferCancelCount);
933 for (std::uint32_t i = 0; i < maxTokenOfferCancelCount; ++i)
934 {
935 Account const acct(std::string("acct") + std::to_string(i));
936 env.fund(XRP(1000), acct);
937 env.close();
938
939 offerIndexes.push_back(
940 keylet::nftoffer(acct, env.seq(acct)).key);
941 env(token::createOffer(acct, nftokenID, drops(1)),
942 token::owner(alice));
943 env.close();
944 }
945
946 // Verify all offers are present in the ledger.
947 for (uint256 const& offerIndex : offerIndexes)
948 {
949 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
950 }
951
952 // Create one too many offers.
953 uint256 const beckyOfferIndex =
954 keylet::nftoffer(becky, env.seq(becky)).key;
955 env(token::createOffer(becky, nftokenID, drops(1)),
956 token::owner(alice));
957
958 // Attempt to burn the nft which should fail.
959 env(token::burn(alice, nftokenID), ter(tefTOO_BIG));
960
961 // Close enough ledgers that the burn transaction is no longer
962 // retried.
963 for (int i = 0; i < 10; ++i)
964 env.close();
965
966 // Cancel becky's offer, but alice adds a sell offer. The token
967 // should still not be burnable.
968 env(token::cancelOffer(becky, {beckyOfferIndex}));
969 env.close();
970
971 uint256 const aliceOfferIndex =
972 keylet::nftoffer(alice, env.seq(alice)).key;
973 env(token::createOffer(alice, nftokenID, drops(1)),
974 txflags(tfSellNFToken));
975 env.close();
976
977 env(token::burn(alice, nftokenID), ter(tefTOO_BIG));
978 env.close();
979
980 // Cancel alice's sell offer. Now the token should be burnable.
981 env(token::cancelOffer(alice, {aliceOfferIndex}));
982 env.close();
983
984 env(token::burn(alice, nftokenID));
985 env.close();
986
987 // Burning the token should remove all the offers from the ledger.
988 for (uint256 const& offerIndex : offerIndexes)
989 {
990 BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
991 }
992
993 // Both alice and becky should have ownerCounts of zero.
994 BEAST_EXPECT(ownerCount(env, alice) == 0);
995 BEAST_EXPECT(ownerCount(env, becky) == 0);
996 }
997
998 // Test that up to 499 buy/sell offers will be removed when NFT is
999 // burned after fixNonFungibleTokensV1_2 is enabled. This is to test
1000 // that we can successfully remove all offers if the number of offers is
1001 // less than 500.
1002 if (features[fixNonFungibleTokensV1_2])
1003 {
1004 Env env{*this, features};
1005
1006 Account const alice("alice");
1007 Account const becky("becky");
1008 env.fund(XRP(100000), alice, becky);
1009 env.close();
1010
1011 // alice creates 498 sell offers and becky creates 1 buy offers.
1012 // When the token is burned, 498 sell offers and 1 buy offer are
1013 // removed. In total, 499 offers are removed
1014 std::vector<uint256> offerIndexes;
1015 auto const nftokenID = createNftAndOffers(
1016 env, alice, offerIndexes, maxDeletableTokenOfferEntries - 2);
1017
1018 // Verify all sell offers are present in the ledger.
1019 for (uint256 const& offerIndex : offerIndexes)
1020 {
1021 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
1022 }
1023
1024 // Becky creates a buy offer
1025 uint256 const beckyOfferIndex =
1026 keylet::nftoffer(becky, env.seq(becky)).key;
1027 env(token::createOffer(becky, nftokenID, drops(1)),
1028 token::owner(alice));
1029 env.close();
1030
1031 // Burn the token
1032 env(token::burn(alice, nftokenID));
1033 env.close();
1034
1035 // Burning the token should remove all 498 sell offers
1036 // that alice created
1037 for (uint256 const& offerIndex : offerIndexes)
1038 {
1039 BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
1040 }
1041
1042 // Burning the token should also remove the one buy offer
1043 // that becky created
1044 BEAST_EXPECT(!env.le(keylet::nftoffer(beckyOfferIndex)));
1045
1046 // alice and becky should have ownerCounts of zero
1047 BEAST_EXPECT(ownerCount(env, alice) == 0);
1048 BEAST_EXPECT(ownerCount(env, becky) == 0);
1049 }
1050
1051 // Test that up to 500 buy offers are removed when NFT is burned
1052 // after fixNonFungibleTokensV1_2 is enabled
1053 if (features[fixNonFungibleTokensV1_2])
1054 {
1055 Env env{*this, features};
1056
1057 Account const alice("alice");
1058 Account const becky("becky");
1059 env.fund(XRP(100000), alice, becky);
1060 env.close();
1061
1062 // alice creates 501 sell offers for the token
1063 // After we burn the token, 500 of the sell offers should be
1064 // removed, and one is left over
1065 std::vector<uint256> offerIndexes;
1066 auto const nftokenID = createNftAndOffers(
1067 env, alice, offerIndexes, maxDeletableTokenOfferEntries + 1);
1068
1069 // Verify all sell offers are present in the ledger.
1070 for (uint256 const& offerIndex : offerIndexes)
1071 {
1072 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
1073 }
1074
1075 // Burn the token
1076 env(token::burn(alice, nftokenID));
1077 env.close();
1078
1079 uint32_t offerDeletedCount = 0;
1080 // Count the number of sell offers that have been deleted
1081 for (uint256 const& offerIndex : offerIndexes)
1082 {
1083 if (!env.le(keylet::nftoffer(offerIndex)))
1084 offerDeletedCount++;
1085 }
1086
1087 BEAST_EXPECT(offerIndexes.size() == maxTokenOfferCancelCount + 1);
1088
1089 // 500 sell offers should be removed
1090 BEAST_EXPECT(offerDeletedCount == maxTokenOfferCancelCount);
1091
1092 // alice should have ownerCounts of one for the orphaned sell offer
1093 BEAST_EXPECT(ownerCount(env, alice) == 1);
1094 }
1095
1096 // Test that up to 500 buy/sell offers are removed when NFT is burned
1097 // after fixNonFungibleTokensV1_2 is enabled
1098 if (features[fixNonFungibleTokensV1_2])
1099 {
1100 Env env{*this, features};
1101
1102 Account const alice("alice");
1103 Account const becky("becky");
1104 env.fund(XRP(100000), alice, becky);
1105 env.close();
1106
1107 // alice creates 499 sell offers and becky creates 2 buy offers.
1108 // When the token is burned, 499 sell offers and 1 buy offer
1109 // are removed.
1110 // In total, 500 offers are removed
1111 std::vector<uint256> offerIndexes;
1112 auto const nftokenID = createNftAndOffers(
1113 env, alice, offerIndexes, maxDeletableTokenOfferEntries - 1);
1114
1115 // Verify all sell offers are present in the ledger.
1116 for (uint256 const& offerIndex : offerIndexes)
1117 {
1118 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
1119 }
1120
1121 // becky creates 2 buy offers
1122 env(token::createOffer(becky, nftokenID, drops(1)),
1123 token::owner(alice));
1124 env.close();
1125 env(token::createOffer(becky, nftokenID, drops(1)),
1126 token::owner(alice));
1127 env.close();
1128
1129 // Burn the token
1130 env(token::burn(alice, nftokenID));
1131 env.close();
1132
1133 // Burning the token should remove all 499 sell offers from the
1134 // ledger.
1135 for (uint256 const& offerIndex : offerIndexes)
1136 {
1137 BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
1138 }
1139
1140 // alice should have ownerCount of zero because all her
1141 // sell offers have been deleted
1142 BEAST_EXPECT(ownerCount(env, alice) == 0);
1143
1144 // becky has ownerCount of one due to an orphaned buy offer
1145 BEAST_EXPECT(ownerCount(env, becky) == 1);
1146 }
1147 }
1148
1149 void
1151 {
1152 // Amendment fixNFTokenPageLinks prevents the breakage we want
1153 // to observe.
1154 if (features[fixNFTokenPageLinks])
1155 return;
1156
1157 // a couple of directory merging scenarios that can only be tested by
1158 // inserting and deleting in an ordered fashion. We do that testing
1159 // now.
1160 testcase("Exercise broken links");
1161
1162 using namespace test::jtx;
1163
1164 Account const alice{"alice"};
1165 Account const minter{"minter"};
1166
1167 Env env{*this, features};
1168 env.fund(XRP(1000), alice, minter);
1169
1170 // A lambda that generates 96 nfts packed into three pages of 32 each.
1171 // Returns a sorted vector of the NFTokenIDs packed into the pages.
1172 auto genPackedTokens = [this, &env, &alice, &minter]() {
1174 nfts.reserve(96);
1175
1176 // We want to create fully packed NFT pages. This is a little
1177 // tricky since the system currently in place is inclined to
1178 // assign consecutive tokens to only 16 entries per page.
1179 //
1180 // By manipulating the internal form of the taxon we can force
1181 // creation of NFT pages that are completely full. This lambda
1182 // tells us the taxon value we should pass in in order for the
1183 // internal representation to match the passed in value.
1184 auto internalTaxon = [&env](
1185 Account const& acct,
1186 std::uint32_t taxon) -> std::uint32_t {
1187 std::uint32_t tokenSeq =
1188 env.le(acct)->at(~sfMintedNFTokens).value_or(0);
1189
1190 // If fixNFTokenRemint amendment is on, we must
1191 // add FirstNFTokenSequence.
1192 if (env.current()->rules().enabled(fixNFTokenRemint))
1193 tokenSeq += env.le(acct)
1194 ->at(~sfFirstNFTokenSequence)
1195 .value_or(env.seq(acct));
1196
1197 return toUInt32(
1198 nft::cipheredTaxon(tokenSeq, nft::toTaxon(taxon)));
1199 };
1200
1201 for (std::uint32_t i = 0; i < 96; ++i)
1202 {
1203 // In order to fill the pages we use the taxon to break them
1204 // into groups of 16 entries. By having the internal
1205 // representation of the taxon go...
1206 // 0, 3, 2, 5, 4, 7...
1207 // in sets of 16 NFTs we can get each page to be fully
1208 // populated.
1209 std::uint32_t const intTaxon = (i / 16) + (i & 0b10000 ? 2 : 0);
1210 uint32_t const extTaxon = internalTaxon(minter, intTaxon);
1211 nfts.push_back(
1212 token::getNextID(env, minter, extTaxon, tfTransferable));
1213 env(token::mint(minter, extTaxon), txflags(tfTransferable));
1214 env.close();
1215
1216 // Minter creates an offer for the NFToken.
1217 uint256 const minterOfferIndex =
1218 keylet::nftoffer(minter, env.seq(minter)).key;
1219 env(token::createOffer(minter, nfts.back(), XRP(0)),
1220 txflags(tfSellNFToken));
1221 env.close();
1222
1223 // alice accepts the offer.
1224 env(token::acceptSellOffer(alice, minterOfferIndex));
1225 env.close();
1226 }
1227
1228 // Sort the NFTs so they are listed in storage order, not
1229 // creation order.
1230 std::sort(nfts.begin(), nfts.end());
1231
1232 // Verify that the ledger does indeed contain exactly three pages
1233 // of NFTs with 32 entries in each page.
1234 Json::Value jvParams;
1235 jvParams[jss::ledger_index] = "current";
1236 jvParams[jss::binary] = false;
1237 {
1238 Json::Value jrr = env.rpc(
1239 "json",
1240 "ledger_data",
1241 boost::lexical_cast<std::string>(jvParams));
1242
1243 Json::Value& state = jrr[jss::result][jss::state];
1244
1245 int pageCount = 0;
1246 for (Json::UInt i = 0; i < state.size(); ++i)
1247 {
1248 if (state[i].isMember(sfNFTokens.jsonName) &&
1249 state[i][sfNFTokens.jsonName].isArray())
1250 {
1251 BEAST_EXPECT(
1252 state[i][sfNFTokens.jsonName].size() == 32);
1253 ++pageCount;
1254 }
1255 }
1256 // If this check fails then the internal NFT directory logic
1257 // has changed.
1258 BEAST_EXPECT(pageCount == 3);
1259 }
1260 return nfts;
1261 };
1262
1263 // Generate three packed pages.
1264 std::vector<uint256> nfts = genPackedTokens();
1265 BEAST_EXPECT(nftCount(env, alice) == 96);
1266 BEAST_EXPECT(ownerCount(env, alice) == 3);
1267
1268 // Verify that that all three pages are present and remember the
1269 // indexes.
1270 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
1271 if (!BEAST_EXPECT(lastNFTokenPage))
1272 return;
1273
1274 uint256 const middleNFTokenPageIndex =
1275 lastNFTokenPage->at(sfPreviousPageMin);
1276 auto middleNFTokenPage = env.le(keylet::nftpage(
1277 keylet::nftpage_min(alice), middleNFTokenPageIndex));
1278 if (!BEAST_EXPECT(middleNFTokenPage))
1279 return;
1280
1281 uint256 const firstNFTokenPageIndex =
1282 middleNFTokenPage->at(sfPreviousPageMin);
1283 auto firstNFTokenPage = env.le(
1284 keylet::nftpage(keylet::nftpage_min(alice), firstNFTokenPageIndex));
1285 if (!BEAST_EXPECT(firstNFTokenPage))
1286 return;
1287
1288 // Sell all the tokens in the very last page back to minter.
1289 std::vector<uint256> last32NFTs;
1290 for (int i = 0; i < 32; ++i)
1291 {
1292 last32NFTs.push_back(nfts.back());
1293 nfts.pop_back();
1294
1295 // alice creates an offer for the NFToken.
1296 uint256 const aliceOfferIndex =
1297 keylet::nftoffer(alice, env.seq(alice)).key;
1298 env(token::createOffer(alice, last32NFTs.back(), XRP(0)),
1299 txflags(tfSellNFToken));
1300 env.close();
1301
1302 // minter accepts the offer.
1303 env(token::acceptSellOffer(minter, aliceOfferIndex));
1304 env.close();
1305 }
1306
1307 // Removing the last token from the last page deletes alice's last
1308 // page. This is a bug. The contents of the next-to-last page
1309 // should have been moved into the last page.
1310 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
1311 BEAST_EXPECT(!lastNFTokenPage);
1312 BEAST_EXPECT(ownerCount(env, alice) == 2);
1313
1314 // The "middle" page is still present, but has lost the
1315 // NextPageMin field.
1316 middleNFTokenPage = env.le(keylet::nftpage(
1317 keylet::nftpage_min(alice), middleNFTokenPageIndex));
1318 if (!BEAST_EXPECT(middleNFTokenPage))
1319 return;
1320 BEAST_EXPECT(middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
1321 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
1322
1323 // Attempt to delete alice's account, but fail because she owns NFTs.
1324 auto const acctDelFee{drops(env.current()->fees().increment)};
1325 env(acctdelete(alice, minter),
1326 fee(acctDelFee),
1327 ter(tecHAS_OBLIGATIONS));
1328 env.close();
1329
1330 // minter sells the last 32 NFTs back to alice.
1331 for (uint256 nftID : last32NFTs)
1332 {
1333 // minter creates an offer for the NFToken.
1334 uint256 const minterOfferIndex =
1335 keylet::nftoffer(minter, env.seq(minter)).key;
1336 env(token::createOffer(minter, nftID, XRP(0)),
1337 txflags(tfSellNFToken));
1338 env.close();
1339
1340 // alice accepts the offer.
1341 env(token::acceptSellOffer(alice, minterOfferIndex));
1342 env.close();
1343 }
1344 BEAST_EXPECT(ownerCount(env, alice) == 3); // Three NFTokenPages.
1345
1346 // alice has an NFToken directory with a broken link in the middle.
1347 {
1348 // Try the account_objects RPC command. Alice's account only shows
1349 // two NFT pages even though she owns more.
1350 Json::Value acctObjs = [&env, &alice]() {
1351 Json::Value params;
1352 params[jss::account] = alice.human();
1353 return env.rpc("json", "account_objects", to_string(params));
1354 }();
1355 BEAST_EXPECT(!acctObjs.isMember(jss::marker));
1356 BEAST_EXPECT(
1357 acctObjs[jss::result][jss::account_objects].size() == 2);
1358 }
1359 {
1360 // Try the account_nfts RPC command. It only returns 64 NFTs
1361 // although alice owns 96.
1362 Json::Value aliceNFTs = [&env, &alice]() {
1363 Json::Value params;
1364 params[jss::account] = alice.human();
1365 params[jss::type] = "state";
1366 return env.rpc("json", "account_nfts", to_string(params));
1367 }();
1368 BEAST_EXPECT(!aliceNFTs.isMember(jss::marker));
1369 BEAST_EXPECT(
1370 aliceNFTs[jss::result][jss::account_nfts].size() == 64);
1371 }
1372 }
1373
// Runs every burn sub-test with the supplied amendment/feature set.
// NOTE(review): the signature line (original line 1375,
// `testWithFeats(FeatureBitset features)`) is elided by the doxygen
// source renderer.
1374 void
1376 {
1377 testBurnRandom(features);
1378 testBurnSequential(features);
1379 testBurnTooManyOffers(features);
1380 exerciseBrokenLinks(features);
1381 }
1382
1383protected:
// Runs exactly one of five feature configurations of the test battery,
// selected by `instance`. The configurations strip successively fewer NFT
// fix amendments from the full supported set, ending with all amendments
// enabled. When `last` is true, additionally asserts that `instance` is
// the final configuration (a guard that keeps the instance count and the
// number of suite subclasses in sync).
1384 void
1385 run(std::uint32_t instance, bool last = false)
1386 {
1387 using namespace test::jtx;
1388 static FeatureBitset const all{supported_amendments()};
1389 static FeatureBitset const fixNFTV1_2{fixNonFungibleTokensV1_2};
1390 static FeatureBitset const fixNFTDir{fixNFTokenDirV1};
1391 static FeatureBitset const fixNFTRemint{fixNFTokenRemint};
1392 static FeatureBitset const fixNFTPageLinks{fixNFTokenPageLinks};
1393
// Ordered from "all four fixes disabled" to "everything enabled".
1394 static std::array<FeatureBitset, 5> const feats{
1395 all - fixNFTV1_2 - fixNFTDir - fixNFTRemint - fixNFTPageLinks,
1396 all - fixNFTV1_2 - fixNFTRemint - fixNFTPageLinks,
1397 all - fixNFTRemint - fixNFTPageLinks,
1398 all - fixNFTPageLinks,
1399 all,
1400 };
1401
1402 if (BEAST_EXPECT(instance < feats.size()))
1403 {
1404 testWithFeats(feats[instance]);
1405 }
1406 BEAST_EXPECT(!last || instance == feats.size() - 1);
1407 }
1408
1409public:
1410 void
1411 run() override
1412 {
1413 run(0);
1414 }
1415};
1416
1418{
1419public:
1420 void
1421 run() override
1422 {
1424 }
1425};
1426
1428{
1429public:
1430 void
1431 run() override
1432 {
1434 }
1435};
1436
1438{
1439public:
1440 void
1441 run() override
1442 {
1444 }
1445};
1446
1448{
1449public:
1450 void
1451 run() override
1452 {
1454 }
1455};
1456
// Register the five suites (all at priority 3). Splitting one feature
// configuration per suite presumably lets the test runner execute the
// expensive burn battery's configurations in parallel — TODO confirm
// against the runner's scheduling of suites.
1457BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnBaseUtil, tx, ripple, 3);
1458BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOfixFungTokens, tx, ripple, 3);
1459BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOFixTokenRemint, tx, ripple, 3);
1460BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOFixNFTPageLinks, tx, ripple, 3);
1461BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnAllFeatures, tx, ripple, 3);
1462
1463} // namespace ripple
T back(T... args)
T begin(T... args)
Represents a JSON value.
Definition: json_value.h:150
bool isArray() const
UInt size() const
Number of values in array or object.
Definition: json_value.cpp:719
std::string toStyledString() const
std::string asString() const
Returns the unquoted string value.
Definition: json_value.cpp:482
bool isMember(char const *key) const
Return true if the object has a member named key.
Definition: json_value.cpp:962
A generic endpoint for log messages.
Definition: Journal.h:60
A testsuite class.
Definition: suite.h:55
testcase_t testcase
Memberspace for declaring test cases.
Definition: suite.h:155
State information when applying a tx.
Definition: ApplyContext.h:37
Application & app
Definition: ApplyContext.h:71
void run() override
Runs the suite.
static std::uint32_t nftCount(test::jtx::Env &env, test::jtx::Account const &acct)
uint256 createNftAndOffers(test::jtx::Env &env, test::jtx::Account const &owner, std::vector< uint256 > &offerIndexes, size_t const tokenCancelCount)
void exerciseBrokenLinks(FeatureBitset features)
void run(std::uint32_t instance, bool last=false)
void run() override
Runs the suite.
void testWithFeats(FeatureBitset features)
void testBurnTooManyOffers(FeatureBitset features)
void printNFTPages(test::jtx::Env &env, Volume vol)
void testBurnRandom(FeatureBitset features)
void testBurnSequential(FeatureBitset features)
void run() override
Runs the suite.
Writable ledger view that accumulates state and tx changes.
Definition: OpenView.h:66
Immutable cryptographic account descriptor.
Definition: Account.h:39
std::string const & human() const
Returns the human readable public key.
Definition: Account.h:114
A transaction testing environment.
Definition: Env.h:121
std::uint32_t seq(Account const &account) const
Returns the next sequence number on account.
Definition: Env.cpp:212
bool close(NetClock::time_point closeTime, std::optional< std::chrono::milliseconds > consensusDelay=std::nullopt)
Close and advance the ledger.
Definition: Env.cpp:117
Json::Value rpc(unsigned apiVersion, std::unordered_map< std::string, std::string > const &headers, std::string const &cmd, Args &&... args)
Execute an RPC command.
Definition: Env.h:779
T empty(T... args)
T end(T... args)
T endl(T... args)
T erase(T... args)
unsigned int UInt
Definition: json_forwards.h:27
Keylet nftpage(Keylet const &k, uint256 const &token)
Definition: Indexes.cpp:419
Keylet nftpage_min(AccountID const &owner)
NFT page keylets.
Definition: Indexes.cpp:403
Keylet nftpage_max(AccountID const &owner)
A keylet for the owner's last possible NFT page.
Definition: Indexes.cpp:411
Keylet nftoffer(AccountID const &owner, std::uint32_t seq)
An offer from an account to buy or sell an NFT.
Definition: Indexes.cpp:427
Taxon cipheredTaxon(std::uint32_t tokenSeq, Taxon taxon)
Definition: nft.h:84
Taxon toTaxon(std::uint32_t i)
Definition: nft.h:42
Use hash_* containers for keys that do not need a cryptographically secure hashing algorithm.
Definition: algorithm.h:26
constexpr std::uint32_t const tfSellNFToken
Definition: TxFlags.h:200
std::size_t constexpr maxTokenOfferCancelCount
The maximum number of token offers that can be canceled at once.
Definition: Protocol.h:69
std::uint16_t constexpr maxTransferFee
The maximum token transfer fee allowed.
Definition: Protocol.h:83
std::size_t constexpr maxDeletableTokenOfferEntries
The maximum number of offers in an offer directory for NFT to be burnable.
Definition: Protocol.h:72
constexpr std::uint32_t const tfBurnable
Definition: TxFlags.h:142
@ tefINVARIANT_FAILED
Definition: TER.h:183
@ tefTOO_BIG
Definition: TER.h:184
std::size_t constexpr maxTokenURILength
The maximum length of a URI inside an NFT.
Definition: Protocol.h:86
@ tecHAS_OBLIGATIONS
Definition: TER.h:317
@ tecINVARIANT_FAILED
Definition: TER.h:313
@ tesSUCCESS
Definition: TER.h:244
std::string to_string(base_uint< Bits, Tag > const &a)
Definition: base_uint.h:630
@ tapNONE
Definition: ApplyView.h:32
TERSubset< CanCvtToTER > TER
Definition: TER.h:643
constexpr std::uint32_t const tfTransferable
Definition: TxFlags.h:145
T pop_back(T... args)
T push_back(T... args)
T reserve(T... args)
T reverse(T... args)
T size(T... args)
T sort(T... args)
uint256 key
Definition: Keylet.h:40
T to_string(T... args)