// NFTokenBurn_test.cpp — rippled unit tests for the NFTokenBurn transactor.
//------------------------------------------------------------------------------
/*
    This file is part of rippled: https://github.com/ripple/rippled
    Copyright (c) 2021 Ripple Labs Inc.

    Permission to use, copy, modify, and/or distribute this software for any
    purpose  with  or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE  SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH  REGARD  TO  THIS  SOFTWARE  INCLUDING  ALL  IMPLIED  WARRANTIES  OF
    MERCHANTABILITY  AND  FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY  SPECIAL,  DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER  RESULTING  FROM  LOSS  OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION  OF  CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
19
#include <test/jtx.h>

#include <xrpld/app/tx/detail/NFTokenUtils.h>

#include <xrpl/protocol/Feature.h>
#include <xrpl/protocol/jss.h>

#include <random>
27namespace ripple {
28
30{
31 // Helper function that returns the number of nfts owned by an account.
32 static std::uint32_t
34 {
35 Json::Value params;
36 params[jss::account] = acct.human();
37 params[jss::type] = "state";
38 Json::Value nfts = env.rpc("json", "account_nfts", to_string(params));
39 return nfts[jss::result][jss::account_nfts].size();
40 };
41
42 // Helper function that returns new nft id for an account and create
43 // specified number of sell offers
46 test::jtx::Env& env,
47 test::jtx::Account const& owner,
48 std::vector<uint256>& offerIndexes,
49 size_t const tokenCancelCount)
50 {
51 using namespace test::jtx;
52 uint256 const nftokenID =
53 token::getNextID(env, owner, 0, tfTransferable);
54 env(token::mint(owner, 0),
55 token::uri(std::string(maxTokenURILength, 'u')),
56 txflags(tfTransferable));
57 env.close();
58
59 offerIndexes.reserve(tokenCancelCount);
60
61 for (uint32_t i = 0; i < tokenCancelCount; ++i)
62 {
63 // Create sell offer
64 offerIndexes.push_back(keylet::nftoffer(owner, env.seq(owner)).key);
65 env(token::createOffer(owner, nftokenID, drops(1)),
66 txflags(tfSellNFToken));
67 env.close();
68 }
69
70 return nftokenID;
71 };
72
73 // printNFTPages is a helper function that may be used for debugging.
74 //
75 // It uses the ledger RPC command to show the NFT pages in the ledger.
76 // This parameter controls how noisy the output is.
77 enum Volume : bool {
78 quiet = false,
79 noisy = true,
80 };
81
82 void
84 {
85 Json::Value jvParams;
86 jvParams[jss::ledger_index] = "current";
87 jvParams[jss::binary] = false;
88 {
89 Json::Value jrr = env.rpc(
90 "json",
91 "ledger_data",
92 boost::lexical_cast<std::string>(jvParams));
93
94 // Iterate the state and print all NFTokenPages.
95 if (!jrr.isMember(jss::result) ||
96 !jrr[jss::result].isMember(jss::state))
97 {
98 std::cout << "No ledger state found!" << std::endl;
99 return;
100 }
101 Json::Value& state = jrr[jss::result][jss::state];
102 if (!state.isArray())
103 {
104 std::cout << "Ledger state is not array!" << std::endl;
105 return;
106 }
107 for (Json::UInt i = 0; i < state.size(); ++i)
108 {
109 if (state[i].isMember(sfNFTokens.jsonName) &&
110 state[i][sfNFTokens.jsonName].isArray())
111 {
112 std::uint32_t tokenCount =
113 state[i][sfNFTokens.jsonName].size();
114 std::cout << tokenCount << " NFtokens in page "
115 << state[i][jss::index].asString() << std::endl;
116
117 if (vol == noisy)
118 {
119 std::cout << state[i].toStyledString() << std::endl;
120 }
121 else
122 {
123 if (tokenCount > 0)
124 std::cout << "first: "
125 << state[i][sfNFTokens.jsonName][0u]
127 << std::endl;
128 if (tokenCount > 1)
130 << "last: "
131 << state[i][sfNFTokens.jsonName][tokenCount - 1]
133 << std::endl;
134 }
135 }
136 }
137 }
138 }
139
140 void
142 {
143 // Exercise a number of conditions with NFT burning.
144 testcase("Burn random");
145
146 using namespace test::jtx;
147
148 Env env{*this, features};
149
150 // Keep information associated with each account together.
151 struct AcctStat
152 {
153 test::jtx::Account const acct;
155
156 AcctStat(char const* name) : acct(name)
157 {
158 }
159
160 operator test::jtx::Account() const
161 {
162 return acct;
163 }
164 };
165 AcctStat alice{"alice"};
166 AcctStat becky{"becky"};
167 AcctStat minter{"minter"};
168
169 env.fund(XRP(10000), alice, becky, minter);
170 env.close();
171
172 // Both alice and minter mint nfts in case that makes any difference.
173 env(token::setMinter(alice, minter));
174 env.close();
175
176 // Create enough NFTs that alice, becky, and minter can all have
177 // at least three pages of NFTs. This will cause more activity in
178 // the page coalescing code. If we make 210 NFTs in total, we can
179 // have alice and minter each make 105. That will allow us to
180 // distribute 70 NFTs to our three participants.
181 //
182 // Give each NFT a pseudo-randomly chosen fee so the NFTs are
183 // distributed pseudo-randomly through the pages. This should
184 // prevent alice's and minter's NFTs from clustering together
185 // in becky's directory.
186 //
187 // Use a default initialized mercenne_twister because we want the
188 // effect of random numbers, but we want the test to run the same
189 // way each time.
190 std::mt19937 engine;
192 decltype(maxTransferFee){}, maxTransferFee);
193
194 alice.nfts.reserve(105);
195 while (alice.nfts.size() < 105)
196 {
197 std::uint16_t const xferFee = feeDist(engine);
198 alice.nfts.push_back(token::getNextID(
199 env, alice, 0u, tfTransferable | tfBurnable, xferFee));
200 env(token::mint(alice),
201 txflags(tfTransferable | tfBurnable),
202 token::xferFee(xferFee));
203 env.close();
204 }
205
206 minter.nfts.reserve(105);
207 while (minter.nfts.size() < 105)
208 {
209 std::uint16_t const xferFee = feeDist(engine);
210 minter.nfts.push_back(token::getNextID(
211 env, alice, 0u, tfTransferable | tfBurnable, xferFee));
212 env(token::mint(minter),
213 txflags(tfTransferable | tfBurnable),
214 token::xferFee(xferFee),
215 token::issuer(alice));
216 env.close();
217 }
218
219 // All of the NFTs are now minted. Transfer 35 each over to becky so
220 // we end up with 70 NFTs in each account.
221 becky.nfts.reserve(70);
222 {
223 auto aliceIter = alice.nfts.begin();
224 auto minterIter = minter.nfts.begin();
225 while (becky.nfts.size() < 70)
226 {
227 // We do the same work on alice and minter, so make a lambda.
228 auto xferNFT = [&env, &becky](AcctStat& acct, auto& iter) {
229 uint256 offerIndex =
230 keylet::nftoffer(acct.acct, env.seq(acct.acct)).key;
231 env(token::createOffer(acct, *iter, XRP(0)),
232 txflags(tfSellNFToken));
233 env.close();
234 env(token::acceptSellOffer(becky, offerIndex));
235 env.close();
236 becky.nfts.push_back(*iter);
237 iter = acct.nfts.erase(iter);
238 iter += 2;
239 };
240 xferNFT(alice, aliceIter);
241 xferNFT(minter, minterIter);
242 }
243 BEAST_EXPECT(aliceIter == alice.nfts.end());
244 BEAST_EXPECT(minterIter == minter.nfts.end());
245 }
246
247 // Now all three participants have 70 NFTs.
248 BEAST_EXPECT(nftCount(env, alice.acct) == 70);
249 BEAST_EXPECT(nftCount(env, becky.acct) == 70);
250 BEAST_EXPECT(nftCount(env, minter.acct) == 70);
251
252 // Next we'll create offers for all of those NFTs. This calls for
253 // another lambda.
254 auto addOffers =
255 [&env](AcctStat& owner, AcctStat& other1, AcctStat& other2) {
256 for (uint256 nft : owner.nfts)
257 {
258 // Create sell offers for owner.
259 env(token::createOffer(owner, nft, drops(1)),
260 txflags(tfSellNFToken),
261 token::destination(other1));
262 env(token::createOffer(owner, nft, drops(1)),
263 txflags(tfSellNFToken),
264 token::destination(other2));
265 env.close();
266
267 // Create buy offers for other1 and other2.
268 env(token::createOffer(other1, nft, drops(1)),
269 token::owner(owner));
270 env(token::createOffer(other2, nft, drops(1)),
271 token::owner(owner));
272 env.close();
273
274 env(token::createOffer(other2, nft, drops(2)),
275 token::owner(owner));
276 env(token::createOffer(other1, nft, drops(2)),
277 token::owner(owner));
278 env.close();
279 }
280 };
281 addOffers(alice, becky, minter);
282 addOffers(becky, minter, alice);
283 addOffers(minter, alice, becky);
284 BEAST_EXPECT(ownerCount(env, alice) == 424);
285 BEAST_EXPECT(ownerCount(env, becky) == 424);
286 BEAST_EXPECT(ownerCount(env, minter) == 424);
287
288 // Now each of the 270 NFTs has six offers associated with it.
289 // Randomly select an NFT out of the pile and burn it. Continue
290 // the process until all NFTs are burned.
291 AcctStat* const stats[3] = {&alice, &becky, &minter};
294
295 while (stats[0]->nfts.size() > 0 || stats[1]->nfts.size() > 0 ||
296 stats[2]->nfts.size() > 0)
297 {
298 // Pick an account to burn an nft. If there are no nfts left
299 // pick again.
300 AcctStat& owner = *(stats[acctDist(engine)]);
301 if (owner.nfts.empty())
302 continue;
303
304 // Pick one of the nfts.
306 0lu, owner.nfts.size() - 1);
307 auto nftIter = owner.nfts.begin() + nftDist(engine);
308 uint256 const nft = *nftIter;
309 owner.nfts.erase(nftIter);
310
311 // Decide which of the accounts should burn the nft. If the
312 // owner is becky then any of the three accounts can burn.
313 // Otherwise either alice or minter can burn.
314 AcctStat& burner = owner.acct == becky.acct
315 ? *(stats[acctDist(engine)])
316 : mintDist(engine) ? alice
317 : minter;
318
319 if (owner.acct == burner.acct)
320 env(token::burn(burner, nft));
321 else
322 env(token::burn(burner, nft), token::owner(owner));
323 env.close();
324
325 // Every time we burn an nft, the number of nfts they hold should
326 // match the number of nfts we think they hold.
327 BEAST_EXPECT(nftCount(env, alice.acct) == alice.nfts.size());
328 BEAST_EXPECT(nftCount(env, becky.acct) == becky.nfts.size());
329 BEAST_EXPECT(nftCount(env, minter.acct) == minter.nfts.size());
330 }
331 BEAST_EXPECT(nftCount(env, alice.acct) == 0);
332 BEAST_EXPECT(nftCount(env, becky.acct) == 0);
333 BEAST_EXPECT(nftCount(env, minter.acct) == 0);
334
335 // When all nfts are burned none of the accounts should have
336 // an ownerCount.
337 BEAST_EXPECT(ownerCount(env, alice) == 0);
338 BEAST_EXPECT(ownerCount(env, becky) == 0);
339 BEAST_EXPECT(ownerCount(env, minter) == 0);
340 }
341
342 void
344 {
345 // The earlier burn test randomizes which nft is burned. There are
346 // a couple of directory merging scenarios that can only be tested by
347 // inserting and deleting in an ordered fashion. We do that testing
348 // now.
349 testcase("Burn sequential");
350
351 using namespace test::jtx;
352
353 Account const alice{"alice"};
354
355 Env env{*this, features};
356 env.fund(XRP(1000), alice);
357
358 // A lambda that generates 96 nfts packed into three pages of 32 each.
359 // Returns a sorted vector of the NFTokenIDs packed into the pages.
360 auto genPackedTokens = [this, &env, &alice]() {
362 nfts.reserve(96);
363
364 // We want to create fully packed NFT pages. This is a little
365 // tricky since the system currently in place is inclined to
366 // assign consecutive tokens to only 16 entries per page.
367 //
368 // By manipulating the internal form of the taxon we can force
369 // creation of NFT pages that are completely full. This lambda
370 // tells us the taxon value we should pass in in order for the
371 // internal representation to match the passed in value.
372 auto internalTaxon = [&env](
373 Account const& acct,
374 std::uint32_t taxon) -> std::uint32_t {
375 std::uint32_t tokenSeq =
376 env.le(acct)->at(~sfMintedNFTokens).value_or(0);
377
378 // If fixNFTokenRemint amendment is on, we must
379 // add FirstNFTokenSequence.
380 if (env.current()->rules().enabled(fixNFTokenRemint))
381 tokenSeq += env.le(acct)
382 ->at(~sfFirstNFTokenSequence)
383 .value_or(env.seq(acct));
384
385 return toUInt32(
386 nft::cipheredTaxon(tokenSeq, nft::toTaxon(taxon)));
387 };
388
389 for (std::uint32_t i = 0; i < 96; ++i)
390 {
391 // In order to fill the pages we use the taxon to break them
392 // into groups of 16 entries. By having the internal
393 // representation of the taxon go...
394 // 0, 3, 2, 5, 4, 7...
395 // in sets of 16 NFTs we can get each page to be fully
396 // populated.
397 std::uint32_t const intTaxon = (i / 16) + (i & 0b10000 ? 2 : 0);
398 uint32_t const extTaxon = internalTaxon(alice, intTaxon);
399 nfts.push_back(token::getNextID(env, alice, extTaxon));
400 env(token::mint(alice, extTaxon));
401 env.close();
402 }
403
404 // Sort the NFTs so they are listed in storage order, not
405 // creation order.
406 std::sort(nfts.begin(), nfts.end());
407
408 // Verify that the ledger does indeed contain exactly three pages
409 // of NFTs with 32 entries in each page.
410 Json::Value jvParams;
411 jvParams[jss::ledger_index] = "current";
412 jvParams[jss::binary] = false;
413 {
414 Json::Value jrr = env.rpc(
415 "json",
416 "ledger_data",
417 boost::lexical_cast<std::string>(jvParams));
418
419 Json::Value& state = jrr[jss::result][jss::state];
420
421 int pageCount = 0;
422 for (Json::UInt i = 0; i < state.size(); ++i)
423 {
424 if (state[i].isMember(sfNFTokens.jsonName) &&
425 state[i][sfNFTokens.jsonName].isArray())
426 {
427 BEAST_EXPECT(
428 state[i][sfNFTokens.jsonName].size() == 32);
429 ++pageCount;
430 }
431 }
432 // If this check fails then the internal NFT directory logic
433 // has changed.
434 BEAST_EXPECT(pageCount == 3);
435 }
436 return nfts;
437 };
438 {
439 // Generate three packed pages. Then burn the tokens in order from
440 // first to last. This exercises specific cases where coalescing
441 // pages is not possible.
442 std::vector<uint256> nfts = genPackedTokens();
443 BEAST_EXPECT(nftCount(env, alice) == 96);
444 BEAST_EXPECT(ownerCount(env, alice) == 3);
445
446 for (uint256 const& nft : nfts)
447 {
448 env(token::burn(alice, {nft}));
449 env.close();
450 }
451 BEAST_EXPECT(nftCount(env, alice) == 0);
452 BEAST_EXPECT(ownerCount(env, alice) == 0);
453 }
454
455 // A lambda verifies that the ledger no longer contains any NFT pages.
456 auto checkNoTokenPages = [this, &env]() {
457 Json::Value jvParams;
458 jvParams[jss::ledger_index] = "current";
459 jvParams[jss::binary] = false;
460 {
461 Json::Value jrr = env.rpc(
462 "json",
463 "ledger_data",
464 boost::lexical_cast<std::string>(jvParams));
465
466 Json::Value& state = jrr[jss::result][jss::state];
467
468 for (Json::UInt i = 0; i < state.size(); ++i)
469 {
470 BEAST_EXPECT(!state[i].isMember(sfNFTokens.jsonName));
471 }
472 }
473 };
474 checkNoTokenPages();
475 {
476 // Generate three packed pages. Then burn the tokens in order from
477 // last to first. This exercises different specific cases where
478 // coalescing pages is not possible.
479 std::vector<uint256> nfts = genPackedTokens();
480 BEAST_EXPECT(nftCount(env, alice) == 96);
481 BEAST_EXPECT(ownerCount(env, alice) == 3);
482
483 // Verify that that all three pages are present and remember the
484 // indexes.
485 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
486 if (!BEAST_EXPECT(lastNFTokenPage))
487 return;
488
489 uint256 const middleNFTokenPageIndex =
490 lastNFTokenPage->at(sfPreviousPageMin);
491 auto middleNFTokenPage = env.le(keylet::nftpage(
492 keylet::nftpage_min(alice), middleNFTokenPageIndex));
493 if (!BEAST_EXPECT(middleNFTokenPage))
494 return;
495
496 uint256 const firstNFTokenPageIndex =
497 middleNFTokenPage->at(sfPreviousPageMin);
498 auto firstNFTokenPage = env.le(keylet::nftpage(
499 keylet::nftpage_min(alice), firstNFTokenPageIndex));
500 if (!BEAST_EXPECT(firstNFTokenPage))
501 return;
502
503 // Burn almost all the tokens in the very last page.
504 for (int i = 0; i < 31; ++i)
505 {
506 env(token::burn(alice, {nfts.back()}));
507 nfts.pop_back();
508 env.close();
509 }
510
511 // Verify that the last page is still present and contains just one
512 // NFT.
513 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
514 if (!BEAST_EXPECT(lastNFTokenPage))
515 return;
516
517 BEAST_EXPECT(
518 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 1);
519 BEAST_EXPECT(lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
520 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
521
522 // Delete the last token from the last page.
523 env(token::burn(alice, {nfts.back()}));
524 nfts.pop_back();
525 env.close();
526
527 if (features[fixNFTokenPageLinks])
528 {
529 // Removing the last token from the last page deletes the
530 // _previous_ page because we need to preserve that last
531 // page an an anchor. The contents of the next-to-last page
532 // are moved into the last page.
533 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
534 BEAST_EXPECT(lastNFTokenPage);
535 BEAST_EXPECT(
536 lastNFTokenPage->at(~sfPreviousPageMin) ==
537 firstNFTokenPageIndex);
538 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
539 BEAST_EXPECT(
540 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
541
542 // The "middle" page should be gone.
543 middleNFTokenPage = env.le(keylet::nftpage(
544 keylet::nftpage_min(alice), middleNFTokenPageIndex));
545 BEAST_EXPECT(!middleNFTokenPage);
546
547 // The "first" page should still be present and linked to
548 // the last page.
549 firstNFTokenPage = env.le(keylet::nftpage(
550 keylet::nftpage_min(alice), firstNFTokenPageIndex));
551 BEAST_EXPECT(firstNFTokenPage);
552 BEAST_EXPECT(
553 !firstNFTokenPage->isFieldPresent(sfPreviousPageMin));
554 BEAST_EXPECT(
555 firstNFTokenPage->at(~sfNextPageMin) ==
556 lastNFTokenPage->key());
557 BEAST_EXPECT(
558 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
559 }
560 else
561 {
562 // Removing the last token from the last page deletes the last
563 // page. This is a bug. The contents of the next-to-last page
564 // should have been moved into the last page.
565 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
566 BEAST_EXPECT(!lastNFTokenPage);
567
568 // The "middle" page is still present, but has lost the
569 // NextPageMin field.
570 middleNFTokenPage = env.le(keylet::nftpage(
571 keylet::nftpage_min(alice), middleNFTokenPageIndex));
572 if (!BEAST_EXPECT(middleNFTokenPage))
573 return;
574 BEAST_EXPECT(
575 middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
576 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
577 }
578
579 // Delete the rest of the NFTokens.
580 while (!nfts.empty())
581 {
582 env(token::burn(alice, {nfts.back()}));
583 nfts.pop_back();
584 env.close();
585 }
586 BEAST_EXPECT(nftCount(env, alice) == 0);
587 BEAST_EXPECT(ownerCount(env, alice) == 0);
588 }
589 checkNoTokenPages();
590 {
591 // Generate three packed pages. Then burn all tokens in the middle
592 // page. This exercises the case where a page is removed between
593 // two fully populated pages.
594 std::vector<uint256> nfts = genPackedTokens();
595 BEAST_EXPECT(nftCount(env, alice) == 96);
596 BEAST_EXPECT(ownerCount(env, alice) == 3);
597
598 // Verify that that all three pages are present and remember the
599 // indexes.
600 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
601 if (!BEAST_EXPECT(lastNFTokenPage))
602 return;
603
604 uint256 const middleNFTokenPageIndex =
605 lastNFTokenPage->at(sfPreviousPageMin);
606 auto middleNFTokenPage = env.le(keylet::nftpage(
607 keylet::nftpage_min(alice), middleNFTokenPageIndex));
608 if (!BEAST_EXPECT(middleNFTokenPage))
609 return;
610
611 uint256 const firstNFTokenPageIndex =
612 middleNFTokenPage->at(sfPreviousPageMin);
613 auto firstNFTokenPage = env.le(keylet::nftpage(
614 keylet::nftpage_min(alice), firstNFTokenPageIndex));
615 if (!BEAST_EXPECT(firstNFTokenPage))
616 return;
617
618 for (std::size_t i = 32; i < 64; ++i)
619 {
620 env(token::burn(alice, nfts[i]));
621 env.close();
622 }
623 nfts.erase(nfts.begin() + 32, nfts.begin() + 64);
624 BEAST_EXPECT(nftCount(env, alice) == 64);
625 BEAST_EXPECT(ownerCount(env, alice) == 2);
626
627 // Verify that middle page is gone and the links in the two
628 // remaining pages are correct.
629 middleNFTokenPage = env.le(keylet::nftpage(
630 keylet::nftpage_min(alice), middleNFTokenPageIndex));
631 BEAST_EXPECT(!middleNFTokenPage);
632
633 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
634 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
635 BEAST_EXPECT(
636 lastNFTokenPage->getFieldH256(sfPreviousPageMin) ==
637 firstNFTokenPageIndex);
638
639 firstNFTokenPage = env.le(keylet::nftpage(
640 keylet::nftpage_min(alice), firstNFTokenPageIndex));
641 BEAST_EXPECT(
642 firstNFTokenPage->getFieldH256(sfNextPageMin) ==
643 keylet::nftpage_max(alice).key);
644 BEAST_EXPECT(!firstNFTokenPage->isFieldPresent(sfPreviousPageMin));
645
646 // Burn the remaining nfts.
647 for (uint256 const& nft : nfts)
648 {
649 env(token::burn(alice, {nft}));
650 env.close();
651 }
652 BEAST_EXPECT(nftCount(env, alice) == 0);
653 BEAST_EXPECT(ownerCount(env, alice) == 0);
654 }
655 checkNoTokenPages();
656 {
657 // Generate three packed pages. Then burn all the tokens in the
658 // first page followed by all the tokens in the last page. This
659 // exercises a specific case where coalescing pages is not possible.
660 std::vector<uint256> nfts = genPackedTokens();
661 BEAST_EXPECT(nftCount(env, alice) == 96);
662 BEAST_EXPECT(ownerCount(env, alice) == 3);
663
664 // Verify that that all three pages are present and remember the
665 // indexes.
666 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
667 if (!BEAST_EXPECT(lastNFTokenPage))
668 return;
669
670 uint256 const middleNFTokenPageIndex =
671 lastNFTokenPage->at(sfPreviousPageMin);
672 auto middleNFTokenPage = env.le(keylet::nftpage(
673 keylet::nftpage_min(alice), middleNFTokenPageIndex));
674 if (!BEAST_EXPECT(middleNFTokenPage))
675 return;
676
677 uint256 const firstNFTokenPageIndex =
678 middleNFTokenPage->at(sfPreviousPageMin);
679 auto firstNFTokenPage = env.le(keylet::nftpage(
680 keylet::nftpage_min(alice), firstNFTokenPageIndex));
681 if (!BEAST_EXPECT(firstNFTokenPage))
682 return;
683
684 // Burn all the tokens in the first page.
685 std::reverse(nfts.begin(), nfts.end());
686 for (int i = 0; i < 32; ++i)
687 {
688 env(token::burn(alice, {nfts.back()}));
689 nfts.pop_back();
690 env.close();
691 }
692
693 // Verify the first page is gone.
694 firstNFTokenPage = env.le(keylet::nftpage(
695 keylet::nftpage_min(alice), firstNFTokenPageIndex));
696 BEAST_EXPECT(!firstNFTokenPage);
697
698 // Check the links in the other two pages.
699 middleNFTokenPage = env.le(keylet::nftpage(
700 keylet::nftpage_min(alice), middleNFTokenPageIndex));
701 if (!BEAST_EXPECT(middleNFTokenPage))
702 return;
703 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
704 BEAST_EXPECT(middleNFTokenPage->isFieldPresent(sfNextPageMin));
705
706 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
707 if (!BEAST_EXPECT(lastNFTokenPage))
708 return;
709 BEAST_EXPECT(lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
710 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
711
712 // Burn all the tokens in the last page.
713 std::reverse(nfts.begin(), nfts.end());
714 for (int i = 0; i < 32; ++i)
715 {
716 env(token::burn(alice, {nfts.back()}));
717 nfts.pop_back();
718 env.close();
719 }
720
721 if (features[fixNFTokenPageLinks])
722 {
723 // Removing the last token from the last page deletes the
724 // _previous_ page because we need to preserve that last
725 // page an an anchor. The contents of the next-to-last page
726 // are moved into the last page.
727 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
728 BEAST_EXPECT(lastNFTokenPage);
729 BEAST_EXPECT(
730 !lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
731 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
732 BEAST_EXPECT(
733 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
734
735 // The "middle" page should be gone.
736 middleNFTokenPage = env.le(keylet::nftpage(
737 keylet::nftpage_min(alice), middleNFTokenPageIndex));
738 BEAST_EXPECT(!middleNFTokenPage);
739
740 // The "first" page should still be gone.
741 firstNFTokenPage = env.le(keylet::nftpage(
742 keylet::nftpage_min(alice), firstNFTokenPageIndex));
743 BEAST_EXPECT(!firstNFTokenPage);
744 }
745 else
746 {
747 // Removing the last token from the last page deletes the last
748 // page. This is a bug. The contents of the next-to-last page
749 // should have been moved into the last page.
750 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
751 BEAST_EXPECT(!lastNFTokenPage);
752
753 // The "middle" page is still present, but has lost the
754 // NextPageMin field.
755 middleNFTokenPage = env.le(keylet::nftpage(
756 keylet::nftpage_min(alice), middleNFTokenPageIndex));
757 if (!BEAST_EXPECT(middleNFTokenPage))
758 return;
759 BEAST_EXPECT(
760 !middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
761 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
762 }
763
764 // Delete the rest of the NFTokens.
765 while (!nfts.empty())
766 {
767 env(token::burn(alice, {nfts.back()}));
768 nfts.pop_back();
769 env.close();
770 }
771 BEAST_EXPECT(nftCount(env, alice) == 0);
772 BEAST_EXPECT(ownerCount(env, alice) == 0);
773 }
774 checkNoTokenPages();
775
776 if (features[fixNFTokenPageLinks])
777 {
778 // Exercise the invariant that the final NFTokenPage of a directory
779 // may not be removed if there are NFTokens in other pages of the
780 // directory.
781 //
782 // We're going to fire an Invariant failure that is difficult to
783 // cause. We do it here because the tools are here.
784 //
785 // See Invariants_test.cpp for examples of other invariant tests
786 // that this one is modeled after.
787
788 // Generate three closely packed NFTokenPages.
789 std::vector<uint256> nfts = genPackedTokens();
790 BEAST_EXPECT(nftCount(env, alice) == 96);
791 BEAST_EXPECT(ownerCount(env, alice) == 3);
792
793 // Burn almost all the tokens in the very last page.
794 for (int i = 0; i < 31; ++i)
795 {
796 env(token::burn(alice, {nfts.back()}));
797 nfts.pop_back();
798 env.close();
799 }
800 {
801 // Create an ApplyContext we can use to run the invariant
802 // checks. These variables must outlive the ApplyContext.
803 OpenView ov{*env.current()};
804 STTx tx{ttACCOUNT_SET, [](STObject&) {}};
806 beast::Journal jlog{sink};
807 ApplyContext ac{
808 env.app(),
809 ov,
810 tx,
812 env.current()->fees().base,
813 tapNONE,
814 jlog};
815
816 // Verify that the last page is present and contains one NFT.
817 auto lastNFTokenPage =
818 ac.view().peek(keylet::nftpage_max(alice));
819 if (!BEAST_EXPECT(lastNFTokenPage))
820 return;
821 BEAST_EXPECT(
822 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 1);
823
824 // Erase that last page.
825 ac.view().erase(lastNFTokenPage);
826
827 // Exercise the invariant.
828 TER terActual = tesSUCCESS;
829 for (TER const& terExpect :
831 {
832 terActual = ac.checkInvariants(terActual, XRPAmount{});
833 BEAST_EXPECT(terExpect == terActual);
834 BEAST_EXPECT(
835 sink.messages().str().starts_with("Invariant failed:"));
836 // uncomment to log the invariant failure message
837 // log << " --> " << sink.messages().str() << std::endl;
838 BEAST_EXPECT(
839 sink.messages().str().find(
840 "Last NFT page deleted with non-empty directory") !=
841 std::string::npos);
842 }
843 }
844 {
845 // Create an ApplyContext we can use to run the invariant
846 // checks. These variables must outlive the ApplyContext.
847 OpenView ov{*env.current()};
848 STTx tx{ttACCOUNT_SET, [](STObject&) {}};
850 beast::Journal jlog{sink};
851 ApplyContext ac{
852 env.app(),
853 ov,
854 tx,
856 env.current()->fees().base,
857 tapNONE,
858 jlog};
859
860 // Verify that the middle page is present.
861 auto lastNFTokenPage =
862 ac.view().peek(keylet::nftpage_max(alice));
863 auto middleNFTokenPage = ac.view().peek(keylet::nftpage(
864 keylet::nftpage_min(alice),
865 lastNFTokenPage->getFieldH256(sfPreviousPageMin)));
866 BEAST_EXPECT(middleNFTokenPage);
867
868 // Remove the NextMinPage link from the middle page to fire
869 // the invariant.
870 middleNFTokenPage->makeFieldAbsent(sfNextPageMin);
871 ac.view().update(middleNFTokenPage);
872
873 // Exercise the invariant.
874 TER terActual = tesSUCCESS;
875 for (TER const& terExpect :
877 {
878 terActual = ac.checkInvariants(terActual, XRPAmount{});
879 BEAST_EXPECT(terExpect == terActual);
880 BEAST_EXPECT(
881 sink.messages().str().starts_with("Invariant failed:"));
882 // uncomment to log the invariant failure message
883 // log << " --> " << sink.messages().str() << std::endl;
884 BEAST_EXPECT(
885 sink.messages().str().find("Lost NextMinPage link") !=
886 std::string::npos);
887 }
888 }
889 }
890 }
891
892 void
894 {
895 // Look at the case where too many offers prevents burning a token.
896 testcase("Burn too many offers");
897
898 using namespace test::jtx;
899
900 // Test what happens if a NFT is unburnable when there are
901 // more than 500 offers, before fixNonFungibleTokensV1_2 goes live
902 if (!features[fixNonFungibleTokensV1_2])
903 {
904 Env env{*this, features};
905
906 Account const alice("alice");
907 Account const becky("becky");
908 env.fund(XRP(1000), alice, becky);
909 env.close();
910
911 // We structure the test to try and maximize the metadata produced.
912 // This verifies that we don't create too much metadata during a
913 // maximal burn operation.
914 //
915 // 1. alice mints an nft with a full-sized URI.
916 // 2. We create 500 new accounts, each of which creates an offer
917 // for alice's nft.
918 // 3. becky creates one more offer for alice's NFT
919 // 4. Attempt to burn the nft which fails because there are too
920 // many offers.
921 // 5. Cancel becky's offer and the nft should become burnable.
922 uint256 const nftokenID =
923 token::getNextID(env, alice, 0, tfTransferable);
924 env(token::mint(alice, 0),
925 token::uri(std::string(maxTokenURILength, 'u')),
926 txflags(tfTransferable));
927 env.close();
928
929 std::vector<uint256> offerIndexes;
930 offerIndexes.reserve(maxTokenOfferCancelCount);
931 for (std::uint32_t i = 0; i < maxTokenOfferCancelCount; ++i)
932 {
933 Account const acct(std::string("acct") + std::to_string(i));
934 env.fund(XRP(1000), acct);
935 env.close();
936
937 offerIndexes.push_back(
938 keylet::nftoffer(acct, env.seq(acct)).key);
939 env(token::createOffer(acct, nftokenID, drops(1)),
940 token::owner(alice));
941 env.close();
942 }
943
944 // Verify all offers are present in the ledger.
945 for (uint256 const& offerIndex : offerIndexes)
946 {
947 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
948 }
949
950 // Create one too many offers.
951 uint256 const beckyOfferIndex =
952 keylet::nftoffer(becky, env.seq(becky)).key;
953 env(token::createOffer(becky, nftokenID, drops(1)),
954 token::owner(alice));
955
956 // Attempt to burn the nft which should fail.
957 env(token::burn(alice, nftokenID), ter(tefTOO_BIG));
958
959 // Close enough ledgers that the burn transaction is no longer
960 // retried.
961 for (int i = 0; i < 10; ++i)
962 env.close();
963
964 // Cancel becky's offer, but alice adds a sell offer. The token
965 // should still not be burnable.
966 env(token::cancelOffer(becky, {beckyOfferIndex}));
967 env.close();
968
969 uint256 const aliceOfferIndex =
970 keylet::nftoffer(alice, env.seq(alice)).key;
971 env(token::createOffer(alice, nftokenID, drops(1)),
972 txflags(tfSellNFToken));
973 env.close();
974
975 env(token::burn(alice, nftokenID), ter(tefTOO_BIG));
976 env.close();
977
978 // Cancel alice's sell offer. Now the token should be burnable.
979 env(token::cancelOffer(alice, {aliceOfferIndex}));
980 env.close();
981
982 env(token::burn(alice, nftokenID));
983 env.close();
984
985 // Burning the token should remove all the offers from the ledger.
986 for (uint256 const& offerIndex : offerIndexes)
987 {
988 BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
989 }
990
991 // Both alice and becky should have ownerCounts of zero.
992 BEAST_EXPECT(ownerCount(env, alice) == 0);
993 BEAST_EXPECT(ownerCount(env, becky) == 0);
994 }
995
        // Test that up to 499 buy/sell offers will be removed when NFT is
        // burned after fixNonFungibleTokensV1_2 is enabled. This is to test
        // that we can successfully remove all offers if the number of offers is
        // less than 500.
        if (features[fixNonFungibleTokensV1_2])
        {
            Env env{*this, features};

            Account const alice("alice");
            Account const becky("becky");
            env.fund(XRP(100000), alice, becky);
            env.close();

            // alice creates 498 sell offers and becky creates 1 buy offer.
            // When the token is burned, 498 sell offers and 1 buy offer are
            // removed. In total, 499 offers are removed.
            std::vector<uint256> offerIndexes;
            auto const nftokenID = createNftAndOffers(
                env, alice, offerIndexes, maxDeletableTokenOfferEntries - 2);

            // Verify all sell offers are present in the ledger.
            for (uint256 const& offerIndex : offerIndexes)
            {
                BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
            }

            // Becky creates a buy offer.
            uint256 const beckyOfferIndex =
                keylet::nftoffer(becky, env.seq(becky)).key;
            env(token::createOffer(becky, nftokenID, drops(1)),
                token::owner(alice));
            env.close();

            // Burn the token.
            env(token::burn(alice, nftokenID));
            env.close();

            // Burning the token should remove all 498 sell offers
            // that alice created.
            for (uint256 const& offerIndex : offerIndexes)
            {
                BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
            }

            // Burning the token should also remove the one buy offer
            // that becky created.
            BEAST_EXPECT(!env.le(keylet::nftoffer(beckyOfferIndex)));

            // alice and becky should have ownerCounts of zero.
            BEAST_EXPECT(ownerCount(env, alice) == 0);
            BEAST_EXPECT(ownerCount(env, becky) == 0);
        }
1048
        // Test that up to 500 sell offers are removed when NFT is burned
        // after fixNonFungibleTokensV1_2 is enabled. (The original comment
        // said "buy offers", but this scenario creates only sell offers.)
        if (features[fixNonFungibleTokensV1_2])
        {
            Env env{*this, features};

            Account const alice("alice");
            Account const becky("becky");
            env.fund(XRP(100000), alice, becky);
            env.close();

            // alice creates 501 sell offers for the token.
            // After we burn the token, 500 of the sell offers should be
            // removed, and one is left over (orphaned).
            std::vector<uint256> offerIndexes;
            auto const nftokenID = createNftAndOffers(
                env, alice, offerIndexes, maxDeletableTokenOfferEntries + 1);

            // Verify all sell offers are present in the ledger.
            for (uint256 const& offerIndex : offerIndexes)
            {
                BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
            }

            // Burn the token.
            env(token::burn(alice, nftokenID));
            env.close();

            uint32_t offerDeletedCount = 0;
            // Count the number of sell offers that have been deleted.
            for (uint256 const& offerIndex : offerIndexes)
            {
                if (!env.le(keylet::nftoffer(offerIndex)))
                    offerDeletedCount++;
            }

            BEAST_EXPECT(offerIndexes.size() == maxTokenOfferCancelCount + 1);

            // 500 sell offers should be removed.
            BEAST_EXPECT(offerDeletedCount == maxTokenOfferCancelCount);

            // alice should have ownerCounts of one for the orphaned sell offer.
            BEAST_EXPECT(ownerCount(env, alice) == 1);
        }
1093
        // Test that up to 500 buy/sell offers are removed when NFT is burned
        // after fixNonFungibleTokensV1_2 is enabled.
        if (features[fixNonFungibleTokensV1_2])
        {
            Env env{*this, features};

            Account const alice("alice");
            Account const becky("becky");
            env.fund(XRP(100000), alice, becky);
            env.close();

            // alice creates 499 sell offers and becky creates 2 buy offers.
            // When the token is burned, 499 sell offers and only 1 buy offer
            // are removed (500 is the deletion cap).
            // In total, 500 offers are removed.
            std::vector<uint256> offerIndexes;
            auto const nftokenID = createNftAndOffers(
                env, alice, offerIndexes, maxDeletableTokenOfferEntries - 1);

            // Verify all sell offers are present in the ledger.
            for (uint256 const& offerIndex : offerIndexes)
            {
                BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
            }

            // becky creates 2 buy offers.
            env(token::createOffer(becky, nftokenID, drops(1)),
                token::owner(alice));
            env.close();
            env(token::createOffer(becky, nftokenID, drops(1)),
                token::owner(alice));
            env.close();

            // Burn the token.
            env(token::burn(alice, nftokenID));
            env.close();

            // Burning the token should remove all 499 sell offers from the
            // ledger.
            for (uint256 const& offerIndex : offerIndexes)
            {
                BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
            }

            // alice should have ownerCount of zero because all her
            // sell offers have been deleted.
            BEAST_EXPECT(ownerCount(env, alice) == 0);

            // becky has ownerCount of one due to an orphaned buy offer.
            BEAST_EXPECT(ownerCount(env, becky) == 1);
        }
1145 }
1146
1147 void
1149 {
1150 // Amendment fixNFTokenPageLinks prevents the breakage we want
1151 // to observe.
1152 if (features[fixNFTokenPageLinks])
1153 return;
1154
1155 // a couple of directory merging scenarios that can only be tested by
1156 // inserting and deleting in an ordered fashion. We do that testing
1157 // now.
1158 testcase("Exercise broken links");
1159
1160 using namespace test::jtx;
1161
1162 Account const alice{"alice"};
1163 Account const minter{"minter"};
1164
1165 Env env{*this, features};
1166 env.fund(XRP(1000), alice, minter);
1167
1168 // A lambda that generates 96 nfts packed into three pages of 32 each.
1169 // Returns a sorted vector of the NFTokenIDs packed into the pages.
1170 auto genPackedTokens = [this, &env, &alice, &minter]() {
1172 nfts.reserve(96);
1173
1174 // We want to create fully packed NFT pages. This is a little
1175 // tricky since the system currently in place is inclined to
1176 // assign consecutive tokens to only 16 entries per page.
1177 //
1178 // By manipulating the internal form of the taxon we can force
1179 // creation of NFT pages that are completely full. This lambda
1180 // tells us the taxon value we should pass in in order for the
1181 // internal representation to match the passed in value.
1182 auto internalTaxon = [&env](
1183 Account const& acct,
1184 std::uint32_t taxon) -> std::uint32_t {
1185 std::uint32_t tokenSeq =
1186 env.le(acct)->at(~sfMintedNFTokens).value_or(0);
1187
1188 // If fixNFTokenRemint amendment is on, we must
1189 // add FirstNFTokenSequence.
1190 if (env.current()->rules().enabled(fixNFTokenRemint))
1191 tokenSeq += env.le(acct)
1192 ->at(~sfFirstNFTokenSequence)
1193 .value_or(env.seq(acct));
1194
1195 return toUInt32(
1196 nft::cipheredTaxon(tokenSeq, nft::toTaxon(taxon)));
1197 };
1198
1199 for (std::uint32_t i = 0; i < 96; ++i)
1200 {
1201 // In order to fill the pages we use the taxon to break them
1202 // into groups of 16 entries. By having the internal
1203 // representation of the taxon go...
1204 // 0, 3, 2, 5, 4, 7...
1205 // in sets of 16 NFTs we can get each page to be fully
1206 // populated.
1207 std::uint32_t const intTaxon = (i / 16) + (i & 0b10000 ? 2 : 0);
1208 uint32_t const extTaxon = internalTaxon(minter, intTaxon);
1209 nfts.push_back(
1210 token::getNextID(env, minter, extTaxon, tfTransferable));
1211 env(token::mint(minter, extTaxon), txflags(tfTransferable));
1212 env.close();
1213
1214 // Minter creates an offer for the NFToken.
1215 uint256 const minterOfferIndex =
1216 keylet::nftoffer(minter, env.seq(minter)).key;
1217 env(token::createOffer(minter, nfts.back(), XRP(0)),
1218 txflags(tfSellNFToken));
1219 env.close();
1220
1221 // alice accepts the offer.
1222 env(token::acceptSellOffer(alice, minterOfferIndex));
1223 env.close();
1224 }
1225
1226 // Sort the NFTs so they are listed in storage order, not
1227 // creation order.
1228 std::sort(nfts.begin(), nfts.end());
1229
1230 // Verify that the ledger does indeed contain exactly three pages
1231 // of NFTs with 32 entries in each page.
1232 Json::Value jvParams;
1233 jvParams[jss::ledger_index] = "current";
1234 jvParams[jss::binary] = false;
1235 {
1236 Json::Value jrr = env.rpc(
1237 "json",
1238 "ledger_data",
1239 boost::lexical_cast<std::string>(jvParams));
1240
1241 Json::Value& state = jrr[jss::result][jss::state];
1242
1243 int pageCount = 0;
1244 for (Json::UInt i = 0; i < state.size(); ++i)
1245 {
1246 if (state[i].isMember(sfNFTokens.jsonName) &&
1247 state[i][sfNFTokens.jsonName].isArray())
1248 {
1249 BEAST_EXPECT(
1250 state[i][sfNFTokens.jsonName].size() == 32);
1251 ++pageCount;
1252 }
1253 }
1254 // If this check fails then the internal NFT directory logic
1255 // has changed.
1256 BEAST_EXPECT(pageCount == 3);
1257 }
1258 return nfts;
1259 };
1260
1261 // Generate three packed pages.
1262 std::vector<uint256> nfts = genPackedTokens();
1263 BEAST_EXPECT(nftCount(env, alice) == 96);
1264 BEAST_EXPECT(ownerCount(env, alice) == 3);
1265
1266 // Verify that that all three pages are present and remember the
1267 // indexes.
1268 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
1269 if (!BEAST_EXPECT(lastNFTokenPage))
1270 return;
1271
1272 uint256 const middleNFTokenPageIndex =
1273 lastNFTokenPage->at(sfPreviousPageMin);
1274 auto middleNFTokenPage = env.le(keylet::nftpage(
1275 keylet::nftpage_min(alice), middleNFTokenPageIndex));
1276 if (!BEAST_EXPECT(middleNFTokenPage))
1277 return;
1278
1279 uint256 const firstNFTokenPageIndex =
1280 middleNFTokenPage->at(sfPreviousPageMin);
1281 auto firstNFTokenPage = env.le(
1282 keylet::nftpage(keylet::nftpage_min(alice), firstNFTokenPageIndex));
1283 if (!BEAST_EXPECT(firstNFTokenPage))
1284 return;
1285
1286 // Sell all the tokens in the very last page back to minter.
1287 std::vector<uint256> last32NFTs;
1288 for (int i = 0; i < 32; ++i)
1289 {
1290 last32NFTs.push_back(nfts.back());
1291 nfts.pop_back();
1292
1293 // alice creates an offer for the NFToken.
1294 uint256 const aliceOfferIndex =
1295 keylet::nftoffer(alice, env.seq(alice)).key;
1296 env(token::createOffer(alice, last32NFTs.back(), XRP(0)),
1297 txflags(tfSellNFToken));
1298 env.close();
1299
1300 // minter accepts the offer.
1301 env(token::acceptSellOffer(minter, aliceOfferIndex));
1302 env.close();
1303 }
1304
1305 // Removing the last token from the last page deletes alice's last
1306 // page. This is a bug. The contents of the next-to-last page
1307 // should have been moved into the last page.
1308 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
1309 BEAST_EXPECT(!lastNFTokenPage);
1310 BEAST_EXPECT(ownerCount(env, alice) == 2);
1311
1312 // The "middle" page is still present, but has lost the
1313 // NextPageMin field.
1314 middleNFTokenPage = env.le(keylet::nftpage(
1315 keylet::nftpage_min(alice), middleNFTokenPageIndex));
1316 if (!BEAST_EXPECT(middleNFTokenPage))
1317 return;
1318 BEAST_EXPECT(middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
1319 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
1320
1321 // Attempt to delete alice's account, but fail because she owns NFTs.
1322 auto const acctDelFee{drops(env.current()->fees().increment)};
1323 env(acctdelete(alice, minter),
1324 fee(acctDelFee),
1325 ter(tecHAS_OBLIGATIONS));
1326 env.close();
1327
1328 // minter sells the last 32 NFTs back to alice.
1329 for (uint256 nftID : last32NFTs)
1330 {
1331 // minter creates an offer for the NFToken.
1332 uint256 const minterOfferIndex =
1333 keylet::nftoffer(minter, env.seq(minter)).key;
1334 env(token::createOffer(minter, nftID, XRP(0)),
1335 txflags(tfSellNFToken));
1336 env.close();
1337
1338 // alice accepts the offer.
1339 env(token::acceptSellOffer(alice, minterOfferIndex));
1340 env.close();
1341 }
1342 BEAST_EXPECT(ownerCount(env, alice) == 3); // Three NFTokenPages.
1343
1344 // alice has an NFToken directory with a broken link in the middle.
1345 {
1346 // Try the account_objects RPC command. Alice's account only shows
1347 // two NFT pages even though she owns more.
1348 Json::Value acctObjs = [&env, &alice]() {
1349 Json::Value params;
1350 params[jss::account] = alice.human();
1351 return env.rpc("json", "account_objects", to_string(params));
1352 }();
1353 BEAST_EXPECT(!acctObjs.isMember(jss::marker));
1354 BEAST_EXPECT(
1355 acctObjs[jss::result][jss::account_objects].size() == 2);
1356 }
1357 {
1358 // Try the account_nfts RPC command. It only returns 64 NFTs
1359 // although alice owns 96.
1360 Json::Value aliceNFTs = [&env, &alice]() {
1361 Json::Value params;
1362 params[jss::account] = alice.human();
1363 params[jss::type] = "state";
1364 return env.rpc("json", "account_nfts", to_string(params));
1365 }();
1366 BEAST_EXPECT(!aliceNFTs.isMember(jss::marker));
1367 BEAST_EXPECT(
1368 aliceNFTs[jss::result][jss::account_nfts].size() == 64);
1369 }
1370 }
1371
1372 void
1374 {
1375 testBurnRandom(features);
1376 testBurnSequential(features);
1377 testBurnTooManyOffers(features);
1378 exerciseBrokenLinks(features);
1379 }
1380
1381protected:
1382 void
1383 run(std::uint32_t instance, bool last = false)
1384 {
1385 using namespace test::jtx;
1386 static FeatureBitset const all{supported_amendments()};
1387 static FeatureBitset const fixNFTV1_2{fixNonFungibleTokensV1_2};
1388 static FeatureBitset const fixNFTDir{fixNFTokenDirV1};
1389 static FeatureBitset const fixNFTRemint{fixNFTokenRemint};
1390 static FeatureBitset const fixNFTPageLinks{fixNFTokenPageLinks};
1391
1392 static std::array<FeatureBitset, 5> const feats{
1393 all - fixNFTV1_2 - fixNFTDir - fixNFTRemint - fixNFTPageLinks,
1394 all - fixNFTV1_2 - fixNFTRemint - fixNFTPageLinks,
1395 all - fixNFTRemint - fixNFTPageLinks,
1396 all - fixNFTPageLinks,
1397 all,
1398 };
1399
1400 if (BEAST_EXPECT(instance < feats.size()))
1401 {
1402 testWithFeats(feats[instance]);
1403 }
1404 BEAST_EXPECT(!last || instance == feats.size() - 1);
1405 }
1406
1407public:
1408 void
1409 run() override
1410 {
1411 run(0);
1412 }
1413};
1414
1416{
1417public:
1418 void
1419 run() override
1420 {
1422 }
1423};
1424
1426{
1427public:
1428 void
1429 run() override
1430 {
1432 }
1433};
1434
1436{
1437public:
1438 void
1439 run() override
1440 {
1442 }
1443};
1444
1446{
1447public:
1448 void
1449 run() override
1450 {
1452 }
1453};
1454
// Register the NFTokenBurn test suites (group "tx", priority 3). Each
// suite drives one feature-set instance so the work is split across them.
BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnBaseUtil, tx, ripple, 3);
BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOfixFungTokens, tx, ripple, 3);
BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOFixTokenRemint, tx, ripple, 3);
BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOFixNFTPageLinks, tx, ripple, 3);
BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnAllFeatures, tx, ripple, 3);
1460
1461} // namespace ripple
T back(T... args)
T begin(T... args)
Represents a JSON value.
Definition: json_value.h:148
bool isArray() const
UInt size() const
Number of values in array or object.
Definition: json_value.cpp:712
std::string toStyledString() const
std::string asString() const
Returns the unquoted string value.
Definition: json_value.cpp:475
bool isMember(const char *key) const
Return true if the object has a member named key.
Definition: json_value.cpp:949
A generic endpoint for log messages.
Definition: Journal.h:60
A testsuite class.
Definition: suite.h:55
testcase_t testcase
Memberspace for declaring test cases.
Definition: suite.h:155
State information when applying a tx.
Definition: ApplyContext.h:36
Application & app
Definition: ApplyContext.h:47
void run() override
Runs the suite.
static std::uint32_t nftCount(test::jtx::Env &env, test::jtx::Account const &acct)
uint256 createNftAndOffers(test::jtx::Env &env, test::jtx::Account const &owner, std::vector< uint256 > &offerIndexes, size_t const tokenCancelCount)
void exerciseBrokenLinks(FeatureBitset features)
void run(std::uint32_t instance, bool last=false)
void run() override
Runs the suite.
void testWithFeats(FeatureBitset features)
void testBurnTooManyOffers(FeatureBitset features)
void printNFTPages(test::jtx::Env &env, Volume vol)
void testBurnRandom(FeatureBitset features)
void testBurnSequential(FeatureBitset features)
void run() override
Runs the suite.
Writable ledger view that accumulates state and tx changes.
Definition: OpenView.h:56
Immutable cryptographic account descriptor.
Definition: Account.h:39
std::string const & human() const
Returns the human readable public key.
Definition: Account.h:114
A transaction testing environment.
Definition: Env.h:118
std::uint32_t seq(Account const &account) const
Returns the next sequence number on account.
Definition: Env.cpp:210
bool close(NetClock::time_point closeTime, std::optional< std::chrono::milliseconds > consensusDelay=std::nullopt)
Close and advance the ledger.
Definition: Env.cpp:115
Json::Value rpc(unsigned apiVersion, std::unordered_map< std::string, std::string > const &headers, std::string const &cmd, Args &&... args)
Execute an RPC command.
Definition: Env.h:765
T empty(T... args)
T end(T... args)
T endl(T... args)
T erase(T... args)
unsigned int UInt
Definition: json_forwards.h:27
Keylet nftpage(Keylet const &k, uint256 const &token)
Definition: Indexes.cpp:410
Keylet nftpage_min(AccountID const &owner)
NFT page keylets.
Definition: Indexes.cpp:394
Keylet nftpage_max(AccountID const &owner)
A keylet for the owner's last possible NFT page.
Definition: Indexes.cpp:402
Keylet nftoffer(AccountID const &owner, std::uint32_t seq)
An offer from an account to buy or sell an NFT.
Definition: Indexes.cpp:418
Taxon cipheredTaxon(std::uint32_t tokenSeq, Taxon taxon)
Definition: nft.h:84
Taxon toTaxon(std::uint32_t i)
Definition: nft.h:42
Use hash_* containers for keys that do not need a cryptographically secure hashing algorithm.
Definition: algorithm.h:26
constexpr std::uint32_t const tfSellNFToken
Definition: TxFlags.h:189
std::size_t constexpr maxTokenOfferCancelCount
The maximum number of token offers that can be canceled at once.
Definition: Protocol.h:69
std::uint16_t constexpr maxTransferFee
The maximum token transfer fee allowed.
Definition: Protocol.h:83
std::size_t constexpr maxDeletableTokenOfferEntries
The maximum number of offers in an offer directory for NFT to be burnable.
Definition: Protocol.h:72
constexpr std::uint32_t const tfBurnable
Definition: TxFlags.h:133
@ tefINVARIANT_FAILED
Definition: TER.h:183
@ tefTOO_BIG
Definition: TER.h:184
std::size_t constexpr maxTokenURILength
The maximum length of a URI inside an NFT.
Definition: Protocol.h:86
@ tecHAS_OBLIGATIONS
Definition: TER.h:304
@ tecINVARIANT_FAILED
Definition: TER.h:300
@ tesSUCCESS
Definition: TER.h:242
std::string to_string(base_uint< Bits, Tag > const &a)
Definition: base_uint.h:630
@ tapNONE
Definition: ApplyView.h:31
TERSubset< CanCvtToTER > TER
Definition: TER.h:627
constexpr std::uint32_t const tfTransferable
Definition: TxFlags.h:136
T pop_back(T... args)
T push_back(T... args)
T reserve(T... args)
T reverse(T... args)
T size(T... args)
T sort(T... args)
uint256 key
Definition: Keylet.h:40
T to_string(T... args)