NFTokenBurn_test.cpp
1//------------------------------------------------------------------------------
2/*
3 This file is part of rippled: https://github.com/ripple/rippled
4 Copyright (c) 2021 Ripple Labs Inc.
5
6 Permission to use, copy, modify, and/or distribute this software for any
7 purpose with or without fee is hereby granted, provided that the above
8 copyright notice and this permission notice appear in all copies.
9
10 THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
11 WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
12 MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
13 ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
14 WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
15 ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
16 OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17*/
18//==============================================================================
19
20#include <test/jtx.h>
21
22#include <xrpld/app/tx/detail/NFTokenUtils.h>
23
24#include <xrpl/protocol/Feature.h>
25#include <xrpl/protocol/jss.h>
26
27#include <random>
28
29namespace ripple {
30
31class NFTokenBurnBaseUtil_test : public beast::unit_test::suite
32{
33 // Helper function that returns the number of nfts owned by an account.
34 static std::uint32_t
35 nftCount(test::jtx::Env& env, test::jtx::Account const& acct)
36 {
37 Json::Value params;
38 params[jss::account] = acct.human();
39 params[jss::type] = "state";
40 Json::Value nfts = env.rpc("json", "account_nfts", to_string(params));
41 return nfts[jss::result][jss::account_nfts].size();
42 };
43
44 // Helper function that returns a new nft id for an account and creates
45 // the specified number of sell offers.
46 uint256
47 createNftAndOffers(
48 test::jtx::Env& env,
49 test::jtx::Account const& owner,
50 std::vector<uint256>& offerIndexes,
51 size_t const tokenCancelCount)
52 {
53 using namespace test::jtx;
54 uint256 const nftokenID =
55 token::getNextID(env, owner, 0, tfTransferable);
56 env(token::mint(owner, 0),
57 token::uri(std::string(maxTokenURILength, 'u')),
58 txflags(tfTransferable));
59 env.close();
60
61 offerIndexes.reserve(tokenCancelCount);
62
63 for (uint32_t i = 0; i < tokenCancelCount; ++i)
64 {
65 // Create sell offer
66 offerIndexes.push_back(keylet::nftoffer(owner, env.seq(owner)).key);
67 env(token::createOffer(owner, nftokenID, drops(1)),
68 txflags(tfSellNFToken));
69 env.close();
70 }
71
72 return nftokenID;
73 };
74
75 // printNFTPages is a helper function that may be used for debugging.
76 //
77 // It uses the ledger_data RPC command to show the NFT pages in the
78 // ledger. The Volume parameter controls how noisy the output is.
79 enum Volume : bool {
80 quiet = false,
81 noisy = true,
82 };
83
84 void
85 printNFTPages(test::jtx::Env& env, Volume vol)
86 {
87 Json::Value jvParams;
88 jvParams[jss::ledger_index] = "current";
89 jvParams[jss::binary] = false;
90 {
91 Json::Value jrr =
92 env.rpc("json", "ledger_data", to_string(jvParams));
93
94 // Iterate the state and print all NFTokenPages.
95 if (!jrr.isMember(jss::result) ||
96 !jrr[jss::result].isMember(jss::state))
97 {
98 std::cout << "No ledger state found!" << std::endl;
99 return;
100 }
101 Json::Value& state = jrr[jss::result][jss::state];
102 if (!state.isArray())
103 {
104 std::cout << "Ledger state is not array!" << std::endl;
105 return;
106 }
107 for (Json::UInt i = 0; i < state.size(); ++i)
108 {
109 if (state[i].isMember(sfNFTokens.jsonName) &&
110 state[i][sfNFTokens.jsonName].isArray())
111 {
112 std::uint32_t tokenCount =
113 state[i][sfNFTokens.jsonName].size();
114 std::cout << tokenCount << " NFtokens in page "
115 << state[i][jss::index].asString() << std::endl;
116
117 if (vol == noisy)
118 {
119 std::cout << state[i].toStyledString() << std::endl;
120 }
121 else
122 {
123 if (tokenCount > 0)
124 std::cout << "first: "
125 << state[i][sfNFTokens.jsonName][0u]
126 .toStyledString()
127 << std::endl;
128 if (tokenCount > 1)
129 std::cout
130 << "last: "
131 << state[i][sfNFTokens.jsonName][tokenCount - 1]
132 .toStyledString()
133 << std::endl;
134 }
135 }
136 }
137 }
138 }
139
140 void
141 testBurnRandom(FeatureBitset features)
142 {
143 // Exercise a number of conditions with NFT burning.
144 testcase("Burn random");
145
146 using namespace test::jtx;
147
148 Env env{*this, features};
149
150 // Keep information associated with each account together.
151 struct AcctStat
152 {
153 test::jtx::Account const acct;
154 std::vector<uint256> nfts;
155
156 AcctStat(char const* name) : acct(name)
157 {
158 }
159
160 operator test::jtx::Account() const
161 {
162 return acct;
163 }
164 };
165 AcctStat alice{"alice"};
166 AcctStat becky{"becky"};
167 AcctStat minter{"minter"};
168
169 env.fund(XRP(10000), alice, becky, minter);
170 env.close();
171
172 // Both alice and minter mint nfts in case that makes any difference.
173 env(token::setMinter(alice, minter));
174 env.close();
175
176 // Create enough NFTs that alice, becky, and minter can all have
177 // at least three pages of NFTs. This will cause more activity in
178 // the page coalescing code. If we make 210 NFTs in total, we can
179 // have alice and minter each make 105. That will allow us to
180 // distribute 70 NFTs to our three participants.
181 //
182 // Give each NFT a pseudo-randomly chosen fee so the NFTs are
183 // distributed pseudo-randomly through the pages. This should
184 // prevent alice's and minter's NFTs from clustering together
185 // in becky's directory.
186 //
187 // Use a default initialized mersenne_twister because we want the
188 // effect of random numbers, but we want the test to run the same
189 // way each time.
190 std::mt19937 engine;
191 std::uniform_int_distribution<std::uint16_t> feeDist(
192 decltype(maxTransferFee){}, maxTransferFee);
193
194 alice.nfts.reserve(105);
195 while (alice.nfts.size() < 105)
196 {
197 std::uint16_t const xferFee = feeDist(engine);
198 alice.nfts.push_back(token::getNextID(
199 env, alice, 0u, tfTransferable | tfBurnable, xferFee));
200 env(token::mint(alice),
201 txflags(tfTransferable | tfBurnable),
202 token::xferFee(xferFee));
203 env.close();
204 }
205
206 minter.nfts.reserve(105);
207 while (minter.nfts.size() < 105)
208 {
209 std::uint16_t const xferFee = feeDist(engine);
210 minter.nfts.push_back(token::getNextID(
211 env, alice, 0u, tfTransferable | tfBurnable, xferFee));
212 env(token::mint(minter),
213 txflags(tfTransferable | tfBurnable),
214 token::xferFee(xferFee),
215 token::issuer(alice));
216 env.close();
217 }
218
219 // All of the NFTs are now minted. Transfer 35 each over to becky so
220 // we end up with 70 NFTs in each account.
221 becky.nfts.reserve(70);
222 {
223 auto aliceIter = alice.nfts.begin();
224 auto minterIter = minter.nfts.begin();
225 while (becky.nfts.size() < 70)
226 {
227 // We do the same work on alice and minter, so make a lambda.
228 auto xferNFT = [&env, &becky](AcctStat& acct, auto& iter) {
229 uint256 offerIndex =
230 keylet::nftoffer(acct.acct, env.seq(acct.acct)).key;
231 env(token::createOffer(acct, *iter, XRP(0)),
232 txflags(tfSellNFToken));
233 env.close();
234 env(token::acceptSellOffer(becky, offerIndex));
235 env.close();
236 becky.nfts.push_back(*iter);
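// erase() returns the iterator to the element after the one removed;
// advancing two more past it hands every third NFT to becky, so each
// 105-entry vector gives up 35 NFTs and keeps 70.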
237 iter = acct.nfts.erase(iter);
238 iter += 2;
239 };
240 xferNFT(alice, aliceIter);
241 xferNFT(minter, minterIter);
242 }
243 BEAST_EXPECT(aliceIter == alice.nfts.end());
244 BEAST_EXPECT(minterIter == minter.nfts.end());
245 }
246
247 // Now all three participants have 70 NFTs.
248 BEAST_EXPECT(nftCount(env, alice.acct) == 70);
249 BEAST_EXPECT(nftCount(env, becky.acct) == 70);
250 BEAST_EXPECT(nftCount(env, minter.acct) == 70);
251
252 // Next we'll create offers for all of those NFTs. This calls for
253 // another lambda.
254 auto addOffers =
255 [&env](AcctStat& owner, AcctStat& other1, AcctStat& other2) {
256 for (uint256 nft : owner.nfts)
257 {
258 // Create sell offers for owner.
259 env(token::createOffer(owner, nft, drops(1)),
260 txflags(tfSellNFToken),
261 token::destination(other1));
262 env(token::createOffer(owner, nft, drops(1)),
263 txflags(tfSellNFToken),
264 token::destination(other2));
265 env.close();
266
267 // Create buy offers for other1 and other2.
268 env(token::createOffer(other1, nft, drops(1)),
269 token::owner(owner));
270 env(token::createOffer(other2, nft, drops(1)),
271 token::owner(owner));
272 env.close();
273
274 env(token::createOffer(other2, nft, drops(2)),
275 token::owner(owner));
276 env(token::createOffer(other1, nft, drops(2)),
277 token::owner(owner));
278 env.close();
279 }
280 };
281 addOffers(alice, becky, minter);
282 addOffers(becky, minter, alice);
283 addOffers(minter, alice, becky);
284 BEAST_EXPECT(ownerCount(env, alice) == 424);
285 BEAST_EXPECT(ownerCount(env, becky) == 424);
286 BEAST_EXPECT(ownerCount(env, minter) == 424);
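// Sanity check on the 424: each account owns 70 NFTs and each NFT now
// carries six offers (two sells by its owner plus four buys from the
// other two accounts), i.e. 420 offer objects per account; the other
// four objects are the NFTokenPages holding those 70 NFTs.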
287
288 // Now each of the 210 NFTs has six offers associated with it.
289 // Randomly select an NFT out of the pile and burn it. Continue
290 // the process until all NFTs are burned.
291 AcctStat* const stats[3] = {&alice, &becky, &minter};
292 std::uniform_int_distribution<std::size_t> acctDist(0u, 2u);
293 std::uniform_int_distribution<std::size_t> mintDist(0u, 1u);
294
295 while (stats[0]->nfts.size() > 0 || stats[1]->nfts.size() > 0 ||
296 stats[2]->nfts.size() > 0)
297 {
298 // Pick an account to burn an nft. If there are no nfts left
299 // pick again.
300 AcctStat& owner = *(stats[acctDist(engine)]);
301 if (owner.nfts.empty())
302 continue;
303
304 // Pick one of the nfts.
305 std::uniform_int_distribution<std::size_t> nftDist(
306 0lu, owner.nfts.size() - 1);
307 auto nftIter = owner.nfts.begin() + nftDist(engine);
308 uint256 const nft = *nftIter;
309 owner.nfts.erase(nftIter);
310
311 // Decide which of the accounts should burn the nft. If the
312 // owner is becky then any of the three accounts can burn.
313 // Otherwise either alice or minter can burn.
314 AcctStat& burner = owner.acct == becky.acct
315 ? *(stats[acctDist(engine)])
316 : mintDist(engine) ? alice
317 : minter;
318
319 if (owner.acct == burner.acct)
320 env(token::burn(burner, nft));
321 else
322 env(token::burn(burner, nft), token::owner(owner));
323 env.close();
324
325 // Every time we burn an nft, the number of nfts they hold should
326 // match the number of nfts we think they hold.
327 BEAST_EXPECT(nftCount(env, alice.acct) == alice.nfts.size());
328 BEAST_EXPECT(nftCount(env, becky.acct) == becky.nfts.size());
329 BEAST_EXPECT(nftCount(env, minter.acct) == minter.nfts.size());
330 }
331 BEAST_EXPECT(nftCount(env, alice.acct) == 0);
332 BEAST_EXPECT(nftCount(env, becky.acct) == 0);
333 BEAST_EXPECT(nftCount(env, minter.acct) == 0);
334
335 // When all nfts are burned none of the accounts should have
336 // an ownerCount.
337 BEAST_EXPECT(ownerCount(env, alice) == 0);
338 BEAST_EXPECT(ownerCount(env, becky) == 0);
339 BEAST_EXPECT(ownerCount(env, minter) == 0);
340 }
341
342 void
343 testBurnSequential(FeatureBitset features)
344 {
345 // The earlier burn test randomizes which nft is burned. There are
346 // a couple of directory merging scenarios that can only be tested by
347 // inserting and deleting in an ordered fashion. We do that testing
348 // now.
349 testcase("Burn sequential");
350
351 using namespace test::jtx;
352
353 Account const alice{"alice"};
354
355 Env env{*this, features};
356 env.fund(XRP(1000), alice);
357
358 // A lambda that generates 96 nfts packed into three pages of 32 each.
359 // Returns a sorted vector of the NFTokenIDs packed into the pages.
360 auto genPackedTokens = [this, &env, &alice]() {
361 std::vector<uint256> nfts;
362 nfts.reserve(96);
363
364 // We want to create fully packed NFT pages. This is a little
365 // tricky since the system currently in place is inclined to
366 // assign consecutive tokens to only 16 entries per page.
367 //
368 // By manipulating the internal form of the taxon we can force
369 // creation of NFT pages that are completely full. This lambda
370 // tells us the taxon value we should pass in in order for the
371 // internal representation to match the passed in value.
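// (Per nft.h, cipheredTaxon scrambles the taxon with a value derived
// only from the mint sequence; since that scramble undoes itself for a
// fixed sequence, feeding the desired internal value through it gives
// the external value to pass to token::mint.)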
372 auto internalTaxon = [&env](
373 Account const& acct,
374 std::uint32_t taxon) -> std::uint32_t {
375 std::uint32_t tokenSeq =
376 env.le(acct)->at(~sfMintedNFTokens).value_or(0);
377
378 // If fixNFTokenRemint amendment is on, we must
379 // add FirstNFTokenSequence.
380 if (env.current()->rules().enabled(fixNFTokenRemint))
381 tokenSeq += env.le(acct)
382 ->at(~sfFirstNFTokenSequence)
383 .value_or(env.seq(acct));
384
385 return toUInt32(
386 nft::cipheredTaxon(tokenSeq, nft::toTaxon(taxon)));
387 };
388
389 for (std::uint32_t i = 0; i < 96; ++i)
390 {
391 // In order to fill the pages we use the taxon to break them
392 // into groups of 16 entries. By having the internal
393 // representation of the taxon go...
394 // 0, 3, 2, 5, 4, 7...
395 // in sets of 16 NFTs we can get each page to be fully
396 // populated.
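// Concretely: i in [0,16) -> 0, [16,32) -> 3, [32,48) -> 2,
// [48,64) -> 5, [64,80) -> 4, [80,96) -> 7.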
397 std::uint32_t const intTaxon = (i / 16) + (i & 0b10000 ? 2 : 0);
398 uint32_t const extTaxon = internalTaxon(alice, intTaxon);
399 nfts.push_back(token::getNextID(env, alice, extTaxon));
400 env(token::mint(alice, extTaxon));
401 env.close();
402 }
403
404 // Sort the NFTs so they are listed in storage order, not
405 // creation order.
406 std::sort(nfts.begin(), nfts.end());
407
408 // Verify that the ledger does indeed contain exactly three pages
409 // of NFTs with 32 entries in each page.
410 Json::Value jvParams;
411 jvParams[jss::ledger_index] = "current";
412 jvParams[jss::binary] = false;
413 {
414 Json::Value jrr =
415 env.rpc("json", "ledger_data", to_string(jvParams));
416
417 Json::Value& state = jrr[jss::result][jss::state];
418
419 int pageCount = 0;
420 for (Json::UInt i = 0; i < state.size(); ++i)
421 {
422 if (state[i].isMember(sfNFTokens.jsonName) &&
423 state[i][sfNFTokens.jsonName].isArray())
424 {
425 BEAST_EXPECT(
426 state[i][sfNFTokens.jsonName].size() == 32);
427 ++pageCount;
428 }
429 }
430 // If this check fails then the internal NFT directory logic
431 // has changed.
432 BEAST_EXPECT(pageCount == 3);
433 }
434 return nfts;
435 };
436 {
437 // Generate three packed pages. Then burn the tokens in order from
438 // first to last. This exercises specific cases where coalescing
439 // pages is not possible.
440 std::vector<uint256> nfts = genPackedTokens();
441 BEAST_EXPECT(nftCount(env, alice) == 96);
442 BEAST_EXPECT(ownerCount(env, alice) == 3);
443
444 for (uint256 const& nft : nfts)
445 {
446 env(token::burn(alice, {nft}));
447 env.close();
448 }
449 BEAST_EXPECT(nftCount(env, alice) == 0);
450 BEAST_EXPECT(ownerCount(env, alice) == 0);
451 }
452
453 // A lambda verifies that the ledger no longer contains any NFT pages.
454 auto checkNoTokenPages = [this, &env]() {
455 Json::Value jvParams;
456 jvParams[jss::ledger_index] = "current";
457 jvParams[jss::binary] = false;
458 {
459 Json::Value jrr =
460 env.rpc("json", "ledger_data", to_string(jvParams));
461
462 Json::Value& state = jrr[jss::result][jss::state];
463
464 for (Json::UInt i = 0; i < state.size(); ++i)
465 {
466 BEAST_EXPECT(!state[i].isMember(sfNFTokens.jsonName));
467 }
468 }
469 };
470 checkNoTokenPages();
471 {
472 // Generate three packed pages. Then burn the tokens in order from
473 // last to first. This exercises different specific cases where
474 // coalescing pages is not possible.
475 std::vector<uint256> nfts = genPackedTokens();
476 BEAST_EXPECT(nftCount(env, alice) == 96);
477 BEAST_EXPECT(ownerCount(env, alice) == 3);
478
479 // Verify that all three pages are present and remember the
480 // indexes.
481 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
482 if (!BEAST_EXPECT(lastNFTokenPage))
483 return;
484
485 uint256 const middleNFTokenPageIndex =
486 lastNFTokenPage->at(sfPreviousPageMin);
487 auto middleNFTokenPage = env.le(keylet::nftpage(
488 keylet::nftpage_min(alice), middleNFTokenPageIndex));
489 if (!BEAST_EXPECT(middleNFTokenPage))
490 return;
491
492 uint256 const firstNFTokenPageIndex =
493 middleNFTokenPage->at(sfPreviousPageMin);
494 auto firstNFTokenPage = env.le(keylet::nftpage(
495 keylet::nftpage_min(alice), firstNFTokenPageIndex));
496 if (!BEAST_EXPECT(firstNFTokenPage))
497 return;
498
499 // Burn almost all the tokens in the very last page.
500 for (int i = 0; i < 31; ++i)
501 {
502 env(token::burn(alice, {nfts.back()}));
503 nfts.pop_back();
504 env.close();
505 }
506
507 // Verify that the last page is still present and contains just one
508 // NFT.
509 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
510 if (!BEAST_EXPECT(lastNFTokenPage))
511 return;
512
513 BEAST_EXPECT(
514 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 1);
515 BEAST_EXPECT(lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
516 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
517
518 // Delete the last token from the last page.
519 env(token::burn(alice, {nfts.back()}));
520 nfts.pop_back();
521 env.close();
522
523 if (features[fixNFTokenPageLinks])
524 {
525 // Removing the last token from the last page deletes the
526 // _previous_ page because we need to preserve that last
527 // page as an anchor. The contents of the next-to-last page
528 // are moved into the last page.
529 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
530 BEAST_EXPECT(lastNFTokenPage);
531 BEAST_EXPECT(
532 lastNFTokenPage->at(~sfPreviousPageMin) ==
533 firstNFTokenPageIndex);
534 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
535 BEAST_EXPECT(
536 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
537
538 // The "middle" page should be gone.
539 middleNFTokenPage = env.le(keylet::nftpage(
540 keylet::nftpage_min(alice), middleNFTokenPageIndex));
541 BEAST_EXPECT(!middleNFTokenPage);
542
543 // The "first" page should still be present and linked to
544 // the last page.
545 firstNFTokenPage = env.le(keylet::nftpage(
546 keylet::nftpage_min(alice), firstNFTokenPageIndex));
547 BEAST_EXPECT(firstNFTokenPage);
548 BEAST_EXPECT(
549 !firstNFTokenPage->isFieldPresent(sfPreviousPageMin));
550 BEAST_EXPECT(
551 firstNFTokenPage->at(~sfNextPageMin) ==
552 lastNFTokenPage->key());
553 BEAST_EXPECT(
554 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
555 }
556 else
557 {
558 // Removing the last token from the last page deletes the last
559 // page. This is a bug. The contents of the next-to-last page
560 // should have been moved into the last page.
561 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
562 BEAST_EXPECT(!lastNFTokenPage);
563
564 // The "middle" page is still present, but has lost the
565 // NextPageMin field.
566 middleNFTokenPage = env.le(keylet::nftpage(
567 keylet::nftpage_min(alice), middleNFTokenPageIndex));
568 if (!BEAST_EXPECT(middleNFTokenPage))
569 return;
570 BEAST_EXPECT(
571 middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
572 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
573 }
574
575 // Delete the rest of the NFTokens.
576 while (!nfts.empty())
577 {
578 env(token::burn(alice, {nfts.back()}));
579 nfts.pop_back();
580 env.close();
581 }
582 BEAST_EXPECT(nftCount(env, alice) == 0);
583 BEAST_EXPECT(ownerCount(env, alice) == 0);
584 }
585 checkNoTokenPages();
586 {
587 // Generate three packed pages. Then burn all tokens in the middle
588 // page. This exercises the case where a page is removed between
589 // two fully populated pages.
590 std::vector<uint256> nfts = genPackedTokens();
591 BEAST_EXPECT(nftCount(env, alice) == 96);
592 BEAST_EXPECT(ownerCount(env, alice) == 3);
593
594 // Verify that all three pages are present and remember the
595 // indexes.
596 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
597 if (!BEAST_EXPECT(lastNFTokenPage))
598 return;
599
600 uint256 const middleNFTokenPageIndex =
601 lastNFTokenPage->at(sfPreviousPageMin);
602 auto middleNFTokenPage = env.le(keylet::nftpage(
603 keylet::nftpage_min(alice), middleNFTokenPageIndex));
604 if (!BEAST_EXPECT(middleNFTokenPage))
605 return;
606
607 uint256 const firstNFTokenPageIndex =
608 middleNFTokenPage->at(sfPreviousPageMin);
609 auto firstNFTokenPage = env.le(keylet::nftpage(
610 keylet::nftpage_min(alice), firstNFTokenPageIndex));
611 if (!BEAST_EXPECT(firstNFTokenPage))
612 return;
613
614 for (std::size_t i = 32; i < 64; ++i)
615 {
616 env(token::burn(alice, nfts[i]));
617 env.close();
618 }
619 nfts.erase(nfts.begin() + 32, nfts.begin() + 64);
620 BEAST_EXPECT(nftCount(env, alice) == 64);
621 BEAST_EXPECT(ownerCount(env, alice) == 2);
622
623 // Verify that middle page is gone and the links in the two
624 // remaining pages are correct.
625 middleNFTokenPage = env.le(keylet::nftpage(
626 keylet::nftpage_min(alice), middleNFTokenPageIndex));
627 BEAST_EXPECT(!middleNFTokenPage);
628
629 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
630 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
631 BEAST_EXPECT(
632 lastNFTokenPage->getFieldH256(sfPreviousPageMin) ==
633 firstNFTokenPageIndex);
634
635 firstNFTokenPage = env.le(keylet::nftpage(
636 keylet::nftpage_min(alice), firstNFTokenPageIndex));
637 BEAST_EXPECT(
638 firstNFTokenPage->getFieldH256(sfNextPageMin) ==
639 keylet::nftpage_max(alice).key);
640 BEAST_EXPECT(!firstNFTokenPage->isFieldPresent(sfPreviousPageMin));
641
642 // Burn the remaining nfts.
643 for (uint256 const& nft : nfts)
644 {
645 env(token::burn(alice, {nft}));
646 env.close();
647 }
648 BEAST_EXPECT(nftCount(env, alice) == 0);
649 BEAST_EXPECT(ownerCount(env, alice) == 0);
650 }
651 checkNoTokenPages();
652 {
653 // Generate three packed pages. Then burn all the tokens in the
654 // first page followed by all the tokens in the last page. This
655 // exercises a specific case where coalescing pages is not possible.
656 std::vector<uint256> nfts = genPackedTokens();
657 BEAST_EXPECT(nftCount(env, alice) == 96);
658 BEAST_EXPECT(ownerCount(env, alice) == 3);
659
660 // Verify that all three pages are present and remember the
661 // indexes.
662 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
663 if (!BEAST_EXPECT(lastNFTokenPage))
664 return;
665
666 uint256 const middleNFTokenPageIndex =
667 lastNFTokenPage->at(sfPreviousPageMin);
668 auto middleNFTokenPage = env.le(keylet::nftpage(
669 keylet::nftpage_min(alice), middleNFTokenPageIndex));
670 if (!BEAST_EXPECT(middleNFTokenPage))
671 return;
672
673 uint256 const firstNFTokenPageIndex =
674 middleNFTokenPage->at(sfPreviousPageMin);
675 auto firstNFTokenPage = env.le(keylet::nftpage(
676 keylet::nftpage_min(alice), firstNFTokenPageIndex));
677 if (!BEAST_EXPECT(firstNFTokenPage))
678 return;
679
680 // Burn all the tokens in the first page.
681 std::reverse(nfts.begin(), nfts.end());
682 for (int i = 0; i < 32; ++i)
683 {
684 env(token::burn(alice, {nfts.back()}));
685 nfts.pop_back();
686 env.close();
687 }
688
689 // Verify the first page is gone.
690 firstNFTokenPage = env.le(keylet::nftpage(
691 keylet::nftpage_min(alice), firstNFTokenPageIndex));
692 BEAST_EXPECT(!firstNFTokenPage);
693
694 // Check the links in the other two pages.
695 middleNFTokenPage = env.le(keylet::nftpage(
696 keylet::nftpage_min(alice), middleNFTokenPageIndex));
697 if (!BEAST_EXPECT(middleNFTokenPage))
698 return;
699 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
700 BEAST_EXPECT(middleNFTokenPage->isFieldPresent(sfNextPageMin));
701
702 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
703 if (!BEAST_EXPECT(lastNFTokenPage))
704 return;
705 BEAST_EXPECT(lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
706 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
707
708 // Burn all the tokens in the last page.
709 std::reverse(nfts.begin(), nfts.end());
710 for (int i = 0; i < 32; ++i)
711 {
712 env(token::burn(alice, {nfts.back()}));
713 nfts.pop_back();
714 env.close();
715 }
716
717 if (features[fixNFTokenPageLinks])
718 {
719 // Removing the last token from the last page deletes the
720 // _previous_ page because we need to preserve that last
721 // page as an anchor. The contents of the next-to-last page
722 // are moved into the last page.
723 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
724 BEAST_EXPECT(lastNFTokenPage);
725 BEAST_EXPECT(
726 !lastNFTokenPage->isFieldPresent(sfPreviousPageMin));
727 BEAST_EXPECT(!lastNFTokenPage->isFieldPresent(sfNextPageMin));
728 BEAST_EXPECT(
729 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 32);
730
731 // The "middle" page should be gone.
732 middleNFTokenPage = env.le(keylet::nftpage(
733 keylet::nftpage_min(alice), middleNFTokenPageIndex));
734 BEAST_EXPECT(!middleNFTokenPage);
735
736 // The "first" page should still be gone.
737 firstNFTokenPage = env.le(keylet::nftpage(
738 keylet::nftpage_min(alice), firstNFTokenPageIndex));
739 BEAST_EXPECT(!firstNFTokenPage);
740 }
741 else
742 {
743 // Removing the last token from the last page deletes the last
744 // page. This is a bug. The contents of the next-to-last page
745 // should have been moved into the last page.
746 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
747 BEAST_EXPECT(!lastNFTokenPage);
748
749 // The "middle" page is still present, but has lost the
750 // NextPageMin field.
751 middleNFTokenPage = env.le(keylet::nftpage(
752 keylet::nftpage_min(alice), middleNFTokenPageIndex));
753 if (!BEAST_EXPECT(middleNFTokenPage))
754 return;
755 BEAST_EXPECT(
756 !middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
757 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
758 }
759
760 // Delete the rest of the NFTokens.
761 while (!nfts.empty())
762 {
763 env(token::burn(alice, {nfts.back()}));
764 nfts.pop_back();
765 env.close();
766 }
767 BEAST_EXPECT(nftCount(env, alice) == 0);
768 BEAST_EXPECT(ownerCount(env, alice) == 0);
769 }
770 checkNoTokenPages();
771
772 if (features[fixNFTokenPageLinks])
773 {
774 // Exercise the invariant that the final NFTokenPage of a directory
775 // may not be removed if there are NFTokens in other pages of the
776 // directory.
777 //
778 // We're going to fire an Invariant failure that is difficult to
779 // cause. We do it here because the tools are here.
780 //
781 // See Invariants_test.cpp for examples of other invariant tests
782 // that this one is modeled after.
783
784 // Generate three closely packed NFTokenPages.
785 std::vector<uint256> nfts = genPackedTokens();
786 BEAST_EXPECT(nftCount(env, alice) == 96);
787 BEAST_EXPECT(ownerCount(env, alice) == 3);
788
789 // Burn almost all the tokens in the very last page.
790 for (int i = 0; i < 31; ++i)
791 {
792 env(token::burn(alice, {nfts.back()}));
793 nfts.pop_back();
794 env.close();
795 }
796 {
797 // Create an ApplyContext we can use to run the invariant
798 // checks. These variables must outlive the ApplyContext.
799 OpenView ov{*env.current()};
800 STTx tx{ttACCOUNT_SET, [](STObject&) {}};
801 test::StreamSink sink;
802 beast::Journal jlog{sink};
803 ApplyContext ac{
804 env.app(),
805 ov,
806 tx,
807 tesSUCCESS,
808 env.current()->fees().base,
809 tapNONE,
810 jlog};
811
812 // Verify that the last page is present and contains one NFT.
813 auto lastNFTokenPage =
814 ac.view().peek(keylet::nftpage_max(alice));
815 if (!BEAST_EXPECT(lastNFTokenPage))
816 return;
817 BEAST_EXPECT(
818 lastNFTokenPage->getFieldArray(sfNFTokens).size() == 1);
819
820 // Erase that last page.
821 ac.view().erase(lastNFTokenPage);
822
823 // Exercise the invariant.
824 TER terActual = tesSUCCESS;
825 for (TER const& terExpect :
826 {TER(tecINVARIANT_FAILED), TER(tefINVARIANT_FAILED)})
827 {
828 terActual = ac.checkInvariants(terActual, XRPAmount{});
829 BEAST_EXPECT(terExpect == terActual);
830 BEAST_EXPECT(
831 sink.messages().str().starts_with("Invariant failed:"));
832 // uncomment to log the invariant failure message
833 // log << " --> " << sink.messages().str() << std::endl;
834 BEAST_EXPECT(
835 sink.messages().str().find(
836 "Last NFT page deleted with non-empty directory") !=
837 std::string::npos);
838 }
839 }
840 {
841 // Create an ApplyContext we can use to run the invariant
842 // checks. These variables must outlive the ApplyContext.
843 OpenView ov{*env.current()};
844 STTx tx{ttACCOUNT_SET, [](STObject&) {}};
845 test::StreamSink sink;
846 beast::Journal jlog{sink};
847 ApplyContext ac{
848 env.app(),
849 ov,
850 tx,
851 tesSUCCESS,
852 env.current()->fees().base,
853 tapNONE,
854 jlog};
855
856 // Verify that the middle page is present.
857 auto lastNFTokenPage =
858 ac.view().peek(keylet::nftpage_max(alice));
859 auto middleNFTokenPage = ac.view().peek(keylet::nftpage(
860 keylet::nftpage_min(alice),
861 lastNFTokenPage->getFieldH256(sfPreviousPageMin)));
862 BEAST_EXPECT(middleNFTokenPage);
863
864 // Remove the NextMinPage link from the middle page to fire
865 // the invariant.
866 middleNFTokenPage->makeFieldAbsent(sfNextPageMin);
867 ac.view().update(middleNFTokenPage);
868
869 // Exercise the invariant.
870 TER terActual = tesSUCCESS;
871 for (TER const& terExpect :
872 {TER(tecINVARIANT_FAILED), TER(tefINVARIANT_FAILED)})
873 {
874 terActual = ac.checkInvariants(terActual, XRPAmount{});
875 BEAST_EXPECT(terExpect == terActual);
876 BEAST_EXPECT(
877 sink.messages().str().starts_with("Invariant failed:"));
878 // uncomment to log the invariant failure message
879 // log << " --> " << sink.messages().str() << std::endl;
880 BEAST_EXPECT(
881 sink.messages().str().find("Lost NextMinPage link") !=
882 std::string::npos);
883 }
884 }
885 }
886 }
887
888 void
889 testBurnTooManyOffers(FeatureBitset features)
890 {
891 // Look at the case where too many offers prevent burning a token.
892 testcase("Burn too many offers");
893
894 using namespace test::jtx;
895
896 // Test that an NFT cannot be burned while it has more than 500
897 // offers, before fixNonFungibleTokensV1_2 goes live
898 if (!features[fixNonFungibleTokensV1_2])
899 {
900 Env env{*this, features};
901
902 Account const alice("alice");
903 Account const becky("becky");
904 env.fund(XRP(1000), alice, becky);
905 env.close();
906
907 // We structure the test to try and maximize the metadata produced.
908 // This verifies that we don't create too much metadata during a
909 // maximal burn operation.
910 //
911 // 1. alice mints an nft with a full-sized URI.
912 // 2. We create 500 new accounts, each of which creates an offer
913 // for alice's nft.
914 // 3. becky creates one more offer for alice's NFT
915 // 4. Attempt to burn the nft which fails because there are too
916 // many offers.
917 // 5. Cancel becky's offer and the nft should become burnable.
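// (Before the fix, the burn fails with tefTOO_BIG whenever the token
// still has more than maxDeletableTokenOfferEntries (500) offers in
// its directories; once the count is back down to 500 the burn goes
// through, which is what steps 4 and 5 demonstrate.)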
918 uint256 const nftokenID =
919 token::getNextID(env, alice, 0, tfTransferable);
920 env(token::mint(alice, 0),
921 token::uri(std::string(maxTokenURILength, 'u')),
922 txflags(tfTransferable));
923 env.close();
924
925 std::vector<uint256> offerIndexes;
926 offerIndexes.reserve(maxTokenOfferCancelCount);
927 for (std::uint32_t i = 0; i < maxTokenOfferCancelCount; ++i)
928 {
929 Account const acct(std::string("acct") + std::to_string(i));
930 env.fund(XRP(1000), acct);
931 env.close();
932
933 offerIndexes.push_back(
934 keylet::nftoffer(acct, env.seq(acct)).key);
935 env(token::createOffer(acct, nftokenID, drops(1)),
936 token::owner(alice));
937 env.close();
938 }
939
940 // Verify all offers are present in the ledger.
941 for (uint256 const& offerIndex : offerIndexes)
942 {
943 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
944 }
945
946 // Create one too many offers.
947 uint256 const beckyOfferIndex =
948 keylet::nftoffer(becky, env.seq(becky)).key;
949 env(token::createOffer(becky, nftokenID, drops(1)),
950 token::owner(alice));
951
952 // Attempt to burn the nft which should fail.
953 env(token::burn(alice, nftokenID), ter(tefTOO_BIG));
954
955 // Close enough ledgers that the burn transaction is no longer
956 // retried.
957 for (int i = 0; i < 10; ++i)
958 env.close();
959
960 // Cancel becky's offer, but alice adds a sell offer. The token
961 // should still not be burnable.
962 env(token::cancelOffer(becky, {beckyOfferIndex}));
963 env.close();
964
965 uint256 const aliceOfferIndex =
966 keylet::nftoffer(alice, env.seq(alice)).key;
967 env(token::createOffer(alice, nftokenID, drops(1)),
968 txflags(tfSellNFToken));
969 env.close();
970
971 env(token::burn(alice, nftokenID), ter(tefTOO_BIG));
972 env.close();
973
974 // Cancel alice's sell offer. Now the token should be burnable.
975 env(token::cancelOffer(alice, {aliceOfferIndex}));
976 env.close();
977
978 env(token::burn(alice, nftokenID));
979 env.close();
980
981 // Burning the token should remove all the offers from the ledger.
982 for (uint256 const& offerIndex : offerIndexes)
983 {
984 BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
985 }
986
987 // Both alice and becky should have ownerCounts of zero.
988 BEAST_EXPECT(ownerCount(env, alice) == 0);
989 BEAST_EXPECT(ownerCount(env, becky) == 0);
990 }
991
992 // Test that up to 499 buy/sell offers will be removed when NFT is
993 // burned after fixNonFungibleTokensV1_2 is enabled. This is to test
994 // that we can successfully remove all offers if the number of offers is
995 // less than 500.
996 if (features[fixNonFungibleTokensV1_2])
997 {
998 Env env{*this, features};
999
1000 Account const alice("alice");
1001 Account const becky("becky");
1002 env.fund(XRP(100000), alice, becky);
1003 env.close();
1004
1005 // alice creates 498 sell offers and becky creates one buy offer.
1006 // When the token is burned, 498 sell offers and 1 buy offer are
1007 // removed. In total, 499 offers are removed
1008 std::vector<uint256> offerIndexes;
1009 auto const nftokenID = createNftAndOffers(
1010 env, alice, offerIndexes, maxDeletableTokenOfferEntries - 2);
1011
1012 // Verify all sell offers are present in the ledger.
1013 for (uint256 const& offerIndex : offerIndexes)
1014 {
1015 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
1016 }
1017
1018 // Becky creates a buy offer
1019 uint256 const beckyOfferIndex =
1020 keylet::nftoffer(becky, env.seq(becky)).key;
1021 env(token::createOffer(becky, nftokenID, drops(1)),
1022 token::owner(alice));
1023 env.close();
1024
1025 // Burn the token
1026 env(token::burn(alice, nftokenID));
1027 env.close();
1028
1029 // Burning the token should remove all 498 sell offers
1030 // that alice created
1031 for (uint256 const& offerIndex : offerIndexes)
1032 {
1033 BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
1034 }
1035
1036 // Burning the token should also remove the one buy offer
1037 // that becky created
1038 BEAST_EXPECT(!env.le(keylet::nftoffer(beckyOfferIndex)));
1039
1040 // alice and becky should have ownerCounts of zero
1041 BEAST_EXPECT(ownerCount(env, alice) == 0);
1042 BEAST_EXPECT(ownerCount(env, becky) == 0);
1043 }
1044
1045 // Test that up to 500 sell offers are removed when an NFT is burned
1046 // after fixNonFungibleTokensV1_2 is enabled
1047 if (features[fixNonFungibleTokensV1_2])
1048 {
1049 Env env{*this, features};
1050
1051 Account const alice("alice");
1052 Account const becky("becky");
1053 env.fund(XRP(100000), alice, becky);
1054 env.close();
1055
1056 // alice creates 501 sell offers for the token
1057 // After we burn the token, 500 of the sell offers should be
1058 // removed, and one is left over
1059 std::vector<uint256> offerIndexes;
1060 auto const nftokenID = createNftAndOffers(
1061 env, alice, offerIndexes, maxDeletableTokenOfferEntries + 1);
1062
1063 // Verify all sell offers are present in the ledger.
1064 for (uint256 const& offerIndex : offerIndexes)
1065 {
1066 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
1067 }
1068
1069 // Burn the token
1070 env(token::burn(alice, nftokenID));
1071 env.close();
1072
1073 uint32_t offerDeletedCount = 0;
1074 // Count the number of sell offers that have been deleted
1075 for (uint256 const& offerIndex : offerIndexes)
1076 {
1077 if (!env.le(keylet::nftoffer(offerIndex)))
1078 offerDeletedCount++;
1079 }
1080
1081 BEAST_EXPECT(offerIndexes.size() == maxTokenOfferCancelCount + 1);
1082
1083 // 500 sell offers should be removed
1084 BEAST_EXPECT(offerDeletedCount == maxTokenOfferCancelCount);
1085
1086 // alice should have an ownerCount of one for the orphaned sell offer
1087 BEAST_EXPECT(ownerCount(env, alice) == 1);
1088 }
1089
1090 // Test that up to 500 buy/sell offers are removed when NFT is burned
1091 // after fixNonFungibleTokensV1_2 is enabled
1092 if (features[fixNonFungibleTokensV1_2])
1093 {
1094 Env env{*this, features};
1095
1096 Account const alice("alice");
1097 Account const becky("becky");
1098 env.fund(XRP(100000), alice, becky);
1099 env.close();
1100
1101 // alice creates 499 sell offers and becky creates 2 buy offers.
1102 // When the token is burned, 499 sell offers and 1 buy offer
1103 // are removed.
1104 // In total, 500 offers are removed
1105 std::vector<uint256> offerIndexes;
1106 auto const nftokenID = createNftAndOffers(
1107 env, alice, offerIndexes, maxDeletableTokenOfferEntries - 1);
1108
1109 // Verify all sell offers are present in the ledger.
1110 for (uint256 const& offerIndex : offerIndexes)
1111 {
1112 BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
1113 }
1114
1115 // becky creates 2 buy offers
1116 env(token::createOffer(becky, nftokenID, drops(1)),
1117 token::owner(alice));
1118 env.close();
1119 env(token::createOffer(becky, nftokenID, drops(1)),
1120 token::owner(alice));
1121 env.close();
1122
1123 // Burn the token
1124 env(token::burn(alice, nftokenID));
1125 env.close();
1126
1127 // Burning the token should remove all 499 sell offers from the
1128 // ledger.
1129 for (uint256 const& offerIndex : offerIndexes)
1130 {
1131 BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
1132 }
1133
1134 // alice should have ownerCount of zero because all her
1135 // sell offers have been deleted
1136 BEAST_EXPECT(ownerCount(env, alice) == 0);
1137
1138 // becky has ownerCount of one due to an orphaned buy offer
1139 BEAST_EXPECT(ownerCount(env, becky) == 1);
1140 }
1141 }
1142
1143 void
1144 exerciseBrokenLinks(FeatureBitset features)
1145 {
1146 // Amendment fixNFTokenPageLinks prevents the breakage we want
1147 // to observe.
1148 if (features[fixNFTokenPageLinks])
1149 return;
1150
1151 // There are a couple of directory merging scenarios that can only
1152 // be tested by inserting and deleting in an ordered fashion. We do
1153 // that testing now.
1154 testcase("Exercise broken links");
1155
1156 using namespace test::jtx;
1157
1158 Account const alice{"alice"};
1159 Account const minter{"minter"};
1160
1161 Env env{*this, features};
1162 env.fund(XRP(1000), alice, minter);
1163
1164 // A lambda that generates 96 nfts packed into three pages of 32 each.
1165 // Returns a sorted vector of the NFTokenIDs packed into the pages.
1166 auto genPackedTokens = [this, &env, &alice, &minter]() {
1167 std::vector<uint256> nfts;
1168 nfts.reserve(96);
1169
1170 // We want to create fully packed NFT pages. This is a little
1171 // tricky since the system currently in place is inclined to
1172 // assign consecutive tokens to only 16 entries per page.
1173 //
1174 // By manipulating the internal form of the taxon we can force
1175 // creation of NFT pages that are completely full. This lambda
1176 // tells us the taxon value we should pass in in order for the
1177 // internal representation to match the passed in value.
1178 auto internalTaxon = [&env](
1179 Account const& acct,
1180 std::uint32_t taxon) -> std::uint32_t {
1181 std::uint32_t tokenSeq =
1182 env.le(acct)->at(~sfMintedNFTokens).value_or(0);
1183
1184 // If fixNFTokenRemint amendment is on, we must
1185 // add FirstNFTokenSequence.
1186 if (env.current()->rules().enabled(fixNFTokenRemint))
1187 tokenSeq += env.le(acct)
1188 ->at(~sfFirstNFTokenSequence)
1189 .value_or(env.seq(acct));
1190
1191 return toUInt32(
1192 nft::cipheredTaxon(tokenSeq, nft::toTaxon(taxon)));
1193 };
1194
1195 for (std::uint32_t i = 0; i < 96; ++i)
1196 {
1197 // In order to fill the pages we use the taxon to break them
1198 // into groups of 16 entries. By having the internal
1199 // representation of the taxon go...
1200 // 0, 3, 2, 5, 4, 7...
1201 // in sets of 16 NFTs we can get each page to be fully
1202 // populated.
1203 std::uint32_t const intTaxon = (i / 16) + (i & 0b10000 ? 2 : 0);
1204 uint32_t const extTaxon = internalTaxon(minter, intTaxon);
1205 nfts.push_back(
1206 token::getNextID(env, minter, extTaxon, tfTransferable));
1207 env(token::mint(minter, extTaxon), txflags(tfTransferable));
1208 env.close();
1209
1210 // Minter creates an offer for the NFToken.
1211 uint256 const minterOfferIndex =
1212 keylet::nftoffer(minter, env.seq(minter)).key;
1213 env(token::createOffer(minter, nfts.back(), XRP(0)),
1214 txflags(tfSellNFToken));
1215 env.close();
1216
1217 // alice accepts the offer.
1218 env(token::acceptSellOffer(alice, minterOfferIndex));
1219 env.close();
1220 }
1221
1222 // Sort the NFTs so they are listed in storage order, not
1223 // creation order.
1224 std::sort(nfts.begin(), nfts.end());
1225
1226 // Verify that the ledger does indeed contain exactly three pages
1227 // of NFTs with 32 entries in each page.
1228 Json::Value jvParams;
1229 jvParams[jss::ledger_index] = "current";
1230 jvParams[jss::binary] = false;
1231 {
1232 Json::Value jrr =
1233 env.rpc("json", "ledger_data", to_string(jvParams));
1234
1235 Json::Value& state = jrr[jss::result][jss::state];
1236
1237 int pageCount = 0;
1238 for (Json::UInt i = 0; i < state.size(); ++i)
1239 {
1240 if (state[i].isMember(sfNFTokens.jsonName) &&
1241 state[i][sfNFTokens.jsonName].isArray())
1242 {
1243 BEAST_EXPECT(
1244 state[i][sfNFTokens.jsonName].size() == 32);
1245 ++pageCount;
1246 }
1247 }
1248 // If this check fails then the internal NFT directory logic
1249 // has changed.
1250 BEAST_EXPECT(pageCount == 3);
1251 }
1252 return nfts;
1253 };
1254
1255 // Generate three packed pages.
1256 std::vector<uint256> nfts = genPackedTokens();
1257 BEAST_EXPECT(nftCount(env, alice) == 96);
1258 BEAST_EXPECT(ownerCount(env, alice) == 3);
1259
1260 // Verify that all three pages are present and remember the
1261 // indexes.
1262 auto lastNFTokenPage = env.le(keylet::nftpage_max(alice));
1263 if (!BEAST_EXPECT(lastNFTokenPage))
1264 return;
1265
1266 uint256 const middleNFTokenPageIndex =
1267 lastNFTokenPage->at(sfPreviousPageMin);
1268 auto middleNFTokenPage = env.le(keylet::nftpage(
1269 keylet::nftpage_min(alice), middleNFTokenPageIndex));
1270 if (!BEAST_EXPECT(middleNFTokenPage))
1271 return;
1272
1273 uint256 const firstNFTokenPageIndex =
1274 middleNFTokenPage->at(sfPreviousPageMin);
1275 auto firstNFTokenPage = env.le(
1276 keylet::nftpage(keylet::nftpage_min(alice), firstNFTokenPageIndex));
1277 if (!BEAST_EXPECT(firstNFTokenPage))
1278 return;
1279
1280 // Sell all the tokens in the very last page back to minter.
1281 std::vector<uint256> last32NFTs;
1282 for (int i = 0; i < 32; ++i)
1283 {
1284 last32NFTs.push_back(nfts.back());
1285 nfts.pop_back();
1286
1287 // alice creates an offer for the NFToken.
1288 uint256 const aliceOfferIndex =
1289 keylet::nftoffer(alice, env.seq(alice)).key;
1290 env(token::createOffer(alice, last32NFTs.back(), XRP(0)),
1291 txflags(tfSellNFToken));
1292 env.close();
1293
1294 // minter accepts the offer.
1295 env(token::acceptSellOffer(minter, aliceOfferIndex));
1296 env.close();
1297 }
1298
1299 // Removing the last token from the last page deletes alice's last
1300 // page. This is a bug. The contents of the next-to-last page
1301 // should have been moved into the last page.
1302 lastNFTokenPage = env.le(keylet::nftpage_max(alice));
1303 BEAST_EXPECT(!lastNFTokenPage);
1304 BEAST_EXPECT(ownerCount(env, alice) == 2);
1305
1306 // The "middle" page is still present, but has lost the
1307 // NextPageMin field.
1308 middleNFTokenPage = env.le(keylet::nftpage(
1309 keylet::nftpage_min(alice), middleNFTokenPageIndex));
1310 if (!BEAST_EXPECT(middleNFTokenPage))
1311 return;
1312 BEAST_EXPECT(middleNFTokenPage->isFieldPresent(sfPreviousPageMin));
1313 BEAST_EXPECT(!middleNFTokenPage->isFieldPresent(sfNextPageMin));
1314
1315 // Attempt to delete alice's account, but fail because she owns NFTs.
1316 auto const acctDelFee{drops(env.current()->fees().increment)};
1317 env(acctdelete(alice, minter),
1318 fee(acctDelFee),
1319 ter(tecHAS_OBLIGATIONS));
1320 env.close();
1321
1322 // minter sells the last 32 NFTs back to alice.
1323 for (uint256 nftID : last32NFTs)
1324 {
1325 // minter creates an offer for the NFToken.
1326 uint256 const minterOfferIndex =
1327 keylet::nftoffer(minter, env.seq(minter)).key;
1328 env(token::createOffer(minter, nftID, XRP(0)),
1329 txflags(tfSellNFToken));
1330 env.close();
1331
1332 // alice accepts the offer.
1333 env(token::acceptSellOffer(alice, minterOfferIndex));
1334 env.close();
1335 }
1336 BEAST_EXPECT(ownerCount(env, alice) == 3); // Three NFTokenPages.
1337
1338 // alice has an NFToken directory with a broken link in the middle.
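// Both RPC responses below stop short at the middle page's missing
// NextPageMin link, so the re-created last page and its 32 NFTs never
// show up in either result.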
1339 {
1340 // Try the account_objects RPC command. Alice's account only shows
1341 // two NFT pages even though she owns more.
1342 Json::Value acctObjs = [&env, &alice]() {
1343 Json::Value params;
1344 params[jss::account] = alice.human();
1345 return env.rpc("json", "account_objects", to_string(params));
1346 }();
1347 BEAST_EXPECT(!acctObjs.isMember(jss::marker));
1348 BEAST_EXPECT(
1349 acctObjs[jss::result][jss::account_objects].size() == 2);
1350 }
1351 {
1352 // Try the account_nfts RPC command. It only returns 64 NFTs
1353 // although alice owns 96.
1354 Json::Value aliceNFTs = [&env, &alice]() {
1355 Json::Value params;
1356 params[jss::account] = alice.human();
1357 params[jss::type] = "state";
1358 return env.rpc("json", "account_nfts", to_string(params));
1359 }();
1360 BEAST_EXPECT(!aliceNFTs.isMember(jss::marker));
1361 BEAST_EXPECT(
1362 aliceNFTs[jss::result][jss::account_nfts].size() == 64);
1363 }
1364 }
1365
1366 void
1367 testWithFeats(FeatureBitset features)
1368 {
1369 testBurnRandom(features);
1370 testBurnSequential(features);
1371 testBurnTooManyOffers(features);
1372 exerciseBrokenLinks(features);
1373 }
1374
1375protected:
1376 void
1377 run(std::uint32_t instance, bool last = false)
1378 {
1379 using namespace test::jtx;
1380 static FeatureBitset const all{testable_amendments()};
1381 static FeatureBitset const fixNFTV1_2{fixNonFungibleTokensV1_2};
1382 static FeatureBitset const fixNFTDir{fixNFTokenDirV1};
1383 static FeatureBitset const fixNFTRemint{fixNFTokenRemint};
1384 static FeatureBitset const fixNFTPageLinks{fixNFTokenPageLinks};
1385
1386 static std::array<FeatureBitset, 5> const feats{
1387 all - fixNFTV1_2 - fixNFTDir - fixNFTRemint - fixNFTPageLinks,
1388 all - fixNFTV1_2 - fixNFTRemint - fixNFTPageLinks,
1389 all - fixNFTRemint - fixNFTPageLinks,
1390 all - fixNFTPageLinks,
1391 all,
1392 };
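// Instance 0 runs under the base suite's run() below; the derived
// suites at the bottom of the file cover the remaining instances, and
// the final one passes last = true so a skipped instance trips the
// BEAST_EXPECT below.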
1393
1394 if (BEAST_EXPECT(instance < feats.size()))
1395 {
1396 testWithFeats(feats[instance]);
1397 }
1398 BEAST_EXPECT(!last || instance == feats.size() - 1);
1399 }
1400
1401public:
1402 void
1403 run() override
1404 {
1405 run(0);
1406 }
1407};
1408
1409class NFTokenBurnWOfixFungTokens_test : public NFTokenBurnBaseUtil_test
1410{
1411public:
1412 void
1413 run() override
1414 {
1415 NFTokenBurnBaseUtil_test::run(1);
1416 }
1417};
1418
1419class NFTokenBurnWOFixTokenRemint_test : public NFTokenBurnBaseUtil_test
1420{
1421public:
1422 void
1423 run() override
1424 {
1425 NFTokenBurnBaseUtil_test::run(2);
1426 }
1427};
1428
1429class NFTokenBurnWOFixNFTPageLinks_test : public NFTokenBurnBaseUtil_test
1430{
1431public:
1432 void
1433 run() override
1434 {
1435 NFTokenBurnBaseUtil_test::run(3);
1436 }
1437};
1438
1439class NFTokenBurnAllFeatures_test : public NFTokenBurnBaseUtil_test
1440{
1441public:
1442 void
1443 run() override
1444 {
1445 NFTokenBurnBaseUtil_test::run(4, true);
1446 }
1447};
1448
1449BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnBaseUtil, app, ripple, 3);
1450BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOfixFungTokens, app, ripple, 3);
1451BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOFixTokenRemint, app, ripple, 3);
1452BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnWOFixNFTPageLinks, app, ripple, 3);
1453BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurnAllFeatures, app, ripple, 3);
1454
1455} // namespace ripple