mirror of
https://github.com/XRPLF/rippled.git
synced 2025-12-06 17:27:55 +00:00
Introduce NFT support (XLS020)
This commit is contained in:
committed by
manojsdoshi
parent
525aaecbca
commit
70779f6850
605
src/test/app/NFTokenBurn_test.cpp
Normal file
605
src/test/app/NFTokenBurn_test.cpp
Normal file
@@ -0,0 +1,605 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2021 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include <ripple/app/tx/impl/details/NFTokenUtils.h>
|
||||
#include <ripple/protocol/Feature.h>
|
||||
#include <ripple/protocol/jss.h>
|
||||
#include <test/jtx.h>
|
||||
|
||||
#include <random>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
class NFTokenBurn_test : public beast::unit_test::suite
|
||||
{
|
||||
// Helper function that returns the owner count of an account root.
|
||||
static std::uint32_t
|
||||
ownerCount(test::jtx::Env const& env, test::jtx::Account const& acct)
|
||||
{
|
||||
std::uint32_t ret{0};
|
||||
if (auto const sleAcct = env.le(acct))
|
||||
ret = sleAcct->at(sfOwnerCount);
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Helper function that returns the number of nfts owned by an account.
|
||||
static std::uint32_t
|
||||
nftCount(test::jtx::Env& env, test::jtx::Account const& acct)
|
||||
{
|
||||
Json::Value params;
|
||||
params[jss::account] = acct.human();
|
||||
params[jss::type] = "state";
|
||||
Json::Value nfts = env.rpc("json", "account_nfts", to_string(params));
|
||||
return nfts[jss::result][jss::account_nfts].size();
|
||||
};
|
||||
|
||||
    void
    testBurnRandom(FeatureBitset features)
    {
        // Exercise a number of conditions with NFT burning: random owners,
        // random burners, and NFTs scattered pseudo-randomly through the
        // owners' NFT page directories.
        testcase("Burn random");

        using namespace test::jtx;

        Env env{*this, features};

        // Keep information associated with each account together.
        struct AcctStat
        {
            test::jtx::Account const acct;
            // IDs of the NFTs this account currently owns, per the test's
            // own bookkeeping (cross-checked against the ledger below).
            std::vector<uint256> nfts;

            AcctStat(char const* name) : acct(name)
            {
            }

            // Implicit conversion so an AcctStat can be passed anywhere a
            // jtx::Account is expected.
            operator test::jtx::Account() const
            {
                return acct;
            }
        };
        AcctStat alice{"alice"};
        AcctStat becky{"becky"};
        AcctStat minter{"minter"};

        env.fund(XRP(10000), alice, becky, minter);
        env.close();

        // Both alice and minter mint nfts in case that makes any difference.
        env(token::setMinter(alice, minter));
        env.close();

        // Create enough NFTs that alice, becky, and minter can all have
        // at least three pages of NFTs. This will cause more activity in
        // the page coalescing code. If we make 210 NFTs in total, we can
        // have alice and minter each make 105. That will allow us to
        // distribute 70 NFTs to our three participants.
        //
        // Give each NFT a pseudo-randomly chosen fee so the NFTs are
        // distributed pseudo-randomly through the pages. This should
        // prevent alice's and minter's NFTs from clustering together
        // in becky's directory.
        //
        // Use a default initialized Mersenne twister because we want the
        // effect of random numbers, but we want the test to run the same
        // way each time.
        std::mt19937 engine;
        std::uniform_int_distribution<std::size_t> feeDist(
            decltype(maxTransferFee){}, maxTransferFee);

        // alice mints 105 NFTs on her own behalf.
        alice.nfts.reserve(105);
        while (alice.nfts.size() < 105)
        {
            std::uint16_t const xferFee = feeDist(engine);
            alice.nfts.push_back(token::getNextID(
                env, alice, 0u, tfTransferable | tfBurnable, xferFee));
            env(token::mint(alice),
                txflags(tfTransferable | tfBurnable),
                token::xferFee(xferFee));
            env.close();
        }

        // minter mints 105 NFTs with alice as the issuer.
        minter.nfts.reserve(105);
        while (minter.nfts.size() < 105)
        {
            std::uint16_t const xferFee = feeDist(engine);
            minter.nfts.push_back(token::getNextID(
                env, alice, 0u, tfTransferable | tfBurnable, xferFee));
            env(token::mint(minter),
                txflags(tfTransferable | tfBurnable),
                token::xferFee(xferFee),
                token::issuer(alice));
            env.close();
        }

        // All of the NFTs are now minted. Transfer 35 each over to becky so
        // we end up with 70 NFTs in each account.
        becky.nfts.reserve(70);
        {
            auto aliceIter = alice.nfts.begin();
            auto minterIter = minter.nfts.begin();
            while (becky.nfts.size() < 70)
            {
                // We do the same work on alice and minter, so make a lambda.
                auto xferNFT = [&env, &becky](AcctStat& acct, auto& iter) {
                    uint256 offerIndex =
                        keylet::nftoffer(acct.acct, env.seq(acct.acct)).key;
                    env(token::createOffer(acct, *iter, XRP(0)),
                        txflags(tfSellNFToken));
                    env.close();
                    env(token::acceptSellOffer(becky, offerIndex));
                    env.close();
                    becky.nfts.push_back(*iter);
                    // Remove the transferred NFT from the seller's list and
                    // skip the next two, so every third NFT is transferred.
                    // 105 / 3 == 35, so the iterator lands exactly on end().
                    iter = acct.nfts.erase(iter);
                    iter += 2;
                };
                xferNFT(alice, aliceIter);
                xferNFT(minter, minterIter);
            }
            BEAST_EXPECT(aliceIter == alice.nfts.end());
            BEAST_EXPECT(minterIter == minter.nfts.end());
        }

        // Now all three participants have 70 NFTs.
        BEAST_EXPECT(nftCount(env, alice.acct) == 70);
        BEAST_EXPECT(nftCount(env, becky.acct) == 70);
        BEAST_EXPECT(nftCount(env, minter.acct) == 70);

        // Next we'll create offers for all of those NFTs. This calls for
        // another lambda: each NFT gets two sell offers (one destined to
        // each of the other accounts) and four buy offers (two from each
        // of the other accounts).
        auto addOffers =
            [&env](AcctStat& owner, AcctStat& other1, AcctStat& other2) {
                for (uint256 nft : owner.nfts)
                {
                    // Create sell offers for owner.
                    env(token::createOffer(owner, nft, drops(1)),
                        txflags(tfSellNFToken),
                        token::destination(other1));
                    env(token::createOffer(owner, nft, drops(1)),
                        txflags(tfSellNFToken),
                        token::destination(other2));
                    env.close();

                    // Create buy offers for other1 and other2.
                    env(token::createOffer(other1, nft, drops(1)),
                        token::owner(owner));
                    env(token::createOffer(other2, nft, drops(1)),
                        token::owner(owner));
                    env.close();

                    env(token::createOffer(other2, nft, drops(2)),
                        token::owner(owner));
                    env(token::createOffer(other1, nft, drops(2)),
                        token::owner(owner));
                    env.close();
                }
            };
        addOffers(alice, becky, minter);
        addOffers(becky, minter, alice);
        addOffers(minter, alice, becky);
        BEAST_EXPECT(ownerCount(env, alice) == 424);
        BEAST_EXPECT(ownerCount(env, becky) == 424);
        BEAST_EXPECT(ownerCount(env, minter) == 424);

        // Now each of the 210 NFTs has six offers associated with it.
        // Randomly select an NFT out of the pile and burn it. Continue
        // the process until all NFTs are burned.
        AcctStat* const stats[3] = {&alice, &becky, &minter};
        std::uniform_int_distribution<std::size_t> acctDist(0, 2);
        std::uniform_int_distribution<std::size_t> mintDist(0, 1);

        while (stats[0]->nfts.size() > 0 || stats[1]->nfts.size() > 0 ||
               stats[2]->nfts.size() > 0)
        {
            // Pick an account to burn an nft. If there are no nfts left
            // pick again.
            AcctStat& owner = *(stats[acctDist(engine)]);
            if (owner.nfts.empty())
                continue;

            // Pick one of the nfts.
            std::uniform_int_distribution<std::size_t> nftDist(
                0lu, owner.nfts.size() - 1);
            auto nftIter = owner.nfts.begin() + nftDist(engine);
            uint256 const nft = *nftIter;
            owner.nfts.erase(nftIter);

            // Decide which of the accounts should burn the nft. If the
            // owner is becky then any of the three accounts can burn.
            // Otherwise either alice or minter can burn.
            AcctStat& burner = owner.acct == becky.acct
                ? *(stats[acctDist(engine)])
                : mintDist(engine) ? alice : minter;

            // A non-owner burner must name the current owner in the
            // transaction.
            if (owner.acct == burner.acct)
                env(token::burn(burner, nft));
            else
                env(token::burn(burner, nft), token::owner(owner));
            env.close();

            // Every time we burn an nft, the number of nfts they hold should
            // match the number of nfts we think they hold.
            BEAST_EXPECT(nftCount(env, alice.acct) == alice.nfts.size());
            BEAST_EXPECT(nftCount(env, becky.acct) == becky.nfts.size());
            BEAST_EXPECT(nftCount(env, minter.acct) == minter.nfts.size());
        }
        BEAST_EXPECT(nftCount(env, alice.acct) == 0);
        BEAST_EXPECT(nftCount(env, becky.acct) == 0);
        BEAST_EXPECT(nftCount(env, minter.acct) == 0);

        // When all nfts are burned none of the accounts should have
        // an ownerCount.
        BEAST_EXPECT(ownerCount(env, alice) == 0);
        BEAST_EXPECT(ownerCount(env, becky) == 0);
        BEAST_EXPECT(ownerCount(env, minter) == 0);
    }
|
||||
|
||||
    void
    testBurnSequential(FeatureBitset features)
    {
        // The earlier burn test randomizes which nft is burned. There are
        // a couple of directory merging scenarios that can only be tested by
        // inserting and deleting in an ordered fashion. We do that testing
        // now.
        testcase("Burn sequential");

        using namespace test::jtx;

        Account const alice{"alice"};

        Env env{*this, features};
        env.fund(XRP(1000), alice);

        // printNFTPages is a lambda that may be used for debugging.
        //
        // It uses the ledger RPC command to show the NFT pages in the ledger.
        // This parameter controls how noisy the output is.
        enum Volume : bool {
            quiet = false,
            noisy = true,
        };

        [[maybe_unused]] auto printNFTPages = [&env](Volume vol) {
            Json::Value jvParams;
            jvParams[jss::ledger_index] = "current";
            jvParams[jss::binary] = false;
            {
                Json::Value jrr = env.rpc(
                    "json",
                    "ledger_data",
                    boost::lexical_cast<std::string>(jvParams));

                // Iterate the state and print all NFTokenPages.
                if (!jrr.isMember(jss::result) ||
                    !jrr[jss::result].isMember(jss::state))
                {
                    std::cout << "No ledger state found!" << std::endl;
                    return;
                }
                Json::Value& state = jrr[jss::result][jss::state];
                if (!state.isArray())
                {
                    std::cout << "Ledger state is not array!" << std::endl;
                    return;
                }
                for (Json::UInt i = 0; i < state.size(); ++i)
                {
                    // Only ledger entries with an NFTokens array are
                    // NFT pages; skip everything else.
                    if (state[i].isMember(sfNFTokens.jsonName) &&
                        state[i][sfNFTokens.jsonName].isArray())
                    {
                        std::uint32_t tokenCount =
                            state[i][sfNFTokens.jsonName].size();
                        std::cout << tokenCount << " NFTokens in page "
                                  << state[i][jss::index].asString()
                                  << std::endl;

                        if (vol == noisy)
                        {
                            std::cout << state[i].toStyledString() << std::endl;
                        }
                        else
                        {
                            // In quiet mode only show the first and last
                            // entries of each page.
                            if (tokenCount > 0)
                                std::cout << "first: "
                                          << state[i][sfNFTokens.jsonName][0u]
                                                 .toStyledString()
                                          << std::endl;
                            if (tokenCount > 1)
                                std::cout << "last: "
                                          << state[i][sfNFTokens.jsonName]
                                                     [tokenCount - 1]
                                                 .toStyledString()
                                          << std::endl;
                        }
                    }
                }
            }
        };

        // A lambda that generates 96 nfts packed into three pages of 32 each.
        auto genPackedTokens = [this, &env, &alice](
                                   std::vector<uint256>& nfts) {
            nfts.clear();
            nfts.reserve(96);

            // We want to create fully packed NFT pages. This is a little
            // tricky since the system currently in place is inclined to
            // assign consecutive tokens to only 16 entries per page.
            //
            // By manipulating the internal form of the taxon we can force
            // creation of NFT pages that are completely full. This lambda
            // tells us the taxon value we should pass in in order for the
            // internal representation to match the passed in value.
            auto internalTaxon = [&env](
                                     Account const& acct,
                                     std::uint32_t taxon) -> std::uint32_t {
                // The taxon cipher is keyed off the account's current
                // minted-token count, so read that from the account root.
                std::uint32_t const tokenSeq = {
                    env.le(acct)->at(~sfMintedNFTokens).value_or(0)};
                return toUInt32(
                    nft::cipheredTaxon(tokenSeq, nft::toTaxon(taxon)));
            };

            for (std::uint32_t i = 0; i < 96; ++i)
            {
                // In order to fill the pages we use the taxon to break them
                // into groups of 16 entries. By having the internal
                // representation of the taxon go...
                //   0, 3, 2, 5, 4, 7...
                // in sets of 16 NFTs we can get each page to be fully
                // populated.
                std::uint32_t const intTaxon = (i / 16) + (i & 0b10000 ? 2 : 0);
                uint32_t const extTaxon = internalTaxon(alice, intTaxon);
                nfts.push_back(token::getNextID(env, alice, extTaxon));
                env(token::mint(alice, extTaxon));
                env.close();
            }

            // Sort the NFTs so they are listed in storage order, not
            // creation order.
            std::sort(nfts.begin(), nfts.end());

            // Verify that the ledger does indeed contain exactly three pages
            // of NFTs with 32 entries in each page.
            Json::Value jvParams;
            jvParams[jss::ledger_index] = "current";
            jvParams[jss::binary] = false;
            {
                Json::Value jrr = env.rpc(
                    "json",
                    "ledger_data",
                    boost::lexical_cast<std::string>(jvParams));

                Json::Value& state = jrr[jss::result][jss::state];

                int pageCount = 0;
                for (Json::UInt i = 0; i < state.size(); ++i)
                {
                    if (state[i].isMember(sfNFTokens.jsonName) &&
                        state[i][sfNFTokens.jsonName].isArray())
                    {
                        BEAST_EXPECT(
                            state[i][sfNFTokens.jsonName].size() == 32);
                        ++pageCount;
                    }
                }
                // If this check fails then the internal NFT directory logic
                // has changed.
                BEAST_EXPECT(pageCount == 3);
            }
        };

        // Generate three packed pages. Then burn the tokens in order from
        // first to last. This exercises specific cases where coalescing
        // pages is not possible.
        std::vector<uint256> nfts;
        genPackedTokens(nfts);
        BEAST_EXPECT(nftCount(env, alice) == 96);
        BEAST_EXPECT(ownerCount(env, alice) == 3);

        for (uint256 const& nft : nfts)
        {
            env(token::burn(alice, {nft}));
            env.close();
        }
        BEAST_EXPECT(nftCount(env, alice) == 0);
        BEAST_EXPECT(ownerCount(env, alice) == 0);

        // A lambda verifies that the ledger no longer contains any NFT pages.
        auto checkNoTokenPages = [this, &env]() {
            Json::Value jvParams;
            jvParams[jss::ledger_index] = "current";
            jvParams[jss::binary] = false;
            {
                Json::Value jrr = env.rpc(
                    "json",
                    "ledger_data",
                    boost::lexical_cast<std::string>(jvParams));

                Json::Value& state = jrr[jss::result][jss::state];

                for (Json::UInt i = 0; i < state.size(); ++i)
                {
                    BEAST_EXPECT(!state[i].isMember(sfNFTokens.jsonName));
                }
            }
        };
        checkNoTokenPages();

        // Generate three packed pages. Then burn the tokens in order from
        // last to first. This exercises different specific cases where
        // coalescing pages is not possible.
        genPackedTokens(nfts);
        BEAST_EXPECT(nftCount(env, alice) == 96);
        BEAST_EXPECT(ownerCount(env, alice) == 3);

        std::reverse(nfts.begin(), nfts.end());
        for (uint256 const& nft : nfts)
        {
            env(token::burn(alice, {nft}));
            env.close();
        }
        BEAST_EXPECT(nftCount(env, alice) == 0);
        BEAST_EXPECT(ownerCount(env, alice) == 0);
        checkNoTokenPages();

        // Generate three packed pages. Then burn all tokens in the middle
        // page. This exercises the case where a page is removed between
        // two fully populated pages.
        genPackedTokens(nfts);
        BEAST_EXPECT(nftCount(env, alice) == 96);
        BEAST_EXPECT(ownerCount(env, alice) == 3);

        for (std::size_t i = 32; i < 64; ++i)
        {
            env(token::burn(alice, nfts[i]));
            env.close();
        }
        nfts.erase(nfts.begin() + 32, nfts.begin() + 64);
        BEAST_EXPECT(nftCount(env, alice) == 64);
        BEAST_EXPECT(ownerCount(env, alice) == 2);

        // Burn the remaining nfts.
        for (uint256 const& nft : nfts)
        {
            env(token::burn(alice, {nft}));
            env.close();
        }
        BEAST_EXPECT(nftCount(env, alice) == 0);
        checkNoTokenPages();
    }
|
||||
|
||||
    void
    testBurnTooManyOffers(FeatureBitset features)
    {
        // Look at the case where too many offers prevents burning a token.
        testcase("Burn too many offers");

        using namespace test::jtx;

        Env env{*this, features};

        Account const alice("alice");
        Account const becky("becky");
        env.fund(XRP(1000), alice, becky);
        env.close();

        // We structure the test to try and maximize the metadata produced.
        // This verifies that we don't create too much metadata during a
        // maximal burn operation.
        //
        // 1. alice mints an nft with a full-sized URI.
        // 2. We create 1000 new accounts, each of which creates an offer for
        //    alice's nft.
        // 3. becky creates one more offer for alice's NFT
        // 4. Attempt to burn the nft which fails because there are too
        //    many offers.
        // 5. Cancel becky's offer and the nft should become burnable.
        uint256 const nftokenID =
            token::getNextID(env, alice, 0, tfTransferable);
        env(token::mint(alice, 0),
            token::uri(std::string(maxTokenURILength, 'u')),
            txflags(tfTransferable));
        env.close();

        // Create exactly maxTokenOfferCancelCount buy offers, each from a
        // distinct freshly funded account.
        std::vector<uint256> offerIndexes;
        offerIndexes.reserve(maxTokenOfferCancelCount);
        for (uint32_t i = 0; i < maxTokenOfferCancelCount; ++i)
        {
            Account const acct(std::string("acct") + std::to_string(i));
            env.fund(XRP(1000), acct);
            env.close();

            offerIndexes.push_back(keylet::nftoffer(acct, env.seq(acct)).key);
            env(token::createOffer(acct, nftokenID, drops(1)),
                token::owner(alice));
            env.close();
        }

        // Verify all offers are present in the ledger.
        for (uint256 const& offerIndex : offerIndexes)
        {
            BEAST_EXPECT(env.le(keylet::nftoffer(offerIndex)));
        }

        // Create one too many offers.
        uint256 const beckyOfferIndex =
            keylet::nftoffer(becky, env.seq(becky)).key;
        env(token::createOffer(becky, nftokenID, drops(1)),
            token::owner(alice));

        // Attempt to burn the nft which should fail.
        env(token::burn(alice, nftokenID), ter(tefTOO_BIG));

        // Close enough ledgers that the burn transaction is no longer retried.
        for (int i = 0; i < 10; ++i)
            env.close();

        // Cancel becky's offer, but alice adds a sell offer. The token
        // should still not be burnable.
        env(token::cancelOffer(becky, {beckyOfferIndex}));
        env.close();

        uint256 const aliceOfferIndex =
            keylet::nftoffer(alice, env.seq(alice)).key;
        env(token::createOffer(alice, nftokenID, drops(1)),
            txflags(tfSellNFToken));
        env.close();

        env(token::burn(alice, nftokenID), ter(tefTOO_BIG));
        env.close();

        // Cancel alice's sell offer. Now the token should be burnable.
        env(token::cancelOffer(alice, {aliceOfferIndex}));
        env.close();

        env(token::burn(alice, nftokenID));
        env.close();

        // Burning the token should remove all the offers from the ledger.
        for (uint256 const& offerIndex : offerIndexes)
        {
            BEAST_EXPECT(!env.le(keylet::nftoffer(offerIndex)));
        }

        // Both alice and becky should have ownerCounts of zero.
        BEAST_EXPECT(ownerCount(env, alice) == 0);
        BEAST_EXPECT(ownerCount(env, becky) == 0);
    }
|
||||
|
||||
    // Run all of the burn tests with the given feature set.
    void
    testWithFeats(FeatureBitset features)
    {
        testBurnRandom(features);
        testBurnSequential(features);
        testBurnTooManyOffers(features);
    }
|
||||
|
||||
public:
|
||||
    void
    run() override
    {
        using namespace test::jtx;
        // Exercise all tests with the full set of supported amendments
        // enabled.
        auto const sa = supported_amendments();
        testWithFeats(sa);
    }
|
||||
};
|
||||
|
||||
BEAST_DEFINE_TESTSUITE_PRIO(NFTokenBurn, tx, ripple, 3);
|
||||
|
||||
} // namespace ripple
|
||||
468
src/test/app/NFTokenDir_test.cpp
Normal file
468
src/test/app/NFTokenDir_test.cpp
Normal file
@@ -0,0 +1,468 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2022 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include <ripple/protocol/Feature.h>
|
||||
#include <ripple/protocol/jss.h>
|
||||
#include <ripple/protocol/nftPageMask.h>
|
||||
#include <test/jtx.h>
|
||||
|
||||
#include <initializer_list>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
class NFTokenDir_test : public beast::unit_test::suite
|
||||
{
|
||||
// printNFTPages is a helper function that may be used for debugging.
|
||||
//
|
||||
// It uses the ledger RPC command to show the NFT pages in the ledger.
|
||||
// This parameter controls how noisy the output is.
|
||||
enum Volume : bool {
|
||||
quiet = false,
|
||||
noisy = true,
|
||||
};
|
||||
|
||||
    // Debugging aid: dump every NFTokenPage found in the current ledger
    // to stdout, using the ledger_data RPC command. In noisy mode the
    // full JSON of each page is printed; in quiet mode only the first
    // and last token of each page are shown.
    void
    printNFTPages(test::jtx::Env& env, Volume vol)
    {
        Json::Value jvParams;
        jvParams[jss::ledger_index] = "current";
        jvParams[jss::binary] = false;
        {
            Json::Value jrr = env.rpc(
                "json",
                "ledger_data",
                boost::lexical_cast<std::string>(jvParams));

            // Iterate the state and print all NFTokenPages.
            if (!jrr.isMember(jss::result) ||
                !jrr[jss::result].isMember(jss::state))
            {
                std::cout << "No ledger state found!" << std::endl;
                return;
            }
            Json::Value& state = jrr[jss::result][jss::state];
            if (!state.isArray())
            {
                std::cout << "Ledger state is not array!" << std::endl;
                return;
            }
            for (Json::UInt i = 0; i < state.size(); ++i)
            {
                // Only entries carrying an NFTokens array are NFT pages.
                if (state[i].isMember(sfNFTokens.jsonName) &&
                    state[i][sfNFTokens.jsonName].isArray())
                {
                    std::uint32_t tokenCount =
                        state[i][sfNFTokens.jsonName].size();
                    std::cout << tokenCount << " NFtokens in page "
                              << state[i][jss::index].asString() << std::endl;

                    if (vol == noisy)
                    {
                        std::cout << state[i].toStyledString() << std::endl;
                    }
                    else
                    {
                        if (tokenCount > 0)
                            std::cout << "first: "
                                      << state[i][sfNFTokens.jsonName][0u]
                                             .toStyledString()
                                      << std::endl;
                        if (tokenCount > 1)
                            std::cout
                                << "last: "
                                << state[i][sfNFTokens.jsonName][tokenCount - 1]
                                       .toStyledString()
                                << std::endl;
                    }
                }
            }
        }
    }
|
||||
|
||||
void
|
||||
testLopsidedSplits(FeatureBitset features)
|
||||
{
|
||||
// All NFT IDs with the same low 96 bits must stay on the same NFT page.
|
||||
testcase("Lopsided splits");
|
||||
|
||||
using namespace test::jtx;
|
||||
|
||||
// When a single NFT page exceeds 32 entries, the code is inclined
|
||||
// to split that page into two equal pieces. That's fine, but
|
||||
// the code also needs to keep NFTs with identical low 96-bits on
|
||||
// the same page.
|
||||
//
|
||||
// Here we synthesize cases where there are several NFTs with
|
||||
// identical 96-low-bits in the middle of a page. When that page
|
||||
// is split because it overflows, we need to see that the NFTs
|
||||
// with identical 96-low-bits are all kept on the same page.
|
||||
|
||||
// Lambda that exercises the lopsided splits.
|
||||
auto exerciseLopsided =
|
||||
[this,
|
||||
&features](std::initializer_list<std::string_view const> seeds) {
|
||||
Env env{*this, features};
|
||||
|
||||
// Eventually all of the NFTokens will be owned by buyer.
|
||||
Account const buyer{"buyer"};
|
||||
env.fund(XRP(10000), buyer);
|
||||
env.close();
|
||||
|
||||
// Create accounts for all of the seeds and fund those accounts.
|
||||
std::vector<Account> accounts;
|
||||
accounts.reserve(seeds.size());
|
||||
for (std::string_view const& seed : seeds)
|
||||
{
|
||||
Account const& account = accounts.emplace_back(
|
||||
Account::base58Seed, std::string(seed));
|
||||
env.fund(XRP(10000), account);
|
||||
env.close();
|
||||
}
|
||||
|
||||
// All of the accounts create one NFT and and offer that NFT to
|
||||
// buyer.
|
||||
std::vector<uint256> nftIDs;
|
||||
std::vector<uint256> offers;
|
||||
offers.reserve(accounts.size());
|
||||
for (Account const& account : accounts)
|
||||
{
|
||||
// Mint the NFT.
|
||||
uint256 const& nftID = nftIDs.emplace_back(
|
||||
token::getNextID(env, account, 0, tfTransferable));
|
||||
env(token::mint(account, 0), txflags(tfTransferable));
|
||||
env.close();
|
||||
|
||||
// Create an offer to give the NFT to buyer for free.
|
||||
offers.emplace_back(
|
||||
keylet::nftoffer(account, env.seq(account)).key);
|
||||
env(token::createOffer(account, nftID, XRP(0)),
|
||||
token::destination(buyer),
|
||||
txflags((tfSellNFToken)));
|
||||
}
|
||||
env.close();
|
||||
|
||||
// buyer accepts all of the offers.
|
||||
for (uint256 const& offer : offers)
|
||||
{
|
||||
env(token::acceptSellOffer(buyer, offer));
|
||||
env.close();
|
||||
}
|
||||
|
||||
// This can be a good time to look at the NFT pages.
|
||||
// printNFTPages(env, noisy);
|
||||
|
||||
// Verify that all NFTs are owned by buyer and findable in the
|
||||
// ledger by having buyer create sell offers for all of their
|
||||
// NFTs. Attempting to sell an offer that the ledger can't find
|
||||
// generates a non-tesSUCCESS error code.
|
||||
for (uint256 const& nftID : nftIDs)
|
||||
{
|
||||
uint256 const offerID =
|
||||
keylet::nftoffer(buyer, env.seq(buyer)).key;
|
||||
env(token::createOffer(buyer, nftID, XRP(100)),
|
||||
txflags(tfSellNFToken));
|
||||
env.close();
|
||||
|
||||
env(token::cancelOffer(buyer, {offerID}));
|
||||
}
|
||||
|
||||
// Verify that all the NFTs are owned by buyer.
|
||||
Json::Value buyerNFTs = [&env, &buyer]() {
|
||||
Json::Value params;
|
||||
params[jss::account] = buyer.human();
|
||||
params[jss::type] = "state";
|
||||
return env.rpc("json", "account_nfts", to_string(params));
|
||||
}();
|
||||
|
||||
BEAST_EXPECT(
|
||||
buyerNFTs[jss::result][jss::account_nfts].size() ==
|
||||
nftIDs.size());
|
||||
for (Json::Value const& ownedNFT :
|
||||
buyerNFTs[jss::result][jss::account_nfts])
|
||||
{
|
||||
uint256 ownedID;
|
||||
BEAST_EXPECT(ownedID.parseHex(
|
||||
ownedNFT[sfNFTokenID.jsonName].asString()));
|
||||
auto const foundIter =
|
||||
std::find(nftIDs.begin(), nftIDs.end(), ownedID);
|
||||
|
||||
// Assuming we find the NFT, erase it so we know it's been
|
||||
// found and can't be found again.
|
||||
if (BEAST_EXPECT(foundIter != nftIDs.end()))
|
||||
nftIDs.erase(foundIter);
|
||||
}
|
||||
|
||||
// All NFTs should now be accounted for, so nftIDs should be
|
||||
// empty.
|
||||
BEAST_EXPECT(nftIDs.empty());
|
||||
};
|
||||
|
||||
// These seeds cause a lopsided split where the new NFT is added
|
||||
// to the upper page.
|
||||
static std::initializer_list<std::string_view const> const
|
||||
splitAndAddToHi{
|
||||
"sp6JS7f14BuwFY8Mw5p3b8jjQBBTK", // 0. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6F7X3EiGKazu", // 1. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6FxjntJJfKXq", // 2. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6eSF1ydEozJg", // 3. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6koPB91um2ej", // 4. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6m6D64iwquSe", // 5. 0x1d2932ea
|
||||
|
||||
"sp6JS7f14BuwFY8Mw5rC43sN4adC2", // 6. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw65L9DDQqgebz", // 7. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw65nKvU8pPQNn", // 8. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw6bxZLyTrdipw", // 9. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw6d5abucntSoX", // 10. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw6qXK5awrRRP8", // 11. 0x208dbc24
|
||||
|
||||
// These eight need to be kept together by the implementation.
|
||||
"sp6JS7f14BuwFY8Mw66EBtMxoMcCa", // 12. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw66dGfE9jVfGv", // 13. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6APdZa7PH566", // 14. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6C3QX5CZyET5", // 15. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6CSysFf8GvaR", // 16. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6c7QSDmoAeRV", // 17. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6mvonveaZhW7", // 18. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6vtHHG7dYcXi", // 19. 0x309b67ed
|
||||
|
||||
"sp6JS7f14BuwFY8Mw66yppUNxESaw", // 20. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6ATYQvobXiDT", // 21. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6bis8D1Wa9Uy", // 22. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6cTiGCWA8Wfa", // 23. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6eAy2fpXmyYf", // 24. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6icn58TRs8YG", // 25. 0x40d4b96f
|
||||
|
||||
"sp6JS7f14BuwFY8Mw68tj2eQEWoJt", // 26. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6AjnAinNnMHT", // 27. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6CKDUwB4LrhL", // 28. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6d2yPszEFA6J", // 29. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6jcBQBH3PfnB", // 30. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6qxx19KSnN1w", // 31. 0x503b6ba9
|
||||
|
||||
// Adding this NFT splits the page. It is added to the upper
|
||||
// page.
|
||||
"sp6JS7f14BuwFY8Mw6ut1hFrqWoY5", // 32. 0x503b6ba9
|
||||
};
|
||||
|
||||
// These seeds cause a lopsided split where the new NFT is added
|
||||
// to the lower page.
|
||||
static std::initializer_list<std::string_view const> const
|
||||
splitAndAddToLo{
|
||||
"sp6JS7f14BuwFY8Mw5p3b8jjQBBTK", // 0. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6F7X3EiGKazu", // 1. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6FxjntJJfKXq", // 2. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6eSF1ydEozJg", // 3. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6koPB91um2ej", // 4. 0x1d2932ea
|
||||
"sp6JS7f14BuwFY8Mw6m6D64iwquSe", // 5. 0x1d2932ea
|
||||
|
||||
"sp6JS7f14BuwFY8Mw5rC43sN4adC2", // 6. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw65L9DDQqgebz", // 7. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw65nKvU8pPQNn", // 8. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw6bxZLyTrdipw", // 9. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw6d5abucntSoX", // 10. 0x208dbc24
|
||||
"sp6JS7f14BuwFY8Mw6qXK5awrRRP8", // 11. 0x208dbc24
|
||||
|
||||
// These eight need to be kept together by the implementation.
|
||||
"sp6JS7f14BuwFY8Mw66EBtMxoMcCa", // 12. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw66dGfE9jVfGv", // 13. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6APdZa7PH566", // 14. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6C3QX5CZyET5", // 15. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6CSysFf8GvaR", // 16. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6c7QSDmoAeRV", // 17. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6mvonveaZhW7", // 18. 0x309b67ed
|
||||
"sp6JS7f14BuwFY8Mw6vtHHG7dYcXi", // 19. 0x309b67ed
|
||||
|
||||
"sp6JS7f14BuwFY8Mw66yppUNxESaw", // 20. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6ATYQvobXiDT", // 21. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6bis8D1Wa9Uy", // 22. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6cTiGCWA8Wfa", // 23. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6eAy2fpXmyYf", // 24. 0x40d4b96f
|
||||
"sp6JS7f14BuwFY8Mw6icn58TRs8YG", // 25. 0x40d4b96f
|
||||
|
||||
"sp6JS7f14BuwFY8Mw68tj2eQEWoJt", // 26. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6AjnAinNnMHT", // 27. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6CKDUwB4LrhL", // 28. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6d2yPszEFA6J", // 29. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6jcBQBH3PfnB", // 30. 0x503b6ba9
|
||||
"sp6JS7f14BuwFY8Mw6qxx19KSnN1w", // 31. 0x503b6ba9
|
||||
|
||||
// Adding this NFT splits the page. It is added to the lower
|
||||
// page.
|
||||
"sp6JS7f14BuwFY8Mw6xCigaMwC6Dp", // 32. 0x309b67ed
|
||||
};
|
||||
|
||||
// FUTURE TEST
|
||||
// These seeds fill the last 17 entries of the initial page with
|
||||
// equivalent NFTs. The split should keep these together.
|
||||
|
||||
// FUTURE TEST
|
||||
// These seeds fill the first entries of the initial page with
|
||||
// equivalent NFTs. The split should keep these together.
|
||||
|
||||
// Run the test cases.
|
||||
exerciseLopsided(splitAndAddToHi);
|
||||
exerciseLopsided(splitAndAddToLo);
|
||||
}
|
||||
|
||||
void
|
||||
testTooManyEquivalent(FeatureBitset features)
|
||||
{
|
||||
// Exercise the case where 33 NFTs with identical sort
|
||||
// characteristics are owned by the same account.
|
||||
testcase("NFToken too many same");
|
||||
|
||||
using namespace test::jtx;
|
||||
|
||||
Env env{*this, features};
|
||||
|
||||
// Eventually all of the NFTokens will be owned by buyer.
|
||||
Account const buyer{"buyer"};
|
||||
env.fund(XRP(10000), buyer);
|
||||
env.close();
|
||||
|
||||
// Here are 33 seeds that produce identical low 32-bits in their
|
||||
// corresponding AccountIDs.
|
||||
//
|
||||
// NOTE: We've not yet identified 33 AccountIDs that meet the
|
||||
// requirements. At the moment 12 is the best we can do. We'll fill
|
||||
// in the full count when they are available.
|
||||
static std::initializer_list<std::string_view const> const seeds{
|
||||
"sp6JS7f14BuwFY8Mw5G5vCrbxB3TZ",
|
||||
"sp6JS7f14BuwFY8Mw5H6qyXhorcip",
|
||||
"sp6JS7f14BuwFY8Mw5suWxsBQRqLx",
|
||||
"sp6JS7f14BuwFY8Mw66gtwamvGgSg",
|
||||
"sp6JS7f14BuwFY8Mw66iNV4PPcmyt",
|
||||
"sp6JS7f14BuwFY8Mw68Qz2P58ybfE",
|
||||
"sp6JS7f14BuwFY8Mw6AYtLXKzi2Bo",
|
||||
"sp6JS7f14BuwFY8Mw6boCES4j62P2",
|
||||
"sp6JS7f14BuwFY8Mw6kv7QDDv7wjw",
|
||||
"sp6JS7f14BuwFY8Mw6mHXMvpBjjwg",
|
||||
"sp6JS7f14BuwFY8Mw6qfGbznyYvVp",
|
||||
"sp6JS7f14BuwFY8Mw6zg6qHKDfSoU",
|
||||
};
|
||||
|
||||
// Create accounts for all of the seeds and fund those accounts.
|
||||
std::vector<Account> accounts;
|
||||
accounts.reserve(seeds.size());
|
||||
for (std::string_view const& seed : seeds)
|
||||
{
|
||||
Account const& account =
|
||||
accounts.emplace_back(Account::base58Seed, std::string(seed));
|
||||
env.fund(XRP(10000), account);
|
||||
env.close();
|
||||
}
|
||||
|
||||
// All of the accounts create one NFT and and offer that NFT to buyer.
|
||||
std::vector<uint256> nftIDs;
|
||||
std::vector<uint256> offers;
|
||||
offers.reserve(accounts.size());
|
||||
for (Account const& account : accounts)
|
||||
{
|
||||
// Mint the NFT.
|
||||
uint256 const& nftID = nftIDs.emplace_back(
|
||||
token::getNextID(env, account, 0, tfTransferable));
|
||||
env(token::mint(account, 0), txflags(tfTransferable));
|
||||
env.close();
|
||||
|
||||
// Create an offer to give the NFT to buyer for free.
|
||||
offers.emplace_back(
|
||||
keylet::nftoffer(account, env.seq(account)).key);
|
||||
env(token::createOffer(account, nftID, XRP(0)),
|
||||
token::destination(buyer),
|
||||
txflags((tfSellNFToken)));
|
||||
}
|
||||
env.close();
|
||||
|
||||
// Verify that the low 96 bits of all generated NFTs is identical.
|
||||
uint256 const expectLowBits = nftIDs.front() & nft::pageMask;
|
||||
for (uint256 const& nftID : nftIDs)
|
||||
{
|
||||
BEAST_EXPECT(expectLowBits == (nftID & nft::pageMask));
|
||||
}
|
||||
|
||||
// buyer accepts all of the offers.
|
||||
for (uint256 const& offer : offers)
|
||||
{
|
||||
env(token::acceptSellOffer(buyer, offer));
|
||||
env.close();
|
||||
}
|
||||
|
||||
// Verify that all NFTs are owned by buyer and findable in the
|
||||
// ledger by having buyer create sell offers for all of their NFTs.
|
||||
// Attempting to sell an offer that the ledger can't find generates
|
||||
// a non-tesSUCCESS error code.
|
||||
for (uint256 const& nftID : nftIDs)
|
||||
{
|
||||
uint256 const offerID = keylet::nftoffer(buyer, env.seq(buyer)).key;
|
||||
env(token::createOffer(buyer, nftID, XRP(100)),
|
||||
txflags(tfSellNFToken));
|
||||
env.close();
|
||||
|
||||
env(token::cancelOffer(buyer, {offerID}));
|
||||
}
|
||||
|
||||
// Verify that all the NFTs are owned by buyer.
|
||||
Json::Value buyerNFTs = [&env, &buyer]() {
|
||||
Json::Value params;
|
||||
params[jss::account] = buyer.human();
|
||||
params[jss::type] = "state";
|
||||
return env.rpc("json", "account_nfts", to_string(params));
|
||||
}();
|
||||
|
||||
BEAST_EXPECT(
|
||||
buyerNFTs[jss::result][jss::account_nfts].size() == nftIDs.size());
|
||||
for (Json::Value const& ownedNFT :
|
||||
buyerNFTs[jss::result][jss::account_nfts])
|
||||
{
|
||||
uint256 ownedID;
|
||||
BEAST_EXPECT(
|
||||
ownedID.parseHex(ownedNFT[sfNFTokenID.jsonName].asString()));
|
||||
auto const foundIter =
|
||||
std::find(nftIDs.begin(), nftIDs.end(), ownedID);
|
||||
|
||||
// Assuming we find the NFT, erase it so we know it's been found
|
||||
// and can't be found again.
|
||||
if (BEAST_EXPECT(foundIter != nftIDs.end()))
|
||||
nftIDs.erase(foundIter);
|
||||
}
|
||||
|
||||
// All NFTs should now be accounted for, so nftIDs should be empty.
|
||||
BEAST_EXPECT(nftIDs.empty());
|
||||
}
|
||||
|
||||
void
|
||||
testWithFeats(FeatureBitset features)
|
||||
{
|
||||
testLopsidedSplits(features);
|
||||
testTooManyEquivalent(features);
|
||||
}
|
||||
|
||||
public:
|
||||
void
|
||||
run() override
|
||||
{
|
||||
using namespace test::jtx;
|
||||
auto const sa = supported_amendments();
|
||||
testWithFeats(sa);
|
||||
}
|
||||
};
|
||||
|
||||
BEAST_DEFINE_TESTSUITE_PRIO(NFTokenDir, tx, ripple, 1);
|
||||
|
||||
} // namespace ripple
|
||||
4290
src/test/app/NFToken_test.cpp
Normal file
4290
src/test/app/NFToken_test.cpp
Normal file
File diff suppressed because it is too large
Load Diff
@@ -59,6 +59,7 @@
|
||||
#include <test/jtx/tags.h>
|
||||
#include <test/jtx/ter.h>
|
||||
#include <test/jtx/ticket.h>
|
||||
#include <test/jtx/token.h>
|
||||
#include <test/jtx/trust.h>
|
||||
#include <test/jtx/txflags.h>
|
||||
#include <test/jtx/utility.h>
|
||||
|
||||
@@ -73,6 +73,10 @@ public:
|
||||
|
||||
/** @} */
|
||||
|
||||
enum AcctStringType { base58Seed, other };
|
||||
/** Create an account from a base58 seed string. Throws on invalid seed. */
|
||||
Account(AcctStringType stringType, std::string base58SeedStr);
|
||||
|
||||
/** Return the name */
|
||||
std::string const&
|
||||
name() const
|
||||
@@ -132,7 +136,7 @@ private:
|
||||
|
||||
// Return the account from the cache & add it to the cache if needed
|
||||
static Account
|
||||
fromCache(std::string name, KeyType type);
|
||||
fromCache(AcctStringType stringType, std::string name, KeyType type);
|
||||
|
||||
std::string name_;
|
||||
PublicKey pk_;
|
||||
|
||||
@@ -46,14 +46,25 @@ Account::Account(
|
||||
}
|
||||
|
||||
Account
|
||||
Account::fromCache(std::string name, KeyType type)
|
||||
Account::fromCache(AcctStringType stringType, std::string name, KeyType type)
|
||||
{
|
||||
auto p = std::make_pair(name, type); // non-const so it can be moved from
|
||||
auto const iter = cache_.find(p);
|
||||
if (iter != cache_.end())
|
||||
return iter->second;
|
||||
|
||||
auto const keys = generateKeyPair(type, generateSeed(name));
|
||||
auto const keys = [stringType, &name, type]() {
|
||||
// Special handling for base58Seeds.
|
||||
if (stringType == base58Seed)
|
||||
{
|
||||
std::optional<Seed> const seed = parseBase58<Seed>(name);
|
||||
if (!seed.has_value())
|
||||
Throw<std::runtime_error>("Account:: invalid base58 seed");
|
||||
|
||||
return generateKeyPair(type, *seed);
|
||||
}
|
||||
return generateKeyPair(type, generateSeed(name));
|
||||
}();
|
||||
auto r = cache_.emplace(
|
||||
std::piecewise_construct,
|
||||
std::forward_as_tuple(std::move(p)),
|
||||
@@ -62,7 +73,15 @@ Account::fromCache(std::string name, KeyType type)
|
||||
}
|
||||
|
||||
Account::Account(std::string name, KeyType type)
|
||||
: Account(fromCache(std::move(name), type))
|
||||
: Account(fromCache(Account::other, std::move(name), type))
|
||||
{
|
||||
}
|
||||
|
||||
Account::Account(AcctStringType stringType, std::string base58SeedStr)
|
||||
: Account(fromCache(
|
||||
Account::base58Seed,
|
||||
std::move(base58SeedStr),
|
||||
KeyType::secp256k1))
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
@@ -27,14 +27,14 @@ namespace jtx {
|
||||
Json::Value
|
||||
offer(
|
||||
Account const& account,
|
||||
STAmount const& in,
|
||||
STAmount const& out,
|
||||
STAmount const& takerPays,
|
||||
STAmount const& takerGets,
|
||||
std::uint32_t flags)
|
||||
{
|
||||
Json::Value jv;
|
||||
jv[jss::Account] = account.human();
|
||||
jv[jss::TakerPays] = in.getJson(JsonOptions::none);
|
||||
jv[jss::TakerGets] = out.getJson(JsonOptions::none);
|
||||
jv[jss::TakerPays] = takerPays.getJson(JsonOptions::none);
|
||||
jv[jss::TakerGets] = takerGets.getJson(JsonOptions::none);
|
||||
if (flags)
|
||||
jv[jss::Flags] = flags;
|
||||
jv[jss::TransactionType] = jss::OfferCreate;
|
||||
|
||||
223
src/test/jtx/impl/token.cpp
Normal file
223
src/test/jtx/impl/token.cpp
Normal file
@@ -0,0 +1,223 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2012, 2013 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include <test/jtx/flags.h>
|
||||
#include <test/jtx/token.h>
|
||||
|
||||
#include <ripple/app/tx/impl/NFTokenMint.h>
|
||||
#include <ripple/protocol/SField.h>
|
||||
#include <ripple/protocol/jss.h>
|
||||
|
||||
namespace ripple {
|
||||
namespace test {
|
||||
namespace jtx {
|
||||
namespace token {
|
||||
|
||||
Json::Value
|
||||
mint(jtx::Account const& account, std::uint32_t nfTokenTaxon)
|
||||
{
|
||||
Json::Value jv;
|
||||
jv[sfAccount.jsonName] = account.human();
|
||||
jv[sfNFTokenTaxon.jsonName] = nfTokenTaxon;
|
||||
jv[sfTransactionType.jsonName] = jss::NFTokenMint;
|
||||
return jv;
|
||||
}
|
||||
|
||||
void
|
||||
xferFee::operator()(Env& env, JTx& jt) const
|
||||
{
|
||||
jt.jv[sfTransferFee.jsonName] = xferFee_;
|
||||
}
|
||||
|
||||
void
|
||||
issuer::operator()(Env& env, JTx& jt) const
|
||||
{
|
||||
jt.jv[sfIssuer.jsonName] = issuer_;
|
||||
}
|
||||
|
||||
void
|
||||
uri::operator()(Env& env, JTx& jt) const
|
||||
{
|
||||
jt.jv[sfURI.jsonName] = uri_;
|
||||
}
|
||||
|
||||
uint256
|
||||
getNextID(
|
||||
jtx::Env const& env,
|
||||
jtx::Account const& issuer,
|
||||
std::uint32_t nfTokenTaxon,
|
||||
std::uint16_t flags,
|
||||
std::uint16_t xferFee)
|
||||
{
|
||||
// Get the nftSeq from the account root of the issuer.
|
||||
std::uint32_t const nftSeq = {
|
||||
env.le(issuer)->at(~sfMintedNFTokens).value_or(0)};
|
||||
return getID(issuer, nfTokenTaxon, nftSeq, flags, xferFee);
|
||||
}
|
||||
|
||||
uint256
|
||||
getID(
|
||||
jtx::Account const& issuer,
|
||||
std::uint32_t nfTokenTaxon,
|
||||
std::uint32_t nftSeq,
|
||||
std::uint16_t flags,
|
||||
std::uint16_t xferFee)
|
||||
{
|
||||
return ripple::NFTokenMint::createNFTokenID(
|
||||
flags, xferFee, issuer, nft::toTaxon(nfTokenTaxon), nftSeq);
|
||||
}
|
||||
|
||||
Json::Value
|
||||
burn(jtx::Account const& account, uint256 const& nftokenID)
|
||||
{
|
||||
Json::Value jv;
|
||||
jv[sfAccount.jsonName] = account.human();
|
||||
jv[sfNFTokenID.jsonName] = to_string(nftokenID);
|
||||
jv[jss::TransactionType] = jss::NFTokenBurn;
|
||||
return jv;
|
||||
}
|
||||
|
||||
Json::Value
|
||||
createOffer(
|
||||
jtx::Account const& account,
|
||||
uint256 const& nftokenID,
|
||||
STAmount const& amount)
|
||||
{
|
||||
Json::Value jv;
|
||||
jv[sfAccount.jsonName] = account.human();
|
||||
jv[sfNFTokenID.jsonName] = to_string(nftokenID);
|
||||
jv[sfAmount.jsonName] = amount.getJson(JsonOptions::none);
|
||||
jv[jss::TransactionType] = jss::NFTokenCreateOffer;
|
||||
return jv;
|
||||
}
|
||||
|
||||
void
|
||||
owner::operator()(Env& env, JTx& jt) const
|
||||
{
|
||||
jt.jv[sfOwner.jsonName] = owner_;
|
||||
}
|
||||
|
||||
void
|
||||
expiration::operator()(Env& env, JTx& jt) const
|
||||
{
|
||||
jt.jv[sfExpiration.jsonName] = expires_;
|
||||
}
|
||||
|
||||
void
|
||||
destination::operator()(Env& env, JTx& jt) const
|
||||
{
|
||||
jt.jv[sfDestination.jsonName] = dest_;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static Json::Value
|
||||
cancelOfferImpl(jtx::Account const& account, T const& nftokenOffers)
|
||||
{
|
||||
Json::Value jv;
|
||||
jv[sfAccount.jsonName] = account.human();
|
||||
if (!empty(nftokenOffers))
|
||||
{
|
||||
jv[sfNFTokenOffers.jsonName] = Json::arrayValue;
|
||||
for (uint256 const& nftokenOffer : nftokenOffers)
|
||||
jv[sfNFTokenOffers.jsonName].append(to_string(nftokenOffer));
|
||||
}
|
||||
jv[jss::TransactionType] = jss::NFTokenCancelOffer;
|
||||
return jv;
|
||||
}
|
||||
|
||||
Json::Value
|
||||
cancelOffer(
|
||||
jtx::Account const& account,
|
||||
std::initializer_list<uint256> const& nftokenOffers)
|
||||
{
|
||||
return cancelOfferImpl(account, nftokenOffers);
|
||||
}
|
||||
|
||||
Json::Value
|
||||
cancelOffer(
|
||||
jtx::Account const& account,
|
||||
std::vector<uint256> const& nftokenOffers)
|
||||
{
|
||||
return cancelOfferImpl(account, nftokenOffers);
|
||||
}
|
||||
|
||||
void
|
||||
rootIndex::operator()(Env& env, JTx& jt) const
|
||||
{
|
||||
jt.jv[sfRootIndex.jsonName] = rootIndex_;
|
||||
}
|
||||
|
||||
Json::Value
|
||||
acceptBuyOffer(jtx::Account const& account, uint256 const& offerIndex)
|
||||
{
|
||||
Json::Value jv;
|
||||
jv[sfAccount.jsonName] = account.human();
|
||||
jv[sfNFTokenBuyOffer.jsonName] = to_string(offerIndex);
|
||||
jv[jss::TransactionType] = jss::NFTokenAcceptOffer;
|
||||
return jv;
|
||||
}
|
||||
|
||||
Json::Value
|
||||
acceptSellOffer(jtx::Account const& account, uint256 const& offerIndex)
|
||||
{
|
||||
Json::Value jv;
|
||||
jv[sfAccount.jsonName] = account.human();
|
||||
jv[sfNFTokenSellOffer.jsonName] = to_string(offerIndex);
|
||||
jv[jss::TransactionType] = jss::NFTokenAcceptOffer;
|
||||
return jv;
|
||||
}
|
||||
|
||||
Json::Value
|
||||
brokerOffers(
|
||||
jtx::Account const& account,
|
||||
uint256 const& buyOfferIndex,
|
||||
uint256 const& sellOfferIndex)
|
||||
{
|
||||
Json::Value jv;
|
||||
jv[sfAccount.jsonName] = account.human();
|
||||
jv[sfNFTokenBuyOffer.jsonName] = to_string(buyOfferIndex);
|
||||
jv[sfNFTokenSellOffer.jsonName] = to_string(sellOfferIndex);
|
||||
jv[jss::TransactionType] = jss::NFTokenAcceptOffer;
|
||||
return jv;
|
||||
}
|
||||
|
||||
void
|
||||
brokerFee::operator()(Env& env, JTx& jt) const
|
||||
{
|
||||
jt.jv[sfNFTokenBrokerFee.jsonName] = brokerFee_.getJson(JsonOptions::none);
|
||||
}
|
||||
|
||||
Json::Value
|
||||
setMinter(jtx::Account const& account, jtx::Account const& minter)
|
||||
{
|
||||
Json::Value jt = fset(account, asfAuthorizedNFTokenMinter);
|
||||
jt[sfNFTokenMinter.fieldName] = minter.human();
|
||||
return jt;
|
||||
}
|
||||
|
||||
Json::Value
|
||||
clearMinter(jtx::Account const& account)
|
||||
{
|
||||
return fclear(account, asfAuthorizedNFTokenMinter);
|
||||
}
|
||||
|
||||
} // namespace token
|
||||
} // namespace jtx
|
||||
} // namespace test
|
||||
} // namespace ripple
|
||||
@@ -32,8 +32,8 @@ namespace jtx {
|
||||
Json::Value
|
||||
offer(
|
||||
Account const& account,
|
||||
STAmount const& in,
|
||||
STAmount const& out,
|
||||
STAmount const& takerPays,
|
||||
STAmount const& takerGets,
|
||||
std::uint32_t flags = 0);
|
||||
|
||||
/** Cancel an offer. */
|
||||
|
||||
231
src/test/jtx/token.h
Normal file
231
src/test/jtx/token.h
Normal file
@@ -0,0 +1,231 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2021 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef RIPPLE_TEST_JTX_NFT_H_INCLUDED
|
||||
#define RIPPLE_TEST_JTX_NFT_H_INCLUDED
|
||||
|
||||
#include <test/jtx/Account.h>
|
||||
#include <test/jtx/Env.h>
|
||||
#include <test/jtx/owners.h>
|
||||
|
||||
#include <ripple/basics/strHex.h>
|
||||
|
||||
#include <initializer_list>
|
||||
|
||||
namespace ripple {
|
||||
namespace test {
|
||||
namespace jtx {
|
||||
|
||||
namespace token {
|
||||
|
||||
/** Mint an NFToken. */
|
||||
Json::Value
|
||||
mint(jtx::Account const& account, std::uint32_t tokenTaxon = 0);
|
||||
|
||||
/** Sets the optional TransferFee on an NFTokenMint. */
|
||||
class xferFee
|
||||
{
|
||||
private:
|
||||
std::uint16_t xferFee_;
|
||||
|
||||
public:
|
||||
explicit xferFee(std::uint16_t fee) : xferFee_(fee)
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
operator()(Env&, JTx& jtx) const;
|
||||
};
|
||||
|
||||
/** Sets the optional Issuer on an NFTokenMint. */
|
||||
class issuer
|
||||
{
|
||||
private:
|
||||
std::string issuer_;
|
||||
|
||||
public:
|
||||
explicit issuer(jtx::Account const& issue) : issuer_(issue.human())
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
operator()(Env&, JTx& jtx) const;
|
||||
};
|
||||
|
||||
/** Sets the optional URI on an NFTokenMint. */
|
||||
class uri
|
||||
{
|
||||
private:
|
||||
std::string uri_;
|
||||
|
||||
public:
|
||||
explicit uri(std::string const& u) : uri_(strHex(u))
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
operator()(Env&, JTx& jtx) const;
|
||||
};
|
||||
|
||||
/** Get the next NFTokenID that will be issued. */
|
||||
uint256
|
||||
getNextID(
|
||||
jtx::Env const& env,
|
||||
jtx::Account const& account,
|
||||
std::uint32_t nftokenTaxon,
|
||||
std::uint16_t flags = 0,
|
||||
std::uint16_t xferFee = 0);
|
||||
|
||||
/** Get the NFTokenID for a particular nftSequence. */
|
||||
uint256
|
||||
getID(
|
||||
jtx::Account const& account,
|
||||
std::uint32_t tokenTaxon,
|
||||
std::uint32_t nftSeq,
|
||||
std::uint16_t flags = 0,
|
||||
std::uint16_t xferFee = 0);
|
||||
|
||||
/** Burn an NFToken. */
|
||||
Json::Value
|
||||
burn(jtx::Account const& account, uint256 const& nftokenID);
|
||||
|
||||
/** Create an NFTokenOffer. */
|
||||
Json::Value
|
||||
createOffer(
|
||||
jtx::Account const& account,
|
||||
uint256 const& nftokenID,
|
||||
STAmount const& amount);
|
||||
|
||||
/** Sets the optional Owner on an NFTokenOffer. */
|
||||
class owner
|
||||
{
|
||||
private:
|
||||
std::string owner_;
|
||||
|
||||
public:
|
||||
explicit owner(jtx::Account const& ownedBy) : owner_(ownedBy.human())
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
operator()(Env&, JTx& jtx) const;
|
||||
};
|
||||
|
||||
/** Sets the optional Expiration field on an NFTokenOffer. */
|
||||
class expiration
|
||||
{
|
||||
private:
|
||||
std::uint32_t expires_;
|
||||
|
||||
public:
|
||||
explicit expiration(std::uint32_t const& expires) : expires_(expires)
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
operator()(Env&, JTx& jtx) const;
|
||||
};
|
||||
|
||||
/** Sets the optional Destination field on an NFTokenOffer. */
|
||||
class destination
|
||||
{
|
||||
private:
|
||||
std::string dest_;
|
||||
|
||||
public:
|
||||
explicit destination(jtx::Account const& dest) : dest_(dest.human())
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
operator()(Env&, JTx& jtx) const;
|
||||
};
|
||||
|
||||
/** Cancel NFTokenOffers. */
|
||||
Json::Value
|
||||
cancelOffer(
|
||||
jtx::Account const& account,
|
||||
std::initializer_list<uint256> const& nftokenOffers = {});
|
||||
|
||||
Json::Value
|
||||
cancelOffer(
|
||||
jtx::Account const& account,
|
||||
std::vector<uint256> const& nftokenOffers);
|
||||
|
||||
/** Sets the optional RootIndex field when canceling NFTokenOffers. */
|
||||
class rootIndex
|
||||
{
|
||||
private:
|
||||
std::string rootIndex_;
|
||||
|
||||
public:
|
||||
explicit rootIndex(uint256 const& index) : rootIndex_(to_string(index))
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
operator()(Env&, JTx& jtx) const;
|
||||
};
|
||||
|
||||
/** Accept an NFToken buy offer. */
|
||||
Json::Value
|
||||
acceptBuyOffer(jtx::Account const& account, uint256 const& offerIndex);
|
||||
|
||||
/** Accept an NFToken sell offer. */
|
||||
Json::Value
|
||||
acceptSellOffer(jtx::Account const& account, uint256 const& offerIndex);
|
||||
|
||||
/** Broker two NFToken offers. */
|
||||
Json::Value
|
||||
brokerOffers(
|
||||
jtx::Account const& account,
|
||||
uint256 const& buyOfferIndex,
|
||||
uint256 const& sellOfferIndex);
|
||||
|
||||
/** Sets the optional NFTokenBrokerFee field in a brokerOffer transaction. */
|
||||
class brokerFee
|
||||
{
|
||||
private:
|
||||
STAmount const brokerFee_;
|
||||
|
||||
public:
|
||||
explicit brokerFee(STAmount const fee) : brokerFee_(fee)
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
operator()(Env&, JTx& jtx) const;
|
||||
};
|
||||
|
||||
/** Set the authorized minter on an account root. */
|
||||
Json::Value
|
||||
setMinter(jtx::Account const& account, jtx::Account const& minter);
|
||||
|
||||
/** Clear any authorized minter from an account root. */
|
||||
Json::Value
|
||||
clearMinter(jtx::Account const& account);
|
||||
|
||||
} // namespace token
|
||||
|
||||
} // namespace jtx
|
||||
|
||||
} // namespace test
|
||||
} // namespace ripple
|
||||
|
||||
#endif // RIPPLE_TEST_JTX_NFT_H_INCLUDED
|
||||
@@ -133,7 +133,7 @@ class Hooks_test : public beast::unit_test::suite
|
||||
break;
|
||||
}
|
||||
|
||||
case STI_HASH256: {
|
||||
case STI_UINT256: {
|
||||
uint256 u = uint256::fromVoid(
|
||||
"DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBE"
|
||||
"EFDEADBEEF");
|
||||
|
||||
@@ -235,72 +235,77 @@ private:
|
||||
// We'll be running through two sets of pbuf::Descriptors: the ones in
|
||||
// the OneOf and the common fields. Here is a lambda that factors out
|
||||
// the common checking code for these two cases.
|
||||
auto checkFieldDesc =
|
||||
[this, &sFields, &knownFormatName](
|
||||
pbuf::FieldDescriptor const* const fieldDesc) {
|
||||
// gRPC has different handling for repeated vs non-repeated
|
||||
// types. So we need to do that too.
|
||||
std::string name;
|
||||
if (fieldDesc->is_repeated())
|
||||
auto checkFieldDesc = [this, &sFields, &knownFormatName](
|
||||
pbuf::FieldDescriptor const* const
|
||||
fieldDesc) {
|
||||
// gRPC has different handling for repeated vs non-repeated
|
||||
// types. So we need to do that too.
|
||||
std::string name;
|
||||
if (fieldDesc->is_repeated())
|
||||
{
|
||||
// Repeated-type handling.
|
||||
|
||||
// Munge the fieldDescriptor name so it looks like the
|
||||
// name in sFields.
|
||||
name = fieldDesc->camelcase_name();
|
||||
name[0] = toupper(name[0]);
|
||||
|
||||
// The ledger gives UNL all caps. Adapt to that.
|
||||
if (size_t const i = name.find("Unl"); i != std::string::npos)
|
||||
{
|
||||
// Repeated-type handling.
|
||||
|
||||
// Munge the fieldDescriptor name so it looks like the
|
||||
// name in sFields.
|
||||
name = fieldDesc->camelcase_name();
|
||||
name[0] = toupper(name[0]);
|
||||
|
||||
// The ledger gives UNL all caps. Adapt to that.
|
||||
if (size_t const i = name.find("Unl");
|
||||
i != std::string::npos)
|
||||
{
|
||||
name[i + 1] = 'N';
|
||||
name[i + 2] = 'L';
|
||||
}
|
||||
|
||||
if (!sFields.count(name))
|
||||
{
|
||||
fail(
|
||||
std::string("Repeated Protobuf Descriptor '") +
|
||||
name + "' expected in KnownFormat '" +
|
||||
knownFormatName + "' and not found",
|
||||
__FILE__,
|
||||
__LINE__);
|
||||
return;
|
||||
}
|
||||
pass();
|
||||
|
||||
validateRepeatedField(fieldDesc, sFields.at(name));
|
||||
name[i + 1] = 'N';
|
||||
name[i + 2] = 'L';
|
||||
}
|
||||
else
|
||||
|
||||
// The ledger gives the NFT part of NFToken all caps.
|
||||
// Adapt to that.
|
||||
if (size_t const i = name.find("Nft"); i != std::string::npos)
|
||||
{
|
||||
// Non-repeated handling.
|
||||
pbuf::Descriptor const* const entryDesc =
|
||||
fieldDesc->message_type();
|
||||
if (entryDesc == nullptr)
|
||||
return;
|
||||
|
||||
name = entryDesc->name();
|
||||
if (!sFields.count(name))
|
||||
{
|
||||
fail(
|
||||
std::string("Protobuf Descriptor '") +
|
||||
entryDesc->name() +
|
||||
"' expected in KnownFormat '" +
|
||||
knownFormatName + "' and not found",
|
||||
__FILE__,
|
||||
__LINE__);
|
||||
return;
|
||||
}
|
||||
pass();
|
||||
|
||||
validateDescriptor(
|
||||
entryDesc, sFields.at(entryDesc->name()));
|
||||
name[i + 1] = 'F';
|
||||
name[i + 2] = 'T';
|
||||
}
|
||||
// Remove the validated field from the map so we can tell if
|
||||
// there are left over fields at the end of all comparisons.
|
||||
sFields.erase(name);
|
||||
};
|
||||
|
||||
if (!sFields.count(name))
|
||||
{
|
||||
fail(
|
||||
std::string("Repeated Protobuf Descriptor '") + name +
|
||||
"' expected in KnownFormat '" + knownFormatName +
|
||||
"' and not found",
|
||||
__FILE__,
|
||||
__LINE__);
|
||||
return;
|
||||
}
|
||||
pass();
|
||||
|
||||
validateRepeatedField(fieldDesc, sFields.at(name));
|
||||
}
|
||||
else
|
||||
{
|
||||
// Non-repeated handling.
|
||||
pbuf::Descriptor const* const entryDesc =
|
||||
fieldDesc->message_type();
|
||||
if (entryDesc == nullptr)
|
||||
return;
|
||||
|
||||
name = entryDesc->name();
|
||||
if (!sFields.count(name))
|
||||
{
|
||||
fail(
|
||||
std::string("Protobuf Descriptor '") +
|
||||
entryDesc->name() + "' expected in KnownFormat '" +
|
||||
knownFormatName + "' and not found",
|
||||
__FILE__,
|
||||
__LINE__);
|
||||
return;
|
||||
}
|
||||
pass();
|
||||
|
||||
validateDescriptor(entryDesc, sFields.at(entryDesc->name()));
|
||||
}
|
||||
// Remove the validated field from the map so we can tell if
|
||||
// there are left over fields at the end of all comparisons.
|
||||
sFields.erase(name);
|
||||
};
|
||||
|
||||
// Compare the SFields to the FieldDescriptor->Descriptors.
|
||||
for (int i = 0; i < pbufDescriptor->field_count(); ++i)
|
||||
@@ -453,7 +458,7 @@ private:
|
||||
// clang-format off
|
||||
static const std::array specialEntries{
|
||||
SpecialEntry{
|
||||
"Currency", STI_HASH160,
|
||||
"Currency", STI_UINT160,
|
||||
{
|
||||
{"name", fieldTYPE_STRING},
|
||||
{"code", fieldTYPE_BYTES}
|
||||
@@ -581,9 +586,9 @@ private:
|
||||
{STI_ACCOUNT, fieldTYPE_STRING},
|
||||
|
||||
{STI_AMOUNT, fieldTYPE_BYTES},
|
||||
{STI_HASH128, fieldTYPE_BYTES},
|
||||
{STI_HASH160, fieldTYPE_BYTES},
|
||||
{STI_HASH256, fieldTYPE_BYTES},
|
||||
{STI_UINT128, fieldTYPE_BYTES},
|
||||
{STI_UINT160, fieldTYPE_BYTES},
|
||||
{STI_UINT256, fieldTYPE_BYTES},
|
||||
{STI_VL, fieldTYPE_BYTES},
|
||||
};
|
||||
//clang-format on
|
||||
@@ -601,7 +606,8 @@ private:
|
||||
static const std::map<int, pbuf::FieldDescriptor::Type>
|
||||
sFieldCodeToFieldDescType{
|
||||
{sfDomain.fieldCode, fieldTYPE_STRING},
|
||||
{sfFee.fieldCode, fieldTYPE_UINT64}};
|
||||
{sfFee.fieldCode, fieldTYPE_UINT64},
|
||||
{sfURI.fieldCode, fieldTYPE_STRING}};
|
||||
|
||||
if (auto const iter = sFieldCodeToFieldDescType.find(sField->fieldCode);
|
||||
iter != sFieldCodeToFieldDescType.end() &&
|
||||
@@ -703,7 +709,9 @@ private:
|
||||
// The following repeated types provide no further structure for their
|
||||
// in-ledger representation. We just have to trust that the gRPC
|
||||
// representation is reasonable for what the ledger implements.
|
||||
static const std::set<std::string> noFurtherDetail{{sfPaths.getName()}};
|
||||
static const std::set<std::string> noFurtherDetail{
|
||||
{sfPaths.getName()},
|
||||
};
|
||||
|
||||
if (noFurtherDetail.count(sField->getName()))
|
||||
{
|
||||
@@ -721,8 +729,10 @@ private:
|
||||
{sfIndexes.getName(), &sfLedgerIndex},
|
||||
{sfMajorities.getName(), &sfMajority},
|
||||
{sfMemos.getName(), &sfMemo},
|
||||
{sfNFTokens.getName(), &sfNFToken},
|
||||
{sfSignerEntries.getName(), &sfSignerEntry},
|
||||
{sfSigners.getName(), &sfSigner}};
|
||||
{sfSigners.getName(), &sfSigner},
|
||||
{sfNFTokenOffers.getName(), &sfLedgerIndex}};
|
||||
|
||||
if (!repeatsWhat.count(sField->getName()))
|
||||
{
|
||||
|
||||
@@ -257,7 +257,7 @@ public:
|
||||
BEAST_EXPECT(shouldBeInvalid == sfInvalid);
|
||||
};
|
||||
testInvalid(STI_VL, 255);
|
||||
testInvalid(STI_HASH256, 255);
|
||||
testInvalid(STI_UINT256, 255);
|
||||
testInvalid(STI_UINT32, 255);
|
||||
testInvalid(STI_VECTOR256, 255);
|
||||
testInvalid(STI_OBJECT, 255);
|
||||
|
||||
@@ -75,6 +75,13 @@ public:
|
||||
// elsewhere.
|
||||
continue;
|
||||
}
|
||||
if (flag == asfAuthorizedNFTokenMinter)
|
||||
{
|
||||
// The asfAuthorizedNFTokenMinter flag requires the
|
||||
// presence or absence of the sfNFTokenMinter field in
|
||||
// the transaction. It is tested elsewhere.
|
||||
continue;
|
||||
}
|
||||
else if (
|
||||
std::find(goodFlags.begin(), goodFlags.end(), flag) !=
|
||||
goodFlags.end())
|
||||
@@ -398,6 +405,18 @@ public:
|
||||
env(rate(gw, 2.0));
|
||||
env.close();
|
||||
|
||||
// Because we're hacking the ledger we need the account to have
|
||||
// non-zero sfMintedNFTokens and sfBurnedNFTokens fields. This
|
||||
// prevents an exception when the AccountRoot template is applied.
|
||||
{
|
||||
uint256 const nftId0{token::getNextID(env, gw, 0u)};
|
||||
env(token::mint(gw, 0u));
|
||||
env.close();
|
||||
|
||||
env(token::burn(gw, nftId0));
|
||||
env.close();
|
||||
}
|
||||
|
||||
// Note that we're bypassing almost all of the ledger's safety
|
||||
// checks with this modify() call. If you call close() between
|
||||
// here and the end of the test all the effort will be lost.
|
||||
|
||||
@@ -411,7 +411,7 @@ class LedgerRPC_test : public beast::unit_test::suite
|
||||
jvParams[jss::ledger_hash] = ledgerHash;
|
||||
Json::Value const jrr = env.rpc(
|
||||
"json", "ledger_entry", to_string(jvParams))[jss::result];
|
||||
checkErrorValue(jrr, "malformedRequest", "");
|
||||
checkErrorValue(jrr, "unexpectedLedgerType", "");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1170,7 +1170,7 @@ class LedgerRPC_test : public beast::unit_test::suite
|
||||
jvParams[jss::ledger_hash] = ledgerHash;
|
||||
Json::Value const jrr = env.rpc(
|
||||
"json", "ledger_entry", to_string(jvParams))[jss::result];
|
||||
checkErrorValue(jrr, "malformedRequest", "");
|
||||
checkErrorValue(jrr, "unexpectedLedgerType", "");
|
||||
}
|
||||
{
|
||||
// Malformed account entry.
|
||||
|
||||
Reference in New Issue
Block a user