mirror of https://github.com/XRPLF/rippled.git, synced 2025-12-06 17:27:55 +00:00
Killing Transform Rep
Summary: Let's get rid of TransformRep and its children. We have confirmed that HashSkipListRep works better with multifeed, so there is no benefit to keeping TransformRep around. This diff mostly just deletes references to the obsoleted functions. I also have a diff for fbcode that we'll need to push when we switch to the new release. I had to expose HashSkipListRepFactory in the client header files because db_impl.cc needs access to the GetTransform() function for SanitizeOptions.

Test Plan: make check

Reviewers: dhruba, haobo, kailiu, sdong

Reviewed By: dhruba

CC: leveldb

Differential Revision: https://reviews.facebook.net/D14397
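The practical change for callers is the memtable configuration: everywhere the hunks below touch, a directly constructed PrefixHashRepFactory is replaced by the NewHashSkipListRepFactory() helper. A minimal before/after sketch, assuming the public headers of this era (rocksdb/options.h, rocksdb/memtablerep.h, rocksdb/slice_transform.h); the helper function name is illustrative only:

#include "rocksdb/memtablerep.h"
#include "rocksdb/options.h"
#include "rocksdb/slice_transform.h"

// Illustrative helper: build Options that use a prefix-aware memtable.
rocksdb::Options MakePrefixOptions() {
  rocksdb::Options options;
  options.create_if_missing = true;

  // The prefix extractor and the memtable factory should use the same
  // transform; SanitizeOptions (see the db_impl.cc hunk below) verifies this.
  auto prefix_extractor = rocksdb::NewFixedPrefixTransform(8);
  options.prefix_extractor = prefix_extractor;

  // Before this commit:
  //   options.memtable_factory =
  //       std::make_shared<rocksdb::PrefixHashRepFactory>(prefix_extractor);
  // After this commit:
  options.memtable_factory.reset(
      rocksdb::NewHashSkipListRepFactory(prefix_extractor));
  return options;
}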
@@ -431,12 +431,11 @@ static bool ValidatePrefixSize(const char* flagname, int32_t value) {
   }
   return true;
 }
-DEFINE_int32(prefix_size, 0, "Control the prefix size for PrefixHashRep");
+DEFINE_int32(prefix_size, 0, "Control the prefix size for HashSkipList");
 
 enum RepFactory {
   kSkipList,
   kPrefixHash,
-  kUnsorted,
   kVectorRep
 };
 enum RepFactory StringToRepFactory(const char* ctype) {
@@ -446,8 +445,6 @@ enum RepFactory StringToRepFactory(const char* ctype) {
     return kSkipList;
   else if (!strcasecmp(ctype, "prefix_hash"))
     return kPrefixHash;
-  else if (!strcasecmp(ctype, "unsorted"))
-    return kUnsorted;
   else if (!strcasecmp(ctype, "vector"))
     return kVectorRep;
 
@@ -803,9 +800,6 @@ class Benchmark {
       case kSkipList:
         fprintf(stdout, "Memtablerep: skip_list\n");
         break;
-      case kUnsorted:
-        fprintf(stdout, "Memtablerep: unsorted\n");
-        break;
       case kVectorRep:
         fprintf(stdout, "Memtablerep: vector\n");
         break;
@@ -1328,14 +1322,8 @@ class Benchmark {
     }
     switch (FLAGS_rep_factory) {
       case kPrefixHash:
-        options.memtable_factory.reset(
-          new PrefixHashRepFactory(NewFixedPrefixTransform(FLAGS_prefix_size))
-        );
-        break;
-      case kUnsorted:
-        options.memtable_factory.reset(
-          new UnsortedRepFactory
-        );
+        options.memtable_factory.reset(NewHashSkipListRepFactory(
+            NewFixedPrefixTransform(FLAGS_prefix_size)));
         break;
       case kSkipList:
         // no need to do anything
 
@@ -50,6 +50,7 @@
 #include "util/auto_roll_logger.h"
 #include "util/build_version.h"
 #include "util/coding.h"
+#include "util/hash_skiplist_rep.h"
 #include "util/logging.h"
 #include "util/mutexlock.h"
 #include "util/perf_context_imp.h"
@@ -162,10 +163,10 @@ Options SanitizeOptions(const std::string& dbname,
     Log(result.info_log, "Compaction filter specified, ignore factory");
   }
   if (result.prefix_extractor) {
-    // If a prefix extractor has been supplied and a PrefixHashRepFactory is
+    // If a prefix extractor has been supplied and a HashSkipListRepFactory is
     // being used, make sure that the latter uses the former as its transform
     // function.
-    auto factory = dynamic_cast<PrefixHashRepFactory*>(
+    auto factory = dynamic_cast<HashSkipListRepFactory*>(
       result.memtable_factory.get());
     if (factory &&
         factory->GetTransform() != result.prefix_extractor) {
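The hunk above is the reason HashSkipListRepFactory had to move into a header that db_impl.cc can see: the dynamic_cast needs the complete type, and the reconciliation relies on the factory's GetTransform() accessor. A self-contained sketch of the check, with stand-in types in place of the real RocksDB classes (the member names here are assumptions):

#include <memory>

// Stand-ins for the real rocksdb types, kept minimal for illustration.
struct SliceTransform {};
struct MemTableRepFactory {
  virtual ~MemTableRepFactory() = default;
};
struct HashSkipListRepFactory : MemTableRepFactory {
  explicit HashSkipListRepFactory(const SliceTransform* t) : transform_(t) {}
  const SliceTransform* GetTransform() const { return transform_; }
  const SliceTransform* transform_;
};

// Mirrors the SanitizeOptions logic: if a prefix extractor is supplied and the
// memtable factory is a HashSkipListRepFactory, the two must share a transform.
bool TransformsAgree(const SliceTransform* prefix_extractor,
                     const std::shared_ptr<MemTableRepFactory>& factory_ptr) {
  auto factory = dynamic_cast<HashSkipListRepFactory*>(factory_ptr.get());
  return factory == nullptr || factory->GetTransform() == prefix_extractor;
}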
db/db_test.cc
@@ -244,7 +244,6 @@ class DBTest {
   enum OptionConfig {
     kDefault,
     kVectorRep,
-    kUnsortedRep,
     kMergePut,
     kFilter,
     kUncompressed,
@@ -255,7 +254,7 @@ class DBTest {
     kCompactOnFlush,
     kPerfOptions,
     kDeletesFilterFirst,
-    kPrefixHashRep,
+    kHashSkipList,
     kUniversalCompaction,
     kCompressedBlockCache,
     kEnd
@@ -339,9 +338,9 @@ class DBTest {
   Options CurrentOptions() {
     Options options;
     switch (option_config_) {
-      case kPrefixHashRep:
-        options.memtable_factory.reset(new
-          PrefixHashRepFactory(NewFixedPrefixTransform(1)));
+      case kHashSkipList:
+        options.memtable_factory.reset(
+            NewHashSkipListRepFactory(NewFixedPrefixTransform(1)));
         break;
       case kMergePut:
         options.merge_operator = MergeOperators::CreatePutOperator();
@@ -375,9 +374,6 @@ class DBTest {
       case kDeletesFilterFirst:
         options.filter_deletes = true;
         break;
-      case kUnsortedRep:
-        options.memtable_factory.reset(new UnsortedRepFactory);
-        break;
      case kVectorRep:
        options.memtable_factory.reset(new VectorRepFactory(100));
        break;
@@ -4600,7 +4596,7 @@ TEST(DBTest, Randomized) {
     // TODO(sanjay): Test Get() works
     int p = rnd.Uniform(100);
     int minimum = 0;
-    if (option_config_ == kPrefixHashRep) {
+    if (option_config_ == kHashSkipList) {
       minimum = 1;
     }
     if (p < 45) {  // Put
@@ -4770,90 +4766,82 @@ void PrefixScanInit(DBTest *dbtest) {
 }
 
 TEST(DBTest, PrefixScan) {
-  for (int it = 0; it < 2; ++it) {
-    ReadOptions ro = ReadOptions();
-    int count;
-    Slice prefix;
-    Slice key;
-    char buf[100];
-    Iterator* iter;
-    snprintf(buf, sizeof(buf), "03______:");
-    prefix = Slice(buf, 8);
-    key = Slice(buf, 9);
-    auto prefix_extractor = NewFixedPrefixTransform(8);
-    // db configs
-    env_->count_random_reads_ = true;
-    Options options = CurrentOptions();
-    options.env = env_;
-    options.no_block_cache = true;
-    options.filter_policy = NewBloomFilterPolicy(10);
-    options.prefix_extractor = prefix_extractor;
-    options.whole_key_filtering = false;
-    options.disable_auto_compactions = true;
-    options.max_background_compactions = 2;
-    options.create_if_missing = true;
-    options.disable_seek_compaction = true;
-    if (it == 0) {
-      options.memtable_factory.reset(NewHashSkipListRepFactory(
-          prefix_extractor));
-    } else {
-      options.memtable_factory = std::make_shared<PrefixHashRepFactory>(
-          prefix_extractor);
-    }
+  ReadOptions ro = ReadOptions();
+  int count;
+  Slice prefix;
+  Slice key;
+  char buf[100];
+  Iterator* iter;
+  snprintf(buf, sizeof(buf), "03______:");
+  prefix = Slice(buf, 8);
+  key = Slice(buf, 9);
+  auto prefix_extractor = NewFixedPrefixTransform(8);
+  // db configs
+  env_->count_random_reads_ = true;
+  Options options = CurrentOptions();
+  options.env = env_;
+  options.no_block_cache = true;
+  options.filter_policy = NewBloomFilterPolicy(10);
+  options.prefix_extractor = prefix_extractor;
+  options.whole_key_filtering = false;
+  options.disable_auto_compactions = true;
+  options.max_background_compactions = 2;
+  options.create_if_missing = true;
+  options.disable_seek_compaction = true;
+  options.memtable_factory.reset(NewHashSkipListRepFactory(prefix_extractor));
 
-    // prefix specified, with blooms: 2 RAND I/Os
-    // SeekToFirst
-    DestroyAndReopen(&options);
-    PrefixScanInit(this);
-    count = 0;
-    env_->random_read_counter_.Reset();
-    ro.prefix = &prefix;
-    iter = db_->NewIterator(ro);
-    for (iter->SeekToFirst(); iter->Valid(); iter->Next()) {
-      assert(iter->key().starts_with(prefix));
-      count++;
-    }
-    ASSERT_OK(iter->status());
-    delete iter;
-    ASSERT_EQ(count, 2);
-    ASSERT_EQ(env_->random_read_counter_.Read(), 2);
-
-    // prefix specified, with blooms: 2 RAND I/Os
-    // Seek
-    DestroyAndReopen(&options);
-    PrefixScanInit(this);
-    count = 0;
-    env_->random_read_counter_.Reset();
-    ro.prefix = &prefix;
-    iter = db_->NewIterator(ro);
-    for (iter->Seek(key); iter->Valid(); iter->Next()) {
-      assert(iter->key().starts_with(prefix));
-      count++;
-    }
-    ASSERT_OK(iter->status());
-    delete iter;
-    ASSERT_EQ(count, 2);
-    ASSERT_EQ(env_->random_read_counter_.Read(), 2);
-
-    // no prefix specified: 11 RAND I/Os
-    DestroyAndReopen(&options);
-    PrefixScanInit(this);
-    count = 0;
-    env_->random_read_counter_.Reset();
-    iter = db_->NewIterator(ReadOptions());
-    for (iter->Seek(prefix); iter->Valid(); iter->Next()) {
-      if (! iter->key().starts_with(prefix)) {
-        break;
-      }
-      count++;
-    }
-    ASSERT_OK(iter->status());
-    delete iter;
-    ASSERT_EQ(count, 2);
-    ASSERT_EQ(env_->random_read_counter_.Read(), 11);
-    Close();
-    delete options.filter_policy;
-  }
+  // prefix specified, with blooms: 2 RAND I/Os
+  // SeekToFirst
+  DestroyAndReopen(&options);
+  PrefixScanInit(this);
+  count = 0;
+  env_->random_read_counter_.Reset();
+  ro.prefix = &prefix;
+  iter = db_->NewIterator(ro);
+  for (iter->SeekToFirst(); iter->Valid(); iter->Next()) {
+    assert(iter->key().starts_with(prefix));
+    count++;
+  }
+  ASSERT_OK(iter->status());
+  delete iter;
+  ASSERT_EQ(count, 2);
+  ASSERT_EQ(env_->random_read_counter_.Read(), 2);
+
+  // prefix specified, with blooms: 2 RAND I/Os
+  // Seek
+  DestroyAndReopen(&options);
+  PrefixScanInit(this);
+  count = 0;
+  env_->random_read_counter_.Reset();
+  ro.prefix = &prefix;
+  iter = db_->NewIterator(ro);
+  for (iter->Seek(key); iter->Valid(); iter->Next()) {
+    assert(iter->key().starts_with(prefix));
+    count++;
+  }
+  ASSERT_OK(iter->status());
+  delete iter;
+  ASSERT_EQ(count, 2);
+  ASSERT_EQ(env_->random_read_counter_.Read(), 2);
+
+  // no prefix specified: 11 RAND I/Os
+  DestroyAndReopen(&options);
+  PrefixScanInit(this);
+  count = 0;
+  env_->random_read_counter_.Reset();
+  iter = db_->NewIterator(ReadOptions());
+  for (iter->Seek(prefix); iter->Valid(); iter->Next()) {
+    if (! iter->key().starts_with(prefix)) {
+      break;
+    }
+    count++;
+  }
+  ASSERT_OK(iter->status());
+  delete iter;
+  ASSERT_EQ(count, 2);
+  ASSERT_EQ(env_->random_read_counter_.Read(), 11);
+  Close();
+  delete options.filter_policy;
 }
 
 std::string MakeKey(unsigned int num) {
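The PrefixScan test above exercises the prefix-seek API of this era: the caller keeps the prefix Slice alive and points ReadOptions::prefix at it, and the iterator then stays within that prefix, which is what lets the prefix bloom filter keep the random-read count at 2 instead of 11. A condensed usage sketch under those assumptions (the DB is presumed open with NewFixedPrefixTransform(8) as its prefix_extractor; error handling is omitted):

#include <memory>

#include "rocksdb/db.h"
#include "rocksdb/iterator.h"
#include "rocksdb/slice.h"

// Count the entries that share an 8-byte prefix, using ReadOptions::prefix as
// the test does. The Slice must outlive the iterator.
int CountPrefix(rocksdb::DB* db, const rocksdb::Slice& prefix) {
  rocksdb::ReadOptions ro;
  ro.prefix = &prefix;  // limit the scan to keys with this prefix
  std::unique_ptr<rocksdb::Iterator> iter(db->NewIterator(ro));
  int count = 0;
  for (iter->SeekToFirst(); iter->Valid(); iter->Next()) {
    ++count;
  }
  return count;
}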
@@ -38,8 +38,8 @@ std::shared_ptr<DB> OpenDb() {
 
   if (FLAGS_use_set_based_memetable) {
     auto prefix_extractor = rocksdb::NewFixedPrefixTransform(0);
-    options.memtable_factory =
-      std::make_shared<rocksdb::PrefixHashRepFactory>(prefix_extractor);
+    options.memtable_factory.reset(
+        NewHashSkipListRepFactory(prefix_extractor));
   }
 
   Status s = DB::Open(options, kDbName, &db);
@@ -11,7 +11,6 @@
 #include "util/testharness.h"
 
 DEFINE_bool(use_prefix_hash_memtable, true, "");
-DEFINE_bool(use_nolock_version, true, "");
 DEFINE_bool(trigger_deadlock, false,
             "issue delete in range scan to trigger PrefixHashMap deadlock");
 DEFINE_uint64(bucket_count, 100000, "number of buckets");
@@ -109,14 +108,8 @@ class PrefixTest {
     if (FLAGS_use_prefix_hash_memtable) {
       auto prefix_extractor = NewFixedPrefixTransform(8);
       options.prefix_extractor = prefix_extractor;
-      if (FLAGS_use_nolock_version) {
-        options.memtable_factory.reset(NewHashSkipListRepFactory(
-            prefix_extractor, FLAGS_bucket_count));
-      } else {
-        options.memtable_factory =
-          std::make_shared<rocksdb::PrefixHashRepFactory>(
-            prefix_extractor, FLAGS_bucket_count, FLAGS_num_locks);
-      }
+      options.memtable_factory.reset(NewHashSkipListRepFactory(
+          prefix_extractor, FLAGS_bucket_count));
     }
 
     Status s = DB::Open(options, kDbName, &db);