Separate the aligned and unaligned memory allocation

Summary: Serve aligned and unaligned allocations from opposite ends of each arena block, tracked by two allocation pointers, so that interleaving the two kinds of requests no longer wastes bytes on alignment padding.

Test Plan: run all unit tests.

Reviewers: haobo, sdong

Reviewed By: haobo

CC: leveldb

Differential Revision: https://reviews.facebook.net/D15027
Author:  kailiu
Date:    2014-01-08 15:06:07 -08:00
Parent:  50994bf699
Commit:  12b6d2b839

4 changed files with 153 additions and 98 deletions
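
The scheme in brief: each arena block is filled from both ends. Unaligned requests are carved from the back of the block and aligned requests from the front, so alignment padding ("slop") is only ever paid by an aligned request that actually needs it, instead of every time the two kinds interleave. Below is a minimal, self-contained sketch of the idea; the TwoEndedBlock type is hypothetical (it mirrors the member names in this diff but handles only a single block):

#include <cassert>
#include <cstddef>
#include <cstdint>

// Hypothetical single-block illustration, not the RocksDB class.
class TwoEndedBlock {
 public:
  explicit TwoEndedBlock(size_t size)
      : block_(new char[size]),
        aligned_alloc_ptr_(block_),           // grows up from the front
        unaligned_alloc_ptr_(block_ + size),  // grows down from the back
        alloc_bytes_remaining_(size) {}
  ~TwoEndedBlock() { delete[] block_; }

  // Unaligned data comes off the back end and never needs padding.
  char* Allocate(size_t bytes) {
    if (bytes > alloc_bytes_remaining_) return nullptr;
    unaligned_alloc_ptr_ -= bytes;
    alloc_bytes_remaining_ -= bytes;
    return unaligned_alloc_ptr_;
  }

  // Aligned data comes off the front end; slop is needed only when a
  // previous aligned request left the front pointer misaligned.
  char* AllocateAligned(size_t bytes) {
    const size_t align = sizeof(void*);
    size_t mod = reinterpret_cast<uintptr_t>(aligned_alloc_ptr_) & (align - 1);
    size_t slop = (mod == 0 ? 0 : align - mod);
    if (bytes + slop > alloc_bytes_remaining_) return nullptr;
    char* result = aligned_alloc_ptr_ + slop;
    aligned_alloc_ptr_ += bytes + slop;
    alloc_bytes_remaining_ -= bytes + slop;
    return result;
  }

 private:
  char* block_;
  char* aligned_alloc_ptr_;
  char* unaligned_alloc_ptr_;
  size_t alloc_bytes_remaining_;
};

int main() {
  TwoEndedBlock block(4096);
  char* a = block.AllocateAligned(24);  // front of the block, no slop
  char* u = block.Allocate(3);          // back of the block, no padding
  char* b = block.AllocateAligned(8);   // front is still aligned: no slop
  assert(reinterpret_cast<uintptr_t>(a) % sizeof(void*) == 0);
  assert(reinterpret_cast<uintptr_t>(b) % sizeof(void*) == 0);
  assert(u > b);  // the two ends grow toward each other
  return 0;
}

The diffs below apply the same idea across many blocks, falling back to a fresh block (or a dedicated one, for large requests) when the two ends meet.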

util/arena_impl.cc

@@ -8,71 +8,86 @@
 // found in the LICENSE file. See the AUTHORS file for names of contributors.
 
 #include "util/arena_impl.h"
+#include <algorithm>
 
 namespace rocksdb {
 
-ArenaImpl::ArenaImpl(size_t block_size) {
-  if (block_size < kMinBlockSize) {
-    block_size_ = kMinBlockSize;
-  } else if (block_size > kMaxBlockSize) {
-    block_size_ = kMaxBlockSize;
-  } else {
-    block_size_ = block_size;
+const size_t ArenaImpl::kMinBlockSize = 4096;
+const size_t ArenaImpl::kMaxBlockSize = 2u << 30;
+static const int kAlignUnit = sizeof(void*);
+
+size_t OptimizeBlockSize(size_t block_size) {
+  // Make sure block_size is in the optimal range
+  block_size = std::max(ArenaImpl::kMinBlockSize, block_size);
+  block_size = std::min(ArenaImpl::kMaxBlockSize, block_size);
+
+  // Make sure block_size is a multiple of kAlignUnit
+  if (block_size % kAlignUnit != 0) {
+    block_size = (1 + block_size / kAlignUnit) * kAlignUnit;
   }
 
-  blocks_memory_ = 0;
-  alloc_ptr_ = nullptr;  // First allocation will allocate a block
-  alloc_bytes_remaining_ = 0;
+  return block_size;
+}
+
+ArenaImpl::ArenaImpl(size_t block_size)
+    : kBlockSize(OptimizeBlockSize(block_size)) {
+  assert(kBlockSize >= kMinBlockSize && kBlockSize <= kMaxBlockSize &&
+         kBlockSize % kAlignUnit == 0);
 }
 
 ArenaImpl::~ArenaImpl() {
-  for (size_t i = 0; i < blocks_.size(); i++) {
-    delete[] blocks_[i];
+  for (const auto& block : blocks_) {
+    delete[] block;
   }
 }
 
-char* ArenaImpl::AllocateFallback(size_t bytes) {
-  if (bytes > block_size_ / 4) {
+char* ArenaImpl::AllocateFallback(size_t bytes, bool aligned) {
+  if (bytes > kBlockSize / 4) {
    // Object is more than a quarter of our block size. Allocate it separately
    // to avoid wasting too much space in leftover bytes.
-    char* result = AllocateNewBlock(bytes);
-    return result;
+    return AllocateNewBlock(bytes);
   }
 
   // We waste the remaining space in the current block.
-  alloc_ptr_ = AllocateNewBlock(block_size_);
-  alloc_bytes_remaining_ = block_size_;
+  auto block_head = AllocateNewBlock(kBlockSize);
+  alloc_bytes_remaining_ = kBlockSize - bytes;
 
-  char* result = alloc_ptr_;
-  alloc_ptr_ += bytes;
-  alloc_bytes_remaining_ -= bytes;
-  return result;
+  if (aligned) {
+    aligned_alloc_ptr_ = block_head + bytes;
+    unaligned_alloc_ptr_ = block_head + kBlockSize;
+    return block_head;
+  } else {
+    aligned_alloc_ptr_ = block_head;
+    unaligned_alloc_ptr_ = block_head + kBlockSize - bytes;
+    return unaligned_alloc_ptr_;
+  }
 }
 
 char* ArenaImpl::AllocateAligned(size_t bytes) {
-  const int align = sizeof(void*);     // We'll align to pointer size
-  assert((align & (align - 1)) == 0);  // Pointer size should be a power of 2
-  size_t current_mod = reinterpret_cast<uintptr_t>(alloc_ptr_) & (align - 1);
-  size_t slop = (current_mod == 0 ? 0 : align - current_mod);
+  assert((kAlignUnit & (kAlignUnit - 1)) ==
+         0);  // Pointer size should be a power of 2
+  size_t current_mod =
+      reinterpret_cast<uintptr_t>(aligned_alloc_ptr_) & (kAlignUnit - 1);
+  size_t slop = (current_mod == 0 ? 0 : kAlignUnit - current_mod);
   size_t needed = bytes + slop;
   char* result;
   if (needed <= alloc_bytes_remaining_) {
-    result = alloc_ptr_ + slop;
-    alloc_ptr_ += needed;
+    result = aligned_alloc_ptr_ + slop;
+    aligned_alloc_ptr_ += needed;
     alloc_bytes_remaining_ -= needed;
   } else {
     // AllocateFallback always returns aligned memory
-    result = AllocateFallback(bytes);
+    result = AllocateFallback(bytes, true /* aligned */);
   }
-  assert((reinterpret_cast<uintptr_t>(result) & (align - 1)) == 0);
+  assert((reinterpret_cast<uintptr_t>(result) & (kAlignUnit - 1)) == 0);
   return result;
 }
 
 char* ArenaImpl::AllocateNewBlock(size_t block_bytes) {
-  char* result = new char[block_bytes];
+  char* block = new char[block_bytes];
   blocks_memory_ += block_bytes;
-  blocks_.push_back(result);
-  return result;
+  blocks_.push_back(block);
+  return block;
 }
 
 }  // namespace rocksdb
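
A hypothetical spot check of the clamping and rounding in OptimizeBlockSize; the expected values assume sizeof(void*) == 8 and the constants defined above:

#include <cassert>
#include <cstddef>
#include "util/arena_impl.h"

int main() {
  using rocksdb::OptimizeBlockSize;
  assert(OptimizeBlockSize(1000) == 4096);  // clamped up to kMinBlockSize
  assert(OptimizeBlockSize(5000) == 5000);  // already a multiple of 8
  assert(OptimizeBlockSize(5003) == 5008);  // rounded up to a multiple of 8
  assert(OptimizeBlockSize(size_t(3) << 30) ==
         (size_t(2) << 30));                // clamped down to kMaxBlockSize
  return 0;
}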

util/arena_impl.h

@@ -22,49 +22,54 @@ namespace rocksdb {
 
 class ArenaImpl : public Arena {
  public:
+  // No copying allowed
+  ArenaImpl(const ArenaImpl&) = delete;
+  void operator=(const ArenaImpl&) = delete;
+
+  static const size_t kMinBlockSize;
+  static const size_t kMaxBlockSize;
+
   explicit ArenaImpl(size_t block_size = kMinBlockSize);
   virtual ~ArenaImpl();
 
-  virtual char* Allocate(size_t bytes);
+  virtual char* Allocate(size_t bytes) override;
 
-  virtual char* AllocateAligned(size_t bytes);
+  virtual char* AllocateAligned(size_t bytes) override;
 
   // Returns an estimate of the total memory usage of data allocated
-  // by the arena (including space allocated but not yet used for user
+  // by the arena (excluding space allocated but not yet used for future
   // allocations).
   //
   // TODO: Do we need to exclude space allocated but not used?
   virtual const size_t ApproximateMemoryUsage() {
-    return blocks_memory_ + blocks_.capacity() * sizeof(char*);
+    return blocks_memory_ + blocks_.capacity() * sizeof(char*) -
+           alloc_bytes_remaining_;
   }
 
-  virtual const size_t MemoryAllocatedBytes() {
+  virtual const size_t MemoryAllocatedBytes() override {
     return blocks_memory_;
   }
 
  private:
-  char* AllocateFallback(size_t bytes);
+  // Number of bytes allocated in one block
+  const size_t kBlockSize;
+  // Array of new[] allocated memory blocks
+  typedef std::vector<char*> Blocks;
+  Blocks blocks_;
+
+  // Stats for the current active block.
+  // For each block, we allocate aligned memory chunks from one end and
+  // unaligned memory chunks from the other end. Otherwise the memory
+  // wasted on alignment would be higher if we allocated both types of
+  // memory from one direction.
+  char* unaligned_alloc_ptr_ = nullptr;
+  char* aligned_alloc_ptr_ = nullptr;
+  // How many bytes are left in the currently active block?
+  size_t alloc_bytes_remaining_ = 0;
+
+  char* AllocateFallback(size_t bytes, bool aligned);
   char* AllocateNewBlock(size_t block_bytes);
 
-  static const size_t kMinBlockSize = 4096;
-  static const size_t kMaxBlockSize = 2 << 30;
-
-  // Number of bytes allocated in one block
-  size_t block_size_;
-
-  // Allocation state
-  char* alloc_ptr_;
-  size_t alloc_bytes_remaining_;
-
-  // Array of new[] allocated memory blocks
-  std::vector<char*> blocks_;
-
   // Bytes of memory in blocks allocated so far
-  size_t blocks_memory_;
-
-  // No copying allowed
-  ArenaImpl(const ArenaImpl&);
-  void operator=(const ArenaImpl&);
+  size_t blocks_memory_ = 0;
 };
 
 inline char* ArenaImpl::Allocate(size_t bytes) {

@@ -73,12 +78,16 @@ inline char* ArenaImpl::Allocate(size_t bytes) {
   // them for our internal use).
   assert(bytes > 0);
   if (bytes <= alloc_bytes_remaining_) {
-    char* result = alloc_ptr_;
-    alloc_ptr_ += bytes;
+    unaligned_alloc_ptr_ -= bytes;
     alloc_bytes_remaining_ -= bytes;
-    return result;
+    return unaligned_alloc_ptr_;
   }
-  return AllocateFallback(bytes);
+  return AllocateFallback(bytes, false /* unaligned */);
 }
 
+// Check and adjust the block_size so that the return value is
+// 1. in the range of [kMinBlockSize, kMaxBlockSize], and
+// 2. a multiple of the align unit.
+extern size_t OptimizeBlockSize(size_t block_size);
+
 }  // namespace rocksdb
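
A hypothetical usage sketch against the interface above, checking that the two allocation kinds are served from opposite ends of the same block:

#include <cassert>
#include <cstdint>
#include "util/arena_impl.h"

int main() {
  rocksdb::ArenaImpl arena(4096);
  char* aligned = arena.AllocateAligned(16);  // front of the first block
  char* unaligned = arena.Allocate(5);        // back of the same block
  assert(reinterpret_cast<uintptr_t>(aligned) % sizeof(void*) == 0);
  assert(unaligned > aligned);  // the two ends grow toward each other
  return 0;
}

Note that ApproximateMemoryUsage() now subtracts alloc_bytes_remaining_, so it tracks bytes actually handed out rather than bytes reserved, which is what the new test below exercises.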

util/arena_test.cc

@@ -57,8 +57,33 @@ TEST(ArenaImplTest, MemoryAllocatedBytes) {
   ASSERT_EQ(arena_impl.MemoryAllocatedBytes(), expected_memory_allocated);
 }
 
+// Make sure we didn't count the allocated-but-not-used memory space in
+// Arena::ApproximateMemoryUsage()
+TEST(ArenaImplTest, ApproximateMemoryUsageTest) {
+  const size_t kBlockSize = 4096;
+  const size_t kEntrySize = kBlockSize / 8;
+  ArenaImpl arena(kBlockSize);
+  ASSERT_EQ(0, arena.ApproximateMemoryUsage());
+
+  auto num_blocks = kBlockSize / kEntrySize;
+
+  // First allocation
+  arena.AllocateAligned(kEntrySize);
+  auto mem_usage = arena.MemoryAllocatedBytes();
+  ASSERT_EQ(mem_usage, kBlockSize);
+  auto usage = arena.ApproximateMemoryUsage();
+  ASSERT_LT(usage, mem_usage);
+  for (size_t i = 1; i < num_blocks; ++i) {
+    arena.AllocateAligned(kEntrySize);
+    ASSERT_EQ(mem_usage, arena.MemoryAllocatedBytes());
+    ASSERT_EQ(arena.ApproximateMemoryUsage(), usage + kEntrySize);
+    usage = arena.ApproximateMemoryUsage();
+  }
+  ASSERT_GT(usage, mem_usage);
+}
+
 TEST(ArenaImplTest, Simple) {
-  std::vector<std::pair<size_t, char*> > allocated;
+  std::vector<std::pair<size_t, char*>> allocated;
   ArenaImpl arena_impl;
   const int N = 100000;
   size_t bytes = 0;

@@ -68,8 +93,9 @@ TEST(ArenaImplTest, Simple) {
     if (i % (N / 10) == 0) {
       s = i;
     } else {
-      s = rnd.OneIn(4000) ? rnd.Uniform(6000) :
-          (rnd.OneIn(10) ? rnd.Uniform(100) : rnd.Uniform(20));
+      s = rnd.OneIn(4000)
+              ? rnd.Uniform(6000)
+              : (rnd.OneIn(10) ? rnd.Uniform(100) : rnd.Uniform(20));
     }
     if (s == 0) {
       // Our arena disallows size 0 allocations.

@@ -89,7 +115,7 @@ TEST(ArenaImplTest, Simple) {
     bytes += s;
     allocated.push_back(std::make_pair(s, r));
     ASSERT_GE(arena_impl.ApproximateMemoryUsage(), bytes);
-    if (i > N/10) {
+    if (i > N / 10) {
       ASSERT_LE(arena_impl.ApproximateMemoryUsage(), bytes * 1.10);
     }
   }
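
For concreteness: with kBlockSize = 4096 and kEntrySize = 512, the ApproximateMemoryUsageTest loop performs eight aligned allocations that all fit in the first block. MemoryAllocatedBytes() stays at 4096 throughout, while ApproximateMemoryUsage() starts near 512 (plus the small capacity term for the block-pointer vector) and grows by exactly 512 per allocation as alloc_bytes_remaining_ shrinks. Once the block is exhausted, the usage estimate finally exceeds the 4096 allocated bytes, which is what the closing ASSERT_GT verifies.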