make hash_link_list Node's key space consecutively follow the Node at the end
Summary: per sdong's request, this will help processor prefetching in the n->key case.

Test Plan: make all check

Reviewers: sdong, haobo, igor

Reviewed By: sdong

CC: leveldb

Differential Revision: https://reviews.facebook.net/D17415
parent 318eace49d
commit c90d446ee7
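The heart of the change is visible in the hash_linklist_rep.cc hunks below: Node no longer stores a const char* key pointer; instead the key bytes are laid out immediately after the node, via a zero-length char key[0] member and a single arena allocation of sizeof(Node) + len, so the link and the key it guards sit in consecutive memory. A minimal standalone sketch of that layout trick, assuming a toy SimpleArena in place of RocksDB's Arena (names here are illustrative, not the actual RocksDB code):

#include <cassert>
#include <cstddef>
#include <cstring>
#include <new>
#include <string>
#include <vector>

// Toy stand-in for RocksDB's Arena: hands out raw buffers and frees them
// all at once when it goes out of scope.
class SimpleArena {
 public:
  ~SimpleArena() {
    for (char* p : blocks_) delete[] p;
  }
  char* AllocateAligned(size_t len) {
    char* p = new char[len];  // operator new[] returns suitably aligned memory
    blocks_.push_back(p);
    return p;
  }

 private:
  std::vector<char*> blocks_;
};

// The node keeps only the link; the key bytes start right after the struct.
// char key[0] is a zero-length array (GCC/Clang extension, as in the commit),
// so node header and key share one allocation and, usually, one cache line.
struct Node {
  Node* next;
  char key[0];
};

// Allocate sizeof(Node) + key size in one shot, placement-new the node at the
// front, and copy the key into the trailing bytes.
Node* NewNode(SimpleArena* arena, const std::string& key) {
  char* mem = arena->AllocateAligned(sizeof(Node) + key.size() + 1);
  Node* n = new (mem) Node();
  n->next = nullptr;
  std::memcpy(n->key, key.data(), key.size());
  n->key[key.size()] = '\0';
  return n;
}

int main() {
  SimpleArena arena;
  Node* head = nullptr;
  const std::vector<std::string> keys = {"c", "b", "a"};
  for (const std::string& k : keys) {
    Node* n = NewNode(&arena, k);
    n->next = head;  // push onto a singly linked bucket list
    head = n;
  }
  // Walking the list reads n->next and n->key from the same allocation,
  // which is what makes hardware prefetching on n->key effective.
  for (Node* n = head; n != nullptr; n = n->next) {
    assert(std::strlen(n->key) == 1);
  }
  return 0;
}

HashLinkListRep::Allocate in the diff below does essentially this, except the buffer is handed back to MemTable::Add through the new Allocate(len, &buf) interface and the node itself is returned as the opaque KeyHandle.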
@@ -1162,8 +1162,8 @@ class Benchmark {
fresh_db = true;
if (num_threads > 1) {
fprintf(stderr, "filluniquerandom multithreaded not supported"
" set --threads=1");
exit(1);
", use 1 thread");
num_threads = 1;
}
method = &Benchmark::WriteUniqueRandom;
} else if (name == Slice("overwrite")) {
@@ -142,6 +142,11 @@ Slice MemTableRep::UserKey(const char* key) const {
return Slice(slice.data(), slice.size() - 8);
}

KeyHandle MemTableRep::Allocate(const size_t len, char** buf) {
*buf = arena_->Allocate(len);
return static_cast<KeyHandle>(*buf);
}

// Encode a suitable internal key target for "target" and return it.
// Uses *scratch as scratch space, and the returned pointer will point
// into this scratch space.
@@ -243,7 +248,9 @@ void MemTable::Add(SequenceNumber s, ValueType type,
const size_t encoded_len =
VarintLength(internal_key_size) + internal_key_size +
VarintLength(val_size) + val_size;
char* buf = arena_.Allocate(encoded_len);
char* buf = nullptr;
KeyHandle handle = table_->Allocate(encoded_len, &buf);
assert(buf != nullptr);
char* p = EncodeVarint32(buf, internal_key_size);
memcpy(p, key.data(), key_size);
p += key_size;
@@ -252,7 +259,7 @@ void MemTable::Add(SequenceNumber s, ValueType type,
p = EncodeVarint32(p, val_size);
memcpy(p, value.data(), val_size);
assert((unsigned)(p + val_size - buf) == (unsigned)encoded_len);
table_->Insert(buf);
table_->Insert(handle);

if (prefix_bloom_) {
assert(prefix_extractor_);
@@ -45,6 +45,8 @@ class LookupKey;
class Slice;
class SliceTransform;

typedef void* KeyHandle;

class MemTableRep {
public:
// KeyComparator provides a means to compare keys, which are internal keys
@@ -62,11 +64,19 @@ class MemTableRep {
virtual ~KeyComparator() { }
};

explicit MemTableRep(Arena* arena) : arena_(arena) {}

// Allocate a buf of len size for storing key. The idea is that a specific
// memtable representation knows its underlying data structure better. By
// allowing it to allocate memory, it can possibly put correlated stuff
// in consecutive memory area to make processor prefetching more efficient.
virtual KeyHandle Allocate(const size_t len, char** buf);

// Insert key into the collection. (The caller will pack key and value into a
// single buffer and pass that in as the parameter to Insert)
// single buffer and pass that in as the parameter to Insert).
// REQUIRES: nothing that compares equal to key is currently in the
// collection.
virtual void Insert(const char* key) = 0;
virtual void Insert(KeyHandle handle) = 0;

// Returns true iff an entry that compares equal to key is in the collection.
virtual bool Contains(const char* key) const = 0;
@@ -153,6 +163,8 @@ class MemTableRep {
// When *key is an internal key concatenated with the value, returns the
// user key.
virtual Slice UserKey(const char* key) const;

Arena* arena_;
};

// This is the base class for all factories that are used by RocksDB to create
@@ -22,12 +22,6 @@ namespace {
typedef const char* Key;

struct Node {
explicit Node(const Key& k) :
key(k) {
}

Key const key;

// Accessors/mutators for links. Wrapped in methods so we can
// add the appropriate barriers as necessary.
Node* Next() {
@@ -40,17 +34,19 @@ struct Node {
// pointer observes a fully initialized version of the inserted node.
next_.Release_Store(x);
}

// No-barrier variants that can be safely used in a few locations.
Node* NoBarrier_Next() {
return reinterpret_cast<Node*>(next_.NoBarrier_Load());
}

void NoBarrier_SetNext(Node* x) {
next_.NoBarrier_Store(x);
}

private:
private:
port::AtomicPointer next_;
public:
char key[0];
};

class HashLinkListRep : public MemTableRep {
@@ -58,7 +54,9 @@ class HashLinkListRep : public MemTableRep {
HashLinkListRep(const MemTableRep::KeyComparator& compare, Arena* arena,
const SliceTransform* transform, size_t bucket_size);

virtual void Insert(const char* key) override;
virtual KeyHandle Allocate(const size_t len, char** buf) override;

virtual void Insert(KeyHandle handle) override;

virtual bool Contains(const char* key) const override;

@@ -93,8 +91,6 @@ class HashLinkListRep : public MemTableRep {
const SliceTransform* transform_;

const MemTableRep::KeyComparator& compare_;
// immutable after construction
Arena* const arena_;

bool BucketContains(Node* head, const Slice& key) const;

@@ -114,11 +110,6 @@ class HashLinkListRep : public MemTableRep {
return GetBucket(GetHash(slice));
}

Node* NewNode(const Key& key) {
char* mem = arena_->AllocateAligned(sizeof(Node));
return new (mem) Node(key);
}

bool Equal(const Slice& a, const Key& b) const {
return (compare_(b, a) == 0);
}
@@ -318,10 +309,10 @@ class HashLinkListRep : public MemTableRep {
HashLinkListRep::HashLinkListRep(const MemTableRep::KeyComparator& compare,
Arena* arena, const SliceTransform* transform,
size_t bucket_size)
: bucket_size_(bucket_size),
: MemTableRep(arena),
bucket_size_(bucket_size),
transform_(transform),
compare_(compare),
arena_(arena) {
compare_(compare) {
char* mem = arena_->AllocateAligned(
sizeof(port::AtomicPointer) * bucket_size);

@@ -335,15 +326,22 @@ HashLinkListRep::HashLinkListRep(const MemTableRep::KeyComparator& compare,
HashLinkListRep::~HashLinkListRep() {
}

void HashLinkListRep::Insert(const char* key) {
assert(!Contains(key));
Slice internal_key = GetLengthPrefixedSlice(key);
KeyHandle HashLinkListRep::Allocate(const size_t len, char** buf) {
char* mem = arena_->AllocateAligned(sizeof(Node) + len);
Node* x = new (mem) Node();
*buf = x->key;
return static_cast<void*>(x);
}

void HashLinkListRep::Insert(KeyHandle handle) {
Node* x = static_cast<Node*>(handle);
assert(!Contains(x->key));
Slice internal_key = GetLengthPrefixedSlice(x->key);
auto transformed = GetPrefix(internal_key);
auto& bucket = buckets_[GetHash(transformed)];
Node* head = static_cast<Node*>(bucket.Acquire_Load());

if (!head) {
Node* x = NewNode(key);
// NoBarrier_SetNext() suffices since we will add a barrier when
// we publish a pointer to "x" in prev[i].
x->NoBarrier_SetNext(nullptr);
@@ -372,9 +370,7 @@ void HashLinkListRep::Insert(const char* key) {
}

// Our data structure does not allow duplicate insertion
assert(cur == nullptr || !Equal(key, cur->key));

Node* x = NewNode(key);
assert(cur == nullptr || !Equal(x->key, cur->key));

// NoBarrier_SetNext() suffices since we will add a barrier when
// we publish a pointer to "x" in prev[i].
@@ -25,7 +25,7 @@ class HashSkipListRep : public MemTableRep {
const SliceTransform* transform, size_t bucket_size,
int32_t skiplist_height, int32_t skiplist_branching_factor);

virtual void Insert(const char* key) override;
virtual void Insert(KeyHandle handle) override;

virtual bool Contains(const char* key) const override;

@@ -225,7 +225,8 @@ HashSkipListRep::HashSkipListRep(const MemTableRep::KeyComparator& compare,
Arena* arena, const SliceTransform* transform,
size_t bucket_size, int32_t skiplist_height,
int32_t skiplist_branching_factor)
: bucket_size_(bucket_size),
: MemTableRep(arena),
bucket_size_(bucket_size),
skiplist_height_(skiplist_height),
skiplist_branching_factor_(skiplist_branching_factor),
transform_(transform),
@@ -255,7 +256,8 @@ HashSkipListRep::Bucket* HashSkipListRep::GetInitializedBucket(
return bucket;
}

void HashSkipListRep::Insert(const char* key) {
void HashSkipListRep::Insert(KeyHandle handle) {
auto* key = static_cast<char*>(handle);
assert(!Contains(key));
auto transformed = transform_->Transform(UserKey(key));
auto bucket = GetInitializedBucket(transformed);
@@ -13,13 +13,13 @@ class SkipListRep : public MemTableRep {
SkipList<const char*, const MemTableRep::KeyComparator&> skip_list_;
public:
explicit SkipListRep(const MemTableRep::KeyComparator& compare, Arena* arena)
: skip_list_(compare, arena) {
: MemTableRep(arena), skip_list_(compare, arena) {
}

// Insert key into the list.
// REQUIRES: nothing that compares equal to key is currently in the list.
virtual void Insert(const char* key) override {
skip_list_.Insert(key);
virtual void Insert(KeyHandle handle) override {
skip_list_.Insert(static_cast<char*>(handle));
}

// Returns true iff an entry that compares equal to key is in the list.
@@ -30,7 +30,7 @@ class VectorRep : public MemTableRep {
// single buffer and pass that in as the parameter to Insert)
// REQUIRES: nothing that compares equal to key is currently in the
// collection.
virtual void Insert(const char* key) override;
virtual void Insert(KeyHandle handle) override;

// Returns true iff an entry that compares equal to key is in the collection.
virtual bool Contains(const char* key) const override;
@@ -106,7 +106,8 @@ class VectorRep : public MemTableRep {
const KeyComparator& compare_;
};

void VectorRep::Insert(const char* key) {
void VectorRep::Insert(KeyHandle handle) {
auto* key = static_cast<char*>(handle);
assert(!Contains(key));
WriteLock l(&rwlock_);
assert(!immutable_);
@@ -134,7 +135,8 @@ size_t VectorRep::ApproximateMemoryUsage() {
}

VectorRep::VectorRep(const KeyComparator& compare, Arena* arena, size_t count)
: bucket_(new Bucket()),
: MemTableRep(arena),
bucket_(new Bucket()),
immutable_(false),
sorted_(false),
compare_(compare) { bucket_.get()->reserve(count); }
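On the caller side shown in the MemTable::Add and memtablerep.h hunks above, the write path now follows a two-step Allocate/Insert protocol: the rep returns both a buffer to encode into and an opaque KeyHandle, and Insert takes the handle rather than the raw buffer. A rough, simplified sketch of that protocol; ToyRep and AddEntry are made-up names for illustration, not RocksDB code:

#include <cassert>
#include <cstddef>
#include <cstring>
#include <string>
#include <vector>

// Opaque handle returned by Allocate() and passed back to Insert(),
// mirroring the typedef introduced in the diff above.
typedef void* KeyHandle;

// Heavily simplified MemTableRep-like interface: the rep owns allocation so
// it can co-locate its own bookkeeping with the encoded key bytes.
class ToyRep {
 public:
  // Default policy, as in the base-class implementation above: the handle is
  // simply the allocated buffer.
  virtual KeyHandle Allocate(size_t len, char** buf) {
    char* mem = new char[len];
    *buf = mem;
    return static_cast<KeyHandle>(mem);
  }
  // Insert takes the handle, not the raw buffer.
  virtual void Insert(KeyHandle handle) {
    entries_.push_back(static_cast<char*>(handle));
  }
  virtual ~ToyRep() {
    for (char* e : entries_) delete[] e;
  }
  size_t size() const { return entries_.size(); }

 private:
  std::vector<char*> entries_;
};

// Caller-side protocol, analogous to the MemTable::Add hunk: ask the rep for
// a buffer, encode into it, then hand the opaque handle back for insertion.
void AddEntry(ToyRep* rep, const std::string& key, const std::string& value) {
  const size_t encoded_len = key.size() + 1 + value.size() + 1;
  char* buf = nullptr;
  KeyHandle handle = rep->Allocate(encoded_len, &buf);
  assert(buf != nullptr);
  std::memcpy(buf, key.data(), key.size());
  buf[key.size()] = '\0';
  std::memcpy(buf + key.size() + 1, value.data(), value.size());
  buf[encoded_len - 1] = '\0';
  rep->Insert(handle);  // insert the handle, not buf
}

int main() {
  ToyRep rep;
  AddEntry(&rep, "k1", "v1");
  AddEntry(&rep, "k2", "v2");
  assert(rep.size() == 2);
  return 0;
}

The default Allocate in the diff (handle == buffer) keeps the existing reps working unchanged, while HashLinkListRep overrides it so the handle is the Node and the returned buffer is the node's trailing key array, which is exactly what keeps link and key adjacent.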