// Copyright (c) 2013, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
//

#include "util/hash_skiplist_rep.h"

#include "rocksdb/memtablerep.h"
#include "util/arena.h"
#include "rocksdb/slice.h"
#include "rocksdb/slice_transform.h"
#include "port/port.h"
#include "port/atomic_pointer.h"
#include "util/murmurhash.h"
#include "db/memtable.h"
#include "db/skiplist.h"

namespace rocksdb {
namespace {
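
// A prefix-hashed memtable representation: each key's user key is run
// through the caller-supplied SliceTransform, the resulting prefix is hashed
// into a fixed-size array of buckets, and each bucket is a skip list holding
// all keys that share that prefix. Point lookups and prefix scans touch a
// single bucket; a full ordered scan has to merge every bucket (see
// GetIterator() below).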
class HashSkipListRep : public MemTableRep {
 public:
  HashSkipListRep(MemTableRep::KeyComparator& compare, Arena* arena,
                  const SliceTransform* transform, size_t bucket_size,
                  int32_t skiplist_height, int32_t skiplist_branching_factor);

  virtual void Insert(const char* key) override;

  virtual bool Contains(const char* key) const override;

  virtual size_t ApproximateMemoryUsage() override;

  virtual ~HashSkipListRep();

  virtual MemTableRep::Iterator* GetIterator() override;

  virtual MemTableRep::Iterator* GetIterator(const Slice& slice) override;

  virtual MemTableRep::Iterator* GetPrefixIterator(const Slice& prefix)
      override;

  virtual MemTableRep::Iterator* GetDynamicPrefixIterator() override;

 private:
  friend class DynamicIterator;
  typedef SkipList<const char*, MemTableRep::KeyComparator&> Bucket;

  size_t bucket_size_;

  const int32_t skiplist_height_;
  const int32_t skiplist_branching_factor_;

  // Maps slices (which are transformed user keys) to buckets of keys sharing
  // the same transform.
  port::AtomicPointer* buckets_;

  // The user-supplied transform whose domain is the user keys.
  const SliceTransform* transform_;

  MemTableRep::KeyComparator& compare_;
  // immutable after construction
  Arena* const arena_;

  inline size_t GetHash(const Slice& slice) const {
    return MurmurHash(slice.data(), slice.size(), 0) % bucket_size_;
  }
  inline Bucket* GetBucket(size_t i) const {
    return static_cast<Bucket*>(buckets_[i].Acquire_Load());
  }
  inline Bucket* GetBucket(const Slice& slice) const {
    return GetBucket(GetHash(slice));
  }
  // Get a bucket from buckets_. If the bucket hasn't been initialized yet,
  // initialize it before returning.
  Bucket* GetInitializedBucket(const Slice& transformed);
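
  // Iterator over a single bucket's skip list. own_list_ records whether the
  // iterator owns list_ and must delete it on destruction: GetIterator()
  // hands over a freshly merged list, while the prefix iterators borrow
  // buckets that belong to the memtable.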
  class Iterator : public MemTableRep::Iterator {
   public:
    explicit Iterator(Bucket* list, bool own_list = true)
        : list_(list),
          iter_(list),
          own_list_(own_list) {}

    virtual ~Iterator() {
      // if we own the list, we should also delete it
      if (own_list_) {
        assert(list_ != nullptr);
        delete list_;
      }
    }

    // Returns true iff the iterator is positioned at a valid node.
    virtual bool Valid() const {
      return list_ != nullptr && iter_.Valid();
    }

    // Returns the key at the current position.
    // REQUIRES: Valid()
    virtual const char* key() const {
      assert(Valid());
      return iter_.key();
    }

    // Advances to the next position.
    // REQUIRES: Valid()
    virtual void Next() {
      assert(Valid());
      iter_.Next();
    }

    // Advances to the previous position.
    // REQUIRES: Valid()
    virtual void Prev() {
      assert(Valid());
      iter_.Prev();
    }

    // Advance to the first entry with a key >= target
    virtual void Seek(const Slice& internal_key, const char* memtable_key) {
      if (list_ != nullptr) {
        const char* encoded_key =
            (memtable_key != nullptr) ?
                memtable_key : EncodeKey(&tmp_, internal_key);
        iter_.Seek(encoded_key);
      }
    }

    // Position at the first entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    virtual void SeekToFirst() {
      if (list_ != nullptr) {
        iter_.SeekToFirst();
      }
    }

    // Position at the last entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    virtual void SeekToLast() {
      if (list_ != nullptr) {
        iter_.SeekToLast();
      }
    }

   protected:
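    // Retarget this iterator at a different bucket (which may be nullptr).
    // Any previously owned list is deleted first; the new list is never
    // owned, since it lives in the memtable's arena.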
    void Reset(Bucket* list) {
      if (own_list_) {
        assert(list_ != nullptr);
        delete list_;
      }
      list_ = list;
      iter_.SetList(list);
      own_list_ = false;
    }

   private:
    // if list_ is nullptr, we should NEVER call any methods on iter_
    // if list_ is nullptr, this Iterator is not Valid()
    Bucket* list_;
    Bucket::Iterator iter_;
    // here we track if we own list_. If we own it, we are also
    // responsible for its cleanup. This is a poor man's shared_ptr
    bool own_list_;
    std::string tmp_;       // For passing to EncodeKey
  };
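
  // Iterator returned by GetDynamicPrefixIterator(). It starts out attached
  // to no bucket; every Seek() retargets it (via Reset) at the bucket for
  // the seek key's prefix before delegating to the base-class Seek.
  // Total-order positioning (SeekToFirst/SeekToLast) is not supported and
  // simply invalidates the iterator.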
  class DynamicIterator : public HashSkipListRep::Iterator {
   public:
    explicit DynamicIterator(const HashSkipListRep& memtable_rep)
        : HashSkipListRep::Iterator(nullptr, false),
          memtable_rep_(memtable_rep) {}

    // Advance to the first entry with a key >= target
    virtual void Seek(const Slice& k, const char* memtable_key) {
      auto transformed = memtable_rep_.transform_->Transform(ExtractUserKey(k));
      Reset(memtable_rep_.GetBucket(transformed));
      HashSkipListRep::Iterator::Seek(k, memtable_key);
    }

    // Position at the first entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    virtual void SeekToFirst() {
      // Prefix iterator does not support total order.
      // We simply set the iterator to invalid state
      Reset(nullptr);
    }

    // Position at the last entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    virtual void SeekToLast() {
      // Prefix iterator does not support total order.
      // We simply set the iterator to invalid state
      Reset(nullptr);
    }

   private:
    // the underlying memtable
    const HashSkipListRep& memtable_rep_;
  };

  // This is used when there wasn't a bucket. It is cheaper than
  // instantiating an empty bucket over which to iterate.
  class EmptyIterator : public MemTableRep::Iterator {
   public:
    EmptyIterator() { }
    virtual bool Valid() const {
      return false;
    }
    virtual const char* key() const {
      assert(false);
      return nullptr;
    }
    virtual void Next() { }
    virtual void Prev() { }
    virtual void Seek(const Slice& internal_key,
                      const char* memtable_key) { }
    virtual void SeekToFirst() { }
    virtual void SeekToLast() { }
  };
};

HashSkipListRep::HashSkipListRep(MemTableRep::KeyComparator& compare,
                                 Arena* arena, const SliceTransform* transform,
                                 size_t bucket_size, int32_t skiplist_height,
                                 int32_t skiplist_branching_factor)
    : bucket_size_(bucket_size),
      skiplist_height_(skiplist_height),
      skiplist_branching_factor_(skiplist_branching_factor),
      transform_(transform),
      compare_(compare),
      arena_(arena) {
  buckets_ = new port::AtomicPointer[bucket_size];

  for (size_t i = 0; i < bucket_size_; ++i) {
    buckets_[i].NoBarrier_Store(nullptr);
  }
}

HashSkipListRep::~HashSkipListRep() {
  delete[] buckets_;
}
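
// Lazily create the bucket's skip list the first time a key hashing to it is
// inserted. The bucket is placement-new'ed into arena memory and published
// with Release_Store, which pairs with the Acquire_Load in GetBucket() so
// that a reader observing the pointer also observes a fully constructed
// skip list.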
HashSkipListRep::Bucket* HashSkipListRep::GetInitializedBucket(
    const Slice& transformed) {
  size_t hash = GetHash(transformed);
  auto bucket = GetBucket(hash);
  if (bucket == nullptr) {
    auto addr = arena_->AllocateAligned(sizeof(Bucket));
    bucket = new (addr) Bucket(compare_, arena_, skiplist_height_,
                               skiplist_branching_factor_);
    buckets_[hash].Release_Store(static_cast<void*>(bucket));
  }
  return bucket;
}

void HashSkipListRep::Insert(const char* key) {
  assert(!Contains(key));
  auto transformed = transform_->Transform(UserKey(key));
  auto bucket = GetInitializedBucket(transformed);
  bucket->Insert(key);
}

bool HashSkipListRep::Contains(const char* key) const {
  auto transformed = transform_->Transform(UserKey(key));
  auto bucket = GetBucket(transformed);
  if (bucket == nullptr) {
    return false;
  }
  return bucket->Contains(key);
}
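
// Note: this reports only sizeof(buckets_), i.e. the size of the pointer
// member; the skip lists themselves are carved out of arena_, whose usage
// the owning memtable tracks separately.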
size_t HashSkipListRep::ApproximateMemoryUsage() {
  return sizeof(buckets_);
}
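
// Total-order iterator: builds a brand-new skip list holding every key from
// every bucket, so the cost is proportional to the number of entries in the
// memtable. The returned Iterator owns the merged list and deletes it when
// it is destroyed.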
MemTableRep::Iterator* HashSkipListRep::GetIterator() {
  auto list = new Bucket(compare_, arena_);
  for (size_t i = 0; i < bucket_size_; ++i) {
    auto bucket = GetBucket(i);
    if (bucket != nullptr) {
      Bucket::Iterator itr(bucket);
      for (itr.SeekToFirst(); itr.Valid(); itr.Next()) {
        list->Insert(itr.key());
      }
    }
  }
  return new Iterator(list);
}

MemTableRep::Iterator* HashSkipListRep::GetPrefixIterator(const Slice& prefix) {
  auto bucket = GetBucket(prefix);
  if (bucket == nullptr) {
    return new EmptyIterator();
  }
  return new Iterator(bucket, false);
}

MemTableRep::Iterator* HashSkipListRep::GetIterator(const Slice& slice) {
  return GetPrefixIterator(transform_->Transform(slice));
}

MemTableRep::Iterator* HashSkipListRep::GetDynamicPrefixIterator() {
  return new DynamicIterator(*this);
}

}  // anon namespace

MemTableRep* HashSkipListRepFactory::CreateMemTableRep(
    MemTableRep::KeyComparator& compare, Arena* arena) {
  return new HashSkipListRep(compare, arena, transform_, bucket_count_,
                             skiplist_height_, skiplist_branching_factor_);
}

MemTableRepFactory* NewHashSkipListRepFactory(
    const SliceTransform* transform, size_t bucket_count,
    int32_t skiplist_height, int32_t skiplist_branching_factor) {
  return new HashSkipListRepFactory(transform, bucket_count,
                                    skiplist_height, skiplist_branching_factor);
}
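
// Example wiring (an illustrative sketch, not taken from this file): the
// factory is normally installed through Options::memtable_factory, with the
// same SliceTransform used as the DB's prefix extractor.
// NewFixedPrefixTransform() comes from rocksdb/slice_transform.h; the numeric
// arguments below are just example values mirroring the parameters declared
// above.
//
//   const SliceTransform* transform = NewFixedPrefixTransform(8);
//   Options options;
//   options.memtable_factory.reset(NewHashSkipListRepFactory(
//       transform, 1000000 /* bucket_count */,
//       4 /* skiplist_height */, 4 /* skiplist_branching_factor */));
//   // options.prefix_extractor should point at the same transform so that
//   // reads and the memtable agree on what a "prefix" is.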

}  // namespace rocksdb