// Source: ceph/src/rocksdb/memtable/vectorrep.cc (mirrored via git.proxmox.com gitweb)
1 // Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
2 // This source code is licensed under both the GPLv2 (found in the
3 // COPYING file in the root directory) and Apache 2.0 License
4 // (found in the LICENSE.Apache file in the root directory).
7 #include "rocksdb/memtablerep.h"
9 #include <unordered_set>
13 #include <type_traits>
15 #include "util/arena.h"
16 #include "db/memtable.h"
17 #include "memtable/stl_wrappers.h"
18 #include "port/port.h"
19 #include "util/mutexlock.h"
24 using namespace stl_wrappers
;
26 class VectorRep
: public MemTableRep
{
28 VectorRep(const KeyComparator
& compare
, Allocator
* allocator
, size_t count
);
30 // Insert key into the collection. (The caller will pack key and value into a
31 // single buffer and pass that in as the parameter to Insert)
32 // REQUIRES: nothing that compares equal to key is currently in the
34 virtual void Insert(KeyHandle handle
) override
;
36 // Returns true iff an entry that compares equal to key is in the collection.
37 virtual bool Contains(const char* key
) const override
;
39 virtual void MarkReadOnly() override
;
41 virtual size_t ApproximateMemoryUsage() override
;
43 virtual void Get(const LookupKey
& k
, void* callback_args
,
44 bool (*callback_func
)(void* arg
,
45 const char* entry
)) override
;
47 virtual ~VectorRep() override
{ }
49 class Iterator
: public MemTableRep::Iterator
{
50 class VectorRep
* vrep_
;
51 std::shared_ptr
<std::vector
<const char*>> bucket_
;
52 std::vector
<const char*>::const_iterator
mutable cit_
;
53 const KeyComparator
& compare_
;
54 std::string tmp_
; // For passing to EncodeKey
58 explicit Iterator(class VectorRep
* vrep
,
59 std::shared_ptr
<std::vector
<const char*>> bucket
,
60 const KeyComparator
& compare
);
62 // Initialize an iterator over the specified collection.
63 // The returned iterator is not valid.
64 // explicit Iterator(const MemTableRep* collection);
65 virtual ~Iterator() override
{ };
67 // Returns true iff the iterator is positioned at a valid node.
68 virtual bool Valid() const override
;
70 // Returns the key at the current position.
72 virtual const char* key() const override
;
74 // Advances to the next position.
76 virtual void Next() override
;
78 // Advances to the previous position.
80 virtual void Prev() override
;
82 // Advance to the first entry with a key >= target
83 virtual void Seek(const Slice
& user_key
, const char* memtable_key
) override
;
85 // Advance to the first entry with a key <= target
86 virtual void SeekForPrev(const Slice
& user_key
,
87 const char* memtable_key
) override
;
89 // Position at the first entry in collection.
90 // Final state of iterator is Valid() iff collection is not empty.
91 virtual void SeekToFirst() override
;
93 // Position at the last entry in collection.
94 // Final state of iterator is Valid() iff collection is not empty.
95 virtual void SeekToLast() override
;
98 // Return an iterator over the keys in this representation.
99 virtual MemTableRep::Iterator
* GetIterator(Arena
* arena
) override
;
102 friend class Iterator
;
103 typedef std::vector
<const char*> Bucket
;
104 std::shared_ptr
<Bucket
> bucket_
;
105 mutable port::RWMutex rwlock_
;
108 const KeyComparator
& compare_
;
111 void VectorRep::Insert(KeyHandle handle
) {
112 auto* key
= static_cast<char*>(handle
);
113 WriteLock
l(&rwlock_
);
115 bucket_
->push_back(key
);
118 // Returns true iff an entry that compares equal to key is in the collection.
119 bool VectorRep::Contains(const char* key
) const {
120 ReadLock
l(&rwlock_
);
121 return std::find(bucket_
->begin(), bucket_
->end(), key
) != bucket_
->end();
124 void VectorRep::MarkReadOnly() {
125 WriteLock
l(&rwlock_
);
129 size_t VectorRep::ApproximateMemoryUsage() {
131 sizeof(bucket_
) + sizeof(*bucket_
) +
134 std::remove_reference
<decltype(*bucket_
)>::type::value_type
138 VectorRep::VectorRep(const KeyComparator
& compare
, Allocator
* allocator
,
140 : MemTableRep(allocator
),
141 bucket_(new Bucket()),
145 bucket_
.get()->reserve(count
);
148 VectorRep::Iterator::Iterator(class VectorRep
* vrep
,
149 std::shared_ptr
<std::vector
<const char*>> bucket
,
150 const KeyComparator
& compare
)
153 cit_(bucket_
->end()),
157 void VectorRep::Iterator::DoSort() const {
158 // vrep is non-null means that we are working on an immutable memtable
159 if (!sorted_
&& vrep_
!= nullptr) {
160 WriteLock
l(&vrep_
->rwlock_
);
161 if (!vrep_
->sorted_
) {
162 std::sort(bucket_
->begin(), bucket_
->end(), Compare(compare_
));
163 cit_
= bucket_
->begin();
164 vrep_
->sorted_
= true;
169 std::sort(bucket_
->begin(), bucket_
->end(), Compare(compare_
));
170 cit_
= bucket_
->begin();
174 assert(vrep_
== nullptr || vrep_
->sorted_
);
177 // Returns true iff the iterator is positioned at a valid node.
178 bool VectorRep::Iterator::Valid() const {
180 return cit_
!= bucket_
->end();
183 // Returns the key at the current position.
185 const char* VectorRep::Iterator::key() const {
190 // Advances to the next position.
192 void VectorRep::Iterator::Next() {
194 if (cit_
== bucket_
->end()) {
200 // Advances to the previous position.
202 void VectorRep::Iterator::Prev() {
204 if (cit_
== bucket_
->begin()) {
205 // If you try to go back from the first element, the iterator should be
206 // invalidated. So we set it to past-the-end. This means that you can
207 // treat the container circularly.
208 cit_
= bucket_
->end();
214 // Advance to the first entry with a key >= target
215 void VectorRep::Iterator::Seek(const Slice
& user_key
,
216 const char* memtable_key
) {
218 // Do binary search to find first value not less than the target
219 const char* encoded_key
=
220 (memtable_key
!= nullptr) ? memtable_key
: EncodeKey(&tmp_
, user_key
);
221 cit_
= std::equal_range(bucket_
->begin(),
224 [this] (const char* a
, const char* b
) {
225 return compare_(a
, b
) < 0;
229 // Advance to the first entry with a key <= target
230 void VectorRep::Iterator::SeekForPrev(const Slice
& /*user_key*/,
231 const char* /*memtable_key*/) {
235 // Position at the first entry in collection.
236 // Final state of iterator is Valid() iff collection is not empty.
237 void VectorRep::Iterator::SeekToFirst() {
239 cit_
= bucket_
->begin();
242 // Position at the last entry in collection.
243 // Final state of iterator is Valid() iff collection is not empty.
244 void VectorRep::Iterator::SeekToLast() {
246 cit_
= bucket_
->end();
247 if (bucket_
->size() != 0) {
252 void VectorRep::Get(const LookupKey
& k
, void* callback_args
,
253 bool (*callback_func
)(void* arg
, const char* entry
)) {
255 VectorRep
* vector_rep
;
256 std::shared_ptr
<Bucket
> bucket
;
260 vector_rep
= nullptr;
261 bucket
.reset(new Bucket(*bucket_
)); // make a copy
263 VectorRep::Iterator
iter(vector_rep
, immutable_
? bucket_
: bucket
, compare_
);
264 rwlock_
.ReadUnlock();
266 for (iter
.Seek(k
.user_key(), k
.memtable_key().data());
267 iter
.Valid() && callback_func(callback_args
, iter
.key()); iter
.Next()) {
271 MemTableRep::Iterator
* VectorRep::GetIterator(Arena
* arena
) {
273 if (arena
!= nullptr) {
274 mem
= arena
->AllocateAligned(sizeof(Iterator
));
276 ReadLock
l(&rwlock_
);
277 // Do not sort here. The sorting would be done the first time
278 // a Seek is performed on the iterator.
280 if (arena
== nullptr) {
281 return new Iterator(this, bucket_
, compare_
);
283 return new (mem
) Iterator(this, bucket_
, compare_
);
286 std::shared_ptr
<Bucket
> tmp
;
287 tmp
.reset(new Bucket(*bucket_
)); // make a copy
288 if (arena
== nullptr) {
289 return new Iterator(nullptr, tmp
, compare_
);
291 return new (mem
) Iterator(nullptr, tmp
, compare_
);
297 MemTableRep
* VectorRepFactory::CreateMemTableRep(
298 const MemTableRep::KeyComparator
& compare
, Allocator
* allocator
,
299 const SliceTransform
*, Logger
* /*logger*/) {
300 return new VectorRep(compare
, allocator
, count_
);
302 } // namespace rocksdb
303 #endif // ROCKSDB_LITE