// Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
// This source code is licensed under both the GPLv2 (found in the
// COPYING file in the root directory) and Apache 2.0 License
// (found in the LICENSE.Apache file in the root directory).
//
#ifndef ROCKSDB_LITE
#include <algorithm>
#include <memory>
#include <set>
#include <type_traits>
#include <unordered_set>

#include "db/memtable.h"
#include "memory/arena.h"
#include "memtable/stl_wrappers.h"
#include "port/port.h"
#include "rocksdb/memtablerep.h"
#include "rocksdb/utilities/options_type.h"
#include "util/mutexlock.h"

namespace ROCKSDB_NAMESPACE {
namespace {

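// VectorRep is a MemTableRep backed by a plain std::vector<const char*>:
// Insert() just appends, and the vector is sorted lazily by the first
// Seek()/SeekToFirst()/SeekToLast() (or Valid()) call on an iterator. Once
// the memtable has been marked read-only, the sorted order is cached in the
// shared bucket; before that, each iterator sorts its own private copy.
// Inserts are therefore cheap, at the cost of expensive reads on a
// still-mutable memtable.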
class VectorRep : public MemTableRep {
 public:
  VectorRep(const KeyComparator& compare, Allocator* allocator, size_t count);

  // Insert key into the collection. (The caller will pack key and value into a
  // single buffer and pass that in as the parameter to Insert)
  // REQUIRES: nothing that compares equal to key is currently in the
  // collection.
  void Insert(KeyHandle handle) override;

  // Returns true iff an entry that compares equal to key is in the collection.
  bool Contains(const char* key) const override;

  void MarkReadOnly() override;

  size_t ApproximateMemoryUsage() override;

  void Get(const LookupKey& k, void* callback_args,
           bool (*callback_func)(void* arg, const char* entry)) override;

  ~VectorRep() override {}

  class Iterator : public MemTableRep::Iterator {
    class VectorRep* vrep_;
    std::shared_ptr<std::vector<const char*>> bucket_;
    std::vector<const char*>::const_iterator mutable cit_;
    const KeyComparator& compare_;
    std::string tmp_;  // For passing to EncodeKey
    bool mutable sorted_;
    void DoSort() const;

   public:
    explicit Iterator(class VectorRep* vrep,
                      std::shared_ptr<std::vector<const char*>> bucket,
                      const KeyComparator& compare);

    // Initialize an iterator over the specified collection.
    // The returned iterator is not valid.
    // explicit Iterator(const MemTableRep* collection);
    ~Iterator() override{};

    // Returns true iff the iterator is positioned at a valid node.
    bool Valid() const override;

    // Returns the key at the current position.
    // REQUIRES: Valid()
    const char* key() const override;

    // Advances to the next position.
    // REQUIRES: Valid()
    void Next() override;

    // Advances to the previous position.
    // REQUIRES: Valid()
    void Prev() override;

    // Advance to the first entry with a key >= target
    void Seek(const Slice& user_key, const char* memtable_key) override;

    // Advance to the first entry with a key <= target
    void SeekForPrev(const Slice& user_key, const char* memtable_key) override;

    // Position at the first entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    void SeekToFirst() override;

    // Position at the last entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    void SeekToLast() override;
  };

  // Return an iterator over the keys in this representation.
  MemTableRep::Iterator* GetIterator(Arena* arena) override;

 private:
  friend class Iterator;
  using Bucket = std::vector<const char*>;
  std::shared_ptr<Bucket> bucket_;  // entries in insertion order
  mutable port::RWMutex rwlock_;    // guards bucket_, immutable_ and sorted_
  bool immutable_;                  // set once by MarkReadOnly()
  bool sorted_;                     // bucket_ has been sorted in place
  const KeyComparator& compare_;
};

void VectorRep::Insert(KeyHandle handle) {
  auto* key = static_cast<char*>(handle);
  WriteLock l(&rwlock_);
  assert(!immutable_);
  bucket_->push_back(key);
}

// Returns true iff an entry that compares equal to key is in the collection.
// Note: std::find compares the raw const char* pointers, so in practice this
// only finds the exact handle that was inserted earlier.
bool VectorRep::Contains(const char* key) const {
  ReadLock l(&rwlock_);
  return std::find(bucket_->begin(), bucket_->end(), key) != bucket_->end();
}

void VectorRep::MarkReadOnly() {
  WriteLock l(&rwlock_);
  immutable_ = true;
}
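// MarkReadOnly() is invoked when the memtable is switched out and becomes
// immutable. After that, Insert() would trip its assert, and iterators may
// sort the shared bucket in place (see Iterator::DoSort()).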

size_t VectorRep::ApproximateMemoryUsage() {
  return sizeof(bucket_) + sizeof(*bucket_) +
         bucket_->size() *
             sizeof(
                 std::remove_reference<decltype(*bucket_)>::type::value_type);
}
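// The estimate above covers only the vector of pointers; the encoded
// key/value buffers are allocated from the memtable's Allocator and are
// accounted for there rather than here.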

VectorRep::VectorRep(const KeyComparator& compare, Allocator* allocator,
                     size_t count)
    : MemTableRep(allocator),
      bucket_(new Bucket()),
      immutable_(false),
      sorted_(false),
      compare_(compare) {
  bucket_.get()->reserve(count);
}

VectorRep::Iterator::Iterator(class VectorRep* vrep,
                              std::shared_ptr<std::vector<const char*>> bucket,
                              const KeyComparator& compare)
    : vrep_(vrep),
      bucket_(bucket),
      cit_(bucket_->end()),
      compare_(compare),
      sorted_(false) {}

void VectorRep::Iterator::DoSort() const {
  // A non-null vrep_ means we are iterating an immutable memtable, so the
  // shared bucket can be sorted in place under the rep's write lock.
  if (!sorted_ && vrep_ != nullptr) {
    WriteLock l(&vrep_->rwlock_);
    if (!vrep_->sorted_) {
      std::sort(bucket_->begin(), bucket_->end(),
                stl_wrappers::Compare(compare_));
      cit_ = bucket_->begin();
      vrep_->sorted_ = true;
    }
    sorted_ = true;
  }
  if (!sorted_) {
    std::sort(bucket_->begin(), bucket_->end(),
              stl_wrappers::Compare(compare_));
    cit_ = bucket_->begin();
    sorted_ = true;
  }
  assert(sorted_);
  assert(vrep_ == nullptr || vrep_->sorted_);
}
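// Two paths: an iterator over an immutable memtable (vrep_ != nullptr)
// shares the rep's bucket and sorts it once under the write lock, so later
// iterators find vrep_->sorted_ already set; an iterator created while the
// memtable was still mutable owns a private copy and sorts only that copy.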

// Returns true iff the iterator is positioned at a valid node.
bool VectorRep::Iterator::Valid() const {
  DoSort();
  return cit_ != bucket_->end();
}

// Returns the key at the current position.
// REQUIRES: Valid()
const char* VectorRep::Iterator::key() const {
  assert(sorted_);
  return *cit_;
}

// Advances to the next position.
// REQUIRES: Valid()
void VectorRep::Iterator::Next() {
  assert(sorted_);
  if (cit_ == bucket_->end()) {
    return;
  }
  ++cit_;
}

// Advances to the previous position.
// REQUIRES: Valid()
void VectorRep::Iterator::Prev() {
  assert(sorted_);
  if (cit_ == bucket_->begin()) {
    // If you try to go back from the first element, the iterator should be
    // invalidated. So we set it to past-the-end. This means that you can
    // treat the container circularly.
    cit_ = bucket_->end();
  } else {
    --cit_;
  }
}

// Advance to the first entry with a key >= target
void VectorRep::Iterator::Seek(const Slice& user_key,
                               const char* memtable_key) {
  DoSort();
  // Do binary search to find first value not less than the target
  const char* encoded_key =
      (memtable_key != nullptr) ? memtable_key : EncodeKey(&tmp_, user_key);
  cit_ = std::equal_range(bucket_->begin(), bucket_->end(), encoded_key,
                          [this](const char* a, const char* b) {
                            return compare_(a, b) < 0;
                          })
             .first;
}

// Advance to the first entry with a key <= target.
// Not supported by VectorRep: this asserts in debug builds and is a no-op
// otherwise.
void VectorRep::Iterator::SeekForPrev(const Slice& /*user_key*/,
                                      const char* /*memtable_key*/) {
  assert(false);
}

// Position at the first entry in collection.
// Final state of iterator is Valid() iff collection is not empty.
void VectorRep::Iterator::SeekToFirst() {
  DoSort();
  cit_ = bucket_->begin();
}

// Position at the last entry in collection.
// Final state of iterator is Valid() iff collection is not empty.
void VectorRep::Iterator::SeekToLast() {
  DoSort();
  cit_ = bucket_->end();
  if (bucket_->size() != 0) {
    --cit_;
  }
}

void VectorRep::Get(const LookupKey& k, void* callback_args,
                    bool (*callback_func)(void* arg, const char* entry)) {
  rwlock_.ReadLock();
  VectorRep* vector_rep;
  std::shared_ptr<Bucket> bucket;
  if (immutable_) {
    vector_rep = this;
  } else {
    vector_rep = nullptr;
    bucket.reset(new Bucket(*bucket_));  // make a copy
  }
  VectorRep::Iterator iter(vector_rep, immutable_ ? bucket_ : bucket, compare_);
  rwlock_.ReadUnlock();

  for (iter.Seek(k.user_key(), k.memtable_key().data());
       iter.Valid() && callback_func(callback_args, iter.key()); iter.Next()) {
  }
}
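// Get() snapshots the bucket under the read lock (copying it if the memtable
// is still mutable) and then scans forward from Seek(); each lookup on a
// mutable VectorRep therefore pays an O(n) copy plus the O(n log n) sort in
// DoSort(), which is why this rep suits write-heavy, read-light workloads.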

MemTableRep::Iterator* VectorRep::GetIterator(Arena* arena) {
  char* mem = nullptr;
  if (arena != nullptr) {
    mem = arena->AllocateAligned(sizeof(Iterator));
  }
  ReadLock l(&rwlock_);
  // Do not sort here. The sorting would be done the first time
  // a Seek is performed on the iterator.
  if (immutable_) {
    if (arena == nullptr) {
      return new Iterator(this, bucket_, compare_);
    } else {
      return new (mem) Iterator(this, bucket_, compare_);
    }
  } else {
    std::shared_ptr<Bucket> tmp;
    tmp.reset(new Bucket(*bucket_));  // make a copy
    if (arena == nullptr) {
      return new Iterator(nullptr, tmp, compare_);
    } else {
      return new (mem) Iterator(nullptr, tmp, compare_);
    }
  }
}
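// When an Arena is supplied, the iterator is placement-new'd into arena-owned
// memory, so the caller must not delete it directly; its storage is reclaimed
// with the arena. Without an arena, the caller owns the heap-allocated
// iterator.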
}  // namespace

static std::unordered_map<std::string, OptionTypeInfo> vector_rep_table_info = {
    {"count",
     {0, OptionType::kSizeT, OptionVerificationType::kNormal,
      OptionTypeFlags::kNone}},
};
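// The registered "count" option maps onto VectorRepFactory::count_ (offset 0
// of the pointer handed to RegisterOptions below), allowing the reserve size
// to be read and set by name through the options framework.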

VectorRepFactory::VectorRepFactory(size_t count) : count_(count) {
  RegisterOptions("VectorRepFactoryOptions", &count_, &vector_rep_table_info);
}

MemTableRep* VectorRepFactory::CreateMemTableRep(
    const MemTableRep::KeyComparator& compare, Allocator* allocator,
    const SliceTransform*, Logger* /*logger*/) {
  return new VectorRep(compare, allocator, count_);
}
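// Illustrative sketch (not part of this translation unit): how an application
// would typically select this representation. VectorRepFactory is declared in
// rocksdb/memtablerep.h; `count` is the number of slots to reserve up front.
// The vector rep does not support concurrent memtable writes, so
// allow_concurrent_memtable_write has to be turned off.
//
//   ROCKSDB_NAMESPACE::Options options;
//   options.memtable_factory.reset(
//       new ROCKSDB_NAMESPACE::VectorRepFactory(/*count=*/100000));
//   options.allow_concurrent_memtable_write = false;
//   // ... open the DB with `options` as usual.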
}  // namespace ROCKSDB_NAMESPACE
#endif  // ROCKSDB_LITE