/* Copyright 2016-2018 Joaquin M Lopez Munoz.
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * See http://www.boost.org/libs/poly_collection for library home page.
 */

#ifndef BOOST_POLY_COLLECTION_DETAIL_SPLIT_SEGMENT_HPP
#define BOOST_POLY_COLLECTION_DETAIL_SPLIT_SEGMENT_HPP

#if defined(_MSC_VER)
#pragma once
#endif

#include <boost/poly_collection/detail/segment_backend.hpp>
#include <boost/poly_collection/detail/value_holder.hpp>
#include <iterator>
#include <memory>
#include <new>
#include <utility>
#include <vector>

namespace boost{

namespace poly_collection{

namespace detail{

/* segment_backend implementation that maintains two internal vectors, one for
 * value_type's (the index) and another for the concrete elements those refer
 * to (the store).
 *
 * Requires:
 *  - [const_]base_iterator is constructible from value_type*.
 *  - value_type is copy constructible.
 *  - Model::make_value_type(x) returns a value_type created from a reference
 *    to the concrete type.
 *
 * Conversion from base_iterator to local_iterator<Concrete> requires accessing
 * value_type internal info, so the end() base_iterator has to be made to point
 * to a valid element of index, which implies size(index)=size(store)+1. This
 * slightly complicates the memory management.
 */
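
/* Illustrative sketch (not part of the original header): with three elements
 * already stored, and writing slot k for s.data()[k] and v(x) for
 * Model::make_value_type(x), the two vectors relate as follows:
 *
 *   store s: [ holder 0 ][ holder 1 ][ holder 2 ]                  size(s)==3
 *   index i: [ v(slot 0) ][ v(slot 1) ][ v(slot 2) ][ v(slot 3) ]  size(i)==4
 *
 * The last index entry refers to the one-past-the-end store slot; this is the
 * entry end()/sentinel() point to (see build_index() and sentinel() below).
 */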

template<typename Model,typename Concrete,typename Allocator>
class split_segment:public segment_backend<Model,Allocator>
{
  using value_type=typename Model::value_type;
  using store_value_type=value_holder<Concrete>;
  using store=std::vector<
    store_value_type,
    typename std::allocator_traits<Allocator>::
      template rebind_alloc<store_value_type>
  >;
  using store_iterator=typename store::iterator;
  using const_store_iterator=typename store::const_iterator;
  using index=std::vector<
    value_type,
    typename std::allocator_traits<Allocator>::
      template rebind_alloc<value_type>
  >;
  using const_index_iterator=typename index::const_iterator;
  using segment_backend=detail::segment_backend<Model,Allocator>;
  using typename segment_backend::segment_backend_unique_ptr;
  using typename segment_backend::value_pointer;
  using typename segment_backend::const_value_pointer;
  using typename segment_backend::base_iterator;
  using typename segment_backend::const_base_iterator;
  using const_iterator=
    typename segment_backend::template const_iterator<Concrete>;
  using typename segment_backend::base_sentinel;
  using typename segment_backend::range;
  using segment_allocator_type=typename std::allocator_traits<Allocator>::
    template rebind_alloc<split_segment>;

public:
  virtual ~split_segment()=default;

  static segment_backend_unique_ptr make(const segment_allocator_type& al)
  {
    return new_(al,al);
  }

  virtual segment_backend_unique_ptr copy()const
  {
    return new_(s.get_allocator(),store{s});
  }

  virtual segment_backend_unique_ptr copy(const Allocator& al)const
  {
    return new_(al,store{s,al});
  }

  virtual segment_backend_unique_ptr empty_copy(const Allocator& al)const
  {
    return new_(al,al);
  }

  virtual segment_backend_unique_ptr move(const Allocator& al)const
  {
    return new_(al,store{std::move(s),al});
  }

  virtual bool equal(const segment_backend& x)const
  {
    return s==static_cast<const split_segment&>(x).s;
  }

  virtual Allocator     get_allocator()const noexcept
                        {return s.get_allocator();}
  virtual base_iterator begin()const noexcept{return nv_begin();}
  base_iterator         nv_begin()const noexcept
                        {return base_iterator{value_ptr(i.data())};}
  virtual base_iterator end()const noexcept{return nv_end();}
  base_iterator         nv_end()const noexcept
                        {return base_iterator{value_ptr(i.data()+s.size())};}
  virtual bool          empty()const noexcept{return nv_empty();}
  bool                  nv_empty()const noexcept{return s.empty();}
  virtual std::size_t   size()const noexcept{return nv_size();}
  std::size_t           nv_size()const noexcept{return s.size();}
  virtual std::size_t   max_size()const noexcept{return nv_max_size();}
  std::size_t           nv_max_size()const noexcept{return s.max_size()-1;}
  virtual std::size_t   capacity()const noexcept{return nv_capacity();}
  std::size_t           nv_capacity()const noexcept{return s.capacity();}

  virtual base_sentinel reserve(std::size_t n){return nv_reserve(n);}

  base_sentinel nv_reserve(std::size_t n)
  {
    bool rebuild=n>s.capacity();
    i.reserve(n+1);
    s.reserve(n);
    if(rebuild)rebuild_index();
    return sentinel();
  };

  virtual base_sentinel shrink_to_fit(){return nv_shrink_to_fit();}

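
  /* Descriptive note (added for exposition): shrink_to_fit may move the
   * store's buffer; when s.data() changes, a fresh index with matching
   * capacity is swapped in and rebuilt against the new addresses. The empty
   * case swaps in a store with capacity>=1 so that s.data()!=nullptr keeps
   * holding (the sentinel index entry needs a valid slot to refer to). If
   * anything throws, rebuild_index() restores the index invariant before
   * rethrowing.
   */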
  base_sentinel nv_shrink_to_fit()
  {
    try{
      auto p=s.data();
      if(!s.empty())s.shrink_to_fit();
      else{
        store ss{s.get_allocator()};
        ss.reserve(1); /* --> s.data()!=nullptr */
        s.swap(ss);
      }
      if(p!=s.data()){
        index ii{{},i.get_allocator()};
        ii.reserve(s.capacity()+1);
        i.swap(ii);
        build_index();
      }
    }
    catch(...){
      rebuild_index();
      throw;
    }
    return sentinel();
  }

  template<typename Iterator,typename... Args>
  range nv_emplace(Iterator p,Args&&... args)
  {
    auto q=prereserve(p);
    auto it=s.emplace(
      iterator_from(q),
      value_holder_emplacing_ctor,std::forward<Args>(args)...);
    push_index_entry();
    return range_from(it);
  }

  template<typename... Args>
  range nv_emplace_back(Args&&... args)
  {
    prereserve();
    s.emplace_back(value_holder_emplacing_ctor,std::forward<Args>(args)...);
    push_index_entry();
    return range_from(s.size()-1);
  }

  virtual range push_back(const_value_pointer x)
  {return nv_push_back(const_concrete_ref(x));}

  range nv_push_back(const Concrete& x)
  {
    prereserve();
    s.emplace_back(x);
    push_index_entry();
    return range_from(s.size()-1);
  }

  virtual range push_back_move(value_pointer x)
  {return nv_push_back(std::move(concrete_ref(x)));}

  range nv_push_back(Concrete&& x)
  {
    prereserve();
    s.emplace_back(std::move(x));
    push_index_entry();
    return range_from(s.size()-1);
  }

  virtual range insert(const_base_iterator p,const_value_pointer x)
  {return nv_insert(const_iterator(p),const_concrete_ref(x));}

  range nv_insert(const_iterator p,const Concrete& x)
  {
    p=prereserve(p);
    auto it=s.emplace(iterator_from(p),x);
    push_index_entry();
    return range_from(it);
  }

  virtual range insert_move(const_base_iterator p,value_pointer x)
  {return nv_insert(const_iterator(p),std::move(concrete_ref(x)));}

  range nv_insert(const_iterator p,Concrete&& x)
  {
    p=prereserve(p);
    auto it=s.emplace(iterator_from(p),std::move(x));
    push_index_entry();
    return range_from(it);
  }

  template<typename InputIterator>
  range nv_insert(InputIterator first,InputIterator last)
  {
    return nv_insert(
      const_iterator(concrete_ptr(s.data()+s.size())),first,last);
  }

  template<typename InputIterator>
  range nv_insert(const_iterator p,InputIterator first,InputIterator last)
  {
    return insert(
      p,first,last,
      typename std::iterator_traits<InputIterator>::iterator_category{});
  }

  virtual range erase(const_base_iterator p)
  {return nv_erase(const_iterator(p));}

  range nv_erase(const_iterator p)
  {
    pop_index_entry();
    return range_from(s.erase(iterator_from(p)));
  }

  virtual range erase(const_base_iterator first,const_base_iterator last)
  {return nv_erase(const_iterator(first),const_iterator(last));}

  range nv_erase(const_iterator first,const_iterator last)
  {
    std::size_t n=s.size();
    auto it=s.erase(iterator_from(first),iterator_from(last));
    pop_index_entry(n-s.size());
    return range_from(it);
  }

  virtual range erase_till_end(const_base_iterator first)
  {
    std::size_t n=s.size();
    auto it=s.erase(iterator_from(first),s.end());
    pop_index_entry(n-s.size());
    return range_from(it);
  }

  virtual range erase_from_begin(const_base_iterator last)
  {
    std::size_t n=s.size();
    auto it=s.erase(s.begin(),iterator_from(last));
    pop_index_entry(n-s.size());
    return range_from(it);
  }

  base_sentinel clear()noexcept{return nv_clear();}

  base_sentinel nv_clear()noexcept
  {
    s.clear();
    for(std::size_t n=i.size()-1;n--;)i.pop_back();
    return sentinel();
  }

private:
  template<typename... Args>
  static segment_backend_unique_ptr new_(
    segment_allocator_type al,Args&&... args)
  {
    auto p=std::allocator_traits<segment_allocator_type>::allocate(al,1);
    try{
      ::new ((void*)p) split_segment{std::forward<Args>(args)...};
    }
    catch(...){
      std::allocator_traits<segment_allocator_type>::deallocate(al,p,1);
      throw;
    }
    return {p,&delete_};
  }

  static void delete_(segment_backend* p)
  {
    auto q=static_cast<split_segment*>(p);
    auto al=segment_allocator_type{q->s.get_allocator()};
    q->~split_segment();
    std::allocator_traits<segment_allocator_type>::deallocate(al,q,1);
  }

  split_segment(const Allocator& al):
    s{typename store::allocator_type{al}},
    i{{},typename index::allocator_type{al}}
  {
    s.reserve(1); /* --> s.data()!=nullptr */
    build_index();
  }

  split_segment(store&& s_):
    s{std::move(s_)},i{{},typename index::allocator_type{s.get_allocator()}}
  {
    s.reserve(1); /* --> s.data()!=nullptr */
    build_index();
  }

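
  /* Descriptive note (added for exposition): the prereserve overloads make
   * sure there is room for one (or m) more element(s) before an insertion.
   * When they need to call expand(), both vectors may reallocate, so the
   * given position is remembered as an offset and translated back into the
   * possibly relocated buffer before being returned.
   */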
  void prereserve()
  {
    if(s.size()==s.capacity())expand();
  }

  const_base_iterator prereserve(const_base_iterator p)
  {
    if(s.size()==s.capacity()){
      auto n=p-i.data();
      expand();
      return const_base_iterator{i.data()+n};
    }
    else return p;
  }

  const_iterator prereserve(const_iterator p)
  {
    if(s.size()==s.capacity()){
      auto n=p-const_concrete_ptr(s.data());
      expand();
      return const_concrete_ptr(s.data())+n;
    }
    else return p;
  }

  const_iterator prereserve(const_iterator p,std::size_t m)
  {
    if(s.size()+m>s.capacity()){
      auto n=p-const_concrete_ptr(s.data());
      expand(m);
      return const_concrete_ptr(s.data())+n;
    }
    else return p;
  }

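
  /* Descriptive note (added for exposition): expand() grows the store to
   * roughly 1.5x its current size, falling back to size()+1 when the store
   * is very small or a 50% increase would exceed max_size()-1; the index is
   * reserved with one extra slot for the sentinel entry. Reserving may move
   * the store's buffer, which is why rebuild_index() is called afterwards.
   */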
  void expand()
  {
    std::size_t c=
      s.size()<=1||(s.max_size()-1-s.size())/2<s.size()?
        s.size()+1:
        s.size()+s.size()/2;
    i.reserve(c+1);
    s.reserve(c);
    rebuild_index();
  }

  void expand(std::size_t m)
  {
    i.reserve(s.size()+m+1);
    s.reserve(s.size()+m);
    rebuild_index();
  }

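
  /* Descriptive note (added for exposition): build_index runs n up to and
   * including s.size(), so besides one entry per stored element it appends a
   * sentinel entry referring to the one-past-the-end store slot, keeping
   * size(i)==size(s)+1 as described at the top of the file. After an
   * insertion or erasure that only shifts elements within the store buffer,
   * push_index_entry()/pop_index_entry() merely adjust the entry count; a
   * full rebuild happens only where the buffer may have moved (nv_reserve,
   * nv_shrink_to_fit, expand).
   */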
  void build_index(std::size_t start=0)
  {
    for(std::size_t n=start,m=s.size();n<=m;++n){
      i.push_back(Model::make_value_type(concrete_ref(s.data()[n])));
    };
  }

  void rebuild_index()
  {
    i.clear();
    build_index();
  }

  void push_index_entry()
  {
    build_index(s.size());
  }

  void pop_index_entry(std::size_t n=1)
  {
    while(n--)i.pop_back();
  }

  static Concrete& concrete_ref(value_pointer p)noexcept
  {
    return *static_cast<Concrete*>(p);
  }

  static Concrete& concrete_ref(store_value_type& r)noexcept
  {
    return *concrete_ptr(&r);
  }

  static const Concrete& const_concrete_ref(const_value_pointer p)noexcept
  {
    return *static_cast<const Concrete*>(p);
  }

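
  /* Descriptive note (added for exposition): concrete_ptr recovers the
   * Concrete object held by a value_holder. The cast goes through the
   * value_holder_base<Concrete> subobject, on the assumption (see
   * value_holder.hpp) that the object is constructed in that subobject's
   * internal storage, so its address coincides with the base's.
   */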
  static Concrete* concrete_ptr(store_value_type* p)noexcept
  {
    return reinterpret_cast<Concrete*>(
      static_cast<value_holder_base<Concrete>*>(p));
  }

  static const Concrete* const_concrete_ptr(const store_value_type* p)noexcept
  {
    return concrete_ptr(const_cast<store_value_type*>(p));
  }

  static value_type* value_ptr(const value_type* p)noexcept
  {
    return const_cast<value_type*>(p);
  }

  /* It would have sufficed if iterator_from returned const_store_iterator,
   * except for the fact that some old versions of libstdc++ claiming to be
   * C++11 compliant nevertheless do not provide std::vector modifier ops
   * taking const_iterator's.
   */

  store_iterator iterator_from(const_base_iterator p)
  {
    return s.begin()+(p-i.data());
  }

  store_iterator iterator_from(const_iterator p)
  {
    return s.begin()+(p-const_concrete_ptr(s.data()));
  }

  base_sentinel sentinel()const noexcept
  {
    return base_iterator{value_ptr(i.data()+s.size())};
  }

  range range_from(const_store_iterator it)const
  {
    return {base_iterator{value_ptr(i.data()+(it-s.begin()))},sentinel()};
  }

  range range_from(std::size_t n)const
  {
    return {base_iterator{value_ptr(i.data()+n)},sentinel()};
  }

  template<typename InputIterator>
  range insert(
    const_iterator p,InputIterator first,InputIterator last,
    std::input_iterator_tag)
  {
    std::size_t n=0;
    for(;first!=last;++first,++n,++p){
      p=prereserve(p);
      s.emplace(iterator_from(p),*first);
      push_index_entry();
    }
    return range_from(iterator_from(p-n));
  }

  template<typename InputIterator>
  range insert(
    const_iterator p,InputIterator first,InputIterator last,
    std::forward_iterator_tag)
  {
    auto n=s.size();
    auto m=static_cast<std::size_t>(std::distance(first,last));
    if(m){
      p=prereserve(p,m);
      try{
        s.insert(iterator_from(p),first,last);
      }
      catch(...){
        build_index(n+1);
        throw;
      }
      build_index(n+1);
    }
    return range_from(iterator_from(p));
  }

  store s;
  index i;
};

} /* namespace poly_collection::detail */

} /* namespace poly_collection */

} /* namespace boost */

#endif