/* boost/poly_collection/detail/split_segment.hpp — vendored Boost source
 * (ceph.git copy, imported with the update to v12.2.3 sources). */
/* Copyright 2016-2017 Joaquin M Lopez Munoz.
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * See http://www.boost.org/libs/poly_collection for library home page.
 */
8
9 #ifndef BOOST_POLY_COLLECTION_DETAIL_SPLIT_SEGMENT_HPP
10 #define BOOST_POLY_COLLECTION_DETAIL_SPLIT_SEGMENT_HPP
11
12 #if defined(_MSC_VER)
13 #pragma once
14 #endif
15
16 #include <boost/poly_collection/detail/newdelete_allocator.hpp>
17 #include <boost/poly_collection/detail/segment_backend.hpp>
18 #include <boost/poly_collection/detail/value_holder.hpp>
19 #include <iterator>
20 #include <memory>
21 #include <new>
22 #include <utility>
23 #include <vector>
24
25 namespace boost{
26
27 namespace poly_collection{
28
29 namespace detail{
30
/* segment_backend implementation that maintains two internal vectors, one for
 * value_type's (the index) and another for the concrete elements those refer
 * to (the store).
 *
 * Requires:
 *  - [const_]base_iterator is constructible from value_type*.
 *  - value_type is copy constructible.
 *  - Model::make_value_type(x) returns a value_type created from a reference
 *    to the concrete type.
 *
 * Conversion from base_iterator to local_iterator<Concrete> requires accessing
 * value_type internal info, so the end() base_iterator has to be made to point
 * to a valid element of index, which implies size(index)=size(store)+1. This
 * slightly complicates the memory management.
 */
46
template<typename Model,typename Concrete,typename Allocator>
class split_segment:public segment_backend<Model>
{
  using value_type=typename Model::value_type;
  using store_value_type=value_holder<Concrete>;
  /* store: the concrete elements, each wrapped in a value_holder */
  using store=std::vector<
    store_value_type,
    value_holder_allocator_adaptor<
      typename std::allocator_traits<Allocator>::
        template rebind_alloc<store_value_type>
    >
  >;
  using store_iterator=typename store::iterator;
  using const_store_iterator=typename store::const_iterator;
  /* index: value_type's referring into store; always kept at
   * size(store)+1 entries so that end() points to a valid index entry
   * (see file comment above)
   */
  using index=std::vector<
    value_type,
    newdelete_allocator_adaptor<
      typename std::allocator_traits<Allocator>::
        template rebind_alloc<value_type>
    >
  >;
  using const_index_iterator=typename index::const_iterator;
  using segment_backend=detail::segment_backend<Model>;
  using typename segment_backend::segment_backend_unique_ptr;
  using typename segment_backend::value_pointer;
  using typename segment_backend::const_value_pointer;
  using typename segment_backend::base_iterator;
  using typename segment_backend::const_base_iterator;
  using const_iterator=
    typename segment_backend::template const_iterator<Concrete>;
  using typename segment_backend::base_sentinel;
  using typename segment_backend::range;
  using segment_allocator_type=newdelete_allocator_adaptor<
    typename std::allocator_traits<Allocator>::
      template rebind_alloc<split_segment>
  >;

public:
  virtual ~split_segment()=default;

  /* deep copy: clones the store; the index is rebuilt by the store&&
   * constructor below */
  virtual segment_backend_unique_ptr copy()const
  {
    return new_(s.get_allocator(),store{s});
  }

  /* fresh empty segment sharing this segment's allocator */
  virtual segment_backend_unique_ptr empty_copy()const
  {
    return new_(s.get_allocator(),s.get_allocator());
  }

  virtual bool equal(const segment_backend& x)const
  {
    /* assumes x is a split_segment for the same Concrete type (the cast
     * below is unchecked) — presumably guaranteed by the caller */
    return s==static_cast<const split_segment&>(x).s;
  }

  /* Iteration: base_iterator's point into the index, not the store.
   * nv_* members are the statically-typed (non-virtual) counterparts of
   * the virtual interface. */
  virtual base_iterator begin()const noexcept{return nv_begin();}
  base_iterator nv_begin()const noexcept
  {return base_iterator{value_ptr(i.data())};}
  virtual base_iterator end()const noexcept{return nv_end();}
  base_iterator nv_end()const noexcept
  {return base_iterator{value_ptr(i.data()+s.size())};}
  virtual bool empty()const noexcept{return nv_empty();}
  bool nv_empty()const noexcept{return s.empty();}
  virtual std::size_t size()const noexcept{return nv_size();}
  std::size_t nv_size()const noexcept{return s.size();}
  virtual std::size_t max_size()const noexcept{return nv_max_size();}
  /* -1 because the index always needs one entry more than the store */
  std::size_t nv_max_size()const noexcept{return s.max_size()-1;}
  virtual std::size_t capacity()const noexcept{return nv_capacity();}
  std::size_t nv_capacity()const noexcept{return s.capacity();}

  virtual base_sentinel reserve(std::size_t n){return nv_reserve(n);}

  base_sentinel nv_reserve(std::size_t n)
  {
    bool rebuild=n>s.capacity(); /* will the store's buffer move? */
    i.reserve(n+1);              /* +1: the extra entry backing end() */
    s.reserve(n);
    if(rebuild)rebuild_index();  /* index entries refer into s's buffer */
    return sentinel();
  };

  virtual base_sentinel shrink_to_fit(){return nv_shrink_to_fit();}

  base_sentinel nv_shrink_to_fit()
  {
    try{
      auto p=s.data();
      if(!s.empty())s.shrink_to_fit();
      else{
        /* an empty store must still have a non-null data() for the index
         * to refer to, so swap in a fresh store of capacity 1 instead of
         * shrinking to zero */
        store ss{s.get_allocator()};
        ss.reserve(1); /* --> s.data()!=nullptr */
        s.swap(ss);
      }
      if(p!=s.data()){
        /* store buffer moved: shrink the index alongside and rebuild */
        index ii{{},i.get_allocator()};
        ii.reserve(s.capacity()+1);
        i.swap(ii);
        build_index();
      }
    }
    catch(...){
      rebuild_index(); /* restore index consistency before propagating */
      throw;
    }
    return sentinel();
  }

  /* Modifiers follow a common pattern: ensure capacity first (prereserve,
   * which also re-bases the insertion point if reallocation occurs),
   * mutate the store, then bring the index back in sync. */

  template<typename Iterator,typename... Args>
  range nv_emplace(Iterator p,Args&&... args)
  {
    auto q=prereserve(p);
    auto it=s.emplace(
      iterator_from(q),
      value_holder_emplacing_ctor,std::forward<Args>(args)...);
    push_index_entry();
    return range_from(it);
  }

  template<typename... Args>
  range nv_emplace_back(Args&&... args)
  {
    prereserve();
    s.emplace_back(value_holder_emplacing_ctor,std::forward<Args>(args)...);
    push_index_entry();
    return range_from(s.size()-1);
  }

  virtual range push_back(const_value_pointer x)
  {return nv_push_back(const_concrete_ref(x));}

  range nv_push_back(const Concrete& x)
  {
    prereserve();
    s.emplace_back(x);
    push_index_entry();
    return range_from(s.size()-1);
  }

  virtual range push_back_move(value_pointer x)
  {return nv_push_back(std::move(concrete_ref(x)));}

  range nv_push_back(Concrete&& x)
  {
    prereserve();
    s.emplace_back(std::move(x));
    push_index_entry();
    return range_from(s.size()-1);
  }

  virtual range insert(const_base_iterator p,const_value_pointer x)
  {return nv_insert(const_iterator(p),const_concrete_ref(x));}

  range nv_insert(const_iterator p,const Concrete& x)
  {
    p=prereserve(p);
    auto it=s.emplace(iterator_from(p),x);
    push_index_entry();
    return range_from(it);
  }

  virtual range insert_move(const_base_iterator p,value_pointer x)
  {return nv_insert(const_iterator(p),std::move(concrete_ref(x)));}

  range nv_insert(const_iterator p,Concrete&& x)
  {
    p=prereserve(p);
    auto it=s.emplace(iterator_from(p),std::move(x));
    push_index_entry();
    return range_from(it);
  }

  /* range insertion at the end */
  template<typename InputIterator>
  range nv_insert(InputIterator first,InputIterator last)
  {
    return nv_insert(
      const_iterator(concrete_ptr(s.data()+s.size())),first,last);
  }

  /* range insertion at p: dispatch on iterator category, since multi-pass
   * iterators allow reserving the exact amount up front */
  template<typename InputIterator>
  range nv_insert(const_iterator p,InputIterator first,InputIterator last)
  {
    return insert(
      p,first,last,
      typename std::iterator_traits<InputIterator>::iterator_category{});
  }

  virtual range erase(const_base_iterator p)
  {return nv_erase(const_iterator(p));}

  range nv_erase(const_iterator p)
  {
    pop_index_entry();
    return range_from(s.erase(iterator_from(p)));
  }

  virtual range erase(const_base_iterator first,const_base_iterator last)
  {return nv_erase(const_iterator(first),const_iterator(last));}

  range nv_erase(const_iterator first,const_iterator last)
  {
    std::size_t n=s.size();
    auto it=s.erase(iterator_from(first),iterator_from(last));
    pop_index_entry(n-s.size()); /* one index entry per erased element */
    return range_from(it);
  }

  virtual range erase_till_end(const_base_iterator first)
  {
    std::size_t n=s.size();
    auto it=s.erase(iterator_from(first),s.end());
    pop_index_entry(n-s.size());
    return range_from(it);
  }

  virtual range erase_from_begin(const_base_iterator last)
  {
    std::size_t n=s.size();
    auto it=s.erase(s.begin(),iterator_from(last));
    pop_index_entry(n-s.size());
    return range_from(it);
  }

  base_sentinel clear()noexcept{return nv_clear();}

  base_sentinel nv_clear()noexcept
  {
    s.clear();
    /* pop all index entries but one: the survivor backs end() */
    for(std::size_t n=i.size()-1;n--;)i.pop_back();
    return sentinel();
  }

private:
  friend Model;

  /* allocator-based construction; teardown is performed by delete_ */
  template<typename... Args>
  static segment_backend_unique_ptr new_(
    segment_allocator_type al,Args&&... args)
  {
    auto p=std::allocator_traits<segment_allocator_type>::allocate(al,1);
    try{
      ::new ((void*)p) split_segment{std::forward<Args>(args)...};
    }
    catch(...){
      std::allocator_traits<segment_allocator_type>::deallocate(al,p,1);
      throw;
    }
    return {p,&delete_};
  }

  static void delete_(segment_backend* p)
  {
    auto q=static_cast<split_segment*>(p);
    auto al=segment_allocator_type{q->s.get_allocator()};
    q->~split_segment();
    std::allocator_traits<segment_allocator_type>::deallocate(al,q,1);
  }

  split_segment(const Allocator& al):
    s{typename store::allocator_type{al}},
    i{{},typename index::allocator_type{al}}
  {
    s.reserve(1); /* --> s.data()!=nullptr */
    build_index();
  }

  split_segment(store&& s_):
    s{std::move(s_)},i{{},typename index::allocator_type{s.get_allocator()}}
  {
    s.reserve(1); /* --> s.data()!=nullptr */
    build_index();
  }

  /* Grow the store if full. The iterator-taking overloads return the
   * iterator re-based onto the (possibly reallocated) new buffer. */

  void prereserve()
  {
    if(s.size()==s.capacity())expand();
  }

  const_base_iterator prereserve(const_base_iterator p)
  {
    if(s.size()==s.capacity()){
      auto n=p-i.data();
      expand();
      return const_base_iterator{i.data()+n};
    }
    else return p;
  }

  const_iterator prereserve(const_iterator p)
  {
    if(s.size()==s.capacity()){
      auto n=p-const_concrete_ptr(s.data());
      expand();
      return const_concrete_ptr(s.data())+n;
    }
    else return p;
  }

  /* variant used for bulk insertion of m elements */
  const_iterator prereserve(const_iterator p,std::size_t m)
  {
    if(s.size()+m>s.capacity()){
      auto n=p-const_concrete_ptr(s.data());
      expand(m);
      return const_concrete_ptr(s.data())+n;
    }
    else return p;
  }

  void expand()
  {
    /* grow by ~50%, falling back to size+1 near max_size (the arithmetic
     * guards against overflow); index entries refer into the store's
     * buffer, so a reallocation forces a full index rebuild */
    std::size_t c=
      s.size()<=1||(s.max_size()-1-s.size())/2<s.size()?
        s.size()+1:
        s.size()+s.size()/2;
    i.reserve(c+1);
    s.reserve(c);
    rebuild_index();
  }

  void expand(std::size_t m)
  {
    i.reserve(s.size()+m+1);
    s.reserve(s.size()+m);
    rebuild_index();
  }

  void build_index(std::size_t start=0)
  {
    /* note n<=m (not n<m): an extra entry is emitted for position
     * size(), which is what backs the end() iterator */
    for(std::size_t n=start,m=s.size();n<=m;++n){
      i.push_back(Model::make_value_type(concrete_ref(s.data()[n])));
    };
  }

  void rebuild_index()
  {
    i.clear();
    build_index();
  }

  void push_index_entry()
  {
    /* appends exactly one entry (build_index's loop runs once when
     * start==size()), restoring size(index)==size(store)+1 after a
     * single-element insertion */
    build_index(s.size());
  }

  void pop_index_entry(std::size_t n=1)
  {
    while(n--)i.pop_back();
  }

  /* Casting helpers between the type-erased interface (value_pointer)
   * or store slots and the concrete type. concrete_ptr relies on the
   * Concrete object living in value_holder's value_holder_base
   * subobject. */

  static Concrete& concrete_ref(value_pointer p)noexcept
  {
    return *static_cast<Concrete*>(p);
  }

  static Concrete& concrete_ref(store_value_type& r)noexcept
  {
    return *concrete_ptr(&r);
  }

  static const Concrete& const_concrete_ref(const_value_pointer p)noexcept
  {
    return *static_cast<const Concrete*>(p);
  }

  static Concrete* concrete_ptr(store_value_type* p)noexcept
  {
    return reinterpret_cast<Concrete*>(
      static_cast<value_holder_base<Concrete>*>(p));
  }

  static const Concrete* const_concrete_ptr(const store_value_type* p)noexcept
  {
    return concrete_ptr(const_cast<store_value_type*>(p));
  }

  static value_type* value_ptr(const value_type* p)noexcept
  {
    return const_cast<value_type*>(p);
  }

  /* It would have sufficed if iterator_from returned const_store_iterator
   * except for the fact that some old versions of libstdc++ claiming to be
   * C++11 compliant do not however provide std::vector modifier ops taking
   * const_iterator's.
   */

  store_iterator iterator_from(const_base_iterator p)
  {
    return s.begin()+(p-i.data());
  }

  store_iterator iterator_from(const_iterator p)
  {
    return s.begin()+(p-const_concrete_ptr(s.data()));
  }

  /* one-past-the-end base_iterator, used as the sentinel of all ranges */
  base_sentinel sentinel()const noexcept
  {
    return base_iterator{value_ptr(i.data()+s.size())};
  }

  range range_from(const_store_iterator it)const
  {
    return {base_iterator{value_ptr(i.data()+(it-s.begin()))},sentinel()};
  }

  range range_from(std::size_t n)const
  {
    return {base_iterator{value_ptr(i.data()+n)},sentinel()};
  }

  /* single-pass iterators: insert one by one; p is re-based after every
   * potential reallocation and advanced past each inserted element, so
   * p-n recovers the start of the inserted run */
  template<typename InputIterator>
  range insert(
    const_iterator p,InputIterator first,InputIterator last,
    std::input_iterator_tag)
  {
    std::size_t n=0;
    for(;first!=last;++first,++n,++p){
      p=prereserve(p);
      s.emplace(iterator_from(p),*first);
      push_index_entry();
    }
    return range_from(iterator_from(p-n));
  }

  /* multi-pass iterators: reserve the exact amount once, insert the whole
   * range, then append the missing index entries (starting at n+1, past
   * the n+1 entries the index already holds) in one go */
  template<typename InputIterator>
  range insert(
    const_iterator p,InputIterator first,InputIterator last,
    std::forward_iterator_tag)
  {
    auto n=s.size();
    auto m=static_cast<std::size_t>(std::distance(first,last));
    if(m){
      p=prereserve(p,m);
      try{
        s.insert(iterator_from(p),first,last);
      }
      catch(...){
        /* resync the index with however many elements made it in */
        build_index(n+1);
        throw;
      }
      build_index(n+1);
    }
    return range_from(iterator_from(p));
  }

  store s; /* the concrete elements */
  index i; /* value_type entries over s; size(i)==size(s)+1 */
};
495
496 } /* namespace poly_collection::detail */
497
498 } /* namespace poly_collection */
499
500 } /* namespace boost */
501
502 #endif