// Source: ceph/src/boost/libs/signals2/include/boost/signals2/detail/auto_buffer.hpp
// (vendored Boost copy shipped with ceph; git web-viewer page header removed)
1 // Copyright Thorsten Ottosen, 2009.
2 // Distributed under the Boost Software License, Version 1.0. (See
3 // accompanying file LICENSE_1_0.txt or copy at
4 // http://www.boost.org/LICENSE_1_0.txt)
5
6 #ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
7 #define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
8
9 #include <boost/detail/workaround.hpp>
10
11 #if defined(_MSC_VER)
12 # pragma once
13 #endif
14
15 #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
16 #pragma warning(push)
17 #pragma warning(disable:4996)
18 #endif
19
20 #include <boost/assert.hpp>
21 #include <boost/iterator/reverse_iterator.hpp>
22 #include <boost/iterator/iterator_traits.hpp>
23 #include <boost/mpl/if.hpp>
24 #include <boost/multi_index/detail/scope_guard.hpp>
25 #include <boost/swap.hpp>
26 #include <boost/type_traits/aligned_storage.hpp>
27 #include <boost/type_traits/alignment_of.hpp>
28 #include <boost/type_traits/has_nothrow_copy.hpp>
29 #include <boost/type_traits/has_nothrow_assign.hpp>
30 #include <boost/type_traits/has_trivial_assign.hpp>
31 #include <boost/type_traits/has_trivial_constructor.hpp>
32 #include <boost/type_traits/has_trivial_destructor.hpp>
33 #include <algorithm>
34 #include <cstring>
35 #include <iterator>
36 #include <memory>
37 #include <stdexcept>
38
39 namespace boost
40 {
41 namespace signals2
42 {
43 namespace detail
44 {
    //
    // Policies for creating the stack buffer.
    //

    // Size the internal stack buffer to hold N objects of type T.
    template< unsigned N >
    struct store_n_objects
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };

    // Size the internal stack buffer to N raw bytes (the object count
    // is derived as N / sizeof(T) by compute_buffer_objects below).
    template< unsigned N >
    struct store_n_bytes
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };
59
    namespace auto_buffer_detail
    {
        // Buffer size in bytes for an object-count policy:
        // objects * sizeof(T).
        template< class Policy, class T >
        struct compute_buffer_size
        {
            BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
        };

        // For a byte-count policy the byte size is the policy value itself.
        template< unsigned N, class T >
        struct compute_buffer_size< store_n_bytes<N>, T >
        {
            BOOST_STATIC_CONSTANT( unsigned, value = N );
        };

        // Buffer size in objects for an object-count policy.
        template< class Policy, class T >
        struct compute_buffer_objects
        {
            BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
        };

        // For a byte-count policy, round the byte budget down to whole
        // objects.
        template< unsigned N, class T >
        struct compute_buffer_objects< store_n_bytes<N>, T >
        {
            BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
        };
    }
86
    // Default reallocation policy: quadruple the capacity on growth and
    // always allow shrink_to_fit() to release memory.
    struct default_grow_policy
    {
        template< class SizeType >
        static SizeType new_capacity( SizeType capacity )
        {
            //
            // @remark: we grow the capacity quite aggressively.
            //          this is justified since we aim to minimize
            //          heap-allocations, and because we mostly use
            //          the buffer locally.
            return capacity * 4u;
        }

        template< class SizeType >
        static bool should_shrink( SizeType, SizeType )
        {
            //
            // @remark: when defining a new grow policy, one might
            //          choose that if the wasted space is less
            //          than a certain percentage, then it is of
            //          little use to shrink.
            //
            return true;
        }
    };
112
    // Forward declaration carrying the default template arguments:
    // 256 stack-allocated objects, aggressive growth, std::allocator.
    template< class T,
              class StackBufferPolicy = store_n_objects<256>,
              class GrowPolicy        = default_grow_policy,
              class Allocator         = std::allocator<T> >
    class auto_buffer;
118
119
120
    // A vector-like container that stores up to N elements in an internal
    // stack buffer and only falls back to the allocator beyond that.
    // Derives privately from Allocator to enable EBO for stateless
    // allocators.
    template
    <
        class T,
        class StackBufferPolicy,
        class GrowPolicy,
        class Allocator
    >
    class auto_buffer : Allocator
    {
    private:
        // Number of objects the stack buffer can hold, as derived from
        // the policy (object count or byte count).
        enum { N = auto_buffer_detail::
                   compute_buffer_objects<StackBufferPolicy,T>::value };

        BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );

        // A purely heap-backed variant of this buffer (unused here but
        // kept for completeness).
        typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
                                                             local_buffer;

    public:
        typedef Allocator                                allocator_type;
        typedef T                                        value_type;
        typedef typename Allocator::size_type            size_type;
        typedef typename Allocator::difference_type      difference_type;
        typedef T*                                       pointer;
        typedef typename Allocator::pointer              allocator_pointer;
        typedef const T*                                 const_pointer;
        typedef T&                                       reference;
        typedef const T&                                 const_reference;
        typedef pointer                                  iterator;
        typedef const_pointer                            const_iterator;
        typedef boost::reverse_iterator<iterator>        reverse_iterator;
        typedef boost::reverse_iterator<const_iterator>  const_reverse_iterator;
        // Pass small trivially-assignable types by value, everything
        // else by const reference.
        typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
                                           && sizeof(T) <= sizeof(long double),
                                           const value_type,
                                           const_reference >::type
                                                         optimized_const_reference;
    private:
        // Return storage for capacity_arg elements: the stack buffer when
        // it is large enough, otherwise fresh memory from the allocator.
        pointer allocate( size_type capacity_arg )
        {
            if( capacity_arg > N )
                return &*get_allocator().allocate( capacity_arg );
            else
                return static_cast<T*>( members_.address() );
        }

        // Release storage previously obtained from allocate(); a no-op
        // when the capacity indicates the stack buffer was used.
        void deallocate( pointer where, size_type capacity_arg )
        {
            if( capacity_arg <= N )
                return;
            get_allocator().deallocate( allocator_pointer(where), capacity_arg );
        }
174
        // --- raw-copy helpers (construct into uninitialized memory) ---

        // Random-access iterators: dispatch on trivial assignability so
        // contiguous PODs can be memcpy'd.
        template< class I >
        static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
        {
            copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
        }

        // Trivially assignable + raw pointers: bulk byte copy.
        static void copy_rai( const T* begin, const T* end,
                              pointer where, const boost::true_type& )
        {
            std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
        }

        // Fallback (non-trivial T or non-pointer iterators):
        // element-wise placement construction.
        template< class I, bool b >
        static void copy_rai( I begin, I end,
                              pointer where, const boost::integral_constant<bool, b>& )
        {
            std::uninitialized_copy( begin, end, where );
        }

        // Weaker iterator categories always copy element-wise.
        template< class I >
        static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
        {
            std::uninitialized_copy( begin, end, where );
        }

        // Entry point: tag-dispatch on the iterator category.
        template< class I >
        static void copy_impl( I begin, I end, pointer where )
        {
            copy_impl( begin, end, where,
                       typename std::iterator_traits<I>::iterator_category() );
        }

        // --- assignment helpers (target objects already constructed) ---

        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where )
        {
            assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
        }

        // Trivially assignable: bulk byte copy.
        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
        {
            std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
        }

        // Otherwise assign element by element.
        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
        {
            for( ; begin != end; ++begin, ++where )
                *where = *begin;
        }
225
        // Append n value-initialized elements; trivially-assignable T
        // can be filled in bulk.
        void unchecked_push_back_n( size_type n, const boost::true_type& )
        {
            std::uninitialized_fill( end(), end() + n, T() );
            size_ += n;
        }

        // Non-trivial T: default-construct one element at a time.
        void unchecked_push_back_n( size_type n, const boost::false_type& )
        {
            for( size_type i = 0u; i < n; ++i )
                unchecked_push_back();
        }

        // Destroy a single element (no-op for trivially destructible T).
        void auto_buffer_destroy( pointer where, const boost::false_type& )
        {
            (*where).~T();
        }

        void auto_buffer_destroy( pointer, const boost::true_type& )
        { }

        void auto_buffer_destroy( pointer where )
        {
            auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
        }

        // Destroy the last n elements, back to front.
        void destroy_back_n( size_type n, const boost::false_type& )
        {
            BOOST_ASSERT( n > 0 );
            pointer buffer  = buffer_ + size_ - 1u;
            pointer new_end = buffer - n;
            for( ; buffer > new_end; --buffer )
                auto_buffer_destroy( buffer );
        }

        void destroy_back_n( size_type, const boost::true_type& )
        { }

        void destroy_back_n( size_type n )
        {
            destroy_back_n( n, boost::has_trivial_destructor<T>() );
        }
267
        // Full teardown: destroy all elements (non-trivial T only), then
        // release the heap buffer if one was allocated.
        void auto_buffer_destroy( const boost::false_type& x )
        {
            if( size_ )
                destroy_back_n( size_, x );
            deallocate( buffer_, members_.capacity_ );
        }

        // Trivially destructible T: only the storage needs releasing.
        void auto_buffer_destroy( const boost::true_type& )
        {
            deallocate( buffer_, members_.capacity_ );
        }

        // Copy all elements into a freshly allocated buffer and return it.
        // When T's copy can throw, a scope guard frees the new buffer if
        // the copy fails (strong guarantee).
        pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
        {
            pointer new_buffer = allocate( new_capacity ); // strong
            boost::multi_index::detail::scope_guard guard =
                boost::multi_index::detail::make_obj_guard( *this,
                                                            &auto_buffer::deallocate,
                                                            new_buffer,
                                                            new_capacity );
            copy_impl( begin(), end(), new_buffer ); // strong
            guard.dismiss();                         // nothrow
            return new_buffer;
        }

        // Nothrow-copyable T needs no guard.
        pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
        {
            pointer new_buffer = allocate( new_capacity ); // strong
            copy_impl( begin(), end(), new_buffer );       // nothrow
            return new_buffer;
        }
299
        // Move contents to a buffer of exactly new_capacity.  The explicit
        // destructor call tears down the old elements/storage; size_ is a
        // plain scalar and survives, so the object is immediately rebuilt
        // by reassigning buffer_ and capacity_.
        void reserve_impl( size_type new_capacity )
        {
            pointer new_buffer = move_to_new_buffer( new_capacity,
                                                     boost::has_nothrow_copy<T>() );
            (*this).~auto_buffer();
            buffer_            = new_buffer;
            members_.capacity_ = new_capacity;
            BOOST_ASSERT( size_ <= members_.capacity_ );
        }

        // Next capacity: at least n, at least GrowPolicy's suggestion.
        size_type new_capacity_impl( size_type n )
        {
            BOOST_ASSERT( n > members_.capacity_ );
            size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
            // @todo: consider to check for allocator.max_size()
            return (std::max)(new_capacity,n);
        }
317
        // Swap two buffers that both live on the stack, for trivially
        // assignable T: copy via a temporary and swap the bookkeeping.
        static void swap_helper( auto_buffer& l, auto_buffer& r,
                                 const boost::true_type& )
        {
            BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );

            auto_buffer temp( l.begin(), l.end() );
            assign_impl( r.begin(), r.end(), l.begin() );
            assign_impl( temp.begin(), temp.end(), r.begin() );
            boost::swap( l.size_, r.size_ );
            boost::swap( l.members_.capacity_, r.members_.capacity_ );
        }

        // Stack/stack swap for non-trivial T: swap the common prefix
        // element-wise, then move the surplus from the larger buffer to
        // the smaller and pop it from the larger.
        static void swap_helper( auto_buffer& l, auto_buffer& r,
                                 const boost::false_type& )
        {
            BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
            size_type min_size = (std::min)(l.size_,r.size_);
            size_type max_size = (std::max)(l.size_,r.size_);
            size_type diff     = max_size - min_size;
            auto_buffer* smallest = l.size_ == min_size ? &l : &r;
            auto_buffer* largest  = smallest == &l ? &r : &l;

            // @remark: the implementation below is not as fast
            //          as it could be if we assumed T had a default
            //          constructor.

            size_type i = 0u;
            for(  ; i < min_size; ++i )
                boost::swap( (*smallest)[i], (*largest)[i] );

            for( ; i < max_size; ++i )
                smallest->unchecked_push_back( (*largest)[i] );

            largest->pop_back_n( diff );
            boost::swap( l.members_.capacity_, r.members_.capacity_ );
        }

        // Steal temp's (heap) buffer after an insert rebuilt the contents
        // there; *this is torn down first, then re-pointed at temp's
        // storage, and temp is neutered so its destructor is a no-op.
        void one_sided_swap( auto_buffer& temp ) // nothrow
        {
            BOOST_ASSERT( !temp.is_on_stack() );
            this->~auto_buffer();
            // @remark: must be nothrow
            get_allocator()    = temp.get_allocator();
            members_.capacity_ = temp.members_.capacity_;
            buffer_            = temp.buffer_;
            BOOST_ASSERT( temp.size_ >= size_ + 1u );
            size_              = temp.size_;
            temp.buffer_       = 0;
            BOOST_ASSERT( temp.is_valid() );
        }
368
        // Single-pass iterators: the range length is unknown, so insert
        // one element at a time, advancing past each insertion point.
        template< class I >
        void insert_impl( const_iterator before, I begin_arg, I end_arg,
                          std::input_iterator_tag )
        {
            for( ; begin_arg != end_arg; ++begin_arg )
            {
                before = insert( before, *begin_arg );
                ++before;
            }
        }
379
        // Extend size_ by n within existing capacity.  For trivially
        // constructible T just bump the count; otherwise default-
        // construct each new element.
        void grow_back( size_type n, const boost::true_type& )
        {
            BOOST_ASSERT( size_ + n <= members_.capacity_ );
            size_ += n;
        }

        void grow_back( size_type n, const boost::false_type& )
        {
            unchecked_push_back_n(n);
        }

        void grow_back( size_type n )
        {
            grow_back( n, boost::has_trivial_constructor<T>() );
        }

        // Single-element variant of grow_back.
        void grow_back_one( const boost::true_type& )
        {
            BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
            size_ += 1;
        }

        void grow_back_one( const boost::false_type& )
        {
            unchecked_push_back();
        }

        void grow_back_one()
        {
            grow_back_one( boost::has_trivial_constructor<T>() );
        }
411
        // Multi-pass iterators: the range length is known up front.  If
        // the new elements fit in the current capacity, shift the tail
        // right and assign in place; otherwise rebuild everything in a
        // temporary and steal its buffer.
        template< class I >
        void insert_impl( const_iterator before, I begin_arg, I end_arg,
                          std::forward_iterator_tag )
        {
            difference_type n = std::distance(begin_arg, end_arg);

            if( size_ + n <= members_.capacity_ )
            {
                bool is_back_insertion = before == cend();
                if( !is_back_insertion )
                {
                    grow_back( n );
                    iterator where = const_cast<T*>(before);
                    // NOTE(review): source and destination overlap here
                    // with the destination ahead of the source, which is
                    // formally undefined for a forward std::copy;
                    // std::copy_backward would be the safe shift.
                    // Matches the historical upstream code -- confirm
                    // against current Boost before changing.
                    std::copy( before, cend() - n, where + n );
                    assign_impl( begin_arg, end_arg, where );
                }
                else
                {
                    unchecked_push_back( begin_arg, end_arg );
                }
                BOOST_ASSERT( is_valid() );
                return;
            }

            auto_buffer temp( new_capacity_impl( size_ + n ) );
            temp.unchecked_push_back( cbegin(), before );
            temp.unchecked_push_back( begin_arg, end_arg );
            temp.unchecked_push_back( before, cend() );
            one_sided_swap( temp );
            BOOST_ASSERT( is_valid() );
        }
443
    public:
        // Class invariant, checked via BOOST_ASSERT throughout:
        // capacity never drops below N, heap buffers are strictly larger
        // than N, the stack buffer is only used up to N, and size never
        // exceeds capacity.  buffer_ == 0 is a legal "neutered" state.
        bool is_valid() const // invariant
        {
            // @remark: allowed for N==0 and when
            //          using a locally instance
            //          in insert()/one_sided_swap()
            if( buffer_ == 0 )
                return true;

            if( members_.capacity_ < N )
                return false;

            if( !is_on_stack() && members_.capacity_ <= N )
                return false;

            if( buffer_ == members_.address() )
                if( members_.capacity_ > N )
                    return false;

            if( size_ > members_.capacity_ )
                return false;

            return true;
        }
468
        // Default: empty buffer using the stack storage.
        auto_buffer()
        : members_( N ),
          buffer_( static_cast<T*>(members_.address()) ),
          size_( 0u )
        {
            BOOST_ASSERT( is_valid() );
        }

        // Copy: allocate enough for r's elements (never less than N)
        // and copy-construct them.
        auto_buffer( const auto_buffer& r )
        : members_( (std::max)(r.size_,size_type(N)) ),
          buffer_( allocate( members_.capacity_ ) ),
          size_( 0 )
        {
            copy_impl( r.begin(), r.end(), buffer_ );
            size_ = r.size_;
            BOOST_ASSERT( is_valid() );
        }

        // Copy assignment (basic guarantee).  Reuses existing elements/
        // capacity when possible; otherwise releases everything first and
        // rebuilds into a guard-protected fresh allocation.
        auto_buffer& operator=( const auto_buffer& r ) // basic
        {
            if( this == &r )
                return *this;

            difference_type diff = size_ - r.size_;
            if( diff >= 0 )
            {
                // We have at least as many elements: shrink, then assign.
                pop_back_n( static_cast<size_type>(diff) );
                assign_impl( r.begin(), r.end(), begin() );
            }
            else
            {
                if( members_.capacity_ >= r.size() )
                {
                    // Room in place: default-construct the extras, then
                    // overwrite everything by assignment.
                    unchecked_push_back_n( static_cast<size_type>(-diff) );
                    assign_impl( r.begin(), r.end(), begin() );
                }
                else
                {
                    // @remark: we release memory as early as possible
                    //          since we only give the basic guarantee
                    (*this).~auto_buffer();
                    buffer_ = 0;
                    pointer new_buffer = allocate( r.size() );
                    boost::multi_index::detail::scope_guard guard =
                        boost::multi_index::detail::make_obj_guard( *this,
                                                                    &auto_buffer::deallocate,
                                                                    new_buffer,
                                                                    r.size() );
                    copy_impl( r.begin(), r.end(), new_buffer );
                    guard.dismiss();
                    buffer_            = new_buffer;
                    members_.capacity_ = r.size();
                    size_              = members_.capacity_;
                }
            }

            BOOST_ASSERT( size() == r.size() );
            BOOST_ASSERT( is_valid() );
            return *this;
        }
529
        // Reserve-only constructor: capacity of at least capacity_arg
        // (and never below N), zero elements.
        explicit auto_buffer( size_type capacity_arg )
        : members_( (std::max)(capacity_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
        {
            BOOST_ASSERT( is_valid() );
        }

        // size_arg copies of init_value.
        auto_buffer( size_type size_arg, optimized_const_reference init_value )
        : members_( (std::max)(size_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
        {
            std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
            size_ = size_arg;
            BOOST_ASSERT( is_valid() );
        }

        // As above, with an explicit allocator.
        auto_buffer( size_type capacity_arg, const allocator_type& a )
        : allocator_type( a ),
          members_( (std::max)(capacity_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
        {
            BOOST_ASSERT( is_valid() );
        }

        auto_buffer( size_type size_arg, optimized_const_reference init_value,
                     const allocator_type& a )
        : allocator_type( a ),
          members_( (std::max)(size_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
        {
            std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
            size_ = size_arg;
            BOOST_ASSERT( is_valid() );
        }

        // Range constructor.  capacity_ starts as the range length so
        // allocate() picks heap vs stack correctly, then is bumped up to
        // N afterwards to restore the invariant for short ranges.
        template< class ForwardIterator >
        auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
        :
          members_( std::distance(begin_arg, end_arg) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
        {
            copy_impl( begin_arg, end_arg, buffer_ );
            size_ = members_.capacity_;
            if( members_.capacity_ < N )
                members_.capacity_ = N;
            BOOST_ASSERT( is_valid() );
        }

        // Range constructor with explicit allocator.
        template< class ForwardIterator >
        auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
                     const allocator_type& a )
        : allocator_type( a ),
          members_( std::distance(begin_arg, end_arg) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
        {
            copy_impl( begin_arg, end_arg, buffer_ );
            size_ = members_.capacity_;
            if( members_.capacity_ < N )
                members_.capacity_ = N;
            BOOST_ASSERT( is_valid() );
        }
597
        // Destroy elements and release any heap storage.  buffer_ may be
        // null after one_sided_swap()/operator= neutered this object.
        ~auto_buffer()
        {
            BOOST_ASSERT( is_valid() );
            if( buffer_ ) // do we need this check? Yes, but only
                          // for N = 0u + local instances in one_sided_swap()
                auto_buffer_destroy( boost::has_trivial_destructor<T>() );
        }
605
606 public:
607 bool empty() const
608 {
609 return size_ == 0;
610 }
611
612 bool full() const
613 {
614 return size_ == members_.capacity_;
615 }
616
617 bool is_on_stack() const
618 {
619 return members_.capacity_ <= N;
620 }
621
622 size_type size() const
623 {
624 return size_;
625 }
626
627 size_type capacity() const
628 {
629 return members_.capacity_;
630 }
631
    public:
        // Raw pointer to the element storage (stack or heap).
        pointer data()
        {
            return buffer_;
        }

        const_pointer data() const
        {
            return buffer_;
        }

        // The allocator is the private base class (EBO).
        allocator_type& get_allocator()
        {
            return static_cast<allocator_type&>(*this);
        }

        const allocator_type& get_allocator() const
        {
            return static_cast<const allocator_type&>(*this);
        }
652
    public:
        // Iterators are raw pointers into contiguous storage.
        iterator begin()
        {
            return buffer_;
        }

        const_iterator begin() const
        {
            return buffer_;
        }

        iterator end()
        {
            return buffer_ + size_;
        }

        const_iterator end() const
        {
            return buffer_ + size_;
        }

        reverse_iterator rbegin()
        {
            return reverse_iterator(end());
        }

        const_reverse_iterator rbegin() const
        {
            return const_reverse_iterator(end());
        }

        reverse_iterator rend()
        {
            return reverse_iterator(begin());
        }

        const_reverse_iterator rend() const
        {
            return const_reverse_iterator(begin());
        }

        // c* variants force the const overloads even on a non-const
        // object.
        const_iterator cbegin() const
        {
            return const_cast<const auto_buffer*>(this)->begin();
        }

        const_iterator cend() const
        {
            return const_cast<const auto_buffer*>(this)->end();
        }

        const_reverse_iterator crbegin() const
        {
            return const_cast<const auto_buffer*>(this)->rbegin();
        }

        const_reverse_iterator crend() const
        {
            return const_cast<const auto_buffer*>(this)->rend();
        }
713
    public:
        // Element access.  front()/back() require a non-empty buffer;
        // operator[] bounds-checks only via BOOST_ASSERT.
        reference front()
        {
            return buffer_[0];
        }

        optimized_const_reference front() const
        {
            return buffer_[0];
        }

        reference back()
        {
            return buffer_[size_-1];
        }

        optimized_const_reference back() const
        {
            return buffer_[size_-1];
        }

        reference operator[]( size_type n )
        {
            BOOST_ASSERT( n < size_ );
            return buffer_[n];
        }

        optimized_const_reference operator[]( size_type n ) const
        {
            BOOST_ASSERT( n < size_ );
            return buffer_[n];
        }
746
        // Non-growing append of one default-constructed element; the
        // caller guarantees capacity.
        void unchecked_push_back()
        {
            BOOST_ASSERT( !full() );
            new (buffer_ + size_) T;
            ++size_;
        }

        // Non-growing append of n default-constructed elements.
        void unchecked_push_back_n( size_type n )
        {
            BOOST_ASSERT( size_ + n <= members_.capacity_ );
            unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
        }

        // Non-growing append of a copy of x.
        void unchecked_push_back( optimized_const_reference x ) // non-growing
        {
            BOOST_ASSERT( !full() );
            new (buffer_ + size_) T( x );
            ++size_;
        }

        // Non-growing append of a whole range.
        template< class ForwardIterator >
        void unchecked_push_back( ForwardIterator begin_arg,
                                  ForwardIterator end_arg ) // non-growing
        {
            BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
            copy_impl( begin_arg, end_arg, buffer_ + size_ );
            size_ += std::distance(begin_arg, end_arg);
        }
775
        // Grow capacity to exactly n (no GrowPolicy rounding).
        void reserve_precisely( size_type n )
        {
            BOOST_ASSERT( members_.capacity_ >= N );

            if( n <= members_.capacity_ )
                return;
            reserve_impl( n );
            BOOST_ASSERT( members_.capacity_ == n );
        }

        // Grow capacity to at least n, letting GrowPolicy pick the
        // actual amount (strong guarantee).
        void reserve( size_type n ) // strong
        {
            BOOST_ASSERT( members_.capacity_ >= N );

            if( n <= members_.capacity_ )
                return;

            reserve_impl( new_capacity_impl( n ) );
            BOOST_ASSERT( members_.capacity_ >= n );
        }
796
        // Growing append of one default-constructed element.
        void push_back()
        {
            if( size_ != members_.capacity_ )
            {
                unchecked_push_back();
            }
            else
            {
                reserve( size_ + 1u );
                unchecked_push_back();
            }
        }

        // Growing append of a copy of x.
        void push_back( optimized_const_reference x )
        {
            if( size_ != members_.capacity_ )
            {
                unchecked_push_back( x );
            }
            else
            {
                reserve( size_ + 1u );
                unchecked_push_back( x );
            }
        }

        // Growing append of a range.
        template< class ForwardIterator >
        void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
        {
            difference_type diff = std::distance(begin_arg, end_arg);
            if( size_ + diff > members_.capacity_ )
                reserve( size_ + diff );
            unchecked_push_back( begin_arg, end_arg );
        }
831
        // Insert a copy of x before 'before' (basic guarantee).  With
        // spare capacity the tail is shifted right in place; otherwise
        // everything is rebuilt in a temporary whose heap buffer is then
        // stolen via one_sided_swap().
        iterator insert( const_iterator before, optimized_const_reference x ) // basic
        {
            // @todo: consider if we want to support x in 'this'
            if( size_ < members_.capacity_ )
            {
                bool is_back_insertion = before == cend();
                iterator where = const_cast<T*>(before);

                if( !is_back_insertion )
                {
                    grow_back_one();
                    std::copy( before, cend() - 1u, where + 1u );
                    *where = x;
                    BOOST_ASSERT( is_valid() );
                }
                else
                {
                    unchecked_push_back( x );
                }
                return where;
            }

            auto_buffer temp( new_capacity_impl( size_ + 1u ) );
            temp.unchecked_push_back( cbegin(), before );
            iterator result = temp.end();
            temp.unchecked_push_back( x );
            temp.unchecked_push_back( before, cend() );
            one_sided_swap( temp );
            BOOST_ASSERT( is_valid() );
            return result;
        }

        // Insert n copies of x before 'before'.
        void insert( const_iterator before, size_type n,
                     optimized_const_reference x )
        {
            // @todo: see problems above
            if( size_ + n <= members_.capacity_ )
            {
                grow_back( n );
                iterator where = const_cast<T*>(before);
                std::copy( before, cend() - n, where + n );
                std::fill( where, where + n, x );
                BOOST_ASSERT( is_valid() );
                return;
            }

            auto_buffer temp( new_capacity_impl( size_ + n ) );
            temp.unchecked_push_back( cbegin(), before );
            std::uninitialized_fill_n( temp.end(), n, x );
            temp.size_ += n;
            temp.unchecked_push_back( before, cend() );
            one_sided_swap( temp );
            BOOST_ASSERT( is_valid() );
        }

        // Insert a range; dispatches on iterator category (single-pass
        // ranges insert element-by-element, multi-pass in one step).
        template< class ForwardIterator >
        void insert( const_iterator before,
                     ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
        {
            typedef typename std::iterator_traits<ForwardIterator>
                ::iterator_category category;
            insert_impl( before, begin_arg, end_arg, category() );
        }
895
        // Destroy the last element.  Capacity is unchanged.
        void pop_back()
        {
            BOOST_ASSERT( !empty() );
            auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
            --size_;
        }

        // Destroy the last n elements.
        void pop_back_n( size_type n )
        {
            BOOST_ASSERT( n <= size_ );
            if( n )
            {
                destroy_back_n( n );
                size_ -= n;
            }
        }

        // Destroy all elements; storage is retained.
        void clear()
        {
            pop_back_n( size_ );
        }
917
        // Remove the element at 'where' by shifting the tail left one
        // slot and popping the last element.
        iterator erase( const_iterator where )
        {
            BOOST_ASSERT( !empty() );
            BOOST_ASSERT( cbegin() <= where );
            BOOST_ASSERT( cend() > where );

            unsigned elements = cend() - where - 1u;

            if( elements > 0u )
            {
                const_iterator start = where + 1u;
                std::copy( start, start + elements,
                           const_cast<T*>(where) );
            }
            pop_back();
            BOOST_ASSERT( !full() );
            iterator result = const_cast<T*>( where );
            BOOST_ASSERT( result <= end() );
            return result;
        }

        // Remove the range [from, to) by shifting the tail left and
        // popping the surplus.
        iterator erase( const_iterator from, const_iterator to )
        {
            BOOST_ASSERT( !(std::distance(from,to)>0) ||
                          !empty() );
            BOOST_ASSERT( cbegin() <= from );
            BOOST_ASSERT( cend() >= to );

            unsigned elements = std::distance(to,cend());

            if( elements > 0u )
            {
                BOOST_ASSERT( elements > 0u );
                std::copy( to, to + elements,
                           const_cast<T*>(from) );
            }
            pop_back_n( std::distance(from,to) );
            BOOST_ASSERT( !full() );
            iterator result = const_cast<T*>( from );
            BOOST_ASSERT( result <= end() );
            return result;
        }
960
        // Release surplus heap capacity if the grow policy allows it.
        // Capacity is clamped back up to N afterwards so the invariant
        // capacity_ >= N holds even for small sizes.
        void shrink_to_fit()
        {
            if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
                return;

            reserve_impl( size_ );
            members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
            BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
            BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
        }
971
972 pointer uninitialized_grow( size_type n ) // strong
973 {
974 if( size_ + n <= members_.capacity_ )
975 reserve( size_ + n );
976
977 pointer res = end();
978 size_ += n;
979 return res;
980 }
981
        // Drop n slots from the end WITHOUT destroying elements; the
        // caller must have destroyed them already (nothrow).
        void uninitialized_shrink( size_type n ) // nothrow
        {
            // @remark: test for wrap-around
            BOOST_ASSERT( size_ - n <= members_.capacity_ );
            size_ -= n;
        }

        // Set size() to n via uninitialized grow/shrink; element
        // construction/destruction is the caller's responsibility.
        void uninitialized_resize( size_type n )
        {
            if( n > size() )
                uninitialized_grow( n - size() );
            else if( n < size() )
                uninitialized_shrink( size() - n );

            BOOST_ASSERT( size() == n );
        }
998
        // nothrow - if both buffer are on the heap, or
        //         - if one buffer is on the heap and one has
        //           'has_allocated_buffer() == false', or
        //         - if copy-construction cannot throw
        // basic   - otherwise (better guarantee impossible)
        // requirement: the allocator must be no-throw-swappable
        void swap( auto_buffer& r )
        {
            bool on_stack      = is_on_stack();
            bool r_on_stack    = r.is_on_stack();
            bool both_on_heap  = !on_stack && !r_on_stack;
            if( both_on_heap )
            {
                // Both heap-backed: plain member-wise pointer swap.
                boost::swap( get_allocator(), r.get_allocator() );
                boost::swap( members_.capacity_, r.members_.capacity_ );
                boost::swap( buffer_, r.buffer_ );
                boost::swap( size_, r.size_ );
                BOOST_ASSERT( is_valid() );
                BOOST_ASSERT( r.is_valid() );
                return;
            }

            BOOST_ASSERT( on_stack || r_on_stack );
            bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
                                        (!on_stack && r_on_stack);

            //
            // Remark: we now know that we can copy into
            //         the unused stack buffer.
            //
            if( exactly_one_on_stack )
            {
                // Copy the stack-side elements into the heap-side's
                // unused stack storage, then exchange the buffer
                // pointers and bookkeeping.
                auto_buffer* one_on_stack = on_stack ? this : &r;
                auto_buffer* other        = on_stack ? &r : this;
                pointer new_buffer = static_cast<T*>(other->members_.address());
                copy_impl( one_on_stack->begin(), one_on_stack->end(),
                           new_buffer );                            // strong
                one_on_stack->~auto_buffer();                       // nothrow
                boost::swap( get_allocator(), r.get_allocator() );  // assume nothrow
                boost::swap( members_.capacity_, r.members_.capacity_ );
                boost::swap( size_, r.size_ );
                one_on_stack->buffer_ = other->buffer_;
                other->buffer_        = new_buffer;
                BOOST_ASSERT( other->is_on_stack() );
                BOOST_ASSERT( !one_on_stack->is_on_stack() );
                BOOST_ASSERT( is_valid() );
                BOOST_ASSERT( r.is_valid() );
                return;
            }

            BOOST_ASSERT( on_stack && r_on_stack );
            swap_helper( *this, r, boost::has_trivial_assign<T>() );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
        }
1054
    private:
        // Raw, suitably-aligned stack storage for N objects of T.
        typedef boost::aligned_storage< N * sizeof(T),
                                        boost::alignment_of<T>::value >
                               storage;

        // capacity_ bundled with the stack storage; inheriting from
        // storage enables EBO when N == 0.
        struct members_type : storage /* to enable EBO */
        {
            size_type capacity_;

            members_type( size_type capacity )
               : capacity_(capacity)
            { }

            // Address of the stack storage, callable on a const object.
            void* address() const
            { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
        };

        members_type members_;  // capacity + stack buffer
        pointer      buffer_;   // points at stack storage or heap block (0 = neutered)
        size_type    size_;     // number of constructed elements

    };
1077
    // Free swap forwarding to the member function.
    template< class T, class SBP, class GP, class A >
    inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
    {
        l.swap( r );
    }

    // Element-wise equality.
    template< class T, class SBP, class GP, class A >
    inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
                            const auto_buffer<T,SBP,GP,A>& r )
    {
        if( l.size() != r.size() )
            return false;
        return std::equal( l.begin(), l.end(), r.begin() );
    }

    template< class T, class SBP, class GP, class A >
    inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
                            const auto_buffer<T,SBP,GP,A>& r )
    {
        return !(l == r);
    }

    // Lexicographical ordering.
    template< class T, class SBP, class GP, class A >
    inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
                           const auto_buffer<T,SBP,GP,A>& r )
    {
        return std::lexicographical_compare( l.begin(), l.end(),
                                             r.begin(), r.end() );
    }

    template< class T, class SBP, class GP, class A >
    inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
                           const auto_buffer<T,SBP,GP,A>& r )
    {
        return (r < l);
    }
1114
1115 template< class T, class SBP, class GP, class A >
1116 inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
1117 const auto_buffer<T,SBP,GP,A>& r )
1118 {
1119 return !(r > l);
1120 }
1121
    // l >= r  <=>  !(l < r).
    template< class T, class SBP, class GP, class A >
    inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
                            const auto_buffer<T,SBP,GP,A>& r )
    {
        return !(l < r);
    }
1128
1129 } // namespace detail
1130 } // namespace signals2
1131 }
1132
1133 #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
1134 #pragma warning(pop)
1135 #endif
1136
1137 #endif