]> git.proxmox.com Git - ceph.git/blame - ceph/src/boost/boost/signals2/detail/auto_buffer.hpp
import quincy beta 17.1.0
[ceph.git] / ceph / src / boost / boost / signals2 / detail / auto_buffer.hpp
CommitLineData
7c673cae
FG
1// Copyright Thorsten Ottosen, 2009.
2// Distributed under the Boost Software License, Version 1.0. (See
3// accompanying file LICENSE_1_0.txt or copy at
4// http://www.boost.org/LICENSE_1_0.txt)
5
6#ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
7#define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
8
9#include <boost/detail/workaround.hpp>
10
11#if defined(_MSC_VER)
12# pragma once
13#endif
14
15#if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
16#pragma warning(push)
17#pragma warning(disable:4996)
18#endif
19
20#include <boost/assert.hpp>
f67539c2 21#include <boost/config.hpp>
20effc67 22#include <boost/core/allocator_access.hpp>
7c673cae
FG
23#include <boost/iterator/reverse_iterator.hpp>
24#include <boost/iterator/iterator_traits.hpp>
25#include <boost/mpl/if.hpp>
92f5a8d4 26#include <boost/signals2/detail/scope_guard.hpp>
7c673cae
FG
27#include <boost/swap.hpp>
28#include <boost/type_traits/aligned_storage.hpp>
29#include <boost/type_traits/alignment_of.hpp>
30#include <boost/type_traits/has_nothrow_copy.hpp>
31#include <boost/type_traits/has_nothrow_assign.hpp>
32#include <boost/type_traits/has_trivial_assign.hpp>
33#include <boost/type_traits/has_trivial_constructor.hpp>
34#include <boost/type_traits/has_trivial_destructor.hpp>
35#include <algorithm>
36#include <cstring>
37#include <iterator>
38#include <memory>
39#include <stdexcept>
40
41namespace boost
42{
43namespace signals2
44{
45namespace detail
46{
47 //
48 // Policies for creating the stack buffer.
49 //
    //
    // Stack-buffer policy: reserve room for N objects of the value type.
    //
    template< unsigned N >
    struct store_n_objects
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };
55
    //
    // Stack-buffer policy: reserve N raw bytes (the object count is
    // derived as N / sizeof(T) by compute_buffer_objects below).
    //
    template< unsigned N >
    struct store_n_bytes
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };
61
    namespace auto_buffer_detail
    {
        // Maps a stack-buffer policy to a size in bytes.
        // Primary case: store_n_objects<N> -> N * sizeof(T).
        template< class Policy, class T >
        struct compute_buffer_size
        {
            BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
        };

        // store_n_bytes<N> already denotes a byte count.
        template< unsigned N, class T >
        struct compute_buffer_size< store_n_bytes<N>, T >
        {
            BOOST_STATIC_CONSTANT( unsigned, value = N );
        };

        // Maps a stack-buffer policy to a count of T objects.
        // Primary case: store_n_objects<N> -> N.
        template< class Policy, class T >
        struct compute_buffer_objects
        {
            BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
        };

        // store_n_bytes<N> -> as many whole Ts as fit in N bytes.
        template< unsigned N, class T >
        struct compute_buffer_objects< store_n_bytes<N>, T >
        {
            BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
        };
    }
88
89 struct default_grow_policy
90 {
91 template< class SizeType >
92 static SizeType new_capacity( SizeType capacity )
93 {
94 //
95 // @remark: we grow the capacity quite agressively.
96 // this is justified since we aim to minimize
97 // heap-allocations, and because we mostly use
98 // the buffer locally.
99 return capacity * 4u;
100 }
101
102 template< class SizeType >
103 static bool should_shrink( SizeType, SizeType )
104 {
105 //
106 // @remark: when defining a new grow policy, one might
107 // choose that if the waated space is less
108 // than a certain percentage, then it is of
109 // little use to shrink.
110 //
111 return true;
112 }
113 };
114
    // Primary template; definition follows below.  Defaults: a 256-object
    // stack buffer, aggressive 4x growth, and std::allocator.
    template< class T,
              class StackBufferPolicy = store_n_objects<256>,
              class GrowPolicy        = default_grow_policy,
              class Allocator         = std::allocator<T> >
    class auto_buffer;
120
121
122
    //
    // Contiguous, vector-like sequence that keeps up to N elements in an
    // internal stack buffer and only falls back to the Allocator for larger
    // capacities.  N is derived from StackBufferPolicy (store_n_objects /
    // store_n_bytes); growth is delegated to GrowPolicy.  Element copy /
    // assignment / destruction are dispatched on type traits so that
    // trivially-copyable Ts use memcpy and trivially-destructible Ts skip
    // destructor calls.
    //
    template
    <
        class T,
        class StackBufferPolicy,
        class GrowPolicy,
        class Allocator
    >
    class auto_buffer : Allocator // private base enables EBO for
                                  // stateless allocators
    {
    private:
        // Number of objects that fit in the stack buffer.
        enum { N = auto_buffer_detail::
                   compute_buffer_objects<StackBufferPolicy,T>::value };

        BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );

        // Heap-only flavour of this buffer (zero-sized stack storage).
        typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
                                                                 local_buffer;

    public:
        typedef Allocator                                allocator_type;
        typedef T                                        value_type;
        typedef typename boost::allocator_size_type<Allocator>::type size_type;
        typedef typename boost::allocator_difference_type<Allocator>::type difference_type;
        typedef T*                                       pointer;
        typedef typename boost::allocator_pointer<Allocator>::type allocator_pointer;
        typedef const T*                                 const_pointer;
        typedef T&                                       reference;
        typedef const T&                                 const_reference;
        typedef pointer                                  iterator;
        typedef const_pointer                            const_iterator;
        typedef boost::reverse_iterator<iterator>        reverse_iterator;
        typedef boost::reverse_iterator<const_iterator>  const_reverse_iterator;
        // Pass small trivially-assignable Ts by value, everything else by
        // const reference.
        typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
                                           && sizeof(T) <= sizeof(long double),
                                           const value_type,
                                           const_reference >::type
                                                      optimized_const_reference;
    private:

        // Returns the stack buffer when the requested capacity fits there,
        // otherwise allocates from the allocator.
        pointer allocate( size_type capacity_arg )
        {
            if( capacity_arg > N )
                return &*get_allocator().allocate( capacity_arg );
            else
                return static_cast<T*>( members_.address() );
        }

        // No-op for the stack buffer; frees heap storage otherwise.
        void deallocate( pointer where, size_type capacity_arg )
        {
            if( capacity_arg <= N )
                return;
            get_allocator().deallocate( allocator_pointer(where), capacity_arg );
        }

        template< class I >
        static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
        {
            copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
        }

        // Trivially-assignable Ts reachable through raw pointers can be
        // bulk-copied with memcpy.
        static void copy_rai( const T* begin, const T* end,
                              pointer where, const boost::true_type& )
        {
            std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
        }

        template< class I, bool b >
        static void copy_rai( I begin, I end,
                              pointer where, const boost::integral_constant<bool, b>& )
        {
            std::uninitialized_copy( begin, end, where );
        }

        template< class I >
        static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
        {
            std::uninitialized_copy( begin, end, where );
        }

        // Copy [begin,end) into raw storage at 'where', dispatching on the
        // iterator category.
        template< class I >
        static void copy_impl( I begin, I end, pointer where )
        {
            copy_impl( begin, end, where,
                       typename std::iterator_traits<I>::iterator_category() );
        }

        // Assign [begin,end) onto already-constructed elements at 'where'.
        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where )
        {
            assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
        }

        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
        {
            std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
        }

        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
        {
            for( ; begin != end; ++begin, ++where )
                *where = *begin;
        }

        void unchecked_push_back_n( size_type n, const boost::true_type& )
        {
            std::uninitialized_fill( end(), end() + n, T() );
            size_ += n;
        }

        void unchecked_push_back_n( size_type n, const boost::false_type& )
        {
            for( size_type i = 0u; i < n; ++i )
                unchecked_push_back();
        }

        void auto_buffer_destroy( pointer where, const boost::false_type& )
        {
            (*where).~T();
        }

        // Nothing to do for trivially-destructible Ts.
        void auto_buffer_destroy( pointer, const boost::true_type& )
        { }

        void auto_buffer_destroy( pointer where )
        {
            auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
        }

        // Destroys all elements and releases heap storage (if any).
        void auto_buffer_destroy()
        {
            BOOST_ASSERT( is_valid() );
            if( buffer_ ) // do we need this check? Yes, but only
                          // for N = 0u + local instances in one_sided_swap()
                auto_buffer_destroy( boost::has_trivial_destructor<T>() );
        }

        void destroy_back_n( size_type n, const boost::false_type& )
        {
            BOOST_ASSERT( n > 0 );
            pointer buffer  = buffer_ + size_ - 1u;
            pointer new_end = buffer - n;
            for( ; buffer > new_end; --buffer )
                auto_buffer_destroy( buffer );
        }

        void destroy_back_n( size_type, const boost::true_type& )
        { }

        // Destroy the last n elements (does not adjust size_).
        void destroy_back_n( size_type n )
        {
            destroy_back_n( n, boost::has_trivial_destructor<T>() );
        }

        void auto_buffer_destroy( const boost::false_type& x )
        {
            if( size_ )
                destroy_back_n( size_, x );
            deallocate( buffer_, members_.capacity_ );
        }

        void auto_buffer_destroy( const boost::true_type& )
        {
            deallocate( buffer_, members_.capacity_ );
        }

        // Throwing-copy path: guard the fresh allocation so it is released
        // if an element copy throws (preserves the strong guarantee).
        pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
        {
            pointer new_buffer = allocate( new_capacity ); // strong
            scope_guard guard = make_obj_guard( *this,
                                                &auto_buffer::deallocate,
                                                new_buffer,
                                                new_capacity );
            copy_impl( begin(), end(), new_buffer ); // strong
            guard.dismiss();                         // nothrow
            return new_buffer;
        }

        // Nothrow-copy path: no guard needed.
        pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
        {
            pointer new_buffer = allocate( new_capacity ); // strong
            copy_impl( begin(), end(), new_buffer );       // nothrow
            return new_buffer;
        }

        void reserve_impl( size_type new_capacity )
        {
            pointer new_buffer = move_to_new_buffer( new_capacity,
                                                 boost::has_nothrow_copy<T>() );
            auto_buffer_destroy();
            buffer_ = new_buffer;
            members_.capacity_ = new_capacity;
            BOOST_ASSERT( size_ <= members_.capacity_ );
        }

        // Next capacity: at least n, otherwise whatever GrowPolicy suggests.
        size_type new_capacity_impl( size_type n )
        {
            BOOST_ASSERT( n > members_.capacity_ );
            size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
            // @todo: consider to check for allocator.max_size()
            return (std::max)(new_capacity,n);
        }

        static void swap_helper( auto_buffer& l, auto_buffer& r,
                                 const boost::true_type& )
        {
            BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );

            auto_buffer temp( l.begin(), l.end() );
            assign_impl( r.begin(), r.end(), l.begin() );
            assign_impl( temp.begin(), temp.end(), r.begin() );
            boost::swap( l.size_, r.size_ );
            boost::swap( l.members_.capacity_, r.members_.capacity_ );
        }

        static void swap_helper( auto_buffer& l, auto_buffer& r,
                                 const boost::false_type& )
        {
            BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
            size_type min_size = (std::min)(l.size_,r.size_);
            size_type max_size = (std::max)(l.size_,r.size_);
            size_type diff     = max_size - min_size;
            auto_buffer* smallest = l.size_ == min_size ? &l : &r;
            auto_buffer* largest  = smallest == &l ? &r : &l;

            // @remark: the implementation below is not as fast
            //          as it could be if we assumed T had a default
            //          constructor.

            size_type i = 0u;
            for(  ; i < min_size; ++i )
                boost::swap( (*smallest)[i], (*largest)[i] );

            for( ; i < max_size; ++i )
                smallest->unchecked_push_back( (*largest)[i] );

            largest->pop_back_n( diff );
            boost::swap( l.members_.capacity_, r.members_.capacity_ );
        }

        // Steal temp's heap buffer wholesale; leaves temp empty.
        void one_sided_swap( auto_buffer& temp ) // nothrow
        {
            BOOST_ASSERT( !temp.is_on_stack() );
            auto_buffer_destroy();
            // @remark: must be nothrow
            get_allocator() = temp.get_allocator();
            members_.capacity_ = temp.members_.capacity_;
            buffer_ = temp.buffer_;
            BOOST_ASSERT( temp.size_ >= size_ + 1u );
            size_ = temp.size_;
            temp.buffer_ = 0;
            BOOST_ASSERT( temp.is_valid() );
        }

        // Single-pass iterators: insert one element at a time.
        template< class I >
        void insert_impl( const_iterator before, I begin_arg, I end_arg,
                          std::input_iterator_tag )
        {
            for( ; begin_arg != end_arg; ++begin_arg )
            {
                before = insert( before, *begin_arg );
                ++before;
            }
        }

        void grow_back( size_type n, const boost::true_type& )
        {
            BOOST_ASSERT( size_ + n <= members_.capacity_ );
            size_ += n;
        }

        void grow_back( size_type n, const boost::false_type& )
        {
            unchecked_push_back_n(n);
        }

        // Append n default-constructed elements (capacity must suffice).
        void grow_back( size_type n )
        {
            grow_back( n, boost::has_trivial_constructor<T>() );
        }

        void grow_back_one( const boost::true_type& )
        {
            BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
            size_ += 1;
        }

        void grow_back_one( const boost::false_type& )
        {
            unchecked_push_back();
        }

        void grow_back_one()
        {
            grow_back_one( boost::has_trivial_constructor<T>() );
        }

        template< class I >
        void insert_impl( const_iterator before, I begin_arg, I end_arg,
                          std::forward_iterator_tag )
        {
            difference_type n = std::distance(begin_arg, end_arg);

            if( size_ + n <= members_.capacity_ )
            {
                bool is_back_insertion = before == cend();
                if( !is_back_insertion )
                {
                    grow_back( n );
                    iterator where = const_cast<T*>(before);
                    std::copy( before, cend() - n, where + n );
                    assign_impl( begin_arg, end_arg, where );
                }
                else
                {
                    unchecked_push_back( begin_arg, end_arg );
                }
                BOOST_ASSERT( is_valid() );
                return;
            }

            // Not enough room: build the result in a temporary and steal
            // its buffer (basic guarantee only).
            auto_buffer temp( new_capacity_impl( size_ + n ) );
            temp.unchecked_push_back( cbegin(), before );
            temp.unchecked_push_back( begin_arg, end_arg );
            temp.unchecked_push_back( before, cend() );
            one_sided_swap( temp );
            BOOST_ASSERT( is_valid() );
        }

    public:
        // Class invariant; cheap enough to be asserted liberally.
        bool is_valid() const // invariant
        {
            // @remark: allowed for N==0 and when
            //          using a locally instance
            //          in insert()/one_sided_swap()
            if( buffer_ == 0 )
                return true;

            if( members_.capacity_ < N )
                return false;

            if( !is_on_stack() && members_.capacity_ <= N )
                return false;

            if( buffer_ == members_.address() )
                if( members_.capacity_ > N )
                    return false;

            if( size_ > members_.capacity_ )
                return false;

            return true;
        }

        auto_buffer()
            : members_( N ),
              buffer_( static_cast<T*>(members_.address()) ),
              size_( 0u )
        {
            BOOST_ASSERT( is_valid() );
        }

        auto_buffer( const auto_buffer& r )
            : members_( (std::max)(r.size_,size_type(N)) ),
              buffer_( allocate( members_.capacity_ ) ),
              size_( 0 )
        {
            copy_impl( r.begin(), r.end(), buffer_ );
            size_ = r.size_;
            BOOST_ASSERT( is_valid() );
        }

        auto_buffer& operator=( const auto_buffer& r ) // basic
        {
            if( this == &r )
                return *this;

            difference_type diff = size_ - r.size_;
            if( diff >= 0 )
            {
                pop_back_n( static_cast<size_type>(diff) );
                assign_impl( r.begin(), r.end(), begin() );
            }
            else
            {
                if( members_.capacity_ >= r.size() )
                {
                    unchecked_push_back_n( static_cast<size_type>(-diff) );
                    assign_impl( r.begin(), r.end(), begin() );
                }
                else
                {
                    // @remark: we release memory as early as possible
                    //          since we only give the basic guarantee
                    auto_buffer_destroy();
                    buffer_ = 0;
                    pointer new_buffer = allocate( r.size() );
                    scope_guard guard = make_obj_guard( *this,
                                                        &auto_buffer::deallocate,
                                                        new_buffer,
                                                        r.size() );
                    copy_impl( r.begin(), r.end(), new_buffer );
                    guard.dismiss();
                    buffer_ = new_buffer;
                    members_.capacity_ = r.size();
                    size_ = members_.capacity_;
                }
            }

            BOOST_ASSERT( size() == r.size() );
            BOOST_ASSERT( is_valid() );
            return *this;
        }

        explicit auto_buffer( size_type capacity_arg )
            : members_( (std::max)(capacity_arg, size_type(N)) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            BOOST_ASSERT( is_valid() );
        }

        auto_buffer( size_type size_arg, optimized_const_reference init_value )
            : members_( (std::max)(size_arg, size_type(N)) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
            size_ = size_arg;
            BOOST_ASSERT( is_valid() );
        }

        auto_buffer( size_type capacity_arg, const allocator_type& a )
            : allocator_type( a ),
              members_( (std::max)(capacity_arg, size_type(N)) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            BOOST_ASSERT( is_valid() );
        }

        auto_buffer( size_type size_arg, optimized_const_reference init_value,
                     const allocator_type& a )
            : allocator_type( a ),
              members_( (std::max)(size_arg, size_type(N)) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
            size_ = size_arg;
            BOOST_ASSERT( is_valid() );
        }

        template< class ForwardIterator >
        auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
            :
              members_( std::distance(begin_arg, end_arg) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            copy_impl( begin_arg, end_arg, buffer_ );
            size_ = members_.capacity_;
            if( members_.capacity_ < N )
                members_.capacity_ = N;
            BOOST_ASSERT( is_valid() );
        }

        template< class ForwardIterator >
        auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
                     const allocator_type& a )
            : allocator_type( a ),
              members_( std::distance(begin_arg, end_arg) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            copy_impl( begin_arg, end_arg, buffer_ );
            size_ = members_.capacity_;
            if( members_.capacity_ < N )
                members_.capacity_ = N;
            BOOST_ASSERT( is_valid() );
        }

        ~auto_buffer()
        {
            auto_buffer_destroy();
        }

    public:
        bool empty() const
        {
            return size_ == 0;
        }

        bool full() const
        {
            return size_ == members_.capacity_;
        }

        // True while the elements live in the embedded stack storage.
        bool is_on_stack() const
        {
            return members_.capacity_ <= N;
        }

        size_type size() const
        {
            return size_;
        }

        size_type capacity() const
        {
            return members_.capacity_;
        }

    public:
        pointer data()
        {
            return buffer_;
        }

        const_pointer data() const
        {
            return buffer_;
        }

        allocator_type& get_allocator()
        {
            return static_cast<allocator_type&>(*this);
        }

        const allocator_type& get_allocator() const
        {
            return static_cast<const allocator_type&>(*this);
        }

    public:
        iterator begin()
        {
            return buffer_;
        }

        const_iterator begin() const
        {
            return buffer_;
        }

        iterator end()
        {
            return buffer_ + size_;
        }

        const_iterator end() const
        {
            return buffer_ + size_;
        }

        reverse_iterator rbegin()
        {
            return reverse_iterator(end());
        }

        const_reverse_iterator rbegin() const
        {
            return const_reverse_iterator(end());
        }

        reverse_iterator rend()
        {
            return reverse_iterator(begin());
        }

        const_reverse_iterator rend() const
        {
            return const_reverse_iterator(begin());
        }

        const_iterator cbegin() const
        {
            return const_cast<const auto_buffer*>(this)->begin();
        }

        const_iterator cend() const
        {
            return const_cast<const auto_buffer*>(this)->end();
        }

        const_reverse_iterator crbegin() const
        {
            return const_cast<const auto_buffer*>(this)->rbegin();
        }

        const_reverse_iterator crend() const
        {
            return const_cast<const auto_buffer*>(this)->rend();
        }

    public:
        reference front()
        {
            return buffer_[0];
        }

        optimized_const_reference front() const
        {
            return buffer_[0];
        }

        reference back()
        {
            return buffer_[size_-1];
        }

        optimized_const_reference back() const
        {
            return buffer_[size_-1];
        }

        reference operator[]( size_type n )
        {
            BOOST_ASSERT( n < size_ );
            return buffer_[n];
        }

        optimized_const_reference operator[]( size_type n ) const
        {
            BOOST_ASSERT( n < size_ );
            return buffer_[n];
        }

        // The unchecked_* operations assume sufficient capacity (asserted,
        // never grown).
        void unchecked_push_back()
        {
            BOOST_ASSERT( !full() );
            new (buffer_ + size_) T;
            ++size_;
        }

        void unchecked_push_back_n( size_type n )
        {
            BOOST_ASSERT( size_ + n <= members_.capacity_ );
            unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
        }

        void unchecked_push_back( optimized_const_reference x ) // non-growing
        {
            BOOST_ASSERT( !full() );
            new (buffer_ + size_) T( x );
            ++size_;
        }

        template< class ForwardIterator >
        void unchecked_push_back( ForwardIterator begin_arg,
                                  ForwardIterator end_arg ) // non-growing
        {
            BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
            copy_impl( begin_arg, end_arg, buffer_ + size_ );
            size_ += std::distance(begin_arg, end_arg);
        }

        // Grow to exactly n (no GrowPolicy involvement).
        void reserve_precisely( size_type n )
        {
            BOOST_ASSERT( members_.capacity_ >= N );

            if( n <= members_.capacity_ )
                return;
            reserve_impl( n );
            BOOST_ASSERT( members_.capacity_ == n );
        }

        // Grow to at least n, using GrowPolicy to pick the new capacity.
        void reserve( size_type n ) // strong
        {
            BOOST_ASSERT( members_.capacity_ >= N );

            if( n <= members_.capacity_ )
                return;

            reserve_impl( new_capacity_impl( n ) );
            BOOST_ASSERT( members_.capacity_ >= n );
        }

        void push_back()
        {
            if( size_ != members_.capacity_ )
            {
                unchecked_push_back();
            }
            else
            {
                reserve( size_ + 1u );
                unchecked_push_back();
            }
        }

        void push_back( optimized_const_reference x )
        {
            if( size_ != members_.capacity_ )
            {
                unchecked_push_back( x );
            }
            else
            {
                reserve( size_ + 1u );
                unchecked_push_back( x );
            }
        }

        template< class ForwardIterator >
        void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
        {
            difference_type diff = std::distance(begin_arg, end_arg);
            if( size_ + diff > members_.capacity_ )
                reserve( size_ + diff );
            unchecked_push_back( begin_arg, end_arg );
        }

        iterator insert( const_iterator before, optimized_const_reference x ) // basic
        {
            // @todo: consider if we want to support x in 'this'
            if( size_ < members_.capacity_ )
            {
                bool is_back_insertion = before == cend();
                iterator where = const_cast<T*>(before);

                if( !is_back_insertion )
                {
                    grow_back_one();
                    std::copy( before, cend() - 1u, where + 1u );
                    *where = x;
                    BOOST_ASSERT( is_valid() );
                }
                else
                {
                    unchecked_push_back( x );
                }
                return where;
            }

            auto_buffer temp( new_capacity_impl( size_ + 1u ) );
            temp.unchecked_push_back( cbegin(), before );
            iterator result = temp.end();
            temp.unchecked_push_back( x );
            temp.unchecked_push_back( before, cend() );
            one_sided_swap( temp );
            BOOST_ASSERT( is_valid() );
            return result;
        }

        void insert( const_iterator before, size_type n,
                     optimized_const_reference x )
        {
            // @todo: see problems above
            if( size_ + n <= members_.capacity_ )
            {
                grow_back( n );
                iterator where = const_cast<T*>(before);
                std::copy( before, cend() - n, where + n );
                std::fill( where, where + n, x );
                BOOST_ASSERT( is_valid() );
                return;
            }

            auto_buffer temp( new_capacity_impl( size_ + n ) );
            temp.unchecked_push_back( cbegin(), before );
            std::uninitialized_fill_n( temp.end(), n, x );
            temp.size_ += n;
            temp.unchecked_push_back( before, cend() );
            one_sided_swap( temp );
            BOOST_ASSERT( is_valid() );
        }

        template< class ForwardIterator >
        void insert( const_iterator before,
                     ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
        {
            typedef typename std::iterator_traits<ForwardIterator>
                ::iterator_category category;
            insert_impl( before, begin_arg, end_arg, category() );
        }

        void pop_back()
        {
            BOOST_ASSERT( !empty() );
            auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
            --size_;
        }

        void pop_back_n( size_type n )
        {
            BOOST_ASSERT( n <= size_ );
            if( n )
            {
                destroy_back_n( n );
                size_ -= n;
            }
        }

        void clear()
        {
            pop_back_n( size_ );
        }

        iterator erase( const_iterator where )
        {
            BOOST_ASSERT( !empty() );
            BOOST_ASSERT( cbegin() <= where );
            BOOST_ASSERT( cend() > where );

            unsigned elements = cend() - where - 1u;

            if( elements > 0u )
            {
                const_iterator start = where + 1u;
                std::copy( start, start + elements,
                           const_cast<T*>(where) );
            }
            pop_back();
            BOOST_ASSERT( !full() );
            iterator result = const_cast<T*>( where );
            BOOST_ASSERT( result <= end() );
            return result;
        }

        iterator erase( const_iterator from, const_iterator to )
        {
            BOOST_ASSERT( !(std::distance(from,to)>0) ||
                          !empty() );
            BOOST_ASSERT( cbegin() <= from );
            BOOST_ASSERT( cend() >= to );

            unsigned elements = std::distance(to,cend());

            if( elements > 0u )
            {
                BOOST_ASSERT( elements > 0u );
                std::copy( to, to + elements,
                           const_cast<T*>(from) );
            }
            pop_back_n( std::distance(from,to) );
            BOOST_ASSERT( !full() );
            iterator result = const_cast<T*>( from );
            BOOST_ASSERT( result <= end() );
            return result;
        }

        void shrink_to_fit()
        {
            if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
                return;

            reserve_impl( size_ );
            members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
            BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
            BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
        }

        // Extend by n elements WITHOUT constructing them; returns a pointer
        // to the first raw slot.  Caller must construct the elements.
        pointer uninitialized_grow( size_type n ) // strong
        {
            if( size_ + n > members_.capacity_ )
                reserve( size_ + n );

            pointer res = end();
            size_ += n;
            return res;
        }

        // Drop the last n elements WITHOUT destroying them.
        void uninitialized_shrink( size_type n ) // nothrow
        {
            // @remark: test for wrap-around
            BOOST_ASSERT( size_ - n <= members_.capacity_ );
            size_ -= n;
        }

        void uninitialized_resize( size_type n )
        {
            if( n > size() )
                uninitialized_grow( n - size() );
            else if( n < size() )
                uninitialized_shrink( size() - n );

            BOOST_ASSERT( size() == n );
        }

        // nothrow - if both buffer are on the heap, or
        //         - if one buffer is on the heap and one has
        //           'has_allocated_buffer() == false', or
        //         - if copy-construction cannot throw
        // basic   - otherwise (better guarantee impossible)
        // requirement: the allocator must be no-throw-swappable
        void swap( auto_buffer& r )
        {
            bool on_stack     = is_on_stack();
            bool r_on_stack   = r.is_on_stack();
            bool both_on_heap = !on_stack && !r_on_stack;
            if( both_on_heap )
            {
                boost::swap( get_allocator(), r.get_allocator() );
                boost::swap( members_.capacity_, r.members_.capacity_ );
                boost::swap( buffer_, r.buffer_ );
                boost::swap( size_, r.size_ );
                BOOST_ASSERT( is_valid() );
                BOOST_ASSERT( r.is_valid() );
                return;
            }

            BOOST_ASSERT( on_stack || r_on_stack );
            bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
                                        (!on_stack && r_on_stack);

            //
            // Remark: we now know that we can copy into
            //         the unused stack buffer.
            //
            if( exactly_one_on_stack )
            {
                auto_buffer* one_on_stack = on_stack ? this : &r;
                auto_buffer* other        = on_stack ? &r : this;
                pointer new_buffer = static_cast<T*>(other->members_.address());
                copy_impl( one_on_stack->begin(), one_on_stack->end(),
                           new_buffer );                            // strong
                one_on_stack->auto_buffer_destroy();                // nothrow
                boost::swap( get_allocator(), r.get_allocator() );  // assume nothrow
                boost::swap( members_.capacity_, r.members_.capacity_ );
                boost::swap( size_, r.size_ );
                one_on_stack->buffer_ = other->buffer_;
                other->buffer_        = new_buffer;
                BOOST_ASSERT( other->is_on_stack() );
                BOOST_ASSERT( !one_on_stack->is_on_stack() );
                BOOST_ASSERT( is_valid() );
                BOOST_ASSERT( r.is_valid() );
                return;
            }

            BOOST_ASSERT( on_stack && r_on_stack );
            swap_helper( *this, r, boost::has_trivial_assign<T>() );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
        }

    private:
        // Raw, suitably aligned storage for the N-element stack buffer.
        typedef boost::aligned_storage< N * sizeof(T),
                                        boost::alignment_of<T>::value >
                               storage;

        struct members_type : storage /* to enable EBO */
        {
            size_type capacity_;

            members_type( size_type capacity )
               : capacity_(capacity)
            { }

            void* address() const
            { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
        };

        members_type members_; // stack storage + current capacity
        pointer      buffer_;  // points at stack storage or heap allocation
        size_type    size_;    // number of constructed elements

    };
1082
    // Free swap so ADL and boost::swap find auto_buffer's member swap.
    template< class T, class SBP, class GP, class A >
    inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
    {
        l.swap( r );
    }
1088
1089 template< class T, class SBP, class GP, class A >
1090 inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
1091 const auto_buffer<T,SBP,GP,A>& r )
1092 {
1093 if( l.size() != r.size() )
1094 return false;
1095 return std::equal( l.begin(), l.end(), r.begin() );
1096 }
1097
1098 template< class T, class SBP, class GP, class A >
1099 inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
1100 const auto_buffer<T,SBP,GP,A>& r )
1101 {
1102 return !(l == r);
1103 }
1104
1105 template< class T, class SBP, class GP, class A >
1106 inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
1107 const auto_buffer<T,SBP,GP,A>& r )
1108 {
1109 return std::lexicographical_compare( l.begin(), l.end(),
1110 r.begin(), r.end() );
1111 }
1112
1113 template< class T, class SBP, class GP, class A >
1114 inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
1115 const auto_buffer<T,SBP,GP,A>& r )
1116 {
1117 return (r < l);
1118 }
1119
1120 template< class T, class SBP, class GP, class A >
1121 inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
1122 const auto_buffer<T,SBP,GP,A>& r )
1123 {
b32b8144 1124 return !(l > r);
7c673cae
FG
1125 }
1126
1127 template< class T, class SBP, class GP, class A >
1128 inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
1129 const auto_buffer<T,SBP,GP,A>& r )
1130 {
1131 return !(l < r);
1132 }
1133
1134} // namespace detail
1135} // namespace signals2
1136}
1137
1138#if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
1139#pragma warning(pop)
1140#endif
1141
1142#endif