]>
Commit | Line | Data |
---|---|---|
7c673cae FG |
1 | #ifndef BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_STD_ATOMIC_HPP_INCLUDED |
2 | #define BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_STD_ATOMIC_HPP_INCLUDED | |
3 | ||
4 | // MS compatible compilers support #pragma once | |
5 | ||
6 | #if defined(_MSC_VER) && (_MSC_VER >= 1020) | |
7 | # pragma once | |
8 | #endif | |
9 | ||
10 | // detail/sp_counted_base_std_atomic.hpp - C++11 std::atomic | |
11 | // | |
12 | // Copyright (c) 2007, 2013 Peter Dimov | |
13 | // | |
14 | // Distributed under the Boost Software License, Version 1.0. | |
15 | // See accompanying file LICENSE_1_0.txt or copy at | |
16 | // http://www.boost.org/LICENSE_1_0.txt | |
17 | ||
18 | #include <boost/detail/sp_typeinfo.hpp> | |
19 | #include <atomic> | |
20 | #include <cstdint> | |
21 | ||
22 | namespace boost | |
23 | { | |
24 | ||
25 | namespace detail | |
26 | { | |
27 | ||
// Unconditionally bump *pw by one.
//
// Relaxed ordering is sufficient here: adding a reference to an
// object that is already owned does not need to synchronize with
// any other operation.
inline void atomic_increment( std::atomic_int_least32_t * pw )
{
    std::atomic_fetch_add_explicit( pw, static_cast< std::int_least32_t >( 1 ), std::memory_order_relaxed );
}
32 | ||
// Decrement *pw by one and return the value it held BEFORE the
// decrement (so a return of 1 means the count just reached zero).
//
// acq_rel ordering: the release half publishes this thread's prior
// writes to whichever thread later drops the count to zero; the
// acquire half makes those writes visible to the thread that does.
inline std::int_least32_t atomic_decrement( std::atomic_int_least32_t * pw )
{
    return std::atomic_fetch_sub_explicit( pw, static_cast< std::int_least32_t >( 1 ), std::memory_order_acq_rel );
}
37 | ||
// Increment *pw unless it is zero. Returns the value observed
// before the (attempted) increment; a return of 0 means the count
// had already reached zero and was left untouched. Used by
// add_ref_lock() to promote a weak reference only while the object
// is still alive.
inline std::int_least32_t atomic_conditional_increment( std::atomic_int_least32_t * pw )
{
    std::int_least32_t observed = pw->load( std::memory_order_relaxed );

    // On CAS failure compare_exchange_weak reloads 'observed' with
    // the current value, so the loop terminates either when the
    // count is seen to be zero or when the increment lands.
    while( observed != 0 &&
        !pw->compare_exchange_weak( observed, observed + 1,
            std::memory_order_relaxed, std::memory_order_relaxed ) )
    {
    }

    return observed;
}
59 | ||
// Abstract control block shared by shared_ptr/weak_ptr instances.
//
// Invariants (maintained by the operations below):
//   use_count_  == number of shared owners
//   weak_count_ == number of weak owners, plus 1 while use_count_ != 0
//
// When use_count_ reaches zero, dispose() releases the managed object;
// when weak_count_ reaches zero, destroy() releases this control block.
class sp_counted_base
{
private:

    // Noncopyable: a control block's identity is its address.
    sp_counted_base( sp_counted_base const & );
    sp_counted_base & operator= ( sp_counted_base const & );

    std::atomic_int_least32_t use_count_;   // #shared
    std::atomic_int_least32_t weak_count_;  // #weak + (#shared != 0)

public:

    // A freshly created block represents one shared owner; the extra
    // weak count of 1 stands for "use_count_ is nonzero".
    sp_counted_base(): use_count_( 1 ), weak_count_( 1 )
    {
    }

    // Virtual so deletion through a base pointer in destroy() is safe.
    virtual ~sp_counted_base() // nothrow
    {
    }

    // dispose() is called when use_count_ drops to zero, to release
    // the resources managed by *this.

    virtual void dispose() = 0; // nothrow

    // destroy() is called when weak_count_ drops to zero.

    virtual void destroy() // nothrow
    {
        delete this;
    }

    // Deleter lookup hooks for get_deleter(); implemented by the
    // concrete sp_counted_impl_* subclasses.
    virtual void * get_deleter( sp_typeinfo const & ti ) = 0;
    virtual void * get_untyped_deleter() = 0;

    // Add a shared owner; only valid while at least one shared owner
    // already exists (relaxed increment, see atomic_increment).
    void add_ref_copy()
    {
        atomic_increment( &use_count_ );
    }

    // Try to add a shared owner from a weak reference; fails (returns
    // false) once use_count_ has reached zero.
    bool add_ref_lock() // true on success
    {
        return atomic_conditional_increment( &use_count_ ) != 0;
    }

    void release() // nothrow
    {
        // atomic_decrement returns the PRE-decrement value, so == 1
        // means this call dropped the shared count to zero: release
        // the object, then drop the weak count it was holding.
        if( atomic_decrement( &use_count_ ) == 1 )
        {
            dispose();
            weak_release();
        }
    }

    void weak_add_ref() // nothrow
    {
        atomic_increment( &weak_count_ );
    }

    void weak_release() // nothrow
    {
        // Last weak reference gone (pre-decrement value was 1):
        // no shared_ptr or weak_ptr refers to this block any more.
        if( atomic_decrement( &weak_count_ ) == 1 )
        {
            destroy();
        }
    }

    // Snapshot of the shared-owner count; acquire load so the caller
    // observes a value no staler than the point of the call. The
    // result may be out of date as soon as it is returned.
    long use_count() const // nothrow
    {
        return use_count_.load( std::memory_order_acquire );
    }
};
132 | ||
133 | } // namespace detail | |
134 | ||
135 | } // namespace boost | |
136 | ||
137 | #endif // #ifndef BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_STD_ATOMIC_HPP_INCLUDED |