]> git.proxmox.com Git - ceph.git/blob - ceph/src/boost/libs/atomic/include/boost/atomic/detail/atomic_template.hpp
bump version to 12.2.2-pve1
[ceph.git] / ceph / src / boost / libs / atomic / include / boost / atomic / detail / atomic_template.hpp
1 /*
2 * Distributed under the Boost Software License, Version 1.0.
3 * (See accompanying file LICENSE_1_0.txt or copy at
4 * http://www.boost.org/LICENSE_1_0.txt)
5 *
6 * Copyright (c) 2011 Helge Bahmann
7 * Copyright (c) 2013 Tim Blechmann
8 * Copyright (c) 2014 Andrey Semashev
9 */
10 /*!
11 * \file atomic/detail/atomic_template.hpp
12 *
13 * This header contains interface definition of \c atomic template.
14 */
15
16 #ifndef BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_
17 #define BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_
18
19 #include <cstddef>
20 #include <boost/cstdint.hpp>
21 #include <boost/assert.hpp>
22 #include <boost/type_traits/is_signed.hpp>
23 #include <boost/type_traits/is_integral.hpp>
24 #include <boost/atomic/detail/config.hpp>
25 #include <boost/atomic/detail/bitwise_cast.hpp>
26 #include <boost/atomic/detail/operations_fwd.hpp>
27
28 #ifdef BOOST_HAS_PRAGMA_ONCE
29 #pragma once
30 #endif
31
32 #if defined(BOOST_MSVC)
33 #pragma warning(push)
34 // 'boost::atomics::atomic<T>' : multiple assignment operators specified
35 #pragma warning(disable: 4522)
36 #endif
37
38 /*
39 * IMPLEMENTATION NOTE: All interface functions MUST be declared with BOOST_FORCEINLINE,
40 * see comment for convert_memory_order_to_gcc in ops_gcc_atomic.hpp.
41 */
42
43 namespace boost {
44 namespace atomics {
45 namespace detail {
46
47 BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order deduce_failure_order(memory_order order) BOOST_NOEXCEPT
48 {
49 return order == memory_order_acq_rel ? memory_order_acquire : (order == memory_order_release ? memory_order_relaxed : order);
50 }
51
52 BOOST_FORCEINLINE BOOST_CONSTEXPR bool cas_failure_order_must_not_be_stronger_than_success_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
53 {
54 // 15 == (memory_order_seq_cst | memory_order_consume), see memory_order.hpp
55 // Given the enum values we can test the strength of memory order requirements with this single condition.
56 return (failure_order & 15u) <= (success_order & 15u);
57 }
58
//! Dispatch trait: maps a value type T to the tag type that selects the matching
//! base_atomic specialization. The primary template tags non-integral, non-pointer
//! types (structs, enums) with void.
template< typename T, bool IsInt = boost::is_integral< T >::value >
struct classify
{
    typedef void type;
};

// Integral types are tagged with int
template< typename T >
struct classify< T, true > { typedef int type; };

// Object pointers are tagged with void* (T* with non-integral T, hence the false argument)
template< typename T >
struct classify< T*, false > { typedef void* type; };

//! Base implementation of the atomic operations, specialized on the classify< T >::type tag
template< typename T, typename Kind >
class base_atomic;
73
//! Implementation for integers
//!
//! Values are converted to/from the backend's storage_type with static_cast; the
//! operations backend is selected by the value's storage size and signedness.
//! The BOOST_ASSERTs document the memory-order preconditions of the C++11 atomic
//! operations interface.
template< typename T >
class base_atomic< T, int >
{
private:
    typedef T value_type;
    typedef T difference_type;

protected:
    // Backend providing the actual atomic instructions for this size/signedness
    typedef atomics::detail::operations< storage_size_of< value_type >::value, boost::is_signed< T >::value > operations;
    typedef value_type value_arg_type;

public:
    typedef typename operations::storage_type storage_type;

protected:
    // Suitably aligned storage the backend operates on
    typename operations::aligned_storage_type m_storage;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : m_storage(v) {}

    //! Atomically replaces the stored value. Precondition (asserted): order is a store-compatible ordering.
    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, static_cast< storage_type >(v), order);
    }

    //! Atomically reads the stored value. Precondition (asserted): order is a load-compatible ordering.
    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return static_cast< value_type >(operations::load(m_storage.value, order));
    }

    //! Atomically adds v and returns the previous value
    BOOST_FORCEINLINE value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_add(m_storage.value, static_cast< storage_type >(v), order));
    }

    //! Atomically subtracts v and returns the previous value
    BOOST_FORCEINLINE value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_sub(m_storage.value, static_cast< storage_type >(v), order));
    }

    //! Atomically replaces the value with v and returns the previous value
    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::exchange(m_storage.value, static_cast< storage_type >(v), order));
    }

    //! Strong CAS; on failure, expected is updated with the observed value.
    //! Preconditions (asserted): failure_order is load-compatible and not stronger than success_order.
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = static_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order);
        expected = static_cast< value_type >(old_value);
        return res;
    }

    //! Strong CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Weak CAS (may fail spuriously); on failure, expected is updated with the observed value.
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = static_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order);
        expected = static_cast< value_type >(old_value);
        return res;
    }

    //! Weak CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Atomic bitwise AND; returns the previous value
    BOOST_FORCEINLINE value_type fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_and(m_storage.value, static_cast< storage_type >(v), order));
    }

    //! Atomic bitwise OR; returns the previous value
    BOOST_FORCEINLINE value_type fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_or(m_storage.value, static_cast< storage_type >(v), order));
    }

    //! Atomic bitwise XOR; returns the previous value
    BOOST_FORCEINLINE value_type fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_xor(m_storage.value, static_cast< storage_type >(v), order));
    }

    //! Returns true if the backend implements the operations without locking
    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    // The operators below are implemented in terms of the fetch_* operations
    // (seq_cst ordering) and return values consistent with the builtin operators.

    BOOST_FORCEINLINE value_type operator++(int) volatile BOOST_NOEXCEPT
    {
        return fetch_add(1);
    }

    BOOST_FORCEINLINE value_type operator++() volatile BOOST_NOEXCEPT
    {
        return fetch_add(1) + 1;
    }

    BOOST_FORCEINLINE value_type operator--(int) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1);
    }

    BOOST_FORCEINLINE value_type operator--() volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1) - 1;
    }

    BOOST_FORCEINLINE value_type operator+=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_add(v) + v;
    }

    BOOST_FORCEINLINE value_type operator-=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(v) - v;
    }

    BOOST_FORCEINLINE value_type operator&=(value_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_and(v) & v;
    }

    BOOST_FORCEINLINE value_type operator|=(value_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_or(v) | v;
    }

    BOOST_FORCEINLINE value_type operator^=(value_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_xor(v) ^ v;
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
230
//! Implementation for bool
//!
//! Uses the 1-byte unsigned operations backend; loaded storage values are
//! normalized to a proper bool with !! on the way out.
template< >
class base_atomic< bool, int >
{
private:
    typedef bool value_type;

protected:
    // Backend providing the actual atomic instructions (1 byte, unsigned)
    typedef atomics::detail::operations< 1u, false > operations;
    typedef value_type value_arg_type;

public:
    typedef operations::storage_type storage_type;

protected:
    // Suitably aligned storage the backend operates on
    operations::aligned_storage_type m_storage;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : m_storage(v) {}

    //! Atomically replaces the stored value. Precondition (asserted): order is a store-compatible ordering.
    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, static_cast< storage_type >(v), order);
    }

    //! Atomically reads the stored value. Precondition (asserted): order is a load-compatible ordering.
    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return !!operations::load(m_storage.value, order);
    }

    //! Atomically replaces the value with v and returns the previous value
    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return !!operations::exchange(m_storage.value, static_cast< storage_type >(v), order);
    }

    //! Strong CAS; on failure, expected is updated with the observed value.
    //! Preconditions (asserted): failure_order is load-compatible and not stronger than success_order.
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = static_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order);
        expected = !!old_value;
        return res;
    }

    //! Strong CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Weak CAS (may fail spuriously); on failure, expected is updated with the observed value.
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = static_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order);
        expected = !!old_value;
        return res;
    }

    //! Weak CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Returns true if the backend implements the operations without locking
    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
316
317
//! Implementation for user-defined types, such as structs and enums
//!
//! Values are converted to/from the backend's storage_type with bitwise_cast
//! (a bit-pattern copy), since no arithmetic conversion exists for these types.
//! Only load/store/exchange/CAS are provided — no arithmetic or bitwise ops.
template< typename T >
class base_atomic< T, void >
{
private:
    typedef T value_type;

protected:
    // Backend providing the actual atomic instructions for this storage size
    typedef atomics::detail::operations< storage_size_of< value_type >::value, false > operations;
    typedef value_type const& value_arg_type;

public:
    typedef typename operations::storage_type storage_type;

protected:
    // Suitably aligned storage the backend operates on
    typename operations::aligned_storage_type m_storage;

public:
    // Also serves as the default constructor: v defaults to a value-initialized value_type
    BOOST_FORCEINLINE explicit base_atomic(value_type const& v = value_type()) BOOST_NOEXCEPT : m_storage(atomics::detail::bitwise_cast< storage_type >(v))
    {
    }

    //! Atomically replaces the stored value. Precondition (asserted): order is a store-compatible ordering.
    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order);
    }

    //! Atomically reads the stored value. Precondition (asserted): order is a load-compatible ordering.
    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return atomics::detail::bitwise_cast< value_type >(operations::load(m_storage.value, order));
    }

    //! Atomically replaces the value with v and returns the previous value
    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::exchange(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order));
    }

    //! Strong CAS (bitwise comparison); on failure, expected is updated with the observed value.
    //! Preconditions (asserted): failure_order is load-compatible and not stronger than success_order.
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    //! Strong CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Weak CAS (may fail spuriously); on failure, expected is updated with the observed value.
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    //! Weak CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Returns true if the backend implements the operations without locking
    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
404
405
//! Implementation for pointers
//!
//! Pointer values are converted to/from storage_type with bitwise_cast.
//! Arithmetic operations scale the ptrdiff_t operand by sizeof(T), matching
//! builtin pointer arithmetic.
template< typename T >
class base_atomic< T*, void* >
{
private:
    typedef T* value_type;
    typedef std::ptrdiff_t difference_type;

protected:
    // Backend providing the actual atomic instructions for pointer-sized storage
    typedef atomics::detail::operations< storage_size_of< value_type >::value, false > operations;
    typedef value_type value_arg_type;

public:
    typedef typename operations::storage_type storage_type;

protected:
    // Suitably aligned storage the backend operates on
    typename operations::aligned_storage_type m_storage;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(), {})
    BOOST_FORCEINLINE explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : m_storage(atomics::detail::bitwise_cast< storage_type >(v))
    {
    }

    //! Atomically replaces the stored pointer. Precondition (asserted): order is a store-compatible ordering.
    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order);
    }

    //! Atomically reads the stored pointer. Precondition (asserted): order is a load-compatible ordering.
    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return atomics::detail::bitwise_cast< value_type >(operations::load(m_storage.value, order));
    }

    //! Atomically advances the pointer by v elements (v * sizeof(T) bytes); returns the previous value
    BOOST_FORCEINLINE value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::fetch_add(m_storage.value, static_cast< storage_type >(v * sizeof(T)), order));
    }

    //! Atomically retreats the pointer by v elements (v * sizeof(T) bytes); returns the previous value
    BOOST_FORCEINLINE value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::fetch_sub(m_storage.value, static_cast< storage_type >(v * sizeof(T)), order));
    }

    //! Atomically replaces the pointer with v and returns the previous value
    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::exchange(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order));
    }

    //! Strong CAS; on failure, expected is updated with the observed value.
    //! Preconditions (asserted): failure_order is load-compatible and not stronger than success_order.
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    //! Strong CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Weak CAS (may fail spuriously); on failure, expected is updated with the observed value.
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    //! Weak CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Returns true if the backend implements the operations without locking
    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    // The operators below are implemented in terms of fetch_add/fetch_sub
    // (seq_cst ordering) and return values consistent with the builtin operators.

    BOOST_FORCEINLINE value_type operator++(int) volatile BOOST_NOEXCEPT
    {
        return fetch_add(1);
    }

    BOOST_FORCEINLINE value_type operator++() volatile BOOST_NOEXCEPT
    {
        return fetch_add(1) + 1;
    }

    BOOST_FORCEINLINE value_type operator--(int) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1);
    }

    BOOST_FORCEINLINE value_type operator--() volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1) - 1;
    }

    BOOST_FORCEINLINE value_type operator+=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_add(v) + v;
    }

    BOOST_FORCEINLINE value_type operator-=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(v) - v;
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
534
535
//! Implementation for void pointers
//!
//! Unlike the typed-pointer specialization, arithmetic is byte-wise: fetch_add
//! and fetch_sub add the raw ptrdiff_t without scaling, and the operators cast
//! through char* to perform the corresponding byte offset on the result.
template< >
class base_atomic< void*, void* >
{
private:
    typedef void* value_type;
    typedef std::ptrdiff_t difference_type;

protected:
    // Backend providing the actual atomic instructions for pointer-sized storage
    typedef atomics::detail::operations< storage_size_of< value_type >::value, false > operations;
    typedef value_type value_arg_type;

public:
    typedef operations::storage_type storage_type;

protected:
    // Suitably aligned storage the backend operates on
    operations::aligned_storage_type m_storage;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(), {})
    BOOST_FORCEINLINE explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : m_storage(atomics::detail::bitwise_cast< storage_type >(v))
    {
    }

    //! Atomically replaces the stored pointer. Precondition (asserted): order is a store-compatible ordering.
    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order);
    }

    //! Atomically reads the stored pointer. Precondition (asserted): order is a load-compatible ordering.
    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return atomics::detail::bitwise_cast< value_type >(operations::load(m_storage.value, order));
    }

    //! Atomically advances the pointer by v bytes (no sizeof scaling); returns the previous value
    BOOST_FORCEINLINE value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::fetch_add(m_storage.value, static_cast< storage_type >(v), order));
    }

    //! Atomically retreats the pointer by v bytes (no sizeof scaling); returns the previous value
    BOOST_FORCEINLINE value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::fetch_sub(m_storage.value, static_cast< storage_type >(v), order));
    }

    //! Atomically replaces the pointer with v and returns the previous value
    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::exchange(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order));
    }

    //! Strong CAS; on failure, expected is updated with the observed value.
    //! Preconditions (asserted): failure_order is load-compatible and not stronger than success_order.
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    //! Strong CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Weak CAS (may fail spuriously); on failure, expected is updated with the observed value.
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    //! Weak CAS with the failure ordering derived from the single order argument
    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    //! Returns true if the backend implements the operations without locking
    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    // The operators below perform byte-wise arithmetic (seq_cst ordering); the
    // char* casts compute the adjusted return value from the fetched pointer.

    BOOST_FORCEINLINE value_type operator++(int) volatile BOOST_NOEXCEPT
    {
        return fetch_add(1);
    }

    BOOST_FORCEINLINE value_type operator++() volatile BOOST_NOEXCEPT
    {
        return (char*)fetch_add(1) + 1;
    }

    BOOST_FORCEINLINE value_type operator--(int) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1);
    }

    BOOST_FORCEINLINE value_type operator--() volatile BOOST_NOEXCEPT
    {
        return (char*)fetch_sub(1) - 1;
    }

    BOOST_FORCEINLINE value_type operator+=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return (char*)fetch_add(v) + v;
    }

    BOOST_FORCEINLINE value_type operator-=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return (char*)fetch_sub(v) - v;
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
664
665 } // namespace detail
666
//! Public atomic<T> template: adds assignment, implicit conversion and raw storage
//! access on top of the base_atomic specialization selected by classify< T >.
template< typename T >
class atomic :
    public atomics::detail::base_atomic< T, typename atomics::detail::classify< T >::type >
{
private:
    typedef T value_type;
    typedef atomics::detail::base_atomic< T, typename atomics::detail::classify< T >::type > base_type;
    typedef typename base_type::value_arg_type value_arg_type;

public:
    typedef typename base_type::storage_type storage_type;

public:
    // True if the backend is lock-free for this type on every invocation (compile-time constant)
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = base_type::operations::is_always_lock_free;

public:
    BOOST_DEFAULTED_FUNCTION(atomic(), BOOST_NOEXCEPT {})

    // NOTE: The constructor is made explicit because gcc 4.7 complains that
    // operator=(value_arg_type) is considered ambiguous with operator=(atomic const&)
    // in assignment expressions, even though conversion to atomic<> is less preferred
    // than conversion to value_arg_type.
    BOOST_FORCEINLINE explicit BOOST_CONSTEXPR atomic(value_arg_type v) BOOST_NOEXCEPT : base_type(v) {}

    //! Assignment performs a seq_cst store and returns the assigned value
    BOOST_FORCEINLINE value_type operator= (value_arg_type v) volatile BOOST_NOEXCEPT
    {
        this->store(v);
        return v;
    }

    //! Implicit conversion performs a seq_cst load
    BOOST_FORCEINLINE operator value_type() volatile const BOOST_NOEXCEPT
    {
        return this->load();
    }

    //! Direct (non-atomic) access to the underlying storage, for all cv-qualifications
    BOOST_FORCEINLINE storage_type& storage() BOOST_NOEXCEPT { return this->m_storage.value; }
    BOOST_FORCEINLINE storage_type volatile& storage() volatile BOOST_NOEXCEPT { return this->m_storage.value; }
    BOOST_FORCEINLINE storage_type const& storage() const BOOST_NOEXCEPT { return this->m_storage.value; }
    BOOST_FORCEINLINE storage_type const volatile& storage() const volatile BOOST_NOEXCEPT { return this->m_storage.value; }

    BOOST_DELETED_FUNCTION(atomic(atomic const&))
    BOOST_DELETED_FUNCTION(atomic& operator= (atomic const&))
    BOOST_DELETED_FUNCTION(atomic& operator= (atomic const&) volatile)
};
711
// Out-of-class definition of the static data member (needed when the constant is odr-used,
// prior to C++17 inline variables)
template< typename T >
BOOST_CONSTEXPR_OR_CONST bool atomic< T >::is_always_lock_free;
714
// Convenience typedefs mirroring the standard atomic typedef set.

// Character and fixed-width integer types
typedef atomic< char > atomic_char;
typedef atomic< unsigned char > atomic_uchar;
typedef atomic< signed char > atomic_schar;
typedef atomic< uint8_t > atomic_uint8_t;
typedef atomic< int8_t > atomic_int8_t;
typedef atomic< unsigned short > atomic_ushort;
typedef atomic< short > atomic_short;
typedef atomic< uint16_t > atomic_uint16_t;
typedef atomic< int16_t > atomic_int16_t;
typedef atomic< unsigned int > atomic_uint;
typedef atomic< int > atomic_int;
typedef atomic< uint32_t > atomic_uint32_t;
typedef atomic< int32_t > atomic_int32_t;
typedef atomic< unsigned long > atomic_ulong;
typedef atomic< long > atomic_long;
typedef atomic< uint64_t > atomic_uint64_t;
typedef atomic< int64_t > atomic_int64_t;
#ifdef BOOST_HAS_LONG_LONG
typedef atomic< boost::ulong_long_type > atomic_ullong;
typedef atomic< boost::long_long_type > atomic_llong;
#endif
typedef atomic< void* > atomic_address;
typedef atomic< bool > atomic_bool;
typedef atomic< wchar_t > atomic_wchar_t;
#if !defined(BOOST_NO_CXX11_CHAR16_T)
typedef atomic< char16_t > atomic_char16_t;
#endif
#if !defined(BOOST_NO_CXX11_CHAR32_T)
typedef atomic< char32_t > atomic_char32_t;
#endif

// least/fast integer variants
typedef atomic< int_least8_t > atomic_int_least8_t;
typedef atomic< uint_least8_t > atomic_uint_least8_t;
typedef atomic< int_least16_t > atomic_int_least16_t;
typedef atomic< uint_least16_t > atomic_uint_least16_t;
typedef atomic< int_least32_t > atomic_int_least32_t;
typedef atomic< uint_least32_t > atomic_uint_least32_t;
typedef atomic< int_least64_t > atomic_int_least64_t;
typedef atomic< uint_least64_t > atomic_uint_least64_t;
typedef atomic< int_fast8_t > atomic_int_fast8_t;
typedef atomic< uint_fast8_t > atomic_uint_fast8_t;
typedef atomic< int_fast16_t > atomic_int_fast16_t;
typedef atomic< uint_fast16_t > atomic_uint_fast16_t;
typedef atomic< int_fast32_t > atomic_int_fast32_t;
typedef atomic< uint_fast32_t > atomic_uint_fast32_t;
typedef atomic< int_fast64_t > atomic_int_fast64_t;
typedef atomic< uint_fast64_t > atomic_uint_fast64_t;
typedef atomic< intmax_t > atomic_intmax_t;
typedef atomic< uintmax_t > atomic_uintmax_t;

// Size and pointer-difference types
typedef atomic< std::size_t > atomic_size_t;
typedef atomic< std::ptrdiff_t > atomic_ptrdiff_t;

// Pointer-sized integer types, where available
#if defined(BOOST_HAS_INTPTR_T)
typedef atomic< intptr_t > atomic_intptr_t;
typedef atomic< uintptr_t > atomic_uintptr_t;
#endif
772
773 } // namespace atomics
774 } // namespace boost
775
776 #if defined(BOOST_MSVC)
777 #pragma warning(pop)
778 #endif
779
780 #endif // BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_