2 * Distributed under the Boost Software License, Version 1.0.
3 * (See accompanying file LICENSE_1_0.txt or copy at
4 * http://www.boost.org/LICENSE_1_0.txt)
6 * Copyright (c) 2009 Helge Bahmann
7 * Copyright (c) 2012 Tim Blechmann
8 * Copyright (c) 2014 Andrey Semashev
11 * \file atomic/detail/ops_msvc_arm.hpp
13 * This header contains implementation of the \c operations template.
16 #ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
17 #define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
21 #include <boost/memory_order.hpp>
22 #include <boost/atomic/detail/config.hpp>
23 #include <boost/atomic/detail/interlocked.hpp>
24 #include <boost/atomic/detail/storage_type.hpp>
25 #include <boost/atomic/detail/operations_fwd.hpp>
26 #include <boost/atomic/detail/type_traits/make_signed.hpp>
27 #include <boost/atomic/capabilities.hpp>
28 #include <boost/atomic/detail/ops_msvc_common.hpp>
30 #ifdef BOOST_HAS_PRAGMA_ONCE
34 #define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
35 #define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
36 #define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
37 #define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
38 #define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
39 #define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
40 #define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
41 #define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))
47 // A note about memory_order_consume. Technically, this architecture makes it possible
48 // to avoid an unnecessary memory barrier after a consume load, since it supports data
49 // dependency ordering. However, some compiler optimizations may break seemingly valid
50 // code that relies on data dependency tracking by injecting bogus branches to aid out
51 // of order execution. This may happen not only in Boost.Atomic code but also in user's
52 // code, which we have no control of. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
53 // For this reason we promote memory_order_consume to memory_order_acquire.
55 struct msvc_arm_operations_base
57 static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;
59 static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
61 __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
64 static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
66 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
68 if ((order & memory_order_release) != 0)
69 hardware_full_fence();
71 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
74 static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
76 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
78 if (order == memory_order_seq_cst)
79 hardware_full_fence();
81 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
84 static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
86 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
88 if ((order & (memory_order_consume | memory_order_acquire)) != 0)
89 hardware_full_fence();
91 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
94 static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
96 // Combine order flags together and promote memory_order_consume to memory_order_acquire
97 return static_cast< memory_order >(((failure_order | success_order) & ~memory_order_consume) | (((failure_order | success_order) & memory_order_consume) << 1u));
101 template< typename T, typename Derived >
102 struct msvc_arm_operations :
103 public msvc_arm_operations_base
105 typedef T storage_type;
107 static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
109 typedef typename boost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
110 return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
113 static BOOST_FORCEINLINE bool compare_exchange_weak(
114 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
116 return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
119 static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
121 return !!Derived::exchange(storage, (storage_type)1, order);
124 static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
126 Derived::store(storage, (storage_type)0, order);
130 template< bool Signed >
131 struct operations< 1u, Signed > :
132 public msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
134 typedef msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
135 typedef typename base_type::storage_type storage_type;
136 typedef typename make_storage_type< 1u, Signed >::aligned aligned_storage_type;
138 static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
139 static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
141 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
143 base_type::fence_before_store(order);
144 BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
145 base_type::fence_after_store(order);
148 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
150 storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
151 base_type::fence_after_load(order);
155 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
159 case memory_order_relaxed:
160 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
162 case memory_order_consume:
163 case memory_order_acquire:
164 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
166 case memory_order_release:
167 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
169 case memory_order_acq_rel:
170 case memory_order_seq_cst:
172 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
178 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
182 case memory_order_relaxed:
183 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
185 case memory_order_consume:
186 case memory_order_acquire:
187 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
189 case memory_order_release:
190 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
192 case memory_order_acq_rel:
193 case memory_order_seq_cst:
195 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
201 static BOOST_FORCEINLINE bool compare_exchange_strong(
202 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
204 storage_type previous = expected, old_val;
206 switch (cas_common_order(success_order, failure_order))
208 case memory_order_relaxed:
209 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
211 case memory_order_consume:
212 case memory_order_acquire:
213 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
215 case memory_order_release:
216 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
218 case memory_order_acq_rel:
219 case memory_order_seq_cst:
221 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
226 return (previous == old_val);
229 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
233 case memory_order_relaxed:
234 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
236 case memory_order_consume:
237 case memory_order_acquire:
238 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
240 case memory_order_release:
241 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
243 case memory_order_acq_rel:
244 case memory_order_seq_cst:
246 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
252 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
256 case memory_order_relaxed:
257 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
259 case memory_order_consume:
260 case memory_order_acquire:
261 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
263 case memory_order_release:
264 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
266 case memory_order_acq_rel:
267 case memory_order_seq_cst:
269 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
275 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
279 case memory_order_relaxed:
280 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
282 case memory_order_consume:
283 case memory_order_acquire:
284 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
286 case memory_order_release:
287 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
289 case memory_order_acq_rel:
290 case memory_order_seq_cst:
292 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
299 template< bool Signed >
300 struct operations< 2u, Signed > :
301 public msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
303 typedef msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
304 typedef typename base_type::storage_type storage_type;
305 typedef typename make_storage_type< 2u, Signed >::aligned aligned_storage_type;
307 static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
308 static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
310 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
312 base_type::fence_before_store(order);
313 BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
314 base_type::fence_after_store(order);
317 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
319 storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
320 base_type::fence_after_load(order);
324 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
328 case memory_order_relaxed:
329 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
331 case memory_order_consume:
332 case memory_order_acquire:
333 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
335 case memory_order_release:
336 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
338 case memory_order_acq_rel:
339 case memory_order_seq_cst:
341 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
347 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
351 case memory_order_relaxed:
352 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
354 case memory_order_consume:
355 case memory_order_acquire:
356 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
358 case memory_order_release:
359 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
361 case memory_order_acq_rel:
362 case memory_order_seq_cst:
364 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
370 static BOOST_FORCEINLINE bool compare_exchange_strong(
371 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
373 storage_type previous = expected, old_val;
375 switch (cas_common_order(success_order, failure_order))
377 case memory_order_relaxed:
378 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
380 case memory_order_consume:
381 case memory_order_acquire:
382 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
384 case memory_order_release:
385 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
387 case memory_order_acq_rel:
388 case memory_order_seq_cst:
390 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
395 return (previous == old_val);
398 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
402 case memory_order_relaxed:
403 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
405 case memory_order_consume:
406 case memory_order_acquire:
407 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
409 case memory_order_release:
410 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
412 case memory_order_acq_rel:
413 case memory_order_seq_cst:
415 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
421 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
425 case memory_order_relaxed:
426 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
428 case memory_order_consume:
429 case memory_order_acquire:
430 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
432 case memory_order_release:
433 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
435 case memory_order_acq_rel:
436 case memory_order_seq_cst:
438 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
444 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
448 case memory_order_relaxed:
449 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
451 case memory_order_consume:
452 case memory_order_acquire:
453 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
455 case memory_order_release:
456 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
458 case memory_order_acq_rel:
459 case memory_order_seq_cst:
461 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
468 template< bool Signed >
469 struct operations< 4u, Signed > :
470 public msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
472 typedef msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
473 typedef typename base_type::storage_type storage_type;
474 typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;
476 static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
477 static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
479 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
481 base_type::fence_before_store(order);
482 BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
483 base_type::fence_after_store(order);
486 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
488 storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
489 base_type::fence_after_load(order);
493 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
497 case memory_order_relaxed:
498 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
500 case memory_order_consume:
501 case memory_order_acquire:
502 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
504 case memory_order_release:
505 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
507 case memory_order_acq_rel:
508 case memory_order_seq_cst:
510 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
516 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
520 case memory_order_relaxed:
521 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
523 case memory_order_consume:
524 case memory_order_acquire:
525 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
527 case memory_order_release:
528 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
530 case memory_order_acq_rel:
531 case memory_order_seq_cst:
533 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
539 static BOOST_FORCEINLINE bool compare_exchange_strong(
540 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
542 storage_type previous = expected, old_val;
544 switch (cas_common_order(success_order, failure_order))
546 case memory_order_relaxed:
547 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
549 case memory_order_consume:
550 case memory_order_acquire:
551 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
553 case memory_order_release:
554 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
556 case memory_order_acq_rel:
557 case memory_order_seq_cst:
559 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
564 return (previous == old_val);
567 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
571 case memory_order_relaxed:
572 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
574 case memory_order_consume:
575 case memory_order_acquire:
576 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
578 case memory_order_release:
579 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
581 case memory_order_acq_rel:
582 case memory_order_seq_cst:
584 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
590 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
594 case memory_order_relaxed:
595 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
597 case memory_order_consume:
598 case memory_order_acquire:
599 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
601 case memory_order_release:
602 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
604 case memory_order_acq_rel:
605 case memory_order_seq_cst:
607 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
613 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
617 case memory_order_relaxed:
618 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
620 case memory_order_consume:
621 case memory_order_acquire:
622 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
624 case memory_order_release:
625 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
627 case memory_order_acq_rel:
628 case memory_order_seq_cst:
630 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
637 template< bool Signed >
638 struct operations< 8u, Signed > :
639 public msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
641 typedef msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
642 typedef typename base_type::storage_type storage_type;
643 typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;
645 static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
646 static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
648 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
650 base_type::fence_before_store(order);
651 BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
652 base_type::fence_after_store(order);
655 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
657 storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
658 base_type::fence_after_load(order);
662 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
666 case memory_order_relaxed:
667 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
669 case memory_order_consume:
670 case memory_order_acquire:
671 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
673 case memory_order_release:
674 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
676 case memory_order_acq_rel:
677 case memory_order_seq_cst:
679 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
685 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
689 case memory_order_relaxed:
690 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
692 case memory_order_consume:
693 case memory_order_acquire:
694 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
696 case memory_order_release:
697 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
699 case memory_order_acq_rel:
700 case memory_order_seq_cst:
702 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
708 static BOOST_FORCEINLINE bool compare_exchange_strong(
709 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
711 storage_type previous = expected, old_val;
713 switch (cas_common_order(success_order, failure_order))
715 case memory_order_relaxed:
716 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
718 case memory_order_consume:
719 case memory_order_acquire:
720 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
722 case memory_order_release:
723 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
725 case memory_order_acq_rel:
726 case memory_order_seq_cst:
728 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
733 return (previous == old_val);
736 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
740 case memory_order_relaxed:
741 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
743 case memory_order_consume:
744 case memory_order_acquire:
745 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
747 case memory_order_release:
748 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
750 case memory_order_acq_rel:
751 case memory_order_seq_cst:
753 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
759 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
763 case memory_order_relaxed:
764 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
766 case memory_order_consume:
767 case memory_order_acquire:
768 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
770 case memory_order_release:
771 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
773 case memory_order_acq_rel:
774 case memory_order_seq_cst:
776 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
782 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
786 case memory_order_relaxed:
787 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
789 case memory_order_consume:
790 case memory_order_acquire:
791 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
793 case memory_order_release:
794 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
796 case memory_order_acq_rel:
797 case memory_order_seq_cst:
799 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
807 BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
809 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
810 if (order != memory_order_relaxed)
811 msvc_arm_operations_base::hardware_full_fence();
812 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
815 BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
817 if (order != memory_order_relaxed)
818 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
821 } // namespace detail
822 } // namespace atomics
825 #undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
826 #undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
827 #undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
828 #undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
829 #undef BOOST_ATOMIC_DETAIL_ARM_STORE8
830 #undef BOOST_ATOMIC_DETAIL_ARM_STORE16
831 #undef BOOST_ATOMIC_DETAIL_ARM_STORE32
832 #undef BOOST_ATOMIC_DETAIL_ARM_STORE64
834 #endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_