2 * Distributed under the Boost Software License, Version 1.0.
3 * (See accompanying file LICENSE_1_0.txt or copy at
4 * http://www.boost.org/LICENSE_1_0.txt)
6 * Copyright (c) 2009 Helge Bahmann
7 * Copyright (c) 2012 Tim Blechmann
8 * Copyright (c) 2014 Andrey Semashev
11 * \file atomic/detail/ops_msvc_arm.hpp
13 * This header contains implementation of the \c operations template.
16 #ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
17 #define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
20 #include <boost/memory_order.hpp>
21 #include <boost/type_traits/make_signed.hpp>
22 #include <boost/atomic/detail/config.hpp>
23 #include <boost/atomic/detail/interlocked.hpp>
24 #include <boost/atomic/detail/storage_type.hpp>
25 #include <boost/atomic/detail/operations_fwd.hpp>
26 #include <boost/atomic/capabilities.hpp>
27 #include <boost/atomic/detail/ops_msvc_common.hpp>
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
33 #define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
34 #define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
35 #define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
36 #define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
37 #define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
38 #define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
39 #define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
40 #define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))
46 // A note about memory_order_consume. Technically, this architecture allows to avoid
47 // unnecessary memory barrier after consume load since it supports data dependency ordering.
48 // However, some compiler optimizations may break a seemingly valid code relying on data
49 // dependency tracking by injecting bogus branches to aid out of order execution.
50 // This may happen not only in Boost.Atomic code but also in user's code, which we have no
51 // control of. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
52 // For this reason we promote memory_order_consume to memory_order_acquire.
54 struct msvc_arm_operations_base
56 static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;
58 static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
60 __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
63 static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
65 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
67 if ((order & memory_order_release) != 0)
68 hardware_full_fence();
70 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
73 static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
75 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
77 if (order == memory_order_seq_cst)
78 hardware_full_fence();
80 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
83 static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
85 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
87 if ((order & (memory_order_consume | memory_order_acquire)) != 0)
88 hardware_full_fence();
90 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
93 static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
95 // Combine order flags together and promote memory_order_consume to memory_order_acquire
96 return static_cast< memory_order >(((failure_order | success_order) & ~memory_order_consume) | (((failure_order | success_order) & memory_order_consume) << 1u));
100 template< typename T, typename Derived >
101 struct msvc_arm_operations :
102 public msvc_arm_operations_base
104 typedef T storage_type;
106 static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
108 typedef typename make_signed< storage_type >::type signed_storage_type;
109 return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
112 static BOOST_FORCEINLINE bool compare_exchange_weak(
113 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
115 return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
118 static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
120 return !!Derived::exchange(storage, (storage_type)1, order);
123 static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
125 Derived::store(storage, (storage_type)0, order);
128 static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
134 template< bool Signed >
135 struct operations< 1u, Signed > :
136 public msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
138 typedef msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
139 typedef typename base_type::storage_type storage_type;
140 typedef typename make_storage_type< 1u, Signed >::aligned aligned_storage_type;
142 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
144 base_type::fence_before_store(order);
145 BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
146 base_type::fence_after_store(order);
149 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
151 storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
152 base_type::fence_after_load(order);
156 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
160 case memory_order_relaxed:
161 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
163 case memory_order_consume:
164 case memory_order_acquire:
165 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
167 case memory_order_release:
168 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
170 case memory_order_acq_rel:
171 case memory_order_seq_cst:
173 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
179 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
183 case memory_order_relaxed:
184 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
186 case memory_order_consume:
187 case memory_order_acquire:
188 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
190 case memory_order_release:
191 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
193 case memory_order_acq_rel:
194 case memory_order_seq_cst:
196 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
202 static BOOST_FORCEINLINE bool compare_exchange_strong(
203 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
205 storage_type previous = expected, old_val;
207 switch (cas_common_order(success_order, failure_order))
209 case memory_order_relaxed:
210 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
212 case memory_order_consume:
213 case memory_order_acquire:
214 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
216 case memory_order_release:
217 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
219 case memory_order_acq_rel:
220 case memory_order_seq_cst:
222 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
227 return (previous == old_val);
230 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
234 case memory_order_relaxed:
235 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
237 case memory_order_consume:
238 case memory_order_acquire:
239 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
241 case memory_order_release:
242 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
244 case memory_order_acq_rel:
245 case memory_order_seq_cst:
247 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
253 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
257 case memory_order_relaxed:
258 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
260 case memory_order_consume:
261 case memory_order_acquire:
262 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
264 case memory_order_release:
265 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
267 case memory_order_acq_rel:
268 case memory_order_seq_cst:
270 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
276 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
280 case memory_order_relaxed:
281 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
283 case memory_order_consume:
284 case memory_order_acquire:
285 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
287 case memory_order_release:
288 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
290 case memory_order_acq_rel:
291 case memory_order_seq_cst:
293 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
300 template< bool Signed >
301 struct operations< 2u, Signed > :
302 public msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
304 typedef msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
305 typedef typename base_type::storage_type storage_type;
306 typedef typename make_storage_type< 2u, Signed >::aligned aligned_storage_type;
308 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
310 base_type::fence_before_store(order);
311 BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
312 base_type::fence_after_store(order);
315 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
317 storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
318 base_type::fence_after_load(order);
322 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
326 case memory_order_relaxed:
327 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
329 case memory_order_consume:
330 case memory_order_acquire:
331 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
333 case memory_order_release:
334 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
336 case memory_order_acq_rel:
337 case memory_order_seq_cst:
339 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
345 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
349 case memory_order_relaxed:
350 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
352 case memory_order_consume:
353 case memory_order_acquire:
354 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
356 case memory_order_release:
357 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
359 case memory_order_acq_rel:
360 case memory_order_seq_cst:
362 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
368 static BOOST_FORCEINLINE bool compare_exchange_strong(
369 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
371 storage_type previous = expected, old_val;
373 switch (cas_common_order(success_order, failure_order))
375 case memory_order_relaxed:
376 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
378 case memory_order_consume:
379 case memory_order_acquire:
380 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
382 case memory_order_release:
383 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
385 case memory_order_acq_rel:
386 case memory_order_seq_cst:
388 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
393 return (previous == old_val);
396 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
400 case memory_order_relaxed:
401 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
403 case memory_order_consume:
404 case memory_order_acquire:
405 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
407 case memory_order_release:
408 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
410 case memory_order_acq_rel:
411 case memory_order_seq_cst:
413 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
419 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
423 case memory_order_relaxed:
424 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
426 case memory_order_consume:
427 case memory_order_acquire:
428 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
430 case memory_order_release:
431 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
433 case memory_order_acq_rel:
434 case memory_order_seq_cst:
436 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
442 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
446 case memory_order_relaxed:
447 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
449 case memory_order_consume:
450 case memory_order_acquire:
451 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
453 case memory_order_release:
454 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
456 case memory_order_acq_rel:
457 case memory_order_seq_cst:
459 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
466 template< bool Signed >
467 struct operations< 4u, Signed > :
468 public msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
470 typedef msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
471 typedef typename base_type::storage_type storage_type;
472 typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;
474 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
476 base_type::fence_before_store(order);
477 BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
478 base_type::fence_after_store(order);
481 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
483 storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
484 base_type::fence_after_load(order);
488 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
492 case memory_order_relaxed:
493 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
495 case memory_order_consume:
496 case memory_order_acquire:
497 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
499 case memory_order_release:
500 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
502 case memory_order_acq_rel:
503 case memory_order_seq_cst:
505 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
511 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
515 case memory_order_relaxed:
516 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
518 case memory_order_consume:
519 case memory_order_acquire:
520 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
522 case memory_order_release:
523 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
525 case memory_order_acq_rel:
526 case memory_order_seq_cst:
528 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
534 static BOOST_FORCEINLINE bool compare_exchange_strong(
535 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
537 storage_type previous = expected, old_val;
539 switch (cas_common_order(success_order, failure_order))
541 case memory_order_relaxed:
542 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
544 case memory_order_consume:
545 case memory_order_acquire:
546 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
548 case memory_order_release:
549 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
551 case memory_order_acq_rel:
552 case memory_order_seq_cst:
554 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
559 return (previous == old_val);
562 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
566 case memory_order_relaxed:
567 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
569 case memory_order_consume:
570 case memory_order_acquire:
571 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
573 case memory_order_release:
574 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
576 case memory_order_acq_rel:
577 case memory_order_seq_cst:
579 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
585 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
589 case memory_order_relaxed:
590 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
592 case memory_order_consume:
593 case memory_order_acquire:
594 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
596 case memory_order_release:
597 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
599 case memory_order_acq_rel:
600 case memory_order_seq_cst:
602 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
608 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
612 case memory_order_relaxed:
613 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
615 case memory_order_consume:
616 case memory_order_acquire:
617 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
619 case memory_order_release:
620 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
622 case memory_order_acq_rel:
623 case memory_order_seq_cst:
625 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
632 template< bool Signed >
633 struct operations< 8u, Signed > :
634 public msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
636 typedef msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
637 typedef typename base_type::storage_type storage_type;
638 typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;
640 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
642 base_type::fence_before_store(order);
643 BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
644 base_type::fence_after_store(order);
647 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
649 storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
650 base_type::fence_after_load(order);
654 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
658 case memory_order_relaxed:
659 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
661 case memory_order_consume:
662 case memory_order_acquire:
663 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
665 case memory_order_release:
666 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
668 case memory_order_acq_rel:
669 case memory_order_seq_cst:
671 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
677 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
681 case memory_order_relaxed:
682 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
684 case memory_order_consume:
685 case memory_order_acquire:
686 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
688 case memory_order_release:
689 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
691 case memory_order_acq_rel:
692 case memory_order_seq_cst:
694 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
700 static BOOST_FORCEINLINE bool compare_exchange_strong(
701 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
703 storage_type previous = expected, old_val;
705 switch (cas_common_order(success_order, failure_order))
707 case memory_order_relaxed:
708 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
710 case memory_order_consume:
711 case memory_order_acquire:
712 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
714 case memory_order_release:
715 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
717 case memory_order_acq_rel:
718 case memory_order_seq_cst:
720 old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
725 return (previous == old_val);
728 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
732 case memory_order_relaxed:
733 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
735 case memory_order_consume:
736 case memory_order_acquire:
737 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
739 case memory_order_release:
740 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
742 case memory_order_acq_rel:
743 case memory_order_seq_cst:
745 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
751 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
755 case memory_order_relaxed:
756 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
758 case memory_order_consume:
759 case memory_order_acquire:
760 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
762 case memory_order_release:
763 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
765 case memory_order_acq_rel:
766 case memory_order_seq_cst:
768 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
774 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
778 case memory_order_relaxed:
779 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
781 case memory_order_consume:
782 case memory_order_acquire:
783 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
785 case memory_order_release:
786 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
788 case memory_order_acq_rel:
789 case memory_order_seq_cst:
791 v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
799 BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
801 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
802 if (order != memory_order_relaxed)
803 msvc_arm_operations_base::hardware_full_fence();
804 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
807 BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
809 if (order != memory_order_relaxed)
810 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
813 } // namespace detail
814 } // namespace atomics
817 #undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
818 #undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
819 #undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
820 #undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
821 #undef BOOST_ATOMIC_DETAIL_ARM_STORE8
822 #undef BOOST_ATOMIC_DETAIL_ARM_STORE16
823 #undef BOOST_ATOMIC_DETAIL_ARM_STORE32
824 #undef BOOST_ATOMIC_DETAIL_ARM_STORE64
826 #endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_