/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/ops_msvc_x86.hpp
 *
 * This header contains implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_X86_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_MSVC_X86_HPP_INCLUDED_
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/detail/type_traits/make_signed.hpp>
#include <boost/atomic/capabilities.hpp>
#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B) || defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
#include <boost/cstdint.hpp>
#include <boost/atomic/detail/ops_cas_based.hpp>
#endif
#include <boost/atomic/detail/ops_msvc_common.hpp>
#if !defined(_M_IX86) && !(defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8) && defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16))
#include <boost/atomic/detail/ops_extending_cas_based.hpp>
#endif
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

#if defined(BOOST_MSVC)
#pragma warning(push)
// frame pointer register 'ebx' modified by inline assembly code. See the note below.
#pragma warning(disable: 4731)
#endif

#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_MFENCE)
extern "C" void _mm_mfence(void);
#if defined(BOOST_MSVC)
#pragma intrinsic(_mm_mfence)
#endif
#endif

namespace boost {
namespace atomics {
namespace detail {

/*
 * Implementation note for asm blocks.
 *
 * http://msdn.microsoft.com/en-us/data/k1a8ss06%28v=vs.105%29
 *
 * Some SSE types require eight-byte stack alignment, forcing the compiler to emit dynamic stack-alignment code.
 * To be able to access both the local variables and the function parameters after the alignment, the compiler
 * maintains two frame pointers. If the compiler performs frame pointer omission (FPO), it will use EBP and ESP.
 * If the compiler does not perform FPO, it will use EBX and EBP. To ensure code runs correctly, do not modify EBX
 * in asm code if the function requires dynamic stack alignment as it could modify the frame pointer.
 * Either move the eight-byte aligned types out of the function, or avoid using EBX.
 *
 * Since we have no way of knowing whether the compiler uses FPO, we have to always save and restore ebx
 * whenever we have to clobber it. Additionally, we disable warning C4731 above so that the compiler
 * doesn't spam about ebx use.
 */
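
// Illustrative sketch of the ebx preservation pattern described above (register and variable
// names are for exposition only, not a definitive transcription of the blocks below): an asm
// block that clobbers ebx (e.g. around cmpxchg8b) copies it to a local first and restores it
// before the block ends, roughly:
//
//     uint32_t backup;
//     __asm
//     {
//         mov backup, ebx
//         mov ebx, dword ptr [v]        // ebx can now be used freely
//         lock cmpxchg8b qword ptr [edi]
//         mov ebx, backup               // restore the potential frame pointer
//     };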

struct msvc_x86_operations_base
{
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_MFENCE)
        _mm_mfence();
#else
        long tmp;
        BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&tmp, 0);
#endif
    }

    static BOOST_FORCEINLINE void fence_before(memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after(memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        // On x86 and x86_64 there is no need for a hardware barrier,
        // even if seq_cst memory order is requested, because all
        // seq_cst writes are implemented with lock-prefixed operations
        // or xchg which has implied lock prefix. Therefore normal loads
        // are already ordered with seq_cst stores on these architectures.
    }
};
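
// For exposition: the asymmetry described in fence_after_load() means a seq_cst store below is
// implemented with an implicitly locked instruction (xchg or a lock-prefixed RMW), while a
// seq_cst load remains a plain mov plus a compiler barrier. Roughly (assumed instruction
// mapping, simplified):
//
//     store(x, 1, memory_order_seq_cst);   // xchg dword ptr [x], eax   (implicit lock)
//     load(x, memory_order_seq_cst);       // mov eax, dword ptr [x]    (compiler barrier only)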

template< typename T, typename Derived >
struct msvc_x86_operations :
    public msvc_x86_operations_base
{
    typedef T storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst)
        {
            fence_before(order);
            storage = v;
            fence_after(order);
        }
        else
        {
            Derived::exchange(storage, v, order);
        }
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename boost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};

template< bool Signed >
struct operations< 4u, Signed > :
    public msvc_x86_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
{
    typedef msvc_x86_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
        expected = old_val;
        return (previous == old_val);
    }

#if defined(BOOST_ATOMIC_INTERLOCKED_AND)
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
    }
#else
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res & v, order, memory_order_relaxed)) {}
        return res;
    }
#endif

#if defined(BOOST_ATOMIC_INTERLOCKED_OR)
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
    }
#else
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res | v, order, memory_order_relaxed)) {}
        return res;
    }
#endif

#if defined(BOOST_ATOMIC_INTERLOCKED_XOR)
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
    }
#else
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res ^ v, order, memory_order_relaxed)) {}
        return res;
    }
#endif
};
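
// Note on the CAS-based fallbacks above (illustrative): they rely on compare_exchange_strong
// writing the currently stored value back into 'res' when the exchange fails, so a retry such as
//
//     while (!compare_exchange_strong(storage, res, res & v, order, memory_order_relaxed)) {}
//
// always recomputes the desired value from a fresh snapshot of the storage before trying again.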

#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8)

template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef msvc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 1u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
        expected = old_val;
        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
    }
};

#elif defined(_M_IX86)

template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef msvc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 1u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            lock xadd byte ptr [edx], al
        base_type::fence_after(order);

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            xchg byte ptr [edx], al
        base_type::fence_after(order);

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order) BOOST_NOEXCEPT
        base_type::fence_before(success_order);
            movzx eax, byte ptr [esi]
            lock cmpxchg byte ptr [edi], dl
            mov byte ptr [esi], al
        // The success and failure fences are equivalent anyway
        base_type::fence_after(success_order);

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            movzx eax, byte ptr [edi]
            lock cmpxchg byte ptr [edi], dl
        base_type::fence_after(order);

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            movzx eax, byte ptr [edi]
            lock cmpxchg byte ptr [edi], dl
        base_type::fence_after(order);

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            movzx eax, byte ptr [edi]
            lock cmpxchg byte ptr [edi], dl
        base_type::fence_after(order);

#else

template< bool Signed >
struct operations< 1u, Signed > :
    public extending_cas_based_operations< operations< 4u, Signed >, 1u, Signed >
{
};

#endif

#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16)

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef msvc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 2u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
        expected = old_val;
        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
    }
};

#elif defined(_M_IX86)

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef msvc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 2u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            lock xadd word ptr [edx], ax
        base_type::fence_after(order);

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            xchg word ptr [edx], ax
        base_type::fence_after(order);

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order) BOOST_NOEXCEPT
        base_type::fence_before(success_order);
            movzx eax, word ptr [esi]
            lock cmpxchg word ptr [edi], dx
            mov word ptr [esi], ax
        // The success and failure fences are equivalent anyway
        base_type::fence_after(success_order);

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            movzx eax, word ptr [edi]
            lock cmpxchg word ptr [edi], dx
        base_type::fence_after(order);

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            movzx eax, word ptr [edi]
            lock cmpxchg word ptr [edi], dx
        base_type::fence_after(order);

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
        base_type::fence_before(order);
            movzx eax, word ptr [edi]
            lock cmpxchg word ptr [edi], dx
        base_type::fence_after(order);

#else

template< bool Signed >
struct operations< 2u, Signed > :
    public extending_cas_based_operations< operations< 4u, Signed >, 2u, Signed >
{
};

#endif

#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B)

template< bool Signed >
struct msvc_dcas_x86
{
    typedef typename make_storage_type< 8u, Signed >::type storage_type;
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    // Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3A, 8.1.1. Guaranteed Atomic Operations:
    //
    // The Pentium processor (and newer processors since) guarantees that the following additional memory operations will always be carried out atomically:
    // * Reading or writing a quadword aligned on a 64-bit boundary
    //
    // Luckily, the memory is almost always 8-byte aligned in our case because atomic<> uses 64 bit native types for storage and dynamic memory allocations
    // have at least 8 byte alignment. The only unfortunate case is when atomic is placed on the stack and it is not 8-byte aligned (like on 32 bit Windows).
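
    // Illustrative note (example addresses are hypothetical): for an 8-byte aligned pointer p the
    // test ((uint32_t)p & 0x00000007) == 0 holds (e.g. p == 0x0012ff58), so a single 64-bit
    // movq/fild/fistp access is atomic per the rule quoted above; for a misaligned p
    // (e.g. p == 0x0012ff5c) the code below falls back to lock cmpxchg8b instead.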

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
        storage_type volatile* p = &storage;
        if (((uint32_t)p & 0x00000007) == 0)
#if defined(_M_IX86_FP) && _M_IX86_FP >= 2
            vmovq qword ptr [edx], xmm4
            movq qword ptr [edx], xmm4
            fistp qword ptr [edx]
            mov ebx, dword ptr [v]
            mov ecx, dword ptr [v + 4]
            mov eax, dword ptr [edi]
            mov edx, dword ptr [edi + 4]
            lock cmpxchg8b qword ptr [edi]
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order) BOOST_NOEXCEPT
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
        storage_type const volatile* p = &storage;
        if (((uint32_t)p & 0x00000007) == 0)
#if defined(_M_IX86_FP) && _M_IX86_FP >= 2
            vmovq xmm4, qword ptr [edx]
            movq xmm4, qword ptr [edx]
            // We don't care for comparison result here; the previous value will be stored into value anyway.
            // Also we don't care for ebx and ecx values, they just have to be equal to eax and edx before cmpxchg8b.
            lock cmpxchg8b qword ptr [edi]
            mov dword ptr [value], eax
            mov dword ptr [value + 4], edx
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
        // MSVC-11 in 32-bit mode sometimes generates messed up code without compiler barriers,
        // even though the _InterlockedCompareExchange64 intrinsic already provides one.
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
        storage_type volatile* p = &storage;
#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
        const storage_type old_val = (storage_type)BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(p, desired, expected);
        const bool result = (old_val == expected);
            mov ebx, dword ptr [desired]
            mov ecx, dword ptr [desired + 4]
            mov eax, dword ptr [esi]
            mov edx, dword ptr [esi + 4]
            lock cmpxchg8b qword ptr [edi]
            mov dword ptr [esi], eax
            mov dword ptr [esi + 4], edx
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }
791 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
793 storage_type volatile* p = &storage;
799 mov ebx, dword ptr [v]
800 mov ecx, dword ptr [v + 4]
801 mov eax, dword ptr [edi]
802 mov edx, dword ptr [edi + 4]
805 lock cmpxchg8b qword ptr [edi]
808 mov dword ptr [v], eax
809 mov dword ptr [v + 4], edx
812 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

template< bool Signed >
struct operations< 8u, Signed > :
    public cas_based_operations< msvc_dcas_x86< Signed > >
{
};

#elif defined(_M_AMD64)

template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_x86_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
{
    typedef msvc_x86_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
        expected = old_val;
        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
    }
};

#endif

#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)

template< bool Signed >
struct msvc_dcas_x86_64
{
    typedef typename make_storage_type< 16u, Signed >::type storage_type;
    typedef typename make_storage_type< 16u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 16u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type value = const_cast< storage_type& >(storage);
        while (!BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, v, &value)) {}
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type value = storage_type();
        BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, value, &value);
        return value;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        return !!BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, desired, &expected);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }
};

template< bool Signed >
struct operations< 16u, Signed > :
    public cas_based_operations< cas_based_exchange< msvc_dcas_x86_64< Signed > > >
{
};

#endif // defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)

BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order == memory_order_seq_cst)
        msvc_x86_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}
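
// Usage note (for exposition): only thread_fence(memory_order_seq_cst) emits a hardware fence
// here (mfence when BOOST_ATOMIC_DETAIL_X86_HAS_MFENCE is defined, otherwise a locked exchange);
// every weaker thread_fence ordering, and signal_fence for anything stronger than relaxed,
// reduces to a compiler-only barrier.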

} // namespace detail
} // namespace atomics
} // namespace boost

#if defined(BOOST_MSVC)
#pragma warning(pop)
#endif

#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_X86_HPP_INCLUDED_