]> git.proxmox.com Git - ceph.git/blame - ceph/src/boost/boost/thread/win32/interlocked_read.hpp
import new upstream nautilus stable release 14.2.8
[ceph.git] / ceph / src / boost / boost / thread / win32 / interlocked_read.hpp
CommitLineData
b32b8144
FG
1#ifndef BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP
2#define BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP
3
4// interlocked_read_win32.hpp
5//
6// (C) Copyright 2005-8 Anthony Williams
7// (C) Copyright 2012 Vicente J. Botet Escriba
8// (C) Copyright 2017 Andrey Semashev
9//
10// Distributed under the Boost Software License, Version 1.0. (See
11// accompanying file LICENSE_1_0.txt or copy at
12// http://www.boost.org/LICENSE_1_0.txt)
13
14#include <boost/detail/interlocked.hpp>
15#include <boost/thread/detail/config.hpp>
16
17#include <boost/config/abi_prefix.hpp>
18
// Define compiler barriers.
// BOOST_THREAD_DETAIL_COMPILER_BARRIER() prevents the *compiler* from moving
// memory accesses across the call site; it emits no CPU instruction.
#if defined(__INTEL_COMPILER)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() __memory_barrier()
#elif defined(_MSC_VER) && !defined(_WIN32_WCE)
// MSVC (desktop): declare the _ReadWriteBarrier intrinsic explicitly so this
// header does not need to pull in <intrin.h> for it.
extern "C" void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() _ReadWriteBarrier()
#endif

#ifndef BOOST_THREAD_DETAIL_COMPILER_BARRIER
// Unknown compiler: no barrier intrinsic available, expand to nothing.
// The code paths below that rely on a real barrier are compiler-specific,
// so this fallback is only reached where the barrier is not required.
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER()
#endif
31
32#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64))
33
34// Since VS2005 and until VS2012 volatile reads always acquire and volatile writes are always release.
35// But VS2012 adds a compiler switch that can change behavior to the standard. On x86 though
36// the compiler generates a single instruction for the load/store, which is enough synchronization
37// as far as uarch is concerned. To prevent compiler reordering code around the load/store we add
38// compiler barriers.
39
namespace boost
{
    namespace detail
    {
        // Acquire load: per the comment block above, on x86/x64 a plain
        // (single-instruction) load already has acquire semantics at the
        // hardware level.  The compiler barrier *after* the load stops the
        // compiler from hoisting later accesses above it.
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long const res=*x;
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }
        // Pointer-sized overload of the acquire load; same reasoning.
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* const res=*x;
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }

        // Release store: plain stores are release on x86/x64; the compiler
        // barrier *before* the store keeps earlier accesses from sinking
        // below it.
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            *x=value;
        }
        // Pointer-sized overload of the release store; same reasoning.
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            *x=value;
        }
    }
}
69
70#elif defined(_MSC_VER) && _MSC_VER >= 1700 && (defined(_M_ARM) || defined(_M_ARM64))
71
72#include <intrin.h>
73
namespace boost
{
    namespace detail
    {
        // MSVC 2012+ on ARM/ARM64: plain loads/stores carry no ordering, so
        // each operation pairs a non-optimizable volatile access
        // (__iso_volatile_load*/store*) with an explicit data memory barrier
        // (__dmb(0xB) == _ARM_BARRIER_ISH).  The compiler barriers around the
        // DMB keep the compiler from reordering code across the sequence.

        // Acquire load: load first, then barrier, so later accesses cannot
        // be observed before the load.
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long const res=__iso_volatile_load32((const volatile __int32*)x);
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }
        // Pointer-sized acquire load: 64-bit load on ARM64, 32-bit on ARM32.
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* const res=
#if defined(_M_ARM64)
                (void*)__iso_volatile_load64((const volatile __int64*)x);
#else
                (void*)__iso_volatile_load32((const volatile __int32*)x);
#endif
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }

        // Release store: barrier first, then store, so earlier accesses are
        // complete before the store becomes visible.
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __iso_volatile_store32((volatile __int32*)x, (__int32)value);
        }
        // Pointer-sized release store: 64-bit store on ARM64, 32-bit on ARM32.
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
#if defined(_M_ARM64)
            __iso_volatile_store64((volatile __int64*)x, (__int64)value);
#else
            __iso_volatile_store32((volatile __int32*)x, (__int32)value);
#endif
        }
    }
}
120
121#elif defined(__GNUC__) && (((__GNUC__ * 100 + __GNUC_MINOR__) >= 407) || (defined(__clang__) && (__clang_major__ * 100 + __clang_minor__) >= 302))
122
123namespace boost
124{
125 namespace detail
126 {
127 inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
128 {
129 return __atomic_load_n((long*)x, __ATOMIC_ACQUIRE);
130 }
131 inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
132 {
133 return __atomic_load_n((void**)x, __ATOMIC_ACQUIRE);
134 }
135
136 inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
137 {
138 __atomic_store_n((long*)x, value, __ATOMIC_RELEASE);
139 }
140 inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
141 {
142 __atomic_store_n((void**)x, value, __ATOMIC_RELEASE);
143 }
144 }
145}
146
147#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
148
namespace boost
{
    namespace detail
    {
        // Older GCC on x86/x86-64 (no __atomic builtins): a single mov is
        // sufficient hardware synchronization (loads are acquire, stores are
        // release on x86 — same rationale as the MSVC x86 branch above), and
        // the "memory" clobber makes each asm statement a compiler reordering
        // barrier as well.
        // NOTE(review): "movl" assumes long is 32 bits in the 32-bit overloads
        // below — true on Windows (LLP64) targets this win32/ header serves;
        // verify if ever reused on an LP64 platform.
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long res;
            __asm__ __volatile__ ("movl %1, %0" : "=r" (res) : "m" (*x) : "memory");
            return res;
        }
        // Pointer-sized acquire load: movq on x86-64, movl on i386.
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* res;
#if defined(__x86_64__)
            __asm__ __volatile__ ("movq %1, %0" : "=r" (res) : "m" (*x) : "memory");
#else
            __asm__ __volatile__ ("movl %1, %0" : "=r" (res) : "m" (*x) : "memory");
#endif
            return res;
        }

        // Release store of a long (see note above about "movl").
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            __asm__ __volatile__ ("movl %1, %0" : "=m" (*x) : "r" (value) : "memory");
        }
        // Pointer-sized release store: movq on x86-64, movl on i386.
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
#if defined(__x86_64__)
            __asm__ __volatile__ ("movq %1, %0" : "=m" (*x) : "r" (value) : "memory");
#else
            __asm__ __volatile__ ("movl %1, %0" : "=m" (*x) : "r" (value) : "memory");
#endif
        }
    }
}
184
185#else
186
namespace boost
{
    namespace detail
    {
        // Generic fallback for all other compilers/architectures: emulate
        // the ordered accesses with interlocked operations, which are
        // full-barrier read-modify-writes — stronger (and costlier) than
        // strictly needed, but correct everywhere the macros are provided.

        // Acquire load: a compare-exchange of 0 with 0 never changes *x and
        // returns its current value.
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            return BOOST_INTERLOCKED_COMPARE_EXCHANGE((long*)x,0,0);
        }
        // Pointer-sized acquire load via the same no-op compare-exchange trick.
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            return BOOST_INTERLOCKED_COMPARE_EXCHANGE_POINTER((void**)x,0,0);
        }
        // Release store: an atomic exchange; the previous value is discarded.
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_INTERLOCKED_EXCHANGE((long*)x,value);
        }
        // Pointer-sized release store via atomic exchange.
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_INTERLOCKED_EXCHANGE_POINTER((void**)x,value);
        }
    }
}
209
210#endif
211
212#include <boost/config/abi_suffix.hpp>
213
214#endif