/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file atomic/detail/ops_msvc_arm.hpp
 *
 * This header contains the implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_

#include <intrin.h>
#include <boost/memory_order.hpp>
#include <boost/type_traits/make_signed.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

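// The __iso_volatile_* intrinsics perform plain (non-fenced) volatile loads and stores: they
// do not carry the acquire/release semantics MSVC may otherwise attach to volatile accesses,
// so all ordering below is established with explicit barriers.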
#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))

namespace boost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture allows avoiding the
// unnecessary memory barrier after a consume load, since it supports data dependency ordering.
// However, some compiler optimizations may break seemingly valid code that relies on data
// dependency tracking by injecting bogus branches to aid out-of-order execution.
// This may happen not only in Boost.Atomic code but also in the user's code, which we have no
// control over. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.
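// As a consequence, a load with memory_order_consume takes exactly the same path as a load
// with memory_order_acquire: fence_after_load() and cas_common_order() below fold the consume
// flag into acquire.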

struct msvc_arm_operations_base
{
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
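        // DMB ISH: a full hardware memory barrier over the inner shareable domain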
        __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    }

    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

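        // In Boost's memory_order encoding, memory_order_acq_rel and memory_order_seq_cst
        // include the memory_order_release bit, so this test covers those orders as well.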
        if ((order & memory_order_release) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if (order == memory_order_seq_cst)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

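        // Likewise, memory_order_acq_rel and memory_order_seq_cst include the acquire bit,
        // so any order that requires acquire semantics takes the barrier here.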
        if ((order & (memory_order_consume | memory_order_acquire)) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire
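        // The shift relies on the consume bit being exactly one position below the acquire
        // bit in Boost's memory_order encoding: masking out consume and OR-ing it back in
        // shifted left by one maps consume to acquire and leaves all other flags unchanged.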
        return static_cast< memory_order >(((failure_order | success_order) & ~memory_order_consume) | (((failure_order | success_order) & memory_order_consume) << 1u));
    }
};

template< typename T, typename Derived >
struct msvc_arm_operations :
    public msvc_arm_operations_base
{
    typedef T storage_type;

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
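        // Subtraction is implemented as addition of the negated operand. Negating in the
        // signed domain (rather than negating an unsigned value directly) presumably avoids
        // compiler warnings; converting back yields the same two's complement bit pattern.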
        typedef typename make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
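        // The underlying Interlocked compare-exchange cannot fail spuriously, so the weak
        // form simply forwards to the strong one.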
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }

    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
    {
        return true;
    }
};

template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 1u, Signed >::aligned aligned_storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
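        // Report the value actually observed in storage; the operation succeeded iff it
        // matches the value we expected to find there.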
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 2u, Signed >::aligned aligned_storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 4u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};


BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order != memory_order_relaxed)
        msvc_arm_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
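    // Ordering with respect to a signal handler on the same thread only requires that the
    // compiler does not reorder accesses; no hardware barrier is needed.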
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_