/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file atomic/detail/ops_msvc_arm.hpp
 *
 * This header contains the implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_

#include <intrin.h>
#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/detail/type_traits/make_signed.hpp>
#include <boost/atomic/capabilities.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))
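
// Added commentary: the __iso_volatile_* intrinsics perform plain volatile loads
// and stores without any implicit acquire/release semantics, independently of the
// /volatile:ms or /volatile:iso compiler switch. The hardware barriers required
// by the requested memory_order are therefore added explicitly around them below.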

namespace boost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture allows one to avoid
// the memory barrier after a consume load, since it supports data dependency ordering.
// However, some compiler optimizations may break seemingly valid code that relies on data
// dependency tracking by injecting bogus branches to aid out-of-order execution.
// This may happen not only in Boost.Atomic code but also in the user's code, over which we
// have no control. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.
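
// Illustrative sketch (added commentary, not part of the original header): as a
// consequence of this promotion, the two loads below compile identically on this
// target, each followed by the full barrier emitted in fence_after_load():
//
//     boost::atomic< int* > ptr;
//     int* p1 = ptr.load(boost::memory_order_consume); // promoted to acquire
//     int* p2 = ptr.load(boost::memory_order_acquire); // explicit acquire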

struct msvc_arm_operations_base
{
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
        __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    }
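        // Added commentary: DMB ISH is a full data memory barrier scoped to the
        // inner shareable domain, i.e. it orders memory accesses with respect to
        // all cores that may share this memory.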

    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((order & memory_order_release) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if (order == memory_order_seq_cst)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((order & (memory_order_consume | memory_order_acquire)) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

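    // Added commentary: the bit trick in cas_common_order() below relies on Boost's
    // flag-like memory_order encoding (see boost/memory_order.hpp), in which
    // memory_order_consume == 1 and memory_order_acquire == 2. ORing the success and
    // failure orders yields the strongest combination of their flags, and shifting
    // the consume bit left by one converts it into the acquire bit. For example,
    // combining memory_order_release (4) with memory_order_consume (1) produces
    // memory_order_acq_rel (4 | 2 == 6).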
    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire
        return static_cast< memory_order >(((failure_order | success_order) & ~memory_order_consume) | (((failure_order | success_order) & memory_order_consume) << 1u));
    }
};

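// Added commentary: this base template uses the curiously recurring template
// pattern (the Derived parameter) so that the generic helpers below (fetch_sub,
// compare_exchange_weak, test_and_set, clear) can forward to the size-specific
// primitives defined in the operations< N, Signed > specializations without
// virtual dispatch.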
template< typename T, typename Derived >
struct msvc_arm_operations :
    public msvc_arm_operations_base
{
    typedef T storage_type;

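    // Added commentary: fetch_sub() is expressed as fetch_add() of the negated
    // operand. The round trip through the signed counterpart type sidesteps
    // compiler warnings about applying unary minus to an unsigned type; the
    // resulting two's complement bit pattern is the same either way (e.g. for an
    // 8-bit storage type, fetch_sub(s, 1) becomes fetch_add(s, 0xFF)).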
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename boost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }
};

template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 1u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 2u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 4u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};


BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order != memory_order_relaxed)
        msvc_arm_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}
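
// Added commentary: every thread_fence stronger than memory_order_relaxed issues
// the same full DMB ISH, since this target offers no weaker barrier that would
// still satisfy acquire-only or release-only fence semantics. signal_fence(),
// below, only has to prevent compiler reordering within a single thread, so a
// lone compiler barrier suffices.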

BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_