/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2013 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file atomic/detail/ops_gcc_alpha.hpp
 *
 * This header contains the implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_ALPHA_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_GCC_ALPHA_HPP_INCLUDED_

#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

/*
  Refer to http://h71000.www7.hp.com/doc/82final/5601/5601pro_004.html
  (HP OpenVMS systems documentation) and the Alpha Architecture Reference Manual.
 */

/*
  NB: The most natural thing would be to write the increment/decrement
  operators along the following lines:

    __asm__ __volatile__
    (
        "1: ldl_l %0,%1 \n"
        "addl %0,1,%0 \n"
        "stl_c %0,%1 \n"
        "beq %0,1b\n"
        : "=&b" (tmp)
        : "m" (value)
        : "cc"
    );

  However, according to the comments on the HP website and the matching
  comments in the Linux kernel sources, this defeats branch prediction,
  because the CPU assumes that backward branches are always taken. So we
  instead copy the trick from the Linux kernel and introduce a forward
  branch that then jumps back again.

  I have, however, had a hard time measuring the difference between the
  two versions in microbenchmarks; I am leaving the trick in nevertheless,
  as it apparently does not hurt either.
 */
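
/*
  For reference, the loop shape actually used throughout this file places the
  retry branch out of line. A sketch of the fetch_add sequence below:

    "1:\n"
    "ldl_l %0, %2\n"
    "addl %0, %3, %1\n"
    "stl_c %1, %2\n"
    "beq %1, 2f\n"          // forward branch, statically predicted not taken

    ".subsection 2\n"       // retry path assembled out of line
    "2: br 1b\n"
    ".previous\n"
 */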

struct gcc_alpha_operations_base
{
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void fence_before(memory_order order) BOOST_NOEXCEPT
    {
        if ((order & memory_order_release) != 0)
            __asm__ __volatile__ ("mb" ::: "memory");
    }

    static BOOST_FORCEINLINE void fence_after(memory_order order) BOOST_NOEXCEPT
    {
        if ((order & (memory_order_consume | memory_order_acquire)) != 0)
            __asm__ __volatile__ ("mb" ::: "memory");
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        if (order == memory_order_seq_cst)
            __asm__ __volatile__ ("mb" ::: "memory");
    }
};
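
/*
  The base class maps memory_order values onto the Alpha "mb" barrier, relying on
  Boost's bitmask-friendly memory_order encoding:
   - fence_before emits "mb" for any order containing release (release, acq_rel,
     seq_cst), so earlier accesses complete before the store/RMW.
   - fence_after emits "mb" for any order containing consume or acquire, so the
     loaded value is ordered before later accesses.
   - fence_after_store emits a trailing "mb" only for seq_cst stores.
 */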


template< bool Signed >
struct operations< 4u, Signed > :
    public gcc_alpha_operations_base
{
    typedef typename make_storage_type< 4u, Signed >::type storage_type;
    typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage = v;
        fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, tmp;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "mov %3, %1\n"
            "ldl_l %0, %2\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (tmp)        // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        int success;
        storage_type current;
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %2, %4\n"      // current = *(&storage)
            "cmpeq %2, %0, %3\n"  // success = current == expected
            "mov %2, %0\n"        // expected = current
            "beq %3, 2f\n"        // if (success == 0) goto end
            "stl_c %1, %4\n"      // storage = desired; desired = store succeeded
            "mov %1, %3\n"        // success = desired
            "2:\n"
            : "+&r" (expected),   // %0
              "+&r" (desired),    // %1
              "=&r" (current),    // %2
              "=&r" (success)     // %3
            : "m" (storage)       // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        storage_type current, tmp;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "1:\n"
            "mov %5, %1\n"        // tmp = desired
            "ldl_l %2, %4\n"      // current = *(&storage)
            "cmpeq %2, %0, %3\n"  // success = current == expected
            "mov %2, %0\n"        // expected = current
            "beq %3, 2f\n"        // if (success == 0) goto end
            "stl_c %1, %4\n"      // storage = tmp; tmp = store succeeded
            "beq %1, 3f\n"        // if (tmp == 0) goto retry
            "mov %1, %3\n"        // success = tmp
            "2:\n"

            ".subsection 2\n"
            "3: br 1b\n"
            ".previous\n"

            : "+&r" (expected),   // %0
              "=&r" (tmp),        // %1
              "=&r" (current),    // %2
              "=&r" (success)     // %3
            : "m" (storage),      // %4
              "r" (desired)       // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "and %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "bis %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "xor %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
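
/*
  Illustrative usage sketch (hypothetical, for exposition only; real code reaches
  these specializations through boost::atomic<>). A direct call into the 32-bit
  specialization would look roughly like this:

    typedef operations< 4u, false > ops;
    ops::storage_type storage = 0u;
    ops::store(storage, 1u, boost::memory_order_release);
    ops::storage_type old = ops::fetch_add(storage, 5u, boost::memory_order_acq_rel);
    bool was_set = ops::test_and_set(storage, boost::memory_order_acquire);
 */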


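// The 8-bit unsigned specialization reuses the 32-bit LL/SC sequence and masks the
// result back to a byte: "zapnot %1, #1, %1" keeps only byte 0 (zero-extension).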
template< >
struct operations< 1u, false > :
    public operations< 4u, false >
{
    typedef operations< 4u, false > base_type;
    typedef base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "zapnot %1, #1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "zapnot %1, #1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
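// The signed 8-bit specialization instead sign-extends the low byte of the result
// with "sextb" before the conditional store.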
template< >
struct operations< 1u, true > :
    public operations< 4u, true >
{
    typedef operations< 4u, true > base_type;
    typedef base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "sextb %1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "sextb %1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};


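// The 16-bit unsigned specialization masks the result to the low two bytes with
// "zapnot %1, #3, %1" (zero-extension to 16 bits).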
template< >
struct operations< 2u, false > :
    public operations< 4u, false >
{
    typedef operations< 4u, false > base_type;
    typedef base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "zapnot %1, #3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "zapnot %1, #3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
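// The signed 16-bit specialization sign-extends the low word of the result with "sextw".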
template< >
struct operations< 2u, true > :
    public operations< 4u, true >
{
    typedef operations< 4u, true > base_type;
    typedef base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "sextw %1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "sextw %1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};


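// The 64-bit operations mirror the 32-bit specialization, using the quadword LL/SC
// pair (ldq_l/stq_c) and quadword arithmetic (addq/subq).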
template< bool Signed >
struct operations< 8u, Signed > :
    public gcc_alpha_operations_base
{
    typedef typename make_storage_type< 8u, Signed >::type storage_type;
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage = v;
        fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, tmp;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "mov %3, %1\n"
            "ldq_l %0, %2\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (tmp)        // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        int success;
        storage_type current;
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %2, %4\n"      // current = *(&storage)
            "cmpeq %2, %0, %3\n"  // success = current == expected
            "mov %2, %0\n"        // expected = current
            "beq %3, 2f\n"        // if (success == 0) goto end
            "stq_c %1, %4\n"      // storage = desired; desired = store succeeded
            "mov %1, %3\n"        // success = desired
            "2:\n"
            : "+&r" (expected),   // %0
              "+&r" (desired),    // %1
              "=&r" (current),    // %2
              "=&r" (success)     // %3
            : "m" (storage)       // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        storage_type current, tmp;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "1:\n"
            "mov %5, %1\n"        // tmp = desired
            "ldq_l %2, %4\n"      // current = *(&storage)
            "cmpeq %2, %0, %3\n"  // success = current == expected
            "mov %2, %0\n"        // expected = current
            "beq %3, 2f\n"        // if (success == 0) goto end
            "stq_c %1, %4\n"      // storage = tmp; tmp = store succeeded
            "beq %1, 3f\n"        // if (tmp == 0) goto retry
            "mov %1, %3\n"        // success = tmp
            "2:\n"

            ".subsection 2\n"
            "3: br 1b\n"
            ".previous\n"

            : "+&r" (expected),   // %0
              "=&r" (tmp),        // %1
              "=&r" (current),    // %2
              "=&r" (success)     // %3
            : "m" (storage),      // %4
              "r" (desired)       // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "addq %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "subq %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "and %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "bis %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "xor %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};


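// A non-relaxed thread fence requires a full memory barrier ("mb"); a signal fence
// only has to prevent compiler reordering, hence the empty asm with a "memory" clobber.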
BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        __asm__ __volatile__ ("mb" ::: "memory");
}

BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        __asm__ __volatile__ ("" ::: "memory");
}

} // namespace detail
} // namespace atomics
} // namespace boost

#endif // BOOST_ATOMIC_DETAIL_OPS_GCC_ALPHA_HPP_INCLUDED_