/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2013 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/ops_gcc_alpha.hpp
 *
 * This header contains the implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_ALPHA_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_GCC_ALPHA_HPP_INCLUDED_

#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

/*
  Refer to http://h71000.www7.hp.com/doc/82final/5601/5601pro_004.html
  (HP OpenVMS systems documentation) and the Alpha Architecture Reference Manual.
 */

/*
  NB: The most natural thing would be to write the increment/decrement
  operators along the following lines:

    __asm__ __volatile__
    (
      "1: ldl_l %0,%1 \n"
      "addl %0,1,%0 \n"
      "stl_c %0,%1 \n"
      "beq %0,1b\n"
      : "=&b" (tmp)
      : "m" (value)
      : "cc"
    );

  However, according to the comments on the HP website and matching
  comments in the Linux kernel sources, this defeats branch prediction,
  because the CPU statically assumes that backward branches are always
  taken. So, following the Linux kernel, we instead use a forward branch
  to an out-of-line stub that branches back into the loop.

  I have, however, had a hard time measuring the difference between the
  two versions in microbenchmarks -- I am leaving it in nevertheless, as
  it apparently does not hurt either.
 */
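
/*
  For illustration only: a hedged sketch of the forward-branch form of the
  same increment loop, using the ".subsection 2" retry-stub pattern that the
  operations below rely on. The "=&r" constraint and the exact stub layout
  are assumptions made for this sketch rather than text taken verbatim from
  any shipped implementation:

    __asm__ __volatile__
    (
      "1: ldl_l %0,%1 \n"
      "addl %0,1,%0 \n"
      "stl_c %0,%1 \n"
      "beq %0,2f \n"

      ".subsection 2 \n"
      "2: br 1b \n"
      ".previous \n"

      : "=&r" (tmp)
      : "m" (value)
      : "cc"
    );

  The store-conditional failure path now lives in a cold subsection, so the
  only conditional branch left in the hot path is a forward branch, which
  the CPU statically predicts as not taken.
 */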

struct gcc_alpha_operations_base
{
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    // Emit a full memory barrier before the access for any ordering that
    // includes release semantics.
    static BOOST_FORCEINLINE void fence_before(memory_order order) BOOST_NOEXCEPT
    {
        if ((order & memory_order_release) != 0)
            __asm__ __volatile__ ("mb" ::: "memory");
    }

    // Emit a full memory barrier after the access for any ordering that
    // includes acquire or consume semantics.
    static BOOST_FORCEINLINE void fence_after(memory_order order) BOOST_NOEXCEPT
    {
        if ((order & (memory_order_consume | memory_order_acquire)) != 0)
            __asm__ __volatile__ ("mb" ::: "memory");
    }

    // Sequentially consistent stores need a trailing barrier as well.
    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        if (order == memory_order_seq_cst)
            __asm__ __volatile__ ("mb" ::: "memory");
    }
};


template< bool Signed >
struct operations< 4u, Signed > :
    public gcc_alpha_operations_base
{
    typedef typename make_storage_type< 4u, Signed >::type storage_type;
    typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage = v;
        fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, tmp;
        fence_before(order);
        // Load-locked/store-conditional loop: stl_c writes 0 into its register
        // if the reservation was lost, in which case we jump to the out-of-line
        // stub and retry from label 1.
        __asm__ __volatile__
        (
            "1:\n"
            "mov %3, %1\n"
            "ldl_l %0, %2\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (tmp)        // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        int success;
        storage_type current;
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %2, %4\n"                // current = *(&storage)
            "cmpeq %2, %0, %3\n"            // success = current == expected
            "mov %2, %0\n"                  // expected = current
            "beq %3, 2f\n"                  // if (success == 0) goto end
            "stl_c %1, %4\n"                // storage = desired; desired = store succeeded
            "mov %1, %3\n"                  // success = desired
            "2:\n"
            : "+&r" (expected),  // %0
              "+&r" (desired),   // %1
              "=&r" (current),   // %2
              "=&r" (success)    // %3
            : "m" (storage)      // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        storage_type current, tmp;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "1:\n"
            "mov %5, %1\n"                  // tmp = desired
            "ldl_l %2, %4\n"                // current = *(&storage)
            "cmpeq %2, %0, %3\n"            // success = current == expected
            "mov %2, %0\n"                  // expected = current
            "beq %3, 2f\n"                  // if (success == 0) goto end
            "stl_c %1, %4\n"                // storage = tmp; tmp = store succeeded
            "beq %1, 3f\n"                  // if (tmp == 0) goto retry
            "mov %1, %3\n"                  // success = tmp
            "2:\n"

            ".subsection 2\n"
            "3: br 1b\n"
            ".previous\n"

            : "+&r" (expected),  // %0
              "=&r" (tmp),       // %1
              "=&r" (current),   // %2
              "=&r" (success)    // %3
            : "m" (storage),     // %4
              "r" (desired)      // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "and %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "bis %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "xor %0, %3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }

    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
    {
        return true;
    }
};


// The 8- and 16-bit specializations below reuse the 32-bit ll/sc loop and
// then truncate (zapnot) or sign-extend (sextb/sextw) the intermediate
// result, so that the stored value wraps around like the narrower type.
template< >
struct operations< 1u, false > :
    public operations< 4u, false >
{
    typedef operations< 4u, false > base_type;
    typedef base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "zapnot %1, #1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "zapnot %1, #1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};

template< >
struct operations< 1u, true > :
    public operations< 4u, true >
{
    typedef operations< 4u, true > base_type;
    typedef base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "sextb %1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "sextb %1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};


template< >
struct operations< 2u, false > :
    public operations< 4u, false >
{
    typedef operations< 4u, false > base_type;
    typedef base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "zapnot %1, #3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "zapnot %1, #3, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};

template< >
struct operations< 2u, true > :
    public operations< 4u, true >
{
    typedef operations< 4u, true > base_type;
    typedef base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "addl %0, %3, %1\n"
            "sextw %1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldl_l %0, %2\n"
            "subl %0, %3, %1\n"
            "sextw %1, %1\n"
            "stl_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};


template< bool Signed >
struct operations< 8u, Signed > :
    public gcc_alpha_operations_base
{
    typedef typename make_storage_type< 8u, Signed >::type storage_type;
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage = v;
        fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, tmp;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "mov %3, %1\n"
            "ldq_l %0, %2\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (tmp)        // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        int success;
        storage_type current;
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %2, %4\n"                // current = *(&storage)
            "cmpeq %2, %0, %3\n"            // success = current == expected
            "mov %2, %0\n"                  // expected = current
            "beq %3, 2f\n"                  // if (success == 0) goto end
            "stq_c %1, %4\n"                // storage = desired; desired = store succeeded
            "mov %1, %3\n"                  // success = desired
            "2:\n"
            : "+&r" (expected),  // %0
              "+&r" (desired),   // %1
              "=&r" (current),   // %2
              "=&r" (success)    // %3
            : "m" (storage)      // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        storage_type current, tmp;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "1:\n"
            "mov %5, %1\n"                  // tmp = desired
            "ldq_l %2, %4\n"                // current = *(&storage)
            "cmpeq %2, %0, %3\n"            // success = current == expected
            "mov %2, %0\n"                  // expected = current
            "beq %3, 2f\n"                  // if (success == 0) goto end
            "stq_c %1, %4\n"                // storage = tmp; tmp = store succeeded
            "beq %1, 3f\n"                  // if (tmp == 0) goto retry
            "mov %1, %3\n"                  // success = tmp
            "2:\n"

            ".subsection 2\n"
            "3: br 1b\n"
            ".previous\n"

            : "+&r" (expected),  // %0
              "=&r" (tmp),       // %1
              "=&r" (current),   // %2
              "=&r" (success)    // %3
            : "m" (storage),     // %4
              "r" (desired)      // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "addq %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "subq %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "and %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "bis %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n"
            "ldq_l %0, %2\n"
            "xor %0, %3, %1\n"
            "stq_c %1, %2\n"
            "beq %1, 2f\n"

            ".subsection 2\n"
            "2: br 1b\n"
            ".previous\n"

            : "=&r" (original),  // %0
              "=&r" (modified)   // %1
            : "m" (storage),     // %2
              "r" (v)            // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }

    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
    {
        return true;
    }
};


// A full hardware barrier for any ordering stronger than relaxed.
BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        __asm__ __volatile__ ("mb" ::: "memory");
}

// A compiler-only barrier; no instruction is emitted.
BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        __asm__ __volatile__ ("" ::: "memory");
}

} // namespace detail
} // namespace atomics
} // namespace boost

#endif // BOOST_ATOMIC_DETAIL_OPS_GCC_ALPHA_HPP_INCLUDED_
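
/*
  Hedged usage sketch (not part of the original header): these operations<>
  specializations are normally consumed by the boost::atomic<> front end, but
  the interface defined above can be exercised directly along these lines.
  The front end stores the value in aligned_storage_type to guarantee
  suitable alignment; a plain storage_type variable is used here only to keep
  the sketch short:

    typedef boost::atomics::detail::operations< 4u, false > ops;
    ops::storage_type storage = 0u;
    ops::storage_type old = ops::fetch_add(storage, 1u, boost::memory_order_seq_cst);
    bool ok = ops::compare_exchange_strong(
        storage, old, old + 1u, boost::memory_order_acq_rel, boost::memory_order_acquire);
 */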