]> git.proxmox.com Git - ceph.git/blob - ceph/src/boost/boost/atomic/detail/core_arch_ops_gcc_alpha.hpp
import quincy beta 17.1.0
[ceph.git] / ceph / src / boost / boost / atomic / detail / core_arch_ops_gcc_alpha.hpp
1 /*
2 * Distributed under the Boost Software License, Version 1.0.
3 * (See accompanying file LICENSE_1_0.txt or copy at
4 * http://www.boost.org/LICENSE_1_0.txt)
5 *
6 * Copyright (c) 2009 Helge Bahmann
7 * Copyright (c) 2013 Tim Blechmann
8 * Copyright (c) 2014 Andrey Semashev
9 */
10 /*!
11 * \file atomic/detail/core_arch_ops_gcc_alpha.hpp
12 *
13 * This header contains implementation of the \c core_arch_operations template.
14 */
15
16 #ifndef BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_ALPHA_HPP_INCLUDED_
17 #define BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_ALPHA_HPP_INCLUDED_
18
19 #include <cstddef>
20 #include <boost/memory_order.hpp>
21 #include <boost/atomic/detail/config.hpp>
22 #include <boost/atomic/detail/storage_traits.hpp>
23 #include <boost/atomic/detail/core_arch_operations_fwd.hpp>
24 #include <boost/atomic/detail/header.hpp>
25
26 #ifdef BOOST_HAS_PRAGMA_ONCE
27 #pragma once
28 #endif
29
30 namespace boost {
31 namespace atomics {
32 namespace detail {
33
34 /*
35 Refer to http://h71000.www7.hp.com/doc/82final/5601/5601pro_004.html
36 (HP OpenVMS systems documentation) and the Alpha Architecture Reference Manual.
37 */
38
39 /*
40 NB: The most natural thing would be to write the increment/decrement
41 operators along the following lines:
42
43 __asm__ __volatile__
44 (
45 "1: ldl_l %0,%1 \n"
46 "addl %0,1,%0 \n"
47 "stl_c %0,%1 \n"
48 "beq %0,1b\n"
49 : "=&b" (tmp)
50 : "m" (value)
51 : "cc"
52 );
53
54 However according to the comments on the HP website and matching
55 comments in the Linux kernel sources this defies branch prediction,
56 as the cpu assumes that backward branches are always taken; so
57 instead copy the trick from the Linux kernel, introduce a forward
58 branch and back again.
59
60 I have, however, had a hard time measuring the difference between
61 the two versions in microbenchmarks -- I am leaving it in nevertheless
62 as it apparently does not hurt either.
63 */
64
65 struct core_arch_operations_gcc_alpha_base
66 {
67 static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
68 static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;
69
70 static BOOST_FORCEINLINE void fence_before(memory_order order) BOOST_NOEXCEPT
71 {
72 if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
73 __asm__ __volatile__ ("mb" ::: "memory");
74 }
75
76 static BOOST_FORCEINLINE void fence_after(memory_order order) BOOST_NOEXCEPT
77 {
78 if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
79 __asm__ __volatile__ ("mb" ::: "memory");
80 }
81
82 static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
83 {
84 if (order == memory_order_seq_cst)
85 __asm__ __volatile__ ("mb" ::: "memory");
86 }
87 };
88
89
//! Core atomic operations for 32-bit storage, implemented with Alpha
//! load-locked/store-conditional (ldl_l/stl_c) retry loops. Plain 32-bit
//! loads and stores are used directly for load()/store(); ordering is
//! provided by the "mb" fences inherited from the base class.
//! The store-conditional retry branch is placed out of line in a separate
//! subsection — see the note at the top of this file about Alpha branch
//! prediction of backward branches.
template< bool Signed, bool Interprocess >
struct core_arch_operations< 4u, Signed, Interprocess > :
    public core_arch_operations_gcc_alpha_base
{
    typedef typename storage_traits< 4u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 4u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    //! Stores v into storage with the given memory ordering
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage = v; // aligned 32-bit store is a single instruction
        fence_after_store(order);
    }

    //! Loads the current value of storage with the given memory ordering
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage; // aligned 32-bit load is a single instruction
        fence_after(order);
        return v;
    }

    //! Atomically replaces the stored value with v; returns the previous value
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, tmp;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "mov %3, %1\n\t"   // tmp = v (stl_c overwrites %1, so refresh it each attempt)
            "ldl_l %0, %2\n\t" // original = *(&storage)
            "stl_c %1, %2\n\t" // storage = tmp; tmp = store succeeded
            "beq %1, 2f\n\t"   // if (tmp == 0) goto retry

            ".subsection 2\n\t"
            "2: br 1b\n\t"     // out-of-line retry path
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (tmp) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Weak CAS: may fail spuriously if the store-conditional fails.
    //! On return, expected holds the value observed in storage.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        int success;
        storage_type current;
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %2, %4\n\t" // current = *(&storage)
            "cmpeq %2, %0, %3\n\t" // success = current == expected
            "mov %2, %0\n\t" // expected = current
            "beq %3, 2f\n\t" // if (success == 0) goto end
            "stl_c %1, %4\n\t" // storage = desired; desired = store succeeded
            "mov %1, %3\n\t" // success = desired
            "2:\n\t"
            : "+r" (expected), // %0
            "+r" (desired), // %1
            "=&r" (current), // %2
            "=&r" (success) // %3
            : "m" (storage) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    //! Strong CAS: retries internally when the store-conditional fails, so it
    //! only reports failure on a genuine value mismatch.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        storage_type current, tmp;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "mov %5, %1\n\t" // tmp = desired
            "ldl_l %2, %4\n\t" // current = *(&storage)
            "cmpeq %2, %0, %3\n\t" // success = current == expected
            "mov %2, %0\n\t" // expected = current
            "beq %3, 2f\n\t" // if (success == 0) goto end
            "stl_c %1, %4\n\t" // storage = tmp; tmp = store succeeded
            "beq %1, 3f\n\t" // if (tmp == 0) goto retry
            "mov %1, %3\n\t" // success = tmp
            "2:\n\t"

            ".subsection 2\n\t"
            "3: br 1b\n\t" // out-of-line retry path
            ".previous\n\t"

            : "+r" (expected), // %0
            "=&r" (tmp), // %1
            "=&r" (current), // %2
            "=&r" (success) // %3
            : "m" (storage), // %4
            "r" (desired) // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    //! Atomically adds v to storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "addl %0, %3, %1\n\t"  // modified = original + v
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically subtracts v from storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "subl %0, %3, %1\n\t"  // modified = original - v
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically ANDs v into storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "and %0, %3, %1\n\t"   // modified = original & v
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically ORs v into storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "bis %0, %3, %1\n\t"   // modified = original | v ("bis" is Alpha's OR)
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically XORs v into storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "xor %0, %3, %1\n\t"   // modified = original ^ v
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically sets storage to 1; returns true if the previous value was non-zero
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    //! Atomically resets storage to 0
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
349
350
//! 8-bit unsigned operations. The value lives in the 32-bit storage of the
//! base class; only fetch_add/fetch_sub are overridden so that the 32-bit
//! arithmetic result is truncated back to 8 bits ("zapnot %, 1" keeps only
//! byte 0, zero-extending), giving correct 8-bit unsigned wraparound.
template< bool Interprocess >
struct core_arch_operations< 1u, false, Interprocess > :
    public core_arch_operations< 4u, false, Interprocess >
{
    typedef core_arch_operations< 4u, false, Interprocess > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically adds v; returns the previous value (result wraps as 8-bit unsigned)
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        base_type::fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"      // original = *(&storage)
            "addl %0, %3, %1\n\t"   // modified = original + v (32-bit)
            "zapnot %1, 1, %1\n\t"  // truncate to the low byte (zero-extend)
            "stl_c %1, %2\n\t"      // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"        // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        base_type::fence_after(order);
        return original;
    }

    //! Atomically subtracts v; returns the previous value (result wraps as 8-bit unsigned)
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        base_type::fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"      // original = *(&storage)
            "subl %0, %3, %1\n\t"   // modified = original - v (32-bit)
            "zapnot %1, 1, %1\n\t"  // truncate to the low byte (zero-extend)
            "stl_c %1, %2\n\t"      // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"        // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        base_type::fence_after(order);
        return original;
    }
};
412
//! 8-bit signed operations. Same scheme as the unsigned specialization, but
//! the 32-bit arithmetic result is sign-extended from bit 7 ("sextb") so the
//! stored representation matches a signed 8-bit value.
template< bool Interprocess >
struct core_arch_operations< 1u, true, Interprocess > :
    public core_arch_operations< 4u, true, Interprocess >
{
    typedef core_arch_operations< 4u, true, Interprocess > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically adds v; returns the previous value (result wraps as 8-bit signed)
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        base_type::fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "addl %0, %3, %1\n\t"  // modified = original + v (32-bit)
            "sextb %1, %1\n\t"     // sign-extend the low byte
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        base_type::fence_after(order);
        return original;
    }

    //! Atomically subtracts v; returns the previous value (result wraps as 8-bit signed)
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        base_type::fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "subl %0, %3, %1\n\t"  // modified = original - v (32-bit)
            "sextb %1, %1\n\t"     // sign-extend the low byte
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        base_type::fence_after(order);
        return original;
    }
};
474
475
//! 16-bit unsigned operations. As with the 8-bit case, the value lives in
//! 32-bit storage; fetch_add/fetch_sub truncate the 32-bit result to the low
//! two bytes ("zapnot %, 3" keeps bytes 0-1, zero-extending) for correct
//! 16-bit unsigned wraparound.
template< bool Interprocess >
struct core_arch_operations< 2u, false, Interprocess > :
    public core_arch_operations< 4u, false, Interprocess >
{
    typedef core_arch_operations< 4u, false, Interprocess > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically adds v; returns the previous value (result wraps as 16-bit unsigned)
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        base_type::fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"      // original = *(&storage)
            "addl %0, %3, %1\n\t"   // modified = original + v (32-bit)
            "zapnot %1, 3, %1\n\t"  // truncate to the low 16 bits (zero-extend)
            "stl_c %1, %2\n\t"      // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"        // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        base_type::fence_after(order);
        return original;
    }

    //! Atomically subtracts v; returns the previous value (result wraps as 16-bit unsigned)
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        base_type::fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"      // original = *(&storage)
            "subl %0, %3, %1\n\t"   // modified = original - v (32-bit)
            "zapnot %1, 3, %1\n\t"  // truncate to the low 16 bits (zero-extend)
            "stl_c %1, %2\n\t"      // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"        // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        base_type::fence_after(order);
        return original;
    }
};
537
//! 16-bit signed operations. Same scheme as the unsigned specialization, but
//! the 32-bit arithmetic result is sign-extended from bit 15 ("sextw") so the
//! stored representation matches a signed 16-bit value.
template< bool Interprocess >
struct core_arch_operations< 2u, true, Interprocess > :
    public core_arch_operations< 4u, true, Interprocess >
{
    typedef core_arch_operations< 4u, true, Interprocess > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically adds v; returns the previous value (result wraps as 16-bit signed)
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        base_type::fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "addl %0, %3, %1\n\t"  // modified = original + v (32-bit)
            "sextw %1, %1\n\t"     // sign-extend the low 16 bits
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        base_type::fence_after(order);
        return original;
    }

    //! Atomically subtracts v; returns the previous value (result wraps as 16-bit signed)
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        base_type::fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldl_l %0, %2\n\t"     // original = *(&storage)
            "subl %0, %3, %1\n\t"  // modified = original - v (32-bit)
            "sextw %1, %1\n\t"     // sign-extend the low 16 bits
            "stl_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        base_type::fence_after(order);
        return original;
    }
};
599
600
//! Core atomic operations for 64-bit storage. Mirrors the 32-bit
//! specialization exactly, but uses the quadword load-locked/store-conditional
//! pair (ldq_l/stq_c) and 64-bit arithmetic (addq/subq).
template< bool Signed, bool Interprocess >
struct core_arch_operations< 8u, Signed, Interprocess > :
    public core_arch_operations_gcc_alpha_base
{
    typedef typename storage_traits< 8u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    //! Stores v into storage with the given memory ordering
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage = v; // aligned 64-bit store is a single instruction
        fence_after_store(order);
    }

    //! Loads the current value of storage with the given memory ordering
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage; // aligned 64-bit load is a single instruction
        fence_after(order);
        return v;
    }

    //! Atomically replaces the stored value with v; returns the previous value
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, tmp;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "mov %3, %1\n\t"   // tmp = v (stq_c overwrites %1, so refresh it each attempt)
            "ldq_l %0, %2\n\t" // original = *(&storage)
            "stq_c %1, %2\n\t" // storage = tmp; tmp = store succeeded
            "beq %1, 2f\n\t"   // if (tmp == 0) goto retry

            ".subsection 2\n\t"
            "2: br 1b\n\t"     // out-of-line retry path
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (tmp) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Weak CAS: may fail spuriously if the store-conditional fails.
    //! On return, expected holds the value observed in storage.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        int success;
        storage_type current;
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldq_l %2, %4\n\t" // current = *(&storage)
            "cmpeq %2, %0, %3\n\t" // success = current == expected
            "mov %2, %0\n\t" // expected = current
            "beq %3, 2f\n\t" // if (success == 0) goto end
            "stq_c %1, %4\n\t" // storage = desired; desired = store succeeded
            "mov %1, %3\n\t" // success = desired
            "2:\n\t"
            : "+r" (expected), // %0
            "+r" (desired), // %1
            "=&r" (current), // %2
            "=&r" (success) // %3
            : "m" (storage) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    //! Strong CAS: retries internally when the store-conditional fails, so it
    //! only reports failure on a genuine value mismatch.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        storage_type current, tmp;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "mov %5, %1\n\t" // tmp = desired
            "ldq_l %2, %4\n\t" // current = *(&storage)
            "cmpeq %2, %0, %3\n\t" // success = current == expected
            "mov %2, %0\n\t" // expected = current
            "beq %3, 2f\n\t" // if (success == 0) goto end
            "stq_c %1, %4\n\t" // storage = tmp; tmp = store succeeded
            "beq %1, 3f\n\t" // if (tmp == 0) goto retry
            "mov %1, %3\n\t" // success = tmp
            "2:\n\t"

            ".subsection 2\n\t"
            "3: br 1b\n\t" // out-of-line retry path
            ".previous\n\t"

            : "+r" (expected), // %0
            "=&r" (tmp), // %1
            "=&r" (current), // %2
            "=&r" (success) // %3
            : "m" (storage), // %4
            "r" (desired) // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    //! Atomically adds v to storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldq_l %0, %2\n\t"     // original = *(&storage)
            "addq %0, %3, %1\n\t"  // modified = original + v
            "stq_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically subtracts v from storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldq_l %0, %2\n\t"     // original = *(&storage)
            "subq %0, %3, %1\n\t"  // modified = original - v
            "stq_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically ANDs v into storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldq_l %0, %2\n\t"     // original = *(&storage)
            "and %0, %3, %1\n\t"   // modified = original & v
            "stq_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically ORs v into storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldq_l %0, %2\n\t"     // original = *(&storage)
            "bis %0, %3, %1\n\t"   // modified = original | v ("bis" is Alpha's OR)
            "stq_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically XORs v into storage; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, modified;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldq_l %0, %2\n\t"     // original = *(&storage)
            "xor %0, %3, %1\n\t"   // modified = original ^ v
            "stq_c %1, %2\n\t"     // storage = modified; modified = store succeeded
            "beq %1, 2f\n\t"       // retry if the store-conditional failed

            ".subsection 2\n\t"
            "2: br 1b\n\t"
            ".previous\n\t"

            : "=&r" (original), // %0
            "=&r" (modified) // %1
            : "m" (storage), // %2
            "r" (v) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    //! Atomically sets storage to 1; returns true if the previous value was non-zero
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    //! Atomically resets storage to 0
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};
860
861 } // namespace detail
862 } // namespace atomics
863 } // namespace boost
864
865 #include <boost/atomic/detail/footer.hpp>
866
867 #endif // BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_ALPHA_HPP_INCLUDED_