/*
 * include/linux/atomic.h — recovered from the git.proxmox.com gitweb view of
 * mirror_ubuntu-jammy-kernel.git; HTML page residue removed.
 */
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /* Atomic operations usable in machine independent code */
3 #ifndef _LINUX_ATOMIC_H
4 #define _LINUX_ATOMIC_H
5 #include <asm/atomic.h>
6 #include <asm/barrier.h>
9 * Relaxed variants of xchg, cmpxchg and some atomic operations.
11 * We support four variants:
13 * - Fully ordered: The default implementation, no suffix required.
14 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
15 * - Release: Provides RELEASE semantics, _release suffix.
16 * - Relaxed: No ordering guarantees, _relaxed suffix.
18 * For compound atomics performing both a load and a store, ACQUIRE
19 * semantics apply only to the load and RELEASE semantics only to the
20 * store portion of the operation. Note that a failed cmpxchg_acquire
21 * does -not- imply any memory ordering constraints.
23 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
26 #ifndef atomic_read_acquire
27 #define atomic_read_acquire(v) smp_load_acquire(&(v)->counter)
30 #ifndef atomic_set_release
31 #define atomic_set_release(v, i) smp_store_release(&(v)->counter, (i))
35 * The idea here is to build acquire/release variants by adding explicit
36 * barriers on top of the relaxed variant. In the case where the relaxed
37 * variant is already fully ordered, no additional barriers are needed.
39 * Besides, if an arch has a special barrier for acquire/release, it could
40 * implement its own __atomic_op_* and use the same framework for building
43 * If an architecture overrides __atomic_op_acquire() it will probably want
44 * to define smp_mb__after_spinlock().
/*
 * Build the acquire/release/fully-ordered variants on top of the _relaxed
 * primitive by wrapping it in explicit barriers.  Architectures may
 * override any of these with a cheaper native form.
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
74 /* atomic_add_return_relaxed */
75 #ifndef atomic_add_return_relaxed
76 #define atomic_add_return_relaxed atomic_add_return
77 #define atomic_add_return_acquire atomic_add_return
78 #define atomic_add_return_release atomic_add_return
80 #else /* atomic_add_return_relaxed */
82 #ifndef atomic_add_return_acquire
83 #define atomic_add_return_acquire(...) \
84 __atomic_op_acquire(atomic_add_return, __VA_ARGS__)
87 #ifndef atomic_add_return_release
88 #define atomic_add_return_release(...) \
89 __atomic_op_release(atomic_add_return, __VA_ARGS__)
92 #ifndef atomic_add_return
93 #define atomic_add_return(...) \
94 __atomic_op_fence(atomic_add_return, __VA_ARGS__)
96 #endif /* atomic_add_return_relaxed */
98 /* atomic_inc_return_relaxed */
99 #ifndef atomic_inc_return_relaxed
100 #define atomic_inc_return_relaxed atomic_inc_return
101 #define atomic_inc_return_acquire atomic_inc_return
102 #define atomic_inc_return_release atomic_inc_return
104 #else /* atomic_inc_return_relaxed */
106 #ifndef atomic_inc_return_acquire
107 #define atomic_inc_return_acquire(...) \
108 __atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
111 #ifndef atomic_inc_return_release
112 #define atomic_inc_return_release(...) \
113 __atomic_op_release(atomic_inc_return, __VA_ARGS__)
116 #ifndef atomic_inc_return
117 #define atomic_inc_return(...) \
118 __atomic_op_fence(atomic_inc_return, __VA_ARGS__)
120 #endif /* atomic_inc_return_relaxed */
122 /* atomic_sub_return_relaxed */
123 #ifndef atomic_sub_return_relaxed
124 #define atomic_sub_return_relaxed atomic_sub_return
125 #define atomic_sub_return_acquire atomic_sub_return
126 #define atomic_sub_return_release atomic_sub_return
128 #else /* atomic_sub_return_relaxed */
130 #ifndef atomic_sub_return_acquire
131 #define atomic_sub_return_acquire(...) \
132 __atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
135 #ifndef atomic_sub_return_release
136 #define atomic_sub_return_release(...) \
137 __atomic_op_release(atomic_sub_return, __VA_ARGS__)
140 #ifndef atomic_sub_return
141 #define atomic_sub_return(...) \
142 __atomic_op_fence(atomic_sub_return, __VA_ARGS__)
144 #endif /* atomic_sub_return_relaxed */
146 /* atomic_dec_return_relaxed */
147 #ifndef atomic_dec_return_relaxed
148 #define atomic_dec_return_relaxed atomic_dec_return
149 #define atomic_dec_return_acquire atomic_dec_return
150 #define atomic_dec_return_release atomic_dec_return
152 #else /* atomic_dec_return_relaxed */
154 #ifndef atomic_dec_return_acquire
155 #define atomic_dec_return_acquire(...) \
156 __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
159 #ifndef atomic_dec_return_release
160 #define atomic_dec_return_release(...) \
161 __atomic_op_release(atomic_dec_return, __VA_ARGS__)
164 #ifndef atomic_dec_return
165 #define atomic_dec_return(...) \
166 __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
168 #endif /* atomic_dec_return_relaxed */
171 /* atomic_fetch_add_relaxed */
172 #ifndef atomic_fetch_add_relaxed
173 #define atomic_fetch_add_relaxed atomic_fetch_add
174 #define atomic_fetch_add_acquire atomic_fetch_add
175 #define atomic_fetch_add_release atomic_fetch_add
177 #else /* atomic_fetch_add_relaxed */
179 #ifndef atomic_fetch_add_acquire
180 #define atomic_fetch_add_acquire(...) \
181 __atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
184 #ifndef atomic_fetch_add_release
185 #define atomic_fetch_add_release(...) \
186 __atomic_op_release(atomic_fetch_add, __VA_ARGS__)
189 #ifndef atomic_fetch_add
190 #define atomic_fetch_add(...) \
191 __atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
193 #endif /* atomic_fetch_add_relaxed */
195 /* atomic_fetch_inc_relaxed */
196 #ifndef atomic_fetch_inc_relaxed
198 #ifndef atomic_fetch_inc
199 #define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
200 #define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
201 #define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
202 #define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
203 #else /* atomic_fetch_inc */
204 #define atomic_fetch_inc_relaxed atomic_fetch_inc
205 #define atomic_fetch_inc_acquire atomic_fetch_inc
206 #define atomic_fetch_inc_release atomic_fetch_inc
207 #endif /* atomic_fetch_inc */
209 #else /* atomic_fetch_inc_relaxed */
211 #ifndef atomic_fetch_inc_acquire
212 #define atomic_fetch_inc_acquire(...) \
213 __atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
216 #ifndef atomic_fetch_inc_release
217 #define atomic_fetch_inc_release(...) \
218 __atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
221 #ifndef atomic_fetch_inc
222 #define atomic_fetch_inc(...) \
223 __atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
225 #endif /* atomic_fetch_inc_relaxed */
227 /* atomic_fetch_sub_relaxed */
228 #ifndef atomic_fetch_sub_relaxed
229 #define atomic_fetch_sub_relaxed atomic_fetch_sub
230 #define atomic_fetch_sub_acquire atomic_fetch_sub
231 #define atomic_fetch_sub_release atomic_fetch_sub
233 #else /* atomic_fetch_sub_relaxed */
235 #ifndef atomic_fetch_sub_acquire
236 #define atomic_fetch_sub_acquire(...) \
237 __atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
240 #ifndef atomic_fetch_sub_release
241 #define atomic_fetch_sub_release(...) \
242 __atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
245 #ifndef atomic_fetch_sub
246 #define atomic_fetch_sub(...) \
247 __atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
249 #endif /* atomic_fetch_sub_relaxed */
251 /* atomic_fetch_dec_relaxed */
252 #ifndef atomic_fetch_dec_relaxed
254 #ifndef atomic_fetch_dec
255 #define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
256 #define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
257 #define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
258 #define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
259 #else /* atomic_fetch_dec */
260 #define atomic_fetch_dec_relaxed atomic_fetch_dec
261 #define atomic_fetch_dec_acquire atomic_fetch_dec
262 #define atomic_fetch_dec_release atomic_fetch_dec
263 #endif /* atomic_fetch_dec */
265 #else /* atomic_fetch_dec_relaxed */
267 #ifndef atomic_fetch_dec_acquire
268 #define atomic_fetch_dec_acquire(...) \
269 __atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
272 #ifndef atomic_fetch_dec_release
273 #define atomic_fetch_dec_release(...) \
274 __atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
277 #ifndef atomic_fetch_dec
278 #define atomic_fetch_dec(...) \
279 __atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
281 #endif /* atomic_fetch_dec_relaxed */
283 /* atomic_fetch_or_relaxed */
284 #ifndef atomic_fetch_or_relaxed
285 #define atomic_fetch_or_relaxed atomic_fetch_or
286 #define atomic_fetch_or_acquire atomic_fetch_or
287 #define atomic_fetch_or_release atomic_fetch_or
289 #else /* atomic_fetch_or_relaxed */
291 #ifndef atomic_fetch_or_acquire
292 #define atomic_fetch_or_acquire(...) \
293 __atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
296 #ifndef atomic_fetch_or_release
297 #define atomic_fetch_or_release(...) \
298 __atomic_op_release(atomic_fetch_or, __VA_ARGS__)
301 #ifndef atomic_fetch_or
302 #define atomic_fetch_or(...) \
303 __atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
305 #endif /* atomic_fetch_or_relaxed */
307 /* atomic_fetch_and_relaxed */
308 #ifndef atomic_fetch_and_relaxed
309 #define atomic_fetch_and_relaxed atomic_fetch_and
310 #define atomic_fetch_and_acquire atomic_fetch_and
311 #define atomic_fetch_and_release atomic_fetch_and
313 #else /* atomic_fetch_and_relaxed */
315 #ifndef atomic_fetch_and_acquire
316 #define atomic_fetch_and_acquire(...) \
317 __atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
320 #ifndef atomic_fetch_and_release
321 #define atomic_fetch_and_release(...) \
322 __atomic_op_release(atomic_fetch_and, __VA_ARGS__)
325 #ifndef atomic_fetch_and
326 #define atomic_fetch_and(...) \
327 __atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
329 #endif /* atomic_fetch_and_relaxed */
332 /* atomic_fetch_andnot_relaxed */
333 #ifndef atomic_fetch_andnot_relaxed
334 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
335 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
336 #define atomic_fetch_andnot_release atomic_fetch_andnot
338 #else /* atomic_fetch_andnot_relaxed */
340 #ifndef atomic_fetch_andnot_acquire
341 #define atomic_fetch_andnot_acquire(...) \
342 __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
345 #ifndef atomic_fetch_andnot_release
346 #define atomic_fetch_andnot_release(...) \
347 __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
350 #ifndef atomic_fetch_andnot
351 #define atomic_fetch_andnot(...) \
352 __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
354 #endif /* atomic_fetch_andnot_relaxed */
355 #endif /* atomic_andnot */
357 /* atomic_fetch_xor_relaxed */
358 #ifndef atomic_fetch_xor_relaxed
359 #define atomic_fetch_xor_relaxed atomic_fetch_xor
360 #define atomic_fetch_xor_acquire atomic_fetch_xor
361 #define atomic_fetch_xor_release atomic_fetch_xor
363 #else /* atomic_fetch_xor_relaxed */
365 #ifndef atomic_fetch_xor_acquire
366 #define atomic_fetch_xor_acquire(...) \
367 __atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
370 #ifndef atomic_fetch_xor_release
371 #define atomic_fetch_xor_release(...) \
372 __atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
375 #ifndef atomic_fetch_xor
376 #define atomic_fetch_xor(...) \
377 __atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
379 #endif /* atomic_fetch_xor_relaxed */
382 /* atomic_xchg_relaxed */
383 #ifndef atomic_xchg_relaxed
384 #define atomic_xchg_relaxed atomic_xchg
385 #define atomic_xchg_acquire atomic_xchg
386 #define atomic_xchg_release atomic_xchg
388 #else /* atomic_xchg_relaxed */
390 #ifndef atomic_xchg_acquire
391 #define atomic_xchg_acquire(...) \
392 __atomic_op_acquire(atomic_xchg, __VA_ARGS__)
395 #ifndef atomic_xchg_release
396 #define atomic_xchg_release(...) \
397 __atomic_op_release(atomic_xchg, __VA_ARGS__)
401 #define atomic_xchg(...) \
402 __atomic_op_fence(atomic_xchg, __VA_ARGS__)
404 #endif /* atomic_xchg_relaxed */
406 /* atomic_cmpxchg_relaxed */
407 #ifndef atomic_cmpxchg_relaxed
408 #define atomic_cmpxchg_relaxed atomic_cmpxchg
409 #define atomic_cmpxchg_acquire atomic_cmpxchg
410 #define atomic_cmpxchg_release atomic_cmpxchg
412 #else /* atomic_cmpxchg_relaxed */
414 #ifndef atomic_cmpxchg_acquire
415 #define atomic_cmpxchg_acquire(...) \
416 __atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
419 #ifndef atomic_cmpxchg_release
420 #define atomic_cmpxchg_release(...) \
421 __atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
424 #ifndef atomic_cmpxchg
425 #define atomic_cmpxchg(...) \
426 __atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
428 #endif /* atomic_cmpxchg_relaxed */
#ifndef atomic_try_cmpxchg

/*
 * Emulate try_cmpxchg() with plain cmpxchg(): on failure, write the value
 * actually observed back through @_po so the caller's expected value is
 * refreshed for the next retry.  Result is true iff the exchange happened.
 */
#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
453 /* cmpxchg_relaxed */
454 #ifndef cmpxchg_relaxed
455 #define cmpxchg_relaxed cmpxchg
456 #define cmpxchg_acquire cmpxchg
457 #define cmpxchg_release cmpxchg
459 #else /* cmpxchg_relaxed */
461 #ifndef cmpxchg_acquire
462 #define cmpxchg_acquire(...) \
463 __atomic_op_acquire(cmpxchg, __VA_ARGS__)
466 #ifndef cmpxchg_release
467 #define cmpxchg_release(...) \
468 __atomic_op_release(cmpxchg, __VA_ARGS__)
472 #define cmpxchg(...) \
473 __atomic_op_fence(cmpxchg, __VA_ARGS__)
475 #endif /* cmpxchg_relaxed */
477 /* cmpxchg64_relaxed */
478 #ifndef cmpxchg64_relaxed
479 #define cmpxchg64_relaxed cmpxchg64
480 #define cmpxchg64_acquire cmpxchg64
481 #define cmpxchg64_release cmpxchg64
483 #else /* cmpxchg64_relaxed */
485 #ifndef cmpxchg64_acquire
486 #define cmpxchg64_acquire(...) \
487 __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
490 #ifndef cmpxchg64_release
491 #define cmpxchg64_release(...) \
492 __atomic_op_release(cmpxchg64, __VA_ARGS__)
496 #define cmpxchg64(...) \
497 __atomic_op_fence(cmpxchg64, __VA_ARGS__)
499 #endif /* cmpxchg64_relaxed */
503 #define xchg_relaxed xchg
504 #define xchg_acquire xchg
505 #define xchg_release xchg
507 #else /* xchg_relaxed */
510 #define xchg_acquire(...) __atomic_op_acquire(xchg, __VA_ARGS__)
514 #define xchg_release(...) __atomic_op_release(xchg, __VA_ARGS__)
518 #define xchg(...) __atomic_op_fence(xchg, __VA_ARGS__)
520 #endif /* xchg_relaxed */
523 * atomic_add_unless - add unless the number is already a given value
524 * @v: pointer of type atomic_t
525 * @a: the amount to add to v...
526 * @u: ...unless v is equal to u.
528 * Atomically adds @a to @v, so long as @v was not already @u.
529 * Returns non-zero if @v was not @u, and zero otherwise.
531 static inline int atomic_add_unless(atomic_t
*v
, int a
, int u
)
533 return __atomic_add_unless(v
, a
, u
) != u
;
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
547 #ifndef atomic_andnot
548 static inline void atomic_andnot(int i
, atomic_t
*v
)
553 static inline int atomic_fetch_andnot(int i
, atomic_t
*v
)
555 return atomic_fetch_and(~i
, v
);
558 static inline int atomic_fetch_andnot_relaxed(int i
, atomic_t
*v
)
560 return atomic_fetch_and_relaxed(~i
, v
);
563 static inline int atomic_fetch_andnot_acquire(int i
, atomic_t
*v
)
565 return atomic_fetch_and_acquire(~i
, v
);
568 static inline int atomic_fetch_andnot_release(int i
, atomic_t
*v
)
570 return atomic_fetch_and_release(~i
, v
);
575 * atomic_inc_not_zero_hint - increment if not null
576 * @v: pointer of type atomic_t
577 * @hint: probable value of the atomic before the increment
579 * This version of atomic_inc_not_zero() gives a hint of probable
580 * value of the atomic. This helps processor to not read the memory
581 * before doing the atomic read/modify/write cycle, lowering
582 * number of bus transactions on some arches.
584 * Returns: 0 if increment was not done, 1 otherwise.
586 #ifndef atomic_inc_not_zero_hint
587 static inline int atomic_inc_not_zero_hint(atomic_t
*v
, int hint
)
591 /* sanity test, should be removed by compiler if hint is a constant */
593 return atomic_inc_not_zero(v
);
596 val
= atomic_cmpxchg(v
, c
, c
+ 1);
606 #ifndef atomic_inc_unless_negative
607 static inline int atomic_inc_unless_negative(atomic_t
*p
)
610 for (v
= 0; v
>= 0; v
= v1
) {
611 v1
= atomic_cmpxchg(p
, v
, v
+ 1);
619 #ifndef atomic_dec_unless_positive
620 static inline int atomic_dec_unless_positive(atomic_t
*p
)
623 for (v
= 0; v
<= 0; v
= v1
) {
624 v1
= atomic_cmpxchg(p
, v
, v
- 1);
633 * atomic_dec_if_positive - decrement by 1 if old value positive
634 * @v: pointer of type atomic_t
636 * The function returns the old value of *v minus 1, even if
637 * the atomic variable, v, was not decremented.
639 #ifndef atomic_dec_if_positive
640 static inline int atomic_dec_if_positive(atomic_t
*v
)
646 if (unlikely(dec
< 0))
648 old
= atomic_cmpxchg((v
), c
, dec
);
649 if (likely(old
== c
))
657 #define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
658 #define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
660 #ifdef CONFIG_GENERIC_ATOMIC64
661 #include <asm-generic/atomic64.h>
664 #ifndef atomic64_read_acquire
665 #define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
668 #ifndef atomic64_set_release
669 #define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
672 /* atomic64_add_return_relaxed */
673 #ifndef atomic64_add_return_relaxed
674 #define atomic64_add_return_relaxed atomic64_add_return
675 #define atomic64_add_return_acquire atomic64_add_return
676 #define atomic64_add_return_release atomic64_add_return
678 #else /* atomic64_add_return_relaxed */
680 #ifndef atomic64_add_return_acquire
681 #define atomic64_add_return_acquire(...) \
682 __atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
685 #ifndef atomic64_add_return_release
686 #define atomic64_add_return_release(...) \
687 __atomic_op_release(atomic64_add_return, __VA_ARGS__)
690 #ifndef atomic64_add_return
691 #define atomic64_add_return(...) \
692 __atomic_op_fence(atomic64_add_return, __VA_ARGS__)
694 #endif /* atomic64_add_return_relaxed */
696 /* atomic64_inc_return_relaxed */
697 #ifndef atomic64_inc_return_relaxed
698 #define atomic64_inc_return_relaxed atomic64_inc_return
699 #define atomic64_inc_return_acquire atomic64_inc_return
700 #define atomic64_inc_return_release atomic64_inc_return
702 #else /* atomic64_inc_return_relaxed */
704 #ifndef atomic64_inc_return_acquire
705 #define atomic64_inc_return_acquire(...) \
706 __atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
709 #ifndef atomic64_inc_return_release
710 #define atomic64_inc_return_release(...) \
711 __atomic_op_release(atomic64_inc_return, __VA_ARGS__)
714 #ifndef atomic64_inc_return
715 #define atomic64_inc_return(...) \
716 __atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
718 #endif /* atomic64_inc_return_relaxed */
721 /* atomic64_sub_return_relaxed */
722 #ifndef atomic64_sub_return_relaxed
723 #define atomic64_sub_return_relaxed atomic64_sub_return
724 #define atomic64_sub_return_acquire atomic64_sub_return
725 #define atomic64_sub_return_release atomic64_sub_return
727 #else /* atomic64_sub_return_relaxed */
729 #ifndef atomic64_sub_return_acquire
730 #define atomic64_sub_return_acquire(...) \
731 __atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
734 #ifndef atomic64_sub_return_release
735 #define atomic64_sub_return_release(...) \
736 __atomic_op_release(atomic64_sub_return, __VA_ARGS__)
739 #ifndef atomic64_sub_return
740 #define atomic64_sub_return(...) \
741 __atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
743 #endif /* atomic64_sub_return_relaxed */
745 /* atomic64_dec_return_relaxed */
746 #ifndef atomic64_dec_return_relaxed
747 #define atomic64_dec_return_relaxed atomic64_dec_return
748 #define atomic64_dec_return_acquire atomic64_dec_return
749 #define atomic64_dec_return_release atomic64_dec_return
751 #else /* atomic64_dec_return_relaxed */
753 #ifndef atomic64_dec_return_acquire
754 #define atomic64_dec_return_acquire(...) \
755 __atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
758 #ifndef atomic64_dec_return_release
759 #define atomic64_dec_return_release(...) \
760 __atomic_op_release(atomic64_dec_return, __VA_ARGS__)
763 #ifndef atomic64_dec_return
764 #define atomic64_dec_return(...) \
765 __atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
767 #endif /* atomic64_dec_return_relaxed */
770 /* atomic64_fetch_add_relaxed */
771 #ifndef atomic64_fetch_add_relaxed
772 #define atomic64_fetch_add_relaxed atomic64_fetch_add
773 #define atomic64_fetch_add_acquire atomic64_fetch_add
774 #define atomic64_fetch_add_release atomic64_fetch_add
776 #else /* atomic64_fetch_add_relaxed */
778 #ifndef atomic64_fetch_add_acquire
779 #define atomic64_fetch_add_acquire(...) \
780 __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
783 #ifndef atomic64_fetch_add_release
784 #define atomic64_fetch_add_release(...) \
785 __atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
788 #ifndef atomic64_fetch_add
789 #define atomic64_fetch_add(...) \
790 __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
792 #endif /* atomic64_fetch_add_relaxed */
794 /* atomic64_fetch_inc_relaxed */
795 #ifndef atomic64_fetch_inc_relaxed
797 #ifndef atomic64_fetch_inc
798 #define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
799 #define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
800 #define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
801 #define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
802 #else /* atomic64_fetch_inc */
803 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
804 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
805 #define atomic64_fetch_inc_release atomic64_fetch_inc
806 #endif /* atomic64_fetch_inc */
808 #else /* atomic64_fetch_inc_relaxed */
810 #ifndef atomic64_fetch_inc_acquire
811 #define atomic64_fetch_inc_acquire(...) \
812 __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
815 #ifndef atomic64_fetch_inc_release
816 #define atomic64_fetch_inc_release(...) \
817 __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
820 #ifndef atomic64_fetch_inc
821 #define atomic64_fetch_inc(...) \
822 __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
824 #endif /* atomic64_fetch_inc_relaxed */
826 /* atomic64_fetch_sub_relaxed */
827 #ifndef atomic64_fetch_sub_relaxed
828 #define atomic64_fetch_sub_relaxed atomic64_fetch_sub
829 #define atomic64_fetch_sub_acquire atomic64_fetch_sub
830 #define atomic64_fetch_sub_release atomic64_fetch_sub
832 #else /* atomic64_fetch_sub_relaxed */
834 #ifndef atomic64_fetch_sub_acquire
835 #define atomic64_fetch_sub_acquire(...) \
836 __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
839 #ifndef atomic64_fetch_sub_release
840 #define atomic64_fetch_sub_release(...) \
841 __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
844 #ifndef atomic64_fetch_sub
845 #define atomic64_fetch_sub(...) \
846 __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
848 #endif /* atomic64_fetch_sub_relaxed */
850 /* atomic64_fetch_dec_relaxed */
851 #ifndef atomic64_fetch_dec_relaxed
853 #ifndef atomic64_fetch_dec
854 #define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
855 #define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
856 #define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
857 #define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
858 #else /* atomic64_fetch_dec */
859 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
860 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
861 #define atomic64_fetch_dec_release atomic64_fetch_dec
862 #endif /* atomic64_fetch_dec */
864 #else /* atomic64_fetch_dec_relaxed */
866 #ifndef atomic64_fetch_dec_acquire
867 #define atomic64_fetch_dec_acquire(...) \
868 __atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
871 #ifndef atomic64_fetch_dec_release
872 #define atomic64_fetch_dec_release(...) \
873 __atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
876 #ifndef atomic64_fetch_dec
877 #define atomic64_fetch_dec(...) \
878 __atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
880 #endif /* atomic64_fetch_dec_relaxed */
882 /* atomic64_fetch_or_relaxed */
883 #ifndef atomic64_fetch_or_relaxed
884 #define atomic64_fetch_or_relaxed atomic64_fetch_or
885 #define atomic64_fetch_or_acquire atomic64_fetch_or
886 #define atomic64_fetch_or_release atomic64_fetch_or
888 #else /* atomic64_fetch_or_relaxed */
890 #ifndef atomic64_fetch_or_acquire
891 #define atomic64_fetch_or_acquire(...) \
892 __atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
895 #ifndef atomic64_fetch_or_release
896 #define atomic64_fetch_or_release(...) \
897 __atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
900 #ifndef atomic64_fetch_or
901 #define atomic64_fetch_or(...) \
902 __atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
904 #endif /* atomic64_fetch_or_relaxed */
906 /* atomic64_fetch_and_relaxed */
907 #ifndef atomic64_fetch_and_relaxed
908 #define atomic64_fetch_and_relaxed atomic64_fetch_and
909 #define atomic64_fetch_and_acquire atomic64_fetch_and
910 #define atomic64_fetch_and_release atomic64_fetch_and
912 #else /* atomic64_fetch_and_relaxed */
914 #ifndef atomic64_fetch_and_acquire
915 #define atomic64_fetch_and_acquire(...) \
916 __atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
919 #ifndef atomic64_fetch_and_release
920 #define atomic64_fetch_and_release(...) \
921 __atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
924 #ifndef atomic64_fetch_and
925 #define atomic64_fetch_and(...) \
926 __atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
928 #endif /* atomic64_fetch_and_relaxed */
930 #ifdef atomic64_andnot
931 /* atomic64_fetch_andnot_relaxed */
932 #ifndef atomic64_fetch_andnot_relaxed
933 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
934 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
935 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
937 #else /* atomic64_fetch_andnot_relaxed */
939 #ifndef atomic64_fetch_andnot_acquire
940 #define atomic64_fetch_andnot_acquire(...) \
941 __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
944 #ifndef atomic64_fetch_andnot_release
945 #define atomic64_fetch_andnot_release(...) \
946 __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
949 #ifndef atomic64_fetch_andnot
950 #define atomic64_fetch_andnot(...) \
951 __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
953 #endif /* atomic64_fetch_andnot_relaxed */
954 #endif /* atomic64_andnot */
956 /* atomic64_fetch_xor_relaxed */
957 #ifndef atomic64_fetch_xor_relaxed
958 #define atomic64_fetch_xor_relaxed atomic64_fetch_xor
959 #define atomic64_fetch_xor_acquire atomic64_fetch_xor
960 #define atomic64_fetch_xor_release atomic64_fetch_xor
962 #else /* atomic64_fetch_xor_relaxed */
964 #ifndef atomic64_fetch_xor_acquire
965 #define atomic64_fetch_xor_acquire(...) \
966 __atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
969 #ifndef atomic64_fetch_xor_release
970 #define atomic64_fetch_xor_release(...) \
971 __atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
974 #ifndef atomic64_fetch_xor
975 #define atomic64_fetch_xor(...) \
976 __atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
978 #endif /* atomic64_fetch_xor_relaxed */
981 /* atomic64_xchg_relaxed */
982 #ifndef atomic64_xchg_relaxed
983 #define atomic64_xchg_relaxed atomic64_xchg
984 #define atomic64_xchg_acquire atomic64_xchg
985 #define atomic64_xchg_release atomic64_xchg
987 #else /* atomic64_xchg_relaxed */
989 #ifndef atomic64_xchg_acquire
990 #define atomic64_xchg_acquire(...) \
991 __atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
994 #ifndef atomic64_xchg_release
995 #define atomic64_xchg_release(...) \
996 __atomic_op_release(atomic64_xchg, __VA_ARGS__)
999 #ifndef atomic64_xchg
1000 #define atomic64_xchg(...) \
1001 __atomic_op_fence(atomic64_xchg, __VA_ARGS__)
1003 #endif /* atomic64_xchg_relaxed */
1005 /* atomic64_cmpxchg_relaxed */
1006 #ifndef atomic64_cmpxchg_relaxed
1007 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg
1008 #define atomic64_cmpxchg_acquire atomic64_cmpxchg
1009 #define atomic64_cmpxchg_release atomic64_cmpxchg
1011 #else /* atomic64_cmpxchg_relaxed */
1013 #ifndef atomic64_cmpxchg_acquire
1014 #define atomic64_cmpxchg_acquire(...) \
1015 __atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
1018 #ifndef atomic64_cmpxchg_release
1019 #define atomic64_cmpxchg_release(...) \
1020 __atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
1023 #ifndef atomic64_cmpxchg
1024 #define atomic64_cmpxchg(...) \
1025 __atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
1027 #endif /* atomic64_cmpxchg_relaxed */
#ifndef atomic64_try_cmpxchg

/*
 * 64-bit twin of __atomic_try_cmpxchg(): emulate try_cmpxchg() with plain
 * cmpxchg(), refreshing the caller's expected value through @_po on failure.
 * Result is true iff the exchange happened.
 */
#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
1052 #ifndef atomic64_andnot
1053 static inline void atomic64_andnot(long long i
, atomic64_t
*v
)
1055 atomic64_and(~i
, v
);
1058 static inline long long atomic64_fetch_andnot(long long i
, atomic64_t
*v
)
1060 return atomic64_fetch_and(~i
, v
);
1063 static inline long long atomic64_fetch_andnot_relaxed(long long i
, atomic64_t
*v
)
1065 return atomic64_fetch_and_relaxed(~i
, v
);
1068 static inline long long atomic64_fetch_andnot_acquire(long long i
, atomic64_t
*v
)
1070 return atomic64_fetch_and_acquire(~i
, v
);
1073 static inline long long atomic64_fetch_andnot_release(long long i
, atomic64_t
*v
)
1075 return atomic64_fetch_and_release(~i
, v
);
1079 #define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
1080 #define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
1082 #include <asm-generic/atomic-long.h>
1084 #endif /* _LINUX_ATOMIC_H */