/*
 * Mirrored from include/linux/atomic.h (mirror_ubuntu-focal-kernel,
 * via git.proxmox.com gitweb). Page-scrape residue removed.
 */
1 /* Atomic operations usable in machine independent code */
2 #ifndef _LINUX_ATOMIC_H
3 #define _LINUX_ATOMIC_H
4 #include <asm/atomic.h>
5 #include <asm/barrier.h>
8 * Relaxed variants of xchg, cmpxchg and some atomic operations.
10 * We support four variants:
12 * - Fully ordered: The default implementation, no suffix required.
13 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
14 * - Release: Provides RELEASE semantics, _release suffix.
15 * - Relaxed: No ordering guarantees, _relaxed suffix.
17 * For compound atomics performing both a load and a store, ACQUIRE
18 * semantics apply only to the load and RELEASE semantics only to the
19 * store portion of the operation. Note that a failed cmpxchg_acquire
20 * does -not- imply any memory ordering constraints.
22 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
/* Acquire load / release store of the raw counter value. */
#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Besides, if an arch has a special barrier for acquire/release, it could
 * implement its own __atomic_op_* and use the same framework for building
 * variants
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret  = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
/*
 * For each *_return op: if the arch supplies no _relaxed form, the fully
 * ordered op serves for every variant; otherwise build acquire/release/
 * fence variants from the _relaxed form via the __atomic_op_* helpers.
 */

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed	atomic_inc_return
#define  atomic_inc_return_acquire	atomic_inc_return
#define  atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed	atomic_dec_return
#define  atomic_dec_return_acquire	atomic_dec_return
#define  atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */
/*
 * atomic_fetch_* families: same pattern as the *_return ops. fetch_inc and
 * fetch_dec additionally fall back to fetch_add(1, v) / fetch_sub(1, v)
 * when the arch provides neither the op nor its _relaxed form.
 */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed	atomic_fetch_or
#define atomic_fetch_or_acquire	atomic_fetch_or
#define atomic_fetch_or_release	atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

/* Only built here when the arch supplies atomic_andnot at all. */
#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

/*
 * try_cmpxchg built on top of cmpxchg: on failure, write the value that
 * was actually found back through the "old" pointer so callers can loop
 * without re-reading. Returns true on success.
 */
#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
522 * atomic_add_unless - add unless the number is already a given value
523 * @v: pointer of type atomic_t
524 * @a: the amount to add to v...
525 * @u: ...unless v is equal to u.
527 * Atomically adds @a to @v, so long as @v was not already @u.
528 * Returns non-zero if @v was not @u, and zero otherwise.
530 static inline int atomic_add_unless(atomic_t
*v
, int a
, int u
)
532 return __atomic_add_unless(v
, a
, u
) != u
;
536 * atomic_inc_not_zero - increment unless the number is zero
537 * @v: pointer of type atomic_t
539 * Atomically increments @v by 1, so long as @v is non-zero.
540 * Returns non-zero if @v was non-zero, and zero otherwise.
542 #ifndef atomic_inc_not_zero
543 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
546 #ifndef atomic_andnot
547 static inline void atomic_andnot(int i
, atomic_t
*v
)
552 static inline int atomic_fetch_andnot(int i
, atomic_t
*v
)
554 return atomic_fetch_and(~i
, v
);
557 static inline int atomic_fetch_andnot_relaxed(int i
, atomic_t
*v
)
559 return atomic_fetch_and_relaxed(~i
, v
);
562 static inline int atomic_fetch_andnot_acquire(int i
, atomic_t
*v
)
564 return atomic_fetch_and_acquire(~i
, v
);
567 static inline int atomic_fetch_andnot_release(int i
, atomic_t
*v
)
569 return atomic_fetch_and_release(~i
, v
);
574 * atomic_inc_not_zero_hint - increment if not null
575 * @v: pointer of type atomic_t
576 * @hint: probable value of the atomic before the increment
578 * This version of atomic_inc_not_zero() gives a hint of probable
579 * value of the atomic. This helps processor to not read the memory
580 * before doing the atomic read/modify/write cycle, lowering
581 * number of bus transactions on some arches.
583 * Returns: 0 if increment was not done, 1 otherwise.
585 #ifndef atomic_inc_not_zero_hint
586 static inline int atomic_inc_not_zero_hint(atomic_t
*v
, int hint
)
590 /* sanity test, should be removed by compiler if hint is a constant */
592 return atomic_inc_not_zero(v
);
595 val
= atomic_cmpxchg(v
, c
, c
+ 1);
605 #ifndef atomic_inc_unless_negative
606 static inline int atomic_inc_unless_negative(atomic_t
*p
)
609 for (v
= 0; v
>= 0; v
= v1
) {
610 v1
= atomic_cmpxchg(p
, v
, v
+ 1);
618 #ifndef atomic_dec_unless_positive
619 static inline int atomic_dec_unless_positive(atomic_t
*p
)
622 for (v
= 0; v
<= 0; v
= v1
) {
623 v1
= atomic_cmpxchg(p
, v
, v
- 1);
632 * atomic_dec_if_positive - decrement by 1 if old value positive
633 * @v: pointer of type atomic_t
635 * The function returns the old value of *v minus 1, even if
636 * the atomic variable, v, was not decremented.
638 #ifndef atomic_dec_if_positive
639 static inline int atomic_dec_if_positive(atomic_t
*v
)
645 if (unlikely(dec
< 0))
647 old
= atomic_cmpxchg((v
), c
, dec
);
648 if (likely(old
== c
))
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

/* Acquire load / release store of the raw 64-bit counter value. */
#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed	atomic64_inc_return
#define  atomic64_inc_return_acquire	atomic64_inc_return
#define  atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed	atomic64_dec_return
#define  atomic64_dec_return_acquire	atomic64_dec_return
#define  atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */
/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed		atomic64_xchg
#define  atomic64_xchg_acquire		atomic64_xchg
#define  atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define  atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

/*
 * 64-bit try_cmpxchg on top of cmpxchg: on failure, write the observed
 * value back through the "old" pointer. Returns true on success.
 */
#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
1048 #ifndef atomic64_andnot
1049 static inline void atomic64_andnot(long long i
, atomic64_t
*v
)
1051 atomic64_and(~i
, v
);
1054 static inline long long atomic64_fetch_andnot(long long i
, atomic64_t
*v
)
1056 return atomic64_fetch_and(~i
, v
);
1059 static inline long long atomic64_fetch_andnot_relaxed(long long i
, atomic64_t
*v
)
1061 return atomic64_fetch_and_relaxed(~i
, v
);
1064 static inline long long atomic64_fetch_andnot_acquire(long long i
, atomic64_t
*v
)
1066 return atomic64_fetch_and_acquire(~i
, v
);
1069 static inline long long atomic64_fetch_andnot_release(long long i
, atomic64_t
*v
)
1071 return atomic64_fetch_and_release(~i
, v
);
1075 #include <asm-generic/atomic-long.h>
1077 #endif /* _LINUX_ATOMIC_H */