]> git.proxmox.com Git - mirror_ubuntu-kernels.git/blame - include/linux/atomic.h
License cleanup: add SPDX GPL-2.0 license identifier to files with no license
[mirror_ubuntu-kernels.git] / include / linux / atomic.h
CommitLineData
b2441318 1/* SPDX-License-Identifier: GPL-2.0 */
acac43e2 2/* Atomic operations usable in machine independent code */
3f9d35b9
ED
3#ifndef _LINUX_ATOMIC_H
4#define _LINUX_ATOMIC_H
5#include <asm/atomic.h>
654672d4
WD
6#include <asm/barrier.h>
7
/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

/* Acquire-ordered read of an atomic_t's counter. */
#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

/* Release-ordered write of an atomic_t's counter. */
#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
33
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Besides, if an arch has a special barrier for acquire/release, it could
 * implement its own __atomic_op_* and use the same framework for building
 * variants
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */

/* ACQUIRE: relaxed op first, then a full barrier after it. */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

/* RELEASE: full barrier first, then the relaxed op. */
#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

/* Fully ordered: barriers on both sides of the relaxed op. */
#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
654672d4
WD
73
/*
 * For each *_return operation: if the architecture supplies no _relaxed
 * form, its fully ordered implementation serves for every variant.
 * Otherwise, missing ordered variants are synthesised from the relaxed
 * one via the __atomic_op_* builders above.
 */

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */
169
28aa2bda
PZ
170
/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/*
 * atomic_fetch_inc_relaxed
 *
 * With no arch-provided fetch_inc at all, every variant falls back to
 * the corresponding fetch_add with an increment of 1.
 */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/*
 * atomic_fetch_dec_relaxed
 *
 * With no arch-provided fetch_dec at all, every variant falls back to
 * the corresponding fetch_sub with a decrement of 1.
 */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */
282
28aa2bda
PZ
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

/* Variants below only exist when the arch provides atomic_andnot. */
#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */
380
381
654672d4
WD
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */
429
a9ebf306
PZ
/*
 * try_cmpxchg: boolean-result compare-and-exchange built on top of
 * atomic_cmpxchg##type. On failure the caller's expected-value slot
 * (*_po) is updated to the value actually observed, so retry loops can
 * use it directly without an extra atomic_read.
 */
#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
452
654672d4
WD
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
3f9d35b9 521
f24219b4
AS
522/**
523 * atomic_add_unless - add unless the number is already a given value
524 * @v: pointer of type atomic_t
525 * @a: the amount to add to v...
526 * @u: ...unless v is equal to u.
527 *
528 * Atomically adds @a to @v, so long as @v was not already @u.
529 * Returns non-zero if @v was not @u, and zero otherwise.
530 */
531static inline int atomic_add_unless(atomic_t *v, int a, int u)
532{
533 return __atomic_add_unless(v, a, u) != u;
534}
535
60063497
AS
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
60063497 546
de9e432c
PZ
547#ifndef atomic_andnot
548static inline void atomic_andnot(int i, atomic_t *v)
549{
550 atomic_and(~i, v);
551}
28aa2bda
PZ
552
553static inline int atomic_fetch_andnot(int i, atomic_t *v)
554{
555 return atomic_fetch_and(~i, v);
556}
557
558static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
559{
560 return atomic_fetch_and_relaxed(~i, v);
561}
562
563static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
564{
565 return atomic_fetch_and_acquire(~i, v);
566}
567
568static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
569{
570 return atomic_fetch_and_release(~i, v);
571}
de9e432c
PZ
572#endif
573
3f9d35b9
ED
574/**
575 * atomic_inc_not_zero_hint - increment if not null
576 * @v: pointer of type atomic_t
577 * @hint: probable value of the atomic before the increment
578 *
579 * This version of atomic_inc_not_zero() gives a hint of probable
580 * value of the atomic. This helps processor to not read the memory
581 * before doing the atomic read/modify/write cycle, lowering
582 * number of bus transactions on some arches.
583 *
584 * Returns: 0 if increment was not done, 1 otherwise.
585 */
586#ifndef atomic_inc_not_zero_hint
587static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
588{
589 int val, c = hint;
590
591 /* sanity test, should be removed by compiler if hint is a constant */
592 if (!hint)
593 return atomic_inc_not_zero(v);
594
595 do {
596 val = atomic_cmpxchg(v, c, c + 1);
597 if (val == c)
598 return 1;
599 c = val;
600 } while (c);
601
602 return 0;
603}
604#endif
605
07b8ce1e
AV
606#ifndef atomic_inc_unless_negative
607static inline int atomic_inc_unless_negative(atomic_t *p)
608{
609 int v, v1;
610 for (v = 0; v >= 0; v = v1) {
611 v1 = atomic_cmpxchg(p, v, v + 1);
612 if (likely(v1 == v))
613 return 1;
614 }
615 return 0;
616}
617#endif
618
619#ifndef atomic_dec_unless_positive
620static inline int atomic_dec_unless_positive(atomic_t *p)
621{
622 int v, v1;
623 for (v = 0; v <= 0; v = v1) {
624 v1 = atomic_cmpxchg(p, v, v - 1);
625 if (likely(v1 == v))
626 return 1;
627 }
628 return 0;
629}
630#endif
631
e79bee24
SL
632/*
633 * atomic_dec_if_positive - decrement by 1 if old value positive
634 * @v: pointer of type atomic_t
635 *
636 * The function returns the old value of *v minus 1, even if
637 * the atomic variable, v, was not decremented.
638 */
639#ifndef atomic_dec_if_positive
640static inline int atomic_dec_if_positive(atomic_t *v)
641{
642 int c, old, dec;
643 c = atomic_read(v);
644 for (;;) {
645 dec = c - 1;
646 if (unlikely(dec < 0))
647 break;
648 old = atomic_cmpxchg((v), c, dec);
649 if (likely(old == c))
650 break;
651 c = old;
652 }
653 return dec;
654}
655#endif
656
7847777a
AS
657#ifdef CONFIG_GENERIC_ATOMIC64
658#include <asm-generic/atomic64.h>
659#endif
de9e432c 660
e1213332
PZ
/* Acquire-ordered read of an atomic64_t's counter. */
#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

/* Release-ordered write of an atomic64_t's counter. */
#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */
765
28aa2bda
PZ
766
/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/*
 * atomic64_fetch_inc_relaxed
 *
 * With no arch-provided fetch_inc at all, every variant falls back to
 * the corresponding fetch_add with an increment of 1.
 */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/*
 * atomic64_fetch_dec_relaxed
 *
 * With no arch-provided fetch_dec at all, every variant falls back to
 * the corresponding fetch_sub with a decrement of 1.
 */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */
878
28aa2bda
PZ
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

/* Variants below only exist when the arch provides atomic64_andnot. */
#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */
976
977
e1213332
PZ
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */
1025
a9ebf306
PZ
/*
 * 64-bit try_cmpxchg: boolean-result compare-and-exchange built on top
 * of atomic64_cmpxchg##type. On failure the caller's expected-value
 * slot (*_po) is updated to the value actually observed.
 */
#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
1048
de9e432c
PZ
1049#ifndef atomic64_andnot
1050static inline void atomic64_andnot(long long i, atomic64_t *v)
1051{
1052 atomic64_and(~i, v);
1053}
28aa2bda
PZ
1054
1055static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
1056{
1057 return atomic64_fetch_and(~i, v);
1058}
1059
1060static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
1061{
1062 return atomic64_fetch_and_relaxed(~i, v);
1063}
1064
1065static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
1066{
1067 return atomic64_fetch_and_acquire(~i, v);
1068}
1069
1070static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
1071{
1072 return atomic64_fetch_and_release(~i, v);
1073}
de9e432c
PZ
1074#endif
1075
90fe6514
PZ
1076#include <asm-generic/atomic-long.h>
1077
3f9d35b9 1078#endif /* _LINUX_ATOMIC_H */