/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */
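
/*
 * Illustrative sketch (not part of this header): a message-passing
 * pattern showing why the _acquire/_release split matters. The names
 * payload, ready, producer and consumer below are hypothetical.
 *
 * The _release store orders the payload write before the flag write;
 * the _acquire load orders the flag read before the payload read, so
 * a consumer that observes ready == 1 is guaranteed to observe
 * payload == 42:
 *
 *	static int payload;
 *	static atomic_t ready = ATOMIC_INIT(0);
 *
 *	void producer(void)
 *	{
 *		payload = 42;
 *		atomic_set_release(&ready, 1);
 *	}
 *
 *	void consumer(void)
 *	{
 *		if (atomic_read_acquire(&ready))
 *			BUG_ON(payload != 42);
 *	}
 */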

#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * In addition, if an arch has a special barrier for acquire/release, it
 * can implement its own __atomic_op_* macros and use this same framework
 * to build the variants.
 */
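
/*
 * For instance, on an architecture that provides only
 * atomic_add_return_relaxed(), atomic_add_return_acquire(i, v) built
 * via __atomic_op_acquire() below expands roughly to (an illustrative
 * expansion, not a literal preprocessor result):
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 */
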
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
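
/*
 * Illustrative sketch of the try_cmpxchg pattern (the function and
 * variable names here are hypothetical, not part of this header): on
 * failure the macro writes the observed value back through the second
 * argument, so the retry loop does not need to re-read the atomic.
 *
 *	static bool add_capped(atomic_t *v, int cap)
 *	{
 *		int old = atomic_read(v);
 *
 *		do {
 *			if (old >= cap)
 *				return false;
 *		} while (!atomic_try_cmpxchg(v, &old, old + 1));
 *
 *		return true;
 *	}
 */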

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)		__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
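
/*
 * Illustrative sketch (hypothetical struct and function names): the
 * classic lookup-side use of atomic_inc_not_zero() is taking a new
 * reference to an object only if its refcount has not already dropped
 * to zero, i.e. only if the object is not already being torn down.
 *
 *	static struct obj *obj_get(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->refcnt) ? o : NULL;
 *	}
 */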

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif

/**
 * atomic_inc_not_zero_hint - increment if not null
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() gives a hint of the probable
 * value of the atomic. This helps the processor avoid reading the
 * memory before doing the atomic read/modify/write cycle, lowering
 * the number of bus transactions on some arches.
 *
 * Returns: 0 if the increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
	int val, c = hint;

	/* sanity test, should be removed by compiler if hint is a constant */
	if (!hint)
		return atomic_inc_not_zero(v);

	do {
		val = atomic_cmpxchg(v, c, c + 1);
		if (val == c)
			return 1;
		c = val;
	} while (c);

	return 0;
}
#endif
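
/*
 * Illustrative call (hypothetical context): if most objects reaching
 * this point are expected to hold exactly one reference, priming the
 * cmpxchg loop with that value can save the initial load:
 *
 *	if (!atomic_inc_not_zero_hint(&o->refcnt, 1))
 *		return NULL;
 */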

#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;
	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;
	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable @v was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif
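
/*
 * Illustrative sketch (hypothetical name): atomic_dec_if_positive() is
 * the natural primitive for consuming one unit of a counted resource
 * without ever driving the counter negative. A return value >= 0 means
 * the decrement actually happened.
 *
 *	static bool take_token(atomic_t *tokens)
 *	{
 *		return atomic_dec_if_positive(tokens) >= 0;
 *	}
 */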

#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))
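
/*
 * Illustrative use (hypothetical flag): spin until another CPU stores a
 * non-zero value, with ACQUIRE ordering against that CPU's earlier
 * writes. Within the condition expression, VAL names the value just
 * loaded from the counter (see smp_cond_load_acquire()):
 *
 *	atomic_cond_read_acquire(&flag, VAL != 0);
 */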

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */