]> git.proxmox.com Git - mirror_ubuntu-jammy-kernel.git/blob - include/asm-generic/atomic-instrumented.h
Merge tag 'timers-urgent-2020-08-14' of git://git.kernel.org/pub/scm/linux/kernel...
[mirror_ubuntu-jammy-kernel.git] / include / asm-generic / atomic-instrumented.h
1 // SPDX-License-Identifier: GPL-2.0
2
3 // Generated by scripts/atomic/gen-atomic-instrumented.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5
6 /*
7 * This file provides wrappers with KASAN instrumentation for atomic operations.
8 * To use this functionality an arch's atomic.h file needs to define all
9 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
10 * this file at the end. This file provides atomic_read() that forwards to
11 * arch_atomic_read() for actual atomic operation.
12 * Note: if an arch atomic operation is implemented by means of other atomic
13 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
14 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
15 * double instrumentation.
16 */
17 #ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
18 #define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
19
20 #include <linux/build_bug.h>
21 #include <linux/compiler.h>
22 #include <linux/instrumented.h>
23
/* Instrumented atomic_read(): report the read to KASAN/KCSAN, then forward. */
static __always_inline int
atomic_read(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic_read(v);
}
#define atomic_read atomic_read

/* Acquire-ordered read; wrapped only if the arch provides it. */
#if defined(arch_atomic_read_acquire)
static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic_read_acquire(v);
}
#define atomic_read_acquire atomic_read_acquire
#endif
41
/* Instrumented atomic_set(): report the write, then forward. */
static __always_inline void
atomic_set(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}
#define atomic_set atomic_set

/* Release-ordered set; wrapped only if the arch provides it. */
#if defined(arch_atomic_set_release)
static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_set_release(v, i);
}
#define atomic_set_release atomic_set_release
#endif
59
/*
 * atomic_add() and the atomic_add_return() ordering variants: instrument
 * the access for KASAN/KCSAN, then forward to the arch_ implementation.
 *
 * NOTE(review): these RMW wrappers instrument the access as a plain
 * atomic write; later kernels use instrument_atomic_read_write() for
 * RMWs.  This file is generated -- any such change belongs in
 * scripts/atomic/gen-atomic-instrumented.sh, not here.
 */
static __always_inline void
atomic_add(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}
#define atomic_add atomic_add

/* Fully-ordered form: emitted unless the arch provides only _relaxed. */
#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}
#define atomic_add_return atomic_add_return
#endif

#if defined(arch_atomic_add_return_acquire)
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_return_acquire(i, v);
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#if defined(arch_atomic_add_return_release)
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_return_release(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#if defined(arch_atomic_add_return_relaxed)
static __always_inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_relaxed atomic_add_return_relaxed
#endif
107
/* atomic_fetch_add() ordering variants: instrument, then forward. */
#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}
#define atomic_fetch_add atomic_fetch_add
#endif

#if defined(arch_atomic_fetch_add_acquire)
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_acquire(i, v);
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#if defined(arch_atomic_fetch_add_release)
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_release(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#if defined(arch_atomic_fetch_add_relaxed)
static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
#endif
147
/* atomic_sub() and atomic_sub_return() ordering variants: instrument, then forward. */
static __always_inline void
atomic_sub(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}
#define atomic_sub atomic_sub

#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}
#define atomic_sub_return atomic_sub_return
#endif

#if defined(arch_atomic_sub_return_acquire)
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_return_acquire(i, v);
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#if defined(arch_atomic_sub_return_release)
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_return_release(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#if defined(arch_atomic_sub_return_relaxed)
static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#endif
195
/* atomic_fetch_sub() ordering variants: instrument, then forward. */
#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#if defined(arch_atomic_fetch_sub_acquire)
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_acquire(i, v);
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#if defined(arch_atomic_fetch_sub_release)
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_release(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#if defined(arch_atomic_fetch_sub_relaxed)
static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#endif
235
/* atomic_inc() and atomic_inc_return() ordering variants: instrument, then forward. */
#if defined(arch_atomic_inc)
static __always_inline void
atomic_inc(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif
285
/* atomic_fetch_inc() ordering variants: instrument, then forward. */
#if defined(arch_atomic_fetch_inc)
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif
325
/* atomic_dec() and atomic_dec_return() ordering variants: instrument, then forward. */
#if defined(arch_atomic_dec)
static __always_inline void
atomic_dec(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif
375
/* atomic_fetch_dec() ordering variants: instrument, then forward. */
#if defined(arch_atomic_fetch_dec)
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif
415
/* atomic_and() and atomic_fetch_and() ordering variants: instrument, then forward. */
static __always_inline void
atomic_and(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}
#define atomic_and atomic_and

#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}
#define atomic_fetch_and atomic_fetch_and
#endif

#if defined(arch_atomic_fetch_and_acquire)
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_and_acquire(i, v);
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#if defined(arch_atomic_fetch_and_release)
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_and_release(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#if defined(arch_atomic_fetch_and_relaxed)
static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
#endif
463
/* atomic_andnot() and atomic_fetch_andnot() ordering variants: instrument, then forward. */
#if defined(arch_atomic_andnot)
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif
513
/* atomic_or() and atomic_fetch_or() ordering variants: instrument, then forward. */
static __always_inline void
atomic_or(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}
#define atomic_or atomic_or

#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}
#define atomic_fetch_or atomic_fetch_or
#endif

#if defined(arch_atomic_fetch_or_acquire)
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_or_acquire(i, v);
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#if defined(arch_atomic_fetch_or_release)
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_or_release(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#if defined(arch_atomic_fetch_or_relaxed)
static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
#endif
561
/* atomic_xor() and atomic_fetch_xor() ordering variants: instrument, then forward. */
static __always_inline void
atomic_xor(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}
#define atomic_xor atomic_xor

#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#if defined(arch_atomic_fetch_xor_acquire)
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_acquire(i, v);
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#if defined(arch_atomic_fetch_xor_release)
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_release(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#if defined(arch_atomic_fetch_xor_relaxed)
static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
#endif
609
/* atomic_xchg() ordering variants: instrument, then forward.  Returns the old value. */
#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}
#define atomic_xchg atomic_xchg
#endif

#if defined(arch_atomic_xchg_acquire)
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_xchg_acquire(v, i);
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#if defined(arch_atomic_xchg_release)
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_xchg_release(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#if defined(arch_atomic_xchg_relaxed)
static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_relaxed atomic_xchg_relaxed
#endif
649
/* atomic_cmpxchg() ordering variants: instrument, then forward.  Returns the old value. */
#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#if defined(arch_atomic_cmpxchg_acquire)
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_acquire(v, old, new);
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#if defined(arch_atomic_cmpxchg_release)
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_release(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#if defined(arch_atomic_cmpxchg_relaxed)
static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
#endif
689
/*
 * atomic_try_cmpxchg() ordering variants.  Both *v and the caller's *old
 * are instrumented, since the arch op may update *old on failure.
 */
#if defined(arch_atomic_try_cmpxchg)
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif
733
/* Test-result RMW helpers: instrument, then forward; each returns a bool. */
#if defined(arch_atomic_sub_and_test)
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif
773
/* Conditional RMW helpers: instrument, then forward to the arch_ implementation. */
#if defined(arch_atomic_fetch_add_unless)
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
833
/* 64-bit counterparts of the wrappers above, operating on atomic64_t. */
static __always_inline s64
atomic64_read(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}
#define atomic64_read atomic64_read

#if defined(arch_atomic64_read_acquire)
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}
#define atomic64_set atomic64_set

#if defined(arch_atomic64_set_release)
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_set_release(v, i);
}
#define atomic64_set_release atomic64_set_release
#endif
869
/* atomic64_add() and atomic64_add_return() ordering variants: instrument, then forward. */
static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}
#define atomic64_add atomic64_add

#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}
#define atomic64_add_return atomic64_add_return
#endif

#if defined(arch_atomic64_add_return_acquire)
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_return_acquire(i, v);
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#if defined(arch_atomic64_add_return_release)
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_return_release(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#if defined(arch_atomic64_add_return_relaxed)
static __always_inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#endif
917
/* atomic64_fetch_add() ordering variants: instrument, then forward. */
#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#if defined(arch_atomic64_fetch_add_acquire)
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_acquire(i, v);
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#if defined(arch_atomic64_fetch_add_release)
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_release(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#if defined(arch_atomic64_fetch_add_relaxed)
static __always_inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
#endif
957
/* atomic64_sub() and atomic64_sub_return() ordering variants: instrument, then forward. */
static __always_inline void
atomic64_sub(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}
#define atomic64_sub atomic64_sub

#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}
#define atomic64_sub_return atomic64_sub_return
#endif

#if defined(arch_atomic64_sub_return_acquire)
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_return_acquire(i, v);
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#if defined(arch_atomic64_sub_return_release)
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_return_release(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#if defined(arch_atomic64_sub_return_relaxed)
static __always_inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#endif
1005
/* atomic64_fetch_sub() ordering variants: instrument, then forward. */
#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#if defined(arch_atomic64_fetch_sub_acquire)
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_acquire(i, v);
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#if defined(arch_atomic64_fetch_sub_release)
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_release(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#if defined(arch_atomic64_fetch_sub_relaxed)
static __always_inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
#endif
1045
/* atomic64_inc() and atomic64_inc_return() ordering variants: instrument, then forward. */
#if defined(arch_atomic64_inc)
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif
1095
1096 #if defined(arch_atomic64_fetch_inc)
1097 static __always_inline s64
1098 atomic64_fetch_inc(atomic64_t *v)
1099 {
1100 instrument_atomic_write(v, sizeof(*v));
1101 return arch_atomic64_fetch_inc(v);
1102 }
1103 #define atomic64_fetch_inc atomic64_fetch_inc
1104 #endif
1105
1106 #if defined(arch_atomic64_fetch_inc_acquire)
1107 static __always_inline s64
1108 atomic64_fetch_inc_acquire(atomic64_t *v)
1109 {
1110 instrument_atomic_write(v, sizeof(*v));
1111 return arch_atomic64_fetch_inc_acquire(v);
1112 }
1113 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1114 #endif
1115
1116 #if defined(arch_atomic64_fetch_inc_release)
1117 static __always_inline s64
1118 atomic64_fetch_inc_release(atomic64_t *v)
1119 {
1120 instrument_atomic_write(v, sizeof(*v));
1121 return arch_atomic64_fetch_inc_release(v);
1122 }
1123 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1124 #endif
1125
1126 #if defined(arch_atomic64_fetch_inc_relaxed)
1127 static __always_inline s64
1128 atomic64_fetch_inc_relaxed(atomic64_t *v)
1129 {
1130 instrument_atomic_write(v, sizeof(*v));
1131 return arch_atomic64_fetch_inc_relaxed(v);
1132 }
1133 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1134 #endif
1135
/*
 * Instrumented wrappers for the atomic64 decrement family (dec,
 * dec_return, fetch_dec and their _acquire/_release/_relaxed ordering
 * variants): report the access to the instrumentation core, then
 * forward to the arch_ implementation.  Each wrapper is emitted only
 * when the arch defines the corresponding arch_ op.
 *
 * NOTE(review): RMW ops are annotated write-only here; later kernels
 * use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
#if defined(arch_atomic64_dec)
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif

#if defined(arch_atomic64_dec_return)
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#if defined(arch_atomic64_fetch_dec)
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif
1225
/*
 * Instrumented wrappers for the atomic64 bitwise AND / AND-NOT family:
 * report the access to the instrumentation core, then forward to the
 * arch_ implementation.  atomic64_and() itself is unconditional; the
 * fetch_and/andnot variants are emitted only when the arch provides
 * them (fetch_and also when the arch has no _relaxed form, in which
 * case the fully-ordered op is the one that exists).
 *
 * NOTE(review): RMW ops are annotated write-only here; later kernels
 * use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
static __always_inline void
atomic64_and(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}
#define atomic64_and atomic64_and

#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#if defined(arch_atomic64_fetch_and_acquire)
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#if defined(arch_atomic64_fetch_and_release)
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#if defined(arch_atomic64_fetch_and_relaxed)
static __always_inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#endif

#if defined(arch_atomic64_andnot)
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#if defined(arch_atomic64_fetch_andnot)
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif
1323
/*
 * Instrumented wrappers for the atomic64 bitwise OR family: report the
 * access to the instrumentation core, then forward to the arch_
 * implementation.  atomic64_or() is unconditional; the fetch_or
 * variants are emitted only when the arch provides them.
 *
 * NOTE(review): RMW ops are annotated write-only here; later kernels
 * use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}
#define atomic64_or atomic64_or

#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#if defined(arch_atomic64_fetch_or_acquire)
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#if defined(arch_atomic64_fetch_or_release)
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#if defined(arch_atomic64_fetch_or_relaxed)
static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#endif
1371
/*
 * Instrumented wrappers for the atomic64 bitwise XOR family: report the
 * access to the instrumentation core, then forward to the arch_
 * implementation.  atomic64_xor() is unconditional; the fetch_xor
 * variants are emitted only when the arch provides them.
 *
 * NOTE(review): RMW ops are annotated write-only here; later kernels
 * use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}
#define atomic64_xor atomic64_xor

#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#if defined(arch_atomic64_fetch_xor_acquire)
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#if defined(arch_atomic64_fetch_xor_release)
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#if defined(arch_atomic64_fetch_xor_relaxed)
static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
#endif
1419
/*
 * Instrumented wrappers for atomic64_xchg() and its ordering variants:
 * report the access to the instrumentation core, then forward to the
 * arch_ implementation.  The fully-ordered form is emitted when the
 * arch provides it directly or provides no _relaxed variant at all.
 *
 * NOTE(review): xchg is an RMW op but is annotated write-only here;
 * later kernels use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}
#define atomic64_xchg atomic64_xchg
#endif

#if defined(arch_atomic64_xchg_acquire)
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#if defined(arch_atomic64_xchg_release)
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#if defined(arch_atomic64_xchg_relaxed)
static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_relaxed atomic64_xchg_relaxed
#endif
1459
/*
 * Instrumented wrappers for atomic64_cmpxchg() and its ordering
 * variants: report the access to the instrumentation core, then forward
 * to the arch_ implementation.  The fully-ordered form is emitted when
 * the arch provides it directly or provides no _relaxed variant at all.
 *
 * NOTE(review): cmpxchg is an RMW op but is annotated write-only here;
 * later kernels use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#if defined(arch_atomic64_cmpxchg_acquire)
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#if defined(arch_atomic64_cmpxchg_release)
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#if defined(arch_atomic64_cmpxchg_relaxed)
static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
#endif
1499
/*
 * Instrumented wrappers for atomic64_try_cmpxchg() and its ordering
 * variants.  Both the atomic target @v and the caller's expected-value
 * slot @old are instrumented before forwarding to the arch_ op.
 *
 * NOTE(review): @old is a plain pointer, yet it is annotated with
 * instrument_atomic_write(); later kernels use the plain (non-atomic)
 * instrument_read_write() for @old and instrument_atomic_read_write()
 * for @v.  Any such change belongs in
 * scripts/atomic/gen-atomic-instrumented.sh, not in this generated file.
 */
#if defined(arch_atomic64_try_cmpxchg)
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif
1543
/*
 * Instrumented wrappers for the bool-returning atomic64 test ops
 * (sub_and_test, dec_and_test, inc_and_test, add_negative): report the
 * access to the instrumentation core, then forward to the arch_
 * implementation.  Emitted only when the arch provides the op.
 *
 * NOTE(review): RMW ops are annotated write-only here; later kernels
 * use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
#if defined(arch_atomic64_sub_and_test)
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif
1583
/*
 * Instrumented wrappers for the conditional atomic64 ops
 * (fetch_add_unless, add_unless, inc_not_zero, inc_unless_negative,
 * dec_unless_positive, dec_if_positive): report the access to the
 * instrumentation core, then forward to the arch_ implementation.
 * Emitted only when the arch provides the op.
 *
 * NOTE(review): these RMW ops are annotated write-only here; later
 * kernels use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
#if defined(arch_atomic64_fetch_add_unless)
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
1643
/*
 * Instrumented wrappers for the generic xchg()/cmpxchg()/cmpxchg64()
 * macro families.  Each macro evaluates @ptr exactly once into
 * __ai_ptr (so an argument with side effects is not re-evaluated by the
 * instrumentation and the arch_ call), reports the access to the
 * instrumentation core, then forwards the remaining arguments to the
 * corresponding arch_ macro.  The _acquire/_release/_relaxed forms are
 * only provided when the arch defines them; the fully-ordered form is
 * provided when the arch defines it directly or has no _relaxed form.
 *
 * NOTE(review): these RMW ops are annotated write-only here; later
 * kernels use instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
1751
/*
 * Instrumented wrappers for the remaining unconditional cmpxchg macro
 * variants (cmpxchg_local, cmpxchg64_local, sync_cmpxchg, and the
 * double-word cmpxchg_double/cmpxchg_double_local).  As above, each
 * evaluates @ptr exactly once into __ai_ptr before instrumenting and
 * forwarding to the arch_ macro.
 *
 * The cmpxchg_double*() forms instrument 2 * sizeof(*__ai_ptr) bytes,
 * matching the double-width region the arch op operates on.
 *
 * NOTE(review): annotated write-only here; later kernels use
 * instrument_atomic_read_write().  Fix via
 * scripts/atomic/gen-atomic-instrumented.sh, not by editing this file.
 */
#define cmpxchg_local(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg64_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);			\
})

#define sync_cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr));	\
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);			\
})


#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr));	\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})
1787
1788 #endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
1789 // 89bf97f3a7509b740845e51ddf31055b48a81f40