/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

/* Load v->counter with ACQUIRE ordering, unless the arch overrides it. */
#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

/* Store i into v->counter with RELEASE ordering, unless the arch overrides it. */
#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Besides, if an arch has a special barrier for acquire/release, it could
 * implement its own __atomic_op_* and use the same framework for building
 * variants
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
/* ACQUIRE: run the relaxed op, then order everything after it. */
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
/* RELEASE: order everything before, then run the relaxed op. */
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
/* Fully ordered: barriers on both sides of the relaxed op. */
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
/*
 * For each value-returning arithmetic op below: if the arch provides only
 * the fully ordered form, it serves as every variant; if the arch provides
 * a _relaxed form, any missing orderings are synthesised with the
 * __atomic_op_{acquire,release,fence}() wrappers above.
 */

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed	atomic_inc_return
#define  atomic_inc_return_acquire	atomic_inc_return
#define  atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed	atomic_dec_return
#define  atomic_dec_return_acquire	atomic_dec_return
#define  atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */
/*
 * atomic_fetch_* ops return the value *before* the modification.
 * fetch_inc/fetch_dec additionally fall back to fetch_add/fetch_sub of 1
 * when the arch supplies no dedicated implementation at all.
 */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */
/*
 * Bitwise atomic_fetch_* ops, built the same way as the arithmetic ones.
 * The andnot family is only provided when the arch defines atomic_andnot
 * itself; otherwise the generic fallbacks later in this file apply.
 */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed	atomic_fetch_or
#define atomic_fetch_or_acquire	atomic_fetch_or
#define atomic_fetch_or_release	atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/*
 * atomic_cmpxchg_relaxed
 *
 * NOTE: a failed cmpxchg provides no ordering whatever variant is used
 * (see the header comment at the top of this file).
 */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */
#ifndef atomic_try_cmpxchg

/*
 * Emulate try_cmpxchg on top of cmpxchg: attempt to swap *(_p) from *(_po)
 * to (_n); on failure, write the value actually observed back through _po
 * so the caller can retry without an extra load. Evaluates to true iff the
 * exchange succeeded. _po is evaluated once (via __po) to avoid
 * multiple-evaluation hazards.
 */
#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
/*
 * Plain (non-atomic_t) cmpxchg/xchg variant scaffolding, same scheme as
 * the atomic_* ops above.
 */

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
521 | /** |
522 | * atomic_add_unless - add unless the number is already a given value | |
523 | * @v: pointer of type atomic_t | |
524 | * @a: the amount to add to v... | |
525 | * @u: ...unless v is equal to u. | |
526 | * | |
527 | * Atomically adds @a to @v, so long as @v was not already @u. | |
528 | * Returns non-zero if @v was not @u, and zero otherwise. | |
529 | */ | |
530 | static inline int atomic_add_unless(atomic_t *v, int a, int u) | |
531 | { | |
532 | return __atomic_add_unless(v, a, u) != u; | |
533 | } | |
534 | ||
60063497 AS |
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
/*
 * Generic andnot fallbacks, expressed via and-with-complement, for arches
 * that provide no native atomic_andnot.
 */
#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif
573 | /** |
574 | * atomic_inc_not_zero_hint - increment if not null | |
575 | * @v: pointer of type atomic_t | |
576 | * @hint: probable value of the atomic before the increment | |
577 | * | |
578 | * This version of atomic_inc_not_zero() gives a hint of probable | |
579 | * value of the atomic. This helps processor to not read the memory | |
580 | * before doing the atomic read/modify/write cycle, lowering | |
581 | * number of bus transactions on some arches. | |
582 | * | |
583 | * Returns: 0 if increment was not done, 1 otherwise. | |
584 | */ | |
585 | #ifndef atomic_inc_not_zero_hint | |
586 | static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint) | |
587 | { | |
588 | int val, c = hint; | |
589 | ||
590 | /* sanity test, should be removed by compiler if hint is a constant */ | |
591 | if (!hint) | |
592 | return atomic_inc_not_zero(v); | |
593 | ||
594 | do { | |
595 | val = atomic_cmpxchg(v, c, c + 1); | |
596 | if (val == c) | |
597 | return 1; | |
598 | c = val; | |
599 | } while (c); | |
600 | ||
601 | return 0; | |
602 | } | |
603 | #endif | |
604 | ||
07b8ce1e AV |
605 | #ifndef atomic_inc_unless_negative |
606 | static inline int atomic_inc_unless_negative(atomic_t *p) | |
607 | { | |
608 | int v, v1; | |
609 | for (v = 0; v >= 0; v = v1) { | |
610 | v1 = atomic_cmpxchg(p, v, v + 1); | |
611 | if (likely(v1 == v)) | |
612 | return 1; | |
613 | } | |
614 | return 0; | |
615 | } | |
616 | #endif | |
617 | ||
618 | #ifndef atomic_dec_unless_positive | |
619 | static inline int atomic_dec_unless_positive(atomic_t *p) | |
620 | { | |
621 | int v, v1; | |
622 | for (v = 0; v <= 0; v = v1) { | |
623 | v1 = atomic_cmpxchg(p, v, v - 1); | |
624 | if (likely(v1 == v)) | |
625 | return 1; | |
626 | } | |
627 | return 0; | |
628 | } | |
629 | #endif | |
630 | ||
e79bee24 SL |
631 | /* |
632 | * atomic_dec_if_positive - decrement by 1 if old value positive | |
633 | * @v: pointer of type atomic_t | |
634 | * | |
635 | * The function returns the old value of *v minus 1, even if | |
636 | * the atomic variable, v, was not decremented. | |
637 | */ | |
638 | #ifndef atomic_dec_if_positive | |
639 | static inline int atomic_dec_if_positive(atomic_t *v) | |
640 | { | |
641 | int c, old, dec; | |
642 | c = atomic_read(v); | |
643 | for (;;) { | |
644 | dec = c - 1; | |
645 | if (unlikely(dec < 0)) | |
646 | break; | |
647 | old = atomic_cmpxchg((v), c, dec); | |
648 | if (likely(old == c)) | |
649 | break; | |
650 | c = old; | |
651 | } | |
652 | return dec; | |
653 | } | |
654 | #endif | |
655 | ||
/* Pull in the generic 64-bit implementation when the arch lacks one. */
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

/* Load v->counter with ACQUIRE ordering, unless the arch overrides it. */
#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

/* Store i into v->counter with RELEASE ordering, unless the arch overrides it. */
#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
/*
 * 64-bit counterparts of the atomic_{add,inc,sub,dec}_return scaffolding:
 * build missing orderings from the arch's _relaxed op, or alias everything
 * to the fully ordered op when no _relaxed form exists.
 */

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed	atomic64_inc_return
#define  atomic64_inc_return_acquire	atomic64_inc_return
#define  atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed	atomic64_dec_return
#define  atomic64_dec_return_acquire	atomic64_dec_return
#define  atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */
766 | /* atomic64_fetch_add_relaxed */ | |
767 | #ifndef atomic64_fetch_add_relaxed | |
768 | #define atomic64_fetch_add_relaxed atomic64_fetch_add | |
769 | #define atomic64_fetch_add_acquire atomic64_fetch_add | |
770 | #define atomic64_fetch_add_release atomic64_fetch_add | |
771 | ||
772 | #else /* atomic64_fetch_add_relaxed */ | |
773 | ||
774 | #ifndef atomic64_fetch_add_acquire | |
775 | #define atomic64_fetch_add_acquire(...) \ | |
776 | __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__) | |
777 | #endif | |
778 | ||
779 | #ifndef atomic64_fetch_add_release | |
780 | #define atomic64_fetch_add_release(...) \ | |
781 | __atomic_op_release(atomic64_fetch_add, __VA_ARGS__) | |
782 | #endif | |
783 | ||
784 | #ifndef atomic64_fetch_add | |
785 | #define atomic64_fetch_add(...) \ | |
786 | __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__) | |
787 | #endif | |
788 | #endif /* atomic64_fetch_add_relaxed */ | |
789 | ||
f0662863 DB |
790 | /* atomic64_fetch_inc_relaxed */ |
791 | #ifndef atomic64_fetch_inc_relaxed | |
792 | ||
793 | #ifndef atomic64_fetch_inc | |
794 | #define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v)) | |
795 | #define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v)) | |
796 | #define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v)) | |
797 | #define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v)) | |
798 | #else /* atomic64_fetch_inc */ | |
799 | #define atomic64_fetch_inc_relaxed atomic64_fetch_inc | |
800 | #define atomic64_fetch_inc_acquire atomic64_fetch_inc | |
801 | #define atomic64_fetch_inc_release atomic64_fetch_inc | |
802 | #endif /* atomic64_fetch_inc */ | |
803 | ||
804 | #else /* atomic64_fetch_inc_relaxed */ | |
805 | ||
806 | #ifndef atomic64_fetch_inc_acquire | |
807 | #define atomic64_fetch_inc_acquire(...) \ | |
808 | __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__) | |
809 | #endif | |
810 | ||
811 | #ifndef atomic64_fetch_inc_release | |
812 | #define atomic64_fetch_inc_release(...) \ | |
813 | __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__) | |
814 | #endif | |
815 | ||
816 | #ifndef atomic64_fetch_inc | |
817 | #define atomic64_fetch_inc(...) \ | |
818 | __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__) | |
819 | #endif | |
820 | #endif /* atomic64_fetch_inc_relaxed */ | |
821 | ||
28aa2bda PZ |
822 | /* atomic64_fetch_sub_relaxed */ |
823 | #ifndef atomic64_fetch_sub_relaxed | |
824 | #define atomic64_fetch_sub_relaxed atomic64_fetch_sub | |
825 | #define atomic64_fetch_sub_acquire atomic64_fetch_sub | |
826 | #define atomic64_fetch_sub_release atomic64_fetch_sub | |
827 | ||
828 | #else /* atomic64_fetch_sub_relaxed */ | |
829 | ||
830 | #ifndef atomic64_fetch_sub_acquire | |
831 | #define atomic64_fetch_sub_acquire(...) \ | |
832 | __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__) | |
833 | #endif | |
834 | ||
835 | #ifndef atomic64_fetch_sub_release | |
836 | #define atomic64_fetch_sub_release(...) \ | |
837 | __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__) | |
838 | #endif | |
839 | ||
840 | #ifndef atomic64_fetch_sub | |
841 | #define atomic64_fetch_sub(...) \ | |
842 | __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__) | |
843 | #endif | |
844 | #endif /* atomic64_fetch_sub_relaxed */ | |
845 | ||
/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
/*
 * Neither fetch_dec nor fetch_dec_relaxed is provided by the
 * architecture: derive every ordering variant from the matching
 * fetch_sub variant with a constant decrement of 1, inheriting that
 * variant's ordering guarantees.
 */
#define atomic64_fetch_dec(v)	        atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
/*
 * A fully ordered atomic64_fetch_dec exists; it satisfies all weaker
 * orderings, so alias the variants to it.
 */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

/*
 * The _relaxed primitive exists; build the missing variants by adding
 * barriers around it with the generic __atomic_op_*() helpers.
 */
#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */
877 | ||
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
/*
 * Only a fully ordered atomic64_fetch_or is provided; a fully ordered
 * op is a valid implementation of every weaker variant.
 */
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

/*
 * Build the variants the architecture did not supply from the _relaxed
 * primitive plus the appropriate barrier.
 */
#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */
901 | ||
/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
/*
 * Only a fully ordered atomic64_fetch_and is provided; alias the
 * weaker variants to it.
 */
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

/*
 * Build any missing variant from the _relaxed primitive plus the
 * appropriate barrier.
 */
#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */
925 | ||
/*
 * The fetch_andnot variants are only generated here when the
 * architecture provides a native atomic64_andnot; otherwise the
 * generic static-inline fallbacks (built on atomic64_fetch_and of the
 * complement) defined further down in this file supply all variants.
 */
#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
/* Only a fully ordered op exists; alias the weaker variants to it. */
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

/* Build missing variants from _relaxed plus the appropriate barrier. */
#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */
951 | ||
/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
/*
 * Only a fully ordered atomic64_fetch_xor is provided; alias the
 * weaker variants to it.
 */
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

/*
 * Build any missing variant from the _relaxed primitive plus the
 * appropriate barrier.
 */
#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */
975 | ||
976 | ||
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
/*
 * Only a fully ordered atomic64_xchg is provided; a fully ordered op
 * satisfies the acquire/release/relaxed contracts, so alias them.
 */
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

/*
 * Build the variants the architecture did not supply from the _relaxed
 * primitive plus the appropriate barrier.
 */
#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */
1000 | ||
/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
/*
 * Only a fully ordered atomic64_cmpxchg is provided; alias the weaker
 * variants to it.  Note (per the header comment at the top of this
 * file) that a failed cmpxchg_acquire implies no ordering either way.
 */
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

/*
 * Build the variants the architecture did not supply from the _relaxed
 * primitive plus the appropriate barrier.
 */
#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */
1024 | ||
#ifndef atomic64_try_cmpxchg

/*
 * Generic try_cmpxchg fallback, built on cmpxchg.
 *
 * atomic64_try_cmpxchg(p, po, n) atomically compares *p against *po
 * and, if they match, stores n in *p.  It evaluates to true on
 * success.  On failure the value actually observed in *p is written
 * back through po, so a retry loop does not need an extra reload.
 *
 * Implemented as a GNU statement expression; 'type' selects the
 * ordering variant suffix (_relaxed/_acquire/_release or none) of the
 * underlying cmpxchg.  _po is read exactly once into __po up front to
 * avoid multiple evaluation of the macro argument.
 */
#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
/*
 * The architecture provides a fully ordered atomic64_try_cmpxchg;
 * alias the weaker variants to it.
 */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
1047 | ||
#ifndef atomic64_andnot
/*
 * Generic fallbacks for architectures without a native atomic64_andnot:
 * andnot(i, v) clears in *v the bits that are set in i, i.e. it is
 * atomic64_and() with the complemented operand.  Each fetch variant
 * simply forwards to the atomic64_fetch_and*() variant of the same
 * ordering and therefore inherits that variant's ordering guarantees.
 */
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

/* Returns the value of *v prior to the operation; fully ordered. */
static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

/* As above, with no ordering guarantees. */
static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

/* As above, with ACQUIRE semantics on the load portion. */
static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

/* As above, with RELEASE semantics on the store portion. */
static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif
1074 | ||
90fe6514 PZ |
1075 | #include <asm-generic/atomic-long.h> |
1076 | ||
3f9d35b9 | 1077 | #endif /* _LINUX_ATOMIC_H */ |