#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i) { (i) }

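/*
 * atomic_read()/atomic_set() are plain loads and stores; the %Un/%Xn
 * operand modifiers let the compiler emit the update or indexed forms
 * of lwz/stw when it picks such an addressing mode.
 */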
static __inline__ int atomic_read(const atomic_t *v)
{
        int t;

        __asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

        return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
        __asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
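
/*
 * The read-modify-write operations below all follow the same pattern:
 * lwarx loads the counter and sets a reservation, stwcx. stores the
 * updated value only if the reservation still holds, and bne- retries
 * the loop if the store-conditional failed.  PPC405_ERR77() expands to
 * a workaround for an erratum in the IBM 405 core when
 * CONFIG_IBM405_ERR77 is enabled, and to nothing otherwise.
 */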

static __inline__ void atomic_add(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%3         # atomic_add\n\
        add     %0,%2,%0\n"
        PPC405_ERR77(0,%3)
"       stwcx.  %0,0,%3 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (a), "r" (&v->counter)
        : "cc");
}
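
/*
 * The value-returning variants wrap the loop in PPC_ATOMIC_ENTRY_BARRIER
 * and PPC_ATOMIC_EXIT_BARRIER so they act as full memory barriers, and
 * they clobber "memory" so the compiler does not move other accesses
 * across them.
 */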

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%2         # atomic_add_return\n\
        add     %0,%1,%0\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%2 \n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%3         # atomic_sub\n\
        subf    %0,%2,%0\n"
        PPC405_ERR77(0,%3)
"       stwcx.  %0,0,%3 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (a), "r" (&v->counter)
        : "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%2         # atomic_sub_return\n\
        subf    %0,%1,%0\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%2 \n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

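/*
 * The inc/dec variants use addic, which updates the carry (CA) bit,
 * so XER has to be listed among the clobbers.
 */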
static __inline__ void atomic_inc(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2         # atomic_inc\n\
        addic   %0,%0,1\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%2 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (&v->counter)
        : "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%1         # atomic_inc_return\n\
        addic   %0,%0,1\n"
        PPC405_ERR77(0,%1)
"       stwcx.  %0,0,%1 \n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");

        return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2         # atomic_dec\n\
        addic   %0,%0,-1\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%2\n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (&v->counter)
        : "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%1         # atomic_dec_return\n\
        addic   %0,%0,-1\n"
        PPC405_ERR77(0,%1)
"       stwcx.  %0,0,%1\n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");

        return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int t;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%1         # __atomic_add_unless\n\
        cmpw    0,%0,%3 \n\
        beq-    2f \n\
        add     %0,%2,%0 \n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%1 \n\
        bne-    1b \n"
        PPC_ATOMIC_EXIT_BARRIER
"       subf    %0,%2,%0 \n\
2:"
        : "=&r" (t)
        : "r" (&v->counter), "r" (a), "r" (u)
        : "cc", "memory");

        return t;
}
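
/*
 * Note on __atomic_add_unless() above: when the stwcx. succeeds, t
 * holds old + a, so the trailing subf recovers the old value; on the
 * early exit (old == u) the branch to 2: is taken with the old value
 * already in t.
 */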

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
        int t1, t2;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%2         # atomic_inc_not_zero\n\
        cmpwi   0,%0,0\n\
        beq-    2f\n\
        addic   %1,%0,1\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %1,0,%2\n\
        bne-    1b\n"
        PPC_ATOMIC_EXIT_BARRIER
        "\n\
2:"
        : "=&r" (t1), "=&r" (t2)
        : "r" (&v->counter)
        : "cc", "xer", "memory");

        return t1;
}
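
/*
 * A function-like macro never expands recursively, so this define is
 * harmless; it only makes "#ifndef atomic_inc_not_zero" in the generic
 * <linux/atomic.h> fail, telling it that this architecture provides
 * its own implementation.
 */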
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%1         # atomic_dec_if_positive\n\
        cmpwi   %0,1\n\
        addi    %0,%0,-1\n\
        blt-    2f\n"
        PPC405_ERR77(0,%1)
"       stwcx.  %0,0,%1\n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        "\n\
2:"     : "=&b" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}
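
/*
 * The "b" constraint above keeps t out of r0: addi treats rA == r0 as
 * the constant zero rather than as a register operand.
 */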

#define smp_mb__before_atomic_dec() smp_mb()
#define smp_mb__after_atomic_dec() smp_mb()
#define smp_mb__before_atomic_inc() smp_mb()
#define smp_mb__after_atomic_inc() smp_mb()

#ifdef __powerpc64__

#define ATOMIC64_INIT(i) { (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
        long t;

        __asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

        return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
        __asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
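
/*
 * The 64-bit variants mirror the 32-bit ones, using the doubleword
 * ldarx/stdcx. reservation pair.  The PPC405_ERR77() workaround is not
 * needed here, since the 405 core is 32-bit only.
 */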

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%3         # atomic64_add\n\
        add     %0,%2,%0\n\
        stdcx.  %0,0,%3 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (a), "r" (&v->counter)
        : "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%2         # atomic64_add_return\n\
        add     %0,%1,%0\n\
        stdcx.  %0,0,%2 \n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%3         # atomic64_sub\n\
        subf    %0,%2,%0\n\
        stdcx.  %0,0,%3 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (a), "r" (&v->counter)
        : "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%2         # atomic64_sub_return\n\
        subf    %0,%1,%0\n\
        stdcx.  %0,0,%2 \n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2         # atomic64_inc\n\
        addic   %0,%0,1\n\
        stdcx.  %0,0,%2 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (&v->counter)
        : "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%1         # atomic64_inc_return\n\
        addic   %0,%0,1\n\
        stdcx.  %0,0,%1 \n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");

        return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2         # atomic64_dec\n\
        addic   %0,%0,-1\n\
        stdcx.  %0,0,%2\n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (&v->counter)
        : "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%1         # atomic64_dec_return\n\
        addic   %0,%0,-1\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");

        return t;
}

#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%1         # atomic64_dec_if_positive\n\
        addic.  %0,%0,-1\n\
        blt-    2f\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        "\n\
2:"     : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");

        return t;
}
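
/*
 * Unlike the 32-bit version, atomic64_dec_if_positive() uses the
 * record form addic., which sets CR0 from the decremented value, so no
 * separate compare is needed before the blt-.
 */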

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was done, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long t;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%1         # atomic64_add_unless\n\
        cmpd    0,%0,%3 \n\
        beq-    2f \n\
        add     %0,%2,%0 \n"
"       stdcx.  %0,0,%1 \n\
        bne-    1b \n"
        PPC_ATOMIC_EXIT_BARRIER
"       subf    %0,%2,%0 \n\
2:"
        : "=&r" (t)
        : "r" (&v->counter), "r" (a), "r" (u)
        : "cc", "memory");

        return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
        long t1, t2;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%2         # atomic64_inc_not_zero\n\
        cmpdi   0,%0,0\n\
        beq-    2f\n\
        addic   %1,%0,1\n\
        stdcx.  %1,0,%2\n\
        bne-    1b\n"
        PPC_ATOMIC_EXIT_BARRIER
        "\n\
2:"
        : "=&r" (t1), "=&r" (t2)
        : "r" (&v->counter)
        : "cc", "xer", "memory");

        return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */