#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

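/*
 * Usage sketch (illustrative, not part of the original header); the
 * counter name "nr_events" is hypothetical:
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	int snapshot = atomic_read(&nr_events);
 *	atomic_set(&nr_events, 0);
 *
 * atomic_read()/atomic_set() compile to a single lwz/stw: aligned word
 * accesses are single-copy atomic on PowerPC, so no larx/stcx. loop is
 * needed, and no memory-ordering guarantee is implied.
 */
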
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

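/*
 * Note on the pattern above (editorial sketch, not from the original
 * header): lwarx/stwcx. is PowerPC's load-linked/store-conditional.
 * lwarx loads the word and places a reservation on it; stwcx. stores
 * only if the reservation is still intact; bne- loops on failure. In
 * rough C, with a hypothetical __cas() standing in for the hardware
 * reservation:
 *
 *	int old, new;
 *	do {
 *		old = v->counter;		// lwarx
 *		new = old + a;			// add
 *	} while (!__cas(&v->counter, old, new));	// stwcx. + bne-
 *
 * atomic_add() has no entry/exit barriers because non-value-returning
 * atomics make no ordering guarantees; atomic_add_return() brackets the
 * loop with PPC_ATOMIC_ENTRY/EXIT_BARRIER to order it fully.
 */
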
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

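/*
 * Example (illustrative, not from the original header): bias a counter
 * negative so the CPU whose increment brings it to zero knows it was
 * the last to finish; "outstanding" and all_done() are hypothetical.
 *
 *	atomic_set(&outstanding, -nr_reqs);
 *	...
 *	if (atomic_inc_and_test(&outstanding))	// result reached zero
 *		all_done();
 */
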
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

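/*
 * Example (illustrative): an open-coded compare-and-swap loop built on
 * atomic_cmpxchg(), which returns the value it found; the loop retries
 * until the read and the swap agree. "CEILING" is a hypothetical bound.
 *
 *	int old, new;
 *	do {
 *		old = atomic_read(v);
 *		new = old < CEILING ? old + 1 : old;
 *	} while (atomic_cmpxchg(v, old, new) != old);
 */
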
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}

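/*
 * Note: __atomic_add_unless() returns the value it observed, so callers
 * compare against @u to see whether the add happened. The generic
 * atomic_add_unless() in <linux/atomic.h> is built on exactly that:
 *
 *	static inline int atomic_add_unless(atomic_t *v, int a, int u)
 *	{
 *		return __atomic_add_unless(v, a, u) != u;
 *	}
 */
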
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

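/*
 * Example (illustrative, hypothetical lookup path): taking a reference
 * to an object found under RCU, where a refcount of zero means the
 * object is already being torn down.
 *
 *	rcu_read_lock();
 *	obj = radix_tree_lookup(&tree, id);
 *	if (obj && !atomic_inc_not_zero(&obj->refs))
 *		obj = NULL;	// lost the race with the final put
 *	rcu_read_unlock();
 */
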
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

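/*
 * Example (illustrative): the classic reference-count put, where the
 * thread dropping the final reference frees the object; "obj" and
 * obj_free() are hypothetical.
 *
 *	if (atomic_dec_and_test(&obj->refs))
 *		obj_free(obj);
 */
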
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;
	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");
	return t;
}

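/*
 * Example (illustrative): a token bucket where a negative return means
 * no token was available and the counter was left untouched; "tokens"
 * is hypothetical.
 *
 *	if (atomic_dec_if_positive(&tokens) < 0)
 *		return -EAGAIN;
 */
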
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()

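/*
 * Example (illustrative): these barriers pair with the non-returning
 * atomics above, which on their own guarantee no ordering; "obj" is
 * hypothetical.
 *
 *	obj->status = DONE;
 *	smp_mb__before_atomic_dec();	// make the store visible first
 *	atomic_dec(&obj->pending);
 */
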
#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# __atomic_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

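/*
 * Example (illustrative): unlike __atomic_add_unless() above, this
 * returns a boolean, so "consume one unless exhausted" reads directly;
 * "budget" is a hypothetical atomic64_t.
 *
 *	if (!atomic64_add_unless(&budget, -1, 0))
 *		return -EBUSY;	// counter was already zero
 */
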
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */