/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)	  { (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
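/*
 * A minimal usage sketch of the two accessors above; the variable name
 * "example_count" is hypothetical and the block is compiled out.
 */
#if 0
static atomic_t example_count = ATOMIC_INIT(0);

static int example_accessors(void)
{
	atomic_set(&example_count, 10);		/* plain, unordered store */
	return atomic_read(&example_count);	/* plain, unordered load */
}
#endif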
#define ATOMIC_OP(op, c_op, asm_op)					      \
static __inline__ void atomic_##op(int i, atomic_t * v)		      \
{									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	ll	%0, %1		# atomic_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	sc	%0, %1					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	ll	%0, %1		# atomic_" #op "\n"   \
			"	" #asm_op " %0, %2			\n"   \
			"	sc	%0, %1				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i));					      \
		} while (unlikely(!temp));				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ int atomic_##op##_return(int i, atomic_t * v)	      \
{									      \
	int result;							      \
									      \
	smp_mb__before_llsc();						      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	ll	%1, %2		# atomic_" #op "_return	\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	sc	%0, %2					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	ll	%1, %2	# atomic_" #op "_return	\n"   \
			"	" #asm_op " %0, %1, %3			\n"   \
			"	sc	%0, %2				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp; result c_op i;				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	smp_llsc_mb();							      \
									      \
	return result;							      \
}
#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

ATOMIC_OP(and, &=, and)
ATOMIC_OP(or, |=, or)
ATOMIC_OP(xor, ^=, xor)
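/*
 * A compiled-out sketch of the functions the instantiations above
 * generate; "example_flags" is a hypothetical variable.  Note that only
 * add and sub get a *_return variant here, and that atomic_*_return is
 * fully ordered while the void forms are not.
 */
#if 0
static atomic_t example_flags = ATOMIC_INIT(0);

static int example_generated_ops(void)
{
	atomic_add(4, &example_flags);		/* atomic, no barrier, no result */
	atomic_and(~1, &example_flags);		/* bitwise op, void form only */
	return atomic_sub_return(1, &example_flags); /* barriers + new value */
}
#endif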
#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
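/*
 * A compiled-out sketch of the classic retry loop built on
 * atomic_cmpxchg() above, here adding to a counter while saturating at a
 * ceiling; "example_add_capped" and its parameters are hypothetical.
 */
#if 0
static int example_add_capped(atomic_t *v, int add, int cap)
{
	int old, new;

	do {
		old = atomic_read(v);
		new = old + add;
		if (new > cap)
			new = cap;	/* saturate instead of passing the cap */
	} while (atomic_cmpxchg(v, old, new) != old);	/* retry on races */

	return new;
}
#endif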
/*
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
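/*
 * A compiled-out sketch of __atomic_add_unless() above used as a
 * "take a reference unless the object is already dead" helper;
 * "example_tryget" is hypothetical.
 */
#if 0
static int example_tryget(atomic_t *refs)
{
	/* Add 1 unless the count is 0; an old value of 0 means we failed. */
	return __atomic_add_unless(refs, 1, 0) != 0;
}
#endif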
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
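/*
 * A compiled-out sketch of atomic_dec_and_test() and
 * atomic_dec_if_positive() above; struct example_obj and
 * example_release() are hypothetical.
 */
#if 0
struct example_obj {
	atomic_t refs;
	atomic_t credits;
};

static void example_release(struct example_obj *obj);

static void example_put(struct example_obj *obj)
{
	if (atomic_dec_and_test(&obj->refs))	/* true only for the final put */
		example_release(obj);
}

static int example_take_credit(struct example_obj *obj)
{
	/* Decrement only while positive; < 0 means no credit was taken. */
	return atomic_dec_if_positive(&obj->credits) >= 0;
}
#endif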
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }
/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)
/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))
#define ATOMIC64_OP(op, c_op, asm_op)					      \
static __inline__ void atomic64_##op(long i, atomic64_t * v)		      \
{									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	lld	%0, %1		# atomic64_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	scd	%0, %1					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	lld	%0, %1		# atomic64_" #op "\n"  \
			"	" #asm_op " %0, %2			\n"   \
			"	scd	%0, %1				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i));					      \
		} while (unlikely(!temp));				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}
#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	      \
{									      \
	long result;							      \
									      \
	smp_mb__before_llsc();						      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	lld	%1, %2		# atomic64_" #op "_return\n"  \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	scd	%0, %2					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	lld	%1, %2	# atomic64_" #op "_return\n"  \
			"	" #asm_op " %0, %1, %3			\n"   \
			"	scd	%0, %2				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp; result c_op i;				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	smp_llsc_mb();							      \
									      \
	return result;							      \
}
#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

ATOMIC64_OP(and, &=, and)
ATOMIC64_OP(or, |=, or)
ATOMIC64_OP(xor, ^=, xor)
#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *			      variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
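/*
 * A compiled-out sketch of atomic64_inc_not_zero() above guarding a
 * lookup: the reference is taken only while the 64-bit count is still
 * non-zero; "example64_tryget" is hypothetical.
 */
#if 0
static int example64_tryget(atomic64_t *refs)
{
	return atomic64_inc_not_zero(refs);	/* 0 once refs has hit zero */
}
#endif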
/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */