/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>		/* R10000_LLSC_WAR */
#define ATOMIC_INIT(i)		{ (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
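/*
 * Example (editor's sketch, not part of the original file): declaring and
 * using an atomic counter.  The identifiers nr_events and report_events()
 * are hypothetical.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	static void report_events(void)
 *	{
 *		pr_info("events so far: %d\n", atomic_read(&nr_events));
 *		atomic_set(&nr_events, 0);
 *	}
 *
 * Note that atomic_read()/atomic_set() are plain (volatile) accesses with
 * no ordering guarantees; use the *_return operations or the explicit
 * barriers at the end of this file when ordering matters.
 */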
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_add	\n"
			"	addu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_sub	\n"
			"	subu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
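/*
 * Example (editor's sketch): atomic_add()/atomic_sub() update @v atomically
 * but return nothing and imply no memory barrier, so they suit statistics
 * counters where only the final value matters.  rx_bytes, len and dropped
 * are hypothetical.
 *
 *	static atomic_t rx_bytes = ATOMIC_INIT(0);
 *
 *	atomic_add(len, &rx_bytes);		// on receive
 *	atomic_sub(dropped, &rx_bytes);		// on accounting fixup
 */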
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_add_return	\n"
			"	addu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_sub_return	\n"
			"	subu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
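/*
 * Example (editor's sketch): unlike atomic_add()/atomic_sub(), the *_return
 * variants are serializing (smp_mb__before_llsc() before the ll/sc sequence,
 * smp_llsc_mb() after) and hand back the new value, so they can implement
 * watermark checks.  queue_depth and limit are hypothetical.
 *
 *	if (atomic_add_return(1, &queue_depth) > limit) {
 *		atomic_sub(1, &queue_depth);	// back out, over the limit
 *		return -EBUSY;
 *	}
 */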
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
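/*
 * Example (editor's sketch): atomic_sub_if_positive() only commits the
 * subtraction when the result stays >= 0, which is the primitive behind
 * semaphore-style "down" operations and atomic_dec_if_positive() below.
 * tokens is hypothetical.
 *
 *	if (atomic_sub_if_positive(1, &tokens) < 0)
 *		return -EAGAIN;		// no token taken, nothing to undo
 */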
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
/*
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
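/*
 * Example (editor's sketch): __atomic_add_unless() is the cmpxchg-loop
 * building block behind atomic_add_unless()/atomic_inc_not_zero() in
 * <linux/atomic.h>.  A typical lookup that only takes a reference while
 * the count is still non-zero (obj is hypothetical):
 *
 *	if (__atomic_add_unless(&obj->refs, 1, 0) == 0)
 *		return NULL;	// object already being torn down
 */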
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
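/*
 * Example (editor's sketch): the classic reference-counting pattern built
 * from the macros above.  kref-style helpers exist in <linux/kref.h>; this
 * open-coded form (obj and obj_release() hypothetical) shows the primitive:
 *
 *	static void obj_put(struct obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			obj_release(obj);	// we dropped the last reference
 *	}
 */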
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }
/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)
/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_add	\n"
			"	daddu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_sub	\n"
			"	dsubu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_add_return	\n"
			"	daddu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_sub_return	\n"
			"	dsubu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
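/*
 * Example (editor's sketch): the atomic64_* family mirrors the 32-bit API
 * for quantities that may overflow an int, e.g. byte counters.  tx_bytes
 * and len are hypothetical.
 *
 *	static atomic64_t tx_bytes = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &tx_bytes);
 *	pr_debug("sent %ld bytes\n", atomic64_read(&tx_bytes));
 */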
#endif /* CONFIG_64BIT */
/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
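/*
 * Example (editor's sketch): because the non-returning operations are not
 * barriers, code that orders another store against an atomic_dec() must
 * say so explicitly (done and pending are hypothetical):
 *
 *	done = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&pending);	// an observer of pending==0 also sees done==1
 */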
#endif /* _ASM_ATOMIC_H */