/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
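
/*
 * In an ll/sc sequence, "ll" (load linked) reads the word and links to
 * its cache line; "sc" (store conditional) stores only if no other CPU
 * wrote the line in between, depositing 1 (success) or 0 (failure) in
 * its source register, and "beqz(l)" loops back to retry the whole
 * read-modify-write on failure.  Hand-written sketch (illustrative, not
 * generated by this file) of an atomic add of $a0 into the word at $a1:
 *
 *	1:	ll	$t0, 0($a1)	(linked load of the counter)
 *		addu	$t0, $a0	(apply the operation)
 *		sc	$t0, 0($a1)	(try to store; $t0 = success flag)
 *		beqz	$t0, 1b		(raced with another writer: retry)
 */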
#define ATOMIC_INIT(i)		{ (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
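
/*
 * Example usage (illustrative; "nr_pending" is a hypothetical counter):
 *
 *	static atomic_t nr_pending = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_pending, 0);
 *	if (atomic_read(&nr_pending) != 0)
 *		...;
 *
 * Note that atomic_read()/atomic_set() are plain tearing-free loads and
 * stores: no read-modify-write and no memory barriers are implied.
 */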
#define ATOMIC_OP(op, c_op, asm_op)					\
static __inline__ void atomic_##op(int i, atomic_t * v)		\
{									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%0, %1		# atomic_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	sc	%0, %1				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
		: "Ir" (i));						\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}
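
/*
 * As a concrete illustration (hand-expanded, not literal preprocessor
 * output), ATOMIC_OPS(add, +=, addu) below turns this template into
 *
 *	static __inline__ void atomic_add(int i, atomic_t *v)
 *	{
 *		...ll/addu/sc retry loop, or, on !kernel_uses_llsc
 *		CPUs, "v->counter += i" under raw_local_irq_save()...
 *	}
 *
 * These void ops return nothing and imply no memory barriers.
 */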
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
{									\
	int result;							\
									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%1, %2	# atomic_" #op "_return	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	sc	%0, %2				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	pop				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i));						\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
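
/*
 * The _return variants yield the value *after* the operation; the
 * second "#asm_op" above recomputes it from temp because sc overwrote
 * %0 with its success flag.  Illustrative use (the counter name is
 * hypothetical):
 *
 *	int users = atomic_add_return_relaxed(1, &nr_users);
 */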
#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	\
{									\
	int result;							\
									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%1, %2	# atomic_fetch_" #op "	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	sc	%0, %2				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	.set	pop				\n"	\
		"	move	%0, %1				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i));						\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
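
/*
 * The fetch_ variants yield the value *before* the operation, which is
 * why the trailing "move %0, %1" above copies the ll-loaded value into
 * the result register once sc has succeeded.  Illustrative use (ticket
 * allocation; "next_ticket" is hypothetical):
 *
 *	int mine = atomic_fetch_add_relaxed(1, &next_ticket);
 */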
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC_FETCH_OP(op, c_op, asm_op)
ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)
#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed
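
/*
 * Only the _relaxed forms are defined here; the #defines above tell
 * <linux/atomic.h> so, and the generic layer then synthesizes the
 * fully-ordered atomic_add_return() etc.  Conceptually (a sketch of the
 * generic fallback, not code from this file):
 *
 *	smp_mb__before_atomic();
 *	ret = atomic_add_return_relaxed(i, v);
 *	smp_mb__after_atomic();
 */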
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_FETCH_OP(op, c_op, asm_op)
ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed
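
/*
 * The bitwise fetch forms turn atomic_or() into a test-and-set.
 * Illustrative sketch (FLAG_BUSY and "state" are hypothetical):
 *
 *	if (atomic_fetch_or_relaxed(FLAG_BUSY, &state) & FLAG_BUSY)
 *		return -EBUSY;	(someone else already set the flag)
 */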
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	.set	pop					\n"
		"	subu	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 1f					\n"
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	sc	%1, %2					\n"
		"\t" __scbeqz "	%1, 1b					\n"
		"1:							\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
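
/*
 * Illustrative use (the name "sem_count" is hypothetical): a
 * semaphore-style down that never takes the count below zero:
 *
 *	if (atomic_sub_if_positive(1, &sem_count) < 0)
 *		return -EAGAIN;	(count was already 0, @v unchanged)
 *
 * A negative return value means the subtraction was not performed.
 */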
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
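
/*
 * atomic_cmpxchg() returns the value that was in @v before the attempt;
 * the store happened iff that equals the expected old value.  The
 * canonical retry loop looks like this (illustrative sketch of an
 * increment that tolerates racing writers):
 *
 *	int old = atomic_read(&v);
 *
 *	for (;;) {
 *		int cur = atomic_cmpxchg(&v, old, old + 1);
 *		if (cur == old)
 *			break;	(success: v went old -> old + 1)
 *		old = cur;	(raced: retry with the fresh value)
 *	}
 */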
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }
/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)
/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))
#define ATOMIC64_OP(op, c_op, asm_op)					\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
{									\
	if (kernel_uses_llsc) {						\
		long temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%0, %1	# atomic64_" #op "	\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	scd	%0, %1				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
		: "Ir" (i));						\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}
#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
{									\
	long result;							\
									\
	if (kernel_uses_llsc) {						\
		long temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%1, %2	# atomic64_" #op "_return\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	scd	%0, %2				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	pop				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i));						\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
#define ATOMIC64_FETCH_OP(op, c_op, asm_op)				\
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v) \
{									\
	long result;							\
									\
	if (kernel_uses_llsc) {						\
		long temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%1, %2	# atomic64_fetch_" #op "\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	scd	%0, %2				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	.set	pop				\n"	\
		"	move	%0, %1				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i));						\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC64_FETCH_OP(op, c_op, asm_op)
ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)
#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed
#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_FETCH_OP(op, c_op, asm_op)
ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed
#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 * variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 1f					\n"
		"	scd	%1, %2					\n"
		"\t" __scbeqz "	%1, 1b					\n"
		"1:							\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
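
/*
 * The __typeof__ cast keeps the result typed as the 64-bit counter
 * (long) regardless of how cmpxchg() is implemented.  Illustrative use
 * (v64 is a hypothetical atomic64_t), claiming ownership by moving
 * 0 -> 1 and observing the previous value:
 *
 *	if (atomic64_cmpxchg(&v64, 0, 1) == 0)
 *		we_own_it();	(hypothetical helper)
 */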
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */