/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)	  { (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
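
/*
 * Illustrative sketch (editorial addition, not part of the original
 * header): a counter would be defined as
 *	static atomic_t example_users = ATOMIC_INIT(0);
 * and poked with the two macros above. Note that neither atomic_read()
 * nor atomic_set() implies a memory barrier. example_reset() is a
 * hypothetical helper name.
 */
static __inline__ int example_reset(atomic_t *v)
{
	atomic_set(v, 0);		/* plain store to v->counter */
	return atomic_read(v);		/* plain volatile load */
}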

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_add	\n"
			"	addu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
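
/*
 * Illustrative note (editorial addition): atomic_add() picks one of
 * three strategies at run time: a branch-likely LL/SC loop on R10000
 * parts needing the workaround, a plain LL/SC retry loop elsewhere, or
 * an irq-disabled read-modify-write on CPUs without ll/sc. Callers see
 * one interface regardless; stat_bytes below is a hypothetical name.
 */
static __inline__ void example_account(atomic_t *stat_bytes, int len)
{
	atomic_add(len, stat_bytes);	/* no return value, implies no barrier */
}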

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_sub	\n"
			"	subu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_add_return	\n"
			"	addu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
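
/*
 * Illustrative sketch (editorial addition): atomic_add_return() hands
 * back the post-add value and is fully serializing, so it can mint
 * unique tokens. example_next_id is a hypothetical helper name.
 */
static __inline__ int example_next_id(atomic_t *seq)
{
	return atomic_add_return(1, seq);	/* each caller gets a distinct value */
}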

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");

		result = temp - i;
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_sub_return	\n"
			"	subu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
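
/*
 * Illustrative sketch (editorial addition): a token pool that only
 * consumes when enough tokens remain. A negative return from
 * atomic_sub_if_positive() means the pool was left untouched.
 * example_take is a hypothetical helper name.
 */
static __inline__ int example_take(atomic_t *pool, int n)
{
	return atomic_sub_if_positive(n, pool) >= 0;	/* 1 on success */
}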

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
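
/*
 * Illustrative sketch (editorial addition): the canonical cmpxchg()
 * retry loop, here clamping an increment at a ceiling. The name
 * example_inc_below is hypothetical.
 */
static __inline__ int example_inc_below(atomic_t *v, int ceiling)
{
	int c, old;

	c = atomic_read(v);
	while (c < ceiling) {
		old = atomic_cmpxchg(v, c, c + 1);
		if (old == c)
			break;		/* our update won the race */
		c = old;		/* somebody else moved v; retry */
	}
	return c;			/* value observed before our update */
}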

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
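
/*
 * Illustrative sketch (editorial addition): refusing to revive a dead
 * reference count, the pattern behind atomic_inc_not_zero(). The name
 * example_get_live is hypothetical.
 */
static __inline__ int example_get_live(atomic_t *refcnt)
{
	/* Old value was non-zero iff the increment actually happened. */
	return __atomic_add_unless(refcnt, 1, 0) != 0;
}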

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
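
/*
 * Illustrative sketch (editorial addition): the classic refcount
 * pattern built from the helpers above. example_get/example_put are
 * hypothetical names.
 */
static __inline__ void example_get(atomic_t *refcnt)
{
	atomic_inc(refcnt);
}

static __inline__ int example_put(atomic_t *refcnt)
{
	/* True only for the caller that drops the final reference. */
	return atomic_dec_and_test(refcnt);
}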

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_add	\n"
			"	daddu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_sub	\n"
			"	dsubu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_add_return	\n"
			"	daddu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_sub_return	\n"
			"	dsubu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
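
/*
 * Illustrative sketch (editorial addition): atomic64_add_unless()
 * reports whether the add happened, which is exactly what a 64-bit
 * "get unless free" needs. example_get64 is a hypothetical name.
 */
static __inline__ int example_get64(atomic64_t *refcnt)
{
	return atomic64_add_unless(refcnt, 1, 0);	/* 0 if already zero */
}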

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
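
/*
 * Illustrative sketch (editorial addition): pairing the barrier macros
 * above with a non-serializing op when ordering is needed around the
 * decrement. example_release is a hypothetical name.
 */
static __inline__ void example_release(atomic_t *refcnt)
{
	smp_mb__before_atomic_dec();	/* order prior accesses before the drop */
	atomic_dec(refcnt);
}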

#endif /* _ASM_ATOMIC_H */