#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#include <linux/types.h>

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
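
/*
 * Note: atomic_read() and atomic_set() above are plain lwz/stw accesses.
 * They rely only on aligned 32-bit loads/stores being single-copy atomic
 * on PowerPC; they imply no memory barrier and take no reservation.
 */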

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
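
/*
 * Ordering: the value-returning variants bracket the lwarx/stwcx. loop
 * with LWSYNC_ON_SMP before and ISYNC_ON_SMP after, so on SMP they behave
 * as full memory barriers.  The void variants (atomic_add(), atomic_sub(),
 * atomic_inc(), atomic_dec()) provide no ordering guarantees.
 */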

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
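
/*
 * addic (and addic. below) update the carry bit, which is why the inc/dec
 * variants clobber "xer"; the add/subf based operations leave XER alone.
 */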

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
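
/*
 * Illustrative use (hypothetical caller code, not part of this header):
 *
 *	atomic_t outstanding = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&outstanding))
 *		complete(&done);	// increment moved the count from -1 to 0
 */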

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n)	(cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
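
/*
 * A minimal cmpxchg-loop sketch (hypothetical caller code); primitives
 * such as atomic_add_unless() below are the open-coded equivalent of
 * this pattern:
 *
 *	int old, new;
 *	do {
 *		old = atomic_read(v);
 *		new = transform(old);	// transform() is an assumed helper
 *	} while (atomic_cmpxchg(v, old, new) != old);
 */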

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}
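
/*
 * Illustrative use (hypothetical caller code): take a reference only while
 * the object is still live, which is exactly what atomic_inc_not_zero()
 * below packages up:
 *
 *	if (!atomic_add_unless(&obj->refcnt, 1, 0))
 *		return NULL;	// refcount already dropped to zero
 */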

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
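
/*
 * Illustrative use (hypothetical caller code), e.g. a try-acquire on a
 * counted resource:
 *
 *	if (atomic_dec_if_positive(&free_slots) < 0)
 *		return -EBUSY;	// nothing left; the counter was not touched
 */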

#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
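
/*
 * Since atomic_inc()/atomic_dec() are unordered (see above), callers that
 * need ordering bracket them with these macros, e.g.
 *
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&v);
 */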

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
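
/*
 * Unlike the 32-bit version, the test here is folded into the decrement:
 * addic. sets CR0 from the result, so a negative result branches out
 * before the stdcx. and the store is skipped.
 */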

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#else  /* __powerpc64__ */
#include <asm-generic/atomic64.h>
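
/*
 * 32-bit powerpc has no 64-bit ldarx/stdcx., so it falls back to the
 * generic spinlock-protected atomic64_t implementation.
 */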

#endif /* __powerpc64__ */

#include <asm-generic/atomic-long.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */