/* arch/powerpc/include/asm/atomic.h — PowerPC atomic operations (blame-listing header removed) */
1 | #ifndef _ASM_POWERPC_ATOMIC_H_ |
2 | #define _ASM_POWERPC_ATOMIC_H_ | |
3 | ||
1da177e4 LT |
4 | /* |
5 | * PowerPC atomic operations | |
6 | */ | |
7 | ||
ea435467 | 8 | #include <linux/types.h> |
1da177e4 LT |
9 | |
10 | #ifdef __KERNEL__ | |
f055affb | 11 | #include <linux/compiler.h> |
feaf7cf1 | 12 | #include <asm/synch.h> |
3ddfbcf1 | 13 | #include <asm/asm-compat.h> |
2856f5e3 | 14 | #include <asm/system.h> |
1da177e4 | 15 | |
feaf7cf1 | 16 | #define ATOMIC_INIT(i) { (i) } |
1da177e4 | 17 | |
/*
 * Atomically read v->counter.
 *
 * Going through inline asm (rather than a plain load) forces the compiler
 * to re-read the value each call instead of caching it in a register;
 * %U1/%X1 let gcc select update/indexed addressing forms of lwz as needed.
 */
static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}
26 | ||
/*
 * Atomically set v->counter to i.  A single aligned stw is atomic on
 * PowerPC; the asm form prevents the compiler from eliding the store.
 */
static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
1da177e4 | 31 | |
/*
 * Atomically add @a to @v.  No return value, no memory barriers.
 *
 * Classic lwarx/stwcx. reservation loop: the stwcx. fails (and we retry)
 * if another CPU touched the reservation granule in between.
 * PPC405_ERR77() inserts the PPC405 erratum #77 workaround before the
 * conditional store on affected CPUs; elsewhere it expands to nothing.
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
46 | ||
/*
 * Atomically add @a to @v and return the new value.
 *
 * Value-returning atomics have full-barrier semantics: a release barrier
 * before the update and an acquire barrier after, hence the "memory"
 * clobber as well.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
65 | ||
/* True if @v becomes negative after adding @a (fully ordered). */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
67 | ||
/*
 * Atomically subtract @a from @v.  No return value, no barriers.
 * Note subf computes %0 = %0 - %2 (subtract-from operand order).
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
82 | ||
/*
 * Atomically subtract @a from @v and return the new value.
 * Fully ordered (release barrier before, acquire barrier after).
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
101 | ||
/*
 * Atomically increment @v by 1.  No return value, no barriers.
 * addic modifies the carry bit, hence the "xer" clobber.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
116 | ||
/*
 * Atomically increment @v and return the new value.  Fully ordered.
 * "xer" is clobbered because addic updates the carry bit.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
135 | ||
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
145 | ||
/*
 * Atomically decrement @v by 1.  No return value, no barriers.
 * Implemented as addic with -1; "xer" clobbered for the carry bit.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
160 | ||
/*
 * Atomically decrement @v and return the new value.  Fully ordered.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
179 | ||
/* Compare-and-swap / exchange on the counter, delegating to the generic
 * cmpxchg/xchg implementations (fully ordered). */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
4a6dae6d | 182 | |
8426e1f6 NP |
183 | /** |
184 | * atomic_add_unless - add unless the number is a given value | |
185 | * @v: pointer of type atomic_t | |
186 | * @a: the amount to add to v... | |
187 | * @u: ...unless v is equal to u. | |
188 | * | |
189 | * Atomically adds @a to @v, so long as it was not @u. | |
190 | * Returns non-zero if @v was not @u, and zero otherwise. | |
191 | */ | |
f055affb NP |
192 | static __inline__ int atomic_add_unless(atomic_t *v, int a, int u) |
193 | { | |
194 | int t; | |
195 | ||
196 | __asm__ __volatile__ ( | |
f10e2e5b | 197 | PPC_RELEASE_BARRIER |
f055affb NP |
198 | "1: lwarx %0,0,%1 # atomic_add_unless\n\ |
199 | cmpw 0,%0,%3 \n\ | |
200 | beq- 2f \n\ | |
201 | add %0,%2,%0 \n" | |
202 | PPC405_ERR77(0,%2) | |
203 | " stwcx. %0,0,%1 \n\ | |
204 | bne- 1b \n" | |
f10e2e5b | 205 | PPC_ACQUIRE_BARRIER |
f055affb NP |
206 | " subf %0,%2,%0 \n\ |
207 | 2:" | |
208 | : "=&r" (t) | |
209 | : "r" (&v->counter), "r" (a), "r" (u) | |
210 | : "cc", "memory"); | |
211 | ||
212 | return t != u; | |
213 | } | |
214 | ||
8426e1f6 | 215 | |
1da177e4 LT |
/* Fully-ordered test variants built on the *_return primitives. */
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
218 | ||
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	"\n\
2:"	: "=&b" (t)		/* "b" excludes r0: addi with r0 means li */
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
245 | ||
feaf7cf1 BB |
/* PowerPC atomic RMW ops without return values are unordered, so the
 * before/after helpers need a full smp_mb(). */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
1da177e4 | 250 | |
06a98dba SR |
#ifdef __powerpc64__

/* Native 64-bit atomics (ldarx/stdcx.) are only available on ppc64. */
#define ATOMIC64_INIT(i)	{ (i) }
254 | ||
/*
 * Atomically read v->counter (64-bit).  A single aligned ld is atomic;
 * the asm prevents the compiler from caching the value.
 */
static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}
263 | ||
/* Atomically set v->counter to i (single aligned std is atomic). */
static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
06a98dba SR |
268 | |
/*
 * Atomically add @a to @v (64-bit).  No return value, no barriers.
 * No PPC405_ERR77 here: the erratum only affects 32-bit PPC405 parts.
 */
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
282 | ||
/*
 * Atomically add @a to @v and return the new value (64-bit).
 * Fully ordered: release barrier before, acquire barrier after.
 */
static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
300 | ||
/* True if @v becomes negative after adding @a (fully ordered). */
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
302 | ||
/*
 * Atomically subtract @a from @v (64-bit).  No return value, no barriers.
 */
static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
316 | ||
/*
 * Atomically subtract @a from @v and return the new value (64-bit).
 * Fully ordered.
 */
static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
334 | ||
/*
 * Atomically increment @v by 1 (64-bit).  No return value, no barriers.
 * addic updates the carry bit, hence the "xer" clobber.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
348 | ||
/*
 * Atomically increment @v and return the new value (64-bit).
 * Fully ordered; "xer" clobbered by addic.
 */
static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
366 | ||
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
376 | ||
/*
 * Atomically decrement @v by 1 (64-bit).  No return value, no barriers.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
390 | ||
/*
 * Atomically decrement @v and return the new value (64-bit).
 * Fully ordered.
 */
static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
408 | ||
/* Fully-ordered test variants built on the *_return primitives. */
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
411 | ||
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 *
 * addic. both decrements and sets CR0, so a single instruction does the
 * arithmetic and the "was it > 0" test (blt- skips the store if the
 * result went negative).
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
435 | ||
/* 64-bit compare-and-swap / exchange via the generic cmpxchg/xchg. */
#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
438 | ||
439 | /** | |
440 | * atomic64_add_unless - add unless the number is a given value | |
441 | * @v: pointer of type atomic64_t | |
442 | * @a: the amount to add to v... | |
443 | * @u: ...unless v is equal to u. | |
444 | * | |
445 | * Atomically adds @a to @v, so long as it was not @u. | |
446 | * Returns non-zero if @v was not @u, and zero otherwise. | |
447 | */ | |
448 | static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u) | |
449 | { | |
450 | long t; | |
451 | ||
452 | __asm__ __volatile__ ( | |
f10e2e5b | 453 | PPC_RELEASE_BARRIER |
41806ef4 MD |
454 | "1: ldarx %0,0,%1 # atomic_add_unless\n\ |
455 | cmpd 0,%0,%3 \n\ | |
456 | beq- 2f \n\ | |
457 | add %0,%2,%0 \n" | |
458 | " stdcx. %0,0,%1 \n\ | |
459 | bne- 1b \n" | |
f10e2e5b | 460 | PPC_ACQUIRE_BARRIER |
41806ef4 MD |
461 | " subf %0,%2,%0 \n\ |
462 | 2:" | |
463 | : "=&r" (t) | |
464 | : "r" (&v->counter), "r" (a), "r" (u) | |
465 | : "cc", "memory"); | |
466 | ||
467 | return t != u; | |
468 | } | |
469 | ||
/* Increment @v unless it is zero; non-zero return means it was incremented. */
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
471 | ||
c2e95c6d PM |
472 | #else /* __powerpc64__ */ |
473 | #include <asm-generic/atomic64.h> | |
474 | ||
06a98dba SR |
475 | #endif /* __powerpc64__ */ |
476 | ||
72099ed2 | 477 | #include <asm-generic/atomic-long.h> |
1da177e4 | 478 | #endif /* __KERNEL__ */ |
feaf7cf1 | 479 | #endif /* _ASM_POWERPC_ATOMIC_H_ */ |