]>
Commit | Line | Data |
---|---|---|
1a3b1d89 BG |
1 | #ifndef _ASM_X86_ATOMIC64_32_H |
2 | #define _ASM_X86_ATOMIC64_32_H | |
3 | ||
4 | #include <linux/compiler.h> | |
5 | #include <linux/types.h> | |
6 | #include <asm/processor.h> | |
7 | //#include <asm/cmpxchg.h> | |
8 | ||
/*
 * A 64-bit atomic type for 32-bit x86.  The counter is forced to 8-byte
 * alignment so the cmpxchg8b-based helpers can operate on it atomically.
 */

typedef struct {
	u64 __aligned(8) counter;
} atomic64_t;

/* Static initializer, e.g.: static atomic64_t x = ATOMIC64_INIT(0); */
#define ATOMIC64_INIT(val)	{ (val) }
16 | ||
#ifdef CONFIG_X86_CMPXCHG64
/* CMPXCHG8B is guaranteed present: always call the _cx8 helper directly. */
#define ATOMIC64_ALTERNATIVE_(f, g) "call atomic64_" #g "_cx8"
#else
/*
 * The CPU may lack CMPXCHG8B (i386/i486): let the alternatives mechanism
 * patch the call site at boot, choosing the _386 fallback or the _cx8
 * helper based on X86_FEATURE_CX8.
 */
#define ATOMIC64_ALTERNATIVE_(f, g) ALTERNATIVE("call atomic64_" #f "_386", "call atomic64_" #g "_cx8", X86_FEATURE_CX8)
#endif

/* Common case: the _386 and _cx8 helpers share the same name suffix. */
#define ATOMIC64_ALTERNATIVE(f) ATOMIC64_ALTERNATIVE_(f, f)
24 | ||
/**
 * atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */

static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
{
	/* Delegate to cmpxchg64() on the raw counter word. */
	return cmpxchg64(&v->counter, o, n);
}
1a3b1d89 BG |
39 | |
/**
 * atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 */
static inline long long atomic64_xchg(atomic64_t *v, long long n)
{
	long long o;
	/* Split the new value across %ecx:%ebx, as the helper's ABI expects. */
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	/* Old value comes back in %edx:%eax ("=A"); pointer goes in %esi. */
	asm volatile(ATOMIC64_ALTERNATIVE(xchg)
		     : "=A" (o), "+b" (low), "+c" (high)
		     : "S" (v)
		     : "memory"
		     );
	return o;
}
1a3b1d89 BG |
60 | |
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
	/* Split the value across %ecx:%ebx for the out-of-line helper. */
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	/* %eax/%edx are clobbered by the helper, hence the explicit clobbers. */
	asm volatile(ATOMIC64_ALTERNATIVE(set)
		     : "+b" (low), "+c" (high)
		     : "S" (v)
		     : "eax", "edx", "memory"
		     );
}
1a3b1d89 BG |
78 | |
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline long long atomic64_read(atomic64_t *v)
{
	long long r;
	/*
	 * Value is returned in %edx:%eax ("=A"); "+c" marks the pointer
	 * register (%ecx) as modified by the helper.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(read)
		     : "=A" (r), "+c" (v)
		     : : "memory"
		     );
	return r;
}
1a3b1d89 BG |
94 | |
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	/* "+A": @i goes in and the sum comes back in %edx:%eax. */
	asm volatile(ATOMIC64_ALTERNATIVE(add_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
1a3b1d89 BG |
110 | |
/*
 * Other variants with different arithmetic operators:
 */

/**
 * atomic64_sub_return - subtract and return
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns the new value.
 */
static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	/* "+A": @i goes in and the result comes back in %edx:%eax. */
	asm volatile(ATOMIC64_ALTERNATIVE(sub_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
122 | ||
/**
 * atomic64_inc_return - increment and return
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1 and returns the new value.
 */
static inline long long atomic64_inc_return(atomic64_t *v)
{
	long long a;
	/* Result in %edx:%eax; helper clobbers %ecx. */
	asm volatile(ATOMIC64_ALTERNATIVE(inc_return)
		     : "=A" (a)
		     : "S" (v)
		     : "memory", "ecx"
		     );
	return a;
}
133 | ||
/**
 * atomic64_dec_return - decrement and return
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and returns the new value.
 */
static inline long long atomic64_dec_return(atomic64_t *v)
{
	long long a;
	/* Result in %edx:%eax; helper clobbers %ecx. */
	asm volatile(ATOMIC64_ALTERNATIVE(dec_return)
		     : "=A" (a)
		     : "S" (v)
		     : "memory", "ecx"
		     );
	return a;
}
1a3b1d89 BG |
144 | |
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 *
 * NOTE(review): whatever the helper leaves in %edx:%eax is returned;
 * whether the plain "add" helper produces the sum is defined in the
 * out-of-line asm — confirm before relying on the return value.
 */
static inline long long atomic64_add(long long i, atomic64_t *v)
{
	/* _386 path has a cheap non-returning "add"; CX8 path reuses add_return. */
	asm volatile(ATOMIC64_ALTERNATIVE_(add, add_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
1a3b1d89 BG |
160 | |
/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 *
 * NOTE(review): whatever the helper leaves in %edx:%eax is returned;
 * whether the plain "sub" helper produces the difference is defined in
 * the out-of-line asm — confirm before relying on the return value.
 */
static inline long long atomic64_sub(long long i, atomic64_t *v)
{
	/* _386 path has a cheap non-returning "sub"; CX8 path reuses sub_return. */
	asm volatile(ATOMIC64_ALTERNATIVE_(sub, sub_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
1a3b1d89 BG |
176 | |
177 | /** | |
178 | * atomic64_sub_and_test - subtract value from variable and test result | |
a7e926ab LB |
179 | * @i: integer value to subtract |
180 | * @v: pointer to type atomic64_t | |
181 | * | |
182 | * Atomically subtracts @i from @v and returns | |
1a3b1d89 BG |
183 | * true if the result is zero, or false for all |
184 | * other cases. | |
185 | */ | |
a7e926ab LB |
186 | static inline int atomic64_sub_and_test(long long i, atomic64_t *v) |
187 | { | |
188 | return atomic64_sub_return(i, v) == 0; | |
189 | } | |
1a3b1d89 BG |
190 | |
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	/* No outputs: the helpers clobber %eax/%ecx/%edx, declared below. */
	asm volatile(ATOMIC64_ALTERNATIVE_(inc, inc_return)
		     : : "S" (v)
		     : "memory", "eax", "ecx", "edx"
		     );
}
1a3b1d89 BG |
204 | |
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	/* No outputs: the helpers clobber %eax/%ecx/%edx, declared below. */
	asm volatile(ATOMIC64_ALTERNATIVE_(dec, dec_return)
		     : : "S" (v)
		     : "memory", "eax", "ecx", "edx"
		     );
}
1a3b1d89 BG |
218 | |
219 | /** | |
220 | * atomic64_dec_and_test - decrement and test | |
a7e926ab | 221 | * @v: pointer to type atomic64_t |
1a3b1d89 | 222 | * |
a7e926ab | 223 | * Atomically decrements @v by 1 and |
1a3b1d89 BG |
224 | * returns true if the result is 0, or false for all other |
225 | * cases. | |
226 | */ | |
a7e926ab LB |
227 | static inline int atomic64_dec_and_test(atomic64_t *v) |
228 | { | |
229 | return atomic64_dec_return(v) == 0; | |
230 | } | |
1a3b1d89 BG |
231 | |
232 | /** | |
233 | * atomic64_inc_and_test - increment and test | |
a7e926ab | 234 | * @v: pointer to type atomic64_t |
1a3b1d89 | 235 | * |
a7e926ab | 236 | * Atomically increments @v by 1 |
1a3b1d89 BG |
237 | * and returns true if the result is zero, or false for all |
238 | * other cases. | |
239 | */ | |
a7e926ab LB |
240 | static inline int atomic64_inc_and_test(atomic64_t *v) |
241 | { | |
242 | return atomic64_inc_return(v) == 0; | |
243 | } | |
1a3b1d89 BG |
244 | |
245 | /** | |
246 | * atomic64_add_negative - add and test if negative | |
a7e926ab LB |
247 | * @i: integer value to add |
248 | * @v: pointer to type atomic64_t | |
1a3b1d89 | 249 | * |
a7e926ab | 250 | * Atomically adds @i to @v and returns true |
1a3b1d89 BG |
251 | * if the result is negative, or false when |
252 | * result is greater than or equal to zero. | |
253 | */ | |
a7e926ab LB |
254 | static inline int atomic64_add_negative(long long i, atomic64_t *v) |
255 | { | |
256 | return atomic64_add_return(i, v) < 0; | |
257 | } | |
258 | ||
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was performed, zero otherwise.
 *
 * NOTE(review): the return type is int and the value is @a truncated to
 * 32 bits after the helper ran, so this cannot be "the old value of @v";
 * it reads as a success flag — verify against the atomic64_*_32.S
 * helpers before relying on it.
 */
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	/* @u is split into %esi:%edi; @a travels in %edx:%eax ("A"). */
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	asm volatile(ATOMIC64_ALTERNATIVE(add_unless) "\n\t"
		     : "+A" (a), "+c" (v), "+S" (low), "+D" (high)
		     : : "memory");
	return (int)a;
}
277 | ||
278 | ||
/**
 * atomic64_inc_not_zero - increment unless the value is zero
 * @v: pointer to type atomic64_t
 *
 * Result comes back in %eax; the helper clobbers %ecx/%edx.
 * NOTE(review): presumably non-zero means the increment happened —
 * confirm against the out-of-line helper.
 */
static inline int atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	asm volatile(ATOMIC64_ALTERNATIVE(inc_not_zero)
		     : "=a" (r)
		     : "S" (v)
		     : "ecx", "edx", "memory"
		     );
	return r;
}
289 | ||
/**
 * atomic64_dec_if_positive - decrement if the result stays non-negative
 * @v: pointer to type atomic64_t
 *
 * Result is returned in %edx:%eax ("=A"); the helper clobbers %ecx.
 * NOTE(review): the exact decrement/return semantics live in the
 * out-of-line helper — confirm against atomic64_dec_if_positive_cx8.
 */
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long r;
	asm volatile(ATOMIC64_ALTERNATIVE(dec_if_positive)
		     : "=A" (r)
		     : "S" (v)
		     : "ecx", "memory"
		     );
	return r;
}
300 | ||
301 | #undef ATOMIC64_ALTERNATIVE | |
302 | #undef ATOMIC64_ALTERNATIVE_ | |
1a3b1d89 BG |
303 | |
304 | #endif /* _ASM_X86_ATOMIC64_32_H */ |