#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

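/*
 * Note: LOCK_PREFIX (from <asm/alternative.h>) expands to the x86 "lock"
 * prefix on SMP builds; on UP kernels the prefix is patched out at boot,
 * since a single CPU needs no bus lock for these RMW instructions.
 */
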
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	WRITE_ONCE(v->counter, i);
}

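/*
 * Neither atomic_read() nor atomic_set() needs a locked instruction:
 * aligned 32-bit loads and stores are already atomic on x86, and
 * READ_ONCE()/WRITE_ONCE() only keep the compiler from tearing, caching
 * or reordering the access.
 */
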
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

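/*
 * "+m" makes v->counter an in/out memory operand and "ir" lets @i be an
 * immediate or a register, so the whole read-modify-write collapses into
 * a single locked addl/subl instruction.
 */
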
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
}

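/*
 * GEN_BINARY_RMWcc()/GEN_UNARY_RMWcc() (from <asm/rmwcc.h>) emit the
 * locked instruction and turn the requested condition code into the
 * bool result -- "e" here tests ZF, i.e. "result was zero" -- without
 * a separate sete/test sequence when the compiler supports asm goto.
 */
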
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
}

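/*
 * Same RMWcc scheme as above, but testing SF ("s"): the locked addl
 * sets the sign flag exactly when the 32-bit result is negative.
 */
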
/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}

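/*
 * xadd() wraps the x86 XADD instruction, which atomically adds @i to
 * the memory operand and hands back the *old* value; adding @i again
 * yields the new value that atomic_add_return() must return.
 */
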
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))

static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	return xadd(&v->counter, i);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	return xadd(&v->counter, -i);
}

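/*
 * Unlike the *_return variants above, the fetch_* variants return the
 * value the counter held *before* the operation, which is exactly what
 * XADD leaves in its source register.
 */
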
static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	return try_cmpxchg(&v->counter, old, new);
}

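/*
 * try_cmpxchg() returns true on success and, on failure, updates *old
 * with the value actually found -- CMPXCHG leaves it in %eax anyway,
 * so a retry loop saves the reload a plain cmpxchg() loop would need.
 * A typical caller (sketch; compute() stands in for any update):
 *
 *	int old = atomic_read(v);
 *	do {
 *		new = compute(old);
 *	} while (!atomic_try_cmpxchg(v, &old, new));
 */
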
static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}

#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	asm volatile(LOCK_PREFIX #op"l %1,%0"				\
			: "+m" (v->counter)				\
			: "ir" (i)					\
			: "memory");					\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int val = atomic_read(v);					\
	do {								\
	} while (!atomic_try_cmpxchg(v, &val, val c_op i));		\
	return val;							\
}

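/*
 * The do-while body is intentionally empty: atomic_try_cmpxchg() both
 * attempts the update and refreshes "val" on failure, so the loop
 * condition is the whole algorithm.
 */
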
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op)							\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &)
ATOMIC_OPS(or , |)
ATOMIC_OPS(xor, ^)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

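/*
 * Each ATOMIC_OPS() line therefore generates a void atomic_{and,or,xor}()
 * built on a locked andl/orl/xorl, plus the corresponding
 * atomic_fetch_{and,or,xor}() returning the old value.
 */
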
/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);
	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));
	return c;
}

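/*
 * Returning the old value lets the generic atomic_add_unless() wrapper
 * in <linux/atomic.h> report whether the add happened by comparing that
 * value against @u.
 */
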
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif
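
/*
 * atomic64_t support: 32-bit kernels emulate 64-bit atomics via
 * cmpxchg8b (see <asm/atomic64_32.h>), while 64-bit kernels mirror the
 * operations above with native quadword instructions.
 */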

#endif /* _ASM_X86_ATOMIC_H */