/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

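/*
 * A minimal usage sketch of the "resource counting" case mentioned above
 * (the names nr_events, note_event and events_so_far are hypothetical and
 * not defined in this header):
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	void note_event(void)
 *	{
 *		atomic_inc(&nr_events);
 *	}
 *
 *	int events_so_far(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 */
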
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
        return READ_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
        WRITE_ONCE(v->counter, i);
}

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "addl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i) : "memory");
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "subl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i) : "memory");
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "incl %0"
                     : "+m" (v->counter) :: "memory");
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "decl %0"
                     : "+m" (v->counter) :: "memory");
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
}

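/*
 * A common pattern built on atomic_dec_and_test() (sketch only; struct obj,
 * obj_put and obj_free are hypothetical): drop a reference and free the
 * object when the last reference goes away.  Only one caller can take the
 * counter from 1 to 0, so only one caller frees the object.
 *
 *	void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcount))
 *			obj_free(o);
 *	}
 */
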
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
}

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
        return i + xadd(&v->counter, i);
}

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
        return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

/* Atomically add @i to @v and return the value @v held before the add. */
static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
        return xadd(&v->counter, i);
}

/* Atomically subtract @i from @v and return the value @v held before the subtraction. */
static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
        return xadd(&v->counter, -i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        return cmpxchg(&v->counter, old, new);
}

/*
 * Like atomic_cmpxchg(), but returns a success boolean and, on failure,
 * updates *@old with the value actually found in @v.  The fetch_*()
 * loops below rely on this to avoid re-reading @v on every iteration.
 */
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
        return try_cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
        return xchg(&v->counter, new);
}

static inline void atomic_and(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "andl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static inline int atomic_fetch_and(int i, atomic_t *v)
{
        int val = atomic_read(v);

        do { } while (!atomic_try_cmpxchg(v, &val, val & i));

        return val;
}

static inline void atomic_or(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "orl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static inline int atomic_fetch_or(int i, atomic_t *v)
{
        int val = atomic_read(v);

        do { } while (!atomic_try_cmpxchg(v, &val, val | i));

        return val;
}

static inline void atomic_xor(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "xorl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static inline int atomic_fetch_xor(int i, atomic_t *v)
{
        int val = atomic_read(v);

        do { } while (!atomic_try_cmpxchg(v, &val, val ^ i));

        return val;
}

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c = atomic_read(v);

        do {
                if (unlikely(c == u))
                        break;
        } while (!atomic_try_cmpxchg(v, &c, c + a));

        return c;
}

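/*
 * Sketch of the companion "get" side of the refcounting pattern shown
 * earlier (struct obj and obj_get are hypothetical): take a reference
 * only if the counter has not already dropped to zero.
 * __atomic_add_unless() returns the old value, so a non-zero return
 * means the increment actually happened.
 *
 *	bool obj_get(struct obj *o)
 *	{
 *		return __atomic_add_unless(&o->refcount, 1, 0) != 0;
 *	}
 */
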
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */