#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	return ACCESS_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}
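
/*
 * Usage sketch (editorial addition, not part of the upstream header):
 * declaring, initializing and updating an atomic_t. All names below
 * are hypothetical.
 */
#if 0	/* illustration only */
static atomic_t example_counter = ATOMIC_INIT(0);

static void example_reset(void)
{
	int old = atomic_read(&example_counter);	/* volatile read */

	atomic_set(&example_counter, 0);	/* plain store; naturally aligned int */
	(void)old;
}
#endif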

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
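
/*
 * Usage sketch (editorial addition): atomic_add()/atomic_sub() return
 * void, so they suit statistics-style counters where the updated value
 * is never examined. Names below are hypothetical.
 */
#if 0	/* illustration only */
static atomic_t example_inflight = ATOMIC_INIT(0);

static void example_submit(int batch)
{
	atomic_add(batch, &example_inflight);	/* single lock'ed addl */
}

static void example_complete(int batch)
{
	atomic_sub(batch, &example_inflight);	/* single lock'ed subl */
}
#endif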

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline int atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}
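
/*
 * Usage sketch (editorial addition): the classic reference-count
 * release pattern built on atomic_dec_and_test(). The object type and
 * example_free() are hypothetical; atomic_inc() above would take the
 * reference.
 */
#if 0	/* illustration only */
struct example_obj {
	atomic_t refcnt;
};

extern void example_free(struct example_obj *obj);

static void example_put(struct example_obj *obj)
{
	if (atomic_dec_and_test(&obj->refcnt))
		example_free(obj);	/* last reference dropped */
}
#endif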

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline int atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}
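
/*
 * Usage sketch (editorial addition): atomic_add_return() is a
 * fetch-and-add (XADD) plus the addend, so it yields the *new* value.
 * A common use is handing out monotonically increasing IDs; the names
 * below are hypothetical.
 */
#if 0	/* illustration only */
static atomic_t example_next_id = ATOMIC_INIT(0);

static int example_alloc_id(void)
{
	/* Same as the atomic_inc_return() macro defined below. */
	return atomic_add_return(1, &example_next_id);
}
#endif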

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
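
/*
 * Usage sketch (editorial addition): the standard compare-and-swap
 * retry loop, here implementing an atomic "store maximum". The helper
 * is hypothetical, not part of this header.
 */
#if 0	/* illustration only */
static void example_store_max(atomic_t *v, int new)
{
	int old = atomic_read(v);

	while (old < new) {
		int prev = atomic_cmpxchg(v, old, new);

		if (prev == old)
			break;		/* swap succeeded */
		old = prev;		/* lost a race; retry with the fresh value */
	}
}
#endif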

#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	asm volatile(LOCK_PREFIX #op"l %1,%0"				\
			: "+m" (v->counter)				\
			: "ir" (i)					\
			: "memory");					\
}

#define CONFIG_ARCH_HAS_ATOMIC_OR

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OP
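
/*
 * For reference (editorial addition, not in the upstream header):
 * ATOMIC_OP(or) above expands to roughly the following, mirroring
 * atomic_add()/atomic_sub() but with an explicit "memory" clobber,
 * since these ops also back the mask helpers further down:
 *
 *	static inline void atomic_or(int i, atomic_t *v)
 *	{
 *		asm volatile(LOCK_PREFIX "orl %1,%0"
 *			     : "+m" (v->counter)
 *			     : "ir" (i)
 *			     : "memory");
 *	}
 */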

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
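
/*
 * Usage sketch (editorial addition): callers normally reach this
 * through the generic atomic_add_unless() wrapper in <linux/atomic.h>.
 * A typical "take a reference unless the count already hit zero"
 * helper might look like this; the function name is hypothetical.
 */
#if 0	/* illustration only */
static int example_tryget(atomic_t *refcnt)
{
	/* An old value of 0 means the object is already being torn down. */
	return __atomic_add_unless(refcnt, 1, 0) != 0;
}
#endif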

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer of type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static __always_inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	atomic_and(~mask, v);
}

static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	atomic_or(mask, v);
}

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */