/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	return READ_ONCE((v)->counter);
}
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	WRITE_ONCE(v->counter, i);
}
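/*
 * Example (illustrative, not part of the original header): declaring
 * and using an atomic_t with the accessors above; 'nr_events' is a
 * hypothetical counter.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 10);
 *	pr_info("events: %d\n", atomic_read(&nr_events));
 */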
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
}
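/*
 * Example (illustrative, not part of the original header):
 * atomic_dec_and_test() is the classic reference-count "put" idiom;
 * only the caller that drops the last reference sees true and may
 * free the object. 'obj' and release_obj() are hypothetical.
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		release_obj(obj);
 */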
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
}
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
}
/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}
#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))
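/*
 * Example (illustrative, not part of the original header): the
 * _return variants yield the value after the operation, e.g. for
 * handing out unique sequence numbers; 'seq' is a hypothetical
 * counter.
 *
 *	static atomic_t seq = ATOMIC_INIT(0);
 *
 *	int id = atomic_inc_return(&seq);	// yields 1, 2, 3, ...
 */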
static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	return xadd(&v->counter, i);
}
static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	return xadd(&v->counter, -i);
}
static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	return try_cmpxchg(&v->counter, old, new);
}
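/*
 * Example (illustrative, not part of the original header): a typical
 * atomic_try_cmpxchg() retry loop. Unlike atomic_cmpxchg(), the try_
 * variant refreshes *old with the current value on failure, so the
 * loop needs no explicit re-read. Here a hypothetical counter is
 * incremented only while below a hypothetical 'limit':
 *
 *	int cur = atomic_read(v);
 *
 *	do {
 *		if (cur >= limit)
 *			break;
 *	} while (!atomic_try_cmpxchg(v, &cur, cur + 1));
 */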
static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
static inline void atomic_and(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "andl %1,%0"
			: "+m" (v->counter)
			: "ir" (i)
			: "memory");
}
static inline int atomic_fetch_and(int i, atomic_t *v)
{
	int val = atomic_read(v);

	/* try_cmpxchg() refreshes 'val' on failure, so just retry */
	do { } while (!atomic_try_cmpxchg(v, &val, val & i));

	return val;
}
static inline void atomic_or(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "orl %1,%0"
			: "+m" (v->counter)
			: "ir" (i)
			: "memory");
}
static inline int atomic_fetch_or(int i, atomic_t *v)
{
	int val = atomic_read(v);

	do { } while (!atomic_try_cmpxchg(v, &val, val | i));

	return val;
}
static inline void atomic_xor(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "xorl %1,%0"
			: "+m" (v->counter)
			: "ir" (i)
			: "memory");
}
static inline int atomic_fetch_xor(int i, atomic_t *v)
{
	int val = atomic_read(v);

	do { } while (!atomic_try_cmpxchg(v, &val, val ^ i));

	return val;
}
/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
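/*
 * Example (illustrative, not part of the original header): with
 * @u == 0 this is the "take a reference unless the object is already
 * dead" idiom; 'obj' is hypothetical.
 *
 *	if (__atomic_add_unless(&obj->refcnt, 1, 0) == 0)
 *		return NULL;	// refcount already hit zero
 */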
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */