#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		((atomic_t) { (i) })
#define ATOMIC64_INIT(i)	((atomic64_t) { (i) })

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

#define atomic_set(v,i)		(((v)->counter) = (i))
#define atomic64_set(v,i)	(((v)->counter) = (i))
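
/*
 * Usage sketch (illustrative only; `refcnt' is a made-up name, not
 * part of this header):
 *
 *	static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	atomic_set(&refcnt, 2);
 *	if (atomic_read(&refcnt) == 2)
 *		...
 *
 * The volatile cast in atomic_read()/atomic64_read() forces a real
 * load from memory on every call, so the compiler cannot cache the
 * counter in a register across reads.
 */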

static __inline__ int
ia64_atomic_add (int i, atomic_t *v)
{
	__s32 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic_read(v);
		new = old + i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
	return new;
}
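
/*
 * Note on the loop above (it applies equally to the three helpers
 * below): this is a classic compare-and-swap retry loop.  Each
 * iteration snapshots the counter, computes the new value, and
 * attempts an acquire-semantics cmpxchg; if another CPU changed the
 * counter in the meantime, the cmpxchg returns a value != old and
 * the loop simply retries with a fresh snapshot.
 */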

static __inline__ long
ia64_atomic64_add (__s64 i, atomic64_t *v)
{
	__s64 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic64_read(v);
		new = old + i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
	return new;
}

static __inline__ int
ia64_atomic_sub (int i, atomic_t *v)
{
	__s32 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic_read(v);
		new = old - i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
	return new;
}

static __inline__ long
ia64_atomic64_sub (__s64 i, atomic64_t *v)
{
	__s64 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic64_read(v);
		new = old - i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
	return new;
}

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
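
/*
 * Sketch of the cmpxchg contract: atomic_cmpxchg() returns the value
 * that was actually found in v->counter, and the store happened only
 * if that value equals `old'.  A typical "claim if unclaimed" use:
 *
 *	if (atomic_cmpxchg(&lock_holder, 0, my_id) == 0)
 *		... we atomically moved lock_holder from 0 to my_id ...
 *
 * (`lock_holder' and `my_id' are illustrative names, not part of
 * this header.)
 */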

static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
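
/*
 * Illustrative use of atomic_add_unless()/atomic_inc_not_zero(): the
 * "take a reference only while the object is still live" pattern,
 * where a refcount that has reached zero must never be resurrected:
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	(object is already being torn down)
 *
 * (`obj' and `refcnt' are made-up names for the example.)
 */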

static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic_add_return(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})
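
/*
 * Why the constant checks above (and in atomic64_add_return() below):
 * the ia64 fetchadd instruction only accepts the immediates
 * -16, -8, -4, -1, 1, 4, 8, 16.  For those compile-time constants the
 * macro emits a single fetchadd via ia64_fetch_and_add(); any other
 * increment falls back to the cmpxchg retry loop.
 */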

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

#define atomic_sub_return(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})
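
/*
 * Note that the two _sub_return() fast paths reuse fetchadd by
 * negating the already-validated constant: subtracting 4 becomes a
 * fetchadd of -4, which the instruction supports directly.
 */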

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

#define atomic_add(i,v)			atomic_add_return((i), (v))
#define atomic_sub(i,v)			atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))
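
/*
 * The void-style atomic_add()/atomic_inc()/... forms above are just
 * the _return variants with the result discarded; ia64's fetchadd
 * always produces the old value anyway, so there is no cheaper
 * non-returning encoding to exploit.
 */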

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic-long.h>
#endif /* _ASM_IA64_ATOMIC_H */