]> git.proxmox.com Git - mirror_ubuntu-bionic-kernel.git/blame - include/asm-generic/atomic.h
atomic.h: add atomic64 cmpxchg, xchg and add_unless to x86_64
[mirror_ubuntu-bionic-kernel.git] / include / asm-generic / atomic.h
CommitLineData
d3cb4871
CL
1#ifndef _ASM_GENERIC_ATOMIC_H
2#define _ASM_GENERIC_ATOMIC_H
3/*
4 * Copyright (C) 2005 Silicon Graphics, Inc.
5 * Christoph Lameter <clameter@sgi.com>
6 *
7 * Allows to provide arch independent atomic definitions without the need to
8 * edit all arch specific atomic.h files.
9 */
10
5998bf1d 11#include <asm/types.h>
bb2382c3 12#include <asm/system.h>
d3cb4871
CL
13
14/*
15 * Support for atomic_long_t
16 *
17 * Casts for parameters are avoided for existing atomic functions in order to
18 * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
19 * macros of a platform may have.
20 */
21
22#if BITS_PER_LONG == 64
23
24typedef atomic64_t atomic_long_t;
25
26#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
27
28static inline long atomic_long_read(atomic_long_t *l)
29{
30 atomic64_t *v = (atomic64_t *)l;
31
32 return (long)atomic64_read(v);
33}
34
35static inline void atomic_long_set(atomic_long_t *l, long i)
36{
37 atomic64_t *v = (atomic64_t *)l;
38
6b4977ce 39 atomic64_set(v, i);
d3cb4871
CL
40}
41
42static inline void atomic_long_inc(atomic_long_t *l)
43{
44 atomic64_t *v = (atomic64_t *)l;
45
46 atomic64_inc(v);
47}
48
49static inline void atomic_long_dec(atomic_long_t *l)
50{
51 atomic64_t *v = (atomic64_t *)l;
52
53 atomic64_dec(v);
54}
55
56static inline void atomic_long_add(long i, atomic_long_t *l)
57{
58 atomic64_t *v = (atomic64_t *)l;
59
60 atomic64_add(i, v);
61}
62
63static inline void atomic_long_sub(long i, atomic_long_t *l)
64{
65 atomic64_t *v = (atomic64_t *)l;
66
67 atomic64_sub(i, v);
68}
69
bb2382c3
MD
70static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
71{
72 atomic64_t *v = (atomic64_t *)l;
73
74 return atomic64_sub_and_test(i, v);
75}
76
77static inline int atomic_long_dec_and_test(atomic_long_t *l)
78{
79 atomic64_t *v = (atomic64_t *)l;
80
81 return atomic64_dec_and_test(v);
82}
83
84static inline int atomic_long_inc_and_test(atomic_long_t *l)
85{
86 atomic64_t *v = (atomic64_t *)l;
87
88 return atomic64_inc_and_test(v);
89}
90
91static inline int atomic_long_add_negative(long i, atomic_long_t *l)
92{
93 atomic64_t *v = (atomic64_t *)l;
94
95 return atomic64_add_negative(i, v);
96}
97
98static inline long atomic_long_add_return(long i, atomic_long_t *l)
99{
100 atomic64_t *v = (atomic64_t *)l;
101
102 return (long)atomic64_add_return(i, v);
103}
104
105static inline long atomic_long_sub_return(long i, atomic_long_t *l)
106{
107 atomic64_t *v = (atomic64_t *)l;
108
109 return (long)atomic64_sub_return(i, v);
110}
111
112static inline long atomic_long_inc_return(atomic_long_t *l)
113{
114 atomic64_t *v = (atomic64_t *)l;
115
116 return (long)atomic64_inc_return(v);
117}
118
119static inline long atomic_long_dec_return(atomic_long_t *l)
120{
121 atomic64_t *v = (atomic64_t *)l;
122
123 return (long)atomic64_dec_return(v);
124}
125
/* Conditional-update helpers: forward straight to the atomic64_t primitives. */
#define atomic_long_add_unless(l, a, u) \
	atomic64_add_unless((atomic64_t *)(l), (a), (u))

#define atomic_long_inc_not_zero(l) \
	atomic64_inc_not_zero((atomic64_t *)(l))
130
/*
 * Exchange helpers.
 *
 * These must forward to the 64-bit primitives: the original code called
 * the 32-bit atomic_cmpxchg()/atomic_xchg() on an atomic64_t *, which
 * would operate on only half of the counter on 64-bit platforms.  The
 * original atomic_long_xchg() also named its parameter "v" while the
 * expansion used "l", so every use site failed to compile.
 */
#define atomic_long_cmpxchg(l, old, new) \
	(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
#define atomic_long_xchg(l, new) \
	(atomic64_xchg((atomic64_t *)(l), (new)))
135
4b358e22 136#else /* BITS_PER_LONG == 64 */
d3cb4871
CL
137
138typedef atomic_t atomic_long_t;
139
140#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
141static inline long atomic_long_read(atomic_long_t *l)
142{
143 atomic_t *v = (atomic_t *)l;
144
145 return (long)atomic_read(v);
146}
147
148static inline void atomic_long_set(atomic_long_t *l, long i)
149{
150 atomic_t *v = (atomic_t *)l;
151
152 atomic_set(v, i);
153}
154
155static inline void atomic_long_inc(atomic_long_t *l)
156{
157 atomic_t *v = (atomic_t *)l;
158
159 atomic_inc(v);
160}
161
162static inline void atomic_long_dec(atomic_long_t *l)
163{
164 atomic_t *v = (atomic_t *)l;
165
166 atomic_dec(v);
167}
168
169static inline void atomic_long_add(long i, atomic_long_t *l)
170{
171 atomic_t *v = (atomic_t *)l;
172
173 atomic_add(i, v);
174}
175
176static inline void atomic_long_sub(long i, atomic_long_t *l)
177{
178 atomic_t *v = (atomic_t *)l;
179
180 atomic_sub(i, v);
181}
182
bb2382c3
MD
183static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
184{
185 atomic_t *v = (atomic_t *)l;
186
187 return atomic_sub_and_test(i, v);
188}
189
190static inline int atomic_long_dec_and_test(atomic_long_t *l)
191{
192 atomic_t *v = (atomic_t *)l;
193
194 return atomic_dec_and_test(v);
195}
196
197static inline int atomic_long_inc_and_test(atomic_long_t *l)
198{
199 atomic_t *v = (atomic_t *)l;
200
201 return atomic_inc_and_test(v);
202}
203
204static inline int atomic_long_add_negative(long i, atomic_long_t *l)
205{
206 atomic_t *v = (atomic_t *)l;
207
208 return atomic_add_negative(i, v);
209}
210
211static inline long atomic_long_add_return(long i, atomic_long_t *l)
212{
213 atomic_t *v = (atomic_t *)l;
214
215 return (long)atomic_add_return(i, v);
216}
217
218static inline long atomic_long_sub_return(long i, atomic_long_t *l)
219{
220 atomic_t *v = (atomic_t *)l;
221
222 return (long)atomic_sub_return(i, v);
223}
224
225static inline long atomic_long_inc_return(atomic_long_t *l)
226{
227 atomic_t *v = (atomic_t *)l;
228
229 return (long)atomic_inc_return(v);
230}
231
232static inline long atomic_long_dec_return(atomic_long_t *l)
233{
234 atomic_t *v = (atomic_t *)l;
235
236 return (long)atomic_dec_return(v);
237}
238
/* Conditional-update helpers: forward straight to the atomic_t primitives. */
#define atomic_long_add_unless(l, a, u) \
	atomic_add_unless((atomic_t *)(l), (a), (u))

#define atomic_long_inc_not_zero(l) \
	atomic_inc_not_zero((atomic_t *)(l))
243
/*
 * Exchange helpers, forwarded to the atomic_t primitives.
 *
 * The original atomic_long_xchg() named its parameter "v" while the
 * expansion used "l", so the identifier never resolved at use sites;
 * the parameter is renamed to match the expansion.
 */
#define atomic_long_cmpxchg(l, old, new) \
	(atomic_cmpxchg((atomic_t *)(l), (old), (new)))
#define atomic_long_xchg(l, new) \
	(atomic_xchg((atomic_t *)(l), (new)))
248
4b358e22
AB
249#endif /* BITS_PER_LONG == 64 */
250
251#endif /* _ASM_GENERIC_ATOMIC_H */