4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License, Version 1.0 only
6 * (the "License"). You may not use this file except in compliance
9 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10 * or http://www.opensolaris.org/os/licensing.
11 * See the License for the specific language governing permissions
12 * and limitations under the License.
14 * When distributing Covered Code, include this CDDL HEADER in each
15 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16 * If applicable, add the following below this CDDL HEADER, with the
17 * fields enclosed by brackets "[]" replaced with your own identifying
18 * information: Portions Copyright [yyyy] [name of copyright owner]
23 * Copyright (c) 2009 by Sun Microsystems, Inc. All rights reserved.
24 * Use is subject to license terms.
32 * All operations are implemented by serializing them through a global
33 * pthread mutex. This provides a correct generic implementation.
34 * However all supported architectures are encouraged to provide a
 * native implementation in assembly for performance reasons.
37 pthread_mutex_t atomic_lock
= PTHREAD_MUTEX_INITIALIZER
;
/*
 * These are the void returning variants
 */
43 #define ATOMIC_INC(name, type) \
44 void atomic_inc_##name(volatile type *target) \
46 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
48 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
51 ATOMIC_INC(long, unsigned long)
52 ATOMIC_INC(8, uint8_t)
53 ATOMIC_INC(uchar
, uchar_t
)
54 ATOMIC_INC(16, uint16_t)
55 ATOMIC_INC(ushort
, ushort_t
)
56 ATOMIC_INC(32, uint32_t)
57 ATOMIC_INC(uint
, uint_t
)
58 ATOMIC_INC(ulong
, ulong_t
)
59 ATOMIC_INC(64, uint64_t)
62 #define ATOMIC_DEC(name, type) \
63 void atomic_dec_##name(volatile type *target) \
65 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
67 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
70 ATOMIC_DEC(long, unsigned long)
71 ATOMIC_DEC(8, uint8_t)
72 ATOMIC_DEC(uchar
, uchar_t
)
73 ATOMIC_DEC(16, uint16_t)
74 ATOMIC_DEC(ushort
, ushort_t
)
75 ATOMIC_DEC(32, uint32_t)
76 ATOMIC_DEC(uint
, uint_t
)
77 ATOMIC_DEC(ulong
, ulong_t
)
78 ATOMIC_DEC(64, uint64_t)
81 #define ATOMIC_ADD(name, type1, type2) \
82 void atomic_add_##name(volatile type1 *target, type2 bits) \
84 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
86 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
89 ATOMIC_ADD(8, uint8_t, int8_t)
90 ATOMIC_ADD(char, uchar_t
, signed char)
91 ATOMIC_ADD(16, uint16_t, int16_t)
92 ATOMIC_ADD(short, ushort_t
, short)
93 ATOMIC_ADD(32, uint32_t, int32_t)
94 ATOMIC_ADD(int, uint_t
, int)
95 ATOMIC_ADD(long, ulong_t
, long)
96 ATOMIC_ADD(64, uint64_t, int64_t)
98 void atomic_add_ptr(volatile void *target
, ssize_t bits
)
100 VERIFY3S(pthread_mutex_lock(&atomic_lock
), ==, 0);
101 *(caddr_t
*)target
+= bits
;
102 VERIFY3S(pthread_mutex_unlock(&atomic_lock
), ==, 0);
106 #define ATOMIC_OR(name, type) \
107 void atomic_or_##name(volatile type *target, type bits) \
109 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
111 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
114 ATOMIC_OR(8, uint8_t)
115 ATOMIC_OR(uchar
, uchar_t
)
116 ATOMIC_OR(16, uint16_t)
117 ATOMIC_OR(ushort
, ushort_t
)
118 ATOMIC_OR(32, uint32_t)
119 ATOMIC_OR(uint
, uint_t
)
120 ATOMIC_OR(ulong
, ulong_t
)
121 ATOMIC_OR(64, uint64_t)
124 #define ATOMIC_AND(name, type) \
125 void atomic_and_##name(volatile type *target, type bits) \
127 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
129 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
132 ATOMIC_AND(8, uint8_t)
133 ATOMIC_AND(uchar
, uchar_t
)
134 ATOMIC_AND(16, uint16_t)
135 ATOMIC_AND(ushort
, ushort_t
)
136 ATOMIC_AND(32, uint32_t)
137 ATOMIC_AND(uint
, uint_t
)
138 ATOMIC_AND(ulong
, ulong_t
)
139 ATOMIC_AND(64, uint64_t)
143 * New value returning variants
146 #define ATOMIC_INC_NV(name, type) \
147 type atomic_inc_##name##_nv(volatile type *target) \
150 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
151 rc = (++(*target)); \
152 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
156 ATOMIC_INC_NV(long, unsigned long)
157 ATOMIC_INC_NV(8, uint8_t)
158 ATOMIC_INC_NV(uchar
, uchar_t
)
159 ATOMIC_INC_NV(16, uint16_t)
160 ATOMIC_INC_NV(ushort
, ushort_t
)
161 ATOMIC_INC_NV(32, uint32_t)
162 ATOMIC_INC_NV(uint
, uint_t
)
163 ATOMIC_INC_NV(ulong
, ulong_t
)
164 ATOMIC_INC_NV(64, uint64_t)
167 #define ATOMIC_DEC_NV(name, type) \
168 type atomic_dec_##name##_nv(volatile type *target) \
171 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
172 rc = (--(*target)); \
173 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
177 ATOMIC_DEC_NV(long, unsigned long)
178 ATOMIC_DEC_NV(8, uint8_t)
179 ATOMIC_DEC_NV(uchar
, uchar_t
)
180 ATOMIC_DEC_NV(16, uint16_t)
181 ATOMIC_DEC_NV(ushort
, ushort_t
)
182 ATOMIC_DEC_NV(32, uint32_t)
183 ATOMIC_DEC_NV(uint
, uint_t
)
184 ATOMIC_DEC_NV(ulong
, ulong_t
)
185 ATOMIC_DEC_NV(64, uint64_t)
188 #define ATOMIC_ADD_NV(name, type1, type2) \
189 type1 atomic_add_##name##_nv(volatile type1 *target, type2 bits)\
192 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
193 rc = (*target += bits); \
194 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
198 ATOMIC_ADD_NV(8, uint8_t, int8_t)
199 ATOMIC_ADD_NV(char, uchar_t
, signed char)
200 ATOMIC_ADD_NV(16, uint16_t, int16_t)
201 ATOMIC_ADD_NV(short, ushort_t
, short)
202 ATOMIC_ADD_NV(32, uint32_t, int32_t)
203 ATOMIC_ADD_NV(int, uint_t
, int)
204 ATOMIC_ADD_NV(long, ulong_t
, long)
205 ATOMIC_ADD_NV(64, uint64_t, int64_t)
207 void *atomic_add_ptr_nv(volatile void *target
, ssize_t bits
)
211 VERIFY3S(pthread_mutex_lock(&atomic_lock
), ==, 0);
212 ptr
= (*(caddr_t
*)target
+= bits
);
213 VERIFY3S(pthread_mutex_unlock(&atomic_lock
), ==, 0);
219 #define ATOMIC_OR_NV(name, type) \
220 type atomic_or_##name##_nv(volatile type *target, type bits) \
223 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
224 rc = (*target |= bits); \
225 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
229 ATOMIC_OR_NV(long, unsigned long)
230 ATOMIC_OR_NV(8, uint8_t)
231 ATOMIC_OR_NV(uchar
, uchar_t
)
232 ATOMIC_OR_NV(16, uint16_t)
233 ATOMIC_OR_NV(ushort
, ushort_t
)
234 ATOMIC_OR_NV(32, uint32_t)
235 ATOMIC_OR_NV(uint
, uint_t
)
236 ATOMIC_OR_NV(ulong
, ulong_t
)
237 ATOMIC_OR_NV(64, uint64_t)
240 #define ATOMIC_AND_NV(name, type) \
241 type atomic_and_##name##_nv(volatile type *target, type bits) \
244 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
245 rc = (*target &= bits); \
246 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
250 ATOMIC_AND_NV(long, unsigned long)
251 ATOMIC_AND_NV(8, uint8_t)
252 ATOMIC_AND_NV(uchar
, uchar_t
)
253 ATOMIC_AND_NV(16, uint16_t)
254 ATOMIC_AND_NV(ushort
, ushort_t
)
255 ATOMIC_AND_NV(32, uint32_t)
256 ATOMIC_AND_NV(uint
, uint_t
)
257 ATOMIC_AND_NV(ulong
, ulong_t
)
258 ATOMIC_AND_NV(64, uint64_t)
 * If *target == arg1 (the expected value), set *target = arg2
 * (the new value); in all cases return the old value of *target.
265 #define ATOMIC_CAS(name, type) \
266 type atomic_cas_##name(volatile type *target, type arg1, type arg2) \
269 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
273 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
277 ATOMIC_CAS(8, uint8_t)
278 ATOMIC_CAS(uchar
, uchar_t
)
279 ATOMIC_CAS(16, uint16_t)
280 ATOMIC_CAS(ushort
, ushort_t
)
281 ATOMIC_CAS(32, uint32_t)
282 ATOMIC_CAS(uint
, uint_t
)
283 ATOMIC_CAS(ulong
, ulong_t
)
284 ATOMIC_CAS(64, uint64_t)
286 void *atomic_cas_ptr(volatile void *target
, void *arg1
, void *arg2
)
290 VERIFY3S(pthread_mutex_lock(&atomic_lock
), ==, 0);
291 old
= *(void **)target
;
293 *(void **)target
= arg2
;
294 VERIFY3S(pthread_mutex_unlock(&atomic_lock
), ==, 0);
301 * Swap target and return old value
304 #define ATOMIC_SWAP(name, type) \
305 type atomic_swap_##name(volatile type *target, type bits) \
308 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \
311 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \
315 ATOMIC_SWAP(8, uint8_t)
316 ATOMIC_SWAP(uchar
, uchar_t
)
317 ATOMIC_SWAP(16, uint16_t)
318 ATOMIC_SWAP(ushort
, ushort_t
)
319 ATOMIC_SWAP(32, uint32_t)
320 ATOMIC_SWAP(uint
, uint_t
)
321 ATOMIC_SWAP(ulong
, ulong_t
)
322 ATOMIC_SWAP(64, uint64_t)
324 void *atomic_swap_ptr(volatile void *target
, void *bits
)
328 VERIFY3S(pthread_mutex_lock(&atomic_lock
), ==, 0);
329 old
= *(void **)target
;
330 *(void **)target
= bits
;
331 VERIFY3S(pthread_mutex_unlock(&atomic_lock
), ==, 0);
337 int atomic_set_long_excl(volatile ulong_t
*target
, uint_t value
)
341 VERIFY3S(pthread_mutex_lock(&atomic_lock
), ==, 0);
342 bit
= (1UL << value
);
343 if ((*target
& bit
) != 0) {
344 VERIFY3S(pthread_mutex_unlock(&atomic_lock
), ==, 0);
348 VERIFY3S(pthread_mutex_unlock(&atomic_lock
), ==, 0);
353 int atomic_clear_long_excl(volatile ulong_t
*target
, uint_t value
)
357 VERIFY3S(pthread_mutex_lock(&atomic_lock
), ==, 0);
358 bit
= (1UL << value
);
359 if ((*target
& bit
) != 0) {
360 VERIFY3S(pthread_mutex_unlock(&atomic_lock
), ==, 0);
364 VERIFY3S(pthread_mutex_unlock(&atomic_lock
), ==, 0);
369 void membar_enter(void)
371 /* XXX - Implement me */
374 void membar_exit(void)
376 /* XXX - Implement me */
379 void membar_producer(void)
381 /* XXX - Implement me */
384 void membar_consumer(void)
386 /* XXX - Implement me */
389 /* Legacy kernel interfaces; they will go away (eventually). */
391 uint8_t cas8(uint8_t *target
, uint8_t arg1
, uint8_t arg2
)
393 return atomic_cas_8(target
, arg1
, arg2
);
396 uint32_t cas32(uint32_t *target
, uint32_t arg1
, uint32_t arg2
)
398 return atomic_cas_32(target
, arg1
, arg2
);
401 uint64_t cas64(uint64_t *target
, uint64_t arg1
, uint64_t arg2
)
403 return atomic_cas_64(target
, arg1
, arg2
);
406 ulong_t
caslong(ulong_t
*target
, ulong_t arg1
, ulong_t arg2
)
408 return atomic_cas_ulong(target
, arg1
, arg2
);
411 void *casptr(void *target
, void *arg1
, void *arg2
)
413 return atomic_cas_ptr(target
, arg1
, arg2
);
416 void atomic_and_long(ulong_t
*target
, ulong_t bits
)
418 return atomic_and_ulong(target
, bits
);
421 void atomic_or_long(ulong_t
*target
, ulong_t bits
)
423 return atomic_or_ulong(target
, bits
);