]>
Commit | Line | Data |
---|---|---|
a26baf28 BB |
1 | /* |
2 | * CDDL HEADER START | |
3 | * | |
4 | * The contents of this file are subject to the terms of the | |
5 | * Common Development and Distribution License, Version 1.0 only | |
6 | * (the "License"). You may not use this file except in compliance | |
7 | * with the License. | |
8 | * | |
9 | * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE | |
10 | * or http://www.opensolaris.org/os/licensing. | |
11 | * See the License for the specific language governing permissions | |
12 | * and limitations under the License. | |
13 | * | |
14 | * When distributing Covered Code, include this CDDL HEADER in each | |
15 | * file and include the License file at usr/src/OPENSOLARIS.LICENSE. | |
16 | * If applicable, add the following below this CDDL HEADER, with the | |
17 | * fields enclosed by brackets "[]" replaced with your own identifying | |
18 | * information: Portions Copyright [yyyy] [name of copyright owner] | |
19 | * | |
20 | * CDDL HEADER END | |
21 | */ | |
22 | /* | |
23 | * Copyright (c) 2009 by Sun Microsystems, Inc. All rights reserved. | |
24 | * Use is subject to license terms. | |
25 | */ | |
26 | ||
27 | #include <atomic.h> | |
28 | #include <assert.h> | |
29 | #include <pthread.h> | |
30 | ||
/*
 * All operations are implemented by serializing them through a global
 * pthread mutex.  This provides a correct generic implementation.
 * However, all supported architectures are encouraged to provide a
 * native implementation in assembly for performance reasons.
 */
37 | pthread_mutex_t atomic_lock = PTHREAD_MUTEX_INITIALIZER; | |
38 | ||
/*
 * These are the void-returning variants.
 */
42 | ||
43 | #define ATOMIC_INC(name, type) \ | |
44 | void atomic_inc_##name(volatile type *target) \ | |
45 | { \ | |
46 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
47 | (*target)++; \ | |
48 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
49 | } | |
50 | ||
51 | ATOMIC_INC(long, unsigned long) | |
52 | ATOMIC_INC(8, uint8_t) | |
53 | ATOMIC_INC(uchar, uchar_t) | |
54 | ATOMIC_INC(16, uint16_t) | |
55 | ATOMIC_INC(ushort, ushort_t) | |
56 | ATOMIC_INC(32, uint32_t) | |
57 | ATOMIC_INC(uint, uint_t) | |
58 | ATOMIC_INC(ulong, ulong_t) | |
59 | ATOMIC_INC(64, uint64_t) | |
60 | ||
61 | ||
62 | #define ATOMIC_DEC(name, type) \ | |
63 | void atomic_dec_##name(volatile type *target) \ | |
64 | { \ | |
65 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
66 | (*target)--; \ | |
67 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
68 | } | |
69 | ||
70 | ATOMIC_DEC(long, unsigned long) | |
71 | ATOMIC_DEC(8, uint8_t) | |
72 | ATOMIC_DEC(uchar, uchar_t) | |
73 | ATOMIC_DEC(16, uint16_t) | |
74 | ATOMIC_DEC(ushort, ushort_t) | |
75 | ATOMIC_DEC(32, uint32_t) | |
76 | ATOMIC_DEC(uint, uint_t) | |
77 | ATOMIC_DEC(ulong, ulong_t) | |
78 | ATOMIC_DEC(64, uint64_t) | |
79 | ||
80 | ||
81 | #define ATOMIC_ADD(name, type1, type2) \ | |
82 | void atomic_add_##name(volatile type1 *target, type2 bits) \ | |
83 | { \ | |
84 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
85 | *target += bits; \ | |
86 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
87 | } | |
88 | ||
89 | ATOMIC_ADD(8, uint8_t, int8_t) | |
90 | ATOMIC_ADD(char, uchar_t, signed char) | |
91 | ATOMIC_ADD(16, uint16_t, int16_t) | |
92 | ATOMIC_ADD(short, ushort_t, short) | |
93 | ATOMIC_ADD(32, uint32_t, int32_t) | |
94 | ATOMIC_ADD(int, uint_t, int) | |
95 | ATOMIC_ADD(long, ulong_t, long) | |
96 | ATOMIC_ADD(64, uint64_t, int64_t) | |
97 | ||
98 | void atomic_add_ptr(volatile void *target, ssize_t bits) | |
99 | { | |
100 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); | |
101 | *(caddr_t *)target += bits; | |
102 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); | |
103 | } | |
104 | ||
105 | ||
106 | #define ATOMIC_OR(name, type) \ | |
107 | void atomic_or_##name(volatile type *target, type bits) \ | |
108 | { \ | |
109 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
110 | *target |= bits; \ | |
111 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
112 | } | |
113 | ||
114 | ATOMIC_OR(8, uint8_t) | |
115 | ATOMIC_OR(uchar, uchar_t) | |
116 | ATOMIC_OR(16, uint16_t) | |
117 | ATOMIC_OR(ushort, ushort_t) | |
118 | ATOMIC_OR(32, uint32_t) | |
119 | ATOMIC_OR(uint, uint_t) | |
120 | ATOMIC_OR(ulong, ulong_t) | |
121 | ATOMIC_OR(64, uint64_t) | |
122 | ||
123 | ||
124 | #define ATOMIC_AND(name, type) \ | |
125 | void atomic_and_##name(volatile type *target, type bits) \ | |
126 | { \ | |
127 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
128 | *target &= bits; \ | |
129 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
130 | } | |
131 | ||
132 | ATOMIC_AND(8, uint8_t) | |
133 | ATOMIC_AND(uchar, uchar_t) | |
134 | ATOMIC_AND(16, uint16_t) | |
135 | ATOMIC_AND(ushort, ushort_t) | |
136 | ATOMIC_AND(32, uint32_t) | |
137 | ATOMIC_AND(uint, uint_t) | |
138 | ATOMIC_AND(ulong, ulong_t) | |
139 | ATOMIC_AND(64, uint64_t) | |
140 | ||
141 | ||
142 | /* | |
143 | * New value returning variants | |
144 | */ | |
145 | ||
146 | #define ATOMIC_INC_NV(name, type) \ | |
147 | type atomic_inc_##name##_nv(volatile type *target) \ | |
148 | { \ | |
149 | type rc; \ | |
150 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
151 | rc = (++(*target)); \ | |
152 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
153 | return rc; \ | |
154 | } | |
155 | ||
156 | ATOMIC_INC_NV(long, unsigned long) | |
157 | ATOMIC_INC_NV(8, uint8_t) | |
158 | ATOMIC_INC_NV(uchar, uchar_t) | |
159 | ATOMIC_INC_NV(16, uint16_t) | |
160 | ATOMIC_INC_NV(ushort, ushort_t) | |
161 | ATOMIC_INC_NV(32, uint32_t) | |
162 | ATOMIC_INC_NV(uint, uint_t) | |
163 | ATOMIC_INC_NV(ulong, ulong_t) | |
164 | ATOMIC_INC_NV(64, uint64_t) | |
165 | ||
166 | ||
167 | #define ATOMIC_DEC_NV(name, type) \ | |
168 | type atomic_dec_##name##_nv(volatile type *target) \ | |
169 | { \ | |
170 | type rc; \ | |
171 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
172 | rc = (--(*target)); \ | |
173 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
174 | return rc; \ | |
175 | } | |
176 | ||
177 | ATOMIC_DEC_NV(long, unsigned long) | |
178 | ATOMIC_DEC_NV(8, uint8_t) | |
179 | ATOMIC_DEC_NV(uchar, uchar_t) | |
180 | ATOMIC_DEC_NV(16, uint16_t) | |
181 | ATOMIC_DEC_NV(ushort, ushort_t) | |
182 | ATOMIC_DEC_NV(32, uint32_t) | |
183 | ATOMIC_DEC_NV(uint, uint_t) | |
184 | ATOMIC_DEC_NV(ulong, ulong_t) | |
185 | ATOMIC_DEC_NV(64, uint64_t) | |
186 | ||
187 | ||
188 | #define ATOMIC_ADD_NV(name, type1, type2) \ | |
189 | type1 atomic_add_##name##_nv(volatile type1 *target, type2 bits)\ | |
190 | { \ | |
191 | type1 rc; \ | |
192 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
193 | rc = (*target += bits); \ | |
194 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
195 | return rc; \ | |
196 | } | |
197 | ||
198 | ATOMIC_ADD_NV(8, uint8_t, int8_t) | |
199 | ATOMIC_ADD_NV(char, uchar_t, signed char) | |
200 | ATOMIC_ADD_NV(16, uint16_t, int16_t) | |
201 | ATOMIC_ADD_NV(short, ushort_t, short) | |
202 | ATOMIC_ADD_NV(32, uint32_t, int32_t) | |
203 | ATOMIC_ADD_NV(int, uint_t, int) | |
204 | ATOMIC_ADD_NV(long, ulong_t, long) | |
205 | ATOMIC_ADD_NV(64, uint64_t, int64_t) | |
206 | ||
207 | void *atomic_add_ptr_nv(volatile void *target, ssize_t bits) | |
208 | { | |
209 | void *ptr; | |
210 | ||
211 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); | |
212 | ptr = (*(caddr_t *)target += bits); | |
213 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); | |
214 | ||
215 | return ptr; | |
216 | } | |
217 | ||
218 | ||
219 | #define ATOMIC_OR_NV(name, type) \ | |
220 | type atomic_or_##name##_nv(volatile type *target, type bits) \ | |
221 | { \ | |
222 | type rc; \ | |
223 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
224 | rc = (*target |= bits); \ | |
225 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
226 | return rc; \ | |
227 | } | |
228 | ||
229 | ATOMIC_OR_NV(long, unsigned long) | |
230 | ATOMIC_OR_NV(8, uint8_t) | |
231 | ATOMIC_OR_NV(uchar, uchar_t) | |
232 | ATOMIC_OR_NV(16, uint16_t) | |
233 | ATOMIC_OR_NV(ushort, ushort_t) | |
234 | ATOMIC_OR_NV(32, uint32_t) | |
235 | ATOMIC_OR_NV(uint, uint_t) | |
236 | ATOMIC_OR_NV(ulong, ulong_t) | |
237 | ATOMIC_OR_NV(64, uint64_t) | |
238 | ||
239 | ||
240 | #define ATOMIC_AND_NV(name, type) \ | |
241 | type atomic_and_##name##_nv(volatile type *target, type bits) \ | |
242 | { \ | |
243 | type rc; \ | |
244 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
245 | rc = (*target &= bits); \ | |
246 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
247 | return rc; \ | |
248 | } | |
249 | ||
250 | ATOMIC_AND_NV(long, unsigned long) | |
251 | ATOMIC_AND_NV(8, uint8_t) | |
252 | ATOMIC_AND_NV(uchar, uchar_t) | |
253 | ATOMIC_AND_NV(16, uint16_t) | |
254 | ATOMIC_AND_NV(ushort, ushort_t) | |
255 | ATOMIC_AND_NV(32, uint32_t) | |
256 | ATOMIC_AND_NV(uint, uint_t) | |
257 | ATOMIC_AND_NV(ulong, ulong_t) | |
258 | ATOMIC_AND_NV(64, uint64_t) | |
259 | ||
260 | ||
/*
 * If *target == arg1, set *target = arg2; return the old value of *target.
 */

264 | ||
265 | #define ATOMIC_CAS(name, type) \ | |
266 | type atomic_cas_##name(volatile type *target, type arg1, type arg2) \ | |
267 | { \ | |
268 | type old; \ | |
269 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
270 | old = *target; \ | |
271 | if (old == arg1) \ | |
272 | *target = arg2; \ | |
273 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
274 | return old; \ | |
275 | } | |
276 | ||
277 | ATOMIC_CAS(8, uint8_t) | |
278 | ATOMIC_CAS(uchar, uchar_t) | |
279 | ATOMIC_CAS(16, uint16_t) | |
280 | ATOMIC_CAS(ushort, ushort_t) | |
281 | ATOMIC_CAS(32, uint32_t) | |
282 | ATOMIC_CAS(uint, uint_t) | |
283 | ATOMIC_CAS(ulong, ulong_t) | |
284 | ATOMIC_CAS(64, uint64_t) | |
285 | ||
286 | void *atomic_cas_ptr(volatile void *target, void *arg1, void *arg2) | |
287 | { | |
288 | void *old; | |
289 | ||
290 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); | |
291 | old = *(void **)target; | |
292 | if (old == arg1) | |
293 | *(void **)target = arg2; | |
294 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); | |
295 | ||
296 | return old; | |
297 | } | |
298 | ||
299 | ||
300 | /* | |
301 | * Swap target and return old value | |
302 | */ | |
303 | ||
304 | #define ATOMIC_SWAP(name, type) \ | |
305 | type atomic_swap_##name(volatile type *target, type bits) \ | |
306 | { \ | |
307 | type old; \ | |
308 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); \ | |
309 | old = *target; \ | |
310 | *target = bits; \ | |
311 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); \ | |
312 | return old; \ | |
313 | } | |
314 | ||
315 | ATOMIC_SWAP(8, uint8_t) | |
316 | ATOMIC_SWAP(uchar, uchar_t) | |
317 | ATOMIC_SWAP(16, uint16_t) | |
318 | ATOMIC_SWAP(ushort, ushort_t) | |
319 | ATOMIC_SWAP(32, uint32_t) | |
320 | ATOMIC_SWAP(uint, uint_t) | |
321 | ATOMIC_SWAP(ulong, ulong_t) | |
322 | ATOMIC_SWAP(64, uint64_t) | |
323 | ||
324 | void *atomic_swap_ptr(volatile void *target, void *bits) | |
325 | { | |
326 | void *old; | |
327 | ||
328 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); | |
329 | old = *(void **)target; | |
330 | *(void **)target = bits; | |
331 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); | |
332 | ||
333 | return old; | |
334 | } | |
335 | ||
336 | ||
337 | int atomic_set_long_excl(volatile ulong_t *target, uint_t value) | |
338 | { | |
339 | ulong_t bit; | |
340 | ||
341 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); | |
342 | bit = (1UL << value); | |
343 | if ((*target & bit) != 0) { | |
344 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); | |
345 | return -1; | |
346 | } | |
347 | *target |= bit; | |
348 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); | |
349 | ||
350 | return 0; | |
351 | } | |
352 | ||
353 | int atomic_clear_long_excl(volatile ulong_t *target, uint_t value) | |
354 | { | |
355 | ulong_t bit; | |
356 | ||
357 | VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0); | |
358 | bit = (1UL << value); | |
359 | if ((*target & bit) != 0) { | |
360 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); | |
361 | return -1; | |
362 | } | |
363 | *target &= ~bit; | |
364 | VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0); | |
365 | ||
366 | return 0; | |
367 | } | |
368 | ||
369 | void membar_enter(void) | |
370 | { | |
371 | /* XXX - Implement me */ | |
372 | } | |
373 | ||
374 | void membar_exit(void) | |
375 | { | |
376 | /* XXX - Implement me */ | |
377 | } | |
378 | ||
379 | void membar_producer(void) | |
380 | { | |
381 | /* XXX - Implement me */ | |
382 | } | |
383 | ||
384 | void membar_consumer(void) | |
385 | { | |
386 | /* XXX - Implement me */ | |
387 | } | |
388 | ||
389 | /* Legacy kernel interfaces; they will go away (eventually). */ | |
390 | ||
/* Legacy alias for atomic_cas_8(). */
uint8_t cas8(uint8_t *target, uint8_t arg1, uint8_t arg2)
{
	return (atomic_cas_8(target, arg1, arg2));
}
395 | ||
/* Legacy alias for atomic_cas_32(). */
uint32_t cas32(uint32_t *target, uint32_t arg1, uint32_t arg2)
{
	return (atomic_cas_32(target, arg1, arg2));
}
400 | ||
/* Legacy alias for atomic_cas_64(). */
uint64_t cas64(uint64_t *target, uint64_t arg1, uint64_t arg2)
{
	return (atomic_cas_64(target, arg1, arg2));
}
405 | ||
406 | ulong_t caslong(ulong_t *target, ulong_t arg1, ulong_t arg2) | |
407 | { | |
408 | return atomic_cas_ulong(target, arg1, arg2); | |
409 | } | |
410 | ||
/* Legacy alias for atomic_cas_ptr(). */
void *casptr(void *target, void *arg1, void *arg2)
{
	return (atomic_cas_ptr(target, arg1, arg2));
}
415 | ||
416 | void atomic_and_long(ulong_t *target, ulong_t bits) | |
417 | { | |
418 | return atomic_and_ulong(target, bits); | |
419 | } | |
420 | ||
421 | void atomic_or_long(ulong_t *target, ulong_t bits) | |
422 | { | |
423 | return atomic_or_ulong(target, bits); | |
424 | } |